VPCamera/SwiftProject/SwiftProject/Project/Util/VideoWriter.swift
2024-03-08 18:35:32 +08:00

182 lines
6.9 KiB
Swift

//
// VideoWriter.swift
// SpacialVideoConvertor
//
// Created by Andy Qua on 04/01/2024.
//
// Based on code from xaphod/VideoWriter.swift - https://gist.github.com/xaphod/de83379cc982108a5b38115957a247f9
//
import Foundation
import AVFoundation
import CoreImage
/// Muxes video frames (CIImage / CVPixelBuffer / CMSampleBuffer) and audio
/// sample buffers into a single H.264 .mp4 file via AVAssetWriter.
///
/// Usage: create with `init?`, feed frames with the `add*` methods and audio
/// with `addAudio(sample:)`, then call `finish()` to close the file and get
/// the resulting asset back.
class VideoWriter {
    fileprivate var writer: AVAssetWriter
    fileprivate var writerInput: AVAssetWriterInput
    fileprivate var writerInput_Audio: AVAssetWriterInput
    fileprivate var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor
    fileprivate let queue: DispatchQueue
    static var ciContext = CIContext.init() // we reuse a single context for performance reasons
    let pixelSize: CGSize
    // Presentation time of the most recently appended video frame, if any.
    var lastPresentationTime: CMTime?

    /// Creates a writer that encodes H.264 video (and 32-bit float LPCM stereo
    /// audio, when samples are supplied) into an .mp4 at `url`, and immediately
    /// starts the writing session at `sessionStartTime`.
    ///
    /// - Parameters:
    ///   - url: Destination file URL for the .mp4.
    ///   - width: Output video width in pixels.
    ///   - height: Output video height in pixels.
    ///   - orientation: Transform applied to the video track (e.g. rotation).
    ///   - sessionStartTime: Source time the session starts at; must not be `.invalid`.
    ///   - isRealTime: Whether the *video* input expects real-time media data.
    ///   - queue: Retained for callers' use; not used internally by this class.
    /// - Returns: nil if the asset writer can't be created, the video input
    ///   can't be added, `sessionStartTime` is invalid, or starting the
    ///   session reports an error.
    init?(url: URL, width: Int, height: Int, orientation: CGAffineTransform, sessionStartTime: CMTime, isRealTime: Bool, queue: DispatchQueue) {
        print("VideoWriter init: width=\(width) height=\(height), url=\(url)")
        self.queue = queue
        let outputSettings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecType.h264, // or .hevc if you like
            AVVideoWidthKey: width,
            AVVideoHeightKey: height,
        ]
        self.pixelSize = CGSize.init(width: width, height: height)
        let input = AVAssetWriterInput.init(mediaType: .video, outputSettings: outputSettings)
        input.expectsMediaDataInRealTime = isRealTime
        input.transform = orientation
        guard
            let writer = try? AVAssetWriter.init(url: url, fileType: .mp4),
            writer.canAdd(input),
            sessionStartTime != .invalid
        else {
            return nil
        }
        let sourceBufferAttributes: [String: Any] = [
            String(kCVPixelBufferPixelFormatTypeKey): kCVPixelFormatType_32ARGB, // yes, ARGB is right here for images...
            String(kCVPixelBufferWidthKey): width,
            String(kCVPixelBufferHeightKey): height,
        ]
        let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor.init(assetWriterInput: input, sourcePixelBufferAttributes: sourceBufferAttributes)
        self.pixelBufferAdaptor = pixelBufferAdaptor
        // Audio: 44.1 kHz stereo, 32-bit float interleaved LPCM.
        let outputSettings_Audio = [
            AVFormatIDKey: kAudioFormatLinearPCM,
            AVSampleRateKey: 44100,
            AVNumberOfChannelsKey: 2,
            AVLinearPCMIsBigEndianKey: true,
            AVLinearPCMIsFloatKey: true,
            AVLinearPCMBitDepthKey: 32,
            AVLinearPCMIsNonInterleaved: false,
        ] as [String: Any]
        self.writerInput_Audio = AVAssetWriterInput.init(mediaType: .audio, outputSettings: outputSettings_Audio, sourceFormatHint: nil)
        // NOTE(review): audio intentionally ignores `isRealTime` and is always
        // non-real-time here — confirm this matches the capture pipeline.
        self.writerInput_Audio.expectsMediaDataInRealTime = false
        if writer.canAdd(self.writerInput_Audio) {
            writer.add(self.writerInput_Audio)
            print("writer 添加input audio成功...")
        }
        else {
            print("writer 添加input audio失败...")
        }
        writer.add(input)
        writer.startWriting()
        writer.startSession(atSourceTime: sessionStartTime)
        if let error = writer.error {
            NSLog("VideoWriter init: ERROR - \(error)")
            return nil
        }
        self.writer = writer
        self.writerInput = input
    }

    /// Appends an audio sample buffer to the audio track.
    ///
    /// Non-audio buffers are ignored. Buffers that arrive while the input is
    /// not ready are dropped (mirroring the video `add*` methods, which report
    /// failure rather than queueing).
    func addAudio(sample: CMSampleBuffer) {
        // Bug fix: the original force-unwrapped the format description and
        // would crash on a buffer that has none.
        guard let formatDesc: CMFormatDescription = CMSampleBufferGetFormatDescription(sample) else {
            print("不是audio类型...")
            return
        }
        let mediaType: CMMediaType = CMFormatDescriptionGetMediaType(formatDesc)
        if mediaType == kCMMediaType_Audio {
            if self.writerInput_Audio.isReadyForMoreMediaData {
                if self.writerInput_Audio.append(sample) == false {
                    print("追加音频失败....")
                }
                else {
                    print("audio 追加成功....")
                }
            }
            else {
                // Bug fix: the original registered requestMediaDataWhenReady(on:)
                // with a closure appending the captured `sample`. AVFoundation
                // invokes that closure REPEATEDLY whenever the input wants data,
                // so the same buffer was appended many times (duplicated audio)
                // and the handler was never removed. Drop the sample instead.
                print("auiod 追加还未准备好...")
            }
        }
        else {
            print("不是audio类型...")
        }
    }

    /// Renders `image` into a pixel buffer and appends it at `presentationTime`.
    /// - Returns: true on success; false if the input wasn't ready or the append failed.
    func add(image: CIImage, presentationTime: CMTime) -> Bool {
        if self.writerInput.isReadyForMoreMediaData == false {
            return false
        }
        // appendPixelBufferForImage is a project extension on the adaptor.
        if self.pixelBufferAdaptor.appendPixelBufferForImage(image, presentationTime: presentationTime) {
            self.lastPresentationTime = presentationTime
            return true
        }
        return false
    }

    /// Appends an already-prepared pixel buffer at `presentationTime`.
    /// - Returns: true on success; false if the input wasn't ready or the append failed.
    func add(buffer: CVPixelBuffer, presentationTime: CMTime) -> Bool {
        if self.writerInput.isReadyForMoreMediaData == false {
            return false
        }
        if self.pixelBufferAdaptor.append(buffer, withPresentationTime: presentationTime) {
            self.lastPresentationTime = presentationTime
            return true
        }
        return false
    }

    /// Appends a video sample buffer (timing comes from the buffer itself).
    /// - Returns: true on success; false if the input wasn't ready or the append failed.
    func add(sampleBuffer: CMSampleBuffer) -> Bool {
        if self.writerInput.isReadyForMoreMediaData == false {
            print("VideoWriter: not ready for more data")
            return false
        }
        if self.writerInput.append(sampleBuffer) {
            self.lastPresentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            return true
        }
        return false
    }

    /// Marks all inputs finished, completes the file, and returns the written asset.
    /// - Returns: the finished asset, or nil if writing did not complete.
    /// - Throws: rethrows errors from loading the asset's duration.
    func finish() async throws -> AVAsset? {
        writerInput.markAsFinished()
        // Bug fix: the audio input must also be marked finished, otherwise
        // finishWriting() can stall or truncate the audio track.
        writerInput_Audio.markAsFinished()
        print("VideoWriter: calling writer.finishWriting()")
        await writer.finishWriting()
        if self.writer.status != .completed {
            print("VideoWriter finish: error in finishWriting - \(self.writer.error?.localizedDescription ?? "Unknown")")
            return nil
        }
        let asset = AVURLAsset.init(url: self.writer.outputURL, options: [AVURLAssetPreferPreciseDurationAndTimingKey: true])
        let duration = try await CMTimeGetSeconds(asset.load(.duration))
        // can check for minimum duration here (ie. consider a failure if too short)
        print("VideoWriter: finishWriting() complete, duration=\(duration)")
        return asset
    }
}