合成音频还存在些问题

This commit is contained in:
Author: bluesea — 2024-03-08 18:35:32 +08:00
parent df1b9de863
commit 55827d2fb0
3 changed files with 121 additions and 5 deletions

View File

@ -50,7 +50,7 @@ class SpatialVideoConvertor {
// //
let output = try await AVAssetReaderTrackOutput( let output_video = try await AVAssetReaderTrackOutput(
track: asset.loadTracks(withMediaType: .video).first!, track: asset.loadTracks(withMediaType: .video).first!,
outputSettings: [ outputSettings: [
AVVideoDecompressionPropertiesKey: [ AVVideoDecompressionPropertiesKey: [
@ -58,12 +58,49 @@ class SpatialVideoConvertor {
], ],
] ]
) )
assetReader.add(output) assetReader.add(output_video)
//
// let outputSettings_Audio:[String:Any] = [
// AVFormatIDKey:kAudioFormatLinearPCM,
// AVLinearPCMIsBigEndianKey:false,
// AVLinearPCMIsFloatKey:false,
// AVLinearPCMBitDepthKey:16
// ]
// let outputSettings_Audio = [
// AVFormatIDKey: kAudioFormatLinearPCM,
// AVSampleRateKey: 44100,
// AVNumberOfChannelsKey: 2,
//// AVEncoderBitRateKey: 128000,
// AVLinearPCMIsBigEndianKey:false,
// AVLinearPCMIsFloatKey:false,
// AVLinearPCMBitDepthKey:16
// ] as [String : Any]
let outputSettings_Audio = [
AVFormatIDKey: kAudioFormatLinearPCM, //
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2,
]
let output_audio = try await AVAssetReaderTrackOutput(
track: asset.loadTracks(withMediaType: .audio).first!,
outputSettings:outputSettings_Audio
)
assetReader.add(output_audio)
// let output_audio = AVAssetReaderAudioMixOutput(audioTracks: asset.tracks(withMediaType: .audio), audioSettings: nil)
// assetReader.add(output_audio)
assetReader.startReading() assetReader.startReading()
let duration = try await asset.load(.duration) let duration = try await asset.load(.duration)
while let nextSampleBuffer = output.copyNextSampleBuffer() {
while let nextSampleBuffer = output_video.copyNextSampleBuffer() {
guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { return } guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { return }
@ -131,15 +168,31 @@ class SpatialVideoConvertor {
} }
} }
while let nextAudioBuffer = output_audio.copyNextSampleBuffer() {
print("audio read buffer....")
vw!.addAudio(sample: nextAudioBuffer)
}
print( "status - \(assetReader.status)") print( "status - \(assetReader.status)")
print( "status - \(assetReader.error?.localizedDescription ?? "None")") print( "status - \(assetReader.error?.localizedDescription ?? "None")")
print( "Finished") print( "Finished")
_ = try await vw!.finish()
// DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
// DispatchQueue.main.sync {
// try await vw!.finish()
// }
// }
// _ = try await vw!.finish()
} }
//ciimage //ciimage

View File

@ -15,6 +15,7 @@ import CoreImage
class VideoWriter { class VideoWriter {
fileprivate var writer: AVAssetWriter fileprivate var writer: AVAssetWriter
fileprivate var writerInput: AVAssetWriterInput fileprivate var writerInput: AVAssetWriterInput
fileprivate var writerInput_Audio:AVAssetWriterInput
fileprivate var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor fileprivate var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor
fileprivate let queue: DispatchQueue fileprivate let queue: DispatchQueue
static var ciContext = CIContext.init() // we reuse a single context for performance reasons static var ciContext = CIContext.init() // we reuse a single context for performance reasons
@ -51,6 +52,38 @@ class VideoWriter {
let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor.init(assetWriterInput: input, sourcePixelBufferAttributes: sourceBufferAttributes) let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor.init(assetWriterInput: input, sourcePixelBufferAttributes: sourceBufferAttributes)
self.pixelBufferAdaptor = pixelBufferAdaptor self.pixelBufferAdaptor = pixelBufferAdaptor
//
// let outputSettings_Audio:[String:Any] = [
// AVFormatIDKey:kAudioFormatLinearPCM,
// AVLinearPCMIsBigEndianKey:false,
// AVLinearPCMIsFloatKey:false,
// AVLinearPCMBitDepthKey:16,
// ]
let outputSettings_Audio = [
AVFormatIDKey: kAudioFormatLinearPCM, //
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2,
AVLinearPCMIsBigEndianKey:true,
AVLinearPCMIsFloatKey:true,
AVLinearPCMBitDepthKey:32,
AVLinearPCMIsNonInterleaved:false,
] as [String:Any]
// self.writerInput_Audio = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: outputSettings_Audio)
// let outputSettings_Audio = AVOutputSettingsAssistant.init(preset: .preset1920x1080)?.audioSettings
self.writerInput_Audio = AVAssetWriterInput.init(mediaType: .audio, outputSettings: outputSettings_Audio, sourceFormatHint: nil)
self.writerInput_Audio.expectsMediaDataInRealTime = false
if writer.canAdd(self.writerInput_Audio) {
writer.add(self.writerInput_Audio)
print("writer 添加input audio成功...")
}
else {
print("writer 添加input audio失败...")
}
writer.add(input) writer.add(input)
writer.startWriting() writer.startWriting()
writer.startSession(atSourceTime: sessionStartTime) writer.startSession(atSourceTime: sessionStartTime)
@ -64,6 +97,37 @@ class VideoWriter {
self.writerInput = input self.writerInput = input
} }
//
/// Appends one audio sample buffer to the audio writer input.
///
/// - Parameter sample: a `CMSampleBuffer` expected to carry audio media;
///   buffers of any other media type (or with no format description) are skipped.
///
/// Note: `AVAssetWriterInput.requestMediaDataWhenReady(on:using:)` installs a
/// single pull-model callback and must NOT be re-registered once per sample —
/// doing so replaces the previous block, can drop samples, and races with the
/// writer. For this push-style API we instead apply back-pressure: wait until
/// the input reports it is ready, then append synchronously.
func addAudio(sample:CMSampleBuffer) {
    // Avoid force-unwrap: a buffer without a format description cannot be
    // classified, so treat it the same as a non-audio buffer and skip it.
    guard let formatDesc = CMSampleBufferGetFormatDescription(sample) else {
        print("不是audio类型...")
        return
    }
    let mediaType: CMMediaType = CMFormatDescriptionGetMediaType(formatDesc)
    guard mediaType == kCMMediaType_Audio else {
        print("不是audio类型...")
        return
    }
    // Back-pressure loop: this is offline (non-realtime) writing, so briefly
    // sleeping until the input drains is the conventional pattern.
    while self.writerInput_Audio.isReadyForMoreMediaData == false {
        print("auiod 追加还未准备好...")
        Thread.sleep(forTimeInterval: 0.01)
    }
    if self.writerInput_Audio.append(sample) {
        print("audio 追加成功....")
    } else {
        print("追加音频失败....")
    }
}
func add(image: CIImage, presentationTime: CMTime) -> Bool { func add(image: CIImage, presentationTime: CMTime) -> Bool {
if self.writerInput.isReadyForMoreMediaData == false { if self.writerInput.isReadyForMoreMediaData == false {
return false return false

View File

@ -48,7 +48,6 @@ extension CVPixelBuffer {
CVPixelBufferLockBaseAddress(self, []) CVPixelBufferLockBaseAddress(self, [])
VideoWriter.ciContext.render(image, to: self) VideoWriter.ciContext.render(image, to: self)
// VideoWriter.ciContext.render(image, to: self, bounds: CGRectMake(0, 0, 2200, 1600), colorSpace: nil)
CVPixelBufferUnlockBaseAddress(self, []) CVPixelBufferUnlockBaseAddress(self, [])
} }