diff --git a/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/VRVideoTransformController.swift b/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/VRVideoTransformController.swift index d1bc200..eb1a4ee 100644 --- a/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/VRVideoTransformController.swift +++ b/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/VRVideoTransformController.swift @@ -592,7 +592,7 @@ extension VRVideoTransformController { print(progress) // DispatchQueue.main.async { [weak self] in if(progress > 0.99){ - DispatchQueue.main.asyncAfter(deadline: .now() + 2) { + DispatchQueue.main.asyncAfter(deadline: .now() + 20) { // 要执行的任务 self.exportVideo(url: outputVideoURL, outputURL: outputURL, width: width, height: height,codecType:codecType, dataRate: dataRate, horizontalDisparity: horizontalDisparity, horizontalFieldOfView: horizontalFieldOfView) { exportedAsset in DispatchQueue.main.async { diff --git a/SwiftProject/SwiftProject/Project/Util/SpatialVideoConvertor.swift b/SwiftProject/SwiftProject/Project/Util/SpatialVideoConvertor.swift index 803f177..44056fd 100644 --- a/SwiftProject/SwiftProject/Project/Util/SpatialVideoConvertor.swift +++ b/SwiftProject/SwiftProject/Project/Util/SpatialVideoConvertor.swift @@ -161,8 +161,8 @@ class SpatialVideoConvertor { let time = CMSampleBufferGetOutputPresentationTimeStamp(nextSampleBuffer) if vw == nil { - vw = VideoWriter(url: outputFile, width: Int(cwidth), height: Int(cheight), orientation: orientation, sessionStartTime: CMTime(value: 1, timescale: 30 ), isRealTime: false, queue: .main) -// vw = VideoWriter(url: outputFile, width: Int(cwidth), height: Int(cheight), orientation: orientation, sessionStartTime: CMTimeMake(value: 0, timescale: 1), isRealTime: false, queue: .main) +// vw = VideoWriter(url: outputFile, width: Int(cwidth), height: Int(cheight), orientation: orientation, sessionStartTime: CMTime(value: 1, timescale: 30 ), isRealTime: true, queue: .main) + vw = VideoWriter(url: outputFile, width: Int(cwidth), height: Int(cheight), orientation: orientation, sessionStartTime: CMTimeMake(value: 0, timescale: 1), isRealTime: true, queue: .main) } _ = vw!.add(image: newpb!, presentationTime: time) print( "Added frame at \(time)") @@ -176,17 +176,17 @@ class SpatialVideoConvertor { } -// let vw2 = vw! -// await vw!.addAudio(assetTrackOutput: output_audio){ -// _ = try! await vw2.finish() -// } + let vw2 = vw! + await vw!.addAudio(assetTrackOutput: output_audio){ + _ = try! await vw2.finish() + } print( "status - \(assetReader.status)") print( "status - \(assetReader.error?.localizedDescription ?? "None")") print( "Finished") - _ = try await vw!.finish() +// _ = try await vw!.finish() } diff --git a/SwiftProject/SwiftProject/Project/Util/VideoWriter.swift b/SwiftProject/SwiftProject/Project/Util/VideoWriter.swift index 6888a47..961b587 100644 --- a/SwiftProject/SwiftProject/Project/Util/VideoWriter.swift +++ b/SwiftProject/SwiftProject/Project/Util/VideoWriter.swift @@ -56,10 +56,10 @@ class VideoWriter { // 创建音频输入 // let outputSettings_Audio:[String:Any] = [ -// AVFormatIDKey:kAudioFormatLinearPCM, -// AVLinearPCMIsBigEndianKey:false, -// AVLinearPCMIsFloatKey:false, -// AVLinearPCMBitDepthKey:16, +// AVNumberOfChannelsKey: 2, +// AVSampleRateKey: 44100, +// AVFormatIDKey:kAudioFormatMPEG4AAC, +// AVEncoderAudioQualityKey: AVAudioQuality.min.rawValue // ] let outputSettings_Audio = [ @@ -122,10 +122,7 @@ class VideoWriter { else { print("不是audio类型...") } - try! await Task.sleep(nanoseconds: 200_000_000) } - -// } print("audio读取完毕。。。。") await callback() print("audio func 执行完毕。。。。...") @@ -168,7 +165,6 @@ class VideoWriter { func finish() async throws -> AVAsset? { print("开始调用finish...") -// return nil writerInput_Audio.markAsFinished() writerInput.markAsFinished() print("VideoWriter: calling writer.finishWriting()")