录制视频,需要解决一些问题

This commit is contained in:
bluesea 2024-05-24 15:47:53 +08:00
parent 7821f7be4d
commit 22c31d4a7e
10 changed files with 510 additions and 219 deletions

View File

@ -23,6 +23,10 @@
00733EA92BFB462500D53BA8 /* CCSpatialShootController+SessionConfigure.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00733EA82BFB462500D53BA8 /* CCSpatialShootController+SessionConfigure.swift */; };
00733EAB2BFB471100D53BA8 /* CCSpatialShootController+CaputreAction.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00733EAA2BFB471100D53BA8 /* CCSpatialShootController+CaputreAction.swift */; };
00733EAD2BFB47AE00D53BA8 /* CCSpatialShootController+Generate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00733EAC2BFB47AE00D53BA8 /* CCSpatialShootController+Generate.swift */; };
00733ECB2BFDD32300D53BA8 /* SpatialVideoDataWriter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00733ECA2BFDD32300D53BA8 /* SpatialVideoDataWriter.swift */; };
00733ECD2BFDDDE100D53BA8 /* CCSpatialShootController+GenerateImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00733ECC2BFDDDE100D53BA8 /* CCSpatialShootController+GenerateImage.swift */; };
00733ECF2BFDDE2300D53BA8 /* CCSpatialShootController+GenerateVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00733ECE2BFDDE2300D53BA8 /* CCSpatialShootController+GenerateVideo.swift */; };
00733EDF2BFF553100D53BA8 /* SVDWStack.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00733EDE2BFF553100D53BA8 /* SVDWStack.swift */; };
0073BD142BCE80F700721885 /* ZZHCustomPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0073BD132BCE80F700721885 /* ZZHCustomPlayer.swift */; };
0073BD182BCF7B3400721885 /* ZZHCustomSlider.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0073BD172BCF7B3400721885 /* ZZHCustomSlider.swift */; };
0073BD1A2BCFC8E800721885 /* ZZHCustomPlayerForVideoTask.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0073BD192BCFC8E800721885 /* ZZHCustomPlayerForVideoTask.swift */; };
@ -141,6 +145,10 @@
00733EA82BFB462500D53BA8 /* CCSpatialShootController+SessionConfigure.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CCSpatialShootController+SessionConfigure.swift"; sourceTree = "<group>"; };
00733EAA2BFB471100D53BA8 /* CCSpatialShootController+CaputreAction.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CCSpatialShootController+CaputreAction.swift"; sourceTree = "<group>"; };
00733EAC2BFB47AE00D53BA8 /* CCSpatialShootController+Generate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CCSpatialShootController+Generate.swift"; sourceTree = "<group>"; };
00733ECA2BFDD32300D53BA8 /* SpatialVideoDataWriter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpatialVideoDataWriter.swift; sourceTree = "<group>"; };
00733ECC2BFDDDE100D53BA8 /* CCSpatialShootController+GenerateImage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CCSpatialShootController+GenerateImage.swift"; sourceTree = "<group>"; };
00733ECE2BFDDE2300D53BA8 /* CCSpatialShootController+GenerateVideo.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CCSpatialShootController+GenerateVideo.swift"; sourceTree = "<group>"; };
00733EDE2BFF553100D53BA8 /* SVDWStack.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SVDWStack.swift; sourceTree = "<group>"; };
0073BD132BCE80F700721885 /* ZZHCustomPlayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHCustomPlayer.swift; sourceTree = "<group>"; };
0073BD172BCF7B3400721885 /* ZZHCustomSlider.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHCustomSlider.swift; sourceTree = "<group>"; };
0073BD192BCFC8E800721885 /* ZZHCustomPlayerForVideoTask.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHCustomPlayerForVideoTask.swift; sourceTree = "<group>"; };
@ -322,6 +330,8 @@
00733EA82BFB462500D53BA8 /* CCSpatialShootController+SessionConfigure.swift */,
00733EAA2BFB471100D53BA8 /* CCSpatialShootController+CaputreAction.swift */,
00733EAC2BFB47AE00D53BA8 /* CCSpatialShootController+Generate.swift */,
00733ECE2BFDDE2300D53BA8 /* CCSpatialShootController+GenerateVideo.swift */,
00733ECC2BFDDDE100D53BA8 /* CCSpatialShootController+GenerateImage.swift */,
);
path = CCSpatialShootController;
sourceTree = "<group>";
@ -414,6 +424,8 @@
1E1EA28F2B933C8200A5D5D2 /* VideoWriter.swift */,
00D33BF92B9AB21A00604A44 /* ZZHAVExtension.swift */,
1EE5C5F92B8F97BF00EDFC2F /* SpatialVideoWriter.swift */,
00733ECA2BFDD32300D53BA8 /* SpatialVideoDataWriter.swift */,
00733EDE2BFF553100D53BA8 /* SVDWStack.swift */,
005580772B9F1525004B9567 /* ZZHHelper.swift */,
00ED6B332BA04AC200915BDE /* PlayByTransferConvertor.swift */,
00374AE02BC92B7C00F1F20F /* ZNetUtil.swift */,
@ -845,6 +857,7 @@
00B946252B67B7DE00DA668F /* CCSpatialPlayView.swift in Sources */,
0096624A2BB3B45200FCA65F /* ExternalSceneDelegate.swift in Sources */,
AFD9F5932B58C34A008716DE /* ImageProcessingShaders.metal in Sources */,
00733ECF2BFDDE2300D53BA8 /* CCSpatialShootController+GenerateVideo.swift in Sources */,
AF2120F02B4EA39D00400B7F /* BaseTableViewGroupedController.swift in Sources */,
1E1EA2962B936C9600A5D5D2 /* VideoConvertor2.swift in Sources */,
AF2120CA2B4E95DA00400B7F /* UITableView+Add.swift in Sources */,
@ -852,6 +865,7 @@
009DFB0E2BC8CFA2007B56E8 /* FeedbackView.swift in Sources */,
00BD87862BDE595F0014E8B3 /* CCSpatialPhotoDisplayEx.swift in Sources */,
AF2120C42B4E95DA00400B7F /* UIImage+Add.swift in Sources */,
00733ECD2BFDDDE100D53BA8 /* CCSpatialShootController+GenerateImage.swift in Sources */,
1EFAF0C02B8B7A59002A1773 /* VRPhotoTransformController.swift in Sources */,
AF2120D82B4E9AC500400B7F /* CCAddImageView.swift in Sources */,
00D33BF42B998BF700604A44 /* SpatialImageConvertor.swift in Sources */,
@ -870,6 +884,7 @@
AF2121092B4EA7E200400B7F /* CCRequestDefine.swift in Sources */,
005580782B9F1525004B9567 /* ZZHHelper.swift in Sources */,
AF2120C32B4E95DA00400B7F /* NSObject+Add.swift in Sources */,
00733EDF2BFF553100D53BA8 /* SVDWStack.swift in Sources */,
1EE5C5F72B8F973A00EDFC2F /* CCSpatialShootController.swift in Sources */,
006B61DE2BBCFB45003FCB49 /* CustomSheetCell.swift in Sources */,
0073BD142BCE80F700721885 /* ZZHCustomPlayer.swift in Sources */,
@ -915,6 +930,7 @@
1E02C9322B8990C600DD3143 /* CCDeviceOperationListView.swift in Sources */,
00BD87972BE10B800014E8B3 /* DisplayLinkProxy.swift in Sources */,
AF2120DA2B4E9BD400400B7F /* CCAlert.swift in Sources */,
00733ECB2BFDD32300D53BA8 /* SpatialVideoDataWriter.swift in Sources */,
006B61D12BBA5DB4003FCB49 /* MembershipProductView.swift in Sources */,
1EFB8C702B88DA4800C72119 /* CCBottomMenuCell.swift in Sources */,
AF2120FA2B4EA5BD00400B7F /* CCHomeController.swift in Sources */,

View File

@ -505,22 +505,6 @@
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "E9B8F98E-43F6-47FD-A113-118A931EACBE"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "SwiftProject/Project/Controller/RecordingVideo/CCSpatialShootController/CCSpatialShootController+Generate.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "194"
endingLineNumber = "194"
landmarkName = "compositeSpatialPhoto()"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent

View File

@ -56,6 +56,30 @@ extension CCSpatialShootController {
//
@objc func shutterVideoButtonAction(_ sender: UIButton){
if !isRecordingVideoData {//
writeVideoQueuen.async {[weak self] in//
if let weakSelf = self {
weakSelf.removeExistingFile(at: weakSelf.outputVideoURL)
weakSelf.isRecordingVideoData = true
}
}
}
else {//
writeVideoQueuen.async {[weak self] in//
if let weakSelf = self {
weakSelf.isRecordingVideoData = false
weakSelf.svdWriter.writeVideoDataDeInit {result, err in
//
weakSelf.saveVideoToLibrary(videoURL: weakSelf.outputVideoURL)
}
}
}
}
if !self.isRecording {
//
print("录像中...")

View File

@ -57,32 +57,39 @@ extension CCSpatialShootController: AVCaptureAudioDataOutputSampleBufferDelegate
}
else if shootingMode == .CCShootingMode_Video {
if output == wideAngleCameraVideoDataOutput {//广
// leftEyeVideoURL = outputFileURL
}
else if output == ultraWideCameraVideoDataOutput {//广
// print("ultra Wide video recorded: \(outputFileURL)")
// rightEyeVideoURL = outputFileURL
}
else if output == wuCameraAudioDataOutput {//
if isRecordingVideoData {
if output == wideAngleCameraVideoDataOutput {//广
writeVideoQueuen.async {[weak self] in//
self?.svdWriter.exAddLeftSampleBuffer(sampleBuffer)
}
}
else if output == ultraWideCameraVideoDataOutput {//广
writeVideoQueuen.async {[weak self] in//
self?.svdWriter.exAddRightSampleBuffer(sampleBuffer)
}
}
else if output == wuCameraAudioDataOutput {//
writeVideoQueuen.async {[weak self] in//
self?.svdWriter.exAddAudioSampleBuffer(sampleBuffer)
}
}
}
// if let leftEyeVideoURL,
// let rightEyeVideoURL {
// createSpVideo()
// }
}
}
//
func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
print("录制视频掉帧了...")
}
//samplebufferuiimage
func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer,orientation:AVCaptureVideoOrientation ) -> UIImage? {
guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
@ -109,195 +116,44 @@ extension CCSpatialShootController: AVCaptureAudioDataOutputSampleBufferDelegate
//MARK:
func createSpVideo(){
//
// if(rightEyeVideoURL != nil && leftEyeVideoURL != nil){
//
// Task {
// spatialVideoWriter.writeSpatialVideo(leftEyeVideoURL: leftEyeVideoURL!, rightEyeVideoURL: rightEyeVideoURL!, outputVideoURL: outputVideoURL!) {[weak self] success, error in
// DispatchQueue.main.async {
// SVProgressHUD.dismiss()
// print("SVProgressHUD.dismiss..2222.....")
// }
// if success {
// print("")
// if let ovrul = self?.outputVideoURL{
// self?.saveVideoToLibrary(videoURL:ovrul)
// }
//
// } else if let error = error {
// print("......error\(error)")
//
// DispatchQueue.main.async {
// SVProgressHUD.showInfo(withStatus: "\(NSLocalizedString("", comment: "")):\(error.localizedDescription)")
// }
// }
// else {
// print("not success......")
// }
// }
// }
// }
}
private func saveVideoToLibrary(videoURL: URL) {
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: videoURL)
}) {[weak self] success, error in
if success {
print("保存成功")
self?.getAlbumFirstPhoto()
} else if let error = error {
print("保存失败")
}
}
}
//
func compositeSpatialPhoto(){
let img1:UIImage = imgs[0] as! UIImage
let img2:UIImage = imgs[1] as! UIImage
let url = URL.documentsDirectory.appending(path:"aaa12.HEIC")
let destination = CGImageDestinationCreateWithURL(url as CFURL, UTType.heic.identifier as CFString, 2, nil)!
var oo = imageCGImagePropertyOrientation.rawValue
// let orientation_cf = CFNumberCreate(nil, CFNumberType.intType, &oo)
let properties1 = [
kCGImagePropertyGroups: [
kCGImagePropertyGroupIndex: 0,
kCGImagePropertyGroupType: kCGImagePropertyGroupTypeStereoPair,
kCGImagePropertyGroupImageIndexLeft: 0,
kCGImagePropertyGroupImageIndexRight: 1,
],
// kCGImagePropertyTIFFDictionary:[
// kCGImagePropertyOrientation:orientation_cf,
// ],
//
// kCGImagePropertyOrientation:orientation_cf as Any,
kCGImagePropertyHEIFDictionary: [
kIIOMetadata_CameraExtrinsicsKey: [
kIIOCameraExtrinsics_CoordinateSystemID: 0,
kIIOCameraExtrinsics_Position: [
0,
0,
0
],
kIIOCameraExtrinsics_Rotation: [
1, 0, 0,
0, 1, 0,
0, 0, 1
]
]
]
] as [CFString : Any]
let properties2 = [
kCGImagePropertyGroups: [
kCGImagePropertyGroupIndex: 0,
kCGImagePropertyGroupType: kCGImagePropertyGroupTypeStereoPair,
kCGImagePropertyGroupImageIndexLeft: 0,
kCGImagePropertyGroupImageIndexRight: 1,
],
// kCGImagePropertyTIFFDictionary:[
// kCGImagePropertyOrientation:orientation_cf,
// ],
// kCGImagePropertyOrientation:orientation_cf as Any,
kCGImagePropertyHEIFDictionary: [
kIIOMetadata_CameraExtrinsicsKey: [
kIIOCameraExtrinsics_CoordinateSystemID: 0,
kIIOCameraExtrinsics_Position: [
-0.019238,
0,
0
],
kIIOCameraExtrinsics_Rotation: [
1, 0, 0,
0, 1, 0,
0, 0, 1
]
]
]
] as [CFString : Any]
let leftImg = img1//fixOrientation(img1)
let rightImg = img2//fixOrientation(img2)
let p_dic1:CFDictionary = properties1 as CFDictionary
CGImageDestinationAddImage(destination, leftImg.cgImage!,p_dic1)
let p_dic2:CFDictionary = properties2 as CFDictionary
CGImageDestinationAddImage(destination, rightImg.cgImage!, p_dic2)
let rr = CGImageDestinationFinalize(destination)
if rr == false {
print("ee..")
}
savePhoto(url)
//
/// Deletes any file already present at `outputVideoURL` so a new recording
/// can be written to the same path. A missing file is not an error.
func removeExistingFile(at outputVideoURL: URL) {
    let manager = FileManager.default
    let path = outputVideoURL.path
    // Nothing to do when no file exists at the target path.
    guard manager.fileExists(atPath: path) else { return }
    do {
        try manager.removeItem(atPath: path)
        print("视频文件删除成功")
    } catch {
        print("删除视频文件出错:\(error)")
    }
}
//
func savePhoto(_ fileURL: URL) {
// PHAssetCreationRequest
PHPhotoLibrary.shared().performChanges({
let creationRequest = PHAssetCreationRequest.forAsset()
creationRequest.addResource(with: .photoProxy, fileURL: fileURL, options: nil)
}) {[weak self] success, error in
DispatchQueue.main.async {
SVProgressHUD.dismiss()
print("SVProgressHUD.dismiss..1111.....")
}
if let error = error {
print("Error saving photo to library: \(error.localizedDescription)")
DispatchQueue.main.async {
SVProgressHUD.showInfo(withStatus: "\(NSLocalizedString("空间图片保存失败", comment: "")): \(error.localizedDescription)")
}
} else {
print("Photo saved to library successfully.")
DispatchQueue.main.async {
self?.getAlbumFirstPhoto()
}
}
}
}
}

View File

@ -0,0 +1,137 @@
//
// CCSpatialShootController+GenerateImage.swift
// SwiftProject
//
// Created by aaa on 2024/5/22.
//
import Foundation
import AVFoundation
import Photos
import AVKit
import VideoToolbox
import SVProgressHUD
import Firebase
import CoreMotion
extension CCSpatialShootController {
    /// Composites the two captured eye images in `imgs` into a single
    /// stereo-pair ("spatial") HEIC photo and saves it to the photo library.
    /// Image index 0 in the destination is the left eye, index 1 the right;
    /// the right eye's extrinsic x-position encodes the stereo baseline.
    // assumes imgs[0] is the left-eye frame and imgs[1] the right-eye frame — TODO confirm with capture code
    func compositeSpatialPhoto(){
        // Validate inputs up front instead of force-casting / force-unwrapping.
        guard imgs.count >= 2,
              let img1 = imgs[0] as? UIImage,
              let img2 = imgs[1] as? UIImage,
              let leftCGImage = img1.cgImage,
              let rightCGImage = img2.cgImage else {
            print("compositeSpatialPhoto: missing or invalid left/right source images")
            DispatchQueue.main.async {
                SVProgressHUD.dismiss()
            }
            return
        }
        let url = URL.documentsDirectory.appending(path:"aaa12.HEIC")
        // Destination holds exactly 2 images: index 0 = left eye, index 1 = right eye.
        guard let destination = CGImageDestinationCreateWithURL(url as CFURL, UTType.heic.identifier as CFString, 2, nil) else {
            print("compositeSpatialPhoto: failed to create HEIC image destination")
            DispatchQueue.main.async {
                SVProgressHUD.dismiss()
            }
            return
        }
        // NOTE(review): the original experimental kCGImagePropertyOrientation
        // embedding was disabled; orientation metadata is intentionally not written.
        // Left eye: stereo-pair group metadata plus camera extrinsics at the
        // coordinate-system origin with identity rotation.
        let properties1 = [
            kCGImagePropertyGroups: [
                kCGImagePropertyGroupIndex: 0,
                kCGImagePropertyGroupType: kCGImagePropertyGroupTypeStereoPair,
                kCGImagePropertyGroupImageIndexLeft: 0,
                kCGImagePropertyGroupImageIndexRight: 1,
            ],
            kCGImagePropertyHEIFDictionary: [
                kIIOMetadata_CameraExtrinsicsKey: [
                    kIIOCameraExtrinsics_CoordinateSystemID: 0,
                    kIIOCameraExtrinsics_Position: [
                        0,
                        0,
                        0
                    ],
                    kIIOCameraExtrinsics_Rotation: [
                        1, 0, 0,
                        0, 1, 0,
                        0, 0, 1
                    ]
                ]
            ]
        ] as [CFString : Any]
        // Right eye: same group metadata; position offset along x is the
        // stereo baseline — presumably meters, negative = left of eye 0 — TODO confirm units/sign
        let properties2 = [
            kCGImagePropertyGroups: [
                kCGImagePropertyGroupIndex: 0,
                kCGImagePropertyGroupType: kCGImagePropertyGroupTypeStereoPair,
                kCGImagePropertyGroupImageIndexLeft: 0,
                kCGImagePropertyGroupImageIndexRight: 1,
            ],
            kCGImagePropertyHEIFDictionary: [
                kIIOMetadata_CameraExtrinsicsKey: [
                    kIIOCameraExtrinsics_CoordinateSystemID: 0,
                    kIIOCameraExtrinsics_Position: [
                        -0.019238,
                        0,
                        0
                    ],
                    kIIOCameraExtrinsics_Rotation: [
                        1, 0, 0,
                        0, 1, 0,
                        0, 0, 1
                    ]
                ]
            ]
        ] as [CFString : Any]
        CGImageDestinationAddImage(destination, leftCGImage, properties1 as CFDictionary)
        CGImageDestinationAddImage(destination, rightCGImage, properties2 as CFDictionary)
        // Only hand the file to the photo library if finalize actually wrote it.
        guard CGImageDestinationFinalize(destination) else {
            print("compositeSpatialPhoto: CGImageDestinationFinalize failed")
            DispatchQueue.main.async {
                SVProgressHUD.dismiss()
            }
            return
        }
        savePhoto(url)
    }
    /// Saves the finalized HEIC at `fileURL` into the photo library, dismisses
    /// the progress HUD, and refreshes the first-photo thumbnail on success.
    func savePhoto(_ fileURL: URL) {
        // PHAssetCreationRequest: add the file as a photo-proxy resource.
        PHPhotoLibrary.shared().performChanges({
            let creationRequest = PHAssetCreationRequest.forAsset()
            creationRequest.addResource(with: .photoProxy, fileURL: fileURL, options: nil)
        }) {[weak self] success, error in
            // Completion runs on an arbitrary queue; all UI work hops to main.
            DispatchQueue.main.async {
                SVProgressHUD.dismiss()
                print("SVProgressHUD.dismiss..1111.....")
            }
            if let error = error {
                print("Error saving photo to library: \(error.localizedDescription)")
                DispatchQueue.main.async {
                    SVProgressHUD.showInfo(withStatus: "\(NSLocalizedString("空间图片保存失败", comment: "")): \(error.localizedDescription)")
                }
            } else {
                print("Photo saved to library successfully.")
                DispatchQueue.main.async {
                    self?.getAlbumFirstPhoto()
                }
            }
        }
    }
}

View File

@ -0,0 +1,35 @@
//
// CCSpatialShootController+GenerateVideo.swift
// SwiftProject
//
// Created by aaa on 2024/5/22.
//
import Foundation
import Photos
extension CCSpatialShootController {
    //MARK: - Save recorded spatial video
    // Legacy direct-write path, kept for reference:
    // func createSpVideo(vd:ZVideoData){
    //     svdWriter.writeVideoData(leftSamplebuffer: vd.leftSampleBuffer!, rightSamplebuffer: vd.rightSampleBuffer!, audioSamplebuffer: vd.audioSampleBuffer!)
    // }
    /// Saves the finished movie file at `videoURL` into the user's photo
    /// library, then refreshes the first-photo thumbnail on success.
    func saveVideoToLibrary(videoURL: URL) {
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: videoURL)
        }) {[weak self] success, error in
            // Completion runs on an arbitrary queue.
            if success {
                print("保存成功")
                // getAlbumFirstPhoto updates UI elsewhere in this controller
                // (see savePhoto), so hop to the main queue first.
                DispatchQueue.main.async {
                    self?.getAlbumFirstPhoto()
                }
            } else if let error = error {
                // Surface the underlying error instead of swallowing it.
                print("保存失败:\(error.localizedDescription)")
            }
        }
    }
}

View File

@ -13,8 +13,25 @@ import VideoToolbox
import SVProgressHUD
import Firebase
import CoreMotion
//class ZVideoData:NSObject {
// var leftSampleBuffer:CMSampleBuffer?
// var rightSampleBuffer:CMSampleBuffer?
// var audioSampleBuffer:CMSampleBuffer?
// var isReady:Bool {
// get {
// return leftSampleBuffer != nil && rightSampleBuffer != nil && audioSampleBuffer != nil
// }
// }
//
// override init() {
// super.init()
// }
//}
class CCSpatialShootController: BaseController {
let writeVideoQueuen = dispatch_queue_serial_t(label: "ccspatial writeVideoQueuen")//
let kNowTimeToUserDefaultKey_SpatialShootController:String = "kNowTimeToUserDefaultKey_SpatialShootController"
var wideAngleCameraDevice:AVCaptureDevice?//使
@ -22,6 +39,14 @@ class CCSpatialShootController: BaseController {
var isTakePhoto_ultraCamera = false
var isTakePhoto_wideCamera = false
let takePhotoSemaphore = DispatchSemaphore(value: 1)
var isRecordingVideoData = false //
// var videoData:ZVideoData = ZVideoData()
lazy var svdWriter:SpatialVideoDataWriter = {
let writer = SpatialVideoDataWriter()
writer.outputVideoURL = outputVideoURL
return writer
}()
//AVCaptureSession ---
var session = AVCaptureMultiCamSession()//
@ -47,10 +72,10 @@ class CCSpatialShootController: BaseController {
var leftEyeVideoURL:URL?
var rightEyeVideoURL:URL?
var outputVideoURL: URL?
let outputVideoURL: URL = URL.documentsDirectory.appendingPathComponent("output.MOV")
let motionManager = CMMotionManager()
var imgs:NSMutableArray = NSMutableArray() //广
let spatialVideoWriter = SpatialVideoWriter()
// let spatialVideoWriter = SpatialVideoWriter()
//================================
//
@ -244,7 +269,6 @@ class CCSpatialShootController: BaseController {
super.viewDidLoad()
ZZHHelper.setNowTimeToUserDefaultWithKey(kNowTimeToUserDefaultKey_SpatialShootController)
outputVideoURL = URL.documentsDirectory.appendingPathComponent("output.MOV")
configureSession() //
setUI()

View File

@ -0,0 +1,31 @@
//
// SVDWStack.swift
// SwiftProject
//
// Created by aaa on 2024/5/23.
//
import Foundation
/// A minimal last-in-first-out stack backed by an array.
struct SVDWStack<Element: Equatable> {
    /// Backing storage; the array's last element is the stack's top.
    private var elements: [Element] = []

    /// Pushes `element` onto the top of the stack.
    mutating func push(_ element: Element) {
        elements.append(element)
    }

    /// Removes and returns the top element, or `nil` when the stack is empty.
    mutating func pop() -> Element? {
        elements.popLast()
    }

    /// Returns the top element without removing it, or `nil` when empty.
    func top() -> Element? {
        elements.last
    }

    /// Whether the stack currently holds no elements.
    func isEmpty() -> Bool {
        elements.isEmpty
    }

    /// Discards every element, leaving the stack empty.
    mutating func clear() {
        elements.removeAll()
    }
}

View File

@ -0,0 +1,184 @@
//
// SpatialVideoWriter.swift
// tdvideo
//
// Created by mac on 2024/2/22.
//
import UIKit
import AVFoundation
import VideoToolbox
import Photos
/// Incrementally writes an MV-HEVC "spatial video" — left/right eye frames
/// appended as tagged pixel-buffer groups plus a PCM audio track — into a
/// QuickTime movie at `outputVideoURL` via AVAssetWriter.
///
/// Feed samples through exAddLeftSampleBuffer / exAddRightSampleBuffer /
/// exAddAudioSampleBuffer; the writer lazily initializes itself from the
/// first video frame and muxes one (left, right, audio) triplet whenever all
/// three pending stacks are non-empty. Call writeVideoDataDeInit to finish.
/// NOTE(review): not synchronized internally — the calling controller funnels
/// all calls through a serial queue (writeVideoQueuen); confirm no other path
/// touches this object.
class SpatialVideoDataWriter {
    // Created lazily by writeVideoDataInit; nil again after deinit. Drives isReady.
    var assetWriter:AVAssetWriter!
    // Output settings built in writeVideoDataInit (HEVC + MV-HEVC layer IDs).
    var videoSettings: [String: Any]!
    // MV-HEVC video input (two layers: left = layer 0, right = layer 1).
    var input_video:AVAssetWriterInput!
    // Uncompressed stereo 32-bit float interleaved PCM settings for the audio track.
    let inputSettings_Audio = [
        AVFormatIDKey: kAudioFormatLinearPCM, // linear PCM, no encoding
        AVSampleRateKey: 44100,
        AVNumberOfChannelsKey: 2,
        AVLinearPCMIsBigEndianKey:true,
        AVLinearPCMIsFloatKey:true,
        AVLinearPCMBitDepthKey:32,
        AVLinearPCMIsNonInterleaved:false,
    ] as [String:Any]
    // Single audio input (name's "_left" suffix is historical).
    var writerInput_Audio_left:AVAssetWriterInput!
    // Adaptor that appends left/right pixel buffers as one tagged group per frame.
    var adaptor_inputVideo:AVAssetWriterInputTaggedPixelBufferGroupAdaptor!
    // Ready once the asset writer has been created from the first video frame.
    var isReady:Bool {
        get {
            return assetWriter != nil
        }
    }
    // Set after the first writeVideoData call starts the session at that
    // frame's timestamp. NOTE(review): writeVideoDataInit already calls
    // startSession(atSourceTime: .zero); the second startSession issued from
    // writeVideoData looks redundant/invalid — confirm intended behavior.
    var haveStartedSession:Bool = false
    // Destination .mov path; must be set by the owner before the first frame.
    var outputVideoURL:URL!
    // Pending sample buffers awaiting a complete triplet.
    // NOTE(review): these are LIFO stacks (pop = popLast), so under backlog the
    // newest left/right/audio samples are paired first, which can mismatch
    // presentation timestamps — a FIFO queue may be what's intended.
    var leftSampleBuffer:SVDWStack<CMSampleBuffer> = SVDWStack()
    var rightSampleBuffer:SVDWStack<CMSampleBuffer> = SVDWStack()
    var audioSmapleBuffer:SVDWStack<CMSampleBuffer> = SVDWStack()
    //MARK: - Function
    // Queues a left-eye video frame; lazily initializes the writer first.
    func exAddLeftSampleBuffer(_ buffer:CMSampleBuffer) {
        initWriterWithSmapleBuffer(buffer)
        leftSampleBuffer.push(buffer)
        autoWriteData()
    }
    // Queues a right-eye video frame; lazily initializes the writer first.
    func exAddRightSampleBuffer(_ buffer:CMSampleBuffer) {
        initWriterWithSmapleBuffer(buffer)
        rightSampleBuffer.push(buffer)
        autoWriteData()
    }
    // Queues an audio sample. Note: does NOT initialize the writer, so audio
    // arriving before any video frame simply accumulates until video starts.
    func exAddAudioSampleBuffer(_ buffer:CMSampleBuffer) {
        audioSmapleBuffer.push(buffer)
        autoWriteData()
    }
    // Muxes one (left, right, audio) triplet whenever all three stacks have data.
    func autoWriteData(){
        if !leftSampleBuffer.isEmpty() && !rightSampleBuffer.isEmpty() && !audioSmapleBuffer.isEmpty() {
            writeVideoData(leftSamplebuffer: leftSampleBuffer.pop()!, rightSamplebuffer: rightSampleBuffer.pop()!, audioSamplebuffer: audioSmapleBuffer.pop()!)
        }
    }
    // Lazily creates the asset writer from the first video frame received.
    func initWriterWithSmapleBuffer(_ buffer:CMSampleBuffer){
        if !self.isReady {
            let pixelBuffer = CMSampleBufferGetImageBuffer(buffer)
            if let pixelBuffer {
                let videoWidth = CVPixelBufferGetWidth(pixelBuffer)
                let videoHeight = CVPixelBufferGetHeight(pixelBuffer)
                // NOTE(review): videoWidth/videoHeight from the buffer are computed
                // but unused — the writer is sized from the screen constants
                // (KScreenHeight x KScreenWidth, i.e. landscape). Confirm which is intended.
                writeVideoDataInit(videoWidth: KScreenHeight, videoHeight: KScreenWidth)
            }
        }
    }
    // Creates the asset writer with a two-layer MV-HEVC video input and a PCM
    // audio input, then starts writing with the session anchored at time zero.
    func writeVideoDataInit(videoWidth:Double,videoHeight:Double) {
        do {
            // Writer for a .mov container at outputVideoURL (file must not already exist).
            assetWriter = try AVAssetWriter(outputURL: outputVideoURL, fileType: .mov)
            videoSettings = [
                AVVideoWidthKey: videoWidth,
                AVVideoHeightKey: videoHeight,
                AVVideoCodecKey: AVVideoCodecType.hevc,
                AVVideoCompressionPropertiesKey: [
                    // Two video layers: 0 = left eye, 1 = right eye.
                    kVTCompressionPropertyKey_MVHEVCVideoLayerIDs: [0, 1] as CFArray,
                    kCMFormatDescriptionExtension_HorizontalFieldOfView: 90_000, // asset-specific, in thousandths of a degree
                    kVTCompressionPropertyKey_HorizontalDisparityAdjustment: 200, // asset-specific
                ]
            ]
            input_video = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
            input_video.expectsMediaDataInRealTime = true
            adaptor_inputVideo = AVAssetWriterInputTaggedPixelBufferGroupAdaptor(assetWriterInput: input_video)
            assetWriter.add(input_video)
            // Audio input; checked with canAdd unlike the video input above.
            writerInput_Audio_left = AVAssetWriterInput.init(mediaType: .audio, outputSettings: inputSettings_Audio)
            writerInput_Audio_left.expectsMediaDataInRealTime = true
            if assetWriter.canAdd(writerInput_Audio_left) {
                assetWriter.add(writerInput_Audio_left)
                print("assetWriter 添加writerInput_Audio_left成功...")
            }
            else {
                print("assetWriter 添加writerInput_Audio_left失败...")
            }
            assetWriter.startWriting()
            assetWriter.startSession(atSourceTime: .zero)
        }
        catch {
            print("writeVideoDataInit 初始化遇到问题:\(error)")
        }
    }
    // Finishes the movie file and tears down writer state so a new recording
    // can start. `completion` is invoked (true, nil) when finishWriting's
    // asynchronous callback fires.
    func writeVideoDataDeInit(completion: @escaping (Bool, Error?) -> Void) {
        writerInput_Audio_left.markAsFinished()
        input_video.markAsFinished()
        assetWriter.finishWriting {
            print("assetWriter.finishWriting 可以保存")
            completion(true, nil)
        }
        // NOTE(review): state is cleared immediately while finishWriting is
        // still running asynchronously — isReady flips false right away and the
        // stored references are dropped before the callback runs. Confirm this
        // is intended (finishWriting keeps the writer alive internally).
        haveStartedSession = false
        assetWriter = nil
        input_video = nil
        adaptor_inputVideo = nil
        writerInput_Audio_left = nil
    }
    // Appends one synchronized frame: left+right pixel buffers as a tagged
    // group at the left frame's timestamp, plus one audio sample.
    func writeVideoData(leftSamplebuffer:CMSampleBuffer,rightSamplebuffer:CMSampleBuffer,audioSamplebuffer:CMSampleBuffer) {
        // Both eyes must carry an image buffer; bail out otherwise.
        guard let leftCVPixelBuffer = CMSampleBufferGetImageBuffer(leftSamplebuffer) ,
            let rightCVPixelBuffer = CMSampleBufferGetImageBuffer(rightSamplebuffer) else {
            print("获取左右眼像素缓冲区失败")
            return
        }
        if ( !haveStartedSession ) {
            // Anchor the session timeline at the first left frame's PTS.
            assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(leftSamplebuffer))
            haveStartedSession = true
        }
        // Tag each eye's pixel buffer with its stereo view and MV-HEVC layer ID.
        let left = CMTaggedBuffer(tags: [.stereoView(.leftEye), .videoLayerID(0)], pixelBuffer: leftCVPixelBuffer)
        let right = CMTaggedBuffer(tags: [.stereoView(.rightEye), .videoLayerID(1)], pixelBuffer: rightCVPixelBuffer)
        while !adaptor_inputVideo.assetWriterInput.isReadyForMoreMediaData {
            // Busy-wait until the video input can accept more data.
            // NOTE(review): Thread.sleep blocks the serial write queue; Apple's
            // recommended alternative is requestMediaDataWhenReady — confirm
            // this polling is acceptable for the pipeline.
            Thread.sleep(forTimeInterval: 0.1)
        }
        let appendResult = adaptor_inputVideo.appendTaggedBuffers([left, right], withPresentationTime: leftSamplebuffer.presentationTimeStamp)
        print("appendVideoImage samplebuffer Frame Result :\(appendResult)")
        // Append the audio sample if the audio input can take it.
        // NOTE(review): when the input is not ready the audio sample is dropped,
        // not re-queued — audio may fall behind video under load.
        if writerInput_Audio_left.isReadyForMoreMediaData {
            if writerInput_Audio_left.append(audioSamplebuffer) == false {
                print("追加音频失败.....")
            }
            else{
                print("audio 追加成功....")
            }
        }
        else {
            print("audio 追加还未准备好...")
        }
    }
}