完成:

1.视频的四种格式转换:FSBS、HSBS、HOU、FOU;

注:发现一个问题——按照之前那位 iOS 工程师留下的算法,转换出的视频第一帧疑似有问题(待排查)。
This commit is contained in:
bluesea 2024-03-08 11:40:55 +08:00
parent 38cde26cd9
commit df1b9de863
6 changed files with 335 additions and 57 deletions

View File

@ -11,6 +11,8 @@
00B946232B67B26D00DA668F /* ddd_video.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = 00B946222B67B26D00DA668F /* ddd_video.mp4 */; };
00B946252B67B7DE00DA668F /* CCSpatialPlayView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00B946242B67B7DE00DA668F /* CCSpatialPlayView.swift */; };
00D33BF42B998BF700604A44 /* SpatialImageConvertor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00D33BF32B998BF700604A44 /* SpatialImageConvertor.swift */; };
00D33BF62B99A19900604A44 /* SpatialVideoConvertor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00D33BF52B99A19900604A44 /* SpatialVideoConvertor.swift */; };
00D33BFA2B9AB21A00604A44 /* ZZHAVExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00D33BF92B9AB21A00604A44 /* ZZHAVExtension.swift */; };
04E1D3F12B68EDFE00743F2F /* CCWebController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04E1D3F02B68EDFE00743F2F /* CCWebController.swift */; };
1E02C9322B8990C600DD3143 /* CCDeviceOperationListView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E02C9312B8990C600DD3143 /* CCDeviceOperationListView.swift */; };
1E02C9342B89916C00DD3143 /* CCDeviceOperationListCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E02C9332B89916C00DD3143 /* CCDeviceOperationListCell.swift */; };
@ -93,6 +95,8 @@
00B946222B67B26D00DA668F /* ddd_video.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = ddd_video.mp4; sourceTree = "<group>"; };
00B946242B67B7DE00DA668F /* CCSpatialPlayView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCSpatialPlayView.swift; sourceTree = "<group>"; };
00D33BF32B998BF700604A44 /* SpatialImageConvertor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpatialImageConvertor.swift; sourceTree = "<group>"; };
00D33BF52B99A19900604A44 /* SpatialVideoConvertor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpatialVideoConvertor.swift; sourceTree = "<group>"; };
00D33BF92B9AB21A00604A44 /* ZZHAVExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHAVExtension.swift; sourceTree = "<group>"; };
04E1D3F02B68EDFE00743F2F /* CCWebController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCWebController.swift; sourceTree = "<group>"; };
1E02C9312B8990C600DD3143 /* CCDeviceOperationListView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCDeviceOperationListView.swift; sourceTree = "<group>"; };
1E02C9332B89916C00DD3143 /* CCDeviceOperationListCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCDeviceOperationListCell.swift; sourceTree = "<group>"; };
@ -223,7 +227,9 @@
children = (
1E1EA2952B936C9600A5D5D2 /* VideoConvertor2.swift */,
00D33BF32B998BF700604A44 /* SpatialImageConvertor.swift */,
00D33BF52B99A19900604A44 /* SpatialVideoConvertor.swift */,
1E1EA28F2B933C8200A5D5D2 /* VideoWriter.swift */,
00D33BF92B9AB21A00604A44 /* ZZHAVExtension.swift */,
1EE5C5F92B8F97BF00EDFC2F /* SpatialVideoWriter.swift */,
);
path = Util;
@ -629,6 +635,7 @@
AF2120C32B4E95DA00400B7F /* NSObject+Add.swift in Sources */,
1EE5C5F72B8F973A00EDFC2F /* CCSpatialShootController.swift in Sources */,
1EFB8C782B88E2F600C72119 /* UIColor+CCExtension.swift in Sources */,
00D33BF62B99A19900604A44 /* SpatialVideoConvertor.swift in Sources */,
AF2120EE2B4EA34E00400B7F /* BaseTableViewPlainController.swift in Sources */,
1E1EA28C2B93272700A5D5D2 /* CCSpatialDisplayTypeView.swift in Sources */,
AF2120C62B4E95DA00400B7F /* UIView+Add.swift in Sources */,
@ -641,6 +648,7 @@
1E1EA2902B933C8200A5D5D2 /* VideoWriter.swift in Sources */,
00B946212B67AC9D00DA668F /* CCSpatialPlayController.swift in Sources */,
AF2120E62B4E9DE000400B7F /* CCTableSwitchView.swift in Sources */,
00D33BFA2B9AB21A00604A44 /* ZZHAVExtension.swift in Sources */,
1EFAF0B82B8AF1B8002A1773 /* CCSpaceAlbumPopView.swift in Sources */,
AF2120E02B4E9C8000400B7F /* Timer+Add.swift in Sources */,
AF2120DE2B4E9C3500400B7F /* AppDelegate+Add.swift in Sources */,

View File

@ -229,15 +229,19 @@ class VRPhotoTransformController: BaseController {
case .hsbs_3D:
let result:UIImage = SpatialImageConvertor.convertToHSBS(imageData: sourceImageData!)!
UIImageWriteToSavedPhotosAlbum(result, self, #selector(completeSaveImg(_:error:contextInfo:)), nil)
break
case .fsbs_3D:
let result:UIImage = SpatialImageConvertor.convertToFSBS(imageData: sourceImageData!)!
UIImageWriteToSavedPhotosAlbum(result, self, #selector(completeSaveImg(_:error:contextInfo:)), nil)
break
case .hou_3d:
let result:UIImage = SpatialImageConvertor.convertToHOU(imageData: sourceImageData!)!
UIImageWriteToSavedPhotosAlbum(result, self, #selector(completeSaveImg(_:error:contextInfo:)), nil)
break
case .fou_3D:
let result:UIImage = SpatialImageConvertor.convertToFOU(imageData: sourceImageData!)!
UIImageWriteToSavedPhotosAlbum(result, self, #selector(completeSaveImg(_:error:contextInfo:)), nil)
break
default:
print("不执行...")

View File

@ -69,7 +69,7 @@ class VRVideoTransformController: BaseController {
//3D
var selected3DFormat:Video3DFormat = .HSBS
let spatialVideoConver = SpatialVideoConvertor()
/*
HEVC (H.265, MPEG-H Part 2) — High Efficiency Video Coding
@ -404,8 +404,7 @@ class VRVideoTransformController: BaseController {
switch model.type {
case .format3D:
print("3D 格式")
case .hsbs_3D ,.fsbs_3D,.hou_3d,.fou_3D:
print("3D 格式")
// print("3D ")
/*
3D HSBS
3D FSBS
@ -416,7 +415,7 @@ class VRVideoTransformController: BaseController {
self.selected3DFormat = .HSBS
self.according3DFormatShowList(format: .HSBS)
}
let action2 = UIAlertAction(title: "3D FSBS左右格式)" , style: .default) { (action:UIAlertAction) in
let action2 = UIAlertAction(title: "3D FSBS左右格式)" , style: .default) { (action:UIAlertAction) in
self.selected3DFormat = .FSBS
self.according3DFormatShowList(format: .FSBS)
}
@ -546,6 +545,8 @@ class VRVideoTransformController: BaseController {
// default:
// print("")
// break
default:
break
}
}
@ -560,7 +561,7 @@ extension VRVideoTransformController {
let videoURL = sourceVideoURL
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
let outputURL = documentsDirectory.appendingPathComponent("output_path.mp4")
let outputURL = documentsDirectory.appendingPathComponent("output_path.mov")
do {
try FileManager.default.removeItem(atPath: outputURL.path)
@ -584,18 +585,38 @@ extension VRVideoTransformController {
let horizontalDisparity: Float = 0.0
let horizontalFieldOfView: Float = 90.0
exportVideo(url: videoURL!, outputURL: outputURL, width: width, height: height,codecType:codecType, dataRate: dataRate, horizontalDisparity: horizontalDisparity, horizontalFieldOfView: horizontalFieldOfView) { exportedAsset in
// AVAsset
//
DispatchQueue.main.async {
let exportedPlayerItem = AVPlayerItem(asset: exportedAsset!)
self.mAvPlayer!.player!.replaceCurrentItem(with: exportedPlayerItem)
self.mAvPlayer!.player!.play()
let outputVideoURL = URL.documentsDirectory.appending(path:"convertor_one.mov")
let videoOriginalAsset:AVAsset? = AVAsset(url: sourceVideoURL!)
Task {
try await spatialVideoConver.convertVideo(asset: videoOriginalAsset!, outputFile: outputVideoURL ,type: self.selected3DFormat) { [self] progress in
print(progress)
// DispatchQueue.main.async { [weak self] in
if(progress > 0.99){
DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
//
self.exportVideo(url: outputVideoURL, outputURL: outputURL, width: width, height: height,codecType:codecType, dataRate: dataRate, horizontalDisparity: horizontalDisparity, horizontalFieldOfView: horizontalFieldOfView) { exportedAsset in
DispatchQueue.main.async {
PHPhotoLibrary.shared().performChanges {
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
} completionHandler: { isSuccess, error in
if isSuccess {
SVProgressHUD.showSuccess(withStatus: "成功导出视频")
} else {
SVProgressHUD.showSuccess(withStatus: "导出视频失败")
}
}
}
}
}
}
// }
}
}
}
private func exportVideo(url: URL, outputURL: URL, width: Int, height: Int,codecType:AVVideoCodecType, dataRate: Int, horizontalDisparity: Float, horizontalFieldOfView: Float, completion: @escaping (AVAsset?) -> Void) {
let asset = AVAsset(url: url)

View File

@ -0,0 +1,234 @@
//
// SpatialVideoConvertor.swift
// SwiftProject
//
// Created by aaa on 2024/3/7.
//
import Foundation
import AVKit
import VideoToolbox
import CoreImage
import ImageIO
class SpatialVideoConvertor {
    /// Left-eye pixel buffer of the most recently decoded frame (kept for inspection/debugging).
    var leftEyeImage: CVPixelBuffer?
    /// Right-eye pixel buffer of the most recently decoded frame.
    var rightEyeImage: CVPixelBuffer?

    /// Decodes an MV-HEVC spatial video and writes a flat stereoscopic video
    /// (HSBS / FSBS / HOU / FOU) to `outputFile`.
    /// - Parameters:
    ///   - asset: Source asset; expected to carry MV-HEVC stereo layers 0 (left) and 1 (right).
    ///   - outputFile: Destination URL; a pre-existing file there is deleted first.
    ///   - type: Target side-by-side / over-under packing layout.
    ///   - progress: Optional callback with a 0...1 progress fraction.
    /// - Throws: AVFoundation reader/loader errors, or writer errors from `finish()`.
    func convertVideo( asset : AVAsset, outputFile: URL,type:Video3DFormat, progress: ((Float)->())? = nil ) async throws {
        // Remove a stale output file so the writer can create a fresh one.
        do {
            try FileManager.default.removeItem(atPath: outputFile.path)
            print("视频文件删除成功")
        } catch {
            print("删除视频文件出错:\(error)")
        }
        let assetReader = try AVAssetReader(asset: asset)
        // Spatial videos carry this QuickTime metadata key; its absence is only
        // logged — decoding is still attempted, matching the original behavior.
        let userDataItems = try await asset.loadMetadata(for:.quickTimeMetadata)
        let spacialCharacteristics = userDataItems.filter { $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version" }
        if spacialCharacteristics.isEmpty {
            print("不是空间视频")
        }
        // The preferred transform is forwarded to the writer so orientation survives.
        // (The resolution component of the tuple was unused here.)
        let (orientation, _) = try await getOrientationAndResolutionSizeForVideo(asset: asset)
        // Created lazily once the first stereo frame tells us the output size.
        var vw:VideoWriter?
        // Request both MV-HEVC layers (0 = left eye, 1 = right eye).
        let output = try await AVAssetReaderTrackOutput(
            track: asset.loadTracks(withMediaType: .video).first!,
            outputSettings: [
                AVVideoDecompressionPropertiesKey: [
                    kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
                ],
            ]
        )
        assetReader.add(output)
        assetReader.startReading()
        let duration = try await asset.load(.duration)
        while let nextSampleBuffer = output.copyNextSampleBuffer() {
            // A sample without tagged buffers carries no stereo layers: skip it.
            // The previous `return` here aborted the whole conversion and left
            // the writer unfinished, producing a corrupt output file.
            guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { continue }
            let leftEyeBuffer = taggedBuffers.first(where: {
                $0.tags.first(matchingCategory: .stereoView) == .stereoView(.leftEye)
            })?.buffer
            let rightEyeBuffer = taggedBuffers.first(where: {
                $0.tags.first(matchingCategory: .stereoView) == .stereoView(.rightEye)
            })?.buffer
            if let leftEyeBuffer,
               let rightEyeBuffer,
               case let .pixelBuffer(leftEyePixelBuffer) = leftEyeBuffer,
               case let .pixelBuffer(rightEyePixelBuffer) = rightEyeBuffer {
                leftEyeImage = leftEyePixelBuffer
                rightEyeImage = rightEyePixelBuffer
                let lciImage = CIImage(cvPixelBuffer: leftEyePixelBuffer)
                let rciImage = CIImage(cvPixelBuffer: rightEyePixelBuffer)
                let left = UIImage(ciImage: lciImage )
                let right = UIImage(ciImage: rciImage )
                var newpb:CIImage?
                var cwidth:CGFloat
                var cheight:CGFloat
                // Output canvas size per layout. "Half" formats keep one eye's
                // dimensions (each eye gets squeezed into half the canvas);
                // "Full" formats double the packed axis.
                switch type {
                case .HSBS:
                    cwidth = left.size.width
                    cheight = left.size.height
                    newpb = joinImages_sbs(left: left, right: right, imgWidth: cwidth, imgHeight: cheight)
                case .FSBS:
                    cwidth = left.size.width*2
                    cheight = left.size.height
                    newpb = joinImages_sbs(left: left, right: right, imgWidth: cwidth, imgHeight: cheight)
                case .HOU:
                    cwidth = left.size.width
                    cheight = left.size.height
                    newpb = joinImages_ou(left: left, right: right, imgWidth: cwidth, imgHeight: cheight)
                case .FOU:
                    cwidth = left.size.width
                    cheight = left.size.height*2
                    newpb = joinImages_ou(left: left, right: right, imgWidth: cwidth, imgHeight: cheight)
                }
                let time = CMSampleBufferGetOutputPresentationTimeStamp(nextSampleBuffer)
                if vw == nil {
                    // Start the writer session at the first frame's real PTS.
                    // The previous hard-coded CMTime(value: 1, timescale: 30)
                    // did not match the first sample's timestamp — the likely
                    // cause of the reported "first frame looks wrong" issue.
                    vw = VideoWriter(url: outputFile, width: Int(cwidth), height: Int(cheight), orientation: orientation, sessionStartTime: time, isRealTime: false, queue: .main)
                }
                _ = vw!.add(image: newpb!, presentationTime: time)
                print( "Added frame at \(time)")
                // Progress as a time ratio. Comparing raw `.value`s (as before)
                // is wrong whenever the two CMTimes use different timescales.
                progress?( Float(time.seconds / duration.seconds) )
                // This sleep is needed to stop memory blooming - keeps around 280Mb rather than spiraling up to 8+Gig!
                try await Task.sleep(nanoseconds: 3_000_000)
            }
        }
        print( "status - \(assetReader.status)")
        print( "status - \(assetReader.error?.localizedDescription ?? "None")")
        print( "Finished")
        // Finish only if at least one frame was written; the former `vw!`
        // crashed on assets that yielded no stereo samples.
        if let vw {
            _ = try await vw.finish()
        }
    }

    /// Diagnostic helper: round-trips a CIImage through CGImageSource and
    /// prints per-frame properties. Not used by the conversion path.
    func isSpatialImage2(from ciImage: CIImage) {
        let context = CIContext()
        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else {
            return
        }
        // Guard each CF call instead of force-unwrapping; the original `!`s
        // crashed on any failure in this diagnostics-only path.
        guard let cfData = cgImage.dataProvider?.data,
              let dataProvider = CGDataProvider(data: cfData),
              let imageSource = CGImageSourceCreateWithDataProvider(dataProvider, nil) else {
            return
        }
        let frameCount = CGImageSourceGetCount(imageSource)
        print(frameCount)
        for index in 0..<frameCount {
            let properties = CGImageSourceCopyPropertiesAtIndex(imageSource, index, nil) as? [CFString: Any]
            print(properties as Any)
            guard let frameImage = CGImageSourceCreateImageAtIndex(imageSource, index, nil) else {
                continue
            }
            print(frameImage)
        }
    }

    /// Returns the video track's preferred transform together with its display
    /// size (naturalSize with the transform applied, absolute-valued so rotated
    /// tracks don't report negative dimensions).
    /// - Throws: `VideoReaderError.invalidVideo` when the asset has no video track.
    func getOrientationAndResolutionSizeForVideo(asset:AVAsset) async throws -> (CGAffineTransform, CGSize) {
        guard let track = try await asset.loadTracks(withMediaType: AVMediaType.video).first
        else { throw VideoReaderError.invalidVideo }
        let naturalSize = try await track.load(.naturalSize)
        let naturalTransform = try await track.load(.preferredTransform)
        let size = naturalSize.applying(naturalTransform)
        return (naturalTransform, CGSize(width: abs(size.width), height: abs(size.height)) )
    }

    /// Legacy joiner producing a half-size side-by-side image.
    /// NOTE(review): halves both source dimensions, which is inconsistent with
    /// `joinImages_sbs`; it is not called by `convertVideo`. Kept unchanged for
    /// compatibility with any external callers.
    func joinImages( leftImage:CIImage, rightImage:CIImage) -> CIImage {
        let left = UIImage(ciImage: leftImage )
        let right = UIImage(ciImage: rightImage )
        let imageWidth = left.size.width/2 + right.size.width/2
        let imageHeight = left.size.height/2
        let newImageSize = CGSize(width:imageWidth, height: imageHeight)
        UIGraphicsBeginImageContextWithOptions(newImageSize, false, 1)
        left.draw(in: CGRect(x:0, y:0, width:imageWidth/2, height:imageHeight))
        right.draw(in: CGRect(x:imageWidth/2, y:0, width:imageWidth/2, height:imageHeight))
        let image = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext()
        return CIImage(cgImage: image.cgImage!)
    }

    /// Composites the two eyes side by side onto a canvas of the given size;
    /// left eye fills the left half, right eye the right half.
    func joinImages_sbs( left:UIImage, right:UIImage,imgWidth:CGFloat,imgHeight:CGFloat) -> CIImage {
        let newImageSize = CGSize(width:imgWidth, height: imgHeight)
        UIGraphicsBeginImageContextWithOptions(newImageSize, false, 1)
        left.draw(in: CGRect(x:0, y:0, width:imgWidth/2, height:imgHeight))
        right.draw(in: CGRect(x:imgWidth/2, y:0, width:imgWidth/2, height:imgHeight))
        let image = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext()
        return CIImage(cgImage: image.cgImage!)
    }

    /// Composites the two eyes over/under onto a canvas of the given size;
    /// left eye fills the top half, right eye the bottom half.
    func joinImages_ou( left:UIImage, right:UIImage,imgWidth:CGFloat,imgHeight:CGFloat) -> CIImage {
        let newImageSize = CGSize(width:imgWidth, height: imgHeight)
        UIGraphicsBeginImageContextWithOptions(newImageSize, false, 1)
        left.draw(in: CGRect(x:0, y:0, width:imgWidth, height:imgHeight/2))
        right.draw(in: CGRect(x:0, y:imgHeight/2, width:imgWidth, height:imgHeight/2))
        let image = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext()
        return CIImage(cgImage: image.cgImage!)
    }
}

View File

@ -115,47 +115,3 @@ class VideoWriter {
return asset
}
}
extension AVAssetWriterInputPixelBufferAdaptor {
    /// Renders `image` into a pixel buffer drawn from the adaptor's pool and
    /// appends it at `presentationTime`.
    /// - Returns: `true` if the buffer was created and appended successfully.
    func appendPixelBufferForImage(_ image: CIImage, presentationTime: CMTime) -> Bool {
        var appendSucceeded = false
        autoreleasepool {
            // The pool is nil until the writer session has started, or when the
            // writer is in a failed state.
            guard let pixelBufferPool = self.pixelBufferPool else {
                print("appendPixelBufferForImage: ERROR - missing pixelBufferPool") // writer can have error: writer.error=\(String(describing: self.writer.error))
                return
            }
            // Idiomatic optional out-parameter instead of the previous manually
            // allocated/deinitialized UnsafeMutablePointer — same semantics,
            // no unsafe memory management.
            var newPixelBuffer: CVPixelBuffer?
            let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
                kCFAllocatorDefault,
                pixelBufferPool,
                &newPixelBuffer
            )
            if let pixelBuffer = newPixelBuffer, status == kCVReturnSuccess {
                pixelBuffer.fillPixelBufferFromImage(image)
                appendSucceeded = self.append(pixelBuffer, withPresentationTime: presentationTime)
                if !appendSucceeded {
                    // If a result of NO is returned, clients can check the value of AVAssetWriter.status to determine whether the writing operation completed, failed, or was cancelled. If the status is AVAssetWriterStatusFailed, AVAsset.error will contain an instance of NSError that describes the failure.
                    print("VideoWriter appendPixelBufferForImage: ERROR appending")
                }
            } else {
                print("VideoWriter appendPixelBufferForImage: ERROR - Failed to allocate pixel buffer from pool, status=\(status)") // -6680 = kCVReturnInvalidPixelFormat
            }
        }
        return appendSucceeded
    }
}
extension CVPixelBuffer {
    /// Renders `image` into this pixel buffer via the shared `VideoWriter.ciContext`.
    /// NOTE(review): the lock/unlock pair brackets the render as a CPU-access
    /// precaution — confirm it is required for this CIContext configuration.
    func fillPixelBufferFromImage(_ image: CIImage) {
        CVPixelBufferLockBaseAddress(self, [])
        VideoWriter.ciContext.render(image, to: self)
        CVPixelBufferUnlockBaseAddress(self, [])
    }
}

View File

@ -0,0 +1,55 @@
//
// ZZHAVExtension.swift
// SwiftProject
//
// Created by aaa on 2024/3/8.
//
import Foundation
import AVFoundation
import CoreImage
extension AVAssetWriterInputPixelBufferAdaptor {
    /// Renders `image` into a pixel buffer drawn from the adaptor's pool and
    /// appends it at `presentationTime`.
    /// - Returns: `true` if the buffer was created and appended successfully.
    func appendPixelBufferForImage(_ image: CIImage, presentationTime: CMTime) -> Bool {
        var appendSucceeded = false
        autoreleasepool {
            // The pool is nil until the writer session has started, or when the
            // writer is in a failed state.
            guard let pixelBufferPool = self.pixelBufferPool else {
                print("appendPixelBufferForImage: ERROR - missing pixelBufferPool") // writer can have error: writer.error=\(String(describing: self.writer.error))
                return
            }
            // Idiomatic optional out-parameter instead of the previous manually
            // allocated/deinitialized UnsafeMutablePointer — same semantics,
            // no unsafe memory management.
            var newPixelBuffer: CVPixelBuffer?
            let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
                kCFAllocatorDefault,
                pixelBufferPool,
                &newPixelBuffer
            )
            if let pixelBuffer = newPixelBuffer, status == kCVReturnSuccess {
                pixelBuffer.fillPixelBufferFromImage(image)
                appendSucceeded = self.append(pixelBuffer, withPresentationTime: presentationTime)
                if !appendSucceeded {
                    // If a result of NO is returned, clients can check the value of AVAssetWriter.status to determine whether the writing operation completed, failed, or was cancelled. If the status is AVAssetWriterStatusFailed, AVAsset.error will contain an instance of NSError that describes the failure.
                    print("VideoWriter appendPixelBufferForImage: ERROR appending")
                }
            } else {
                print("VideoWriter appendPixelBufferForImage: ERROR - Failed to allocate pixel buffer from pool, status=\(status)") // -6680 = kCVReturnInvalidPixelFormat
            }
        }
        return appendSucceeded
    }
}
extension CVPixelBuffer {
    /// Renders `image` into this pixel buffer via the shared `VideoWriter.ciContext`.
    /// NOTE(review): the lock/unlock pair brackets the render as a CPU-access
    /// precaution — confirm it is required for this CIContext configuration.
    func fillPixelBufferFromImage(_ image: CIImage) {
        CVPixelBufferLockBaseAddress(self, [])
        VideoWriter.ciContext.render(image, to: self)
        // Alternative bounded render kept from the original for reference:
        // VideoWriter.ciContext.render(image, to: self, bounds: CGRectMake(0, 0, 2200, 1600), colorSpace: nil)
        CVPixelBufferUnlockBaseAddress(self, [])
    }
}