diff --git a/SwiftProject/SwiftProject.xcodeproj/project.pbxproj b/SwiftProject/SwiftProject.xcodeproj/project.pbxproj index 1e3323a..a00b875 100644 --- a/SwiftProject/SwiftProject.xcodeproj/project.pbxproj +++ b/SwiftProject/SwiftProject.xcodeproj/project.pbxproj @@ -7,6 +7,7 @@ objects = { /* Begin PBXBuildFile section */ + 003624662BA3F0AB0080D014 /* ZZHImageExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 003624652BA3F0AB0080D014 /* ZZHImageExtension.swift */; }; 005580782B9F1525004B9567 /* ZZHHelper.swift in Sources */ = {isa = PBXBuildFile; fileRef = 005580772B9F1525004B9567 /* ZZHHelper.swift */; }; 00B946212B67AC9D00DA668F /* CCSpatialPlayController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00B946202B67AC9C00DA668F /* CCSpatialPlayController.swift */; }; 00B946232B67B26D00DA668F /* ddd_video.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = 00B946222B67B26D00DA668F /* ddd_video.mp4 */; }; @@ -93,6 +94,7 @@ /* End PBXBuildFile section */ /* Begin PBXFileReference section */ + 003624652BA3F0AB0080D014 /* ZZHImageExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHImageExtension.swift; sourceTree = ""; }; 005580772B9F1525004B9567 /* ZZHHelper.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHHelper.swift; sourceTree = ""; }; 00B946202B67AC9C00DA668F /* CCSpatialPlayController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCSpatialPlayController.swift; sourceTree = ""; }; 00B946222B67B26D00DA668F /* ddd_video.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = ddd_video.mp4; sourceTree = ""; }; @@ -233,6 +235,7 @@ children = ( 1E1EA2952B936C9600A5D5D2 /* VideoConvertor2.swift */, 00D33BF32B998BF700604A44 /* SpatialImageConvertor.swift */, + 003624652BA3F0AB0080D014 /* ZZHImageExtension.swift */, 00D33BF52B99A19900604A44 /* SpatialVideoConvertor.swift */, 1E1EA28F2B933C8200A5D5D2 /* VideoWriter.swift */, 
00D33BF92B9AB21A00604A44 /* ZZHAVExtension.swift */, @@ -662,6 +665,7 @@ AF2120E02B4E9C8000400B7F /* Timer+Add.swift in Sources */, AF2120DE2B4E9C3500400B7F /* AppDelegate+Add.swift in Sources */, 1EFB8C6E2B88D9D800C72119 /* CCBottomMenuPopView.swift in Sources */, + 003624662BA3F0AB0080D014 /* ZZHImageExtension.swift in Sources */, 1E1EA2942B9364F000A5D5D2 /* CCSpatialVideoDisplayController.swift in Sources */, AFD9F5952B58D029008716DE /* MetalPlayer.swift in Sources */, AF2121002B4EA5FE00400B7F /* CCRegisterController.swift in Sources */, diff --git a/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialPhotoDisplayController.swift b/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialPhotoDisplayController.swift index 260c781..240805e 100644 --- a/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialPhotoDisplayController.swift +++ b/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialPhotoDisplayController.swift @@ -164,7 +164,8 @@ class CCSpatialPhotoDisplayController: BaseController { imgData = originalData //展示 - let image = UIImage(data: originalData!) + var image = UIImage(data: originalData!) + image = image?.getUpImg() mImgView.image = image let isSpatial = isSpatialImage(originalData: originalData!) @@ -260,13 +261,13 @@ class CCSpatialPhotoDisplayController: BaseController { if(selectedIndex == 0){ //空间照片 --- 单眼2D(展示原照片或者广角或者主摄其中一个) -// let image = UIImage(contentsOfFile: photoOriginalURL!.path) - let image = UIImage(data: photoOriginalData!) + var image = UIImage(data: photoOriginalData!) + image = image?.getUpImg() mImgView.image = image }else if(selectedIndex == 1){ //平行眼 - // mImgView.frame = CGRect.init(x: 0, y: 100, width: self.view.frame.size.width, height: 130) + let newpb = joinImages( leftImage: lciImage, rightImage:rciImage ) let lastImg = convertCIImageToUIImage(ciImage: newpb)! 
DispatchQueue.main.async { [weak self] in @@ -325,13 +326,15 @@ class CCSpatialPhotoDisplayController: BaseController { let lastImg = compositeFilter.outputImage! DispatchQueue.main.async { [weak self] in - self!.mImgView.image = UIImage(ciImage: lastImg) + var ri = UIImage(ciImage: lastImg) + ri = ri.getUpImg() + self!.mImgView.image = ri } } }else if(selectedIndex == 3){ //交叉眼 - // mImgView.frame = CGRect.init(x: 0, y: 100, width: self.view.frame.size.width, height: 130) + let newpb = joinImages( leftImage:rciImage , rightImage:lciImage ) let lastImg = convertCIImageToUIImage(ciImage: newpb)! DispatchQueue.main.async { [weak self] in @@ -359,37 +362,6 @@ class CCSpatialPhotoDisplayController: BaseController { return false } print(properties) - /* - [ProfileName: sRGB IEC61966-2.1, {TIFF}: { - Orientation = 1; - TileLength = 512; - TileWidth = 512; - }, PixelWidth: 4032, PixelHeight: 3024, {HEIF}: { - CameraExtrinsics = { - CoordinateSystemID = 0; - Position = ( - "-0.019238", - 0, - 0 - ); - Rotation = ( - 1, - 0, - 0, - 0, - 1, - 0, - 0, - 0, - 1 - ); - }; - }, Depth: 8, Orientation: 1, ColorModel: RGB] - - - 判断是否包含:{HEIF} 代表空间图片 - - */ //这里判断两张图片,gif也可能是两张 // let frameCount = CGImageSourceGetCount(imageSource) @@ -462,94 +434,6 @@ class CCSpatialPhotoDisplayController: BaseController { } - func convertVideo( inputFile : URL, outputFile: URL, progress: ((Float)->())? 
= nil ) async throws { - do { - try FileManager.default.removeItem(atPath: outputFile.path) - print("视频文件删除成功") - } catch { - print("删除视频文件出错:\(error)") - } - - // Load the AVAsset - let asset = AVAsset(url: inputFile) - let assetReader = try AVAssetReader(asset: asset) - - - //检查是否为空间视频 - let userDataItems = try await asset.loadMetadata(for:.quickTimeMetadata) - let spacialCharacteristics = userDataItems.filter { $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version" } - if spacialCharacteristics.count == 0 { - print("该视频不是空间视频") - } - - //获取输入视频的方向和大小(用于设置输出方向) - let (orientation, videoSize) = try await getOrientationAndResolutionSizeForVideo(asset: asset) - - //输出宽度为宽度的一半 - //我们有两个并排的视频,我们保持长宽比 - let vw:VideoWriter? - if(type == 3){ - vw = VideoWriter(url: outputFile, width: Int(videoSize.width), height: Int(videoSize.height), orientation: orientation, sessionStartTime: CMTime(value: 1, timescale: 30 ), isRealTime: false, queue: .main) - } - else{ - vw = VideoWriter(url: outputFile, width: Int(videoSize.width), height: Int(videoSize.height/2), orientation: orientation, sessionStartTime: CMTime(value: 1, timescale: 30 ), isRealTime: false, queue: .main) - } - - //加载音轨 - let output = try await AVAssetReaderTrackOutput( - track: asset.loadTracks(withMediaType: .video).first!, - outputSettings: [ - AVVideoDecompressionPropertiesKey: [ - kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray, - ], - ] - ) - assetReader.add(output) - assetReader.startReading() - let duration = try await asset.load(.duration) - - if let playerItem = player.currentItem { - playerItem.videoComposition = AVVideoComposition(asset: playerItem.asset) { request in - - print(request.sourceImage) - } - } - - while let nextSampleBuffer = output.copyNextSampleBuffer() { - guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { return } - - let leftEyeBuffer = taggedBuffers.first(where: { - $0.tags.first(matchingCategory: .stereoView) == 
.stereoView(.leftEye) - })?.buffer - let rightEyeBuffer = taggedBuffers.first(where: { - $0.tags.first(matchingCategory: .stereoView) == .stereoView(.rightEye) - })?.buffer - - if let leftEyeBuffer, - let rightEyeBuffer, - case let .pixelBuffer(leftEyePixelBuffer) = leftEyeBuffer, - case let .pixelBuffer(rightEyePixelBuffer) = rightEyeBuffer { - - let lciImage = CIImage(cvPixelBuffer: leftEyePixelBuffer) - let rciImage = CIImage(cvPixelBuffer: rightEyePixelBuffer) - //交叉眼 - let newpb = joinImages( leftImage: lciImage, rightImage:rciImage ) - let time = CMSampleBufferGetOutputPresentationTimeStamp(nextSampleBuffer) - _ = vw!.add(image: newpb, presentationTime: time) - // print( "Added frame at \(time)") - progress?( Float(time.value)/Float(duration.value)) - - - // try await Task.sleep(nanoseconds: 3_000_000) - } - } - - _ = try await vw!.finish() - print( "Finished") - - - } - func getOrientationAndResolutionSizeForVideo(asset:AVAsset) async throws -> (CGAffineTransform, CGSize) { guard let track = try await asset.loadTracks(withMediaType: AVMediaType.video).first @@ -572,8 +456,10 @@ class CCSpatialPhotoDisplayController: BaseController { //将两张图片合成一张图片 func joinImages( leftImage:CIImage, rightImage:CIImage) -> CIImage { - let left = UIImage(ciImage: leftImage ) - let right = UIImage(ciImage: rightImage ) + var left = UIImage(ciImage: leftImage ) + left = left.getUpImg() + var right = UIImage(ciImage: rightImage ) + right = right.getUpImg() let imageWidth = left.size.width/2 + right.size.width/2 let imageHeight = left.size.height/2 @@ -611,14 +497,6 @@ class CCSpatialPhotoDisplayController: BaseController { } - /* - // MARK: - Navigation - - // In a storyboard-based application, you will often want to do a little preparation before navigation - override func prepare(for segue: UIStoryboardSegue, sender: Any?) { - // Get the new view controller using segue.destination. - // Pass the selected object to the new view controller. 
- } - */ + } diff --git a/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialShootController.swift b/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialShootController.swift index 4b50798..995f667 100644 --- a/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialShootController.swift +++ b/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialShootController.swift @@ -30,8 +30,8 @@ class CCSpatialShootController: BaseController { var isRecording = false - - + var imageCGImagePropertyOrientation:CGImagePropertyOrientation = CGImagePropertyOrientation.left//由于cgimage无法保留图像的方向 + var imageOrientation:UIImage.Orientation = UIImage.Orientation.up var leftEyeVideoURL:URL? var rightEyeVideoURL:URL? @@ -836,15 +836,23 @@ class CCSpatialShootController: BaseController { print("未知") case .portrait: print("竖屏") + imageCGImagePropertyOrientation = .up + imageOrientation = .up showHorizontalScreenTips() case .portraitUpsideDown: print("颠倒竖屏") + imageCGImagePropertyOrientation = .down + imageOrientation = .down showHorizontalScreenTips() case .landscapeLeft: print("设备向左旋转横屏") + imageCGImagePropertyOrientation = .left + imageOrientation = .left hidenHorizontalScreenTips() case .landscapeRight: print("设备向右旋转横屏") + imageCGImagePropertyOrientation = .right + imageOrientation = .right hidenHorizontalScreenTips() case .faceUp: print("屏幕朝上") @@ -1016,19 +1024,42 @@ extension CCSpatialShootController: AVCaptureFileOutputRecordingDelegate { } } + func convertOrigation(orientation:UIImage.Orientation) -> CGImagePropertyOrientation{ + switch orientation { + case UIImage.Orientation.left: + return CGImagePropertyOrientation.left + case UIImage.Orientation.right: + return CGImagePropertyOrientation.right + case UIImage.Orientation.up: + return CGImagePropertyOrientation.up + case UIImage.Orientation.down: + return CGImagePropertyOrientation.down + case UIImage.Orientation.leftMirrored: + return 
CGImagePropertyOrientation.leftMirrored + case UIImage.Orientation.rightMirrored: + return CGImagePropertyOrientation.rightMirrored + case UIImage.Orientation.upMirrored: + return CGImagePropertyOrientation.upMirrored + case UIImage.Orientation.downMirrored: + return CGImagePropertyOrientation.downMirrored + @unknown default: + return CGImagePropertyOrientation.up + } + } + //合成空间图片 func compositeSpatialPhoto(){ let img1:UIImage = imgs[0] as! UIImage let img2:UIImage = imgs[1] as! UIImage - let imageSize1 = CGRect(x: 0, y: 0, width: img1.cgImage!.width, height: img1.cgImage!.height) - let imageSize2 = CGRect(x: 0, y: 0, width: img2.cgImage!.width, height: img2.cgImage!.height) + +// let imageSize1 = CGRect(x: 0, y: 0, width: img1.cgImage!.width, height: img1.cgImage!.height) +// let imageSize2 = CGRect(x: 0, y: 0, width: img2.cgImage!.width, height: img2.cgImage!.height) let url = URL.documentsDirectory.appending(path:"aaa12.HEIC") - let destination = CGImageDestinationCreateWithURL(url as CFURL, UTType.heic.identifier as CFString, 2, nil)! 
- + imageCGImagePropertyOrientation = .left let properties1 = [ kCGImagePropertyGroups: [ kCGImagePropertyGroupIndex: 0, @@ -1036,6 +1067,7 @@ extension CCSpatialShootController: AVCaptureFileOutputRecordingDelegate { kCGImagePropertyGroupImageIndexLeft: 0, kCGImagePropertyGroupImageIndexRight: 1, ], + kCGImagePropertyTIFFOrientation:imageCGImagePropertyOrientation.rawValue, kCGImagePropertyHEIFDictionary: [ kIIOMetadata_CameraExtrinsicsKey: [ kIIOCameraExtrinsics_CoordinateSystemID: 0, @@ -1051,7 +1083,7 @@ extension CCSpatialShootController: AVCaptureFileOutputRecordingDelegate { ] ] ] - ] + ] as [CFString : Any] let properties2 = [ kCGImagePropertyGroups: [ @@ -1060,6 +1092,7 @@ extension CCSpatialShootController: AVCaptureFileOutputRecordingDelegate { kCGImagePropertyGroupImageIndexLeft: 0, kCGImagePropertyGroupImageIndexRight: 1, ], + kCGImagePropertyTIFFOrientation:imageCGImagePropertyOrientation.rawValue, kCGImagePropertyHEIFDictionary: [ kIIOMetadata_CameraExtrinsicsKey: [ kIIOCameraExtrinsics_CoordinateSystemID: 0, @@ -1075,21 +1108,32 @@ extension CCSpatialShootController: AVCaptureFileOutputRecordingDelegate { ] ] ] - ] + ] as [CFString : Any] - let leftImg = fixOrientation(img1) - let rightImg = fixOrientation(img2) + let leftImg = img1//fixOrientation(img1) + let rightImg = img2//fixOrientation(img2) + +// CGImageDestinationSetProperties(destination, [kCGImagePropertyOrientation: imageCGImagePropertyOrientation] as CFDictionary) +// CGImageDestinationSetProperties(destination,properties1 as CFDictionary) +// CGImageDestinationSetProperties(destination,properties2 as CFDictionary) CGImageDestinationAddImage(destination, leftImg.cgImage!, properties1 as CFDictionary) CGImageDestinationAddImage(destination, rightImg.cgImage!, properties2 as CFDictionary) - CGImageDestinationFinalize(destination) +// CGImageDestinationAddImage(destination, leftImg.cgImage!, [kCGImagePropertyOrientation: imageCGImagePropertyOrientation] as CFDictionary) +//// 
CGImageDestinationAddImage(destination, rightImg.cgImage!, [kCGImagePropertyOrientation: imageCGImagePropertyOrientation] as CFDictionary) + + let rr = CGImageDestinationFinalize(destination) + if rr == false { + print("ee..") + } + + - let image = UIImage(contentsOfFile: url.path()) let source = CGImageSourceCreateWithURL(url as CFURL, nil)! guard let properties22 = CGImageSourceCopyPropertiesAtIndex(source, 1, nil) as? [CFString: Any] else { return } - print(properties22) + print("ssss:\(properties22)") savePhoto(url) @@ -1097,6 +1141,11 @@ extension CCSpatialShootController: AVCaptureFileOutputRecordingDelegate { //修正图片的方向 func fixOrientation(_ image: UIImage) -> UIImage { + return image + +// return UIImage(cgImage: image.cgImage!, scale: image.scale, orientation: imageOrientation) + + // No-op if the orientation is already correct guard image.imageOrientation != .up else { return image } diff --git a/SwiftProject/SwiftProject/Project/Util/SpatialImageConvertor.swift b/SwiftProject/SwiftProject/Project/Util/SpatialImageConvertor.swift index 297a958..ebef89d 100644 --- a/SwiftProject/SwiftProject/Project/Util/SpatialImageConvertor.swift +++ b/SwiftProject/SwiftProject/Project/Util/SpatialImageConvertor.swift @@ -60,8 +60,19 @@ class SpatialImageConvertor { let rciImage = CIImage(cgImage: frames[1]) let left_uiimage = UIImage(ciImage: lciImage ) + + let left_uiimage_new = left_uiimage.getUpImg() + let right_uiimage = UIImage(ciImage: rciImage ) - return (left_uiimage,right_uiimage) + let right_uiimage_new = right_uiimage.getUpImg() + return (left_uiimage_new,right_uiimage_new) + } + + + class func convertCIImageToCGImage(ciImage:CIImage) -> CGImage { + let ciContext = CIContext.init() + let cgImage:CGImage = ciContext.createCGImage(ciImage, from: ciImage.extent)! + return cgImage } class func convertCIImageToUIImage(ciImage: CIImage) -> UIImage? 
{ diff --git a/SwiftProject/SwiftProject/Project/Util/ZZHImageExtension.swift b/SwiftProject/SwiftProject/Project/Util/ZZHImageExtension.swift new file mode 100644 index 0000000..1631b71 --- /dev/null +++ b/SwiftProject/SwiftProject/Project/Util/ZZHImageExtension.swift @@ -0,0 +1,52 @@ +// +// ZZHImageExtension.swift +// SwiftProject +// +// Created by aaa on 2024/3/15. +// + +import Foundation +import UIKit + +extension UIImage { + func rotate(radians: CGFloat) -> UIImage { + let rotatedSize = CGRect(origin: .zero, size: size) + .applying(CGAffineTransform(rotationAngle: CGFloat(radians))) + .integral.size + UIGraphicsBeginImageContext(rotatedSize) + if let context = UIGraphicsGetCurrentContext() { + let origin = CGPoint(x: rotatedSize.width / 2.0, + y: rotatedSize.height / 2.0) + context.translateBy(x: origin.x, y: origin.y) + context.rotate(by: radians) + draw(in: CGRect(x: -size.width / 2.0, y: -size.height / 2.0, + width: size.width, height: size.height)) + let rotatedImage = UIGraphicsGetImageFromCurrentImageContext() + UIGraphicsEndImageContext() + + return rotatedImage ?? self + } + + return self + } + + //返回正向图片 + func getUpImg()->UIImage { + let dg = getRotateFrom() + let rotatedImage = rotate(radians:dg) + return rotatedImage + } + + func getRotateFrom() ->CGFloat { + print("..or:\(self.imageOrientation)") + if self.imageOrientation == .left { + return .pi/2 + } else if self.imageOrientation == .right{ + return -.pi/2 + } + else { + return 0 + } + } +}