解决图片旋转问题,但是目前设定图片的方向写入图片本身还未解决

This commit is contained in:
bluesea 2024-03-15 14:40:02 +08:00
parent 998e20260c
commit 40c3a4747e
5 changed files with 144 additions and 150 deletions

View File

@ -7,6 +7,7 @@
objects = {
/* Begin PBXBuildFile section */
003624662BA3F0AB0080D014 /* ZZHImageExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 003624652BA3F0AB0080D014 /* ZZHImageExtension.swift */; };
005580782B9F1525004B9567 /* ZZHHelper.swift in Sources */ = {isa = PBXBuildFile; fileRef = 005580772B9F1525004B9567 /* ZZHHelper.swift */; };
00B946212B67AC9D00DA668F /* CCSpatialPlayController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00B946202B67AC9C00DA668F /* CCSpatialPlayController.swift */; };
00B946232B67B26D00DA668F /* ddd_video.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = 00B946222B67B26D00DA668F /* ddd_video.mp4 */; };
@ -93,6 +94,7 @@
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
003624652BA3F0AB0080D014 /* ZZHImageExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHImageExtension.swift; sourceTree = "<group>"; };
005580772B9F1525004B9567 /* ZZHHelper.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHHelper.swift; sourceTree = "<group>"; };
00B946202B67AC9C00DA668F /* CCSpatialPlayController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCSpatialPlayController.swift; sourceTree = "<group>"; };
00B946222B67B26D00DA668F /* ddd_video.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = ddd_video.mp4; sourceTree = "<group>"; };
@ -233,6 +235,7 @@
children = (
1E1EA2952B936C9600A5D5D2 /* VideoConvertor2.swift */,
00D33BF32B998BF700604A44 /* SpatialImageConvertor.swift */,
003624652BA3F0AB0080D014 /* ZZHImageExtension.swift */,
00D33BF52B99A19900604A44 /* SpatialVideoConvertor.swift */,
1E1EA28F2B933C8200A5D5D2 /* VideoWriter.swift */,
00D33BF92B9AB21A00604A44 /* ZZHAVExtension.swift */,
@ -662,6 +665,7 @@
AF2120E02B4E9C8000400B7F /* Timer+Add.swift in Sources */,
AF2120DE2B4E9C3500400B7F /* AppDelegate+Add.swift in Sources */,
1EFB8C6E2B88D9D800C72119 /* CCBottomMenuPopView.swift in Sources */,
003624662BA3F0AB0080D014 /* ZZHImageExtension.swift in Sources */,
1E1EA2942B9364F000A5D5D2 /* CCSpatialVideoDisplayController.swift in Sources */,
AFD9F5952B58D029008716DE /* MetalPlayer.swift in Sources */,
AF2121002B4EA5FE00400B7F /* CCRegisterController.swift in Sources */,

View File

@ -164,7 +164,8 @@ class CCSpatialPhotoDisplayController: BaseController {
imgData = originalData
//
let image = UIImage(data: originalData!)
var image = UIImage(data: originalData!)
image = image?.getUpImg()
mImgView.image = image
let isSpatial = isSpatialImage(originalData: originalData!)
@ -260,13 +261,13 @@ class CCSpatialPhotoDisplayController: BaseController {
if(selectedIndex == 0){
// --- 2D(广)
// let image = UIImage(contentsOfFile: photoOriginalURL!.path)
let image = UIImage(data: photoOriginalData!)
var image = UIImage(data: photoOriginalData!)
image = image?.getUpImg()
mImgView.image = image
}else if(selectedIndex == 1){
//
// mImgView.frame = CGRect.init(x: 0, y: 100, width: self.view.frame.size.width, height: 130)
let newpb = joinImages( leftImage: lciImage, rightImage:rciImage )
let lastImg = convertCIImageToUIImage(ciImage: newpb)!
DispatchQueue.main.async { [weak self] in
@ -325,13 +326,15 @@ class CCSpatialPhotoDisplayController: BaseController {
let lastImg = compositeFilter.outputImage!
DispatchQueue.main.async { [weak self] in
self!.mImgView.image = UIImage(ciImage: lastImg)
var ri = UIImage(ciImage: lastImg)
ri = ri.getUpImg()
self!.mImgView.image = ri
}
}
}else if(selectedIndex == 3){
//
// mImgView.frame = CGRect.init(x: 0, y: 100, width: self.view.frame.size.width, height: 130)
let newpb = joinImages( leftImage:rciImage , rightImage:lciImage )
let lastImg = convertCIImageToUIImage(ciImage: newpb)!
DispatchQueue.main.async { [weak self] in
@ -359,37 +362,6 @@ class CCSpatialPhotoDisplayController: BaseController {
return false
}
print(properties)
/*
[ProfileName: sRGB IEC61966-2.1, {TIFF}: {
Orientation = 1;
TileLength = 512;
TileWidth = 512;
}, PixelWidth: 4032, PixelHeight: 3024, {HEIF}: {
CameraExtrinsics = {
CoordinateSystemID = 0;
Position = (
"-0.019238",
0,
0
);
Rotation = (
1,
0,
0,
0,
1,
0,
0,
0,
1
);
};
}, Depth: 8, Orientation: 1, ColorModel: RGB]
{HEIF}
*/
//gif
// let frameCount = CGImageSourceGetCount(imageSource)
@ -462,94 +434,6 @@ class CCSpatialPhotoDisplayController: BaseController {
}
/// Converts an MV-HEVC spatial video at `inputFile` into a flat side-by-side
/// video at `outputFile`, reporting progress in [0, 1] via `progress`.
/// Relies on project members not visible here: `type`, `player`, `VideoWriter`,
/// `joinImages`, `getOrientationAndResolutionSizeForVideo` — see their files.
/// - Throws: errors from AVAssetReader setup, metadata loading, or the writer.
func convertVideo( inputFile : URL, outputFile: URL, progress: ((Float)->())? = nil ) async throws {
// Best-effort removal of a stale output file; failure is only logged.
do {
try FileManager.default.removeItem(atPath: outputFile.path)
print("视频文件删除成功")
} catch {
print("删除视频文件出错:\(error)")
}
// Load the AVAsset
let asset = AVAsset(url: inputFile)
let assetReader = try AVAssetReader(asset: asset)
// Detect whether the source is an Apple spatial video by looking for the
// QuickTime spatial-format metadata key. Non-spatial input is only logged;
// conversion still proceeds.
let userDataItems = try await asset.loadMetadata(for:.quickTimeMetadata)
let spacialCharacteristics = userDataItems.filter { $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version" }
if spacialCharacteristics.count == 0 {
print("该视频不是空间视频")
}
// Query the source track's transform and pixel size to configure the writer.
let (orientation, videoSize) = try await getOrientationAndResolutionSizeForVideo(asset: asset)
// Pick the output canvas: for `type == 3` keep the full source height,
// otherwise halve it. NOTE(review): the meaning of `type` is defined
// elsewhere in this class — confirm 3 really means "full-height output".
let vw:VideoWriter?
if(type == 3){
vw = VideoWriter(url: outputFile, width: Int(videoSize.width), height: Int(videoSize.height), orientation: orientation, sessionStartTime: CMTime(value: 1, timescale: 30 ), isRealTime: false, queue: .main)
}
else{
vw = VideoWriter(url: outputFile, width: Int(videoSize.width), height: Int(videoSize.height/2), orientation: orientation, sessionStartTime: CMTime(value: 1, timescale: 30 ), isRealTime: false, queue: .main)
}
// Request both MV-HEVC layers (0 = one eye, 1 = the other) so each sample
// buffer carries tagged buffers for both stereo views.
let output = try await AVAssetReaderTrackOutput(
track: asset.loadTracks(withMediaType: .video).first!,
outputSettings: [
AVVideoDecompressionPropertiesKey: [
kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
],
]
)
assetReader.add(output)
assetReader.startReading()
let duration = try await asset.load(.duration)
// NOTE(review): this composition only prints each source frame; it looks
// like leftover debug wiring on the preview player — confirm it is needed.
if let playerItem = player.currentItem {
playerItem.videoComposition = AVVideoComposition(asset: playerItem.asset) { request in
print(request.sourceImage)
}
}
// Drain the reader synchronously; each sample buffer should contain one
// tagged pixel buffer per eye.
while let nextSampleBuffer = output.copyNextSampleBuffer() {
// NOTE(review): `return` here aborts the whole conversion (writer never
// finishes) as soon as one sample lacks tagged buffers — confirm intended.
guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { return }
let leftEyeBuffer = taggedBuffers.first(where: {
$0.tags.first(matchingCategory: .stereoView) == .stereoView(.leftEye)
})?.buffer
let rightEyeBuffer = taggedBuffers.first(where: {
$0.tags.first(matchingCategory: .stereoView) == .stereoView(.rightEye)
})?.buffer
if let leftEyeBuffer,
let rightEyeBuffer,
case let .pixelBuffer(leftEyePixelBuffer) = leftEyeBuffer,
case let .pixelBuffer(rightEyePixelBuffer) = rightEyeBuffer {
let lciImage = CIImage(cvPixelBuffer: leftEyePixelBuffer)
let rciImage = CIImage(cvPixelBuffer: rightEyePixelBuffer)
// Stitch the two eyes into a single side-by-side frame and append it
// at the sample's original presentation timestamp.
let newpb = joinImages( leftImage: lciImage, rightImage:rciImage )
let time = CMSampleBufferGetOutputPresentationTimeStamp(nextSampleBuffer)
_ = vw!.add(image: newpb, presentationTime: time)
// print( "Added frame at \(time)")
progress?( Float(time.value)/Float(duration.value))
// try await Task.sleep(nanoseconds: 3_000_000)
}
}
_ = try await vw!.finish()
print( "Finished")
}
func getOrientationAndResolutionSizeForVideo(asset:AVAsset) async throws -> (CGAffineTransform, CGSize) {
guard let track = try await asset.loadTracks(withMediaType: AVMediaType.video).first
@ -572,8 +456,10 @@ class CCSpatialPhotoDisplayController: BaseController {
//
func joinImages( leftImage:CIImage, rightImage:CIImage) -> CIImage {
let left = UIImage(ciImage: leftImage )
let right = UIImage(ciImage: rightImage )
var left = UIImage(ciImage: leftImage )
left = left.getUpImg()
var right = UIImage(ciImage: rightImage )
right = right.getUpImg()
let imageWidth = left.size.width/2 + right.size.width/2
let imageHeight = left.size.height/2
@ -611,14 +497,6 @@ class CCSpatialPhotoDisplayController: BaseController {
}
/*
// MARK: - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// Get the new view controller using segue.destination.
// Pass the selected object to the new view controller.
}
*/
}

View File

@ -30,8 +30,8 @@ class CCSpatialShootController: BaseController {
var isRecording = false
var imageCGImagePropertyOrientation:CGImagePropertyOrientation = CGImagePropertyOrientation.left//cgimage
var imageOrientation:UIImage.Orientation = UIImage.Orientation.up
var leftEyeVideoURL:URL?
var rightEyeVideoURL:URL?
@ -836,15 +836,23 @@ class CCSpatialShootController: BaseController {
print("未知")
case .portrait:
print("竖屏")
imageCGImagePropertyOrientation = .up
imageOrientation = .up
showHorizontalScreenTips()
case .portraitUpsideDown:
print("颠倒竖屏")
imageCGImagePropertyOrientation = .down
imageOrientation = .down
showHorizontalScreenTips()
case .landscapeLeft:
print("设备向左旋转横屏")
imageCGImagePropertyOrientation = .left
imageOrientation = .left
hidenHorizontalScreenTips()
case .landscapeRight:
print("设备向右旋转横屏")
imageCGImagePropertyOrientation = .right
imageOrientation = .right
hidenHorizontalScreenTips()
case .faceUp:
print("屏幕朝上")
@ -1016,19 +1024,42 @@ extension CCSpatialShootController: AVCaptureFileOutputRecordingDelegate {
}
}
/// Maps a `UIImage.Orientation` to its `CGImagePropertyOrientation` counterpart.
/// Unknown future cases fall back to `.up`.
func convertOrigation(orientation:UIImage.Orientation) -> CGImagePropertyOrientation{
    switch orientation {
    case .up:            return .up
    case .down:          return .down
    case .left:          return .left
    case .right:         return .right
    case .upMirrored:    return .upMirrored
    case .downMirrored:  return .downMirrored
    case .leftMirrored:  return .leftMirrored
    case .rightMirrored: return .rightMirrored
    @unknown default:    return .up
    }
}
//
func compositeSpatialPhoto(){
let img1:UIImage = imgs[0] as! UIImage
let img2:UIImage = imgs[1] as! UIImage
let imageSize1 = CGRect(x: 0, y: 0, width: img1.cgImage!.width, height: img1.cgImage!.height)
let imageSize2 = CGRect(x: 0, y: 0, width: img2.cgImage!.width, height: img2.cgImage!.height)
// let imageSize1 = CGRect(x: 0, y: 0, width: img1.cgImage!.width, height: img1.cgImage!.height)
// let imageSize2 = CGRect(x: 0, y: 0, width: img2.cgImage!.width, height: img2.cgImage!.height)
let url = URL.documentsDirectory.appending(path:"aaa12.HEIC")
let destination = CGImageDestinationCreateWithURL(url as CFURL, UTType.heic.identifier as CFString, 2, nil)!
imageCGImagePropertyOrientation = .left
let properties1 = [
kCGImagePropertyGroups: [
kCGImagePropertyGroupIndex: 0,
@ -1036,6 +1067,7 @@ extension CCSpatialShootController: AVCaptureFileOutputRecordingDelegate {
kCGImagePropertyGroupImageIndexLeft: 0,
kCGImagePropertyGroupImageIndexRight: 1,
],
kCGImagePropertyTIFFOrientation:imageCGImagePropertyOrientation,
kCGImagePropertyHEIFDictionary: [
kIIOMetadata_CameraExtrinsicsKey: [
kIIOCameraExtrinsics_CoordinateSystemID: 0,
@ -1051,7 +1083,7 @@ extension CCSpatialShootController: AVCaptureFileOutputRecordingDelegate {
]
]
]
]
] as [CFString : Any]
let properties2 = [
kCGImagePropertyGroups: [
@ -1060,6 +1092,7 @@ extension CCSpatialShootController: AVCaptureFileOutputRecordingDelegate {
kCGImagePropertyGroupImageIndexLeft: 0,
kCGImagePropertyGroupImageIndexRight: 1,
],
kCGImagePropertyTIFFOrientation:imageCGImagePropertyOrientation,
kCGImagePropertyHEIFDictionary: [
kIIOMetadata_CameraExtrinsicsKey: [
kIIOCameraExtrinsics_CoordinateSystemID: 0,
@ -1075,21 +1108,32 @@ extension CCSpatialShootController: AVCaptureFileOutputRecordingDelegate {
]
]
]
]
] as [CFString : Any]
let leftImg = fixOrientation(img1)
let rightImg = fixOrientation(img2)
let leftImg = img1//fixOrientation(img1)
let rightImg = img2//fixOrientation(img2)
// CGImageDestinationSetProperties(destination, [kCGImagePropertyOrientation: imageCGImagePropertyOrientation] as CFDictionary)
// CGImageDestinationSetProperties(destination,properties1 as CFDictionary)
// CGImageDestinationSetProperties(destination,properties2 as CFDictionary)
CGImageDestinationAddImage(destination, leftImg.cgImage!, properties1 as CFDictionary)
CGImageDestinationAddImage(destination, rightImg.cgImage!, properties2 as CFDictionary)
CGImageDestinationFinalize(destination)
// CGImageDestinationAddImage(destination, leftImg.cgImage!, [kCGImagePropertyOrientation: imageCGImagePropertyOrientation] as CFDictionary)
//// CGImageDestinationAddImage(destination, rightImg.cgImage!, [kCGImagePropertyOrientation: imageCGImagePropertyOrientation] as CFDictionary)
let rr = CGImageDestinationFinalize(destination)
if rr == false {
print("ee..")
}
let image = UIImage(contentsOfFile: url.path())
let source = CGImageSourceCreateWithURL(url as CFURL, nil)!
guard let properties22 = CGImageSourceCopyPropertiesAtIndex(source, 1, nil) as? [CFString: Any] else {
return
}
print(properties22)
print("ssss:\(properties22)")
savePhoto(url)
@ -1097,6 +1141,11 @@ extension CCSpatialShootController: AVCaptureFileOutputRecordingDelegate {
//
func fixOrientation(_ image: UIImage) -> UIImage {
return image
// return UIImage(cgImage: image.cgImage!, scale: image.scale, orientation: imageOrientation)
// No-op if the orientation is already correct
guard image.imageOrientation != .up else { return image }

View File

@ -60,8 +60,19 @@ class SpatialImageConvertor {
let rciImage = CIImage(cgImage: frames[1])
let left_uiimage = UIImage(ciImage: lciImage )
let left_uiimage_new = left_uiimage.getUpImg()
let right_uiimage = UIImage(ciImage: rciImage )
return (left_uiimage,right_uiimage)
let right_uiimage_new = right_uiimage.getUpImg()
return (left_uiimage_new,right_uiimage_new)
}
/// Renders a `CIImage` into a `CGImage` over its full extent using a
/// throwaway `CIContext`.
/// NOTE(review): force-unwraps `createCGImage`; this crashes on images with
/// an infinite extent — confirm callers never pass one.
class func convertCIImageToCGImage(ciImage:CIImage) -> CGImage {
    let renderer = CIContext()
    return renderer.createCGImage(ciImage, from: ciImage.extent)!
}
class func convertCIImageToUIImage(ciImage: CIImage) -> UIImage? {

View File

@ -0,0 +1,52 @@
//
// ZZHImageExtension.swift
// SwiftProject
//
// Created by aaa on 2024/3/15.
//
import Foundation
import UIKit
extension UIImage {
    /// Returns a copy of this image whose content is rotated by `radians`
    /// around its centre; the output canvas is the axis-aligned bounding box
    /// of the rotated image. Returns `self` if no graphics context is available.
    /// - Parameter radians: rotation angle in radians (positive = counter-clockwise
    ///   in UIKit's flipped coordinate space).
    func rotate(radians: CGFloat) -> UIImage {
        // Bounding box of the rotated image, rounded outward to whole points.
        let rotatedSize = CGRect(origin: .zero, size: size)
            .applying(CGAffineTransform(rotationAngle: CGFloat(radians)))
            .integral.size
        UIGraphicsBeginImageContext(rotatedSize)
        // Always balance Begin/End — the original leaked the context when the
        // guard below failed, because the early return skipped EndImageContext.
        defer { UIGraphicsEndImageContext() }
        guard let context = UIGraphicsGetCurrentContext() else { return self }
        // Pivot about the centre of the new canvas.
        let origin = CGPoint(x: rotatedSize.width / 2.0,
                             y: rotatedSize.height / 2.0)
        context.translateBy(x: origin.x, y: origin.y)
        context.rotate(by: radians)
        // Draw centred on the pivot. The original used (-origin.y, -origin.x),
        // which only coincidentally equals this for the ±π/2 angles the app
        // uses and is wrong for any other angle; (-w/2, -h/2) is correct for
        // arbitrary angles and identical at ±π/2.
        draw(in: CGRect(x: -size.width / 2.0, y: -size.height / 2.0,
                        width: size.width, height: size.height))
        return UIGraphicsGetImageFromCurrentImageContext() ?? self
    }
    /// Returns the image rotated so its content displays upright, using the
    /// heuristic angle from `getRotateFrom()`.
    func getUpImg() -> UIImage {
        let dg = getRotateFrom()
        let rotatedImage = rotate(radians: dg)
        return rotatedImage
    }
    /// Compensation angle (radians) for `imageOrientation`.
    /// NOTE(review): every orientation except `.left`/`.right` — including an
    /// already-upright `.up` image — maps to -π/2. That matches this app's use
    /// on CIImage-backed images (which always report `.up`), but it will rotate
    /// genuinely upright images; the commit message itself says orientation
    /// handling is unfinished — confirm before reusing this elsewhere.
    func getRotateFrom() -> CGFloat {
        print("..or:\(self.imageOrientation)")
        if self.imageOrientation == .left {
            return .pi/2
        } else if self.imageOrientation == .right {
            return -.pi/2
        } else {
            return -.pi/2
        }
    }
}