VPCamera3/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialPhotoDisplayController.swift


//
// CCSpatialPhotoDisplayController.swift
// SwiftProject
//
// Created by Zhang, Joyce on 2024/3/2.
//
import UIKit
import AVFoundation
import VideoToolbox
import CoreVideo
import ImageIO
import CoreImage
import Photos
enum VideoReaderError : Error {
case invalidVideo
case notSpacialVideo
}
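/// Displays a spatial (stereo) photo and lets the user preview it as mono 2D,
/// parallel-eye side-by-side, red-blue anaglyph, or cross-eye side-by-side.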
class CCSpatialPhotoDisplayController: BaseController {
var player:AVPlayer = AVPlayer()
var outputVideoURL:URL?
//
var photoOriginalURL:URL?
var photoOriginalData:Data?
var imgData:Data?
//
var type = 0
lazy var mTopImgView:UIImageView = {
//393*236
let view = UIImageView(frame: CGRect(x: 0, y: 0, width: SCREEN_Width, height: SCREEN_Height * 236/393))
view.image = UIImage.init(named: "BG_Top")
return view
}()
lazy var transformButton: UIButton = {
//76*56
let transformButton = UIButton.init(type: UIButton.ButtonType.custom)
transformButton.tag = 201
transformButton.isSelected = false
transformButton.backgroundColor = UIColor(hexString: "#5326D6")
transformButton.addTarget(self, action: #selector(navgationButtonClick2(sender:)), for: UIControl.Event.touchUpInside)
let img2:UIImage = UIImage.init(named: "transform_button" as String)!
transformButton.setImage(img2, for: UIControl.State.normal)
transformButton.frame = CGRect(x: 0, y: 0, width: 56, height: 36)
transformButton.layer.cornerRadius = 18
transformButton.layer.masksToBounds = true
transformButton.centerY = StatuBar_Height + NavBar_Height * 0.5
transformButton.right = SCREEN_Width - 24
return transformButton
}()
lazy var mTopCenterTypeButton: UIButton = {
//173*36
let button = UIButton()
button.backgroundColor = UIColor.hexStringToColor(hexString: "#1F1E20")
button.tag = 202
button.isSelected = false
button.addTarget(self, action: #selector(navgationButtonClick2(sender:)), for: UIControl.Event.touchUpInside)
button.frame = CGRect(x: 2, y: 10, width: SCREEN_Width * 0.4, height: 36)
button.centerY = StatuBar_Height + NavBar_Height * 0.5
button.centerX = SCREEN_Width * 0.5
button.clipsToBounds = true
button.layer.cornerRadius = 18
button.layer.borderWidth = 1
button.layer.borderColor = UIColor.white.cgColor
button.setTitle("单眼2D", for: UIControl.State.normal) // "单眼2D" = mono 2D
button.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
button.updateBtnEdgeInsets(style: .Right, space: 10)
button.setTitleColor(UIColor.white, for: UIControl.State.normal)
button.titleLabel?.font = KFont_Medium(14)
return button
}()
lazy var mImgView:UIImageView = {
let imageView = UIImageView()
imageView.frame = CGRect.init(x: 0, y: 250, width: self.view.frame.size.width, height: 240)
imageView.contentMode = .scaleAspectFit
imageView.backgroundColor = UIColor.hexStringToColor(hexString: "#383739")
// let image = UIImage(contentsOfFile: photoOriginalURL!.path)
// imageView.image = image
imageView.isUserInteractionEnabled = true
// let tapGesture = UITapGestureRecognizer(target: self, action: #selector(imageTapped(_:)))
// imageView.addGestureRecognizer(tapGesture)
return imageView
}()
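// Display modes offered by the type menu: 单眼2D (mono 2D), 平行眼 (parallel-eye),
// 红蓝立体 (red-blue anaglyph), 交叉眼 (cross-eye).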
var typeData:[(icon:String,title:String,isHiden:Bool)] = [(icon:"type_check",title:"单眼2D",isHiden:false),
(icon:"type_check",title:"平行眼",isHiden:false),
(icon:"type_check",title:"红蓝立体",isHiden:false),
(icon:"type_check",title:"交叉眼",isHiden:false)]
lazy var menuView: CCSpatialDisplayTypeView = {
//icon
// let popData = [(icon:"type_check",title:"2D",isHiden:false),
// (icon:"type_check",title:"",isHiden:false),
// (icon:"type_check",title:"",isHiden:false),
// (icon:"type_check",title:"",isHiden:false)]
//
let parameters:[CCSpatialDisplayTypeConfigure] = [
.PopMenuTextColor(UIColor.white),
.popMenuItemHeight(40),
.PopMenuTextFont(KFont_Medium(12)),
.PopMenuBackgroudColor(UIColor(hexString: "#1F1E20"))
]
//init (testarrow)
let pointOnScreen = navtionImgView!.convert(CGPointMake(navtionImgView!.centerX, navtionImgView!.bottom), to: KWindow)
let popMenu = CCSpatialDisplayTypeView(menuWidth: SCREEN_Width * 0.4, arrow: pointOnScreen, datas: typeData,configures: parameters)
return popMenu
}()
override func viewDidLoad() {
super.viewDidLoad()
self.view.backgroundColor = UIColor(hexString: "#060507")
// Do any additional setup after loading the view.
// let path = Bundle.main.path(forResource: "img3", ofType: "HEIC")
// photoOriginalURL = URL.init(filePath: path!)
outputVideoURL = URL.documentsDirectory.appending(path:"output11114.jpg")
//
var originalData:Data?
if photoOriginalURL != nil {
do {
originalData = try Data(contentsOf: photoOriginalURL!)
}catch let error as NSError {
print(error)
}
}else if photoOriginalData != nil {
originalData = photoOriginalData
}
imgData = originalData
guard let originalData = originalData else {
print("No source photo provided")
return
}
// Show the primary image immediately.
let image = UIImage(data: originalData)
mImgView.image = image
let isSpatial = isSpatialImage(originalData: originalData)
if !isSpatial {
print("This is not a spatial photo")
return
}
//
self.setLeftOneBtnImg(imgStr: "spatial_back_button")
// self.setLeftBtnImg(imgStr1: "", imgStr2: "spatial_back_button")
self.setNavgationBarColorImg(color: .clear)
self.setNavgationBarLine(color: .clear)
self.view.addSubview(mTopImgView)
self.view.bringSubviewToFront(self.navtionBar!)
// navtionBar?.addSubview(backButton)
navtionBar?.addSubview(transformButton)
navtionBar?.addSubview(mTopCenterTypeButton)
self.view.addSubview(mImgView)
}
//MARK: - action
@objc public func navgationButtonClick2(sender:UIButton){
if sender.tag == 200 {
//
}else if sender.tag == 201 {
//
let transVC = VRPhotoTransformController()
transVC.sourceImageData = photoOriginalData
self.navigationController?.pushViewController(transVC, animated: true)
}else if sender.tag == 202 {
//
mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_up"), for: .normal)
menuView.show()
//click
menuView.didSelectMenuBlock = { [weak self](index:Int)->Void in
print("block select \(index)")
self?.mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
self?.selectedSpatialType(selectedIndex: index)
}
}
}
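/// Re-renders mImgView for the chosen display mode:
/// 0 = mono 2D (primary image only), 1 = parallel-eye side-by-side,
/// 2 = red-blue anaglyph, 3 = cross-eye side-by-side (eyes swapped).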
func selectedSpatialType(selectedIndex:Int) {
//
/*
let popData = [/*(icon:"saoyisao",title:""),*/
(icon:"",title:"2D",isHiden:true),
(icon:"",title:"",isHiden:false),
(icon:"",title:"",isHiden:false),
(icon:"",title:"",isHiden:false)]
*/
print("选中了第 \(selectedIndex) 个选项")
player.pause()
NotificationCenter.default.removeObserver(self)
mImgView.frame = CGRect.init(x: 0, y: 200, width: self.view.frame.size.width, height: 240)
// guard let imageSource = CGImageSourceCreateWithURL(photoOriginalURL! as CFURL, nil) else {
// return
// }
guard let imgData = imgData, let imageSource = CGImageSourceCreateWithData(imgData as CFData, nil) else {
return
}
// print(imageSource)
let frameCount = CGImageSourceGetCount(imageSource)
var frames: [CGImage] = []
for index in 0..<frameCount {
guard let frameImage = CGImageSourceCreateImageAtIndex(imageSource, index, nil) else {
continue
}
frames.append(frameImage)
}
guard frames.count >= 2 else { return }
let lciImage = CIImage(cgImage: frames[0])
let rciImage = CIImage(cgImage: frames[1])
if selectedIndex == 0 {
// --- Mono 2D: show the primary image as-is.
// let image = UIImage(contentsOfFile: photoOriginalURL!.path)
// Use imgData (unwrapped above) rather than photoOriginalData, which is nil when the photo came from a URL.
let image = UIImage(data: imgData)
mImgView.image = image
}else if selectedIndex == 1 {
// Parallel-eye: left-eye image on the left, right-eye image on the right.
// mImgView.frame = CGRect.init(x: 0, y: 100, width: self.view.frame.size.width, height: 130)
let newpb = joinImages( leftImage: lciImage, rightImage:rciImage )
guard let lastImg = convertCIImageToUIImage(ciImage: newpb) else { return }
DispatchQueue.main.async { [weak self] in
self?.mImgView.image = lastImg
}
}else if selectedIndex == 2 {
// Red-blue anaglyph: keep one color channel from each eye, then blend the two images.
// mImgView.frame = CGRect.init(x: 0, y: 180, width: self.view.frame.size.width, height: 380)
// Color matrices laid out as R, G, B, A rows of five values each.
// Note: CIColorMatrix's inputRVector/inputBVector are documented as 4-component
// vectors, so only the leading values of these CIVectors are expected to take effect.
let redColorMatrix: [CGFloat] = [
0.0, 0.0, 0.0, 0.0, 0.0, // red
0.0, 0.0, 0.0, 0.0, 0.0, // green
0.0, 0.0, 0.5, 0.0, 0.0, // blue
0.0, 0.0, 0.0, 1.0, 0.0 // alpha
]
let blueColorMatrix: [CGFloat] = [
0.5, 0.0, 0.0, 0.0, 0.0, // red
0.0, 0.0, 0.0, 0.0, 0.0, // green
0.0, 0.0, 0.0, 0.0, 0.0, // blue
0.0, 0.0, 0.0, 1.0, 0.0 // alpha
]
let redFilter = CIFilter(name: "CIColorMatrix")!
redFilter.setValue(lciImage, forKey: kCIInputImageKey)
redFilter.setValue(CIVector(values: redColorMatrix, count: redColorMatrix.count), forKey: "inputRVector")
let blueFilter = CIFilter(name: "CIColorMatrix")!
blueFilter.setValue(rciImage, forKey: kCIInputImageKey)
blueFilter.setValue(CIVector(values: blueColorMatrix, count: blueColorMatrix.count), forKey: "inputBVector")
//
if let redOutputImage = redFilter.outputImage,
let blueOutputImage = blueFilter.outputImage {
// Candidate blend modes for combining the two tinted images:
// CIScreenBlendMode: screen blend, result is never darker than either input
// CIHardLightBlendMode: hard light
// CILightenBlendMode: takes the lighter of the two pixels
// CIColorDodgeBlendMode: color dodge
// CIColorBurnBlendMode: color burn
// CIDarkenBlendMode: takes the darker of the two pixels
// CILinearDodgeBlendMode: linear dodge (additive)
// CIMultiplyBlendMode: multiplies the two pixels
// CISourceOverCompositing: places the input over the background
let compositeFilter = CIFilter(name: "CIScreenBlendMode")!
compositeFilter.setValue(redOutputImage, forKey: kCIInputImageKey)
compositeFilter.setValue(blueOutputImage, forKey: kCIInputBackgroundImageKey)
// let sharpenedFilter = CIFilter(name: "CISharpenLuminance")!
// sharpenedFilter.setValue(compositeFilter.outputImage, forKey: kCIInputImageKey)
// sharpenedFilter.setValue(2, forKey: kCIInputSharpnessKey)
// let colorControlsFilter = CIFilter(name: "CIColorControls")!
// colorControlsFilter.setValue(sharpenedFilter.outputImage, forKey: kCIInputImageKey)
// colorControlsFilter.setValue(0.7, forKey: kCIInputSaturationKey)
if let lastImg = compositeFilter.outputImage {
DispatchQueue.main.async { [weak self] in
self?.mImgView.image = UIImage(ciImage: lastImg)
}
}
}
}else if selectedIndex == 3 {
// Cross-eye: swap the eyes so the right-eye image sits on the left.
// mImgView.frame = CGRect.init(x: 0, y: 100, width: self.view.frame.size.width, height: 130)
let newpb = joinImages( leftImage:rciImage , rightImage:lciImage )
guard let lastImg = convertCIImageToUIImage(ciImage: newpb) else { return }
DispatchQueue.main.async { [weak self] in
self?.mImgView.image = lastImg
}
}
}
//MARK: -
//
// let makerAppleProperties = imageProperties["{HEIF}"]
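/// Rough spatial-photo check: a spatial HEIC stores a second image, so this
/// verifies that properties can be read at image index 1 (the left/right pair
/// plus HEIF camera extrinsics, as in the sample dump below).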
func isSpatialImage(originalData: Data) -> Bool {
//data
guard let imageSource = CGImageSourceCreateWithData(originalData as CFData, nil) else {
return false
}
//url
// guard let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil) else {
// return false
// }
guard let properties = CGImageSourceCopyPropertiesAtIndex(imageSource, 1, nil) as? [CFString: Any] else {
return false
}
print(properties)
/*
[ProfileName: sRGB IEC61966-2.1, {TIFF}: {
Orientation = 1;
TileLength = 512;
TileWidth = 512;
}, PixelWidth: 4032, PixelHeight: 3024, {HEIF}: {
CameraExtrinsics = {
CoordinateSystemID = 0;
Position = (
"-0.019238",
0,
0
);
Rotation = (
1,
0,
0,
0,
1,
0,
0,
0,
1
);
};
}, Depth: 8, Orientation: 1, ColorModel: RGB]
{HEIF}
*/
//gif
// let frameCount = CGImageSourceGetCount(imageSource)
// if(frameCount == 1){
// return false
// }
return true
}
//MARK: -
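/// Renders the region of the given UIImage described by `frame` into a newly
/// created 32BGRA CVPixelBuffer of the same size.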
func createCVPixelBuffer(from image: UIImage, with frame: CGRect) -> CVPixelBuffer? {
let options: [String: Any] = [
kCVPixelBufferCGImageCompatibilityKey as String: true,
kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
]
var pixelBuffer: CVPixelBuffer?
let status = CVPixelBufferCreate(kCFAllocatorDefault,
Int(frame.width),
Int(frame.height),
kCVPixelFormatType_32BGRA,
options as CFDictionary,
&pixelBuffer)
guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
return nil
}
CVPixelBufferLockBaseAddress(buffer, [])
// Make sure the buffer is unlocked on every exit path.
defer { CVPixelBufferUnlockBaseAddress(buffer, []) }
let pixelData = CVPixelBufferGetBaseAddress(buffer)
let colorSpace = CGColorSpaceCreateDeviceRGB()
guard let cgImage = image.cgImage,
let context = CGContext(data: pixelData,
width: Int(frame.width),
height: Int(frame.height),
bitsPerComponent: 8,
bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
space: colorSpace,
bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) else {
return nil
}
context.translateBy(x: -frame.origin.x, y: -frame.origin.y)
context.draw(cgImage, in: CGRect(origin: .zero, size: image.size))
return buffer
}
/// Draws the two eye images side by side at half their original size and
/// returns the result as a CIImage (mirrors joinImages below).
func joinImages2( leftImage:CIImage, rightImage:CIImage) -> CIImage {
let left = UIImage(ciImage: leftImage )
let right = UIImage(ciImage: rightImage )
let imageWidth = left.size.width/2 + right.size.width/2
let imageHeight = left.size.height/2
let newImageSize = CGSize(width:imageWidth, height: imageHeight);
UIGraphicsBeginImageContextWithOptions(newImageSize, false, 1);
left.draw(in: CGRect(x:0, y:0, width:imageWidth/2, height:imageHeight))
right.draw(in: CGRect(x:imageWidth/2, y:0, width:imageWidth/2, height:imageHeight))
let image = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext();
let ci = CIImage(cgImage: image.cgImage!)
return ci
}
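/// Reads an MV-HEVC spatial video, extracts the left/right eye layers from each
/// sample, composes them side by side with joinImages, and writes the result to
/// `outputFile` via VideoWriter, reporting progress as a 0...1 fraction.
///
/// A minimal usage sketch (the file names here are hypothetical):
///
///     let input = URL.documentsDirectory.appending(path: "spatial.mov")
///     let output = URL.documentsDirectory.appending(path: "sbs.mov")
///     try await convertVideo(inputFile: input, outputFile: output) { p in
///         print("progress: \(p)")
///     }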
func convertVideo( inputFile : URL, outputFile: URL, progress: ((Float)->())? = nil ) async throws {
do {
try FileManager.default.removeItem(atPath: outputFile.path)
print("Removed existing output file")
} catch {
print("Failed to remove existing output file: \(error)")
}
// Load the AVAsset
let asset = AVAsset(url: inputFile)
let assetReader = try AVAssetReader(asset: asset)
// Spatial videos carry a QuickTime metadata item identifying the spatial format version.
let userDataItems = try await asset.loadMetadata(for:.quickTimeMetadata)
let spatialCharacteristics = userDataItems.filter { $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version" }
if spatialCharacteristics.isEmpty {
print("This video is not a spatial video")
}
// Determine the source orientation and display size.
let (orientation, videoSize) = try await getOrientationAndResolutionSizeForVideo(asset: asset)
// Create the writer: type 3 keeps the full height, all other types write at half height.
let vw:VideoWriter?
if(type == 3){
vw = VideoWriter(url: outputFile, width: Int(videoSize.width), height: Int(videoSize.height), orientation: orientation, sessionStartTime: CMTime(value: 1, timescale: 30 ), isRealTime: false, queue: .main)
}
else{
vw = VideoWriter(url: outputFile, width: Int(videoSize.width), height: Int(videoSize.height/2), orientation: orientation, sessionStartTime: CMTime(value: 1, timescale: 30 ), isRealTime: false, queue: .main)
}
// Ask the decoder for both MV-HEVC video layers (IDs 0 and 1) so each sample carries the left- and right-eye buffers.
let output = try await AVAssetReaderTrackOutput(
track: asset.loadTracks(withMediaType: .video).first!,
outputSettings: [
AVVideoDecompressionPropertiesKey: [
kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
],
]
)
assetReader.add(output)
assetReader.startReading()
let duration = try await asset.load(.duration)
if let playerItem = player.currentItem {
playerItem.videoComposition = AVVideoComposition(asset: playerItem.asset) { request in
print(request.sourceImage)
}
}
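// Each decoded sample delivers tagged buffers; pick out the left- and right-eye
// pixel buffers by their stereoView tags and append the side-by-side frame.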
while let nextSampleBuffer = output.copyNextSampleBuffer() {
guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { return }
let leftEyeBuffer = taggedBuffers.first(where: {
$0.tags.first(matchingCategory: .stereoView) == .stereoView(.leftEye)
})?.buffer
let rightEyeBuffer = taggedBuffers.first(where: {
$0.tags.first(matchingCategory: .stereoView) == .stereoView(.rightEye)
})?.buffer
if let leftEyeBuffer,
let rightEyeBuffer,
case let .pixelBuffer(leftEyePixelBuffer) = leftEyeBuffer,
case let .pixelBuffer(rightEyePixelBuffer) = rightEyeBuffer {
let lciImage = CIImage(cvPixelBuffer: leftEyePixelBuffer)
let rciImage = CIImage(cvPixelBuffer: rightEyePixelBuffer)
//
let newpb = joinImages( leftImage: lciImage, rightImage:rciImage )
let time = CMSampleBufferGetOutputPresentationTimeStamp(nextSampleBuffer)
_ = vw!.add(image: newpb, presentationTime: time)
// print( "Added frame at \(time)")
progress?( Float(time.value)/Float(duration.value))
// try await Task.sleep(nanoseconds: 3_000_000)
}
}
_ = try await vw!.finish()
print( "Finished")
}
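/// Returns the first video track's preferredTransform together with its
/// display size (naturalSize with the transform applied, absolute-valued).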
func getOrientationAndResolutionSizeForVideo(asset:AVAsset) async throws -> (CGAffineTransform, CGSize) {
guard let track = try await asset.loadTracks(withMediaType: AVMediaType.video).first
else{throw VideoReaderError.invalidVideo}
let naturalSize = try await track.load(.naturalSize)
let naturalTransform = try await track.load(.preferredTransform)
let size = naturalSize.applying(naturalTransform)
return (naturalTransform, CGSize(width: abs(size.width), height: abs(size.height)) )
}
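/// Renders a CIImage through a CIContext into a CGImage-backed UIImage.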
func convertCIImageToUIImage(ciImage: CIImage) -> UIImage? {
let context = CIContext(options: nil)
if let cgImage = context.createCGImage(ciImage, from: ciImage.extent) {
let uiImage = UIImage(cgImage: cgImage)
return uiImage
}
return nil
}
/// Joins the left- and right-eye images into a single side-by-side image at half
/// the source resolution (left eye on the left, right eye on the right).
func joinImages( leftImage:CIImage, rightImage:CIImage) -> CIImage {
let left = UIImage(ciImage: leftImage )
let right = UIImage(ciImage: rightImage )
let imageWidth = left.size.width/2 + right.size.width/2
let imageHeight = left.size.height/2
let newImageSize = CGSize(width:imageWidth, height: imageHeight);
UIGraphicsBeginImageContextWithOptions(newImageSize, false, 1);
left.draw(in: CGRect(x:0, y:0, width:imageWidth/2, height:imageHeight))
right.draw(in: CGRect(x:imageWidth/2, y:0, width:imageWidth/2, height:imageHeight))
let image = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext();
let ci = CIImage(cgImage: image.cgImage!)
return ci
}
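/// Renders a CIImage into a newly allocated 32ARGB CVPixelBuffer matching its extent.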
func pixelBuffer(from ciImage: CIImage) -> CVPixelBuffer? {
var pixelBuffer: CVPixelBuffer?
let attributes: [String: Any] = [
kCVPixelBufferCGImageCompatibilityKey as String: kCFBooleanTrue,
kCVPixelBufferCGBitmapContextCompatibilityKey as String: kCFBooleanTrue
]
let width = Int(ciImage.extent.width)
let height = Int(ciImage.extent.height)
let status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32ARGB, attributes as CFDictionary, &pixelBuffer)
if status == kCVReturnSuccess, let pixelBuffer = pixelBuffer {
let context = CIContext()
context.render(ciImage, to: pixelBuffer)
return pixelBuffer
}
return nil
}
/*
// MARK: - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// Get the new view controller using segue.destination.
// Pass the selected object to the new view controller.
}
*/
}