VPCamera3/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialPhotoDisplayController.swift

620 lines
26 KiB
Swift
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

//
// CCSpatialPhotoDisplayController.swift
// SwiftProject
//
// Created by Zhang, Joyce on 2024/3/2.
//
import UIKit
import AVFoundation
import VideoToolbox
import CoreVideo
import UIKit
import ImageIO
import CoreImage
import Photos
import SVProgressHUD
import Firebase
/// Errors thrown while reading or validating a video / spatial asset.
enum VideoReaderError : Error {
/// The asset has no readable video track (thrown by
/// `getOrientationAndResolutionSizeForVideo(asset:)` when track loading yields none).
case invalidVideo
/// The asset is readable media but not a spatial (stereo) item.
/// NOTE(review): not thrown anywhere in this file — presumably used by callers; confirm.
case notSpacialVideo
}
/// Preview screen for a "spatial" (multi-frame stereo) photo.
///
/// The controller decodes the left/right frames embedded in `photoOriginalData`
/// (via Image I/O) and renders them in one of four viewing modes selected from a
/// drop-down menu: parallel-eye side-by-side, single-eye 2D, red/blue anaglyph,
/// and cross-eye side-by-side. While an external display (AirPlay / HDMI) is
/// connected it mirrors the rendered image to that screen on every display-link
/// tick, and it logs page/streaming durations to Firebase Analytics.
class CCSpatialPhotoDisplayController: BaseController {
// UserDefaults key used to timestamp page entry so viewDidDisappear can log the dwell time.
let kNowTimeToUserDefaultKey_PhotoDisplayController:String = "kNowTimeToUserDefaultKey_PhotoDisplayController"
// Earlier external-window approach, kept for reference:
// lazy var externalWindow:UIWindow = {
// return UIWindow(frame: self.view.bounds)
// }()
// External-screen mirroring state; populated when an external (AirPlay/HDMI) scene connects.
var externalVC:ZZHExternalViewController?
var externalDispalylink:CADisplayLink?
var externalImageView:UIImageView?
// var player:AVPlayer = AVPlayer()
// Scratch output path created in viewDidLoad. NOTE(review): assigned but never
// read or written to in this file — possibly dead; confirm against callers.
var outputVideoURL:URL?
var dvc:UIDocumentViewController?
// Source photo payload — set by the presenting controller before push.
// var photoOriginalURL:URL?
var photoOriginalData:Data?
var imageAsset:PHAsset?
// Working copy of the photo bytes used by selectedSpatialType(selectedIndex:).
var imgData:Data?
// Orientation of the source image; used to re-orient frames before compositing.
var imgOritentation:UIImage.Orientation = .left
// NOTE(review): `type` is never read or written in this file — confirm external use.
var type = 0
/// Decorative header image pinned to the top of the view.
lazy var mTopImgView:UIImageView = {
// Design artwork size: 393x236 (width x height); scaled from screen dimensions.
let view = UIImageView(frame: CGRect(x: 0, y: 0, width: SCREEN_Width, height: SCREEN_Height * 236/393))
view.image = UIImage.init(named: "BG_Top")
return view
}()
/// Nav-bar button (tag 201) that pushes VRPhotoTransformController.
lazy var transformButton: UIButton = {
// Design asset size: 76x56.
let transformButton = UIButton.init(type: UIButton.ButtonType.custom)
transformButton.tag = 201
transformButton.isSelected = false
transformButton.backgroundColor = UIColor(hexString: "#5326D6")
transformButton.addTarget(self, action: #selector(navgationButtonClick2(sender:)), for: UIControl.Event.touchUpInside)
let img2:UIImage = UIImage.init(named: "transform_button" as String)!
transformButton.setImage(img2, for: UIControl.State.normal)
transformButton.frame = CGRect(x: 0, y: 0, width: 56, height: 36)
// Half of the 36pt height -> fully rounded pill shape.
transformButton.layer.cornerRadius = 18
transformButton.layer.masksToBounds = true
transformButton.centerY = StatuBar_Height + NavBar_Height * 0.5
transformButton.right = SCREEN_Width - 24
return transformButton
}()
/// Centered nav-bar button (tag 202) that opens the display-type drop-down menu.
/// Title shows the currently selected viewing mode (default "平行眼" / parallel-eye).
lazy var mTopCenterTypeButton: UIButton = {
// Design size: 173x36.
let button = UIButton()
button.backgroundColor = UIColor.hexStringToColor(hexString: "#1F1E20")
button.tag = 202
button.isSelected = false
button.addTarget(self, action: #selector(navgationButtonClick2(sender:)), for: UIControl.Event.touchUpInside)
button.frame = CGRect(x: 2, y: 10, width: SCREEN_Width * 0.4, height: 36)
button.centerY = StatuBar_Height + NavBar_Height * 0.5
button.centerX = SCREEN_Width * 0.5
button.clipsToBounds = true
button.layer.cornerRadius = 18
button.layer.borderWidth = 1
button.layer.borderColor = UIColor.white.cgColor
button.setTitle("平行眼", for: UIControl.State.normal)
button.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
// Project UIButton extension: places the image to the right of the title with 10pt spacing.
button.updateBtnEdgeInsets(style: .Right, space: 10)
button.setTitleColor(UIColor.white, for: UIControl.State.normal)
button.titleLabel?.font = KFont_Medium(14)
return button
}()
/// Image view showing the composited stereo result on the device screen.
lazy var mImgView:UIImageView = {
let imageView = UIImageView()
imageView.frame = CGRect.init(x: 0, y: 250, width: self.view.frame.size.width, height: 240)
imageView.contentMode = .scaleAspectFit
imageView.backgroundColor = UIColor.clear
imageView.isUserInteractionEnabled = true
return imageView
}()
// Drop-down menu entries, in index order used by selectedSpatialType:
// 0 = parallel-eye, 1 = single-eye 2D, 2 = red/blue anaglyph, 3 = cross-eye.
var typeData:[(icon:String,title:String,isHiden:Bool)] = [(icon:"type_check",title:"平行眼",isHiden:false),
(icon:"type_check",title:"单眼2D",isHiden:false),
(icon:"type_check",title:"红蓝立体",isHiden:false),
(icon:"type_check",title:"交叉眼",isHiden:false)]
/// Drop-down popup anchored under the nav image view; dismiss restores the "down" arrow.
lazy var menuView: CCSpatialDisplayTypeView = {
// Menu appearance configuration (colors, fonts, row height).
let parameters:[CCSpatialDisplayTypeConfigure] = [
.PopMenuTextColor(UIColor.white),
.popMenuItemHeight(40),
.PopMenuTextFont(KFont_Medium(12)),
.PopMenuBackgroudColor(UIColor(hexString: "#1F1E20"))
]
// Anchor point: bottom-center of the nav image view, converted to window coordinates.
let pointOnScreen = navtionImgView!.convert(CGPointMake(navtionImgView!.centerX, navtionImgView!.bottom), to: KWindow)
let popMenu = CCSpatialDisplayTypeView(menuWidth: SCREEN_Width * 0.4, arrow: pointOnScreen, datas: typeData,configures: parameters){ [weak self] in
// Dismiss handler: restore the "down" arrow icon on the type button.
self?.mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
}
return popMenu
}()
/// True while the current audio route includes an HDMI/AirPlay output.
/// Setting it (re)configures external-screen mirroring via setttinisScreenMirroring.
var isAirPlayActive:Bool = false {
didSet{
// Sync mirroring UI/state with the new connection status.
setttinisScreenMirroring(isScreenMirroring: isAirPlayActive)
}
}
deinit {
print("spatioal photo display deinit...\(self)")
NotificationCenter.default.removeObserver(self)
}
override func viewDidLoad() {
super.viewDidLoad()
// Record page-entry time for the duration analytics in viewDidDisappear.
ZZHHelper.setNowTimeToUserDefaultWithKey(kNowTimeToUserDefaultKey_PhotoDisplayController)
print("spatioal photo viewdidload...\(self)")
// return
// Observe external-screen scene connect/disconnect to toggle mirroring.
NotificationCenter.default.addObserver(self, selector: #selector(exScreenWillConnectNotification(notification:)), name: UIScene.willConnectNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(exScreenDisConnectNotification(notification:)), name: UIScene.didDisconnectNotification, object: nil)
// AirPlay route-change observation (superseded by the scene notifications above):
// NotificationCenter.default.addObserver(self, selector: #selector(airPlayStatusDidChange(notification:)), name: AVAudioSession.routeChangeNotification, object: nil)
self.view.backgroundColor = UIColor(hexString: "#060507")
outputVideoURL = URL.documentsDirectory.appending(path:"output11114.jpg")
// Keep a working copy of the source bytes for re-compositing on mode change.
var originalData:Data?
originalData = photoOriginalData
imgData = originalData
// Capture the source orientation before any compositing.
let image = UIImage(data: originalData!)
// mImgView.image = image
imgOritentation = image?.imageOrientation ?? .left
// Render the default mode (0 = parallel-eye) immediately.
selectedSpatialType(selectedIndex: 0)
let isSpatial = isSpatialImage(originalData: originalData!)
if !isSpatial {
print("这不是一张空间图片")
// Not a spatial image: skip the stereo UI entirely (nav bar, menu, image view).
return
}
// Build the navigation chrome and main image view.
self.setLeftOneBtnImg(imgStr: "spatial_back_button")
// self.setLeftBtnImg(imgStr1: "", imgStr2: "spatial_back_button")
self.setNavgationBarColorImg(color: .clear)
self.setNavgationBarLine(color: .clear)
self.view.addSubview(mTopImgView)
self.view.bringSubviewToFront(self.navtionBar!)
// navtionBar?.addSubview(backButton)
navtionBar?.addSubview(transformButton)
navtionBar?.addSubview(mTopCenterTypeButton)
self.view.addSubview(mImgView)
checkAirPlayStatus()
}
override func viewDidDisappear(_ animated: Bool) {
super.viewDidDisappear(animated)
// Log how long the user stayed on this page (seconds since viewDidLoad stamp).
let sec:TimeInterval = ZZHHelper.getSecFromUserDefaultByKey(kNowTimeToUserDefaultKey_PhotoDisplayController)
Analytics.logEvent("preview_pv", parameters: ["refer_page":"图片预览页面","duration":sec])
}
//MARK: - External screen connect/disconnect notifications
@objc func exScreenWillConnectNotification(notification:Notification){
print("exScreenWillConnectNotification>>>>>")
airPlayStatusDidChange(notification: notification)
}
@objc func exScreenDisConnectNotification(notification:Notification){
print("exScreenDisConnectNotification>>>>>")
airPlayStatusDidChange(notification: notification)
}
/// Re-evaluates the AirPlay/HDMI route and logs streaming start/stop analytics.
/// Note: `notification` is unused; both connect and disconnect funnel through here.
func airPlayStatusDidChange(notification:Notification) {
checkAirPlayStatus()
if(isAirPlayActive) {
Analytics.logEvent("streaming_live", parameters: ["refer_page":"图片预览页面"])
ZZHHelper.setNowTimeToUserDefaultWithKey("kNowTimeToUserDefaultKey_PhotoDisplayController_airplay")
}
else {
let sec:TimeInterval = ZZHHelper.getSecFromUserDefaultByKey("kNowTimeToUserDefaultKey_PhotoDisplayController_airplay")
Analytics.logEvent("streaming_live", parameters: ["refer_page":"图片预览页面","duration":sec])
}
}
/// Sets `isAirPlayActive` from the current audio route (HDMI or AirPlay output present).
/// Assigning the property triggers its didSet, which starts/stops mirroring.
private func checkAirPlayStatus() {
print("设备连接变化 photodisplaycontroller>>>>>")
let currentRoute = AVAudioSession.sharedInstance().currentRoute
self.isAirPlayActive = currentRoute.outputs.contains { output in
return output.portType == AVAudioSession.Port.HDMI ||
output.portType == AVAudioSession.Port.airPlay
}
}
/// Starts or stops mirroring `mImgView` to the external (non-interactive) display scene.
/// On connect: grabs the external scene's root ZZHExternalViewController and drives it
/// with a CADisplayLink bound to the external screen. On disconnect: tears both down.
func setttinisScreenMirroring(isScreenMirroring:Bool){
// Connection state changed — wire up or tear down the external display link.
if(isScreenMirroring){
print("变化为已连接。。。。")
UIApplication.shared.connectedScenes.forEach { us in
print("uisence:\(us)\n")
let ws = us as! UIWindowScene
// Match the external, non-interactive display role by its raw string
// (avoids a hard dependency on the typed constant's availability).
if ws.session.role.rawValue == "UIWindowSceneSessionRoleExternalDisplayNonInteractive"{
externalVC = ws.windows.first?.rootViewController as? ZZHExternalViewController
// if let has_exvc = exvc {
// has_exvc.imageView?.image = mImgView.image
// }
// Display link ticks at the external screen's refresh rate; .common mode keeps
// it firing during scrolling/tracking.
externalDispalylink = ws.windows.first?.screen.displayLink(withTarget: self, selector: #selector(displayUpdate(caDisplayLink:)))
externalDispalylink?.add(to: RunLoop.main, forMode: RunLoop.Mode.common)
}
}
// Earlier session-activation approach, kept for reference:
// let sessions:Set<UISceneSession> = UIApplication.shared.openSessions
// sessions.forEach { ss in
// print("role:\(ss.role.rawValue)")
// }
// //
// let otherScreenSessions:Set<UISceneSession> = sessions.filter {
// $0.role.rawValue.contains("External")
// }
//
// otherScreenSessions.forEach { ssn in
// print("ssn role:\(ssn.role.rawValue) first:\(otherScreenSessions.first)")
// }
//
// UIApplication.shared.connectedScenes.forEach { us in
// print("conn sc:\(us)")
// }
// if let session = otherScreenSessions.first{
// let ssar = UISceneSessionActivationRequest.init(session: session)
// UIApplication.shared.activateSceneSession(for: ssar) { err in
// print("error:\(err)")
// }
// guard let scene = session.scene else {
// return
// }
// self.externalWindow.windowScene = scene as! UIWindowScene
// let nvc = UIViewController()
// self.externalWindow.rootViewController = nvc
// var imageView = UIImageView(frame: CGRectMake(0, 0, KScreenHeight*2, KScreenHeight*2))
// imageView.contentMode = .scaleAspectFit
// externalImageView = imageView
// nvc.view.addSubview(imageView)
// self.externalWindow.isHidden = false
// var dispalylink:CADisplayLink? = self.externalWindow.windowScene?.windows.first?.screen.displayLink(withTarget: self, selector: #selector(displayUpdate(caDisplayLink:)))
// dispalylink?.add(to: RunLoop.main, forMode: RunLoop.Mode.common)
// }
// else {
// print("....")
// }
}
else{
print("变化为 断开。。。。")
// Disconnected: release the external VC and invalidate the link so it stops retaining self.
externalVC = nil
externalDispalylink?.invalidate()
externalDispalylink = nil
// if let imv = externalImageView{
// imv .removeFromSuperview()
// }
// externalImageView = nil
// self.externalWindow.isHidden = true
// self.externalWindow.rootViewController = nil
}
}
// Earlier notification-driven external-window setup, kept for reference:
// @objc func hasNewWindowExternal(notification:Notification){
// print("hasNewWindowExternal notifi...")
// let window = notification.userInfo?["newwindow"] as! UIWindow
// let imageView = UIImageView(frame: CGRectMake(0, 0, KScreenHeight*2, KScreenHeight*2))
// imageView.contentMode = .scaleAspectFit
// externalImageView = imageView
// window.rootViewController?.view.addSubview(imageView)
// let dispalylink:CADisplayLink? = window.screen.displayLink(withTarget: self, selector: #selector(displayUpdate(caDisplayLink:)))
// dispalylink?.add(to: RunLoop.main, forMode: RunLoop.Mode.common)
//
// }
/// Display-link callback: pushes the current on-device image to the external screen.
/// Runs every frame while connected; no image-identity check, so it reassigns each tick.
@objc func displayUpdate(caDisplayLink:CADisplayLink) {
// if externalImageView?.image != mImgView.image {
// externalImageView?.image = mImgView.image
// }
if let has_exvc = externalVC {
has_exvc.imageView?.image = mImgView.image
}
}
//MARK: - Navigation-bar button actions
/// Dispatches nav-bar taps by tag: 200 = (unused), 201 = transform, 202 = type menu.
@objc public func navgationButtonClick2(sender:UIButton){
if sender.tag == 200 {
// Reserved — no action wired for tag 200 in this controller.
}else if sender.tag == 201 {
// Transform: push the photo-transform screen with the original bytes and asset.
let transVC = VRPhotoTransformController()
transVC.sourceImageData = photoOriginalData
transVC.imageAsset = imageAsset
self.navigationController?.pushViewController(transVC, animated: true)
Analytics.logEvent("preview_trans_click", parameters: nil)
}else if sender.tag == 202 {
// Type menu: flip the arrow icon up and present the drop-down.
mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_up"), for: .normal)
menuView.show()
// Selection handler: restore the arrow, re-render in the chosen mode, update the title.
// NOTE(review): the block is (re)assigned on every tap — harmless but could be set once.
menuView.didSelectMenuBlock = { [weak self](index:Int)->Void in
print("block select \(index)")
self?.mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
self?.selectedSpatialType(selectedIndex: index)
let title = self?.typeData[index].title
self?.mTopCenterTypeButton.setTitle(title, for: UIControl.State.normal)
}
}
}
/// Re-composites the photo for the selected viewing mode and updates `mImgView`.
/// Modes: 0 = parallel-eye (L|R side-by-side), 1 = single-eye 2D (original image),
/// 2 = red/blue anaglyph, 3 = cross-eye (R|L side-by-side).
/// Requires `imgData` to contain at least two decodable frames; returns silently otherwise.
func selectedSpatialType(selectedIndex:Int) {
print("选中了第 \(selectedIndex) 个选项")
// player.pause()
mImgView.frame = CGRect.init(x: 0, y: 200, width: self.view.frame.size.width, height: 240)
guard let imageSource = CGImageSourceCreateWithData(imgData! as CFData, nil) else {
return
}
// Decode every frame in the container; a spatial photo carries left/right images.
let frameCount = CGImageSourceGetCount(imageSource)
var frames: [CGImage] = []
for index in 0..<frameCount {
guard let frameImage = CGImageSourceCreateImageAtIndex(imageSource, index, nil) else {
continue
}
frames.append(frameImage)
}
if(frames.count < 2){return}
// Convention here: frame 0 = left eye, frame 1 = right eye — TODO confirm for all sources.
let lciImage = CIImage(cgImage: frames.first!)
let rciImage = CIImage(cgImage: frames[1])
if(selectedIndex == 1){
// Mode 1 — single-eye 2D: just show the original (wide) image as-is.
let image = UIImage(data: photoOriginalData!)
mImgView.image = image
}else if(selectedIndex == 0){
// Mode 0 — parallel-eye: left frame on the left, right frame on the right.
let newpb = joinImages( leftImage: lciImage, rightImage:rciImage )
let lastImg = convertCIImageToUIImage(ciImage: newpb)!
DispatchQueue.main.async { [weak self] in
self!.mImgView.image = lastImg
}
}else if(selectedIndex == 2){
// Mode 2 — red/blue anaglyph: tint each eye's frame and screen-blend them.
// Rows below are the R, G, B, A output vectors of a 4x5 color matrix.
// NOTE(review): each 20-element array is handed to a single input vector
// (inputRVector / inputBVector), which CIColorMatrix documents as a 4-component
// CIVector — likely only the leading components take effect; confirm intended.
let redColorMatrix: [CGFloat] = [
0.0, 0.0, 0.0, 0.0, 0.0, // red
0.0, 0.0, 0.0, 0.0, 0.0, // green
0.0, 0.0, 0.5, 0.0, 0.0, // blue
0.0, 0.0, 0.0, 1.0, 0.0 // alpha
]
let blueColorMatrix: [CGFloat] = [
0.5, 0.0, 0.0, 0.0, 0.0, // red
0.0, 0.0, 0.0, 0.0, 0.0, // green
0.0, 0.0, 0.0, 0.0, 0.0, // blue
0.0, 0.0, 0.0, 1.0, 0.0 // alpha
]
let redFilter = CIFilter(name: "CIColorMatrix")!
redFilter.setValue(lciImage, forKey: kCIInputImageKey)
redFilter.setValue(CIVector(values: redColorMatrix, count: redColorMatrix.count), forKey: "inputRVector")
let blueFilter = CIFilter(name: "CIColorMatrix")!
blueFilter.setValue(rciImage, forKey: kCIInputImageKey)
blueFilter.setValue(CIVector(values: blueColorMatrix, count: blueColorMatrix.count), forKey: "inputBVector")
// Combine the two tinted frames into one anaglyph image.
if let redOutputImage = redFilter.outputImage,
let blueOutputImage = blueFilter.outputImage {
// Blend-mode candidates considered (screen chosen — lightens, never clips to black):
// CIScreenBlendMode / CIHardLightBlendMode / CILightenBlendMode /
// CIColorDodgeBlendMode / CIColorBurnBlendMode / CIDarkenBlendMode /
// CILinearDodgeBlendMode / CIMultiplyBlendMode / CISourceOverCompositing
let compositeFilter = CIFilter(name: "CIScreenBlendMode")!
compositeFilter.setValue(redOutputImage, forKey: kCIInputImageKey)
compositeFilter.setValue(blueOutputImage, forKey: kCIInputBackgroundImageKey)
let lastImg = compositeFilter.outputImage!
DispatchQueue.main.async { [weak self] in
var ri = UIImage(ciImage: lastImg)
// Re-orient to match the original photo's orientation before display.
ri = ri.getUpImg(imgOrigOritentation: self?.imgOritentation ?? .left)
self!.mImgView.image = ri
}
}
}else if(selectedIndex == 3){
// Mode 3 — cross-eye: same side-by-side layout with eyes swapped (R|L).
let newpb = joinImages( leftImage:rciImage , rightImage:lciImage )
let lastImg = convertCIImageToUIImage(ciImage: newpb)!
DispatchQueue.main.async { [weak self] in
self!.mImgView.image = lastImg
}
}
}
//MARK: - Spatial-image detection
// Earlier metadata probe, kept for reference:
// let makerAppleProperties = imageProperties["{HEIF}"]
/// Heuristic spatial-photo check: treats the data as spatial if Image I/O can read it
/// AND it exposes properties for a second image (index 1) in the container.
/// NOTE(review): this only proves a second frame exists, not that the pair is stereo —
/// confirm whether a real spatial-metadata check is needed.
func isSpatialImage(originalData: Data) -> Bool {
// Must be decodable by Image I/O at all.
guard let imageSource = CGImageSourceCreateWithData(originalData as CFData, nil) else {
return false
}
// Index 1 (the second image) must exist; single-frame photos fail here.
guard let properties = CGImageSourceCopyPropertiesAtIndex(imageSource, 1, nil) as? [CFString: Any] else {
return false
}
print(properties)
return true
}
//MARK: - Pixel-buffer / image conversion helpers
/// Draws `image` into a new 32BGRA CVPixelBuffer sized to `frame`, offset by the
/// frame's origin. Returns nil if buffer or CGContext creation fails.
/// NOTE(review): the early `return nil` on context failure skips the matching
/// CVPixelBufferUnlockBaseAddress — the buffer stays locked on that path; confirm.
func createCVPixelBuffer(from image: UIImage, with frame: CGRect) -> CVPixelBuffer? {
let options: [String: Any] = [
kCVPixelBufferCGImageCompatibilityKey as String: true,
kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
]
var pixelBuffer: CVPixelBuffer?
let status = CVPixelBufferCreate(kCFAllocatorDefault,
Int(frame.width),
Int(frame.height),
kCVPixelFormatType_32BGRA,
options as CFDictionary,
&pixelBuffer)
guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
return nil
}
CVPixelBufferLockBaseAddress(buffer, [])
let pixelData = CVPixelBufferGetBaseAddress(buffer)
let colorSpace = CGColorSpaceCreateDeviceRGB()
guard let context = CGContext(data: pixelData,
width: Int(frame.width),
height: Int(frame.height),
bitsPerComponent: 8,
bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
space: colorSpace,
bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) else {
return nil
}
// Shift drawing so the buffer's (0,0) maps to frame.origin in image space.
context.translateBy(x: -frame.origin.x, y: -frame.origin.y)
context.draw(image.cgImage!, in: CGRect(origin: .zero, size: image.size))
CVPixelBufferUnlockBaseAddress(buffer, [])
return buffer
}
/// Joins two CIImages side-by-side (left|right) WITHOUT applying orientation fix-up.
/// Output canvas is half the combined source size: each eye is drawn at quarter-width,
/// half-height — presumably a deliberate downscale; confirm against display sizing.
/// NOTE(review): appears unused in this file — `joinImages` (orientation-aware) is
/// the variant called by selectedSpatialType.
func joinImages2( leftImage:CIImage, rightImage:CIImage) -> CIImage {
let left = UIImage(ciImage: leftImage )
let right = UIImage(ciImage: rightImage )
let imageWidth = left.size.width/2 + right.size.width/2
let imageHeight = left.size.height/2
let newImageSize = CGSize(width:imageWidth, height: imageHeight);
// Scale 1 so dimensions are exact pixels, not screen-scale multiplied.
UIGraphicsBeginImageContextWithOptions(newImageSize, false, 1);
left.draw(in: CGRect(x:0, y:0, width:imageWidth/2, height:imageHeight))
right.draw(in: CGRect(x:imageWidth/2, y:0, width:imageWidth/2, height:imageHeight))
let image = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext();
let ci = CIImage(cgImage: image.cgImage!)
return ci
}
/// Loads the first video track's preferred transform and its transformed (display)
/// size from `asset`.
/// - Returns: the track's preferred transform and the absolute-valued display size.
/// - Throws: `VideoReaderError.invalidVideo` when the asset has no video track.
func getOrientationAndResolutionSizeForVideo(asset:AVAsset) async throws -> (CGAffineTransform, CGSize) {
guard let track = try await asset.loadTracks(withMediaType: AVMediaType.video).first
else{throw VideoReaderError.invalidVideo}
let naturalSize = try await track.load(.naturalSize)
let naturalTransform = try await track.load(.preferredTransform)
// Applying the transform can negate dimensions (e.g. 90° rotations) — take abs.
let size = naturalSize.applying(naturalTransform)
return (naturalTransform, CGSize(width: abs(size.width), height: abs(size.height)) )
}
/// Renders a CIImage into a CGImage-backed UIImage (so it can be drawn with UIKit).
/// Returns nil if Core Image fails to render the extent.
/// NOTE(review): creates a fresh CIContext per call — contexts are expensive;
/// consider caching one if this becomes hot.
func convertCIImageToUIImage(ciImage: CIImage) -> UIImage? {
let context = CIContext(options: nil)
if let cgImage = context.createCGImage(ciImage, from: ciImage.extent) {
let uiImage = UIImage(cgImage: cgImage)
return uiImage
}
return nil
}
/// Joins two CIImages side-by-side (left|right), first re-orienting each via
/// `getUpImg` using the source photo's `imgOritentation`.
/// Output canvas is half the combined source size: each eye is drawn at quarter-width,
/// half-height — presumably a deliberate downscale; confirm against display sizing.
func joinImages( leftImage:CIImage, rightImage:CIImage) -> CIImage {
var left = UIImage(ciImage: leftImage )
left = left.getUpImg(imgOrigOritentation: imgOritentation)
var right = UIImage(ciImage: rightImage )
right = right.getUpImg(imgOrigOritentation: imgOritentation)
let imageWidth = left.size.width/2 + right.size.width/2
let imageHeight = left.size.height/2
let newImageSize = CGSize(width:imageWidth, height: imageHeight);
// Scale 1 so dimensions are exact pixels, not screen-scale multiplied.
UIGraphicsBeginImageContextWithOptions(newImageSize, false, 1);
left.draw(in: CGRect(x:0, y:0, width:imageWidth/2, height:imageHeight))
right.draw(in: CGRect(x:imageWidth/2, y:0, width:imageWidth/2, height:imageHeight))
let image = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext();
let ci = CIImage(cgImage: image.cgImage!)
return ci
}
/// Renders `ciImage` into a new 32ARGB CVPixelBuffer sized to the image extent.
/// Returns nil if buffer creation fails.
/// NOTE(review): creates a fresh CIContext per call; also appears unused in this file.
func pixelBuffer(from ciImage: CIImage) -> CVPixelBuffer? {
var pixelBuffer: CVPixelBuffer?
let attributes: [String: Any] = [
kCVPixelBufferCGImageCompatibilityKey as String: kCFBooleanTrue,
kCVPixelBufferCGBitmapContextCompatibilityKey as String: kCFBooleanTrue
]
let width = Int(ciImage.extent.width)
let height = Int(ciImage.extent.height)
let status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32ARGB, attributes as CFDictionary, &pixelBuffer)
if status == kCVReturnSuccess, let pixelBuffer = pixelBuffer {
let context = CIContext()
context.render(ciImage, to: pixelBuffer)
return pixelBuffer
}
return nil
}
}