//
//  CCSpatialPhotoDisplayController.swift
//  SwiftProject
//
//  Created by Zhang, Joyce on 2024/3/2.
//
import AVFoundation
import CoreImage
import CoreVideo
import Firebase
import ImageIO
import Photos
import SVProgressHUD
import UIKit
import VideoToolbox
// Errors thrown while probing a video asset for spatial content.
enum VideoReaderError : Error {
    // The asset has no video track at all.
    case invalidVideo
    // The asset is a plain (non-spatial) video.
    // (Spelling of the case name kept as-is: it is public API for callers.)
    case notSpacialVideo
}
||
class CCSpatialPhotoDisplayController: BaseController {
    // UserDefaults key recording when this page appeared; read back in
    // viewDidDisappear to compute the "preview_pv" duration metric.
    let kNowTimeToUserDefaultKey_PhotoDisplayController:String = "kNowTimeToUserDefaultKey_PhotoDisplayController"

    // Keep a window for the external display.
    // lazy var externalWindow:UIWindow = {
    //     return UIWindow(frame: self.view.bounds)
    // }()

    // Root view controller of the externally connected (AirPlay/HDMI) screen.
    var externalVC:ZZHExternalViewController?
    // Display link that mirrors mImgView onto the external screen every frame.
    var externalDispalylink:CADisplayLink?
    var externalImageView:UIImageView?

    // var player:AVPlayer = AVPlayer()

    var outputVideoURL:URL?
    var dvc:UIDocumentViewController?

    // Source image bytes.
    // var photoOriginalURL:URL?
    var photoOriginalData:Data?
    var imageAsset:PHAsset?

    // Working copy of the image data consumed by selectedSpatialType(selectedIndex:).
    var imgData:Data?
    // Orientation of the source photo, used to rotate the eye frames upright.
    var imgOritentation:UIImage.Orientation = .left
    // Display mode: spatial video / cross-eye / red-blue anaglyph / gaussian blur.
    var type = 0

    // Top background banner.
    lazy var mTopImgView:UIImageView = {
        // Asset "BG_Top" is 393×236.
        // NOTE(review): 236/393 is the asset's height:width ratio, so the height
        // was probably meant to be SCREEN_Width * 236/393 — confirm.
        let view = UIImageView(frame: CGRect(x: 0, y: 0, width: SCREEN_Width, height: SCREEN_Height * 236/393))

        view.image = UIImage.init(named: "BG_Top")
        return view
    }()

    // Right-hand navigation button (tag 201) that opens the transform page.
    lazy var transformButton: UIButton = {
        // Asset "transform_button" is 76×56.
        let transformButton = UIButton.init(type: UIButton.ButtonType.custom)

        transformButton.tag = 201
        transformButton.isSelected = false
        transformButton.backgroundColor = UIColor(hexString: "#5326D6")
        transformButton.addTarget(self, action: #selector(navgationButtonClick2(sender:)), for: UIControl.Event.touchUpInside)
        let img2:UIImage = UIImage.init(named: "transform_button" as String)!
        transformButton.setImage(img2, for: UIControl.State.normal)
        transformButton.frame = CGRect(x: 0, y: 0, width: 56, height: 36)
        // Half the 36pt height → pill shape.
        transformButton.layer.cornerRadius = 18
        transformButton.layer.masksToBounds = true
        transformButton.centerY = StatuBar_Height + NavBar_Height * 0.5
        transformButton.right = SCREEN_Width - 24

        return transformButton
    }()

    // Centre navigation button (tag 202) showing the current display type;
    // tapping it opens the type popup menu.
    lazy var mTopCenterTypeButton: UIButton = {
        // Designed size 173×36.
        let button = UIButton()
        button.backgroundColor = UIColor.hexStringToColor(hexString: "#1F1E20")
        button.tag = 202
        button.isSelected = false
        button.addTarget(self, action: #selector(navgationButtonClick2(sender:)), for: UIControl.Event.touchUpInside)
        button.frame = CGRect(x: 2, y: 10, width: SCREEN_Width * 0.4, height: 36)
        button.centerY = StatuBar_Height + NavBar_Height * 0.5
        button.centerX = SCREEN_Width * 0.5
        button.clipsToBounds = true
        button.layer.cornerRadius = 18
        button.layer.borderWidth = 1
        button.layer.borderColor = UIColor.white.cgColor
        button.setTitle("平行眼", for: UIControl.State.normal)
        button.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
        button.updateBtnEdgeInsets(style: .Right, space: 10)
        button.setTitleColor(UIColor.white, for: UIControl.State.normal)
        button.titleLabel?.font = KFont_Medium(14)
        return button
    }()

    // Preview image view showing the composited spatial rendering.
    lazy var mImgView:UIImageView = {
        let imageView = UIImageView()
        imageView.frame = CGRect.init(x: 0, y: 250, width: self.view.frame.size.width, height: 240)
        imageView.contentMode = .scaleAspectFit
        imageView.backgroundColor = UIColor.clear

        imageView.isUserInteractionEnabled = true
        return imageView
    }()

    // Menu entries for the display-type popup (icon may be left empty).
    var typeData:[(icon:String,title:String,isHiden:Bool)] = [(icon:"type_check",title:"平行眼",isHiden:false),
                                                              (icon:"type_check",title:"单眼2D",isHiden:false),
                                                              (icon:"type_check",title:"红蓝立体",isHiden:false),
                                                              (icon:"type_check",title:"交叉眼",isHiden:false)]

    // Popup menu used to switch display types.
    lazy var menuView: CCSpatialDisplayTypeView = {
        // Appearance parameters.
        let parameters:[CCSpatialDisplayTypeConfigure] = [
            .PopMenuTextColor(UIColor.white),
            .popMenuItemHeight(40),
            .PopMenuTextFont(KFont_Medium(12)),
            .PopMenuBackgroudColor(UIColor(hexString: "#1F1E20"))
        ]

        // The arrow anchor is expressed in screen coordinates.
        let pointOnScreen = navtionImgView!.convert(CGPointMake(navtionImgView!.centerX, navtionImgView!.bottom), to: KWindow)
        let popMenu = CCSpatialDisplayTypeView(menuWidth: SCREEN_Width * 0.4, arrow: pointOnScreen, datas: typeData,configures: parameters){ [weak self] in
            // Dismiss callback: restore the "down" arrow on the type button.
            self?.mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
        }
        return popMenu
    }()

    // Whether an AirPlay/HDMI output route is currently active.
    var isAirPlayActive:Bool = false {
        didSet{
            // Rebuild or tear down the external-screen mirroring UI.
            setttinisScreenMirroring(isScreenMirroring: isAirPlayActive)
        }
    }
||
deinit {
    print("spatioal photo display deinit...\(self)")
    // Remove the UIScene connect/disconnect observers added in viewDidLoad.
    NotificationCenter.default.removeObserver(self)
}
||
override func viewDidLoad() {
    super.viewDidLoad()
    // Start the page-view timer read back in viewDidDisappear.
    ZZHHelper.setNowTimeToUserDefaultWithKey(kNowTimeToUserDefaultKey_PhotoDisplayController)
    print("spatioal photo viewdidload...\(self)")

    // Observe external-screen (scene) connect / disconnect.
    NotificationCenter.default.addObserver(self, selector: #selector(exScreenWillConnectNotification(notification:)), name: UIScene.willConnectNotification, object: nil)
    NotificationCenter.default.addObserver(self, selector: #selector(exScreenDisConnectNotification(notification:)), name: UIScene.didDisconnectNotification, object: nil)

    // Watch AirPlay route changes.
    // NotificationCenter.default.addObserver(self, selector: #selector(airPlayStatusDidChange(notification:)), name: AVAudioSession.routeChangeNotification, object: nil)

    self.view.backgroundColor = UIColor(hexString: "#060507")
    outputVideoURL = URL.documentsDirectory.appending(path:"output11114.jpg")

    // Fixed: photoOriginalData was force-unwrapped (crash when nil); bail out
    // gracefully instead. Also dropped the pointless `var originalData` shuffle.
    guard let originalData = photoOriginalData else { return }
    imgData = originalData
    // Remember the source orientation so recompositions can rotate frames upright.
    imgOritentation = UIImage(data: originalData)?.imageOrientation ?? .left
    // Render the default mode (0 = parallel-eye).
    selectedSpatialType(selectedIndex: 0)

    guard isSpatialImage(originalData: originalData) else {
        print("这不是一张空间图片")
        return
    }

    // Navigation chrome: back button, transparent bar and bar line.
    self.setLeftOneBtnImg(imgStr: "spatial_back_button")
    self.setNavgationBarColorImg(color: .clear)
    self.setNavgationBarLine(color: .clear)

    self.view.addSubview(mTopImgView)
    self.view.bringSubviewToFront(self.navtionBar!)
    navtionBar?.addSubview(transformButton)
    navtionBar?.addSubview(mTopCenterTypeButton)
    self.view.addSubview(mImgView)

    checkAirPlayStatus()
}
|
||
override func viewDidDisappear(_ animated: Bool) {
    super.viewDidDisappear(animated)
    // Report how long the preview page stayed on screen.
    let elapsed: TimeInterval = ZZHHelper.getSecFromUserDefaultByKey(kNowTimeToUserDefaultKey_PhotoDisplayController)
    Analytics.logEvent("preview_pv", parameters: ["refer_page": "图片预览页面", "duration": elapsed])
}
||
//MARK: - External screen: connect and disconnect
// Scene connected (e.g. screen mirroring started): re-evaluate AirPlay state.
@objc func exScreenWillConnectNotification(notification:Notification){
    print("exScreenWillConnectNotification>>>>>")
    airPlayStatusDidChange(notification: notification)
}

// Scene disconnected: re-evaluate AirPlay state and tear mirroring down.
@objc func exScreenDisConnectNotification(notification:Notification){
    print("exScreenDisConnectNotification>>>>>")
    airPlayStatusDidChange(notification: notification)
}
|
||
/// Re-queries the audio route and logs the streaming session boundary:
/// start timestamp when mirroring begins, duration when it ends.
func airPlayStatusDidChange(notification:Notification) {
    checkAirPlayStatus()

    if isAirPlayActive {
        // Stream started: record the start time for the duration metric.
        Analytics.logEvent("streaming_live", parameters: ["refer_page": "图片预览页面"])
        ZZHHelper.setNowTimeToUserDefaultWithKey("kNowTimeToUserDefaultKey_PhotoDisplayController_airplay")
    } else {
        // Stream ended: report how long it lasted.
        let duration: TimeInterval = ZZHHelper.getSecFromUserDefaultByKey("kNowTimeToUserDefaultKey_PhotoDisplayController_airplay")
        Analytics.logEvent("streaming_live", parameters: ["refer_page": "图片预览页面", "duration": duration])
    }
}
|
||
/// Reads the current audio route and updates `isAirPlayActive` when — and only
/// when — the HDMI/AirPlay state actually changed.
private func checkAirPlayStatus() {
    print("设备连接变化 photodisplaycontroller>>>>>")
    let currentRoute = AVAudioSession.sharedInstance().currentRoute
    let active = currentRoute.outputs.contains { output in
        output.portType == AVAudioSession.Port.HDMI ||
        output.portType == AVAudioSession.Port.airPlay
    }
    // Fixed: the property was assigned unconditionally, so its didSet (which
    // rebuilds the external-screen display link) ran on every route
    // notification even when nothing changed, stacking CADisplayLinks.
    if active != isAirPlayActive {
        isAirPlayActive = active
    }
}
||
/// Builds or tears down mirroring onto the external (non-interactive) display.
/// On connect: finds the external window scene, grabs its root
/// ZZHExternalViewController and drives it with a CADisplayLink
/// (see displayUpdate(caDisplayLink:)). On disconnect: releases both.
func setttinisScreenMirroring(isScreenMirroring:Bool){
    if isScreenMirroring {
        print("变化为已连接。。。。")
        for scene in UIApplication.shared.connectedScenes {
            print("uisence:\(scene)\n")
            // Fixed: was a force cast (`as!`); a non-window scene would crash.
            guard let windowScene = scene as? UIWindowScene,
                  windowScene.session.role.rawValue == "UIWindowSceneSessionRoleExternalDisplayNonInteractive"
            else { continue }

            externalVC = windowScene.windows.first?.rootViewController as? ZZHExternalViewController

            // Fixed: invalidate any previous link before installing a new one;
            // repeated connect notifications used to stack display links.
            externalDispalylink?.invalidate()
            externalDispalylink = windowScene.windows.first?.screen.displayLink(withTarget: self, selector: #selector(displayUpdate(caDisplayLink:)))
            externalDispalylink?.add(to: RunLoop.main, forMode: RunLoop.Mode.common)
        }
    } else {
        print("变化为 断开。。。。")
        externalVC = nil
        externalDispalylink?.invalidate()
        externalDispalylink = nil
    }
}
||
// @objc func hasNewWindowExternal(notification:Notification){
|
||
// print("hasNewWindowExternal notifi...")
|
||
// let window = notification.userInfo?["newwindow"] as! UIWindow
|
||
// let imageView = UIImageView(frame: CGRectMake(0, 0, KScreenHeight*2, KScreenHeight*2))
|
||
// imageView.contentMode = .scaleAspectFit
|
||
// externalImageView = imageView
|
||
// window.rootViewController?.view.addSubview(imageView)
|
||
// let dispalylink:CADisplayLink? = window.screen.displayLink(withTarget: self, selector: #selector(displayUpdate(caDisplayLink:)))
|
||
// dispalylink?.add(to: RunLoop.main, forMode: RunLoop.Mode.common)
|
||
//
|
||
// }
|
||
|
||
/// Display-link callback: mirrors the current preview image onto the
/// external screen's image view once per frame.
@objc func displayUpdate(caDisplayLink:CADisplayLink) {
    guard let externalController = externalVC else { return }
    externalController.imageView?.image = mImgView.image
}
||
|
||
//MARK: - action
/// Handles the three navigation-bar buttons, dispatched on the button tag:
/// 200 = back (handled by the base controller), 201 = transform page,
/// 202 = display-type popup menu.
@objc public func navgationButtonClick2(sender:UIButton){
    switch sender.tag {
    case 200:
        // Left (back) button.
        break
    case 201:
        // Right button: push the transform page with the current source photo.
        let transVC = VRPhotoTransformController()
        transVC.sourceImageData = photoOriginalData
        transVC.imageAsset = imageAsset
        self.navigationController?.pushViewController(transVC, animated: true)
        Analytics.logEvent("preview_trans_click", parameters: nil)
    case 202:
        // Centre button: flip the arrow and present the type menu.
        mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_up"), for: .normal)
        menuView.show()
        menuView.didSelectMenuBlock = { [weak self] (index: Int) -> Void in
            print("block select \(index)")
            self?.mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
            self?.selectedSpatialType(selectedIndex: index)
            let title = self?.typeData[index].title
            self?.mTopCenterTypeButton.setTitle(title, for: UIControl.State.normal)
        }
    default:
        break
    }
}
|
||
/// Re-renders mImgView for the chosen display mode:
/// 0 = parallel-eye, 1 = mono 2D, 2 = red/blue anaglyph, 3 = cross-eye.
/// Decodes the left/right eye frames from `imgData` (a spatial photo stores
/// at least two frames) and composites them accordingly.
func selectedSpatialType(selectedIndex:Int) {
    print("选中了第 \(selectedIndex) 个选项")
    mImgView.frame = CGRect.init(x: 0, y: 200, width: self.view.frame.size.width, height: 240)

    // Fixed: imgData was force-unwrapped; bail out when no source data is set.
    guard let sourceData = imgData,
          let imageSource = CGImageSourceCreateWithData(sourceData as CFData, nil) else {
        return
    }

    let frameCount = CGImageSourceGetCount(imageSource)
    var frames: [CGImage] = []
    for index in 0..<frameCount {
        if let frameImage = CGImageSourceCreateImageAtIndex(imageSource, index, nil) {
            frames.append(frameImage)
        }
    }

    guard frames.count >= 2 else { return }
    let lciImage = CIImage(cgImage: frames[0])   // left eye
    let rciImage = CIImage(cgImage: frames[1])   // right eye

    if selectedIndex == 1 {
        // Mono 2D: show the original photo (one of the captures) as-is.
        // Fixed: photoOriginalData was force-unwrapped.
        if let data = photoOriginalData {
            mImgView.image = UIImage(data: data)
        }
    } else if selectedIndex == 0 {
        // Parallel-eye: left | right side by side.
        let joined = joinImages(leftImage: lciImage, rightImage: rciImage)
        // Fixed: conversion result was force-unwrapped, and the [weak self]
        // closure used `self!`, which defeats the weak capture.
        guard let rendered = convertCIImageToUIImage(ciImage: joined) else { return }
        DispatchQueue.main.async { [weak self] in
            self?.mImgView.image = rendered
        }
    } else if selectedIndex == 2 {
        // Red/blue anaglyph: keep half the blue channel from the left eye and
        // half the red channel from the right eye, then screen-blend the two.
        let redColorMatrix: [CGFloat] = [
            0.0, 0.0, 0.0, 0.0, 0.0, // red channel
            0.0, 0.0, 0.0, 0.0, 0.0, // green channel
            0.0, 0.0, 0.5, 0.0, 0.0, // blue channel
            0.0, 0.0, 0.0, 1.0, 0.0  // alpha channel
        ]
        let blueColorMatrix: [CGFloat] = [
            0.5, 0.0, 0.0, 0.0, 0.0, // red channel
            0.0, 0.0, 0.0, 0.0, 0.0, // green channel
            0.0, 0.0, 0.0, 0.0, 0.0, // blue channel
            0.0, 0.0, 0.0, 1.0, 0.0  // alpha channel
        ]

        let redFilter = CIFilter(name: "CIColorMatrix")!
        redFilter.setValue(lciImage, forKey: kCIInputImageKey)
        redFilter.setValue(CIVector(values: redColorMatrix, count: redColorMatrix.count), forKey: "inputRVector")

        let blueFilter = CIFilter(name: "CIColorMatrix")!
        blueFilter.setValue(rciImage, forKey: kCIInputImageKey)
        blueFilter.setValue(CIVector(values: blueColorMatrix, count: blueColorMatrix.count), forKey: "inputBVector")

        if let redOutputImage = redFilter.outputImage,
           let blueOutputImage = blueFilter.outputImage {
            // CIScreenBlendMode: inverts, multiplies and re-inverts the colour
            // channels, letting each eye's tinted image shine through the other.
            let compositeFilter = CIFilter(name: "CIScreenBlendMode")!
            compositeFilter.setValue(redOutputImage, forKey: kCIInputImageKey)
            compositeFilter.setValue(blueOutputImage, forKey: kCIInputBackgroundImageKey)
            // Fixed: outputImage was force-unwrapped.
            guard let composited = compositeFilter.outputImage else { return }
            DispatchQueue.main.async { [weak self] in
                guard let self else { return }
                var rendered = UIImage(ciImage: composited)
                rendered = rendered.getUpImg(imgOrigOritentation: self.imgOritentation)
                self.mImgView.image = rendered
            }
        }
    } else if selectedIndex == 3 {
        // Cross-eye: same composition as parallel-eye, with the eyes swapped.
        let joined = joinImages(leftImage: rciImage, rightImage: lciImage)
        guard let rendered = convertCIImageToUIImage(ciImage: joined) else { return }
        DispatchQueue.main.async { [weak self] in
            self?.mImgView.image = rendered
        }
    }
}
|
||
//MARK: - Spatial-photo detection
/// Returns true when `originalData` decodes as a multi-frame (spatial) image.
/// A non-spatial photo has a single frame; a spatial photo stores a second
/// (right-eye) frame, so the properties lookup at index 1 only succeeds for
/// spatial photos. (Spatial photos also carry `{HEIF}` maker properties.)
func isSpatialImage(originalData: Data) -> Bool {
    guard let imageSource = CGImageSourceCreateWithData(originalData as CFData, nil) else {
        return false
    }

    // Fixed: check the frame count explicitly instead of relying solely on the
    // index-1 properties lookup failing for single-frame images.
    guard CGImageSourceGetCount(imageSource) >= 2 else {
        return false
    }

    guard let properties = CGImageSourceCopyPropertiesAtIndex(imageSource, 1, nil) as? [CFString: Any] else {
        return false
    }
    print(properties)
    return true
}
|
||
//MARK: - Pixel-buffer creation
/// Renders `image` into a newly created 32BGRA CVPixelBuffer sized to `frame`,
/// offsetting the drawing so that `frame.origin` maps to the buffer's origin.
/// Returns nil when buffer or context creation fails, or the image has no
/// CGImage backing.
func createCVPixelBuffer(from image: UIImage, with frame: CGRect) -> CVPixelBuffer? {
    let options: [String: Any] = [
        kCVPixelBufferCGImageCompatibilityKey as String: true,
        kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
    ]

    var pixelBuffer: CVPixelBuffer?
    let status = CVPixelBufferCreate(kCFAllocatorDefault,
                                     Int(frame.width),
                                     Int(frame.height),
                                     kCVPixelFormatType_32BGRA,
                                     options as CFDictionary,
                                     &pixelBuffer)
    guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
        return nil
    }

    // Fixed: image.cgImage was force-unwrapped below; a CIImage-backed UIImage
    // has no cgImage and would have crashed.
    guard let cgImage = image.cgImage else {
        return nil
    }

    CVPixelBufferLockBaseAddress(buffer, [])
    // Fixed: the early return on context failure used to leave the buffer locked.
    defer { CVPixelBufferUnlockBaseAddress(buffer, []) }

    let pixelData = CVPixelBufferGetBaseAddress(buffer)
    let colorSpace = CGColorSpaceCreateDeviceRGB()

    guard let context = CGContext(data: pixelData,
                                  width: Int(frame.width),
                                  height: Int(frame.height),
                                  bitsPerComponent: 8,
                                  bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
                                  space: colorSpace,
                                  bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) else {
        return nil
    }

    context.translateBy(x: -frame.origin.x, y: -frame.origin.y)
    context.draw(cgImage, in: CGRect(origin: .zero, size: image.size))

    return buffer
}
|
||
|
||
|
||
/// Composites the two eye images side by side at half scale (left eye on the
/// left, right eye on the right) without any orientation correction.
/// NOTE(review): near-duplicate of `joinImages`, which additionally normalises
/// orientation — consider consolidating.
func joinImages2( leftImage:CIImage, rightImage:CIImage) -> CIImage {
    let leftUI = UIImage(ciImage: leftImage)
    let rightUI = UIImage(ciImage: rightImage)

    // Output canvas: each half-width slot holds one eye at half scale.
    let canvasWidth = leftUI.size.width / 2 + rightUI.size.width / 2
    let canvasHeight = leftUI.size.height / 2
    let canvasSize = CGSize(width: canvasWidth, height: canvasHeight)

    UIGraphicsBeginImageContextWithOptions(canvasSize, false, 1)
    leftUI.draw(in: CGRect(x: 0, y: 0, width: canvasWidth / 2, height: canvasHeight))
    rightUI.draw(in: CGRect(x: canvasWidth / 2, y: 0, width: canvasWidth / 2, height: canvasHeight))
    let composed = UIGraphicsGetImageFromCurrentImageContext()!
    UIGraphicsEndImageContext()

    return CIImage(cgImage: composed.cgImage!)
}
|
||
|
||
|
||
/// Loads the first video track of `asset` and returns its preferred transform
/// together with its display size (the natural size with the transform
/// applied, absolute-valued so rotations don't yield negative dimensions).
/// - Throws: `VideoReaderError.invalidVideo` when the asset has no video track.
func getOrientationAndResolutionSizeForVideo(asset:AVAsset) async throws -> (CGAffineTransform, CGSize) {
    let videoTracks = try await asset.loadTracks(withMediaType: AVMediaType.video)
    guard let videoTrack = videoTracks.first else {
        throw VideoReaderError.invalidVideo
    }
    let naturalSize = try await videoTrack.load(.naturalSize)
    let transform = try await videoTrack.load(.preferredTransform)
    let displaySize = naturalSize.applying(transform)
    return (transform, CGSize(width: abs(displaySize.width), height: abs(displaySize.height)))
}
|
||
|
||
/// Renders a CIImage through a CIContext into a CGImage-backed UIImage.
/// Returns nil when rendering fails.
func convertCIImageToUIImage(ciImage: CIImage) -> UIImage? {
    let renderContext = CIContext(options: nil)
    guard let cgImage = renderContext.createCGImage(ciImage, from: ciImage.extent) else {
        return nil
    }
    return UIImage(cgImage: cgImage)
}
|
||
/// Composites the two eye images side by side at half scale, first rotating
/// each to the "up" orientation recorded in `imgOritentation`.
func joinImages( leftImage:CIImage, rightImage:CIImage) -> CIImage {
    let leftUI = UIImage(ciImage: leftImage).getUpImg(imgOrigOritentation: imgOritentation)
    let rightUI = UIImage(ciImage: rightImage).getUpImg(imgOrigOritentation: imgOritentation)

    // Output canvas: each half-width slot holds one eye at half scale.
    let canvasWidth = leftUI.size.width / 2 + rightUI.size.width / 2
    let canvasHeight = leftUI.size.height / 2
    let canvasSize = CGSize(width: canvasWidth, height: canvasHeight)

    UIGraphicsBeginImageContextWithOptions(canvasSize, false, 1)
    leftUI.draw(in: CGRect(x: 0, y: 0, width: canvasWidth / 2, height: canvasHeight))
    rightUI.draw(in: CGRect(x: canvasWidth / 2, y: 0, width: canvasWidth / 2, height: canvasHeight))
    let composed = UIGraphicsGetImageFromCurrentImageContext()!
    UIGraphicsEndImageContext()

    return CIImage(cgImage: composed.cgImage!)
}
||
|
||
/// Renders `ciImage` into a newly allocated 32ARGB CVPixelBuffer sized to the
/// image's extent. Returns nil when buffer creation fails.
func pixelBuffer(from ciImage: CIImage) -> CVPixelBuffer? {
    let attributes: [String: Any] = [
        kCVPixelBufferCGImageCompatibilityKey as String: kCFBooleanTrue,
        kCVPixelBufferCGBitmapContextCompatibilityKey as String: kCFBooleanTrue
    ]

    let extentWidth = Int(ciImage.extent.width)
    let extentHeight = Int(ciImage.extent.height)

    var buffer: CVPixelBuffer?
    let creationStatus = CVPixelBufferCreate(kCFAllocatorDefault, extentWidth, extentHeight, kCVPixelFormatType_32ARGB, attributes as CFDictionary, &buffer)
    guard creationStatus == kCVReturnSuccess, let createdBuffer = buffer else {
        return nil
    }

    CIContext().render(ciImage, to: createdBuffer)
    return createdBuffer
}
||
|
||
|
||
|
||
|
||
}
|