VPCamera3/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController.swift
2024-03-13 15:54:24 +08:00

593 lines
23 KiB
Swift
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

//
// CCSpatialVideoDisplayController.swift
// SwiftProject
//
// Created by Zhang, Joyce on 2024/3/2.
//
import UIKit
import AVFoundation
import Photos
import AVKit
import VideoToolbox
/// The display mode used to render the (possibly spatial/MV-HEVC) video.
///
/// NOTE: the raw values are load-bearing — `selectedSpatialType(selectedIndex:)`
/// maps menu row indices onto these raw values (adding 4 when only the two
/// external side-by-side modes are listed), so do not reorder the cases.
enum SpatialType : Int {
    /// Plain 2D playback (single eye view), rendered on-device.
    case monocular2D
    /// Side-by-side stereo pair viewed with the parallel-eyes free-viewing technique.
    case parallelEyes
    /// Red/cyan anaglyph stereo output.
    case redBlueSolid
    /// Side-by-side stereo pair viewed with the crossed-eyes technique.
    case crossedEyes
    // The two modes below are offered only while an external (AirPlay/HDMI)
    // display is connected — see `isAirPlayActive.didSet`.
    /// Half side-by-side (each eye squeezed to half width) for external 3D displays.
    case hsbs
    /// Full side-by-side (full-width per eye) for external 3D displays.
    case fsbs
}
/// Plays back a spatial (MV-HEVC) video and re-renders each frame according to
/// a user-selectable stereo display mode (`SpatialType`). When an external
/// display is connected via AirPlay/HDMI, the mode list switches to the two
/// side-by-side formats and playback can be streamed externally.
class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDelegate {

    /// True while an AirPlay/HDMI output route is active.
    /// Setting this swaps `selectedIndex`/`typeData` between the on-device and
    /// external mode sets and refreshes the top-bar UI.
    var isAirPlayActive: Bool = false {
        didSet {
            if isAirPlayActive {
                // External screen: only the side-by-side 3D formats are offered.
                self.selectedIndex = .hsbs
                self.typeData = [(icon: "type_check", title: "3D HSBS", isHiden: false),
                                 (icon: "type_check", title: "3D FSBS", isHiden: false),
                ]
            } else {
                // On-device playback: the four local viewing modes.
                self.selectedIndex = .monocular2D
                self.typeData = [(icon: "type_check", title: "单眼2D", isHiden: false),
                                 (icon: "type_check", title: "平行眼", isHiden: false),
                                 (icon: "type_check", title: "红蓝立体", isHiden: false),
                                 (icon: "type_check", title: "交叉眼", isHiden: false)]
            }
            // Reconfigure the navigation-bar button / menu footer for the new state.
            setttinisScreenMirroring(isScreenMirroring: isAirPlayActive)
        }
    }

    // Reader pipeline used by the frame convertor to pull both MV-HEVC layers.
    var assetReader: AVAssetReader?
    var assetOutput: AVAssetReaderTrackOutput?
    var assetTrack: AVAssetTrack?

    var link = false       // an external device is connected
    var isPlaying = false  // external streaming is currently active

    /// Currently selected display mode.
    var selectedIndex: SpatialType = .monocular2D {
        didSet {
            // Hook for reacting to mode changes; the composition closure reads
            // this property on every frame, so nothing else is needed here.
        }
    }

    /// Converts a source frame into the selected stereo layout.
    let videoTranserConvertor = PlayByTransferConvertor()

    /// Source asset, built lazily from `sourceVideoURL`.
    /// NOTE(review): force-unwraps `sourceVideoURL` — callers must set it before
    /// this controller's view loads.
    lazy var videoOriginalAsset: AVAsset = {
        let asset = AVAsset(url: sourceVideoURL!)
        return asset
    }()

    var videoOriginalPHAsset: PHAsset?
    /// URL of the video to play; must be assigned by the presenting controller.
    var sourceVideoURL: URL?
    let outputVideoURL: URL = URL.documentsDirectory.appending(path: "output1111.mp4")
    var imgData: Data?

    var player: AVPlayer?
    var playerController: AVPlayerViewController?

    /// Decorative header image behind the navigation bar (393x236 design size).
    lazy var mTopImgView: UIImageView = {
        let view = UIImageView(frame: CGRect(x: 0, y: 0, width: SCREEN_Width, height: SCREEN_Height * 236/393))
        view.image = UIImage.init(named: "BG_Top")
        return view
    }()

    /// Right nav-bar button that pushes the video transform screen (tag 201).
    lazy var transformButton: UIButton = {
        let transformButton = UIButton.init(type: UIButton.ButtonType.custom)
        transformButton.tag = 201
        transformButton.isSelected = false
        transformButton.backgroundColor = UIColor(hexString: "#5326D6")
        transformButton.addTarget(self, action: #selector(navgationButtonClick2(sender:)), for: UIControl.Event.touchUpInside)
        let img2: UIImage = UIImage.init(named: "transform_button" as String)!
        transformButton.setImage(img2, for: UIControl.State.normal)
        transformButton.frame = CGRect(x: 0, y: 0, width: 56, height: 36)
        transformButton.layer.cornerRadius = 18
        transformButton.layer.masksToBounds = true
        transformButton.centerY = StatuBar_Height + NavBar_Height * 0.5
        transformButton.right = SCREEN_Width - 24
        return transformButton
    }()

    /// Center nav-bar button showing the current display mode; opens the mode
    /// menu when tapped (tag 202).
    lazy var mTopCenterTypeButton: UIButton = {
        let button = UIButton()
        button.backgroundColor = UIColor.hexStringToColor(hexString: "#1F1E20")
        button.tag = 202
        button.isSelected = false
        button.addTarget(self, action: #selector(navgationButtonClick2(sender:)), for: UIControl.Event.touchUpInside)
        button.frame = CGRect(x: 2, y: 10, width: SCREEN_Width * 0.4, height: 36)
        button.centerY = StatuBar_Height + NavBar_Height * 0.5
        button.centerX = SCREEN_Width * 0.5
        button.clipsToBounds = true
        button.layer.cornerRadius = 18
        button.layer.borderWidth = 1
        button.layer.borderColor = UIColor.white.cgColor
        button.setTitle("单眼2D", for: UIControl.State.normal)
        button.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
        button.updateBtnEdgeInsets(style: .Right, space: 10)
        button.setTitleColor(UIColor.white, for: UIControl.State.normal)
        button.titleLabel?.font = KFont_Medium(14)
        return button
    }()

    /// Rows shown in the display-mode menu; reassigned wholesale when the
    /// external-display state changes.
    /// NOTE(review): the didSet touches `menuView`, which is lazily created and
    /// force-unwraps `navtionImgView` — assigning `typeData` before the view
    /// hierarchy exists would crash; verify callers.
    var typeData: [(icon: String, title: String, isHiden: Bool)] = [(icon: "type_check", title: "单眼2D", isHiden: false),
                                                                    (icon: "type_check", title: "平行眼", isHiden: false),
                                                                    (icon: "type_check", title: "红蓝立体", isHiden: false),
                                                                    (icon: "type_check", title: "交叉眼", isHiden: false)] {
        didSet {
            menuView.setData(datas: self.typeData)
        }
    }

    /// Drop-down menu listing the available display modes, anchored under the
    /// navigation title view.
    lazy var menuView: CCSpatialDisplayTypeView = {
        let parameters: [CCSpatialDisplayTypeConfigure] = [
            .PopMenuTextColor(UIColor.white),
            .popMenuItemHeight(40),
            .PopMenuTextFont(KFont_Medium(12)),
            .PopMenuBackgroudColor(UIColor(hexString: "#1F1E20"))
        ]
        // Arrow position in window coordinates.
        let pointOnScreen = navtionImgView!.convert(CGPointMake(navtionImgView!.centerX, navtionImgView!.bottom), to: KWindow)
        // [weak self]: the menu is stored by `self`, so a strong capture in its
        // dismiss callback would create a retain cycle.
        let popMenu = CCSpatialDisplayTypeView(menuWidth: SCREEN_Width * 0.4, arrow: pointOnScreen, datas: typeData, configures: parameters) { [weak self] in
            // Restore the "down" arrow once the menu is dismissed.
            self?.mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
        }
        return popMenu
    }()

    /// Conversion progress bar (currently not added to the view hierarchy, but
    /// `tipsButton` is laid out relative to it).
    lazy var progressView: UIProgressView = {
        var view = UIProgressView(progressViewStyle: .default)
        view.frame = CGRect(x: 24, y: SCREEN_Height - KStatusBarHeight - 20, width: SCREEN_Width - 48, height: 36)
        view.progress = 0.0
        return view
    }()

    /// Button that shows the device-operation tips overlay (tag 203).
    lazy var tipsButton: UIButton = {
        let button = UIButton.init(type: UIButton.ButtonType.custom)
        button.tag = 203
        button.backgroundColor = .clear
        button.addTarget(self, action: #selector(navgationButtonClick2(sender:)), for: UIControl.Event.touchUpInside)
        let img2: UIImage = UIImage.init(named: "tips_button" as String)!
        button.setImage(img2, for: UIControl.State.normal)
        button.frame = CGRect(x: 0, y: 0, width: 130, height: 30)
        button.titleLabel?.font = KFont_Regular(14)
        button.titleLabel?.adjustsFontSizeToFitWidth = true
        button.updateBtnEdgeInsets(style: .Left, space: 8)
        button.centerY = progressView.top - 70
        button.centerX = self.view.centerX
        return button
    }()

    /// Fired when playback reaches the end of the item.
    @objc func notification_PlayerEndTime(notification: Notification) {
        print("PlayerEndTime....")
    }

    /// Fired when the item's current time jumps discontinuously (e.g. a seek).
    @objc func notification_PlayerTimeJump(notification: Notification) {
        print("time jump....")
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Prepare the asset-reader pipeline in the background; the composition
        // closure re-creates the reader per frame via quickLoadAReader anyway.
        Task {
            print("ccs viewdid load ---------")
            await self.loadVideoAssetReader()
        }
        let playerItem = self.getPlayerItem()
        player = AVPlayer(playerItem: playerItem)
        self.view.backgroundColor = UIColor(hexString: "#060507")
        // Navigation bar chrome.
        self.setLeftOneBtnImg(imgStr: "spatial_back_button")
        self.setNavgationBarColorImg(color: .clear)
        self.setNavgationBarLine(color: .clear)
        self.view.addSubview(mTopImgView)
        self.view.bringSubviewToFront(self.navtionBar!)
        navtionBar?.addSubview(transformButton)
        navtionBar?.addSubview(mTopCenterTypeButton)
        // Embedded player UI.
        playerController = AVPlayerViewController()
        playerController?.player = player
        playerController?.delegate = self
        playerController!.view.backgroundColor = .clear
        playerController?.view.frame = CGRect.init(x: 0, y: 170, width: self.view.frame.size.width, height: 400)
        self.addChild(playerController!)
        self.view.addSubview(playerController!.view)
        self.view.addSubview(tipsButton)
        // Track AirPlay/HDMI connection changes via the audio route.
        NotificationCenter.default.addObserver(self, selector: #selector(airPlayStatusDidChange(_:)), name: AVAudioSession.routeChangeNotification, object: nil)
        self.player!.play()
        NotificationCenter.default.addObserver(self, selector: #selector(notification_PlayerEndTime(notification:)), name: AVPlayerItem.didPlayToEndTimeNotification, object: nil)
        NotificationCenter.default.addObserver(self, selector: #selector(notification_PlayerTimeJump(notification:)), name: AVPlayerItem.timeJumpedNotification, object: nil)
    }

    /// Builds the player item whose video composition re-renders every frame
    /// according to `selectedIndex`.
    func getPlayerItem() -> AVPlayerItem {
        let temItem = AVPlayerItem(asset: self.videoOriginalAsset)
        // [weak self]: the item (retained via `player`) retains this closure;
        // a strong capture would create a retain cycle through `self`.
        temItem.videoComposition = AVVideoComposition(asset: temItem.asset) { [weak self] request in
            guard let self else {
                // Controller is gone — pass the frame through untouched.
                request.finish(with: request.sourceImage, context: nil)
                return
            }
            let compositionTime = request.compositionTime
            // One-frame-wide reading window starting at the requested time.
            let end: CMTime = CMTimeMake(value: Int64(compositionTime.value + 1), timescale: compositionTime.timescale)
            let tr = CMTimeRange(start: compositionTime, end: end)
            self.quickLoadAReader(timeRange: tr)
            var ciImg: CIImage? = nil
            switch self.selectedIndex {
            case .monocular2D:
                // Plain 2D: the source frame is used as-is.
                ciImg = request.sourceImage
            case .parallelEyes, .redBlueSolid, .crossedEyes, .hsbs, .fsbs:
                // All stereo modes share one conversion entry point; the
                // convertor branches on `type` internally.
                ciImg = self.videoTranserConvertor.convertVideo(asset: self.videoOriginalAsset, assetOutput: self.assetOutput!, type: self.selectedIndex, time: compositionTime)
            }
            // If conversion produced nothing the request is left unfinished,
            // matching the original behavior.
            if let ciImg {
                request.finish(with: ciImg, context: nil)
            }
        }
        return temItem
    }

    /// Loads the video track once, then (re)creates the reader/output pair.
    func loadVideoAssetReader() async {
        do {
            if self.assetTrack == nil {
                // NOTE(review): `.first!` crashes on an asset with no video
                // track — assumed impossible for this screen; verify callers.
                assetTrack = try await self.videoOriginalAsset.loadTracks(withMediaType: .video).first!
            }
            let timeRange = CMTimeRange(start: .zero, duration: .positiveInfinity)
            self.quickLoadAReader(timeRange: timeRange)
        } catch {
            print("Error loading video: \(error)")
        }
    }

    /// Tears down any in-flight reader and starts a new one restricted to
    /// `timeRange`, configured to decode both MV-HEVC layers (IDs 0 and 1).
    func quickLoadAReader(timeRange: CMTimeRange) {
        if assetOutput != nil {
            assetOutput?.markConfigurationAsFinal()
        }
        if assetReader != nil {
            assetReader?.cancelReading()
        }
        // NOTE(review): `try!` crashes if the reader cannot be created (e.g.
        // asset became unreadable); kept to preserve existing behavior.
        assetReader = try! AVAssetReader(asset: self.videoOriginalAsset)
        assetOutput = AVAssetReaderTrackOutput(
            track: self.assetTrack!,
            outputSettings: [
                AVVideoDecompressionPropertiesKey: [
                    // Request both stereo layers from the MV-HEVC decoder.
                    kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
                ],
            ]
        )
        assetOutput?.supportsRandomAccess = true
        assetReader!.timeRange = timeRange
        assetReader!.add(assetOutput!)
        assetReader!.startReading()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // Pick up any external display that was connected before we appeared.
        checkAirPlayStatus()
    }

    // MARK: - AirPlay / external display

    @objc private func airPlayStatusDidChange(_ notification: Notification) {
        checkAirPlayStatus()
    }

    /// Re-evaluates whether an AirPlay/HDMI output route is active.
    /// NOTE(review): this reassigns `isAirPlayActive` on EVERY route change
    /// (headphones, speaker, ...), and its didSet unconditionally resets the
    /// selected display mode — likely unintended; confirm before changing.
    private func checkAirPlayStatus() {
        print("设备连接变化")
        let currentRoute = AVAudioSession.sharedInstance().currentRoute
        self.isAirPlayActive = currentRoute.outputs.contains { output in
            return output.portType == AVAudioSession.Port.HDMI ||
                output.portType == AVAudioSession.Port.airPlay
        }
    }

    /// Updates the center button and menu footer for the given mirroring state.
    func setttinisScreenMirroring(isScreenMirroring: Bool) {
        if isScreenMirroring {
            print("已连接")
            link = true
            isPlaying = true
            playerController!.player!.usesExternalPlaybackWhileExternalScreenIsActive = true
            playerController!.player!.allowsExternalPlayback = true
            mTopCenterTypeButton.setImage(UIImage.init(named: "linked_button"), for: .normal)
            mTopCenterTypeButton.setTitleColor(UIColor(hexString: "#D0C0FF"), for: .normal)
            mTopCenterTypeButton.layer.borderColor = UIColor(hexString: "#D0C0FF").cgColor
            mTopCenterTypeButton.backgroundColor = UIColor(hexString: "#5326D6")
            mTopCenterTypeButton.updateBtnEdgeInsets(style: .Left, space: 5)
            // NOTE(review): `isPlaying` was just set true, so the else branch
            // below is unreachable here; kept as-is to preserve behavior.
            if isPlaying {
                menuView.showFooterView(isShow: true, showText: "结束串流")
                mTopCenterTypeButton.setTitle("外部串流中", for: UIControl.State.normal)
            } else {
                menuView.showFooterView(isShow: true, showText: "开始串流")
                mTopCenterTypeButton.setTitle("已连接外部设备", for: UIControl.State.normal)
            }
        } else {
            print("未连接")
            link = false
            isPlaying = false
            // Disable external playback when nothing is connected.
            playerController!.player!.usesExternalPlaybackWhileExternalScreenIsActive = false
            playerController!.player!.allowsExternalPlayback = false
            // Restore the title for the current on-device mode.
            if self.selectedIndex == .monocular2D {
                mTopCenterTypeButton.setTitle("单眼2D", for: UIControl.State.normal)
            } else if self.selectedIndex == .redBlueSolid {
                mTopCenterTypeButton.setTitle("红蓝立体", for: UIControl.State.normal)
            } else if self.selectedIndex == .crossedEyes {
                mTopCenterTypeButton.setTitle("交叉眼", for: UIControl.State.normal)
            } else if self.selectedIndex == .parallelEyes {
                mTopCenterTypeButton.setTitle("平行眼", for: UIControl.State.normal)
            }
            mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
            mTopCenterTypeButton.setTitleColor(UIColor.white, for: UIControl.State.normal)
            mTopCenterTypeButton.layer.borderColor = UIColor.white.cgColor
            mTopCenterTypeButton.backgroundColor = UIColor(hexString: "#1F1E20")
            mTopCenterTypeButton.updateBtnEdgeInsets(style: .Right, space: 10)
            menuView.showFooterView(isShow: false, showText: "")
        }
    }

    deinit {
        print("patialvideodisplaycontroler deinit......")
        assetReader?.cancelReading()
        NotificationCenter.default.removeObserver(self)
    }

    // MARK: - Actions

    /// Shared handler for the nav-bar/tips buttons, dispatched on `tag`:
    /// 201 = transform, 202 = display-mode menu, 203 = tips overlay.
    @objc public func navgationButtonClick2(sender: UIButton) {
        if sender.tag == 200 {
            // Reserved (back handling lives in BaseController).
        } else if sender.tag == 201 {
            // Push the VR transform screen for the same source video.
            let transVC = VRVideoTransformController()
            transVC.videoOriginalPHAsset = self.videoOriginalPHAsset
            transVC.sourceVideoURL = sourceVideoURL
            self.navigationController?.pushViewController(transVC, animated: true)
        } else if sender.tag == 202 {
            // Show the display-mode menu.
            menuView.show()
            if link {
                // Footer reflects the external-streaming state.
                if isPlaying {
                    menuView.showFooterView(isShow: true, showText: "结束串流")
                    mTopCenterTypeButton.setTitle("外部串流中", for: UIControl.State.normal)
                } else {
                    menuView.showFooterView(isShow: true, showText: "开始串流")
                    mTopCenterTypeButton.setTitle("已连接外部设备", for: UIControl.State.normal)
                }
            } else {
                mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_up"), for: .normal)
            }
            // Row selection: switch display mode and mirror the title.
            menuView.didSelectMenuBlock = { [weak self] (index: Int) -> Void in
                print("block select \(index)")
                self?.selectedSpatialType(selectedIndex: index)
                let title = self?.typeData[index].title
                self?.mTopCenterTypeButton.setTitle(title, for: UIControl.State.normal)
            }
            // Footer tap toggles external streaming.
            // [weak self]: the block is stored by `menuView`, which `self`
            // retains — a strong capture would create a retain cycle.
            menuView.tapFooterActionBlock = { [weak self] in
                self?.startOrEndExternalVR()
            }
        } else if sender.tag == 203 {
            // Full-screen tips overlay.
            let view = CCDeviceOperationListView.init(frame: CGRectMake(0, 0, KScreenWidth, KScreenHeight))
            KWindow?.addSubview(view)
        }
    }

    /// Toggles external playback while a device is linked; no-op otherwise.
    func startOrEndExternalVR() {
        if link == true {
            isPlaying = !isPlaying
            if isPlaying == true {
                playerController!.player!.usesExternalPlaybackWhileExternalScreenIsActive = true
                playerController!.player!.allowsExternalPlayback = true
            } else {
                playerController!.player!.usesExternalPlaybackWhileExternalScreenIsActive = false
                playerController!.player!.allowsExternalPlayback = false
            }
        }
    }

    /// Maps a menu row index onto a `SpatialType`.
    /// When only the two external modes are listed (`typeData.count == 2`),
    /// indices 0/1 are shifted by 4 to land on `.hsbs`/`.fsbs`.
    func selectedSpatialType(selectedIndex: Int) {
        var si = selectedIndex
        if self.typeData.count == 2 {
            si = selectedIndex + 4
        }
        let rsi: SpatialType = SpatialType(rawValue: si) ?? .monocular2D
        self.selectedIndex = rsi
    }

    /// Returns true if the asset carries the QuickTime spatial-format metadata
    /// key, i.e. it was recorded as an Apple spatial video.
    func isSpatialVideo(asset: AVAsset) -> Bool {
        let metadata = asset.metadata(forFormat: AVMetadataFormat.quickTimeMetadata)
        let isSpatialVideo = metadata.contains { item in
            if let identifier = item.identifier?.rawValue {
                return identifier == "mdta/com.apple.quicktime.spatial.format-version"
            }
            return false
        }
        return isSpatialVideo
    }
}