实现边转边播 (Implement convert-while-playing: convert spatial video frames on the fly during playback)
This commit is contained in:
parent
ff75b1ec71
commit
51b3cd596f
@ -14,6 +14,7 @@
|
||||
00D33BF42B998BF700604A44 /* SpatialImageConvertor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00D33BF32B998BF700604A44 /* SpatialImageConvertor.swift */; };
|
||||
00D33BF62B99A19900604A44 /* SpatialVideoConvertor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00D33BF52B99A19900604A44 /* SpatialVideoConvertor.swift */; };
|
||||
00D33BFA2B9AB21A00604A44 /* ZZHAVExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00D33BF92B9AB21A00604A44 /* ZZHAVExtension.swift */; };
|
||||
00ED6B342BA04AC200915BDE /* PlayByTransferConvertor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00ED6B332BA04AC200915BDE /* PlayByTransferConvertor.swift */; };
|
||||
04E1D3F12B68EDFE00743F2F /* CCWebController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04E1D3F02B68EDFE00743F2F /* CCWebController.swift */; };
|
||||
1E02C9322B8990C600DD3143 /* CCDeviceOperationListView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E02C9312B8990C600DD3143 /* CCDeviceOperationListView.swift */; };
|
||||
1E02C9342B89916C00DD3143 /* CCDeviceOperationListCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E02C9332B89916C00DD3143 /* CCDeviceOperationListCell.swift */; };
|
||||
@ -99,6 +100,7 @@
|
||||
00D33BF32B998BF700604A44 /* SpatialImageConvertor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpatialImageConvertor.swift; sourceTree = "<group>"; };
|
||||
00D33BF52B99A19900604A44 /* SpatialVideoConvertor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpatialVideoConvertor.swift; sourceTree = "<group>"; };
|
||||
00D33BF92B9AB21A00604A44 /* ZZHAVExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHAVExtension.swift; sourceTree = "<group>"; };
|
||||
00ED6B332BA04AC200915BDE /* PlayByTransferConvertor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayByTransferConvertor.swift; sourceTree = "<group>"; };
|
||||
04E1D3F02B68EDFE00743F2F /* CCWebController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCWebController.swift; sourceTree = "<group>"; };
|
||||
1E02C9312B8990C600DD3143 /* CCDeviceOperationListView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCDeviceOperationListView.swift; sourceTree = "<group>"; };
|
||||
1E02C9332B89916C00DD3143 /* CCDeviceOperationListCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCDeviceOperationListCell.swift; sourceTree = "<group>"; };
|
||||
@ -234,6 +236,7 @@
|
||||
00D33BF92B9AB21A00604A44 /* ZZHAVExtension.swift */,
|
||||
1EE5C5F92B8F97BF00EDFC2F /* SpatialVideoWriter.swift */,
|
||||
005580772B9F1525004B9567 /* ZZHHelper.swift */,
|
||||
00ED6B332BA04AC200915BDE /* PlayByTransferConvertor.swift */,
|
||||
);
|
||||
path = Util;
|
||||
sourceTree = "<group>";
|
||||
@ -673,6 +676,7 @@
|
||||
AF2120DA2B4E9BD400400B7F /* CCAlert.swift in Sources */,
|
||||
1EFB8C702B88DA4800C72119 /* CCBottomMenuCell.swift in Sources */,
|
||||
AF2120FA2B4EA5BD00400B7F /* CCHomeController.swift in Sources */,
|
||||
00ED6B342BA04AC200915BDE /* PlayByTransferConvertor.swift in Sources */,
|
||||
AF2121072B4EA63000400B7F /* CCHomeData.swift in Sources */,
|
||||
AF2120FE2B4EA5F100400B7F /* CCLoginController.swift in Sources */,
|
||||
AF2120CE2B4E979500400B7F /* CCTextField.swift in Sources */,
|
||||
|
||||
@ -131,7 +131,10 @@ class CCSpatialPhotoDisplayController: BaseController {
|
||||
|
||||
//init (test随机生成点位置,注意:arrow点是基于屏幕的位置)
|
||||
let pointOnScreen = navtionImgView!.convert(CGPointMake(navtionImgView!.centerX, navtionImgView!.bottom), to: KWindow)
|
||||
let popMenu = CCSpatialDisplayTypeView(menuWidth: SCREEN_Width * 0.4, arrow: pointOnScreen, datas: typeData,configures: parameters)
|
||||
let popMenu = CCSpatialDisplayTypeView(menuWidth: SCREEN_Width * 0.4, arrow: pointOnScreen, datas: typeData,configures: parameters){
|
||||
//dissmiss回调
|
||||
self.mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
|
||||
}
|
||||
return popMenu
|
||||
}()
|
||||
|
||||
|
||||
@ -8,8 +8,8 @@
|
||||
import UIKit
|
||||
import AVFoundation
|
||||
import Photos
|
||||
|
||||
|
||||
import AVKit
|
||||
import VideoToolbox
|
||||
enum SpatialType : Int {
|
||||
|
||||
/*
|
||||
@ -24,32 +24,70 @@ enum SpatialType : Int {
|
||||
case parallelEyes
|
||||
case redBlueSolid
|
||||
case crossedEyes
|
||||
//以下两种标记:外部串流时使用
|
||||
case hsbs
|
||||
case fsbs
|
||||
}
|
||||
|
||||
class CCSpatialVideoDisplayController: BaseController {
|
||||
|
||||
var isAirPlayActive:Bool = false {
|
||||
didSet{
|
||||
//跟新selectedIndex、typeData的值
|
||||
if(isAirPlayActive){
|
||||
self.selectedIndex = .hsbs//在串流模式下,默认选项为hsbs
|
||||
self.typeData = [(icon:"type_check",title:"3D HSBS",isHiden:false),
|
||||
(icon:"type_check",title:"3D FSBS",isHiden:false),
|
||||
]
|
||||
}
|
||||
else{
|
||||
self.selectedIndex = .monocular2D//在非串流模式下,默认选项为monocular2D
|
||||
self.typeData = [(icon:"type_check",title:"单眼2D",isHiden:false),
|
||||
(icon:"type_check",title:"平行眼",isHiden:false),
|
||||
(icon:"type_check",title:"红蓝立体",isHiden:false),
|
||||
(icon:"type_check",title:"交叉眼",isHiden:false)]
|
||||
}
|
||||
//更新串流UI
|
||||
setttinisScreenMirroring(isScreenMirroring: isAirPlayActive)
|
||||
}
|
||||
}
|
||||
|
||||
var assetReader:AVAssetReader?
|
||||
|
||||
var assetOutput:AVAssetReaderTrackOutput?
|
||||
|
||||
|
||||
|
||||
var link = false//是否已连接设备
|
||||
var isPlaying = false//是否正在播放
|
||||
|
||||
var selectedIndex:SpatialType = .monocular2D//记录当前选择的菜单选项
|
||||
{
|
||||
didSet{
|
||||
//进行相应解码操作,边解边播
|
||||
}
|
||||
}
|
||||
|
||||
let convertor2 = VideoConvertor2()
|
||||
let videoTranserConvertor = PlayByTransferConvertor()
|
||||
|
||||
lazy var videoOriginalAsset:AVAsset = {
|
||||
let asset = AVAsset(url: sourceVideoURL!)
|
||||
return asset
|
||||
}()
|
||||
|
||||
var videoOriginalAsset:AVAsset?
|
||||
var videoTempAsset:AVAsset?
|
||||
var videoOriginalPHAsset:PHAsset?
|
||||
|
||||
//图片源数据
|
||||
var sourceVideoURL:URL?
|
||||
var outputVideoURL:URL?
|
||||
let outputVideoURL:URL = URL.documentsDirectory.appending(path:"output1111.mp4")
|
||||
|
||||
|
||||
var imgData:Data?
|
||||
//空间视频 交叉眼 红蓝立体 平行眼 高斯模糊
|
||||
var type = 0
|
||||
|
||||
var player:AVPlayer = AVPlayer()
|
||||
var player:AVPlayer?
|
||||
|
||||
var playerLay:AVPlayerLayer?
|
||||
|
||||
|
||||
|
||||
lazy var mTopImgView:UIImageView = {
|
||||
@ -102,26 +140,19 @@ class CCSpatialVideoDisplayController: BaseController {
|
||||
return button
|
||||
}()
|
||||
|
||||
lazy var playerLay:AVPlayerLayer = {
|
||||
|
||||
let view = AVPlayerLayer()
|
||||
view.backgroundColor = UIColor.black.cgColor
|
||||
view.frame = CGRect.init(x: 0, y: 250, width: self.view.frame.size.width, height: 240)
|
||||
return view
|
||||
}()
|
||||
|
||||
var typeData:[(icon:String,title:String,isHiden:Bool)] = [(icon:"type_check",title:"单眼2D",isHiden:false),
|
||||
(icon:"type_check",title:"平行眼",isHiden:false),
|
||||
(icon:"type_check",title:"红蓝立体",isHiden:false),
|
||||
(icon:"type_check",title:"交叉眼",isHiden:false)]
|
||||
{
|
||||
didSet {
|
||||
menuView.setData(datas: self.typeData)
|
||||
}
|
||||
}
|
||||
|
||||
lazy var menuView: CCSpatialDisplayTypeView = {
|
||||
//数据源(icon可不填)
|
||||
// let popData = [(icon:"type_check",title:"单眼2D",isHiden:false),
|
||||
// (icon:"type_check",title:"平行眼",isHiden:false),
|
||||
// (icon:"type_check",title:"红蓝立体",isHiden:false),
|
||||
// (icon:"type_check",title:"交叉眼",isHiden:false)]
|
||||
|
||||
//设置参数
|
||||
let parameters:[CCSpatialDisplayTypeConfigure] = [
|
||||
.PopMenuTextColor(UIColor.white),
|
||||
@ -133,7 +164,10 @@ class CCSpatialVideoDisplayController: BaseController {
|
||||
|
||||
//init (test随机生成点位置,注意:arrow点是基于屏幕的位置)
|
||||
let pointOnScreen = navtionImgView!.convert(CGPointMake(navtionImgView!.centerX, navtionImgView!.bottom), to: KWindow)
|
||||
let popMenu = CCSpatialDisplayTypeView(menuWidth: SCREEN_Width * 0.4, arrow: pointOnScreen, datas: typeData,configures: parameters)
|
||||
let popMenu = CCSpatialDisplayTypeView(menuWidth: SCREEN_Width * 0.4, arrow: pointOnScreen, datas: typeData,configures: parameters){
|
||||
//dissmiss回调
|
||||
self.mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
|
||||
}
|
||||
return popMenu
|
||||
}()
|
||||
|
||||
@ -171,15 +205,14 @@ class CCSpatialVideoDisplayController: BaseController {
|
||||
|
||||
override func viewDidLoad() {
|
||||
super.viewDidLoad()
|
||||
Task {
|
||||
await self.loadVideoAssetReader()
|
||||
}
|
||||
|
||||
let playerItem = self.getPlayerItem()
|
||||
player = AVPlayer(playerItem: playerItem)
|
||||
|
||||
self.view.backgroundColor = UIColor(hexString: "#060507")
|
||||
// Do any additional setup after loading the view.
|
||||
|
||||
|
||||
|
||||
// let path = Bundle.main.path(forResource: "img3", ofType: "HEIC")
|
||||
// photoOriginalURL = URL.init(filePath: path!)
|
||||
outputVideoURL = URL.documentsDirectory.appending(path:"output11114.jpg")
|
||||
|
||||
//设置返回按钮图片
|
||||
self.setLeftOneBtnImg(imgStr: "spatial_back_button")
|
||||
@ -193,24 +226,76 @@ class CCSpatialVideoDisplayController: BaseController {
|
||||
navtionBar?.addSubview(transformButton)
|
||||
navtionBar?.addSubview(mTopCenterTypeButton)
|
||||
self.view.addSubview(progressView)
|
||||
self.view.layer.addSublayer(playerLay)
|
||||
|
||||
playerLay = AVPlayerLayer(player: self.player)
|
||||
playerLay!.backgroundColor = UIColor.clear.cgColor
|
||||
playerLay!.frame = CGRect.init(x: 0, y: 250, width: self.view.frame.size.width, height: 240)
|
||||
|
||||
self.view.layer.addSublayer(playerLay!)
|
||||
self.view.addSubview(tipsButton)
|
||||
|
||||
|
||||
|
||||
if sourceVideoURL != nil {
|
||||
outputVideoURL = URL.documentsDirectory.appending(path:"output1111.mp4")
|
||||
videoOriginalAsset = AVAsset(url: sourceVideoURL!)
|
||||
videoTempAsset = videoOriginalAsset
|
||||
play()
|
||||
}else{
|
||||
print("这不是一张空间图片")
|
||||
}
|
||||
|
||||
// 监听 AirPlay 设备的连接状态
|
||||
NotificationCenter.default.addObserver(self, selector: #selector(airPlayStatusDidChange(_:)), name: AVAudioSession.routeChangeNotification, object: nil)
|
||||
self.player!.play()
|
||||
}
|
||||
|
||||
/// Builds an `AVPlayerItem` for the original asset whose video composition
/// re-renders every frame according to the currently selected display type
/// ("play while converting").
///
/// - Returns: A player item whose frames are converted on the fly via
///   `videoTranserConvertor`.
func getPlayerItem() -> AVPlayerItem {
    let temItem = AVPlayerItem(asset: self.videoOriginalAsset)
    // Real-time per-frame callback; conversions are applied like a filter.
    temItem.videoComposition = AVVideoComposition(asset: temItem.asset) { [self] request in
        let compositionTime = request.compositionTime
        let converted: CIImage?
        switch self.selectedIndex {
        case .monocular2D:
            // Plain 2D playback needs no conversion.
            converted = request.sourceImage
        case .crossedEyes, .parallelEyes, .redBlueSolid, .hsbs, .fsbs:
            // BUGFIX: the original repeated this identical call in five separate
            // cases; collapsed into one grouped case.
            converted = videoTranserConvertor.convertVideo(asset: videoOriginalAsset, assetOutput: self.assetOutput!, type: self.selectedIndex, time: compositionTime)
        }
        // BUGFIX: the original force-unwrapped (`ciImg!`) and crashed whenever
        // the convertor returned nil; fall back to the unmodified frame instead.
        request.finish(with: converted ?? request.sourceImage, context: nil)
    }
    return temItem
}
|
||||
|
||||
// Load the asset reader and its track output.
/// (Re)creates the `AVAssetReader`/`AVAssetReaderTrackOutput` pair used to pull
/// MV-HEVC tagged sample buffers (both eye layers, IDs 0 and 1) from the source
/// video. Cancels a previous in-flight reader before replacing it.
/// Failures are logged and leave `assetReader`/`assetOutput` unchanged/nil.
func loadVideoAssetReader() async {
    do {
        // Cancel any reader that is still mid-read before building a new one.
        if let reader = assetReader, reader.status == .reading {
            reader.cancelReading()
        }
        assetReader = try AVAssetReader(asset: self.videoOriginalAsset)
        // BUGFIX: the original used `.first!`, crashing on assets without a
        // video track (e.g. audio-only files); bail out gracefully instead.
        guard let videoTrack = try await self.videoOriginalAsset.loadTracks(withMediaType: .video).first else {
            print("Error loading video: no video track found")
            return
        }
        assetOutput = AVAssetReaderTrackOutput(
            track: videoTrack,
            outputSettings: [
                // Ask the decoder for both MV-HEVC layers (left/right eye).
                AVVideoDecompressionPropertiesKey: [
                    kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
                ],
            ]
        )
        assetReader!.timeRange = CMTimeRange(start: .zero, duration: .positiveInfinity)
        assetReader!.add(assetOutput!)
        assetReader!.startReading()
    } catch {
        // Best-effort: log and leave reader/output unset; callers guard on them.
        print("Error loading video: \(error)")
    }
}
|
||||
|
||||
override func viewDidAppear(_ animated: Bool) {
|
||||
@ -227,12 +312,10 @@ class CCSpatialVideoDisplayController: BaseController {
|
||||
private func checkAirPlayStatus() {
|
||||
print("设备连接变化")
|
||||
let currentRoute = AVAudioSession.sharedInstance().currentRoute
|
||||
let isAirPlayActive = currentRoute.outputs.contains { output in
|
||||
self.isAirPlayActive = currentRoute.outputs.contains { output in
|
||||
return output.portType == AVAudioSession.Port.HDMI ||
|
||||
output.portType == AVAudioSession.Port.airPlay
|
||||
}
|
||||
|
||||
setttinisScreenMirroring(isScreenMirroring: isAirPlayActive)
|
||||
}
|
||||
|
||||
func setttinisScreenMirroring(isScreenMirroring:Bool){
|
||||
@ -242,8 +325,8 @@ class CCSpatialVideoDisplayController: BaseController {
|
||||
print("已连接")
|
||||
link = true
|
||||
isPlaying = true
|
||||
playerLay.player!.usesExternalPlaybackWhileExternalScreenIsActive = true
|
||||
playerLay.player!.allowsExternalPlayback = true
|
||||
playerLay!.player!.usesExternalPlaybackWhileExternalScreenIsActive = true
|
||||
playerLay!.player!.allowsExternalPlayback = true
|
||||
|
||||
//串流播放中
|
||||
// mTopCenterTypeButton.setTitle("串流播放中", for: UIControl.State.normal)
|
||||
@ -254,8 +337,6 @@ class CCSpatialVideoDisplayController: BaseController {
|
||||
mTopCenterTypeButton.backgroundColor = UIColor(hexString: "#5326D6")
|
||||
mTopCenterTypeButton.updateBtnEdgeInsets(style: .Left, space: 5)
|
||||
|
||||
playerLay.player!.play()
|
||||
|
||||
|
||||
//展示弹出列表尾部
|
||||
if isPlaying {
|
||||
@ -275,8 +356,8 @@ class CCSpatialVideoDisplayController: BaseController {
|
||||
link = false
|
||||
isPlaying = false
|
||||
// 当前未连接到 AirPlay 设备
|
||||
playerLay.player!.usesExternalPlaybackWhileExternalScreenIsActive = false
|
||||
playerLay.player!.allowsExternalPlayback = false
|
||||
playerLay!.player!.usesExternalPlaybackWhileExternalScreenIsActive = false
|
||||
playerLay!.player!.allowsExternalPlayback = false
|
||||
|
||||
if self.selectedIndex == .monocular2D {
|
||||
mTopCenterTypeButton.setTitle("单眼2D", for: UIControl.State.normal)
|
||||
@ -307,13 +388,13 @@ class CCSpatialVideoDisplayController: BaseController {
|
||||
|
||||
|
||||
//播放
|
||||
func play(){
|
||||
|
||||
let playerItem = AVPlayerItem(asset: videoTempAsset!)
|
||||
playerLay.player = AVPlayer(playerItem: playerItem)
|
||||
playerLay.player!.play()
|
||||
playerLay.backgroundColor = UIColor.clear.cgColor
|
||||
}
|
||||
// func play(){
|
||||
//
|
||||
// let playerItem = AVPlayerItem(asset: videoOriginalAsset)
|
||||
// playerLay.player = AVPlayer(playerItem: playerItem)
|
||||
// playerLay.player!.play()
|
||||
// playerLay.backgroundColor = UIColor.clear.cgColor
|
||||
// }
|
||||
|
||||
|
||||
//MARK: - action
|
||||
@ -347,17 +428,16 @@ class CCSpatialVideoDisplayController: BaseController {
|
||||
}
|
||||
|
||||
|
||||
//click
|
||||
//顶部中间的按钮选中某个选项后的回调click
|
||||
menuView.didSelectMenuBlock = { [weak self](index:Int)->Void in
|
||||
print("block select \(index)")
|
||||
self?.mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
|
||||
self?.selectedSpatialType(selectedIndex: index)
|
||||
|
||||
let title = self?.typeData[index].title
|
||||
self?.mTopCenterTypeButton.setTitle(title, for: UIControl.State.normal)
|
||||
}
|
||||
|
||||
//tap
|
||||
//开始、结束串流的回调
|
||||
menuView.tapFooterActionBlock = {
|
||||
self.startOrEndExternalVR()
|
||||
}
|
||||
@ -371,105 +451,24 @@ class CCSpatialVideoDisplayController: BaseController {
|
||||
|
||||
func startOrEndExternalVR() {
|
||||
//开始串流/结束串流
|
||||
if(link == true){
|
||||
isPlaying = !isPlaying
|
||||
if(isPlaying == true){
|
||||
// 当前已连接到 AirPlay 设备
|
||||
playerLay.player!.usesExternalPlaybackWhileExternalScreenIsActive = true
|
||||
playerLay.player!.allowsExternalPlayback = true
|
||||
}else{
|
||||
playerLay.player!.usesExternalPlaybackWhileExternalScreenIsActive = false
|
||||
playerLay.player!.allowsExternalPlayback = false
|
||||
if(link == true){
|
||||
isPlaying = !isPlaying
|
||||
if(isPlaying == true){
|
||||
// 当前已连接到 AirPlay 设备
|
||||
playerLay!.player!.usesExternalPlaybackWhileExternalScreenIsActive = true
|
||||
playerLay!.player!.allowsExternalPlayback = true
|
||||
}else{
|
||||
playerLay!.player!.usesExternalPlaybackWhileExternalScreenIsActive = false
|
||||
playerLay!.player!.allowsExternalPlayback = false
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func selectedSpatialType(selectedIndex:Int) {
|
||||
|
||||
// if selectedIndex == 0 {
|
||||
// self.selectedIndex = .monocular2D
|
||||
// }else if selectedIndex == 1 {
|
||||
// self.selectedIndex = .redBlueSolid
|
||||
// }else if selectedIndex == 2 {
|
||||
// self.selectedIndex = .crossedEyes
|
||||
// }
|
||||
|
||||
self.selectedIndex = SpatialType(rawValue: selectedIndex) ?? .monocular2D
|
||||
|
||||
player.pause()
|
||||
NotificationCenter.default.removeObserver(self)
|
||||
|
||||
//立体视频
|
||||
if(selectedIndex == 0){
|
||||
videoTempAsset = videoOriginalAsset
|
||||
play()
|
||||
}
|
||||
else{
|
||||
outputVideoURL = URL.documentsDirectory.appending(path:"output11112.mp4")
|
||||
}
|
||||
|
||||
//红蓝立体
|
||||
if(self.selectedIndex == .redBlueSolid){
|
||||
Task {
|
||||
convertor2.type = 3
|
||||
|
||||
try await convertor2.convertVideo(asset: videoOriginalAsset!, outputFile: outputVideoURL! ) { [self] progress in
|
||||
print(progress)
|
||||
DispatchQueue.main.async { [weak self] in
|
||||
self?.progressView.setProgress(progress, animated: true)
|
||||
if(progress > 0.99){
|
||||
self!.videoTempAsset = AVAsset(url: self!.outputVideoURL!)
|
||||
DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
|
||||
// 要执行的任务
|
||||
self!.play()
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//交叉眼
|
||||
if(self.selectedIndex == .crossedEyes){
|
||||
Task {
|
||||
convertor2.type = 2
|
||||
|
||||
try await convertor2.convertVideo(asset: videoOriginalAsset!, outputFile: outputVideoURL! ) { [self] progress in
|
||||
print(progress)
|
||||
DispatchQueue.main.async { [weak self] in
|
||||
self?.progressView.setProgress(progress, animated: true)
|
||||
if(progress > 0.99){
|
||||
self!.videoTempAsset = AVAsset(url: self!.outputVideoURL!)
|
||||
self!.play()
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//平行眼
|
||||
if(self.selectedIndex == .parallelEyes){
|
||||
Task {
|
||||
convertor2.type = 5
|
||||
|
||||
try await convertor2.convertVideo(asset: videoOriginalAsset!, outputFile: outputVideoURL! ) { [self] progress in
|
||||
print(progress)
|
||||
DispatchQueue.main.async { [weak self] in
|
||||
self?.progressView.setProgress(progress, animated: true)
|
||||
if(progress > 0.99){
|
||||
self!.videoTempAsset = AVAsset(url: self!.outputVideoURL!)
|
||||
self!.play()
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func selectedSpatialType(selectedIndex:Int){
|
||||
let rsi : SpatialType = SpatialType(rawValue: selectedIndex) ?? .monocular2D
|
||||
// player!.play()
|
||||
self.selectedIndex = rsi
|
||||
|
||||
|
||||
|
||||
@ -511,29 +510,6 @@ class CCSpatialVideoDisplayController: BaseController {
|
||||
}
|
||||
|
||||
|
||||
|
||||
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
|
||||
|
||||
|
||||
if let mediaType = info[UIImagePickerController.InfoKey.mediaType] as? String, mediaType == "public.movie" {
|
||||
let videoURL = info[.mediaURL] as? URL
|
||||
print("Selected video URL: \(videoURL)")
|
||||
sourceVideoURL = videoURL
|
||||
videoOriginalAsset = AVAsset(url: sourceVideoURL!)
|
||||
videoTempAsset = videoOriginalAsset
|
||||
if(!isSpatialVideo(asset: videoTempAsset!)){
|
||||
showTextAlert(title: "提示", message: "当前视频不是空间视频")
|
||||
}
|
||||
play()
|
||||
}
|
||||
|
||||
dismiss(animated: true, completion: nil)
|
||||
}
|
||||
|
||||
func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
|
||||
dismiss(animated: true, completion: nil)
|
||||
}
|
||||
|
||||
//检查是否为空间视频
|
||||
func isSpatialVideo(asset: AVAsset) -> Bool {
|
||||
let metadata = asset.metadata(forFormat: AVMetadataFormat.quickTimeMetadata)
|
||||
@ -546,23 +522,4 @@ class CCSpatialVideoDisplayController: BaseController {
|
||||
return isSpatialVideo
|
||||
}
|
||||
|
||||
func showTextAlert(title: String, message: String) {
|
||||
let alertController = UIAlertController(title: title, message: message, preferredStyle: .alert)
|
||||
let okAction = UIAlertAction(title: "OK", style: .default, handler: nil)
|
||||
alertController.addAction(okAction)
|
||||
|
||||
// 在视图控制器中显示弹窗
|
||||
present(alertController, animated: true, completion: nil)
|
||||
}
|
||||
|
||||
/*
|
||||
// MARK: - Navigation
|
||||
|
||||
// In a storyboard-based application, you will often want to do a little preparation before navigation
|
||||
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
|
||||
// Get the new view controller using segue.destination.
|
||||
// Pass the selected object to the new view controller.
|
||||
}
|
||||
*/
|
||||
|
||||
}
|
||||
|
||||
@ -0,0 +1,166 @@
|
||||
//
|
||||
// PlayByTransferConvertor.swift
|
||||
// SwiftProject
|
||||
//
|
||||
// Created by aaa on 2024/3/12.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import AVKit
|
||||
import VideoToolbox
|
||||
import CoreImage
|
||||
import ImageIO
|
||||
|
||||
|
||||
|
||||
/// Converts spatial (MV-HEVC) video frames into flat presentations — side-by-side,
/// red/blue anaglyph, parallel- or crossed-eye pairs — so spatial video can be
/// played while being converted frame by frame.
class PlayByTransferConvertor {

    /// Drains sample buffers from `assetOutput` until one whose presentation
    /// timestamp equals `time`, then composes its left/right eye pixel buffers
    /// according to `type`.
    ///
    /// - Parameters:
    ///   - asset: Source asset. Currently unused by the implementation; kept for
    ///     interface stability with existing callers.
    ///   - assetOutput: Track output configured to deliver MV-HEVC tagged buffers.
    ///   - type: Target presentation style.
    ///   - time: Composition time of the frame being rendered.
    /// - Returns: The composed image, or `nil` when no matching frame or eye
    ///   buffers were found.
    /// - NOTE(review): exact `CMTime` equality is fragile; confirm the reader's
    ///   presentation timestamps always match the caller's composition times,
    ///   otherwise frames are silently consumed without producing output.
    func convertVideo(asset: AVAsset, assetOutput: AVAssetReaderTrackOutput, type: SpatialType, time: CMTime) -> (CIImage?) {
        var composed: CIImage? = nil
        while let sampleBuffer = assetOutput.copyNextSampleBuffer() {
            let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            if presentationTime == time {
                guard let taggedBuffers = sampleBuffer.taggedBuffers else { break }

                // Pick the per-eye buffers out of the tagged buffer group.
                let leftEyeBuffer = taggedBuffers.first(where: {
                    $0.tags.first(matchingCategory: .stereoView) == .stereoView(.leftEye)
                })?.buffer
                let rightEyeBuffer = taggedBuffers.first(where: {
                    $0.tags.first(matchingCategory: .stereoView) == .stereoView(.rightEye)
                })?.buffer

                if let leftEyeBuffer,
                   let rightEyeBuffer,
                   case let .pixelBuffer(leftEyePixelBuffer) = leftEyeBuffer,
                   case let .pixelBuffer(rightEyePixelBuffer) = rightEyeBuffer {

                    let lciImage = CIImage(cvPixelBuffer: leftEyePixelBuffer)
                    let rciImage = CIImage(cvPixelBuffer: rightEyePixelBuffer)

                    let left = UIImage(ciImage: lciImage)
                    let right = UIImage(ciImage: rciImage)

                    switch type {
                    case .hsbs:
                        // Half side-by-side: both eyes squeezed into one frame width.
                        composed = joinImages_sbs(left: left, right: right, imgWidth: left.size.width, imgHeight: left.size.height)
                    case .fsbs:
                        // Full side-by-side: output twice the source width.
                        composed = joinImages_sbs(left: left, right: right, imgWidth: left.size.width * 2, imgHeight: left.size.height)
                    case .parallelEyes:
                        composed = joinImages(leftImage: lciImage, rightImage: rciImage)
                    case .crossedEyes:
                        // Crossed-eye viewing swaps the two views.
                        composed = joinImages(leftImage: rciImage, rightImage: lciImage)
                    case .redBlueSolid:
                        composed = joinImages_red_blue(lciImage: lciImage, rciImage: rciImage)
                    default:
                        break
                    }
                }
                break
            }
        }
        return composed
    }

    /// Composes a red/blue anaglyph: the left eye contributes the red channel,
    /// the right eye the blue channel; the two are merged with a screen blend
    /// and lightly sharpened.
    ///
    /// BUGFIX: the original packed a whole 4x5 colour matrix (20 components)
    /// into a single `CIVector` and set it as `inputRVector`/`inputBVector`.
    /// `CIColorMatrix` takes five separate 4-component vectors, so the filter
    /// was misconfigured; each channel vector is now set individually.
    /// Also removed the `lastImg!` force-unwrap, which crashed whenever a
    /// filter produced no output.
    func joinImages_red_blue(lciImage: CIImage, rciImage: CIImage) -> CIImage {
        let zero = CIVector(x: 0, y: 0, z: 0, w: 0)

        // Left eye: keep only the red channel.
        let redFilter = CIFilter(name: "CIColorMatrix")!
        redFilter.setValue(lciImage, forKey: kCIInputImageKey)
        redFilter.setValue(CIVector(x: 1, y: 0, z: 0, w: 0), forKey: "inputRVector")
        redFilter.setValue(zero, forKey: "inputGVector")
        redFilter.setValue(zero, forKey: "inputBVector")

        // Right eye: keep only the blue channel.
        let blueFilter = CIFilter(name: "CIColorMatrix")!
        blueFilter.setValue(rciImage, forKey: kCIInputImageKey)
        blueFilter.setValue(zero, forKey: "inputRVector")
        blueFilter.setValue(zero, forKey: "inputGVector")
        blueFilter.setValue(CIVector(x: 0, y: 0, z: 1, w: 0), forKey: "inputBVector")

        // Merge the two tinted images, then sharpen the result.
        if let redOutputImage = redFilter.outputImage,
           let blueOutputImage = blueFilter.outputImage {
            let compositeFilter = CIFilter(name: "CIScreenBlendMode")!
            compositeFilter.setValue(redOutputImage, forKey: kCIInputImageKey)
            compositeFilter.setValue(blueOutputImage, forKey: kCIInputBackgroundImageKey)

            let sharpenedFilter = CIFilter(name: "CISharpenLuminance")!
            sharpenedFilter.setValue(compositeFilter.outputImage, forKey: kCIInputImageKey)
            sharpenedFilter.setValue(2, forKey: kCIInputSharpnessKey)
            if let sharpened = sharpenedFilter.outputImage {
                return sharpened
            }
        }
        // Fallback instead of crashing when any filter stage fails.
        return lciImage
    }

    /// Joins two images side-by-side (SBS): `left` fills the left half of a
    /// `imgWidth` x `imgHeight` canvas, `right` the right half.
    func joinImages_sbs(left: UIImage, right: UIImage, imgWidth: CGFloat, imgHeight: CGFloat) -> CIImage {
        let canvasSize = CGSize(width: imgWidth, height: imgHeight)
        UIGraphicsBeginImageContextWithOptions(canvasSize, false, 1)
        left.draw(in: CGRect(x: 0, y: 0, width: imgWidth / 2, height: imgHeight))
        right.draw(in: CGRect(x: imgWidth / 2, y: 0, width: imgWidth / 2, height: imgHeight))
        let merged = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext()

        return CIImage(cgImage: merged.cgImage!)
    }

    /// Joins two images over-under (OU): `left` fills the top half of a
    /// `imgWidth` x `imgHeight` canvas, `right` the bottom half.
    func joinImages_ou(left: UIImage, right: UIImage, imgWidth: CGFloat, imgHeight: CGFloat) -> CIImage {
        let canvasSize = CGSize(width: imgWidth, height: imgHeight)
        UIGraphicsBeginImageContextWithOptions(canvasSize, false, 1)
        left.draw(in: CGRect(x: 0, y: 0, width: imgWidth, height: imgHeight / 2))
        right.draw(in: CGRect(x: 0, y: imgHeight / 2, width: imgWidth, height: imgHeight / 2))
        let merged = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext()

        return CIImage(cgImage: merged.cgImage!)
    }

    /// Joins two eye images side-by-side at half the combined width, for
    /// parallel-/crossed-eye free viewing.
    ///
    /// - NOTE(review): the canvas height is the full image height but each eye
    ///   is drawn at only half height, leaving the bottom half of the canvas
    ///   empty — confirm this vertical letterboxing is intentional.
    func joinImages(leftImage: CIImage, rightImage: CIImage) -> CIImage {
        let left = UIImage(ciImage: leftImage)
        let right = UIImage(ciImage: rightImage)

        let pairWidth = left.size.width / 2 + right.size.width / 2
        let eyeHeight = left.size.height / 2

        let canvasSize = CGSize(width: pairWidth, height: left.size.height)
        UIGraphicsBeginImageContextWithOptions(canvasSize, false, 1)
        left.draw(in: CGRect(x: 0, y: 0, width: pairWidth / 2, height: eyeHeight))
        right.draw(in: CGRect(x: pairWidth / 2, y: 0, width: pairWidth / 2, height: eyeHeight))
        let merged = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext()

        return CIImage(cgImage: merged.cgImage!)
    }
}
|
||||
|
||||
@ -75,6 +75,10 @@ public class CCSpatialDisplayTypeView: UIView {
|
||||
|
||||
var tableView:UITableView! = nil
|
||||
|
||||
var isShowing:Bool = false//是否为显示状态
|
||||
var dissMissCallback:()->Void = {
|
||||
print("请初始化回调")
|
||||
}
|
||||
|
||||
lazy var showBottomView: UIView = {
|
||||
var view = UIView()
|
||||
@ -110,9 +114,11 @@ public class CCSpatialDisplayTypeView: UIView {
|
||||
/// - arrow: 箭头位置是popmenu相对整个屏幕的位置
|
||||
/// - datas: 数据源,icon允许传空,数据源没数据,不会显示菜单
|
||||
/// - configure: 配置信息,可不传
|
||||
init(menuWidth:CGFloat,arrow:CGPoint,datas:[(icon:String,title:String,isHiden:Bool)],configures:[CCSpatialDisplayTypeConfigure] = []) {
|
||||
init(menuWidth:CGFloat,arrow:CGPoint,datas:[(icon:String,title:String,isHiden:Bool)],configures:[CCSpatialDisplayTypeConfigure] = [],dissMissCallback:@escaping ()->Void) {
|
||||
super.init(frame: UIScreen.main.bounds)
|
||||
self.frame = UIScreen.main.bounds
|
||||
self.dissMissCallback=dissMissCallback
|
||||
|
||||
//读取配置
|
||||
configures.forEach { (config) in
|
||||
switch (config){
|
||||
@ -158,7 +164,9 @@ public class CCSpatialDisplayTypeView: UIView {
|
||||
//MARK: - 设置数据源
|
||||
func setData(datas:[(icon:String,title:String,isHiden:Bool)]) {
|
||||
popData = datas
|
||||
tableView.reloadData()
|
||||
if(isShowing){
|
||||
tableView.reloadData()
|
||||
}
|
||||
}
|
||||
|
||||
//MARK: - 设置是否展示Footer
|
||||
@ -282,6 +290,7 @@ extension CCSpatialDisplayTypeView{
|
||||
}
|
||||
|
||||
public func show() {
|
||||
isShowing = true
|
||||
if popData.isEmpty{
|
||||
return
|
||||
}
|
||||
@ -290,6 +299,8 @@ extension CCSpatialDisplayTypeView{
|
||||
}
|
||||
|
||||
public func dismiss() {
|
||||
isShowing = false
|
||||
self.dissMissCallback()
|
||||
self.removeFromSuperview()
|
||||
}
|
||||
|
||||
|
||||
@ -494,7 +494,7 @@
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 1;
|
||||
DEVELOPMENT_TEAM = 2RAN5PZH5L;
|
||||
DEVELOPMENT_TEAM = 8DQD6BV6H9;
|
||||
GENERATE_INFOPLIST_FILE = YES;
|
||||
INFOPLIST_FILE = tdvideo/Info.plist;
|
||||
INFOPLIST_KEY_NSCameraUsageDescription = "我们需要访问您的摄像头以拍摄照片和录制视频";
|
||||
@ -534,7 +534,7 @@
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 1;
|
||||
DEVELOPMENT_TEAM = 2RAN5PZH5L;
|
||||
DEVELOPMENT_TEAM = 8DQD6BV6H9;
|
||||
GENERATE_INFOPLIST_FILE = YES;
|
||||
INFOPLIST_FILE = tdvideo/Info.plist;
|
||||
INFOPLIST_KEY_NSCameraUsageDescription = "我们需要访问您的摄像头以拍摄照片和录制视频";
|
||||
|
||||
Binary file not shown.
@ -93,15 +93,15 @@ class PlayContoller8: UIViewController {
|
||||
view.layer.addSublayer(playerLayer!)
|
||||
player?.play()
|
||||
// 循环播放设置
|
||||
player!.actionAtItemEnd = .none
|
||||
// 监听播放结束的通知
|
||||
NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: playerItem, queue: nil) { [self] _ in
|
||||
Task {
|
||||
await loadVideo()
|
||||
}
|
||||
player?.seek(to: .zero)
|
||||
player?.play()
|
||||
}
|
||||
// player!.actionAtItemEnd = .none
|
||||
// // 监听播放结束的通知
|
||||
// NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: playerItem, queue: nil) { [self] _ in
|
||||
// Task {
|
||||
// await loadVideo()
|
||||
// }
|
||||
// player?.seek(to: .zero)
|
||||
// player?.play()
|
||||
// }
|
||||
|
||||
let segmentedControl = UISegmentedControl(items: ["空间视频", "红蓝立体"])
|
||||
segmentedControl.frame = CGRect(x: 20, y: 700, width: 360, height: 45)
|
||||
|
||||
Loading…
Reference in New Issue
Block a user