实现边转边播

This commit is contained in:
bluesea 2024-03-12 19:21:07 +08:00
parent ff75b1ec71
commit 51b3cd596f
8 changed files with 356 additions and 215 deletions

View File

@ -14,6 +14,7 @@
00D33BF42B998BF700604A44 /* SpatialImageConvertor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00D33BF32B998BF700604A44 /* SpatialImageConvertor.swift */; };
00D33BF62B99A19900604A44 /* SpatialVideoConvertor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00D33BF52B99A19900604A44 /* SpatialVideoConvertor.swift */; };
00D33BFA2B9AB21A00604A44 /* ZZHAVExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00D33BF92B9AB21A00604A44 /* ZZHAVExtension.swift */; };
00ED6B342BA04AC200915BDE /* PlayByTransferConvertor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00ED6B332BA04AC200915BDE /* PlayByTransferConvertor.swift */; };
04E1D3F12B68EDFE00743F2F /* CCWebController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 04E1D3F02B68EDFE00743F2F /* CCWebController.swift */; };
1E02C9322B8990C600DD3143 /* CCDeviceOperationListView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E02C9312B8990C600DD3143 /* CCDeviceOperationListView.swift */; };
1E02C9342B89916C00DD3143 /* CCDeviceOperationListCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1E02C9332B89916C00DD3143 /* CCDeviceOperationListCell.swift */; };
@ -99,6 +100,7 @@
00D33BF32B998BF700604A44 /* SpatialImageConvertor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpatialImageConvertor.swift; sourceTree = "<group>"; };
00D33BF52B99A19900604A44 /* SpatialVideoConvertor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpatialVideoConvertor.swift; sourceTree = "<group>"; };
00D33BF92B9AB21A00604A44 /* ZZHAVExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHAVExtension.swift; sourceTree = "<group>"; };
00ED6B332BA04AC200915BDE /* PlayByTransferConvertor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayByTransferConvertor.swift; sourceTree = "<group>"; };
04E1D3F02B68EDFE00743F2F /* CCWebController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCWebController.swift; sourceTree = "<group>"; };
1E02C9312B8990C600DD3143 /* CCDeviceOperationListView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCDeviceOperationListView.swift; sourceTree = "<group>"; };
1E02C9332B89916C00DD3143 /* CCDeviceOperationListCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCDeviceOperationListCell.swift; sourceTree = "<group>"; };
@ -234,6 +236,7 @@
00D33BF92B9AB21A00604A44 /* ZZHAVExtension.swift */,
1EE5C5F92B8F97BF00EDFC2F /* SpatialVideoWriter.swift */,
005580772B9F1525004B9567 /* ZZHHelper.swift */,
00ED6B332BA04AC200915BDE /* PlayByTransferConvertor.swift */,
);
path = Util;
sourceTree = "<group>";
@ -673,6 +676,7 @@
AF2120DA2B4E9BD400400B7F /* CCAlert.swift in Sources */,
1EFB8C702B88DA4800C72119 /* CCBottomMenuCell.swift in Sources */,
AF2120FA2B4EA5BD00400B7F /* CCHomeController.swift in Sources */,
00ED6B342BA04AC200915BDE /* PlayByTransferConvertor.swift in Sources */,
AF2121072B4EA63000400B7F /* CCHomeData.swift in Sources */,
AF2120FE2B4EA5F100400B7F /* CCLoginController.swift in Sources */,
AF2120CE2B4E979500400B7F /* CCTextField.swift in Sources */,

View File

@ -131,7 +131,10 @@ class CCSpatialPhotoDisplayController: BaseController {
//init (testarrow)
let pointOnScreen = navtionImgView!.convert(CGPointMake(navtionImgView!.centerX, navtionImgView!.bottom), to: KWindow)
let popMenu = CCSpatialDisplayTypeView(menuWidth: SCREEN_Width * 0.4, arrow: pointOnScreen, datas: typeData,configures: parameters)
let popMenu = CCSpatialDisplayTypeView(menuWidth: SCREEN_Width * 0.4, arrow: pointOnScreen, datas: typeData,configures: parameters){
//dissmiss
self.mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
}
return popMenu
}()

View File

@ -8,8 +8,8 @@
import UIKit
import AVFoundation
import Photos
import AVKit
import VideoToolbox
enum SpatialType : Int {
/*
@ -24,32 +24,70 @@ enum SpatialType : Int {
case parallelEyes
case redBlueSolid
case crossedEyes
//使
case hsbs
case fsbs
}
class CCSpatialVideoDisplayController: BaseController {
var isAirPlayActive:Bool = false {
didSet{
//selectedIndextypeData
if(isAirPlayActive){
self.selectedIndex = .hsbs//hsbs
self.typeData = [(icon:"type_check",title:"3D HSBS",isHiden:false),
(icon:"type_check",title:"3D FSBS",isHiden:false),
]
}
else{
self.selectedIndex = .monocular2D//monocular2D
self.typeData = [(icon:"type_check",title:"单眼2D",isHiden:false),
(icon:"type_check",title:"平行眼",isHiden:false),
(icon:"type_check",title:"红蓝立体",isHiden:false),
(icon:"type_check",title:"交叉眼",isHiden:false)]
}
//UI
setttinisScreenMirroring(isScreenMirroring: isAirPlayActive)
}
}
var assetReader:AVAssetReader?
var assetOutput:AVAssetReaderTrackOutput?
var link = false//
var isPlaying = false//
var selectedIndex:SpatialType = .monocular2D//
{
didSet{
//
}
}
let convertor2 = VideoConvertor2()
let videoTranserConvertor = PlayByTransferConvertor()
lazy var videoOriginalAsset:AVAsset = {
let asset = AVAsset(url: sourceVideoURL!)
return asset
}()
var videoOriginalAsset:AVAsset?
var videoTempAsset:AVAsset?
var videoOriginalPHAsset:PHAsset?
//
var sourceVideoURL:URL?
var outputVideoURL:URL?
let outputVideoURL:URL = URL.documentsDirectory.appending(path:"output1111.mp4")
var imgData:Data?
//
var type = 0
var player:AVPlayer = AVPlayer()
var player:AVPlayer?
var playerLay:AVPlayerLayer?
lazy var mTopImgView:UIImageView = {
@ -102,26 +140,19 @@ class CCSpatialVideoDisplayController: BaseController {
return button
}()
lazy var playerLay:AVPlayerLayer = {
let view = AVPlayerLayer()
view.backgroundColor = UIColor.black.cgColor
view.frame = CGRect.init(x: 0, y: 250, width: self.view.frame.size.width, height: 240)
return view
}()
var typeData:[(icon:String,title:String,isHiden:Bool)] = [(icon:"type_check",title:"单眼2D",isHiden:false),
(icon:"type_check",title:"平行眼",isHiden:false),
(icon:"type_check",title:"红蓝立体",isHiden:false),
(icon:"type_check",title:"交叉眼",isHiden:false)]
{
didSet {
menuView.setData(datas: self.typeData)
}
}
lazy var menuView: CCSpatialDisplayTypeView = {
//icon
// let popData = [(icon:"type_check",title:"2D",isHiden:false),
// (icon:"type_check",title:"",isHiden:false),
// (icon:"type_check",title:"",isHiden:false),
// (icon:"type_check",title:"",isHiden:false)]
//
let parameters:[CCSpatialDisplayTypeConfigure] = [
.PopMenuTextColor(UIColor.white),
@ -133,7 +164,10 @@ class CCSpatialVideoDisplayController: BaseController {
//init (testarrow)
let pointOnScreen = navtionImgView!.convert(CGPointMake(navtionImgView!.centerX, navtionImgView!.bottom), to: KWindow)
let popMenu = CCSpatialDisplayTypeView(menuWidth: SCREEN_Width * 0.4, arrow: pointOnScreen, datas: typeData,configures: parameters)
let popMenu = CCSpatialDisplayTypeView(menuWidth: SCREEN_Width * 0.4, arrow: pointOnScreen, datas: typeData,configures: parameters){
//dissmiss
self.mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
}
return popMenu
}()
@ -171,15 +205,14 @@ class CCSpatialVideoDisplayController: BaseController {
override func viewDidLoad() {
super.viewDidLoad()
Task {
await self.loadVideoAssetReader()
}
let playerItem = self.getPlayerItem()
player = AVPlayer(playerItem: playerItem)
self.view.backgroundColor = UIColor(hexString: "#060507")
// Do any additional setup after loading the view.
// let path = Bundle.main.path(forResource: "img3", ofType: "HEIC")
// photoOriginalURL = URL.init(filePath: path!)
outputVideoURL = URL.documentsDirectory.appending(path:"output11114.jpg")
//
self.setLeftOneBtnImg(imgStr: "spatial_back_button")
@ -193,25 +226,77 @@ class CCSpatialVideoDisplayController: BaseController {
navtionBar?.addSubview(transformButton)
navtionBar?.addSubview(mTopCenterTypeButton)
self.view.addSubview(progressView)
self.view.layer.addSublayer(playerLay)
playerLay = AVPlayerLayer(player: self.player)
playerLay!.backgroundColor = UIColor.clear.cgColor
playerLay!.frame = CGRect.init(x: 0, y: 250, width: self.view.frame.size.width, height: 240)
self.view.layer.addSublayer(playerLay!)
self.view.addSubview(tipsButton)
if sourceVideoURL != nil {
outputVideoURL = URL.documentsDirectory.appending(path:"output1111.mp4")
videoOriginalAsset = AVAsset(url: sourceVideoURL!)
videoTempAsset = videoOriginalAsset
play()
}else{
print("这不是一张空间图片")
}
// AirPlay
NotificationCenter.default.addObserver(self, selector: #selector(airPlayStatusDidChange(_:)), name: AVAudioSession.routeChangeNotification, object: nil)
self.player!.play()
}
func getPlayerItem() -> AVPlayerItem {
let temItem = AVPlayerItem(asset: self.videoOriginalAsset)
//
temItem.videoComposition = AVVideoComposition(asset: temItem.asset) { [self] request in
let compositionTime = request.compositionTime
var ciImg:CIImage? = nil
switch self.selectedIndex {
case .crossedEyes://
ciImg = videoTranserConvertor.convertVideo(asset: videoOriginalAsset, assetOutput: self.assetOutput!, type: self.selectedIndex, time: compositionTime)
break
case .fsbs:
ciImg = videoTranserConvertor.convertVideo(asset: videoOriginalAsset, assetOutput: self.assetOutput!, type: self.selectedIndex, time: compositionTime)
break
case .hsbs:
ciImg = videoTranserConvertor.convertVideo(asset: videoOriginalAsset, assetOutput: self.assetOutput!, type: self.selectedIndex, time: compositionTime)
break
case .parallelEyes://
ciImg = videoTranserConvertor.convertVideo(asset: videoOriginalAsset, assetOutput: self.assetOutput!, type: self.selectedIndex, time: compositionTime)
break
case .monocular2D:
ciImg = request.sourceImage
break
case .redBlueSolid://
ciImg = videoTranserConvertor.convertVideo(asset: videoOriginalAsset, assetOutput: self.assetOutput!, type: self.selectedIndex, time: compositionTime)
break
}
// if let ciImg {
request.finish(with: ciImg!, context: nil)
// }
}
return temItem
}
//assetReaderoutput
func loadVideoAssetReader() async {
do {
if(assetReader != nil && assetReader!.status == .reading){
assetReader?.cancelReading()
}
assetReader = try AVAssetReader(asset: self.videoOriginalAsset)
assetOutput = try await AVAssetReaderTrackOutput(
track: self.videoOriginalAsset.loadTracks(withMediaType: .video).first!,
outputSettings: [
AVVideoDecompressionPropertiesKey: [
kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
],
]
)
assetReader!.timeRange = CMTimeRange(start: .zero, duration: .positiveInfinity)
assetReader!.add(assetOutput!)
assetReader!.startReading()
} catch {
print("Error loading video: \(error)")
}
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
@ -227,12 +312,10 @@ class CCSpatialVideoDisplayController: BaseController {
private func checkAirPlayStatus() {
print("设备连接变化")
let currentRoute = AVAudioSession.sharedInstance().currentRoute
let isAirPlayActive = currentRoute.outputs.contains { output in
self.isAirPlayActive = currentRoute.outputs.contains { output in
return output.portType == AVAudioSession.Port.HDMI ||
output.portType == AVAudioSession.Port.airPlay
}
setttinisScreenMirroring(isScreenMirroring: isAirPlayActive)
}
func setttinisScreenMirroring(isScreenMirroring:Bool){
@ -242,8 +325,8 @@ class CCSpatialVideoDisplayController: BaseController {
print("已连接")
link = true
isPlaying = true
playerLay.player!.usesExternalPlaybackWhileExternalScreenIsActive = true
playerLay.player!.allowsExternalPlayback = true
playerLay!.player!.usesExternalPlaybackWhileExternalScreenIsActive = true
playerLay!.player!.allowsExternalPlayback = true
//
// mTopCenterTypeButton.setTitle("", for: UIControl.State.normal)
@ -254,8 +337,6 @@ class CCSpatialVideoDisplayController: BaseController {
mTopCenterTypeButton.backgroundColor = UIColor(hexString: "#5326D6")
mTopCenterTypeButton.updateBtnEdgeInsets(style: .Left, space: 5)
playerLay.player!.play()
//
if isPlaying {
@ -275,8 +356,8 @@ class CCSpatialVideoDisplayController: BaseController {
link = false
isPlaying = false
// AirPlay
playerLay.player!.usesExternalPlaybackWhileExternalScreenIsActive = false
playerLay.player!.allowsExternalPlayback = false
playerLay!.player!.usesExternalPlaybackWhileExternalScreenIsActive = false
playerLay!.player!.allowsExternalPlayback = false
if self.selectedIndex == .monocular2D {
mTopCenterTypeButton.setTitle("单眼2D", for: UIControl.State.normal)
@ -307,13 +388,13 @@ class CCSpatialVideoDisplayController: BaseController {
//
func play(){
let playerItem = AVPlayerItem(asset: videoTempAsset!)
playerLay.player = AVPlayer(playerItem: playerItem)
playerLay.player!.play()
playerLay.backgroundColor = UIColor.clear.cgColor
}
// func play(){
//
// let playerItem = AVPlayerItem(asset: videoOriginalAsset)
// playerLay.player = AVPlayer(playerItem: playerItem)
// playerLay.player!.play()
// playerLay.backgroundColor = UIColor.clear.cgColor
// }
//MARK: - action
@ -347,17 +428,16 @@ class CCSpatialVideoDisplayController: BaseController {
}
//click
//click
menuView.didSelectMenuBlock = { [weak self](index:Int)->Void in
print("block select \(index)")
self?.mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
self?.selectedSpatialType(selectedIndex: index)
let title = self?.typeData[index].title
self?.mTopCenterTypeButton.setTitle(title, for: UIControl.State.normal)
}
//tap
//
menuView.tapFooterActionBlock = {
self.startOrEndExternalVR()
}
@ -375,101 +455,20 @@ class CCSpatialVideoDisplayController: BaseController {
isPlaying = !isPlaying
if(isPlaying == true){
// AirPlay
playerLay.player!.usesExternalPlaybackWhileExternalScreenIsActive = true
playerLay.player!.allowsExternalPlayback = true
playerLay!.player!.usesExternalPlaybackWhileExternalScreenIsActive = true
playerLay!.player!.allowsExternalPlayback = true
}else{
playerLay.player!.usesExternalPlaybackWhileExternalScreenIsActive = false
playerLay.player!.allowsExternalPlayback = false
}
}
}
func selectedSpatialType(selectedIndex:Int) {
// if selectedIndex == 0 {
// self.selectedIndex = .monocular2D
// }else if selectedIndex == 1 {
// self.selectedIndex = .redBlueSolid
// }else if selectedIndex == 2 {
// self.selectedIndex = .crossedEyes
// }
self.selectedIndex = SpatialType(rawValue: selectedIndex) ?? .monocular2D
player.pause()
NotificationCenter.default.removeObserver(self)
//
if(selectedIndex == 0){
videoTempAsset = videoOriginalAsset
play()
}
else{
outputVideoURL = URL.documentsDirectory.appending(path:"output11112.mp4")
}
//
if(self.selectedIndex == .redBlueSolid){
Task {
convertor2.type = 3
try await convertor2.convertVideo(asset: videoOriginalAsset!, outputFile: outputVideoURL! ) { [self] progress in
print(progress)
DispatchQueue.main.async { [weak self] in
self?.progressView.setProgress(progress, animated: true)
if(progress > 0.99){
self!.videoTempAsset = AVAsset(url: self!.outputVideoURL!)
DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
//
self!.play()
}
}
}
}
}
}
//
if(self.selectedIndex == .crossedEyes){
Task {
convertor2.type = 2
try await convertor2.convertVideo(asset: videoOriginalAsset!, outputFile: outputVideoURL! ) { [self] progress in
print(progress)
DispatchQueue.main.async { [weak self] in
self?.progressView.setProgress(progress, animated: true)
if(progress > 0.99){
self!.videoTempAsset = AVAsset(url: self!.outputVideoURL!)
self!.play()
}
}
}
}
}
//
if(self.selectedIndex == .parallelEyes){
Task {
convertor2.type = 5
try await convertor2.convertVideo(asset: videoOriginalAsset!, outputFile: outputVideoURL! ) { [self] progress in
print(progress)
DispatchQueue.main.async { [weak self] in
self?.progressView.setProgress(progress, animated: true)
if(progress > 0.99){
self!.videoTempAsset = AVAsset(url: self!.outputVideoURL!)
self!.play()
}
}
playerLay!.player!.usesExternalPlaybackWhileExternalScreenIsActive = false
playerLay!.player!.allowsExternalPlayback = false
}
}
}
func selectedSpatialType(selectedIndex:Int){
let rsi : SpatialType = SpatialType(rawValue: selectedIndex) ?? .monocular2D
// player!.play()
self.selectedIndex = rsi
@ -511,29 +510,6 @@ class CCSpatialVideoDisplayController: BaseController {
}
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
if let mediaType = info[UIImagePickerController.InfoKey.mediaType] as? String, mediaType == "public.movie" {
let videoURL = info[.mediaURL] as? URL
print("Selected video URL: \(videoURL)")
sourceVideoURL = videoURL
videoOriginalAsset = AVAsset(url: sourceVideoURL!)
videoTempAsset = videoOriginalAsset
if(!isSpatialVideo(asset: videoTempAsset!)){
showTextAlert(title: "提示", message: "当前视频不是空间视频")
}
play()
}
dismiss(animated: true, completion: nil)
}
func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
dismiss(animated: true, completion: nil)
}
//
func isSpatialVideo(asset: AVAsset) -> Bool {
let metadata = asset.metadata(forFormat: AVMetadataFormat.quickTimeMetadata)
@ -546,23 +522,4 @@ class CCSpatialVideoDisplayController: BaseController {
return isSpatialVideo
}
func showTextAlert(title: String, message: String) {
let alertController = UIAlertController(title: title, message: message, preferredStyle: .alert)
let okAction = UIAlertAction(title: "OK", style: .default, handler: nil)
alertController.addAction(okAction)
//
present(alertController, animated: true, completion: nil)
}
/*
// MARK: - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// Get the new view controller using segue.destination.
// Pass the selected object to the new view controller.
}
*/
}

View File

@ -0,0 +1,166 @@
//
// PlayByTransferConvertor.swift
// SwiftProject
//
// Created by aaa on 2024/3/12.
//
import Foundation
import AVKit
import VideoToolbox
import CoreImage
import ImageIO
/// Converts individual frames of an MV-HEVC spatial video on the fly ("transfer
/// while playing"): pulls the left/right eye pixel buffers for a given frame time
/// and composites them into a single 2D `CIImage` according to the requested
/// `SpatialType` (side-by-side, anaglyph, parallel/crossed eyes, …).
class PlayByTransferConvertor {
    /// Returns the composited frame for the sample whose presentation timestamp
    /// equals `time`, or `nil` when no matching/usable sample is found.
    ///
    /// - Parameters:
    ///   - asset: the source asset (not read directly here; kept for interface parity).
    ///   - assetOutput: a started `AVAssetReaderTrackOutput` configured to deliver
    ///     MV-HEVC tagged buffers (both eye layers).
    ///   - type: the target display mode controlling how the two eyes are combined.
    ///   - time: the composition time of the frame being requested.
    /// - Returns: the converted image, or `nil` for `.monocular2D` / no match.
    ///
    /// NOTE(review): this drains `assetOutput` destructively — every buffer with a
    /// non-matching timestamp is consumed and discarded, and the loop relies on an
    /// EXACT `CMTime` equality with the requested time. If the composition time never
    /// compares equal to a sample's PTS, the reader is exhausted and `nil` is
    /// returned. Confirm the caller requests strictly increasing, frame-aligned times.
    func convertVideo(asset:AVAsset, assetOutput:AVAssetReaderTrackOutput,type:SpatialType,time: CMTime)->(CIImage?) {
        var newpb:CIImage? = nil
        while let nextSampleBuffer = assetOutput.copyNextSampleBuffer() {
            let presentationTime = CMSampleBufferGetPresentationTimeStamp(nextSampleBuffer)
            if presentationTime == time {
                // Spatial (MV-HEVC) samples carry the two eye views as tagged buffers.
                guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { break }
                let leftEyeBuffer = taggedBuffers.first(where: {
                    $0.tags.first(matchingCategory: .stereoView) == .stereoView(.leftEye)
                })?.buffer
                let rightEyeBuffer = taggedBuffers.first(where: {
                    $0.tags.first(matchingCategory: .stereoView) == .stereoView(.rightEye)
                })?.buffer
                if let leftEyeBuffer,
                   let rightEyeBuffer,
                   case let .pixelBuffer(leftEyePixelBuffer) = leftEyeBuffer,
                   case let .pixelBuffer(rightEyePixelBuffer) = rightEyeBuffer {
                    let lciImage = CIImage(cvPixelBuffer: leftEyePixelBuffer)
                    let rciImage = CIImage(cvPixelBuffer: rightEyePixelBuffer)
                    let left = UIImage(ciImage: lciImage )
                    let right = UIImage(ciImage: rciImage )
                    var cwidth:CGFloat
                    var cheight:CGFloat
                    switch type {
                    case .hsbs:
                        // Half side-by-side: both eyes squeezed into the original frame width.
                        cwidth = left.size.width
                        cheight = left.size.height
                        newpb = joinImages_sbs(left: left, right: right, imgWidth: cwidth, imgHeight:cheight )
                        break
                    case .fsbs:
                        // Full side-by-side: output is twice the single-eye width.
                        cwidth = left.size.width*2
                        cheight = left.size.height
                        newpb = joinImages_sbs(left: left, right: right, imgWidth: cwidth, imgHeight: cheight)
                        break
                    case .parallelEyes:
                        // Parallel free-viewing: left eye on the left, right eye on the right.
                        newpb = joinImages(leftImage: lciImage, rightImage: rciImage)
                        break
                    case .crossedEyes:
                        // Cross-eyed free-viewing: eye images swapped.
                        newpb = joinImages(leftImage: rciImage, rightImage: lciImage)
                        break
                    case .redBlueSolid:
                        // Red/blue anaglyph composite of the two eye images.
                        newpb = joinImages_red_blue(lciImage: lciImage, rciImage: rciImage)
                        break
                    default:
                        // .monocular2D (and any future case) falls through: newpb stays nil
                        // and the caller is expected to use the source frame directly.
                        break
                    }
                }
                break
            }
        }
        return newpb
    }
    /// Builds a red/blue anaglyph by channel-filtering each eye with `CIColorMatrix`
    /// and blending the results with a screen blend, then sharpening.
    ///
    /// NOTE(review): `CIColorMatrix`'s `inputRVector`/`inputBVector` are 4-component
    /// vectors, but 20-element `CIVector`s are passed here — confirm CoreImage uses
    /// only the first 4 components and that this produces the intended channels.
    /// NOTE(review): `redColorMatrix` (applied to the LEFT eye) actually preserves
    /// the BLUE component, and `blueColorMatrix` (applied to the RIGHT eye) preserves
    /// RED — the names and channel mapping look swapped; verify against the expected
    /// anaglyph convention (left=red, right=cyan/blue).
    /// NOTE(review): returns `lastImg!` — crashes if either filter yields no output.
    func joinImages_red_blue(lciImage:CIImage,rciImage:CIImage) -> CIImage {
        // 4x5-style matrix rows: R, G, B, A (last column is a bias term).
        let redColorMatrix: [CGFloat] = [
            0.0, 0.0, 0.0, 0.0, 0.0, // red row zeroed
            0.0, 0.0, 0.0, 0.0, 0.0, // green row zeroed
            0.0, 0.0, 1.0, 0.0, 0.0, // blue passes through
            0.0, 0.0, 0.0, 1.0, 0.0 // alpha passes through
        ]
        let blueColorMatrix: [CGFloat] = [
            1.0, 0.0, 0.0, 0.0, 0.0, // red passes through
            0.0, 0.0, 0.0, 0.0, 0.0, // green row zeroed
            0.0, 0.0, 0.0, 0.0, 0.0, // blue row zeroed
            0.0, 0.0, 0.0, 1.0, 0.0 // alpha passes through
        ]
        let redFilter = CIFilter(name: "CIColorMatrix")!
        redFilter.setValue(lciImage, forKey: kCIInputImageKey)
        redFilter.setValue(CIVector(values: redColorMatrix, count: redColorMatrix.count), forKey: "inputRVector")
        let blueFilter = CIFilter(name: "CIColorMatrix")!
        blueFilter.setValue(rciImage, forKey: kCIInputImageKey)
        blueFilter.setValue(CIVector(values: blueColorMatrix, count: blueColorMatrix.count), forKey: "inputBVector")
        var lastImg:CIImage? = nil
        // Blend the two channel-filtered eyes, then sharpen the composite.
        if let redOutputImage = redFilter.outputImage,
           let blueOutputImage = blueFilter.outputImage {
            let compositeFilter = CIFilter(name: "CIScreenBlendMode")!
            compositeFilter.setValue(redOutputImage, forKey: kCIInputImageKey)
            compositeFilter.setValue(blueOutputImage, forKey: kCIInputBackgroundImageKey)
            let sharpenedFilter = CIFilter(name: "CISharpenLuminance")!
            sharpenedFilter.setValue(compositeFilter.outputImage, forKey: kCIInputImageKey)
            sharpenedFilter.setValue(2, forKey: kCIInputSharpnessKey)
            lastImg = sharpenedFilter.outputImage!
        }
        return lastImg!
    }
    /// Side-by-side (SBS) composite: left eye drawn into the left half of an
    /// `imgWidth` x `imgHeight` canvas, right eye into the right half.
    /// NOTE(review): force-unwraps the graphics context image and its `cgImage`.
    func joinImages_sbs( left:UIImage, right:UIImage,imgWidth:CGFloat,imgHeight:CGFloat) -> CIImage {
        let newImageSize = CGSize(width:imgWidth, height: imgHeight);
        // scale 1 keeps output pixels 1:1 with the requested size.
        UIGraphicsBeginImageContextWithOptions(newImageSize, false, 1);
        left.draw(in: CGRect(x:0, y:0, width:imgWidth/2, height:imgHeight))
        right.draw(in: CGRect(x:imgWidth/2, y:0, width:imgWidth/2, height:imgHeight))
        let image = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext();
        let ci = CIImage(cgImage: image.cgImage!)
        return ci
    }
    /// Over/under (OU) composite: left eye in the top half, right eye in the bottom
    /// half of an `imgWidth` x `imgHeight` canvas. (Currently unused by convertVideo.)
    func joinImages_ou( left:UIImage, right:UIImage,imgWidth:CGFloat,imgHeight:CGFloat) -> CIImage {
        let newImageSize = CGSize(width:imgWidth, height: imgHeight);
        UIGraphicsBeginImageContextWithOptions(newImageSize, false, 1);
        left.draw(in: CGRect(x:0, y:0, width:imgWidth, height:imgHeight/2))
        right.draw(in: CGRect(x:0, y:imgHeight/2, width:imgWidth, height:imgHeight/2))
        let image = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext();
        let ci = CIImage(cgImage: image.cgImage!)
        return ci
    }
    /// Free-viewing composite: both eyes at half width, placed side by side.
    ///
    /// NOTE(review): the canvas is `imageWidth` wide but `left.size.height` tall,
    /// while each eye is drawn only `imageHeight` (= half height) tall — the bottom
    /// half of the canvas is left blank. Confirm whether the half-height draw is
    /// intentional (aspect squeeze) or a bug.
    func joinImages( leftImage:CIImage, rightImage:CIImage) -> CIImage {
        let left = UIImage(ciImage: leftImage )
        let right = UIImage(ciImage: rightImage )
        let imageWidth = left.size.width/2 + right.size.width/2
        let imageHeight = left.size.height/2
        let newImageSize = CGSize(width:imageWidth, height: left.size.height);
        UIGraphicsBeginImageContextWithOptions(newImageSize, false, 1);
        left.draw(in: CGRect(x:0, y:0, width:imageWidth/2, height:imageHeight))
        right.draw(in: CGRect(x:imageWidth/2, y:0, width:imageWidth/2, height:imageHeight))
        let image = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext();
        let ci = CIImage(cgImage: image.cgImage!)
        return ci
    }
}

View File

@ -75,6 +75,10 @@ public class CCSpatialDisplayTypeView: UIView {
var tableView:UITableView! = nil
var isShowing:Bool = false//
var dissMissCallback:()->Void = {
print("请初始化回调")
}
lazy var showBottomView: UIView = {
var view = UIView()
@ -110,9 +114,11 @@ public class CCSpatialDisplayTypeView: UIView {
/// - arrow: popmenu
/// - datas: icon
/// - configure:
init(menuWidth:CGFloat,arrow:CGPoint,datas:[(icon:String,title:String,isHiden:Bool)],configures:[CCSpatialDisplayTypeConfigure] = []) {
init(menuWidth:CGFloat,arrow:CGPoint,datas:[(icon:String,title:String,isHiden:Bool)],configures:[CCSpatialDisplayTypeConfigure] = [],dissMissCallback:@escaping ()->Void) {
super.init(frame: UIScreen.main.bounds)
self.frame = UIScreen.main.bounds
self.dissMissCallback=dissMissCallback
//
configures.forEach { (config) in
switch (config){
@ -158,8 +164,10 @@ public class CCSpatialDisplayTypeView: UIView {
//MARK: -
func setData(datas:[(icon:String,title:String,isHiden:Bool)]) {
popData = datas
if(isShowing){
tableView.reloadData()
}
}
//MARK: - Footer
func showFooterView(isShow:Bool,showText:String) {
@ -282,6 +290,7 @@ extension CCSpatialDisplayTypeView{
}
public func show() {
isShowing = true
if popData.isEmpty{
return
}
@ -290,6 +299,8 @@ extension CCSpatialDisplayTypeView{
}
public func dismiss() {
isShowing = false
self.dissMissCallback()
self.removeFromSuperview()
}

View File

@ -494,7 +494,7 @@
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 2RAN5PZH5L;
DEVELOPMENT_TEAM = 8DQD6BV6H9;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = tdvideo/Info.plist;
INFOPLIST_KEY_NSCameraUsageDescription = "我们需要访问您的摄像头以拍摄照片和录制视频";
@ -534,7 +534,7 @@
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 2RAN5PZH5L;
DEVELOPMENT_TEAM = 8DQD6BV6H9;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = tdvideo/Info.plist;
INFOPLIST_KEY_NSCameraUsageDescription = "我们需要访问您的摄像头以拍摄照片和录制视频";

View File

@ -93,15 +93,15 @@ class PlayContoller8: UIViewController {
view.layer.addSublayer(playerLayer!)
player?.play()
//
player!.actionAtItemEnd = .none
//
NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: playerItem, queue: nil) { [self] _ in
Task {
await loadVideo()
}
player?.seek(to: .zero)
player?.play()
}
// player!.actionAtItemEnd = .none
// //
// NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: playerItem, queue: nil) { [self] _ in
// Task {
// await loadVideo()
// }
// player?.seek(to: .zero)
// player?.play()
// }
let segmentedControl = UISegmentedControl(items: ["空间视频", "红蓝立体"])
segmentedControl.frame = CGRect(x: 20, y: 700, width: 360, height: 45)