问题主要在线程处理的地方

This commit is contained in:
bluesea 2024-04-19 19:12:40 +08:00
parent c228bc37ee
commit 0270796016
9 changed files with 421 additions and 177 deletions

View File

@ -19,6 +19,8 @@
006B61D32BBAA938003FCB49 /* StoreKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 006B61D22BBAA938003FCB49 /* StoreKit.framework */; };
006B61DC2BBCFAC4003FCB49 /* CustomSheetController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 006B61DB2BBCFAC4003FCB49 /* CustomSheetController.swift */; };
006B61DE2BBCFB45003FCB49 /* CustomSheetCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = 006B61DD2BBCFB45003FCB49 /* CustomSheetCell.swift */; };
0072361F2BD13B9D000595A9 /* ZZHCustomVideoCompositor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0072361E2BD13B9D000595A9 /* ZZHCustomVideoCompositor.swift */; };
007236212BD13C75000595A9 /* ZZHCustomVideoCompositionInstruction.swift in Sources */ = {isa = PBXBuildFile; fileRef = 007236202BD13C75000595A9 /* ZZHCustomVideoCompositionInstruction.swift */; };
0073BD142BCE80F700721885 /* ZZHCustomPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0073BD132BCE80F700721885 /* ZZHCustomPlayer.swift */; };
0073BD182BCF7B3400721885 /* ZZHCustomSlider.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0073BD172BCF7B3400721885 /* ZZHCustomSlider.swift */; };
0073BD1A2BCFC8E800721885 /* ZZHCustomPlayerForVideoTask.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0073BD192BCFC8E800721885 /* ZZHCustomPlayerForVideoTask.swift */; };
@ -128,6 +130,8 @@
006B61D22BBAA938003FCB49 /* StoreKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = StoreKit.framework; path = System/Library/Frameworks/StoreKit.framework; sourceTree = SDKROOT; };
006B61DB2BBCFAC4003FCB49 /* CustomSheetController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomSheetController.swift; sourceTree = "<group>"; };
006B61DD2BBCFB45003FCB49 /* CustomSheetCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomSheetCell.swift; sourceTree = "<group>"; };
0072361E2BD13B9D000595A9 /* ZZHCustomVideoCompositor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHCustomVideoCompositor.swift; sourceTree = "<group>"; };
007236202BD13C75000595A9 /* ZZHCustomVideoCompositionInstruction.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHCustomVideoCompositionInstruction.swift; sourceTree = "<group>"; };
0073BD132BCE80F700721885 /* ZZHCustomPlayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHCustomPlayer.swift; sourceTree = "<group>"; };
0073BD172BCF7B3400721885 /* ZZHCustomSlider.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHCustomSlider.swift; sourceTree = "<group>"; };
0073BD192BCFC8E800721885 /* ZZHCustomPlayerForVideoTask.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ZZHCustomPlayerForVideoTask.swift; sourceTree = "<group>"; };
@ -314,6 +318,8 @@
0073BD132BCE80F700721885 /* ZZHCustomPlayer.swift */,
0073BD192BCFC8E800721885 /* ZZHCustomPlayerForVideoTask.swift */,
0073BD172BCF7B3400721885 /* ZZHCustomSlider.swift */,
0072361E2BD13B9D000595A9 /* ZZHCustomVideoCompositor.swift */,
007236202BD13C75000595A9 /* ZZHCustomVideoCompositionInstruction.swift */,
);
path = CCSpatialVideoDisplayController;
sourceTree = "<group>";
@ -816,6 +822,7 @@
009DFB0E2BC8CFA2007B56E8 /* FeedbackView.swift in Sources */,
AF2120C42B4E95DA00400B7F /* UIImage+Add.swift in Sources */,
1EFAF0C02B8B7A59002A1773 /* VRPhotoTransformController.swift in Sources */,
0072361F2BD13B9D000595A9 /* ZZHCustomVideoCompositor.swift in Sources */,
AF2120D82B4E9AC500400B7F /* CCAddImageView.swift in Sources */,
00D33BF42B998BF700604A44 /* SpatialImageConvertor.swift in Sources */,
006B61CA2BBA4B0D003FCB49 /* MembershipVC.swift in Sources */,
@ -852,6 +859,7 @@
AF2120E62B4E9DE000400B7F /* CCTableSwitchView.swift in Sources */,
0096624D2BB3BA3B00FCA65F /* ZZHExternalViewController.swift in Sources */,
00D33BFA2B9AB21A00604A44 /* ZZHAVExtension.swift in Sources */,
007236212BD13C75000595A9 /* ZZHCustomVideoCompositionInstruction.swift in Sources */,
009661F82BAD6C7100FCA65F /* CCSpaceAlbumFilterPopView2.swift in Sources */,
009DFB132BC8EA90007B56E8 /* MenuVCCell.swift in Sources */,
AF2120E02B4E9C8000400B7F /* Timer+Add.swift in Sources */,

View File

@ -521,84 +521,6 @@
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "BB183D45-3193-4FC7-92DB-275D529A4AE6"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController/CCSpatialVideoDisplayController.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "498"
endingLineNumber = "498"
landmarkName = "updateTopCenterButtonWhenIsPlayingChange()"
landmarkType = "7">
<Locations>
<Location
uuid = "BB183D45-3193-4FC7-92DB-275D529A4AE6 - bc6a78627bfebfd5"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "SwiftProject.CCSpatialVideoDisplayController.displayUpdate(caDisplayLink: __C.CADisplayLink) -&gt; ()"
moduleName = "SwiftProject"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/aaa/Documents/IOS%20Dev/VR/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController/CCSpatialVideoDisplayController.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "531"
endingLineNumber = "531"
offsetFromSymbolStart = "56">
</Location>
<Location
uuid = "BB183D45-3193-4FC7-92DB-275D529A4AE6 - bc6a78627bfebc61"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "SwiftProject.CCSpatialVideoDisplayController.displayUpdate(caDisplayLink: __C.CADisplayLink) -&gt; ()"
moduleName = "SwiftProject"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/aaa/Documents/IOS%20Dev/VR/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController/CCSpatialVideoDisplayController.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "551"
endingLineNumber = "551"
offsetFromSymbolStart = "56">
</Location>
<Location
uuid = "BB183D45-3193-4FC7-92DB-275D529A4AE6 - bc6a78627bfebc00"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "SwiftProject.CCSpatialVideoDisplayController.displayUpdate(caDisplayLink: __C.CADisplayLink) -&gt; ()"
moduleName = "SwiftProject"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/aaa/Documents/IOS%20Dev/VR/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController/CCSpatialVideoDisplayController.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "552"
endingLineNumber = "552"
offsetFromSymbolStart = "56">
</Location>
<Location
uuid = "BB183D45-3193-4FC7-92DB-275D529A4AE6 - 1f02ddc6d27b6aec"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
symbolName = "SwiftProject.CCSpatialVideoDisplayController.updateTopCenterButtonWhenIsPlayingChange() -&gt; ()"
moduleName = "SwiftProject"
usesParentBreakpointCondition = "Yes"
urlString = "file:///Users/aaa/Documents/IOS%20Dev/VR/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController/CCSpatialVideoDisplayController.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "497"
endingLineNumber = "497"
offsetFromSymbolStart = "1248">
</Location>
</Locations>
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
@ -663,5 +585,21 @@
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "F69C7D02-67BE-4F9F-8AAB-A7CA1271B99E"
shouldBeEnabled = "No"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "SwiftProject/Project/Util/PlayByTransferConvertor.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "20"
endingLineNumber = "20"
landmarkName = "convertVideo(asset:assetOutput:type:time:)"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
</Breakpoints>
</Bucket>

View File

@ -422,20 +422,17 @@ class CCSpatialVideoDisplayController: BaseController {
}
}
//isAirPlayActive
func setttinisScreenMirroring(isScreenMirroring:Bool){
releaseVideoComposition()
//
if(isScreenMirroring){
self.dealTaskWhenExternalScreenConnect()
}else{
//
self.dealTaskWhenExternalScreenDisConnect()
}
//
if(isScreenMirroring){
self.dealTaskWhenExternalScreenConnect()
}else{
//
self.dealTaskWhenExternalScreenDisConnect()
}
}
//session
@ -456,8 +453,9 @@ class CCSpatialVideoDisplayController: BaseController {
isPlaying = true
// let playerItem = self.getPlayerItem()
// player?.replaceCurrentItem(with: playerItem)
print("外接屏幕已连接.....")
activeSession()
// activeSession()
UIApplication.shared.connectedScenes.forEach { us in
print("uisence:\(us)\n")
@ -528,10 +526,11 @@ class CCSpatialVideoDisplayController: BaseController {
//
menuView.showFooterView(isShow: false, showText: "")
externalVC?.playerLayer?.player = nil
// self.playerController?.player = player
// player?.play()
externalVC = nil
if let externalVC {
customPlayer.recoveryPlayerLayer(otherLayer: (externalVC.playerLayer)!)
self.externalVC = nil
}
}
@objc func displayUpdate(caDisplayLink:CADisplayLink) {
@ -539,9 +538,7 @@ class CCSpatialVideoDisplayController: BaseController {
if has_exvc.playerLayer?.player == nil {
has_exvc.imageView?.isHidden = true
has_exvc.playerLayer?.isHidden = false
// self.playerController?.player = nil
// has_exvc.playerLayer?.player = player
// player?.play()
customPlayer.moveToOtherPlayerLayer(destLayer: has_exvc.playerLayer!)
externalDispalylink?.invalidate()
externalDispalylink = nil
}
@ -554,7 +551,6 @@ class CCSpatialVideoDisplayController: BaseController {
print("patialvideodisplaycontroler deinit......")
releaseVideoComposition()
NotificationCenter.default.removeObserver(self)
// player?.removeObserver(self, forKeyPath: "status")
}
func releaseVideoComposition() {
@ -562,8 +558,6 @@ class CCSpatialVideoDisplayController: BaseController {
externalDispalylink?.invalidate()
externalDispalylink = nil
}
// self.player?.currentItem?.videoComposition = nil
// self.player?.replaceCurrentItem(with: nil)
}

View File

@ -8,7 +8,14 @@
import Foundation
import UIKit
import AVKit
//assetoutput 线
//let AssetGlobalQueue = DispatchQueue.global(qos: DispatchQoS.QoSClass.default)
let CustomPlayerShareSemaphore = DispatchSemaphore(value: 1)
class ZZHCustomPlayer: UIView {
//
var sourceVideoURL:URL?
var assetTrack:AVAssetTrack?
@ -18,13 +25,19 @@ class ZZHCustomPlayer: UIView {
}()
var assetReader:AVAssetReader?
var assetOutput:AVAssetReaderTrackOutput?
var assetOutput:AVAssetReaderTrackOutput? {
didSet {
custominstruction?.assetOutput = assetOutput
}
}
var custominstruction:ZZHCustomVideoCompositionInstruction?
let videoTranserConvertor = PlayByTransferConvertor()
var selectedIndex:SpatialType = .parallelEyes//
{
didSet{
//
custominstruction?.selectedIndex = selectedIndex
}
}
@ -34,7 +47,7 @@ class ZZHCustomPlayer: UIView {
var avPlayer:AVPlayer?
var avPlayerLayer:AVPlayerLayer?
var timeSlider:ZZHCustomSlider?
var prePlayingState:Bool?// ,player,便
var prePlayingState:Bool = true // ,player,便
let playerPauseBgColor:UIColor = UIColor(r: 20, g: 20, b: 20, a: 0.2)//
//线
@ -126,8 +139,10 @@ class ZZHCustomPlayer: UIView {
func manualToSeekPlay(value:Float,isMoving:Bool){
if isMoving {
self.play(false)
print("此处已暂停播放,那么后面希望不要出现 sta.....> 的打印信息")
}
else {
Task {
let totalSec = CMTimeGetSeconds((self.avPlayer?.currentItem?.duration)!)
var atSec = Float(totalSec) * value
var timeScale:CMTimeScale? = self.avPlayer?.currentItem?.duration.timescale
@ -138,24 +153,48 @@ class ZZHCustomPlayer: UIView {
timeScale = CMTimeScale(1)
}
let ct = CMTime(value:CMTimeValue(atSec) , timescale: timeScale!)
quickLoadAReaderWhenSeek(startCT: ct)
// CustomPlayerShareSemaphore.wait()
// self.quickLoadAReaderWhenSeek(startCT: ct)
// CustomPlayerShareSemaphore.signal()
print("正在seek.....\(ct) ismainthread:\(Thread.isMainThread)")
DispatchQueue.main.async {
self.avPlayer?.seek(to: ct,toleranceBefore:.zero,toleranceAfter: .zero, completionHandler: { finished in
if finished {
if(!isMoving) {//,
CustomPlayerShareSemaphore.wait()
self.quickLoadAReaderWhenSeek(startCT: ct)
CustomPlayerShareSemaphore.signal()
self.play(true)
}
}
})
}
// self.releaseVideoComposition()
// self.avPlayer?.replaceCurrentItem(with: self.getPlayerItem())
print("正在seek.....\(ct)")
self.avPlayer?.seek(to: ct,toleranceBefore:.zero,toleranceAfter: .zero, completionHandler: {[weak self] finished in
if finished {
self?.quickLoadAReaderWhenSeek(startCT: ct)
self?.play(true)
}
print("seek result:\(finished)")
})
}
}
//readeroutputcopynextSampleBuffertime,,seekcopynextSampleBuffer
func checkFBTime(ct_Buffer:CMTime?) {
return //,,,
// guard let ct_Buffer,let playerCurrCT = self.avPlayer?.currentTime() else {
// return
// }
// print("checkFBTime......call CMTimeGetSeconds:\(CMTimeGetSeconds(playerCurrCT))")
// if ct_Buffer > playerCurrCT{
// print("in checkFBTime......call\nct_Buffer: \(ct_Buffer),\nplayerCurrCT: \(playerCurrCT)")
// let nct = CMTime(value: ct_Buffer.value+20, timescale: ct_Buffer.timescale)
// self.avPlayer?.seek(to: nct,toleranceBefore:.zero,toleranceAfter: .zero, completionHandler: {finished in
// print("checkFBTime seek result:\(finished)")
// })
// }
}
//
func setUPPlayer() {
Task {[weak self] in
@ -166,24 +205,52 @@ class ZZHCustomPlayer: UIView {
}
}
//
//
private func reLoadPlay(){
self.releaseVideoComposition()
self.avPlayer?.replaceCurrentItem(with: self.getPlayerItem())
self.quickLoadAReaderWhenReplayBack()
self.play(true)
}
//avplayeravplayerLayer:
func moveToOtherPlayerLayer(destLayer:AVPlayerLayer) {
print("moveToOtherPlayerLayer 准备暂停播放")
self.play(false)
print("avPlayerLayer 准备移除player")
self.avPlayerLayer?.player = nil
print("avPlayerLayer 已经移除player")
destLayer.player = self.avPlayer
if assetTrack != nil {
quickLoadAReaderWhenSeek(startCT: (self.avPlayer?.currentTime())!)
}
print("外屏destLayer 已添加player")
self.play(true)
print("moveToOtherPlayerLayer 恢复播放play")
}
//,layer
func recoveryPlayerLayer(otherLayer:AVPlayerLayer) {
print("recoveryPlayerLayer 准备暂停播放")
self.play(false)
print("otherLayer 准备移除player")
otherLayer.player = nil
print("otherLayer 已经移除player")
avPlayerLayer?.player = self.avPlayer
quickLoadAReaderWhenSeek(startCT: (self.avPlayer?.currentTime())!)
print("手机屏幕avPlayerLayer 已添加player")
self.play(true)
print("recoveryPlayerLayer 恢复播放play")
}
//
@objc func playOrPause(sender:UIButton) {
if sender.tag == 0 {//
if prePlayingState == false {//
play(true)
sender.tag = 1
print("bofang...")
}
else {//
print("暂停...")
play(false)
sender.tag = 0
}
}
@ -201,9 +268,7 @@ class ZZHCustomPlayer: UIView {
updatePlayMaskView(value)
}
// func seek(ct:CMTime) {
// self.avPlayer?.seek(to: ct)
// }
//
func updatePlayMaskView(_ value:Bool) {
@ -215,6 +280,9 @@ class ZZHCustomPlayer: UIView {
func updateSliderUI(ct:CMTime) {
let sec = CMTimeGetSeconds(ct)
let totalSec = CMTimeGetSeconds((self.avPlayer?.currentItem?.duration)!)
if totalSec.isNaN {
return
}
let s = sec / totalSec
self.timeSlider?.exUpdateProcessValue(value: Float(s),currSec: Int(sec),totalSec: Int(totalSec))
}
@ -230,22 +298,13 @@ class ZZHCustomPlayer: UIView {
//MARK: -
//MARK: -
//
@objc func notification_PlayerEndTime(notification:Notification){
print("PlayerEndTime....")
// self.quickLoadAReaderWhenReplayBack()
avPlayer?.seek(to: .zero,toleranceBefore: .zero,toleranceAfter: .zero,completionHandler: {[weak self] finish in
self?.quickLoadAReaderWhenReplayBack()
self?.avPlayer?.play()
self?.reLoadPlay()
})
}
}

View File

@ -12,29 +12,44 @@ import AVKit
import VideoToolbox
extension ZZHCustomPlayer {
// func getPlayerItem() -> AVPlayerItem {
// let temItem = AVPlayerItem(asset: self.videoOriginalAsset)
// AVVideoComposition.videoComposition(with: temItem.asset) { [weak self] request in
//
// print("frame....")
// guard let weakSelf = self else {
// print("self .....")
// return
// }
// weakSelf.convertFrame(request:request)
//
// } completionHandler: { ac, err in
// if err != nil {
// print("coposition\(err)")
// }
// else{
// print("composition ok....")
// temItem.videoComposition = ac
// }
// }
// return temItem
// }
func getPlayerItem() -> AVPlayerItem {
let temItem = AVPlayerItem(asset: self.videoOriginalAsset)
AVVideoComposition.videoComposition(with: temItem.asset) { [weak self] request in
print("正在请求解码图片frame....")
guard let weakSelf = self else {
print("self 被销毁了.....")
return
}
// self?.lock.lock()
weakSelf.convertFrame(request:request)
// self?.lock.unlock()
} completionHandler: { ac, err in
if err != nil {
print("初始化coposition报错\(err)")
}
else{
print("composition 生成ok....")
temItem.videoComposition = ac
}
let timeRange = CMTimeRange(start: CMTime.zero, duration: temItem.asset.duration)
let videoTracks = temItem.asset.tracks(withMediaType: AVMediaType.video)
guard let sourceVideoTrack = videoTracks.first else {
return temItem
}
let videoComposition = AVMutableVideoComposition(propertiesOf: temItem.asset)
videoComposition.customVideoCompositorClass = ZZHCustomVideoCompositor.self
let instruction = ZZHCustomVideoCompositionInstruction(track: sourceVideoTrack, timeRange: timeRange, transform: sourceVideoTrack.preferredTransform, targetSize: sourceVideoTrack.naturalSize,sourceVideoURL: self.sourceVideoURL!)
custominstruction = instruction
videoComposition.instructions = [instruction]
temItem.videoComposition = videoComposition
return temItem
}
@ -70,32 +85,35 @@ extension ZZHCustomPlayer {
}
var ciImg:CIImage? = nil
var presentTime:CMTime? = nil
switch mode {
case .crossedEyes://
ciImg = self.videoTranserConvertor.convertVideo(asset: self.videoOriginalAsset, assetOutput: ao, type: self.selectedIndex, time: compositionTime)
(ciImg,presentTime) = self.videoTranserConvertor.convertVideo(asset: self.videoOriginalAsset, assetOutput: ao, type: self.selectedIndex, time: compositionTime)
break
case .fsbs://3d
ciImg = self.videoTranserConvertor.convertVideo(asset: self.videoOriginalAsset, assetOutput: ao, type: self.selectedIndex, time: compositionTime)
(ciImg,presentTime) = self.videoTranserConvertor.convertVideo(asset: self.videoOriginalAsset, assetOutput: ao, type: self.selectedIndex, time: compositionTime)
break
case .hsbs://3d
ciImg = self.videoTranserConvertor.convertVideo(asset: self.videoOriginalAsset, assetOutput: ao, type: self.selectedIndex, time: compositionTime)
(ciImg,presentTime) = self.videoTranserConvertor.convertVideo(asset: self.videoOriginalAsset, assetOutput: ao, type: self.selectedIndex, time: compositionTime)
break
case .parallelEyes://
ciImg = self.videoTranserConvertor.convertVideo(asset: self.videoOriginalAsset, assetOutput: ao, type: self.selectedIndex, time: compositionTime)
(ciImg,presentTime) = self.videoTranserConvertor.convertVideo(asset: self.videoOriginalAsset, assetOutput: ao, type: self.selectedIndex, time: compositionTime)
break
case .redBlueSolid://
ciImg = self.videoTranserConvertor.convertVideo(asset: self.videoOriginalAsset, assetOutput: ao, type: self.selectedIndex, time: compositionTime)
(ciImg,presentTime) = self.videoTranserConvertor.convertVideo(asset: self.videoOriginalAsset, assetOutput: ao, type: self.selectedIndex, time: compositionTime)
break
default:
break
}
//buffer,,
// checkFBTime(ct_Buffer: presentTime)
return ciImg
}
@ -118,16 +136,20 @@ extension ZZHCustomPlayer {
func quickLoadAReader(timeRange:CMTimeRange) {
// self.lock.lock()
if(assetReader != nil){
assetReader?.cancelReading()
}
assetReader = try! AVAssetReader(asset: self.videoOriginalAsset)
quickLoadAssetOutput()
assetReader!.timeRange = timeRange
assetReader!.add(assetOutput!)
assetReader!.startReading()
// self.lock.unlock()
do {
assetReader = try AVAssetReader(asset: self.videoOriginalAsset)
quickLoadAssetOutput()
assetReader!.timeRange = timeRange
assetReader!.add(assetOutput!)
assetReader!.startReading()
}
catch {
print("quickLoadAReader err:\(error)")
}
}
//slider
@ -136,6 +158,8 @@ extension ZZHCustomPlayer {
self.quickLoadAReader(timeRange: timeRange)
}
//,assetreadertimeRange
func quickLoadAReaderWhenReplayBack() {
let timeRange = CMTimeRange(start: .zero, duration: .positiveInfinity)
@ -144,8 +168,11 @@ extension ZZHCustomPlayer {
func quickLoadAssetOutput() {
if(assetOutput != nil){
print("正在释放assetoutput----assetOutput:\(assetOutput) \n\(Date.now.timeIntervalSince1970)")
assetOutput?.markConfigurationAsFinal()
print("正在释放assetoutput....assetOutput:\(assetOutput) \n\(Date.now.timeIntervalSince1970)")
assetOutput = nil
print("释放完毕assetoutput....thread:\(Thread.current) \n\(Date.now.timeIntervalSince1970)")
}
assetOutput = AVAssetReaderTrackOutput(

View File

@ -0,0 +1,44 @@
//
// ZZHCustomVideoCompositionInstruction.swift
// SwiftProject
//
// Created by aaa on 2024/4/18.
//
import Foundation
import AVFoundation
/// Composition instruction consumed by `ZZHCustomVideoCompositor`.
/// Carries the source track identity and geometry, plus the mutable state
/// (spatial mode, reader output) the compositor needs to render each frame.
final class ZZHCustomVideoCompositionInstruction: NSObject, AVVideoCompositionInstructionProtocol {
    // MARK: - AVVideoCompositionInstructionProtocol (fixed values)
    let enablePostProcessing: Bool = true
    let containsTweening: Bool = false
    let passthroughTrackID: CMPersistentTrackID = kCMPersistentTrackID_Invalid

    // MARK: - Per-instruction data
    let timeRange: CMTimeRange
    let requiredSourceTrackIDs: [NSValue]?
    let videoTrackID: CMPersistentTrackID
    let targetSize: CGSize
    let transform: CGAffineTransform

    // MARK: - Mutable state injected by the owning player
    // Spatial rendering mode; updated when the user switches display modes.
    var selectedIndex: SpatialType = .parallelEyes
    // URL of the source video file; always assigned in `init`.
    var sourceVideoURL: URL?
    // Track output the compositor pulls decoded sample buffers from.
    var assetOutput: AVAssetReaderTrackOutput?
    // Asset built lazily from `sourceVideoURL` (force-unwrap mirrors the
    // original: `init` guarantees the URL is present).
    lazy var videoOriginalAsset: AVAsset = {
        AVAsset(url: sourceVideoURL!)
    }()

    /// Builds an instruction for `track` over `timeRange`, recording the
    /// track's transform, target size, and the backing video file URL.
    init(track: AVAssetTrack, timeRange: CMTimeRange, transform: CGAffineTransform, targetSize: CGSize, sourceVideoURL: URL) {
        self.requiredSourceTrackIDs = [NSNumber(value: track.trackID)]
        self.timeRange = timeRange
        self.videoTrackID = track.trackID
        self.transform = transform
        self.targetSize = targetSize
        self.sourceVideoURL = sourceVideoURL
        super.init()
    }
}

View File

@ -0,0 +1,169 @@
//
// ZZHCustomVideoCompositor.swift
// SwiftProject
//
// Created by aaa on 2024/4/18.
//
import Foundation
import Dispatch
import AVFoundation
import CoreImage
import CoreVideo
/// Errors produced by `ZZHCustomVideoCompositor` when a composition request
/// cannot be rendered (e.g. frame conversion yields no pixel buffer).
enum ZZHCustomVideoCompositoringError: Error {
    // The request could not be serviced into a composed frame.
    case invalidRequest
}
/// Custom video compositor that converts spatial (MV-HEVC style) video frames
/// into the user-selected 2D layout (SBS, cross-eyed, anaglyph, ...) via
/// `PlayByTransferConvertor`, driven by `ZZHCustomVideoCompositionInstruction`.
final class ZZHCustomVideoCompositor: NSObject, AVVideoCompositing {
    /// Converts decoded sample buffers into the selected spatial layout.
    private let videoTranserConvertor = PlayByTransferConvertor()
    /// Requests currently in flight. Mutated only on `queue`.
    var activeRequests: [AVAsynchronousVideoCompositionRequest] = []
    /// Serial queue on which every composition request is rendered.
    private let queue = DispatchQueue(label: "ca.gurulogic.layer-video-compositor.render", qos: .default)
    private var renderContext: AVVideoCompositionRenderContext = AVVideoCompositionRenderContext()
    // NOTE(review): `cancelled` is set to true from the caller's thread in
    // cancelAllPendingVideoCompositionRequests() while being read on `queue`;
    // consider confining all writes to `queue` — TODO confirm intended semantics.
    private var cancelled: Bool = false
    private static let pixelFormat = kCVPixelFormatType_32ARGB
    private let colorSpace = CGColorSpaceCreateDeviceRGB()
    /// Prefer a GLES-backed CIContext when available; otherwise the default context.
    private let ciContext: CIContext = {
        if let eaglContext = EAGLContext(api: .openGLES3) ?? EAGLContext(api: .openGLES2) {
            return CIContext(eaglContext: eaglContext)
        }
        return CIContext()
    }()

    /// Pixel-buffer attributes for source frames handed to this compositor.
    let sourcePixelBufferAttributes: [String : Any]? = [
        kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: ZZHCustomVideoCompositor.pixelFormat),
        kCVPixelBufferOpenGLESCompatibilityKey as String : NSNumber(value: true),
    ]
    /// Pixel-buffer attributes for buffers vended by the render context.
    let requiredPixelBufferAttributesForRenderContext: [String : Any] = [
        kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: ZZHCustomVideoCompositor.pixelFormat),
        kCVPixelBufferOpenGLESCompatibilityKey as String : NSNumber(value: true),
    ]

    func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) {
        renderContext = newRenderContext
    }

    func startRequest(_ request: AVAsynchronousVideoCompositionRequest) {
        queue.async {
            self.activeRequests.append(request)
            // Always drop the request from the active list, on every exit path.
            defer { self.activeRequests.removeAll { $0 === request } }
            print("startRequest ....AVAsynchronousVideoCompositionRequest: \(self.cancelled) request:\(request)")
            guard !self.cancelled else {
                print("startRequest cancell....")
                request.finishCancelledRequest()
                return
            }
            print("startRequest queue:\(self.queue) current:\(Thread.current)")
            // Serialize access to the shared asset reader/output with the player side.
            CustomPlayerShareSemaphore.wait()
            let renderedBuffer = self.renderFrame(forRequest: request)
            // BUG FIX: the semaphore must be released even when rendering fails;
            // the original returned from the nil-buffer guard without signalling,
            // leaking the permit and deadlocking the player's next wait().
            CustomPlayerShareSemaphore.signal()
            guard let renderedBuffer else {
                request.finish(with: ZZHCustomVideoCompositoringError.invalidRequest)
                print("报了个异常,但是应该执行不到....")
                return
            }
            request.finish(withComposedVideoFrame: renderedBuffer)
            print("完成执行 finish.....")
        }
    }

    func cancelAllPendingVideoCompositionRequests() {
        print("取消所有的pending 视频")
        // Requests already enqueued on the serial queue will observe `cancelled == true`
        // and finish as cancelled; the barrier block runs after all of them and re-arms.
        cancelled = true
        queue.async(flags: .barrier) {
            print("取消所有的pending 视频 barrier")
            self.cancelled = false
        }
    }

    /// Renders one composed frame for `request`.
    /// - Returns: a new pixel buffer with the converted image, the unmodified
    ///   source frame when conversion is unavailable, or nil on invalid input.
    private func renderFrame(forRequest request: AVAsynchronousVideoCompositionRequest) -> CVPixelBuffer? {
        return autoreleasepool {
            guard let instruction = request.videoCompositionInstruction as? ZZHCustomVideoCompositionInstruction else {
                return nil
            }
            guard let videoFrameBuffer = request.sourceFrame(byTrackID: instruction.videoTrackID) else {
                return nil
            }
            let compositionTime = request.compositionTime
            var ciImg: CIImage? = nil
            switch instruction.selectedIndex {
            case .monocular2D: // plain 2D: pass the source frame through untouched
                break
            default:
                // Robustness: the original force-unwrapped `instruction.assetOutput!`;
                // fall back to the passthrough frame instead of crashing when the
                // player has not (yet) injected a track output.
                if let ao = instruction.assetOutput {
                    ciImg = self.otherModeImgWithMode(mode: instruction.selectedIndex, compositionTime: compositionTime, videoOriginalAsset: instruction.videoOriginalAsset, ao: ao)
                }
            }
            if let ciImg {
                guard let renderedBuffer = renderContext.newPixelBuffer() else {
                    return nil
                }
                ciContext.render(ciImg, to: renderedBuffer, bounds: ciImg.extent, colorSpace: self.colorSpace)
                return renderedBuffer
            }
            else {
                print("未合成成功.....")
                return videoFrameBuffer
            }
        }
    }

    /// Converts the frame at `compositionTime` into the layout for `mode`.
    /// All spatial modes share the same conversion call, so the original
    /// five identical switch cases are collapsed into one.
    func otherModeImgWithMode(mode: SpatialType, compositionTime: CMTime, videoOriginalAsset: AVAsset, ao: AVAssetReaderTrackOutput) -> CIImage? {
        var ciImg: CIImage? = nil
        switch mode {
        case .crossedEyes, .fsbs, .hsbs, .parallelEyes, .redBlueSolid:
            (ciImg, _) = self.videoTranserConvertor.convertVideo(asset: videoOriginalAsset, assetOutput: ao, type: mode, time: compositionTime)
        default:
            break
        }
        return ciImg
    }
}

View File

@ -14,17 +14,20 @@ import ImageIO
class PlayByTransferConvertor {
func convertVideo(asset:AVAsset, assetOutput:AVAssetReaderTrackOutput,type:SpatialType,time: CMTime)->(CIImage?) {
func convertVideo(asset:AVAsset, assetOutput:AVAssetReaderTrackOutput,type:SpatialType,time: CMTime)->(CIImage?,CMTime?) {
var newpb:CIImage? = nil
// print("sta.....>>>>>>>\(Date.now.timeIntervalSince1970)")
while let nextSampleBuffer = assetOutput.copyNextSampleBuffer() {
// print("nextSampleBuffer.....+++++++\(Date.now.timeIntervalSince1970)")
// return nil
var presentationTime:CMTime? = nil
print("sta.....>>>>>>>thread:\(Thread.current) assetOutput:\(assetOutput) \n\(Date.now.timeIntervalSince1970)")
while let nextSampleBuffer = assetOutput.copyNextSampleBuffer() {
presentationTime = CMSampleBufferGetPresentationTimeStamp(nextSampleBuffer)
print("presentationTime: \(presentationTime) \ntime: \(time)")
if presentationTime! > time {//buffer>time,
print("如果当前获取的buffer的时间>time的时间,则直接返回即可...")
break
}
// print("PlayByTransferConvertor while")
let presentationTime = CMSampleBufferGetPresentationTimeStamp(nextSampleBuffer)
if presentationTime == time {
// print("PlayByTransferConvertor while break")
guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { break }
let leftEyeBuffer = taggedBuffers.first(where: {
@ -78,7 +81,9 @@ class PlayByTransferConvertor {
break
}
}
return newpb
print("PlayByTransferConvertor 测试看是否有返回....")
return (newpb,presentationTime)
}