视频转码修改,但是还存在一个必现的bug

This commit is contained in:
bluesea 2024-03-29 20:21:15 +08:00
parent bc7d441f0c
commit a5b4f5e5c8
4 changed files with 251 additions and 184 deletions

View File

@ -515,8 +515,8 @@
filePath = "SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController/CCSpatialVideoDisplayForVideoTask.swift" filePath = "SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController/CCSpatialVideoDisplayForVideoTask.swift"
startingColumnNumber = "9223372036854775807" startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807" endingColumnNumber = "9223372036854775807"
startingLineNumber = "83" startingLineNumber = "63"
endingLineNumber = "83" endingLineNumber = "63"
landmarkName = "convertFrame(request:)" landmarkName = "convertFrame(request:)"
landmarkType = "7"> landmarkType = "7">
<Locations> <Locations>
@ -563,8 +563,8 @@
filePath = "SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController/CCSpatialVideoDisplayForVideoTask.swift" filePath = "SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController/CCSpatialVideoDisplayForVideoTask.swift"
startingColumnNumber = "9223372036854775807" startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807" endingColumnNumber = "9223372036854775807"
startingLineNumber = "87" startingLineNumber = "67"
endingLineNumber = "87" endingLineNumber = "67"
landmarkName = "convertFrame(request:)" landmarkName = "convertFrame(request:)"
landmarkType = "7"> landmarkType = "7">
<Locations> <Locations>
@ -611,8 +611,8 @@
filePath = "SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController/CCSpatialVideoDisplayController.swift" filePath = "SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController/CCSpatialVideoDisplayController.swift"
startingColumnNumber = "9223372036854775807" startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807" endingColumnNumber = "9223372036854775807"
startingLineNumber = "437" startingLineNumber = "440"
endingLineNumber = "437" endingLineNumber = "440"
landmarkName = "navgationButtonClick2(sender:)" landmarkName = "navgationButtonClick2(sender:)"
landmarkType = "7"> landmarkType = "7">
</BreakpointContent> </BreakpointContent>

View File

@ -21,16 +21,16 @@ enum SpatialType : Int {
*/ */
case monocular2D case monocular2D = 1
case parallelEyes case parallelEyes = 0
case redBlueSolid case redBlueSolid = 2
case crossedEyes case crossedEyes = 3
//使 //使
case hsbs case hsbs = 4
case fsbs case fsbs = 5
} }
class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDelegate { class CCSpatialVideoDisplayController: BaseController {
var isAirPlayActive:Bool = false { var isAirPlayActive:Bool = false {
didSet{ didSet{
@ -43,11 +43,14 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
} }
else{ else{
selectedIndex = .parallelEyes//monocular2D selectedIndex = .parallelEyes//monocular2D
typeData = [(icon:"type_check",title:"平行眼",isHiden:false), typeData = [
(icon:"type_check",title:"单眼2D",isHiden:false), (icon:"type_check",title:NSLocalizedString("平行眼", comment: ""),isHiden:false),
(icon:"type_check",title:NSLocalizedString("单眼2D", comment: ""),isHiden:false),
(icon:"type_check",title:"红蓝立体",isHiden:false),
(icon:"type_check",title:"交叉眼",isHiden:false)]
(icon:"type_check",title:NSLocalizedString("红蓝立体", comment: ""),isHiden:false),
(icon:"type_check",title:NSLocalizedString("交叉眼", comment: ""),isHiden:false)]
} }
//UI //UI
setttinisScreenMirroring(isScreenMirroring: isAirPlayActive) setttinisScreenMirroring(isScreenMirroring: isAirPlayActive)
@ -86,15 +89,10 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
// //
var sourceVideoURL:URL? var sourceVideoURL:URL?
let outputVideoURL:URL = URL.documentsDirectory.appending(path:"output1111.mp4")
var imgData:Data? var imgData:Data?
var player:AVPlayer? var player:AVPlayer?
// var needRestReaderTimeRange = false//
// var playerLay:AVPlayerLayer?
var playerController:AVPlayerViewController? var playerController:AVPlayerViewController?
lazy var mTopImgView:UIImageView = { lazy var mTopImgView:UIImageView = {
@ -139,7 +137,7 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
button.layer.cornerRadius = 18 button.layer.cornerRadius = 18
button.layer.borderWidth = 1 button.layer.borderWidth = 1
button.layer.borderColor = UIColor.white.cgColor button.layer.borderColor = UIColor.white.cgColor
button.setTitle("平行眼", for: UIControl.State.normal) button.setTitle(NSLocalizedString("平行眼", comment: ""), for: UIControl.State.normal)
button.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal) button.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
button.updateBtnEdgeInsets(style: .Right, space: 10) button.updateBtnEdgeInsets(style: .Right, space: 10)
button.setTitleColor(UIColor.white, for: UIControl.State.normal) button.setTitleColor(UIColor.white, for: UIControl.State.normal)
@ -149,11 +147,11 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
var typeData:[(icon:String,title:String,isHiden:Bool)] = [(icon:"type_check",title:"平行眼",isHiden:false), var typeData:[(icon:String,title:String,isHiden:Bool)] = [(icon:"type_check",title:NSLocalizedString("平行眼", comment: ""),isHiden:false),
(icon:"type_check",title:"单眼2D",isHiden:false), (icon:"type_check",title:NSLocalizedString("单眼2D", comment: ""),isHiden:false),
(icon:"type_check",title:"红蓝立体",isHiden:false), (icon:"type_check",title:NSLocalizedString("红蓝立体", comment: ""),isHiden:false),
(icon:"type_check",title:"交叉眼",isHiden:false)] (icon:"type_check",title:NSLocalizedString("交叉眼", comment: ""),isHiden:false)]
{ {
didSet { didSet {
menuView.setData(datas: self.typeData) menuView.setData(datas: self.typeData)
@ -209,14 +207,15 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
// //
@objc func notification_PlayerEndTime(notification:Notification){ @objc func notification_PlayerEndTime(notification:Notification){
// print("PlayerEndTime....") print("PlayerEndTime....")
// needRestReaderTimeRange = true
} }
// //
@objc func notification_PlayerTimeJump(notification:Notification){ @objc func notification_PlayerTimeJump(notification:Notification){
// needRestReaderTimeRange = true
print("time jump....")
// print("time jump....")
} }
@ -230,8 +229,16 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
Task { Task {
print("ccs viewdid load ---------") print("ccs viewdid load ---------")
await self.loadVideoAssetReader() await self.loadVideoAssetReader()
print("ccs viewdid load +++++++") DispatchQueue.main.async {
self.configureInfo()
print("ccs viewdid load +++++++")
// AirPlay
self.checkAirPlayStatus()
}
} }
}
func configureInfo(){
print("getPlayerItem ========") print("getPlayerItem ========")
let playerItem = self.getPlayerItem() let playerItem = self.getPlayerItem()
@ -253,11 +260,12 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
playerController = AVPlayerViewController() playerController = AVPlayerViewController()
playerController?.player = player playerController?.player = player
playerController?.delegate = self
playerController!.view.backgroundColor = .clear playerController!.view.backgroundColor = .clear
playerController?.view.frame = CGRect.init(x: 0, y: 170, width: self.view.frame.size.width, height: 400) playerController?.view.frame = CGRect.init(x: 0, y: 170, width: self.view.frame.size.width, height: 400)
self.addChild(playerController!) self.addChild(playerController!)
self.view.addSubview(playerController!.view) self.view.addSubview(playerController!.view)
self.view.addSubview(tipsButton) self.view.addSubview(tipsButton)
@ -265,163 +273,38 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
// AirPlay // AirPlay
NotificationCenter.default.addObserver(self, selector: #selector(airPlayStatusDidChange(_:)), name: AVAudioSession.routeChangeNotification, object: nil) NotificationCenter.default.addObserver(self, selector: #selector(airPlayStatusDidChange(_:)), name: AVAudioSession.routeChangeNotification, object: nil)
self.player!.play()
NotificationCenter.default.addObserver(self, selector: #selector(notification_PlayerEndTime(notification:)), name: AVPlayerItem.didPlayToEndTimeNotification, object: nil) NotificationCenter.default.addObserver(self, selector: #selector(notification_PlayerEndTime(notification:)), name: AVPlayerItem.didPlayToEndTimeNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(notification_PlayerTimeJump(notification:)), name: AVPlayerItem.timeJumpedNotification, object: nil) NotificationCenter.default.addObserver(self, selector: #selector(notification_PlayerTimeJump(notification:)), name: AVPlayerItem.timeJumpedNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(notificatin_failedToPlayToEndTimeNotification(notifiation: )), name: AVPlayerItem.failedToPlayToEndTimeNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(notificatin_failedToPlayToEndTimeNotification(notifiation: )), name: AVPlayerItem.newErrorLogEntryNotification, object: nil)
} }
func getPlayerItem() -> AVPlayerItem { @objc func notificatin_failedToPlayToEndTimeNotification(notifiation:Notification) {
let temItem = AVPlayerItem(asset: self.videoOriginalAsset) print("notificatin_failedToPlayToEndTimeNotification....")
//
temItem.videoComposition = AVVideoComposition(asset: temItem.asset) { [weak self] request in
guard let weakSelf = self else {
print("self 被销毁了.....")
return
}
let compositionTime = request.compositionTime
let end:CMTime = CMTimeMake(value: Int64(compositionTime.value+1), timescale: compositionTime.timescale)
let tr = CMTimeRange(start: compositionTime, end: end)
// self.quickLoadAReader(timeRange: tr)
var ciImg:CIImage? = nil
switch weakSelf.selectedIndex {
case .crossedEyes://
guard let ao = weakSelf.assetOutput else {
print("assetOutput 应该是没有被创建成功.....")
return
}
while let nextSampleBuffer = ao.copyNextSampleBuffer() {
}
ao.reset(forReadingTimeRanges:[NSValue(timeRange: tr)])
ciImg = weakSelf.videoTranserConvertor.convertVideo(asset: weakSelf.videoOriginalAsset, assetOutput: ao, type: weakSelf.selectedIndex, time: compositionTime)
break
case .fsbs://3d
guard let ao = weakSelf.assetOutput else {
return
}
while let nextSampleBuffer = ao.copyNextSampleBuffer() {
}
ao.reset(forReadingTimeRanges:[NSValue(timeRange: tr)])
ciImg = weakSelf.videoTranserConvertor.convertVideo(asset: weakSelf.videoOriginalAsset, assetOutput: ao, type: weakSelf.selectedIndex, time: compositionTime)
break
case .hsbs://3d
guard let ao = weakSelf.assetOutput else {
return
}
while let nextSampleBuffer = ao.copyNextSampleBuffer() {
}
ao.reset(forReadingTimeRanges:[NSValue(timeRange: tr)])
ciImg = weakSelf.videoTranserConvertor.convertVideo(asset: weakSelf.videoOriginalAsset, assetOutput: ao, type: weakSelf.selectedIndex, time: compositionTime)
break
case .parallelEyes://
guard let ao = weakSelf.assetOutput else {
return
}
while let nextSampleBuffer = ao.copyNextSampleBuffer() {
}
ao.reset(forReadingTimeRanges:[NSValue(timeRange: tr)])
ciImg = weakSelf.videoTranserConvertor.convertVideo(asset: weakSelf.videoOriginalAsset, assetOutput: ao, type: weakSelf.selectedIndex, time: compositionTime)
break
case .monocular2D://2d
ciImg = request.sourceImage
break
case .redBlueSolid://
guard let ao = weakSelf.assetOutput else {
return
}
while let nextSampleBuffer = ao.copyNextSampleBuffer() {
}
ao.reset(forReadingTimeRanges:[NSValue(timeRange: tr)])
ciImg = weakSelf.videoTranserConvertor.convertVideo(asset: weakSelf.videoOriginalAsset, assetOutput: ao, type: weakSelf.selectedIndex, time: compositionTime)
break
}
if let ciImg {
request.finish(with: ciImg, context: nil)
}
}
return temItem
} }
//assetReaderoutput @objc func notificatin_newErrorLogEntryNotification(notifiation:Notification) {
func loadVideoAssetReader() async { print("notificatin_newErrorLogEntryNotification....")
do {
if self.assetTrack == nil {
assetTrack = try await self.videoOriginalAsset.loadTracks(withMediaType: .video).first!
}
// let timeRange = CMTimeRange(start: .zero, duration: .positiveInfinity)
let timeRange = CMTimeRange(start: .zero, duration: CMTime(value: 1, timescale: 1))
self.quickLoadAReader(timeRange: timeRange)
} catch {
print("Error loading video: \(error)")
}
}
func quickLoadAReader(timeRange:CMTimeRange) {
if(assetOutput != nil){
assetOutput?.markConfigurationAsFinal()
assetOutput = nil
}
if(assetReader != nil){
assetReader?.cancelReading()
}
assetReader = try! AVAssetReader(asset: self.videoOriginalAsset)
assetOutput = AVAssetReaderTrackOutput(
track: self.assetTrack!,
outputSettings: [
AVVideoDecompressionPropertiesKey: [
kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
],
]
)
assetOutput?.supportsRandomAccess = true
assetReader!.timeRange = timeRange
assetReader!.add(assetOutput!)
assetReader!.startReading()
} }
override func viewDidDisappear(_ animated: Bool) { override func viewDidDisappear(_ animated: Bool) {
super.viewDidDisappear(animated) super.viewDidDisappear(animated)
player?.pause()
let sec:TimeInterval = ZZHHelper.getSecFromUserDefaultByKey(kNowTimeToUserDefaultKey_VideoDisplayController) let sec:TimeInterval = ZZHHelper.getSecFromUserDefaultByKey(kNowTimeToUserDefaultKey_VideoDisplayController)
Analytics.logEvent("preview_pv", parameters: ["refer_page":"视频预览页面","duration":sec]) Analytics.logEvent("preview_pv", parameters: ["refer_page":"视频预览页面","duration":sec])
player?.pause()
} }
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
}
override func viewDidAppear(_ animated: Bool) { override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated) super.viewDidAppear(animated)
// AirPlay
checkAirPlayStatus()
} }
//MARK: - //MARK: -
@ -469,12 +352,12 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
// //
if isPlaying { if isPlaying {
// --- // ---
menuView.showFooterView(isShow: true, showText: "结束串流") menuView.showFooterView(isShow: true, showText: NSLocalizedString("结束串流", comment: ""))
mTopCenterTypeButton.setTitle("外部串流中", for: UIControl.State.normal) mTopCenterTypeButton.setTitle(NSLocalizedString("外部串流中", comment: ""), for: UIControl.State.normal)
}else{ }else{
// --- // ---
menuView.showFooterView(isShow: true, showText: "开始串流") menuView.showFooterView(isShow: true, showText: NSLocalizedString("开始串流", comment: ""))
mTopCenterTypeButton.setTitle("已连接外部设备", for: UIControl.State.normal) mTopCenterTypeButton.setTitle(NSLocalizedString("已连接外部设备", comment: ""), for: UIControl.State.normal)
} }
@ -488,14 +371,14 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
playerController!.player!.allowsExternalPlayback = false playerController!.player!.allowsExternalPlayback = false
if self.selectedIndex == .monocular2D { if self.selectedIndex == .monocular2D {
mTopCenterTypeButton.setTitle("单眼2D", for: UIControl.State.normal) mTopCenterTypeButton.setTitle(NSLocalizedString("单眼2D", comment: ""), for: UIControl.State.normal)
}else if self.selectedIndex == .redBlueSolid { }else if self.selectedIndex == .redBlueSolid {
mTopCenterTypeButton.setTitle("红蓝立体", for: UIControl.State.normal) mTopCenterTypeButton.setTitle(NSLocalizedString("红蓝立体", comment: ""), for: UIControl.State.normal)
}else if self.selectedIndex == .crossedEyes { }else if self.selectedIndex == .crossedEyes {
mTopCenterTypeButton.setTitle("交叉眼", for: UIControl.State.normal) mTopCenterTypeButton.setTitle(NSLocalizedString("交叉眼", comment: ""), for: UIControl.State.normal)
} }
else if self.selectedIndex == .parallelEyes { else if self.selectedIndex == .parallelEyes {
mTopCenterTypeButton.setTitle("平行眼", for: UIControl.State.normal) mTopCenterTypeButton.setTitle(NSLocalizedString("平行眼", comment: ""), for: UIControl.State.normal)
} }
mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal) mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
@ -537,12 +420,12 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
// //
if isPlaying { if isPlaying {
// --- // ---
menuView.showFooterView(isShow: true, showText: "结束串流") menuView.showFooterView(isShow: true, showText: NSLocalizedString("结束串流", comment: ""))
mTopCenterTypeButton.setTitle("外部串流中", for: UIControl.State.normal) mTopCenterTypeButton.setTitle(NSLocalizedString("外部串流中", comment: ""), for: UIControl.State.normal)
}else{ }else{
// --- // ---
menuView.showFooterView(isShow: true, showText: "开始串流") menuView.showFooterView(isShow: true, showText: NSLocalizedString("开始串流", comment: ""))
mTopCenterTypeButton.setTitle("已连接外部设备", for: UIControl.State.normal) mTopCenterTypeButton.setTitle(NSLocalizedString("已连接外部设备", comment: ""), for: UIControl.State.normal)
} }
}else{ }else{
mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_up"), for: .normal) mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_up"), for: .normal)

View File

@ -0,0 +1,184 @@
//
// CCSpatialVideoDisplayForVideoTask.swift
// SwiftProject
//
// Created by aaa on 2024/3/28.
//
import Foundation
import AVFoundation
import Photos
import AVKit
import VideoToolbox
extension CCSpatialVideoDisplayController {

    /// Builds an `AVPlayerItem` for `videoOriginalAsset` and asynchronously attaches a
    /// custom `AVVideoComposition` that routes every frame through `convertFrame(request:)`.
    ///
    /// NOTE(review): the composition is attached inside an async completion handler, so the
    /// returned item may briefly play without `videoComposition` set — TODO confirm acceptable.
    func getPlayerItem() -> AVPlayerItem {
        let temItem = AVPlayerItem(asset: self.videoOriginalAsset)
        AVVideoComposition.videoComposition(with: temItem.asset) { [weak self] request in
            print("正在请求解码图片frame....")
            guard let weakSelf = self else {
                print("self 被销毁了.....")
                // BUG FIX: an AVAsynchronousCIImageFilteringRequest must ALWAYS be finished
                // (finish(with:context:) or finish(with:error:)); returning without finishing
                // leaves the frame pending forever and stalls playback. Fall back to the
                // unmodified source frame when the controller is gone.
                request.finish(with: request.sourceImage, context: nil)
                return
            }
            weakSelf.convertFrame(request: request)
        } completionHandler: { ac, err in
            // Unwrap instead of interpolating the Optional directly (cleaner log, no warning).
            if let err {
                print("初始化coposition报错\(err)")
            } else {
                print("composition 生成ok....")
                temItem.videoComposition = ac
            }
        }
        return temItem
    }

    /// Per-frame callback of the custom video composition: produces the display image for
    /// the currently selected spatial mode and finishes the request. Never leaves the
    /// request unfinished — falls back to the source frame when conversion yields nothing.
    func convertFrame(request: AVAsynchronousCIImageFilteringRequest) {
        let compositionTime = request.compositionTime
        // A one-sample-wide time range starting at the requested composition time.
        let end: CMTime = CMTimeMake(value: Int64(compositionTime.value + 1), timescale: compositionTime.timescale)
        let tr = CMTimeRange(start: compositionTime, end: end)
        let tr_All = CMTimeRange(start: compositionTime, duration: .positiveInfinity)
        var ciImg: CIImage? = nil
        switch self.selectedIndex {
        case .monocular2D:
            // 2D: the source frame is already what we want to show.
            ciImg = request.sourceImage
        default:
            // Every stereo mode goes through the shared converter path.
            ciImg = self.otherModeImgWithMode(mode: self.selectedIndex, tr: tr, compositionTime: compositionTime, allTime: tr_All)
        }
        if let ciImg {
            print("已返回图片frame....")
            request.finish(with: ciImg, context: nil)
        } else {
            print("没有合成可用视频帧图片....准备用request.sourceImage替换")
            // Conversion failed — show the untouched source frame so playback never stalls.
            request.finish(with: request.sourceImage, context: nil)
        }
    }

    /// Produces the converted `CIImage` for any non-2D spatial mode by re-seeking the shared
    /// asset-reader output to `tr` and handing the sample to `videoTranserConvertor`.
    /// Returns nil when the output/reader is unavailable or the reader is not `.reading`.
    func otherModeImgWithMode(mode: SpatialType, tr: CMTimeRange, compositionTime: CMTime, allTime: CMTimeRange) -> CIImage? {
        guard let ao = self.assetOutput else {
            print("assetOutput 应该是没有被创建成功.....")
            return nil
        }
        // Drain queued samples before resetting — reset(forReadingTimeRanges:) requires the
        // output to be exhausted first. NOTE(review): draining on every frame is
        // O(remaining samples) per frame and may be the reproducible bug/perf issue the
        // commit message mentions — confirm against profiling.
        while ao.copyNextSampleBuffer() != nil {
            print("正在遍历。。。。1")
        }
        guard self.assetReader?.status == .reading else {
            print("assetReader status:\(self.assetReader?.status) err:\(self.assetReader?.error)")
            return nil
        }
        ao.reset(forReadingTimeRanges: [NSValue(timeRange: tr)])
        switch mode {
        case .monocular2D:
            // 2D never reaches here via convertFrame; preserved for direct callers.
            return nil
        default:
            // .parallelEyes / .crossedEyes / .redBlueSolid / .hsbs / .fsbs all shared the
            // exact same call in the original five-way switch — collapsed into one branch.
            return self.videoTranserConvertor.convertVideo(asset: self.videoOriginalAsset, assetOutput: ao, type: self.selectedIndex, time: compositionTime)
        }
    }

    /// Loads the asset's video track (once) and (re)creates the asset reader on the main
    /// queue. Errors are logged, not rethrown — matching the caller's fire-and-forget use.
    func loadVideoAssetReader() async {
        do {
            if self.assetTrack == nil {
                // BUG FIX: the original `.first!` crashed on assets without a video track
                // (e.g. audio-only files); bail out gracefully instead.
                guard let track = try await self.videoOriginalAsset.loadTracks(withMediaType: .video).first else {
                    print("Error loading video: asset has no video track")
                    return
                }
                assetTrack = track
                print("等待loading tracks。。。。。")
            }
            print("加载完毕loading tracks。。。。。")
            // Only the first second is read up front; convertFrame re-seeks per frame.
            let timeRange = CMTimeRange(start: .zero, duration: CMTime(value: 1, timescale: 1))
            DispatchQueue.main.async {
                print("进入主线程loading tracks。。。。。")
                self.quickLoadAReader(timeRange: timeRange)
            }
        } catch {
            print("Error loading video: \(error)")
        }
    }

    /// Tears down any existing reader/output, starts a fresh `AVAssetReader` over
    /// `timeRange` requesting both MV-HEVC video layers (left/right eye), then starts
    /// playback on the main queue.
    func quickLoadAReader(timeRange: CMTimeRange) {
        if assetOutput != nil {
            assetOutput?.markConfigurationAsFinal()
            assetOutput = nil
        }
        if assetReader != nil {
            assetReader?.cancelReading()
        }
        // NOTE(review): `try!` crashes if the asset cannot be read; downstream code
        // force-unwraps the reader anyway, so the crash point is kept here where it is
        // most diagnosable — consider propagating the error instead.
        assetReader = try! AVAssetReader(asset: self.videoOriginalAsset)
        assetOutput = AVAssetReaderTrackOutput(
            track: self.assetTrack!,
            outputSettings: [
                AVVideoDecompressionPropertiesKey: [
                    // Request both video layers of the MV-HEVC spatial stream.
                    kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
                ],
            ]
        )
        // Random access lets otherModeImgWithMode re-seek the output per requested frame.
        assetOutput?.supportsRandomAccess = true
        assetReader!.timeRange = timeRange
        assetReader!.add(assetOutput!)
        assetReader!.startReading()
        // Start playback only once the reader is ready to serve frames.
        DispatchQueue.main.async {
            self.player!.play()
            print("资产加载完毕。。。。开始播放player\n err:\(self.player?.error)")
        }
    }
}