视频转码修改,但是还存在一个必现的bug

This commit is contained in:
bluesea 2024-03-29 20:21:15 +08:00
parent bc7d441f0c
commit a5b4f5e5c8
4 changed files with 251 additions and 184 deletions

View File

@ -515,8 +515,8 @@
filePath = "SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController/CCSpatialVideoDisplayForVideoTask.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "83"
endingLineNumber = "83"
startingLineNumber = "63"
endingLineNumber = "63"
landmarkName = "convertFrame(request:)"
landmarkType = "7">
<Locations>
@ -563,8 +563,8 @@
filePath = "SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController/CCSpatialVideoDisplayForVideoTask.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "87"
endingLineNumber = "87"
startingLineNumber = "67"
endingLineNumber = "67"
landmarkName = "convertFrame(request:)"
landmarkType = "7">
<Locations>
@ -611,8 +611,8 @@
filePath = "SwiftProject/Project/Controller/RecordingVideo/CCSpatialVideoDisplayController/CCSpatialVideoDisplayController.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "437"
endingLineNumber = "437"
startingLineNumber = "440"
endingLineNumber = "440"
landmarkName = "navgationButtonClick2(sender:)"
landmarkType = "7">
</BreakpointContent>

View File

@ -21,16 +21,16 @@ enum SpatialType : Int {
*/
case monocular2D
case parallelEyes
case redBlueSolid
case crossedEyes
case monocular2D = 1
case parallelEyes = 0
case redBlueSolid = 2
case crossedEyes = 3
//使
case hsbs
case fsbs
case hsbs = 4
case fsbs = 5
}
class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDelegate {
class CCSpatialVideoDisplayController: BaseController {
var isAirPlayActive:Bool = false {
didSet{
@ -43,11 +43,14 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
}
else{
selectedIndex = .parallelEyes//monocular2D
typeData = [(icon:"type_check",title:"平行眼",isHiden:false),
(icon:"type_check",title:"单眼2D",isHiden:false),
typeData = [
(icon:"type_check",title:NSLocalizedString("平行眼", comment: ""),isHiden:false),
(icon:"type_check",title:NSLocalizedString("单眼2D", comment: ""),isHiden:false),
(icon:"type_check",title:"红蓝立体",isHiden:false),
(icon:"type_check",title:"交叉眼",isHiden:false)]
(icon:"type_check",title:NSLocalizedString("红蓝立体", comment: ""),isHiden:false),
(icon:"type_check",title:NSLocalizedString("交叉眼", comment: ""),isHiden:false)]
}
//UI
setttinisScreenMirroring(isScreenMirroring: isAirPlayActive)
@ -86,15 +89,10 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
//
var sourceVideoURL:URL?
let outputVideoURL:URL = URL.documentsDirectory.appending(path:"output1111.mp4")
var imgData:Data?
var player:AVPlayer?
// var playerLay:AVPlayerLayer?
// var needRestReaderTimeRange = false//
var playerController:AVPlayerViewController?
lazy var mTopImgView:UIImageView = {
@ -139,7 +137,7 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
button.layer.cornerRadius = 18
button.layer.borderWidth = 1
button.layer.borderColor = UIColor.white.cgColor
button.setTitle("平行眼", for: UIControl.State.normal)
button.setTitle(NSLocalizedString("平行眼", comment: ""), for: UIControl.State.normal)
button.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
button.updateBtnEdgeInsets(style: .Right, space: 10)
button.setTitleColor(UIColor.white, for: UIControl.State.normal)
@ -149,11 +147,11 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
var typeData:[(icon:String,title:String,isHiden:Bool)] = [(icon:"type_check",title:"平行眼",isHiden:false),
(icon:"type_check",title:"单眼2D",isHiden:false),
var typeData:[(icon:String,title:String,isHiden:Bool)] = [(icon:"type_check",title:NSLocalizedString("平行眼", comment: ""),isHiden:false),
(icon:"type_check",title:NSLocalizedString("单眼2D", comment: ""),isHiden:false),
(icon:"type_check",title:"红蓝立体",isHiden:false),
(icon:"type_check",title:"交叉眼",isHiden:false)]
(icon:"type_check",title:NSLocalizedString("红蓝立体", comment: ""),isHiden:false),
(icon:"type_check",title:NSLocalizedString("交叉眼", comment: ""),isHiden:false)]
{
didSet {
menuView.setData(datas: self.typeData)
@ -209,14 +207,15 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
//
@objc func notification_PlayerEndTime(notification:Notification){
// print("PlayerEndTime....")
print("PlayerEndTime....")
// needRestReaderTimeRange = true
}
//
@objc func notification_PlayerTimeJump(notification:Notification){
// needRestReaderTimeRange = true
// print("time jump....")
print("time jump....")
}
@ -230,8 +229,16 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
Task {
print("ccs viewdid load ---------")
await self.loadVideoAssetReader()
print("ccs viewdid load +++++++")
DispatchQueue.main.async {
self.configureInfo()
print("ccs viewdid load +++++++")
// AirPlay
self.checkAirPlayStatus()
}
}
}
func configureInfo(){
print("getPlayerItem ========")
let playerItem = self.getPlayerItem()
@ -253,11 +260,12 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
playerController = AVPlayerViewController()
playerController?.player = player
playerController?.delegate = self
playerController!.view.backgroundColor = .clear
playerController?.view.frame = CGRect.init(x: 0, y: 170, width: self.view.frame.size.width, height: 400)
self.addChild(playerController!)
self.view.addSubview(playerController!.view)
self.view.addSubview(tipsButton)
@ -265,163 +273,38 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
// AirPlay
NotificationCenter.default.addObserver(self, selector: #selector(airPlayStatusDidChange(_:)), name: AVAudioSession.routeChangeNotification, object: nil)
self.player!.play()
NotificationCenter.default.addObserver(self, selector: #selector(notification_PlayerEndTime(notification:)), name: AVPlayerItem.didPlayToEndTimeNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(notification_PlayerTimeJump(notification:)), name: AVPlayerItem.timeJumpedNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(notificatin_failedToPlayToEndTimeNotification(notifiation: )), name: AVPlayerItem.failedToPlayToEndTimeNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(notificatin_failedToPlayToEndTimeNotification(notifiation: )), name: AVPlayerItem.newErrorLogEntryNotification, object: nil)
}
func getPlayerItem() -> AVPlayerItem {
let temItem = AVPlayerItem(asset: self.videoOriginalAsset)
//
temItem.videoComposition = AVVideoComposition(asset: temItem.asset) { [weak self] request in
guard let weakSelf = self else {
print("self 被销毁了.....")
return
}
let compositionTime = request.compositionTime
let end:CMTime = CMTimeMake(value: Int64(compositionTime.value+1), timescale: compositionTime.timescale)
let tr = CMTimeRange(start: compositionTime, end: end)
// self.quickLoadAReader(timeRange: tr)
var ciImg:CIImage? = nil
switch weakSelf.selectedIndex {
case .crossedEyes://
guard let ao = weakSelf.assetOutput else {
print("assetOutput 应该是没有被创建成功.....")
return
}
while let nextSampleBuffer = ao.copyNextSampleBuffer() {
}
ao.reset(forReadingTimeRanges:[NSValue(timeRange: tr)])
ciImg = weakSelf.videoTranserConvertor.convertVideo(asset: weakSelf.videoOriginalAsset, assetOutput: ao, type: weakSelf.selectedIndex, time: compositionTime)
break
case .fsbs://3d
guard let ao = weakSelf.assetOutput else {
return
}
while let nextSampleBuffer = ao.copyNextSampleBuffer() {
}
ao.reset(forReadingTimeRanges:[NSValue(timeRange: tr)])
ciImg = weakSelf.videoTranserConvertor.convertVideo(asset: weakSelf.videoOriginalAsset, assetOutput: ao, type: weakSelf.selectedIndex, time: compositionTime)
break
case .hsbs://3d
guard let ao = weakSelf.assetOutput else {
return
}
while let nextSampleBuffer = ao.copyNextSampleBuffer() {
}
ao.reset(forReadingTimeRanges:[NSValue(timeRange: tr)])
ciImg = weakSelf.videoTranserConvertor.convertVideo(asset: weakSelf.videoOriginalAsset, assetOutput: ao, type: weakSelf.selectedIndex, time: compositionTime)
break
case .parallelEyes://
guard let ao = weakSelf.assetOutput else {
return
}
while let nextSampleBuffer = ao.copyNextSampleBuffer() {
}
ao.reset(forReadingTimeRanges:[NSValue(timeRange: tr)])
ciImg = weakSelf.videoTranserConvertor.convertVideo(asset: weakSelf.videoOriginalAsset, assetOutput: ao, type: weakSelf.selectedIndex, time: compositionTime)
break
case .monocular2D://2d
ciImg = request.sourceImage
break
case .redBlueSolid://
guard let ao = weakSelf.assetOutput else {
return
}
while let nextSampleBuffer = ao.copyNextSampleBuffer() {
}
ao.reset(forReadingTimeRanges:[NSValue(timeRange: tr)])
ciImg = weakSelf.videoTranserConvertor.convertVideo(asset: weakSelf.videoOriginalAsset, assetOutput: ao, type: weakSelf.selectedIndex, time: compositionTime)
break
}
if let ciImg {
request.finish(with: ciImg, context: nil)
}
}
return temItem
@objc func notificatin_failedToPlayToEndTimeNotification(notifiation:Notification) {
print("notificatin_failedToPlayToEndTimeNotification....")
}
//assetReaderoutput
func loadVideoAssetReader() async {
do {
if self.assetTrack == nil {
assetTrack = try await self.videoOriginalAsset.loadTracks(withMediaType: .video).first!
}
// let timeRange = CMTimeRange(start: .zero, duration: .positiveInfinity)
let timeRange = CMTimeRange(start: .zero, duration: CMTime(value: 1, timescale: 1))
self.quickLoadAReader(timeRange: timeRange)
} catch {
print("Error loading video: \(error)")
}
}
func quickLoadAReader(timeRange:CMTimeRange) {
if(assetOutput != nil){
assetOutput?.markConfigurationAsFinal()
assetOutput = nil
}
if(assetReader != nil){
assetReader?.cancelReading()
}
assetReader = try! AVAssetReader(asset: self.videoOriginalAsset)
assetOutput = AVAssetReaderTrackOutput(
track: self.assetTrack!,
outputSettings: [
AVVideoDecompressionPropertiesKey: [
kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
],
]
)
assetOutput?.supportsRandomAccess = true
assetReader!.timeRange = timeRange
assetReader!.add(assetOutput!)
assetReader!.startReading()
@objc func notificatin_newErrorLogEntryNotification(notifiation:Notification) {
print("notificatin_newErrorLogEntryNotification....")
}
override func viewDidDisappear(_ animated: Bool) {
super.viewDidDisappear(animated)
player?.pause()
let sec:TimeInterval = ZZHHelper.getSecFromUserDefaultByKey(kNowTimeToUserDefaultKey_VideoDisplayController)
Analytics.logEvent("preview_pv", parameters: ["refer_page":"视频预览页面","duration":sec])
player?.pause()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
// AirPlay
checkAirPlayStatus()
}
//MARK: -
@ -469,12 +352,12 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
//
if isPlaying {
// ---
menuView.showFooterView(isShow: true, showText: "结束串流")
mTopCenterTypeButton.setTitle("外部串流中", for: UIControl.State.normal)
menuView.showFooterView(isShow: true, showText: NSLocalizedString("结束串流", comment: ""))
mTopCenterTypeButton.setTitle(NSLocalizedString("外部串流中", comment: ""), for: UIControl.State.normal)
}else{
// ---
menuView.showFooterView(isShow: true, showText: "开始串流")
mTopCenterTypeButton.setTitle("已连接外部设备", for: UIControl.State.normal)
menuView.showFooterView(isShow: true, showText: NSLocalizedString("开始串流", comment: ""))
mTopCenterTypeButton.setTitle(NSLocalizedString("已连接外部设备", comment: ""), for: UIControl.State.normal)
}
@ -488,14 +371,14 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
playerController!.player!.allowsExternalPlayback = false
if self.selectedIndex == .monocular2D {
mTopCenterTypeButton.setTitle("单眼2D", for: UIControl.State.normal)
mTopCenterTypeButton.setTitle(NSLocalizedString("单眼2D", comment: ""), for: UIControl.State.normal)
}else if self.selectedIndex == .redBlueSolid {
mTopCenterTypeButton.setTitle("红蓝立体", for: UIControl.State.normal)
mTopCenterTypeButton.setTitle(NSLocalizedString("红蓝立体", comment: ""), for: UIControl.State.normal)
}else if self.selectedIndex == .crossedEyes {
mTopCenterTypeButton.setTitle("交叉眼", for: UIControl.State.normal)
mTopCenterTypeButton.setTitle(NSLocalizedString("交叉眼", comment: ""), for: UIControl.State.normal)
}
else if self.selectedIndex == .parallelEyes {
mTopCenterTypeButton.setTitle("平行眼", for: UIControl.State.normal)
mTopCenterTypeButton.setTitle(NSLocalizedString("平行眼", comment: ""), for: UIControl.State.normal)
}
mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_down"), for: .normal)
@ -537,12 +420,12 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
//
if isPlaying {
// ---
menuView.showFooterView(isShow: true, showText: "结束串流")
mTopCenterTypeButton.setTitle("外部串流中", for: UIControl.State.normal)
menuView.showFooterView(isShow: true, showText: NSLocalizedString("结束串流", comment: ""))
mTopCenterTypeButton.setTitle(NSLocalizedString("外部串流中", comment: ""), for: UIControl.State.normal)
}else{
// ---
menuView.showFooterView(isShow: true, showText: "开始串流")
mTopCenterTypeButton.setTitle("已连接外部设备", for: UIControl.State.normal)
menuView.showFooterView(isShow: true, showText: NSLocalizedString("开始串流", comment: ""))
mTopCenterTypeButton.setTitle(NSLocalizedString("已连接外部设备", comment: ""), for: UIControl.State.normal)
}
}else{
mTopCenterTypeButton.setImage(UIImage.init(named: "type_button_arrow_up"), for: .normal)

View File

@ -0,0 +1,184 @@
//
// CCSpatialVideoDisplayForVideoTask.swift
// SwiftProject
//
// Created by aaa on 2024/3/28.
//
import Foundation
import AVFoundation
import Photos
import AVKit
import VideoToolbox
extension CCSpatialVideoDisplayController {

    /// Builds an `AVPlayerItem` for `videoOriginalAsset` and attaches a custom
    /// CIFilter-based video composition that re-renders every frame according
    /// to the currently selected spatial display mode (`selectedIndex`).
    ///
    /// NOTE(review): `AVVideoComposition.videoComposition(with:applyingCIFiltersWithHandler:completionHandler:)`
    /// completes asynchronously, so this method returns `temItem` *before*
    /// `temItem.videoComposition` has been assigned. Any frames requested in
    /// that window render without the composition — confirm playback does not
    /// start before the completion handler fires, or assign the composition
    /// before handing the item to the player.
    func getPlayerItem() -> AVPlayerItem {
        let temItem = AVPlayerItem(asset: self.videoOriginalAsset)
        AVVideoComposition.videoComposition(with: temItem.asset) { [weak self] request in
            print("正在请求解码图片frame....")
            guard let weakSelf = self else {
                print("self 被销毁了.....")
                // BUG FIX: every filtering request must be finished exactly
                // once. Returning without calling `finish` leaves the frame
                // pending forever and stalls playback; fall back to the
                // untouched source frame instead.
                request.finish(with: request.sourceImage, context: nil)
                return
            }
            weakSelf.convertFrame(request: request)
        } completionHandler: { ac, err in
            if let err {
                print("初始化coposition报错\(err)")
            } else {
                print("composition 生成ok....")
                temItem.videoComposition = ac
            }
        }
        return temItem
    }

    /// Per-frame composition callback: converts the requested frame for the
    /// current display mode and ALWAYS finishes the request (with the raw
    /// source image as a fallback when conversion produced nothing).
    func convertFrame(request: AVAsynchronousCIImageFilteringRequest) {
        let compositionTime = request.compositionTime
        // A one-frame-wide range [t, t + 1/timescale) used to re-seek the
        // asset reader output to exactly this composition time.
        let end: CMTime = CMTimeMake(value: Int64(compositionTime.value + 1),
                                     timescale: compositionTime.timescale)
        let tr = CMTimeRange(start: compositionTime, end: end)
        let tr_All = CMTimeRange(start: compositionTime, duration: .positiveInfinity)

        var ciImg: CIImage? = nil
        switch self.selectedIndex {
        case .monocular2D:
            // 2D mode: pass the source frame straight through.
            ciImg = request.sourceImage
        default:
            // All stereo modes go through the shared reader/convertor path.
            ciImg = self.otherModeImgWithMode(mode: self.selectedIndex,
                                              tr: tr,
                                              compositionTime: compositionTime,
                                              allTime: tr_All)
        }

        if let ciImg {
            print("已返回图片frame....")
            request.finish(with: ciImg, context: nil)
        } else {
            print("没有合成可用视频帧图片....准备用request.sourceImage替换")
            request.finish(with: request.sourceImage, context: nil)
        }
    }

    /// Re-seeks the shared `assetOutput` to the one-frame range `tr` and asks
    /// `videoTranserConvertor` to produce the converted frame image.
    ///
    /// Returns `nil` when the reader/output is unavailable, not in `.reading`
    /// state, or when `mode` needs no conversion — the caller then substitutes
    /// the unmodified source frame.
    func otherModeImgWithMode(mode: SpatialType, tr: CMTimeRange, compositionTime: CMTime, allTime: CMTimeRange) -> CIImage? {
        guard let ao = self.assetOutput else {
            print("assetOutput 应该是没有被创建成功.....")
            return nil
        }
        // Drain any buffered samples so reset(forReadingTimeRanges:) starts
        // from a clean output state (required for random-access re-reading).
        while ao.copyNextSampleBuffer() != nil {
            print("正在遍历。。。。1")
        }
        // reset(forReadingTimeRanges:) is only valid while the reader is
        // actively reading; bail out otherwise instead of crashing.
        guard self.assetReader?.status == .reading else {
            print("assetReader status:\(self.assetReader?.status) err:\(self.assetReader?.error)")
            return nil
        }
        ao.reset(forReadingTimeRanges: [NSValue(timeRange: tr)])

        switch mode {
        case .crossedEyes, .fsbs, .hsbs, .parallelEyes, .redBlueSolid:
            // Every stereo mode shares one conversion entry point; the
            // convertor branches on `type` internally. (The five formerly
            // duplicated cases are collapsed into one.)
            return self.videoTranserConvertor.convertVideo(asset: self.videoOriginalAsset,
                                                           assetOutput: ao,
                                                           type: self.selectedIndex,
                                                           time: compositionTime)
        default:
            // .monocular2D never reaches here (handled by the caller).
            return nil
        }
    }

    /// Loads the video track of `videoOriginalAsset` (once) and then creates
    /// the asset reader/output on the main queue.
    func loadVideoAssetReader() async {
        do {
            if self.assetTrack == nil {
                // BUG FIX: `.first!` crashed on assets without a video track;
                // guard and report instead.
                guard let track = try await self.videoOriginalAsset.loadTracks(withMediaType: .video).first else {
                    print("Error loading video: no video track found")
                    return
                }
                assetTrack = track
                print("等待loading tracks。。。。。")
            }
            // let timeRange = CMTimeRange(start: .zero, duration: .positiveInfinity)
            print("加载完毕loading tracks。。。。。")
            let timeRange = CMTimeRange(start: .zero, duration: CMTime(value: 1, timescale: 1))
            DispatchQueue.main.async {
                print("进入主线程loading tracks。。。。。")
                self.quickLoadAReader(timeRange: timeRange)
            }
        } catch {
            print("Error loading video: \(error)")
        }
    }

    /// Tears down any existing reader/output and creates a fresh
    /// `AVAssetReader` + random-access `AVAssetReaderTrackOutput` configured
    /// to decode both MV-HEVC video layers (layer IDs 0 and 1), restricted to
    /// `timeRange`, then starts reading and (re)starts playback.
    func quickLoadAReader(timeRange: CMTimeRange) {
        // Release the previous output/reader pair before building a new one.
        if assetOutput != nil {
            assetOutput?.markConfigurationAsFinal()
            assetOutput = nil
        }
        if assetReader != nil {
            assetReader?.cancelReading()
        }
        // NOTE(review): `try!` crashes if the asset cannot be read (e.g. file
        // removed); consider do/catch with a user-visible error path.
        assetReader = try! AVAssetReader(asset: self.videoOriginalAsset)
        assetOutput = AVAssetReaderTrackOutput(
            track: self.assetTrack!,
            outputSettings: [
                AVVideoDecompressionPropertiesKey: [
                    kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
                ],
            ]
        )
        // Random access is required for the per-frame reset(forReadingTimeRanges:)
        // calls performed in otherModeImgWithMode(...).
        assetOutput?.supportsRandomAccess = true
        assetReader!.timeRange = timeRange
        assetReader!.add(assetOutput!)
        assetReader!.startReading()
        // Start playback once the reader is ready.
        DispatchQueue.main.async {
            // BUG FIX: `self.player!` crashed when the reader finished loading
            // before `configureInfo()` created the player (reader loading is
            // kicked off first in viewDidLoad). Optional chaining makes this
            // a no-op in that case; `configureInfo` starts playback itself.
            self.player?.play()
            print("资产加载完毕。。。。开始播放player\n err:\(self.player?.error)")
        }
    }
}