修正判断视频是否为空间视频的逻辑

This commit is contained in:
bluesea 2024-03-15 16:25:32 +08:00
parent 40c3a4747e
commit a6aefd1a81
5 changed files with 55 additions and 30 deletions

View File

@ -611,21 +611,6 @@ class CCSpatialVideoDisplayController: BaseController, AVPlayerViewControllerDel
} }
//
/// Returns `true` when the asset's QuickTime metadata carries the
/// Apple spatial-video format-version key.
/// - Parameter asset: The asset whose metadata is inspected.
func isSpatialVideo(asset: AVAsset) -> Bool {
    let spatialFormatKey = "mdta/com.apple.quicktime.spatial.format-version"
    let items = asset.metadata(forFormat: AVMetadataFormat.quickTimeMetadata)
    for item in items {
        // Items without an identifier can never match the spatial key.
        guard let identifier = item.identifier?.rawValue else { continue }
        if identifier == spatialFormatKey {
            return true
        }
    }
    return false
}
//AVPlayerViewControllerDelegate
} }

View File

@ -33,12 +33,6 @@ class SpatialVideoConvertor {
let assetReader = try AVAssetReader(asset: asset) let assetReader = try AVAssetReader(asset: asset)
// print("")
let userDataItems = try await asset.loadMetadata(for:.quickTimeMetadata)
let spacialCharacteristics = userDataItems.filter { $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version" }
if spacialCharacteristics.count == 0 {
print("不是空间视频")
}
//() //()
let (orientation, videoSize) = try await getOrientationAndResolutionSizeForVideo(asset: asset) let (orientation, videoSize) = try await getOrientationAndResolutionSizeForVideo(asset: asset)

View File

@ -54,6 +54,31 @@ class SpatialVideoWriter {
assetWriter.add(input_video) assetWriter.add(input_video)
//
let metadataItem_1 = AVMutableMetadataItem()
metadataItem_1.identifier = .quickTimeMetadataDescription
metadataItem_1.dataType = kCMMetadataDataType_QuickTimeMetadataDirection as String
metadataItem_1.value = String("ff") as (NSCopying & NSObjectProtocol)
let metadataItem_2 = AVMutableMetadataItem()
metadataItem_2.identifier = .quickTimeMetadataInformation
metadataItem_2.dataType = kCMMetadataDataType_QuickTimeMetadataDirection as String
metadataItem_2.value = String("ff") as (NSCopying & NSObjectProtocol)
let atmg:AVMutableTimedMetadataGroup = AVMutableTimedMetadataGroup(items: [metadataItem_1,metadataItem_2], timeRange: CMTimeRange(start: .zero, duration: .positiveInfinity))
let desc:CMMetadataFormatDescription? = atmg.copyFormatDescription()
let input_metadata = AVAssetWriterInput.init(mediaType: .metadata, outputSettings: nil, sourceFormatHint: desc)
let metadataAdaptor = AVAssetWriterInputMetadataAdaptor(assetWriterInput: input_metadata)
assetWriter.add(input_metadata)
// //
let inputSettings_Audio = [ let inputSettings_Audio = [
AVFormatIDKey: kAudioFormatLinearPCM, // AVFormatIDKey: kAudioFormatLinearPCM, //
@ -76,11 +101,12 @@ class SpatialVideoWriter {
else { else {
print("assetWriter 添加writerInput_Audio_left失败...") print("assetWriter 添加writerInput_Audio_left失败...")
} }
// AVMetadataFormat
assetWriter.startWriting() assetWriter.startWriting()
assetWriter.startSession(atSourceTime: .zero) assetWriter.startSession(atSourceTime: .zero)
// //
let leftEyeReader = try AVAssetReader(asset: leftEyeAsset) let leftEyeReader = try AVAssetReader(asset: leftEyeAsset)
let rightEyeReader = try AVAssetReader(asset: rightEyeAsset) let rightEyeReader = try AVAssetReader(asset: rightEyeAsset)
@ -125,6 +151,9 @@ class SpatialVideoWriter {
while let leftBuffer = leftEyeOutput.copyNextSampleBuffer(), while let leftBuffer = leftEyeOutput.copyNextSampleBuffer(),
let rightBuffer = rightEyeOutput.copyNextSampleBuffer() { let rightBuffer = rightEyeOutput.copyNextSampleBuffer() {
@ -148,12 +177,18 @@ class SpatialVideoWriter {
// writerInput // writerInput
Thread.sleep(forTimeInterval: 0.1) // Thread.sleep(forTimeInterval: 0.1) //
} }
adaptor_inputVideo.appendTaggedBuffers([left, right], withPresentationTime: leftBuffer.presentationTimeStamp) let appendResult = adaptor_inputVideo.appendTaggedBuffers([left, right], withPresentationTime: leftBuffer.presentationTimeStamp)
print("appendResult :\(appendResult)")
} }
self.addAudio(assetTrackOutput: output_audio_left, audio_input: writerInput_Audio_left) self.addAudio(assetTrackOutput: output_audio_left, audio_input: writerInput_Audio_left)
//
let appendATMGResult = metadataAdaptor.append(atmg)
print("atmg result:\(appendATMGResult)")
// //
print("完成写入") print("完成写入")
writerInput_Audio_left.markAsFinished() writerInput_Audio_left.markAsFinished()

View File

@ -78,12 +78,6 @@ class VideoConvertor2 {
let assetReader = try AVAssetReader(asset: asset) let assetReader = try AVAssetReader(asset: asset)
// print("")
let userDataItems = try await asset.loadMetadata(for:.quickTimeMetadata)
let spacialCharacteristics = userDataItems.filter { $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version" }
if spacialCharacteristics.count == 0 {
print("不是空间视频")
}
//() //()
let (orientation, videoSize) = try await getOrientationAndResolutionSizeForVideo(asset: asset) let (orientation, videoSize) = try await getOrientationAndResolutionSizeForVideo(asset: asset)

View File

@ -6,6 +6,7 @@
// //
import Foundation import Foundation
import AVFoundation
class ZZHHelper { class ZZHHelper {
@ -33,4 +34,20 @@ class ZZHHelper {
return "\(hoursText):\(minutesText):\(secondsText)" return "\(hoursText):\(minutesText):\(secondsText)"
} }
} }
//MARK: - Spatial video detection
/// Checks whether `asset` should be treated as a spatial video by
/// inspecting the format descriptions of its video tracks.
/// - Parameter asset: The asset to inspect.
/// - Returns: `true` if any video track's format description reports a
///   `FormatName` extension of `"HEVC"`, otherwise `false`.
/// - NOTE(review): matching plain "HEVC" also flags ordinary (non-spatial)
///   HEVC videos; spatial video is MV-HEVC — confirm the expected
///   `FormatName` value against a known spatial asset.
class func isSpatialVideo(asset: AVAsset) -> Bool {
    // Only video tracks are relevant; skip audio/metadata tracks up front.
    for track in asset.tracks(withMediaType: .video) {
        for case let formatDesc as CMFormatDescription in track.formatDescriptions {
            // Extensions may be nil; the previous force cast (`as!`)
            // crashed on format descriptions without an extensions dictionary.
            guard let extensions = CMFormatDescriptionGetExtensions(formatDesc) as? [String: Any] else {
                continue
            }
            if let formatName = extensions["FormatName"] as? String, formatName == "HEVC" {
                return true
            }
        }
    }
    return false
}
} }