VPCamera/tdvideo/tdvideo/PlayControllerVideo.swift
2024-03-05 11:44:34 +08:00

278 lines
10 KiB
Swift

//
// PlayControllerVideo.swift
// tdvideo
//
// Created by mac on 2024/2/4.
//
import UIKit
import Photos
import ImageIO
import CoreFoundation
import UIKit
import Photos
import ImageIO
import CoreGraphics
import MobileCoreServices
import AVKit
/// Grid cell showing a video thumbnail with a red frame-count badge
/// in the top-left corner.
class PhotoCell2: UICollectionViewCell {
    /// Thumbnail image; fills the cell, cropping to preserve aspect ratio.
    let imageView: UIImageView = {
        let imageView = UIImageView()
        imageView.contentMode = .scaleAspectFill
        imageView.clipsToBounds = true
        return imageView
    }()
    /// Badge displaying the asset's frame count (hidden by the controller
    /// when the count is <= 1).
    let frameCountLabel: UILabel = {
        let label = UILabel()
        label.textColor = .white
        label.backgroundColor = .red
        label.textAlignment = .center
        label.font = UIFont.boldSystemFont(ofSize: 12)
        label.layer.cornerRadius = 8
        label.clipsToBounds = true
        return label
    }()
    override init(frame: CGRect) {
        super.init(frame: frame)
        setupViews()
    }
    required init?(coder: NSCoder) {
        super.init(coder: coder)
        setupViews()
    }
    /// Builds the view hierarchy and Auto Layout constraints.
    private func setupViews() {
        // Fix: a UICollectionViewCell's subviews belong on `contentView`,
        // not on the cell itself, so selection/editing chrome layers
        // correctly. Constraints are pinned to contentView for the same
        // reason.
        contentView.addSubview(imageView)
        contentView.addSubview(frameCountLabel)
        imageView.translatesAutoresizingMaskIntoConstraints = false
        frameCountLabel.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            imageView.topAnchor.constraint(equalTo: contentView.topAnchor),
            imageView.leadingAnchor.constraint(equalTo: contentView.leadingAnchor),
            imageView.trailingAnchor.constraint(equalTo: contentView.trailingAnchor),
            imageView.bottomAnchor.constraint(equalTo: contentView.bottomAnchor),
            frameCountLabel.topAnchor.constraint(equalTo: contentView.topAnchor, constant: 8),
            frameCountLabel.leadingAnchor.constraint(equalTo: contentView.leadingAnchor, constant: 8),
            frameCountLabel.widthAnchor.constraint(equalToConstant: 40),
            frameCountLabel.heightAnchor.constraint(equalToConstant: 20)
        ])
    }
}
/// Presents a grid of the library's *spatial* videos and hands the selected
/// `AVAsset` back through `mediaSelectedHandler`, then dismisses itself.
class PlayControllerVideo: UIViewController, UICollectionViewDataSource, UICollectionViewDelegateFlowLayout {
    var collectionView: UICollectionView!
    /// Every video in the library, newest first (unfiltered).
    var fetchResult: PHFetchResult<PHAsset>!
    /// Thumbnails of the spatial videos only; parallel to `spatialAssets`.
    var photos: [UIImage] = []
    /// The `PHAsset`s backing `photos`, in the same order.
    ///
    /// Fix: the grid shows only *filtered* (spatial) videos, but selection and
    /// frame-count lookups previously indexed the *unfiltered* `fetchResult`,
    /// returning the wrong asset whenever the library contained any
    /// non-spatial video. This parallel array keeps grid index == asset index.
    private var spatialAssets: [PHAsset] = []
    /// Called with the `AVAsset` of the video the user taps.
    var mediaSelectedHandler: ((AVAsset) -> Void)?

    override func viewDidLoad() {
        super.viewDidLoad()
        setupCollectionView()
        fetchPhotos()
        collectionView.dataSource = self
        collectionView.delegate = self
    }

    /// Creates the full-screen collection view and registers the cell class.
    private func setupCollectionView() {
        let layout = UICollectionViewFlowLayout()
        layout.minimumLineSpacing = 10
        layout.minimumInteritemSpacing = 10
        collectionView = UICollectionView(frame: view.bounds, collectionViewLayout: layout)
        collectionView.backgroundColor = .white
        collectionView.register(PhotoCell2.self, forCellWithReuseIdentifier: "PhotoCell2")
        view.addSubview(collectionView)
        collectionView.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            collectionView.topAnchor.constraint(equalTo: view.topAnchor),
            collectionView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
            collectionView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
            collectionView.bottomAnchor.constraint(equalTo: view.bottomAnchor)
        ])
    }

    /// Fetches all videos, filters for spatial ones, and populates
    /// `photos`/`spatialAssets` with first-frame thumbnails as results arrive.
    func fetchPhotos() {
        let fetchOptions = PHFetchOptions()
        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
        fetchResult = PHAsset.fetchAssets(with: .video, options: fetchOptions)
        for index in 0..<fetchResult.count {
            let asset = fetchResult.object(at: index)
            guard asset.mediaType == .video else { continue }
            let requestOptions = PHVideoRequestOptions()
            requestOptions.isNetworkAccessAllowed = true
            PHImageManager.default().requestAVAsset(forVideo: asset, options: requestOptions) { [weak self] (avAsset, _, _) in
                guard let self, let avAsset else { return }
                Task {
                    do {
                        // Use the async, non-deprecated metadata load rather
                        // than the synchronous `metadata(forFormat:)` path.
                        guard try await self.isSSVideo(asset: avAsset) else { return }
                        let generator = AVAssetImageGenerator(asset: avAsset)
                        generator.appliesPreferredTrackTransform = true
                        let time = CMTime(seconds: 0, preferredTimescale: 1)
                        // `copyCGImage(at:actualTime:)` is synchronous; the
                        // original `try await` on it was spurious.
                        let imageRef = try generator.copyCGImage(at: time, actualTime: nil)
                        let thumbnail = UIImage(cgImage: imageRef)
                        // requestAVAsset callbacks can arrive concurrently on
                        // background queues: mutate state and touch UIKit only
                        // on the main actor to avoid races and off-main UI work.
                        await MainActor.run {
                            self.photos.append(thumbnail)
                            self.spatialAssets.append(asset)
                            self.collectionView.reloadData()
                        }
                    } catch {
                        // Thumbnail generation failed for this asset; skip it.
                    }
                }
            }
        }
    }

    /// Synchronous check for the spatial-video marker in QuickTime metadata.
    /// NOTE: `AVAsset.metadata(forFormat:)` is the legacy synchronous API;
    /// prefer `isSSVideo(asset:)` in async contexts.
    func isSpatialVideo(asset: AVAsset) -> Bool {
        let metadata = asset.metadata(forFormat: AVMetadataFormat.quickTimeMetadata)
        return metadata.contains { item in
            item.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version"
        }
    }

    /// Async check for the spatial-video marker via `loadMetadata(for:)`.
    /// - Throws: Any error from loading the asset's QuickTime metadata.
    func isSSVideo(asset: AVAsset) async throws -> Bool {
        let userDataItems = try await asset.loadMetadata(for: .quickTimeMetadata)
        return userDataItems.contains {
            $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version"
        }
    }

    // MARK: - UICollectionViewDataSource

    func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
        return photos.count
    }

    func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
        let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "PhotoCell2", for: indexPath) as! PhotoCell2
        cell.imageView.image = photos[indexPath.item]
        let frameCount = getFrameCount(for: indexPath.item)
        cell.frameCountLabel.isHidden = frameCount <= 1
        cell.frameCountLabel.text = "\(frameCount)"
        return cell
    }

    // MARK: - UICollectionViewDelegateFlowLayout

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAt indexPath: IndexPath) -> CGSize {
        // Three columns with the 10pt spacing configured on the layout.
        let width = collectionView.bounds.width / 3 - 10
        return CGSize(width: width, height: width)
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, minimumLineSpacingForSectionAt section: Int) -> CGFloat {
        return 10
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, minimumInteritemSpacingForSectionAt section: Int) -> CGFloat {
        return 10
    }

    /// Resolves the tapped item's `AVAsset` and forwards it to
    /// `mediaSelectedHandler`, then dismisses the picker.
    func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
        // Fix: index into the filtered `spatialAssets`, not the unfiltered
        // `fetchResult`, so the selected grid item maps to the right asset.
        guard indexPath.item < spatialAssets.count else { return }
        let asset = spatialAssets[indexPath.item]
        let requestOptions = PHVideoRequestOptions()
        requestOptions.isNetworkAccessAllowed = true
        PHImageManager.default().requestAVAsset(forVideo: asset, options: requestOptions) { [weak self] (avAsset, _, _) in
            guard let self else { return }
            // Fix: no force-unwraps — a nil handler or a failed request
            // previously crashed here.
            if let avAsset, let handler = self.mediaSelectedHandler {
                handler(avAsset)
            }
            DispatchQueue.main.async {
                self.dismiss(animated: true, completion: nil)
            }
        }
    }

    /// Presents a full-screen `AVPlayerViewController` for the given asset
    /// and starts playback.
    func playVideo(asset: PHAsset) {
        PHImageManager.default().requestPlayerItem(forVideo: asset, options: nil) { [weak self] (playerItem, _) in
            guard let playerItem = playerItem else { return }
            DispatchQueue.main.async {
                let playerViewController = AVPlayerViewController()
                playerViewController.player = AVPlayer(playerItem: playerItem)
                self?.present(playerViewController, animated: true) {
                    playerViewController.player?.play()
                }
            }
        }
    }

    /// Frame count of the item at `index` in the grid, via its still-image
    /// data; returns 0 when unavailable or out of bounds.
    private func getFrameCount(for index: Int) -> Int {
        // Fix: same filtered-vs-unfiltered index mismatch as didSelectItemAt;
        // also bounds-checked against the (asynchronously growing) array.
        guard index < spatialAssets.count else { return 0 }
        let asset = spatialAssets[index]
        if let imageData = getImageData(for: asset),
           let cgImageSource = CGImageSourceCreateWithData(imageData as CFData, nil) {
            return CGImageSourceGetCount(cgImageSource)
        }
        return 0
    }

    /// Synchronously fetches the asset's image data (high quality, network
    /// allowed by Photos defaults); nil when the request yields no data.
    private func getImageData(for asset: PHAsset) -> Data? {
        var imageData: Data?
        let requestOptions = PHImageRequestOptions()
        requestOptions.isSynchronous = true
        requestOptions.deliveryMode = .highQualityFormat
        // Fix: `requestImageData(for:options:)` is deprecated since iOS 13;
        // the orientation variant has the same data payload.
        PHImageManager.default().requestImageDataAndOrientation(for: asset, options: requestOptions) { (data, _, _, _) in
            imageData = data
        }
        return imageData
    }

    /// Encodes a `CGImage` as PNG into a `CFData` buffer; nil only if the
    /// mutable buffer could not be allocated.
    func convertCGImageToCFData(cgImage: CGImage) -> CFData? {
        let data = CFDataCreateMutable(kCFAllocatorDefault, 0)
        if let data = data {
            if let destination = CGImageDestinationCreateWithData(data, kUTTypePNG, 1, nil) {
                CGImageDestinationAddImage(destination, cgImage, nil)
                CGImageDestinationFinalize(destination)
            }
        }
        return data
    }
}