// VPCamera3/tdvideo/tdvideo/转码/PlayControllerImg.swift
// 2024-03-05 11:44:34 +08:00
//
// 284 lines
// 9.8 KiB
// Swift

//
// PlayControllerImg.swift
// tdvideo
//
// Created by mac on 2024/2/1.
//
/*
func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
let image = photos[indexPath.item]
let h = isSpatialImage(from: image)
self.navigationController?.popViewController(animated: true)
}
//
DispatchQueue.main.async { [self] in
collectionView.reloadData()
}
*/
import UIKit
import Photos
import ImageIO
import CoreFoundation
import UIKit
import Photos
import ImageIO
import CoreGraphics
import MobileCoreServices
import AVKit
/// Collection view cell that shows a photo thumbnail plus a small red badge
/// with the image's frame count (used to flag multi-frame/animated images).
class PhotoCell: UICollectionViewCell {
    /// Thumbnail image; aspect-fill + clipping so thumbnails tile uniformly.
    let imageView: UIImageView = {
        let imageView = UIImageView()
        imageView.contentMode = .scaleAspectFill
        imageView.clipsToBounds = true
        return imageView
    }()

    /// Top-left badge showing the frame count; hidden by the data source
    /// when the count is <= 1.
    let frameCountLabel: UILabel = {
        let label = UILabel()
        label.textColor = .white
        label.backgroundColor = .red
        label.textAlignment = .center
        label.font = UIFont.boldSystemFont(ofSize: 12)
        label.layer.cornerRadius = 8
        label.clipsToBounds = true
        return label
    }()

    override init(frame: CGRect) {
        super.init(frame: frame)
        setupViews()
    }

    required init?(coder: NSCoder) {
        super.init(coder: coder)
        setupViews()
    }

    /// Builds the cell's view hierarchy and Auto Layout constraints.
    private func setupViews() {
        // Fix: cell subviews must be added to `contentView`, not the cell
        // itself, so they participate correctly in cell layout, selection,
        // and editing animations (UICollectionViewCell contract).
        contentView.addSubview(imageView)
        contentView.addSubview(frameCountLabel)
        imageView.translatesAutoresizingMaskIntoConstraints = false
        frameCountLabel.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            // Thumbnail fills the whole content area.
            imageView.topAnchor.constraint(equalTo: contentView.topAnchor),
            imageView.leadingAnchor.constraint(equalTo: contentView.leadingAnchor),
            imageView.trailingAnchor.constraint(equalTo: contentView.trailingAnchor),
            imageView.bottomAnchor.constraint(equalTo: contentView.bottomAnchor),
            // Fixed-size badge pinned to the top-left corner.
            frameCountLabel.topAnchor.constraint(equalTo: contentView.topAnchor, constant: 8),
            frameCountLabel.leadingAnchor.constraint(equalTo: contentView.leadingAnchor, constant: 8),
            frameCountLabel.widthAnchor.constraint(equalToConstant: 40),
            frameCountLabel.heightAnchor.constraint(equalToConstant: 20)
        ])
    }
}
/// Grid-style picker that lists every image in the photo library and hands
/// the raw image `Data` of the tapped photo back through
/// `mediaSelectedHandler`, then dismisses itself.
class PlayControllerImg: UIViewController, UICollectionViewDataSource, UICollectionViewDelegateFlowLayout {
    var collectionView: UICollectionView!
    var fetchResult: PHFetchResult<PHAsset>!
    /// Thumbnails that decoded successfully; kept index-aligned with
    /// `loadedAssets` and `frameCounts`.
    var photos: [UIImage] = []
    /// Invoked with the selected photo's raw data before the picker dismisses.
    var mediaSelectedHandler: ((Data) -> Void)?

    /// Assets that actually produced an entry in `photos`.
    /// Fix: the original indexed `fetchResult` with `indexPath.item`, which
    /// drifts out of sync with `photos` whenever an asset's data fails to
    /// decode — selecting or labelling the wrong photo. Indexing this
    /// parallel array instead keeps cell index and asset aligned.
    private var loadedAssets: [PHAsset] = []
    /// Frame count per loaded photo, computed once at fetch time from data
    /// already in hand, instead of synchronously re-requesting the full
    /// image data for every cell during scrolling.
    private var frameCounts: [Int] = []

    override func viewDidLoad() {
        super.viewDidLoad()
        setupCollectionView()
        fetchPhotos()
        collectionView.dataSource = self
        collectionView.delegate = self
    }

    /// Creates the full-screen collection view and registers the cell class.
    private func setupCollectionView() {
        let layout = UICollectionViewFlowLayout()
        layout.minimumLineSpacing = 10
        layout.minimumInteritemSpacing = 10
        collectionView = UICollectionView(frame: view.bounds, collectionViewLayout: layout)
        collectionView.backgroundColor = .white
        collectionView.register(PhotoCell.self, forCellWithReuseIdentifier: "PhotoCell")
        view.addSubview(collectionView)
        collectionView.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            collectionView.topAnchor.constraint(equalTo: view.topAnchor),
            collectionView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
            collectionView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
            collectionView.bottomAnchor.constraint(equalTo: view.bottomAnchor)
        ])
    }

    /// Loads every library image synchronously, populating `photos`,
    /// `loadedAssets`, and `frameCounts` in lockstep.
    ///
    /// NOTE(review): the synchronous requests block the main thread for the
    /// whole library — acceptable only for small libraries; consider moving
    /// to async delivery. `requestImageData` is deprecated since iOS 13 in
    /// favor of `requestImageDataAndOrientation`; kept here for deployment-
    /// target compatibility.
    func fetchPhotos() {
        let fetchOptions = PHFetchOptions()
        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
        fetchResult = PHAsset.fetchAssets(with: .image, options: fetchOptions)
        // One reusable options object; synchronous so the arrays are fully
        // populated before the collection view first asks for data.
        let requestOptions = PHImageRequestOptions()
        requestOptions.isSynchronous = true
        requestOptions.deliveryMode = .highQualityFormat
        for index in 0..<fetchResult.count {
            let asset = fetchResult.object(at: index)
            guard asset.mediaType == .image else { continue }
            PHImageManager.default().requestImageData(for: asset, options: requestOptions) { (imageData, _, _, _) in
                guard let imageData = imageData, let image = UIImage(data: imageData) else {
                    // Skip undecodable assets entirely so all three arrays
                    // stay index-aligned.
                    return
                }
                self.photos.append(image)
                self.loadedAssets.append(asset)
                // Frame count straight from the bytes we already have;
                // animated/multi-frame images report > 1.
                if let source = CGImageSourceCreateWithData(imageData as CFData, nil) {
                    self.frameCounts.append(CGImageSourceGetCount(source))
                } else {
                    self.frameCounts.append(0)
                }
            }
        }
    }

    /// Returns `true` if the asset's QuickTime metadata carries the Apple
    /// spatial-video format-version key (synchronous variant).
    func isSpatialVideo(asset: AVAsset) -> Bool {
        let metadata = asset.metadata(forFormat: AVMetadataFormat.quickTimeMetadata)
        return metadata.contains { item in
            item.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version"
        }
    }

    /// Async variant of `isSpatialVideo(asset:)` using `loadMetadata(for:)`.
    /// - Throws: rethrows metadata-loading errors from AVFoundation.
    func isSSVideo(asset: AVAsset) async throws -> Bool {
        let userDataItems = try await asset.loadMetadata(for: .quickTimeMetadata)
        return userDataItems.contains {
            $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version"
        }
    }

    // MARK: - UICollectionViewDataSource

    func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
        return photos.count
    }

    func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
        let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "PhotoCell", for: indexPath) as! PhotoCell
        cell.imageView.image = photos[indexPath.item]
        let frameCount = getFrameCount(for: indexPath.item)
        // Badge is only interesting for multi-frame images.
        cell.frameCountLabel.isHidden = frameCount <= 1
        cell.frameCountLabel.text = "\(frameCount)"
        return cell
    }

    // MARK: - UICollectionViewDelegateFlowLayout

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAt indexPath: IndexPath) -> CGSize {
        // Three square cells per row, minus spacing.
        let width = collectionView.bounds.width / 3 - 10
        return CGSize(width: width, height: width)
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, minimumLineSpacingForSectionAt section: Int) -> CGFloat {
        return 10
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, minimumInteritemSpacingForSectionAt section: Int) -> CGFloat {
        return 10
    }

    func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
        // Bounds guard plus index-aligned asset lookup (fixes the original
        // `fetchResult.object(at:)` drift when some assets failed to load).
        guard indexPath.item < loadedAssets.count else { return }
        let asset = loadedAssets[indexPath.item]
        guard asset.mediaType == .image else { return }
        let requestOptions = PHImageRequestOptions()
        requestOptions.isSynchronous = true
        requestOptions.deliveryMode = .highQualityFormat
        PHImageManager.default().requestImageData(for: asset, options: requestOptions) { (imageData, _, _, _) in
            if let imageData = imageData {
                self.mediaSelectedHandler?(imageData)
            }
            // Dismiss on the main thread whether or not data was delivered,
            // matching the original flow.
            DispatchQueue.main.async {
                self.dismiss(animated: true, completion: nil)
            }
        }
    }

    /// Resolves the local file URL for a video asset.
    /// NOTE(review): the completion is never called for non-video assets or
    /// non-URL-backed AVAssets — callers must not rely on it always firing.
    private func getMediaURL(from asset: PHAsset, completion: @escaping (URL) -> Void) {
        guard asset.mediaType == .video else { return }
        let requestOptions = PHVideoRequestOptions()
        requestOptions.isNetworkAccessAllowed = true
        PHImageManager.default().requestAVAsset(forVideo: asset, options: requestOptions) { (avAsset, _, _) in
            if let avAsset = avAsset as? AVURLAsset {
                completion(avAsset.url)
            }
        }
    }

    /// Frame count for the item at `index`; 0 when unknown.
    /// Served from the `frameCounts` cache built in `fetchPhotos()` —
    /// avoids the original per-cell synchronous full-data request.
    private func getFrameCount(for index: Int) -> Int {
        guard index < frameCounts.count else { return 0 }
        return frameCounts[index]
    }

    /// Synchronously fetches the full original data for `asset`; `nil` on
    /// failure (e.g. iCloud-only asset with network access disallowed).
    private func getImageData(for asset: PHAsset) -> Data? {
        var imageData: Data?
        let requestOptions = PHImageRequestOptions()
        requestOptions.isSynchronous = true
        requestOptions.deliveryMode = .highQualityFormat
        PHImageManager.default().requestImageData(for: asset, options: requestOptions) { (data, _, _, _) in
            imageData = data
        }
        return imageData
    }

    /// Encodes a CGImage as PNG into a CFData buffer; `nil` on allocation
    /// failure. NOTE(review): `kUTTypePNG` is deprecated since iOS 15 —
    /// migrate to `UTType.png.identifier` when the deployment target allows.
    func convertCGImageToCFData(cgImage: CGImage) -> CFData? {
        let data = CFDataCreateMutable(kCFAllocatorDefault, 0)
        if let data = data,
           let destination = CGImageDestinationCreateWithData(data, kUTTypePNG, 1, nil) {
            CGImageDestinationAddImage(destination, cgImage, nil)
            CGImageDestinationFinalize(destination)
        }
        return data
    }
}