VPCamera/SwiftProject/SwiftProject/Project/View/Album/CCSpaceAlbumFliterPopView/CCSpaceAlbumFliterPopView.swift
2024-03-22 12:57:10 +08:00

677 lines
23 KiB
Swift
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

//
// CCSpaceAlbumFliterPopView.swift
// SwiftProject
//
// Created by Zhang, Joyce on 2024/3/3.
//
import UIKit
import Photos
import ImageIO
import CoreFoundation
import UIKit
import Photos
import ImageIO
import CoreGraphics
import MobileCoreServices
import AVKit
import CoreMedia
// Height of the bottom sheet (`backView`), scaled through the project's KHZSize helper.
private let Back_Height:CGFloat = KHZSize(700)
/// Media filter applied by `CCSpaceAlbumFliterPopView.fetchPhotos()`.
/// NOTE(review): raw values are positional (all = 0, image = 1, video = 2)
/// and match the filter-menu row indices — do not reorder cases.
enum SelectedFileType : Int {
    case all
    case image
    case video
}
/// Bottom-sheet popup, presented over the key window, that lists "spatial"
/// photos and videos from the system photo library and returns the user's
/// pick through `selectedImageHandler` / `selectedVideoHandler`.
class CCSpaceAlbumFliterPopView: UIView {
    /// Media filter currently applied by `fetchPhotos()` (all / image / video).
    var selectedFileType:SelectedFileType = .all
    /// Data source for the grid — one model per asset that passed the filter.
    var allAssetItemModelArr:[CCSpaceAlbumFliterModel] = []
    // NOTE(review): `collectionView`, `fetchResult` and `photos` are never
    // read or written anywhere in this file — likely dead; confirm before removing.
    var collectionView: UICollectionView!
    var fetchResult: PHFetchResult<PHAsset>!
    var photos: [UIImage] = []
    /// Invoked after a photo is chosen: full image data plus its asset.
    var selectedImageHandler: ((Data,PHAsset) -> Void)?
    /// Invoked after a video is chosen: local file URL plus its asset.
    var selectedVideoHandler: ((URL,PHAsset) -> Void)?
    /// Drop-down filter menu anchored near `filtersButton`.
    lazy var menuView: CCPopMenuView = {
        // Row titles: "全部" (all), "空间照片" (spatial photos), "空间视频" (spatial videos).
        // Row index maps to `SelectedFileType` in `filtersButtonAction`.
        let popData = [(icon:"",title:"全部"),
        (icon:"",title:"空间照片"),
        (icon:"",title:"空间视频")
        ]
        // Menu appearance configuration.
        let parameters:[CCPopMenuConfigure] = [
        .PopMenuTextColor(UIColor.white),
        .popMenuItemHeight(30),
        .PopMenuTextFont(KFont_Medium(10)),
        .PopMenuBackgroudColor(UIColor(hexString: "#383739"))
        ]
        // Anchor point for the menu arrow, in window coordinates.
        // NOTE(review): `filtersButton.center` is expressed in the button's
        // superview's coordinate space, but is converted here from the button's
        // own space — verify the arrow lands where intended.
        let pointOnScreen = filtersButton.convert(filtersButton.center, to: KWindow)
        let popMenu = CCPopMenuView(menuWidth: 90, arrow: pointOnScreen, datas: popData,configures: parameters)
        return popMenu
    }()
    /// Sheet container; starts just below the screen and animates up in `show()`.
    lazy var backView: UIView = {
        let imageView = UIView(frame: CGRectMake(0, KScreenHeight + 10 , KScreenWidth, Back_Height))
        imageView.backgroundColor = UIColor(hexString: "#1F1E20")
        imageView.addCorner(conrners: [.topLeft,.topRight], radius: 12.0)
        return imageView
    }()
    /// 58pt-tall header bar holding the cancel button, title and filter button.
    lazy var topView: UIView = {
        let imageView = UIView(frame: CGRectMake(0, 0, backView.frame.size.width, 58))
        imageView.backgroundColor = .clear
        return imageView
    }()
    /// "取消" (cancel) button — dismisses the sheet via `cancelAction`.
    lazy var cancelButton: UIButton = {
        let button = UIButton.init()
        button.setTitle("取消", for: .normal)
        // button.setTitleColor(KMain_Color, for: .normal)
        button.setTitleColor(UIColor(red: 165.0/255, green: 137.0/255, blue: 1, alpha: 1), for: .normal)
        button.titleLabel?.font = KFont_Medium(16)
        button.titleLabel?.adjustsFontSizeToFitWidth = true
        button.contentHorizontalAlignment = .center
        button.addTarget(self, action: #selector(cancelAction), for: .touchUpInside)
        return button
    }()
    /// Centered title label — "空间相簿" (spatial album).
    lazy var topLabel: UILabel = {
        let label = UILabel()
        label.backgroundColor = UIColor.clear
        label.font = KFont_Medium(18)
        label.textColor = UIColor(hexString: "#ffffff")
        label.text = "空间相簿"
        return label
    }()
    /// "More" button that opens the filter menu via `filtersButtonAction`.
    lazy var filtersButton: UIButton = {
        let button = UIButton.init()
        button.setImage(UIImage(named: "photomoreBtn"), for: .normal)
        button.addTarget(self, action: #selector(filtersButtonAction), for: .touchUpInside)
        return button
    }()
    /// Reuse identifier under which the grid cell class is registered.
    let AlbumIdentifierCell: String = "CCSpaceAlbumFliterCollectionCell"
    /// Vertical grid of asset thumbnails; item size comes from the
    /// delegate's `sizeForItemAt`, not the layout's `itemSize`.
    lazy var listCollectView:UICollectionView = {
        let collectionLayout = UICollectionViewFlowLayout()
        // collectionLayout.itemSize = CGSize(width: KHZSize(48), height: KHZSize(24))
        collectionLayout.scrollDirection = .vertical
        let collectionView = UICollectionView(frame: CGRect.zero, collectionViewLayout: collectionLayout)
        collectionView.register(CCSpaceAlbumFliterCollectionCell.self, forCellWithReuseIdentifier: AlbumIdentifierCell)
        collectionView.delegate = self
        collectionView.dataSource = self
        // collectionView.isPagingEnabled = true
        collectionView.showsVerticalScrollIndicator = false
        collectionView.showsHorizontalScrollIndicator = false
        collectionView.backgroundColor = UIColor.black
        return collectionView
    }()
    deinit {
        NotificationCenter.default.removeObserver(self)
    }
    /// Refreshes the grid when the app returns to the foreground (the library
    /// may have changed while the app was in the background).
    @objc func appEnterForeground(notification:Notification) {
        self.fetchPhotos()
    }
    /// Builds the view hierarchy, kicks off the initial fetch, and slides the
    /// sheet in immediately.
    override init(frame: CGRect) {
        super.init(frame: frame)
        NotificationCenter.default.addObserver(self, selector: #selector(appEnterForeground), name: UIApplication.willEnterForegroundNotification, object: nil)
        self.backgroundColor = UIColor.black.withAlphaComponent(0.5)
        self.addSubview(backView)
        backView.addSubview(topView)
        topView.addSubview(cancelButton)
        topView.addSubview(topLabel)
        topView.addSubview(filtersButton)
        backView.addSubview(listCollectView)
        cancelButton.snp.makeConstraints { (make) in
            make.leading.equalTo(topView.snp.leading).offset(16)
            make.centerY.equalTo(topView.snp.centerY)
            make.width.equalTo(32)
            make.height.equalTo(24)
        }
        topLabel.snp.makeConstraints { (make) in
            make.centerY.equalTo(topView.snp.centerY)
            make.centerX.equalTo(topView.snp.centerX)
        }
        filtersButton.snp.makeConstraints { (make) in
            make.trailing.equalTo(topView.snp.trailing).offset(-16)
            make.centerY.equalTo(topView.snp.centerY)
            make.width.equalTo(24)
            make.height.equalTo(24)
        }
        listCollectView.snp.makeConstraints { (make) in
            make.top.equalTo(topView.snp.bottom)
            make.leading.equalTo(backView.snp.leading)
            make.trailing.equalTo(backView.snp.trailing)
            make.bottom.equalTo(backView.snp.bottom)
        }
        fetchPhotos()
        show()
    }
    //MARK: - Present / dismiss
    /// Slides the sheet up from below the screen edge, then reloads the grid.
    func show() {
        UIView.animate(withDuration: 0.5, animations: {
            self.listCollectView.alpha = 1.0
            self.backView.frame = CGRect.init(x: 0, y: KScreenHeight - Back_Height, width: KScreenWidth, height: Back_Height)
        }) { (isFinished) in
            self.listCollectView.reloadData()
        }
    }
    /// Slides the sheet off-screen, then removes this view from its superview.
    func hide() {
        UIView.animate(withDuration: 0.5, animations: { [self] in
            self.listCollectView.alpha = 0
            self.backView.frame = CGRect.init(x: 0, y: KScreenHeight + 10 , width: KScreenWidth, height: Back_Height)
        }) { (isFinished) in
            self.removeFromSuperview()
        }
    }
    //MARK: - Fetching
    // NOTE(review): empty stub, never called from this file.
    func fetchAllAsset() {
    }
    /// Re-queries the photo library according to `selectedFileType`, rebuilding
    /// `allAssetItemModelArr` (newest assets first) and refreshing the grid.
    ///
    /// NOTE(review): the `.concurrent` enumerations below append to
    /// `allAssetItemModelArr` from multiple threads without synchronization,
    /// and some `reloadData()` calls can run off the main thread — both are
    /// race conditions worth fixing.
    func fetchPhotos() {
        let fetchOptions = PHFetchOptions()
        // Sort newest-first (`ascending: false` on creationDate).
        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
        self.allAssetItemModelArr.removeAll()
        self.listCollectView.reloadData()
        if selectedFileType == .image {
            let fetchImageResult = PHAsset.fetchAssets(with:.image, options: fetchOptions)
            fetchImageResult.enumerateObjects(options: .concurrent) { asset, index, stop in
                self.dealPhoto(asset: asset)
            }
            self.listCollectView.reloadData()
        }
        else if selectedFileType == .video {
            let fetchVideoResult = PHAsset.fetchAssets(with:.video, options: fetchOptions)
            fetchVideoResult.enumerateObjects(options: .concurrent) { asset, index, stop in
                self.dealVideo(asset: asset)
            }
        }
        else if selectedFileType == .all {
            // Merge images and videos into one list, then classify per asset.
            let fetchImageResult = PHAsset.fetchAssets(with:.image, options: fetchOptions)
            let fetchVideoResult = PHAsset.fetchAssets(with:.video, options: fetchOptions)
            let fetchAllResult:NSMutableArray = NSMutableArray()
            fetchImageResult.enumerateObjects { asset, index, stop in
                fetchAllResult.add(asset)
            }
            fetchVideoResult.enumerateObjects { asset, index, stop in
                fetchAllResult.add(asset)
            }
            fetchAllResult.enumerateObjects(options: .concurrent) { asset, index, stop in
                if let phasset = asset as? PHAsset{
                    if phasset.mediaType == .image {
                        self.dealPhoto(asset: phasset)
                    }
                    else if phasset.mediaType == .video {
                        self.dealVideo(asset: phasset)
                    }
                }
                // NOTE(review): this reload runs once per enumerated asset, on
                // the concurrent enumeration's worker threads.
                self.listCollectView.reloadData()
            }
            // NOTE(review): leftover debug print.
            print("sss");
        }
    }
    /// Appends a model for `asset` when it is a multi-frame image (frame
    /// count > 1 is the "spatial photo" heuristic used here).
    /// NOTE(review): force-unwraps the image data/decode — an iCloud-only or
    /// undecodable asset would crash here.
    func dealPhoto(asset:PHAsset){
        let frameCount = self.getFrameCount(for: asset)
        if frameCount > 1 {
            // Full image data, decoded into the thumbnail shown in the grid.
            let imageData = self.getImageData(for: asset)
            let photoImage:UIImage = UIImage(data: imageData!)!
            let model = CCSpaceAlbumFliterModel.initWithAssetType(assetType: AssetType.image, asset: asset,image:photoImage, localIdentifier: asset.localIdentifier)
            self.allAssetItemModelArr.append(model)
        }
    }
    /// Resolves `asset`'s local URL and, when it is a spatial video, appends a
    /// model whose thumbnail is the video's first frame; the grid is then
    /// reloaded on the main queue.
    /// NOTE(review): `isSpaceVideo!` and the `getVideoFirstFrame` result are
    /// force-unwrapped — crashes if `self` was deallocated before the PhotoKit
    /// callback fires.
    func dealVideo(asset:PHAsset){
        self.getVideoURL(from: asset) { [weak self] url, avasset in
            let isSpaceVideo = self?.isSpatialVideo(asset:avasset)
            if isSpaceVideo! {
                let firstFrameImage:UIImage = (self?.getVideoFirstFrame(url: url))!
                let model = CCSpaceAlbumFliterModel.initWithAssetType(assetType: AssetType.video, asset: asset,image:firstFrameImage, localIdentifier: asset.localIdentifier)
                self?.allAssetItemModelArr.append(model)
                DispatchQueue.main.async { [weak self] in
                    self?.listCollectView.reloadData()
                }
            }
        }
    }
    //MARK: - Spatial-video checks
    /// Delegates the spatial-video test to the project helper.
    func isSpatialVideo(asset: AVAsset) -> Bool {
        return ZZHHelper.isSpatialVideo(asset: asset)
    }
    /// Async alternative check: looks for the QuickTime
    /// `spatial.format-version` metadata key. Unused in this file.
    func isSSVideo(asset:AVAsset)async throws->Bool{
        let userDataItems = try await asset.loadMetadata(for:.quickTimeMetadata)
        let spacialCharacteristics = userDataItems.filter { $0.identifier?.rawValue == "mdta/com.apple.quicktime.spatial.format-version" }
        if spacialCharacteristics.count == 0 {
            return false
        }
        return true
    }
    // MARK: - Actions
    /// Cancel button: dismiss the sheet.
    @objc func cancelAction() {
        hide()
    }
    /// Filter button: show the drop-down menu and re-fetch when a row is
    /// picked (0 = all, 1 = photos, 2 = videos — matching the menu row order).
    @objc func filtersButtonAction() {
        filtersButton.layoutIfNeeded()
        topView.layoutIfNeeded()
        menuView.show()
        menuView.didSelectMenuBlock = { [weak self](index:Int)->Void in
            print("block select \(index)")
            if index == 1 {
                self?.selectedFileType = .image
            }else if index == 2 {
                self?.selectedFileType = .video
            }else if index == 0 {
                self?.selectedFileType = .all
            }
            self?.fetchPhotos()
        }
    }
    //MARK: - Asset helpers
    /// Requests the AVURLAsset for a video asset (network access allowed) and
    /// passes its file URL to `completion`.
    /// NOTE(review): `completion` is silently never invoked for non-video
    /// assets, or when PhotoKit returns a non-URL asset (e.g. a composition).
    private func getVideoURL(from asset: PHAsset, completion: @escaping (URL,AVURLAsset) -> Void) {
        if asset.mediaType == .video {
            let requestOptions = PHVideoRequestOptions()
            requestOptions.isNetworkAccessAllowed = true
            PHImageManager.default().requestAVAsset(forVideo: asset, options: requestOptions) { (avAsset, _, _) in
                if let avAsset = avAsset as? AVURLAsset {
                    let mediaURL = avAsset.url
                    completion(mediaURL,avAsset)
                }
            }
        } else {
        }
    }
    /// Synchronously loads the full-quality image data for a photo asset;
    /// returns nil for non-image assets or when PhotoKit yields no data.
    /// Blocks the calling thread until PhotoKit delivers.
    private func getImageData(for asset: PHAsset) -> Data? {
        var imageData: Data?
        if asset.mediaType == .image {
            let requestOptions = PHImageRequestOptions()
            requestOptions.isSynchronous = true// synchronous so the closure runs before we return
            requestOptions.deliveryMode = .highQualityFormat
            // PHImageManager.default().requestImageData(for: asset, options: requestOptions) { (data, _, _, _) in
            // imageData = data
            // }
            PHImageManager.default().requestImageDataAndOrientation(for: asset, options: requestOptions) { data, dataUTI, imagePropertyOrientation, info in
                imageData = data
            }
        }
        return imageData
    }
    /// Number of image frames in the asset's encoded data (0 when the data or
    /// image source is unavailable).
    private func getFrameCount(for asset: PHAsset) -> Int {
        if let imageData = getImageData(for: asset) {
            if let cgImageSource = CGImageSourceCreateWithData(imageData as CFData, nil) {
                return CGImageSourceGetCount(cgImageSource)
            }
        }
        return 0
    }
    /// Encodes a CGImage as PNG into a CFData buffer. Unused in this file.
    /// NOTE(review): `UTType.png as! CFString` force-casts a `UTType` value to
    /// `CFString` and will crash at runtime; `UTType.png.identifier as CFString`
    /// is presumably what was intended — verify before calling this.
    func convertCGImageToCFData(cgImage: CGImage) -> CFData? {
        let data = CFDataCreateMutable(kCFAllocatorDefault, 0)
        if let data = data {
            if let destination = CGImageDestinationCreateWithData(data, UTType.png as! CFString, 1, nil) {
                CGImageDestinationAddImage(destination, cgImage, nil)
                CGImageDestinationFinalize(destination)
            }
        }
        return data
    }
    //MARK: - Video thumbnail
    /// Grabs the frame at t = 0 of the video at `url`, honoring the preferred
    /// track transform.
    /// NOTE(review): `try!` crashes when frame extraction fails (protected or
    /// corrupt media, unreadable URL).
    func getVideoFirstFrame(url:URL) -> UIImage {
        let videoURL = url
        let asset = AVURLAsset(url: videoURL)
        let generator = AVAssetImageGenerator(asset: asset)
        generator.appliesPreferredTrackTransform = true
        let time = CMTimeMakeWithSeconds(0.0, preferredTimescale: 1)
        let imageRef = try! generator.copyCGImage(at: time, actualTime: nil)
        let image = UIImage(cgImage: imageRef)
        // Use the `image` as you like
        return image
    }
    // MARK: - Duration formatting
    /// Formats a duration as "mm:ss", or "hh:mm:ss" once it reaches one hour.
    /// Each component is zero-padded to two digits.
    /// - Parameter second: whole seconds (non-negative expected).
    /// - Returns: the formatted time string.
    func transToHourMinSec(second: Int) -> String {
        let allTime: Int = second
        var hours = 0
        var minutes = 0
        var seconds = 0
        var hoursText = ""
        var minutesText = ""
        var secondsText = ""
        hours = allTime / 3600
        hoursText = hours > 9 ? "\(hours)" : "0\(hours)"
        minutes = allTime % 3600 / 60
        minutesText = minutes > 9 ? "\(minutes)" : "0\(minutes)"
        seconds = allTime % 3600 % 60
        secondsText = seconds > 9 ? "\(seconds)" : "0\(seconds)"
        if hoursText == "00" {
            return "\(minutesText):\(secondsText)"
        }else{
            return "\(hoursText):\(minutesText):\(secondsText)"
        }
    }
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}
// MARK: - UICollectionViewDataSource / UICollectionViewDelegateFlowLayout
extension CCSpaceAlbumFliterPopView:UICollectionViewDataSource, UICollectionViewDelegateFlowLayout {
    /// Runs `work` immediately when already on the main thread, otherwise
    /// dispatches it asynchronously to the main queue. Replaces the two
    /// duplicated `Thread.isMainThread` branches in `didSelectItemAt` with
    /// identical semantics.
    private func runOnMain(_ work: @escaping () -> Void) {
        if Thread.isMainThread {
            work()
        } else {
            DispatchQueue.main.async(execute: work)
        }
    }
    // MARK: UICollectionViewDataSource
    /// One cell per fetched asset model.
    func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
        return self.allAssetItemModelArr.count
    }
    /// Configures a thumbnail cell: photos show the "空间" (spatial) badge,
    /// videos show their duration ("mm:ss" / "hh:mm:ss").
    func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
        // Fix: dequeue with the registered `AlbumIdentifierCell` constant
        // instead of a duplicated string literal, so the registration (in
        // `listCollectView`) and the dequeue cannot drift apart.
        let cell = collectionView.dequeueReusableCell(withReuseIdentifier: AlbumIdentifierCell, for: indexPath) as! CCSpaceAlbumFliterCollectionCell
        let model:CCSpaceAlbumFliterModel = allAssetItemModelArr[indexPath.item]
        cell.imageView.image = model.image
        if model.assetType == .image {
            cell.frameCountLabel.text = "空间"
            cell.frameCountLabel.isHidden = false
            cell.timeLabel.isHidden = true
        } else if model.assetType == .video {
            // NOTE(review): force-unwrap of `itemAsset` assumes every video
            // model carries its asset (true for models built in `dealVideo`).
            let time:Double = Double(model.itemAsset!.duration)
            let timeStr:String = self.transToHourMinSec(second:Int(time))
            cell.frameCountLabel.isHidden = true
            cell.timeLabel.isHidden = false
            cell.timeLabel.text = timeStr
        }
        return cell
    }
    // MARK: UICollectionViewDelegateFlowLayout
    /// Square cells, three per row: total width minus the two 2pt inter-item
    /// gaps (2 * 2 = 4), divided by three.
    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAt indexPath: IndexPath) -> CGSize {
        let width = (collectionView.bounds.width - 4)/3
        return CGSize(width: width, height: width)
    }
    /// 2pt between rows.
    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, minimumLineSpacingForSectionAt section: Int) -> CGFloat {
        return 2
    }
    /// 2pt between items in a row.
    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, minimumInteritemSpacingForSectionAt section: Int) -> CGFloat {
        return 2
    }
    /// Dismisses the sheet and forwards the chosen asset to the matching
    /// handler: photos deliver their full image data, videos their local URL.
    func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
        let model:CCSpaceAlbumFliterModel = allAssetItemModelArr[indexPath.item]
        guard let asset = model.itemAsset else { return }
        if asset.mediaType == .image {
            // Synchronous full-quality data fetch; may block briefly.
            let imageData = self.getImageData(for: asset)
            runOnMain {
                self.hide()
                if let imageData = imageData {
                    self.selectedImageHandler?(imageData,asset)
                }
            }
        } else if asset.mediaType == .video {
            self.getVideoURL(from: asset) { url,avasset in
                self.runOnMain {
                    self.hide()
                    self.selectedVideoHandler?(url,asset)
                }
            }
        }
    }
}
//MARK: - Cell
/// Thumbnail cell for the spatial-album grid: a full-bleed image plus two
/// small rounded badges — `frameCountLabel` (top-left, "spatial" tag for
/// photos) and `timeLabel` (bottom-right, duration for videos).
class CCSpaceAlbumFliterCollectionCell: UICollectionViewCell {
    /// Full-cell thumbnail, aspect-filled and clipped.
    let imageView: UIImageView = {
        let thumbnail = UIImageView()
        thumbnail.clipsToBounds = true
        thumbnail.contentMode = .scaleAspectFill
        return thumbnail
    }()
    /// Top-left badge (photo "spatial" tag).
    let frameCountLabel: UILabel = CCSpaceAlbumFliterCollectionCell.makeBadgeLabel()
    /// Bottom-right badge (video duration).
    let timeLabel: UILabel = CCSpaceAlbumFliterCollectionCell.makeBadgeLabel()
    /// Both badges share the exact same styling, so build them in one place.
    private static func makeBadgeLabel() -> UILabel {
        let badge = UILabel()
        badge.textColor = .white
        badge.backgroundColor = UIColor(hexString: "#383739")
        badge.textAlignment = .center
        badge.font = UIFont.boldSystemFont(ofSize: 12)
        badge.adjustsFontSizeToFitWidth = true
        badge.layer.cornerRadius = 8
        badge.layer.masksToBounds = true
        return badge
    }
    override init(frame: CGRect) {
        super.init(frame: frame)
        assembleSubviews()
    }
    required init?(coder: NSCoder) {
        super.init(coder: coder)
        assembleSubviews()
    }
    /// Adds the subviews (image first, so the badges sit on top) and pins them
    /// with Auto Layout: the image fills the cell; each badge is a fixed
    /// 40x20, inset 8pt from its corner.
    private func assembleSubviews() {
        for subview in [imageView, frameCountLabel, timeLabel] {
            addSubview(subview)
            subview.translatesAutoresizingMaskIntoConstraints = false
        }
        var layout: [NSLayoutConstraint] = [
            imageView.topAnchor.constraint(equalTo: topAnchor),
            imageView.leadingAnchor.constraint(equalTo: leadingAnchor),
            imageView.trailingAnchor.constraint(equalTo: trailingAnchor),
            imageView.bottomAnchor.constraint(equalTo: bottomAnchor)
        ]
        layout += [
            frameCountLabel.topAnchor.constraint(equalTo: topAnchor, constant: 8),
            frameCountLabel.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 8),
            frameCountLabel.widthAnchor.constraint(equalToConstant: 40),
            frameCountLabel.heightAnchor.constraint(equalToConstant: 20)
        ]
        layout += [
            timeLabel.bottomAnchor.constraint(equalTo: bottomAnchor, constant: -8),
            timeLabel.trailingAnchor.constraint(equalTo: trailingAnchor, constant: -8),
            timeLabel.widthAnchor.constraint(equalToConstant: 40),
            timeLabel.heightAnchor.constraint(equalToConstant: 20)
        ]
        NSLayoutConstraint.activate(layout)
    }
}
//MARK: - Model
/// Kind of media a `CCSpaceAlbumFliterModel` wraps.
/// NOTE(review): raw values are positional (image = 0, video = 1, all = 2);
/// do not reorder cases without auditing raw-value uses.
enum AssetType : Int {
    case image
    case video
    case all
}
/// View-model for one photo-library asset shown in the filter grid.
class CCSpaceAlbumFliterModel: NSObject {
    /// Media kind of this entry.
    /// NOTE(review): an older comment here read "1 image 2 video 3 gif", which
    /// does not match the `AssetType` cases (image/video/all) — verify no code
    /// still relies on a gif meaning.
    var assetType:AssetType?
    /// Backing PhotoKit asset (source of duration, media type, etc.).
    var itemAsset :PHAsset?
    /// Copy of `PHAsset.localIdentifier`, kept for later lookup.
    var astLocalIdentifier :String?
    var image: UIImage?// Pre-rendered thumbnail: decoded photo data or first video frame.
    /** Selection flag — never read or written in this file. */
    var isSelected :Bool?
    /** Per-item selection marker — never read or written in this file. */
    var currentItemSelectedFlage: String?
    // Video duration in seconds — never read or written in this file.
    var videoTimeLength:Int?
    /// Factory: wraps `asset` together with its type, thumbnail and identifier.
    static func initWithAssetType(assetType:AssetType,asset:PHAsset,image:UIImage,localIdentifier:String) -> CCSpaceAlbumFliterModel {
        let model = CCSpaceAlbumFliterModel()
        model.assetType = assetType
        model.itemAsset = asset
        model.astLocalIdentifier = localIdentifier
        model.image = image
        return model
    }
}