// Source path: VPCamera/SwiftProject/SwiftProject/Project/Controller/RecordingVideo/CCSpatialShootController.swift
// NOTE(review): the original lines here were GitHub web-UI residue accidentally pasted into
// the file ("1260 lines", "48 KiB", "Raw Blame History", and the ambiguous-Unicode banner).
// They are not Swift and would break compilation; converted to this comment.
//
// CCSpatialShootController.swift
// SwiftProject
//
// Created by Zhang, Joyce on 2024/2/29.
//
import UIKit
import AVFoundation
import Photos
import AVKit
import VideoToolbox
import SVProgressHUD
import Firebase
class CCSpatialShootController: BaseController {
// UserDefaults key used to stamp when this screen appeared; viewDidDisappear reads it back to log dwell time.
let kNowTimeToUserDefaultKey_SpatialShootController:String = "kNowTimeToUserDefaultKey_SpatialShootController"
var wideAngleCameraDevice:AVCaptureDevice?// back wide-angle device, kept for later configuration tweaks
//AVCaptureSession --- multi-camera capture pipeline
var session = AVCaptureMultiCamSession()// runs both back cameras simultaneously
var wideAngleCameraDeviceInput: AVCaptureDeviceInput?// input for .builtInWideAngleCamera (back)
var ultraWideCameraDeviceInput: AVCaptureDeviceInput?// input for .builtInUltraWideCamera (back, 0.5x lens)
var wideAngleCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?// full-screen preview of the wide-angle stream
var ultraWideCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?// ultra-wide preview (currently unused; see configureSession)
var wideAngleCameraMovieOutput: AVCaptureMovieFileOutput?// movie output for the wide-angle ("left eye") camera
var ultraWideCameraMovieOutput: AVCaptureMovieFileOutput?// movie output for the ultra-wide ("right eye") camera
var isRecording = false// true while a video recording is in progress
// Orientation tags updated in orientationDidChange(); stamped into the composited HEIC.
var imageCGImagePropertyOrientation:CGImagePropertyOrientation = CGImagePropertyOrientation.left// ImageIO orientation for the stereo pair
var imageOrientation:UIImage.Orientation = UIImage.Orientation.up
var leftEyeVideoURL:URL?// finished wide-angle clip, set by the recording delegate
var rightEyeVideoURL:URL?// finished ultra-wide clip, set by the recording delegate
var outputVideoURL: URL?// destination of the composited spatial video (set in viewDidLoad)
var imgs:NSMutableArray = NSMutableArray() // first frames of both short clips; paired into a spatial photo when count == 2
//================================
// Current capture mode: photo vs. video.
var shootingMode:CCShootingMode = .CCShootingMode_Camera
// One-second repeating timer driving the elapsed-time title while recording.
var timer: Timer?
// Full-screen popup listing spatial photos/videos for playback.
lazy var spaceAlbumPopView: CCSpaceAlbumFilterPopView2 = {
    let view = CCSpaceAlbumFilterPopView2.init(frame: CGRectMake(0, 0, KScreenWidth, KScreenHeight))
    return view
}()
// One-shot hint under the navigation bar; faded out by showTips().
lazy var tipsLabel: UILabel = {
    let label = UILabel.init(frame: CGRectMake((KScreenWidth - 320)/2, KNavigationBarHeight+KStatusBarHeight + 20, 320, 36))
    // label.backgroundColor = UIColor(hexString: "ffffff", alpha: 0.2)
    label.backgroundColor = UIColor.darkGray
    label.font = KFont_Medium(12)
    label.textColor = KTextColor_White
    label.text = "你只需捕捉美好,转码交给我们"
    label.textAlignment = .center
    label.alpha = 1.0
    label.layer.cornerRadius = 8
    label.clipsToBounds = true
    return label
}()
// Rotate-phone icon shown while the device is held in portrait.
lazy var horizontalImageView: UIImageView = {
    let imageView = UIImageView()
    imageView.image = UIImage.init(named: "horizontal_rotate_img")
    // imageView.backgroundColor = .green
    imageView.alpha = 0.0
    imageView.isHidden = true
    return imageView
}()
// Blur overlay behind the "rotate to landscape" prompt.
lazy var effectView: UIVisualEffectView = {
    let effect = UIBlurEffect(style: .light)
    let view = UIVisualEffectView.init(effect: effect)
    view.backgroundColor = .clear
    view.alpha = 0.0
    view.isHidden = true
    return view
}()
// "Rotate iPhone to landscape" caption under the icon.
lazy var horizontalLabel: UILabel = {
    let label = UILabel()
    label.backgroundColor = UIColor.clear
    label.font = KFont_Medium(14)
    label.textColor = KTextColor_White
    label.text = "将iPhone旋转为横向"
    label.alpha = 0.0
    label.isHidden = true
    return label
}()
// White ring framing the shutter button.
lazy var shutterRingView: UIView = {
    let view = UIView()
    view.frame = CGRect(x:0,y:0,width:66,height:66)
    view.backgroundColor = .clear
    view.layer.cornerRadius = 33
    view.layer.masksToBounds = true
    view.layer.borderWidth = 3
    view.layer.borderColor = UIColor.white.cgColor
    view.center = CGPointMake(KScreenWidth/2, KScreenHeight - KTabbarSafeBottomMargin - 50 - 33)
    return view
}()
// Solid white shutter used in photo mode.
lazy var shutterPhotoButton: UIButton = {
    let botton = UIButton()
    botton.frame = CGRect(x:0,y:0,width:56,height:56)
    botton.backgroundColor = .white
    botton.layer.cornerRadius = 28
    botton.layer.masksToBounds = true
    botton.center = CGPointMake(33, 33)
    botton.addTarget(self, action: #selector(shutterPhotoButtonAction), for: .touchUpInside)
    return botton
}()
// Red shutter used in video mode; hidden until the user switches modes.
lazy var shutterVideoButton: UIButton = {
    let botton = UIButton()
    botton.frame = CGRect(x:0,y:0,width:56,height:56)
    botton.backgroundColor = UIColor(hexString: "#FF3B2F")
    botton.layer.cornerRadius = 28
    botton.layer.masksToBounds = true
    botton.center = CGPointMake(33, 33)
    botton.addTarget(self, action: #selector(shutterVideoButtonAction(_:)), for: .touchUpInside)
    botton.isHidden = true
    return botton
}()
// Album entry button; its background is set to the newest photo thumbnail (getAlbumFirstPhoto).
lazy var albumButton: UIButton = {
    let botton = UIButton()
    botton.frame = CGRect(x:0,y:0,width:48,height:48)
    botton.layer.cornerRadius = 8
    botton.layer.masksToBounds = true
    botton.center = CGPointMake(shutterRingView.center.x - 92 - 24, shutterRingView.center.y)
    botton.addTarget(self, action: #selector(albumButtonAction), for: .touchUpInside)
    return botton
}()
// Torch toggle, to the right of the shutter.
lazy var lightButton: UIButton = {
    let botton = UIButton()
    botton.frame = CGRect(x:0,y:0,width:40,height:40)
    botton.setImage(UIImage.init(named: "light_button_normal"), for: .normal)
    botton.center = CGPointMake( shutterRingView.center.x + 103 + 20, shutterRingView.center.y)
    botton.addTarget(self, action: #selector(lightButtonAction), for: .touchUpInside)
    return botton
}()
// Pill-shaped container for the photo/video mode switch.
lazy var switchBackView: UIView = {
    let view = UIView()
    view.backgroundColor = UIColor(hexString: "#ffffff", alpha: 0.6)
    view.layer.cornerRadius = 20
    view.layer.masksToBounds = true
    return view
}()
// Photo-mode switch button (tag 1001; selected by default).
lazy var cameraButton: UIButton = {
    let botton = UIButton()
    botton.tag = 1001
    botton.backgroundColor = UIColor(hexString: "#000000", alpha: 0.3)
    botton.setImage(UIImage(named: "camera_button_selected"), for: .normal)
    botton.layer.cornerRadius = 16
    botton.layer.masksToBounds = true
    botton.addTarget(self, action: #selector(switchButtonAction(_:)), for: .touchUpInside)
    return botton
}()
// Video-mode switch button (tag 1002).
lazy var videoButton: UIButton = {
    let botton = UIButton()
    botton.tag = 1002
    botton.backgroundColor = .clear
    botton.setImage(UIImage(named: "video_button_normal"), for: .normal)
    botton.layer.cornerRadius = 16
    botton.layer.masksToBounds = true
    botton.addTarget(self, action: #selector(switchButtonAction(_:)), for: .touchUpInside)
    return botton
}()
// "Photo" caption above the switch; visibility mirrors the active mode (changeSwitchstatus).
lazy var cameraLabel: UILabel = {
    let label = UILabel()
    label.backgroundColor = UIColor.clear
    label.font = KFont_Medium(12)
    label.textColor = UIColor(hexString: "#ffffff", alpha: 0.6)
    label.text = "拍照"
    label.isHidden = false
    return label
}()
// "Video" caption above the switch; hidden while photo mode is active.
lazy var videoLabel: UILabel = {
    let label = UILabel()
    label.backgroundColor = UIColor.clear
    label.font = KFont_Medium(12)
    label.textColor = UIColor(hexString: "#ffffff", alpha: 0.6)
    label.text = "摄像"
    label.isHidden = true
    return label
}()
//=================================
//MARK: - viewWillAppear
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    // Transparent navigation bar so the camera preview shows through.
    self.setNavgationBarColor(color: UIColor.clear)
    self.setNavgationBarLine(color: .clear)
}
override func viewDidDisappear(_ animated: Bool) {
    super.viewDidDisappear(animated)
    // Report dwell time on this screen; the arrival timestamp was stored in viewDidLoad.
    let sec:TimeInterval = ZZHHelper.getSecFromUserDefaultByKey(kNowTimeToUserDefaultKey_SpatialShootController)
    Analytics.logEvent("capture_pv", parameters: ["refer_page":"相机拍摄页面","duration":sec])
}
override func viewDidLoad() {
    super.viewDidLoad()
    // Stamp the arrival time; viewDidDisappear reads it back for the analytics event.
    ZZHHelper.setNowTimeToUserDefaultWithKey(kNowTimeToUserDefaultKey_SpatialShootController)
    // view.backgroundColor = .white
    // Destination for the composited spatial video.
    outputVideoURL = URL.documentsDirectory.appendingPathComponent("output.MOV")
    //======
    // Start publishing physical-rotation events and listen for them.
    UIDevice.current.beginGeneratingDeviceOrientationNotifications()
    // UIDevice.orientationDidChangeNotification
    NotificationCenter.default.addObserver(self, selector: #selector(orientationDidChange), name: UIDevice.orientationDidChangeNotification, object: nil)
    configureSession() // build the multi-cam pipeline before laying the controls over the preview layer
    setUI()
    showTips()
    self.view.bringSubviewToFront(navtionImgView!)
    // Capture is designed for landscape; prompt the user if they start in portrait.
    let isLandscape = UIDevice.current.orientation.isLandscape
    if !isLandscape {
        showHorizontalScreenTips()
    }
    // Load the newest library photo as the album button's thumbnail.
    getAlbumFirstPhoto()
}
// Adds all controls over the camera preview and lays them out.
// Order matters: the landscape-prompt views are added first, then raised so
// they can cover the rest of the UI when shown.
private func setUI() {
    self.view.addSubview(effectView)
    self.view.addSubview(horizontalImageView)
    self.view.addSubview(horizontalLabel)
    self.view.bringSubviewToFront(horizontalImageView)
    self.view.bringSubviewToFront(horizontalLabel)
    // Shutter cluster: ring with both shutter buttons inside, album on the left, torch on the right.
    self.view.addSubview(shutterRingView)
    shutterRingView.addSubview(shutterPhotoButton)
    shutterRingView.addSubview(shutterVideoButton)
    self.view.addSubview(albumButton)
    self.view.addSubview(lightButton)
    self.view.addSubview(switchBackView)
    switchBackView.addSubview(cameraButton)
    switchBackView.addSubview(videoButton)
    self.view.addSubview(cameraLabel)
    self.view.addSubview(videoLabel)
    self.setRightOneBtnTitle(string: "4K·3D")
    effectView.snp.makeConstraints { (make) in
        make.top.leading.bottom.trailing.equalTo(self.view)
    }
    horizontalImageView.snp.makeConstraints { (make) in
        make.centerY.equalTo(self.view.snp.centerY)
        make.centerX.equalTo(self.view.snp.centerX)
        make.width.equalTo(28)
        make.height.equalTo(28)
    }
    horizontalLabel.snp.makeConstraints { (make) in
        make.top.equalTo(self.horizontalImageView.snp.bottom).offset(16)
        make.centerX.equalTo(self.view.snp.centerX)
    }
    switchBackView.snp.makeConstraints { (make) in
        make.bottom.equalTo(shutterRingView.snp.top).offset(-32)
        make.centerX.equalTo(self.view.snp.centerX)
        make.width.equalTo(128)
        make.height.equalTo(40)
    }
    cameraButton.snp.makeConstraints { (make) in
        make.centerY.equalTo(switchBackView.snp.centerY)
        make.leading.equalTo(switchBackView.snp.leading).offset(4)
        make.width.equalTo(56)
        make.height.equalTo(32)
    }
    videoButton.snp.makeConstraints { (make) in
        make.centerY.equalTo(switchBackView.snp.centerY)
        make.trailing.equalTo(switchBackView.snp.trailing).offset(-4)
        make.width.equalTo(56)
        make.height.equalTo(32)
    }
    // Mode captions are centered over their respective switch buttons.
    cameraLabel.snp.makeConstraints { (make) in
        make.bottom.equalTo(switchBackView.snp.top).offset(-8)
        make.centerX.equalTo(switchBackView.snp.leading).offset(32)
    }
    videoLabel.snp.makeConstraints { (make) in
        make.bottom.equalTo(switchBackView.snp.top).offset(-8)
        make.centerX.equalTo(switchBackView.snp.trailing).offset(-32)
    }
}
// Builds the multi-camera pipeline: back wide-angle + back ultra-wide inputs,
// one audio input, one movie output per camera, and a full-screen preview layer
// for the wide-angle stream. Called once from viewDidLoad.
private func configureSession() {
    // Bail out early on hardware that cannot run two cameras at once; the old
    // code relied on canAddInput failing later.
    guard AVCaptureMultiCamSession.isMultiCamSupported else {
        print("Multi-cam capture is not supported on this device")
        return
    }
    session.beginConfiguration()
    defer {
        session.commitConfiguration()
    }
    // Back wide-angle camera (~28mm equivalent) — the "left eye" of the stereo pair.
    guard let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else {
        print("Could not find the back camera")
        return
    }
    wideAngleCameraDevice = backCamera
    do {
        print("builtInWideAngleCamera:\(backCamera.videoZoomFactor)")
        wideAngleCameraDeviceInput = try AVCaptureDeviceInput(device: backCamera)
        guard let wideAngleCameraDeviceInput = wideAngleCameraDeviceInput,
              session.canAddInput(wideAngleCameraDeviceInput) else {
            print("Could not add back camera input")
            return
        }
        session.addInput(wideAngleCameraDeviceInput)
    } catch {
        print("Could not create back camera device input: \(error)")
        return
    }
    // Back ultra-wide camera (the 0.5x lens) — the "right eye".
    guard let frontCamera = AVCaptureDevice.default(.builtInUltraWideCamera, for: .video, position: .back) else {
        print("Could not find the front camera")
        return
    }
    do {
        print("builtInUltraWideCamera:\(frontCamera.videoZoomFactor)")
        ultraWideCameraDeviceInput = try AVCaptureDeviceInput(device: frontCamera)
        guard let ultraWideCameraDeviceInput = ultraWideCameraDeviceInput,
              session.canAddInput(ultraWideCameraDeviceInput) else {
            print("Could not add front camera input")
            return
        }
        session.addInput(ultraWideCameraDeviceInput)
    } catch {
        print("Could not create front camera device input: \(error)")
        return
    }
    // Microphone input so recorded movies carry audio.
    guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
        print("Could not find audio device")
        return
    }
    do {
        let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
        guard session.canAddInput(audioDeviceInput) else {
            print("Could not add audio input")
            return
        }
        session.addInput(audioDeviceInput)
    } catch {
        print("Could not create audio device input: \(error)")
        return
    }
    // One movie-file output per camera; both record simultaneously.
    wideAngleCameraMovieOutput = AVCaptureMovieFileOutput()
    guard let wideAngleCameraMovieOutput = wideAngleCameraMovieOutput,
          session.canAddOutput(wideAngleCameraMovieOutput) else {
        print("Could not add the back camera movie output")
        return
    }
    session.addOutput(wideAngleCameraMovieOutput)
    ultraWideCameraMovieOutput = AVCaptureMovieFileOutput()
    guard let ultraWideCameraMovieOutput = ultraWideCameraMovieOutput,
          session.canAddOutput(ultraWideCameraMovieOutput) else {
        print("Could not add the front camera movie output")
        return
    }
    session.addOutput(ultraWideCameraMovieOutput)
    // Full-screen preview of the wide-angle stream, inserted behind all UI.
    // (A second preview layer for the ultra-wide stream was tried and removed.)
    wideAngleCameraVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
    wideAngleCameraVideoPreviewLayer?.frame = CGRect(x: 0, y: 0, width: view.frame.size.width, height: view.frame.size.height)
    if let wideAngleCameraVideoPreviewLayer = wideAngleCameraVideoPreviewLayer {
        wideAngleCameraVideoPreviewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        self.view.layer.insertSublayer(wideAngleCameraVideoPreviewLayer, at: 0)
    }
    DispatchQueue.global().async {
        // startRunning() blocks, so keep it off the main thread.
        self.session.startRunning()
        // On AVCaptureMultiCamSession the zoom must be applied after startRunning().
        // 1.75x on the ultra-wide roughly matches the wide-angle field of view so the
        // two eyes line up. The old code used `try!` here, which would crash the app
        // if the lock failed.
        do {
            try frontCamera.lockForConfiguration()
            frontCamera.videoZoomFactor = 1.75
            frontCamera.unlockForConfiguration()
        } catch {
            print("Could not lock ultra-wide camera for zoom configuration: \(error)")
        }
    }
}
//MARK: -
// Shows the marketing hint label on top of everything, then fades it out
// (2 s delay, 3 s fade) and hides it when the animation finishes.
func showTips() {
    view.addSubview(tipsLabel)
    view.bringSubviewToFront(tipsLabel)
    UIView.animate(withDuration: 3,
                   delay: 2.0,
                   options: .curveEaseInOut,
                   animations: { self.tipsLabel.alpha = 0 },
                   completion: { _ in self.tipsLabel.isHidden = true })
}
// Reveals the "rotate to landscape" prompt (icon + caption + blur backdrop)
// with a 0.5 s fade-in.
func showHorizontalScreenTips() {
    let promptViews: [UIView] = [horizontalImageView, horizontalLabel, effectView]
    promptViews.forEach { $0.isHidden = false }
    UIView.animate(withDuration: 0.5) {
        promptViews.forEach { $0.alpha = 1.0 }
    }
}
// Fades out the "rotate to landscape" prompt and hides its views once the
// 0.5 s animation completes.
func hidenHorizontalScreenTips() {
    let promptViews: [UIView] = [horizontalImageView, horizontalLabel, effectView]
    UIView.animate(withDuration: 0.5, animations: {
        promptViews.forEach { $0.alpha = 0.0 }
    }, completion: { _ in
        promptViews.forEach { $0.isHidden = true }
    })
}
//MARK: - action
// Opens the spatial-album popup and wires its selection callbacks to push the
// matching display controller.
@objc func albumButtonAction() {
    KWindow?.addSubview(spaceAlbumPopView)
    spaceAlbumPopView.show()
    // [weak self]: the pop view is retained by self (lazy property), so the old
    // strong `[self]` capture in these stored handlers created a retain cycle.
    // Photo selected → spatial-photo viewer.
    spaceAlbumPopView.selectedImageHandler = { [weak self] data, asset in
        print("回调")
        let vc = CCSpatialPhotoDisplayController()
        vc.photoOriginalData = data
        vc.imageAsset = asset
        self?.navigationController?.pushViewController(vc, animated: true)
    }
    // Video selected → spatial-video viewer.
    spaceAlbumPopView.selectedVideoHandler = { [weak self] url, asset in
        let vc = CCSpatialVideoDisplayController()
        vc.sourceVideoURL = url
        vc.videoOriginalPHAsset = asset
        self?.navigationController?.pushViewController(vc, animated: true)
    }
}
// Toggles the torch on the default video device.
@objc func lightButtonAction() {
    guard let device = AVCaptureDevice.default(for: .video) else {
        print("无法获取到您的设备")
        return
    }
    guard device.hasTorch && device.isTorchAvailable else { return }
    do {
        // lockForConfiguration() must succeed before mutating torchMode; the old
        // `try?` silently ignored a failed lock and mutated the device anyway.
        try device.lockForConfiguration()
        device.torchMode = device.torchMode == .off ? .on : .off
        device.unlockForConfiguration()
    } catch {
        print("Could not lock device for torch configuration: \(error)")
    }
}
// Switches between photo (tag 1001) and video (tag 1002) modes and swaps the
// visible shutter button, then refreshes the switch UI.
@objc func switchButtonAction(_ sender: UIButton){
    switch sender.tag {
    case 1001:
        shootingMode = .CCShootingMode_Camera
        shutterPhotoButton.isHidden = false
        shutterVideoButton.isHidden = true
    case 1002:
        shootingMode = .CCShootingMode_Video
        shutterPhotoButton.isHidden = true
        shutterVideoButton.isHidden = false
    default:
        break
    }
    changeSwitchstatus()
}
//
// Photo shutter: records a ~0.1 s clip on BOTH cameras; the recording delegate
// extracts the first frame of each and composites them into a spatial photo.
// (Removed a large block of commented-out zoom-debug code that previously
// short-circuited this method during experiments.)
@objc func shutterPhotoButtonAction(){
    // Brief gray flash as press feedback; restored below.
    shutterPhotoButton.backgroundColor = .gray
    // Clear frames from any previous capture so the count == 2 check starts clean.
    imgs.removeAllObjects()
    guard let wideAngleCameraMovieOutput = wideAngleCameraMovieOutput,
          let ultraWideCameraMovieOutput = ultraWideCameraMovieOutput else {
        print("Movie output not configured")
        return
    }
    // System shutter sound (1108 = camera shutter click).
    let soundID: SystemSoundID = 1108
    AudioServicesPlaySystemSound(soundID)
    // Shared timestamp keeps the two eye files paired by name.
    let time = Date().timeIntervalSince1970
    let name1 = "wideAnglePhoto" + String(time) + ".mov"
    let name2 = "ultraWidePhoto" + String(time) + ".mov"
    let wideAngleCameraOutputURL = URL.documentsDirectory.appending(path:name1)
    let ultraWideCameraOutputURL = URL.documentsDirectory.appending(path:name2)
    wideAngleCameraMovieOutput.startRecording(to: wideAngleCameraOutputURL, recordingDelegate: self)
    ultraWideCameraMovieOutput.startRecording(to: ultraWideCameraOutputURL, recordingDelegate: self)
    // Stop after ~0.1 s — just long enough to capture one usable frame per camera.
    DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
        self.shutterPhotoButton.backgroundColor = .white
        self.stopVideoRecording()
    }
}
//
// Video shutter: toggles recording, animating the red button between its round
// idle shape and a small square "stop" shape.
@objc func shutterVideoButtonAction(_ sender: UIButton){
    if !self.isRecording {
        print("录像中...")
        // Flip the flag immediately (the old code set it in the animation
        // completion, so a second tap within the 0.3 s animation could start a
        // duplicate recording).
        self.isRecording = true
        UIView.animate(withDuration: 0.3) {
            // Shrink into the square "recording" glyph.
            self.shutterVideoButton.transform = CGAffineTransform(scaleX: 0.53, y: 0.53)
            self.shutterVideoButton.layer.cornerRadius = 4
        } completion: { _ in
            self.changeSwitchstatus()
        }
        // Start the elapsed-time title and both camera recordings.
        startRecordingTimer()
        startVideoRecording()
    } else {
        print("录像结束!!!")
        // Same ordering fix on the way down: mark not-recording before the animation.
        self.isRecording = false
        stopRecordingTimer()
        stopVideoRecording()
        UIView.animate(withDuration: 0.3) {
            // Grow back to the round idle shape.
            self.shutterVideoButton.transform = CGAffineTransform(scaleX: 1.0, y: 1.0)
            self.shutterVideoButton.layer.cornerRadius = 28
        } completion: { _ in
            self.changeSwitchstatus()
        }
    }
}
// MARK: - /
// Kicks off simultaneous recordings on both cameras; file names share one
// timestamp so the eye pair can be matched later.
private func startVideoRecording() {
    guard let wideOutput = wideAngleCameraMovieOutput,
          let ultraWideOutput = ultraWideCameraMovieOutput else {
        print("Movie output not configured")
        return
    }
    let stamp = String(Date().timeIntervalSince1970)
    let wideURL = URL.documentsDirectory.appending(path: "wideAngleVideo" + stamp + ".mov")
    let ultraWideURL = URL.documentsDirectory.appending(path: "ultraWideVideo" + stamp + ".mov")
    wideOutput.startRecording(to: wideURL, recordingDelegate: self)
    ultraWideOutput.startRecording(to: ultraWideURL, recordingDelegate: self)
}
// Stops both camera recordings; completion is delivered via the
// AVCaptureFileOutputRecordingDelegate callback (one call per output).
private func stopVideoRecording(){
    guard let wideOutput = wideAngleCameraMovieOutput,
          let ultraWideOutput = ultraWideCameraMovieOutput else {
        print("Movie output not configured")
        return
    }
    for output in [wideOutput, ultraWideOutput] {
        output.stopRecording()
    }
}
//MARK: -
// Refreshes the mode-switch UI: hidden entirely while recording, otherwise the
// active mode's button gets the dark highlight and its caption becomes visible.
func changeSwitchstatus() {
    if isRecording {
        switchBackView.isHidden = true
        cameraLabel.isHidden = true
        videoLabel.isHidden = true
        return
    }
    switchBackView.isHidden = false
    let highlight = UIColor(hexString: "#000000", alpha: 0.3)
    if shootingMode == .CCShootingMode_Camera {
        // Photo mode selected.
        cameraLabel.isHidden = false
        videoLabel.isHidden = true
        cameraButton.backgroundColor = highlight
        cameraButton.setImage(UIImage(named: "camera_button_selected"), for: .normal)
        videoButton.backgroundColor = .clear
        videoButton.setImage(UIImage(named: "video_button_normal"), for: .normal)
    } else if shootingMode == .CCShootingMode_Video {
        // Video mode selected.
        cameraLabel.isHidden = true
        videoLabel.isHidden = false
        cameraButton.backgroundColor = .clear
        cameraButton.setImage(UIImage(named: "camera_button_normal"), for: .normal)
        videoButton.backgroundColor = highlight
        videoButton.setImage(UIImage(named: "video_button_selected"), for: .normal)
    }
}
// MARK: - /
// Starts a 1 Hz timer that updates the navigation title with the elapsed time.
func startRecordingTimer() {
    var count = 0 // elapsed whole seconds
    // [weak self] breaks the retain cycle: the run loop retains a repeating
    // timer and the old strong capture of self kept the controller alive
    // forever — deinit (where the timer was invalidated) could never run.
    // The old hop through a global queue was also removed: the work is a cheap
    // string format, and the UI update is dispatched to main regardless.
    let recordingTimer = Timer(timeInterval: 1, repeats: true, block: { [weak self] _ in
        guard let self = self else { return }
        count += 1
        let timeText = self.transToHourMinSec(second: count)
        DispatchQueue.main.async {
            self.setNavgaionTitle(string: timeText)
        }
    })
    timer = recordingTimer
    // .common keeps the timer firing during UI tracking (e.g. while scrolling).
    RunLoop.main.add(recordingTimer, forMode: .common)
}
// Clears the elapsed-time title and tears the timer down.
func stopRecordingTimer() {
    self.setNavgaionTitle(string: "")
    timer?.invalidate()
    // Release the reference so a stale (invalidated) timer can't be reused.
    timer = nil
}
// MARK: - 00:00:00
///
/// - Parameter second:
/// - Returns: String
/// Formats a duration in seconds as a zero-padded "HH:MM:SS" string.
/// - Parameter second: total elapsed seconds (non-negative expected)
/// - Returns: e.g. 3661 → "01:01:01"; hours may exceed two digits for very long inputs.
func transToHourMinSec(second: Int) -> String {
    let hours = second / 3600
    let minutes = (second % 3600) / 60
    let seconds = second % 60
    return String(format: "%02d:%02d:%02d", hours, minutes, seconds)
}
//MARK: -
// Fetches the newest photo in the library and uses it as the album button's
// background thumbnail.
func getAlbumFirstPhoto() {
    PHPhotoLibrary.requestAuthorization(for: .readWrite) { (status) in
        // .limited access can still read the photos the user exposed, so treat
        // it the same as full authorization (the old code silently skipped it,
        // leaving limited-access users with no thumbnail).
        guard status == .authorized || status == .limited else { return }
        let fetchOptions = PHFetchOptions()
        fetchOptions.sortDescriptors = [NSSortDescriptor(key:"creationDate", ascending: false)]
        let assetsFetchResults = PHAsset.fetchAssets(with: .image, options: fetchOptions)
        guard let asset = assetsFetchResults.firstObject else { return }
        let imageManager = PHCachingImageManager()
        let requestOptions = PHImageRequestOptions()
        // Synchronous is fine here: this closure already runs off the main thread.
        requestOptions.isSynchronous = true
        requestOptions.deliveryMode = .highQualityFormat
        imageManager.requestImageDataAndOrientation(for: asset, options: requestOptions) { imageData, _, _, _ in
            guard let imageData = imageData else { return }
            DispatchQueue.main.async {
                self.albumButton.setBackgroundImage(UIImage(data: imageData), for: .normal)
            }
        }
    }
}
//MARK: -
// Tracks the physical device orientation so captured stereo images can be
// tagged correctly, and shows/hides the "rotate to landscape" prompt.
@objc fileprivate func orientationDidChange() {
    switch UIDevice.current.orientation {
    case .unknown:
        print("未知")
    case .portrait:
        print("竖屏")
        imageCGImagePropertyOrientation = .up
        imageOrientation = .up
        showHorizontalScreenTips()
    case .portraitUpsideDown:
        print("颠倒竖屏")
        imageCGImagePropertyOrientation = .down
        imageOrientation = .down
        showHorizontalScreenTips()
    case .landscapeLeft:
        print("设备向左旋转横屏")
        imageCGImagePropertyOrientation = .left
        imageOrientation = .left
        hidenHorizontalScreenTips()
    case .landscapeRight:
        print("设备向右旋转横屏")
        imageCGImagePropertyOrientation = .right
        imageOrientation = .right
        hidenHorizontalScreenTips()
    case .faceUp:
        // Flat orientations keep the last recorded orientation and the prompt state.
        print("屏幕朝上")
    case .faceDown:
        print("屏幕朝下")
    default:
        // Covers any future @unknown orientations.
        break
    }
}
deinit {
    // Stop listening for rotation events and stop the hardware notifications.
    NotificationCenter.default.removeObserver(self, name: UIDevice.orientationDidChangeNotification, object: nil)
    UIDevice.current.endGeneratingDeviceOrientationNotifications()
    // Tear down the recording timer if it is still scheduled.
    timer?.invalidate()
}
//=======
// private func startRecording() {
// guard let wideAngleCameraMovieOutput = wideAngleCameraMovieOutput,
// let ultraWideCameraMovieOutput = ultraWideCameraMovieOutput else {
// print("Movie output not configured")
// return
// }
//
// let time = Date().timeIntervalSince1970
// let name1 = "back" + String(time) + ".mov"
// let name2 = "front" + String(time) + ".mov"
// let backCameraOutputURL = URL.documentsDirectory.appending(path:name1)
// let frontCameraOutputURL = URL.documentsDirectory.appending(path:name2)
//
// wideAngleCameraMovieOutput.startRecording(to: backCameraOutputURL, recordingDelegate: self)
// ultraWideCameraMovieOutput.startRecording(to: frontCameraOutputURL, recordingDelegate: self)
//
// isRecording = true
// startRecordingButton?.setTitle("Stop Recording", for: .normal)
// }
// private func stopRecording() {
// guard let wideAngleCameraMovieOutput = wideAngleCameraMovieOutput,
// let ultraWideCameraMovieOutput = ultraWideCameraMovieOutput else {
// print("Movie output not configured")
// return
// }
//
// wideAngleCameraMovieOutput.stopRecording()
// ultraWideCameraMovieOutput.stopRecording()
//
// isRecording = false
// startRecordingButton?.setTitle("Start Recording", for: .normal)
// }
}
extension CCSpatialShootController: AVCaptureFileOutputRecordingDelegate {
// Delegate callback — invoked once per movie output (wide-angle AND ultra-wide),
// so everything below runs twice per capture.
// NOTE(review): AVFoundation may deliver these callbacks off the main thread,
// and the two outputs can finish close together; `imgs` and the eye-URL
// properties are mutated without synchronization — confirm the callbacks are
// serialized in practice.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if let error = error {
        print("Video recording finished with error: \(error.localizedDescription)")
        DispatchQueue.main.async {
            // Was showSuccess — an error must not display the success checkmark.
            SVProgressHUD.showError(withStatus: "合成过程出现问题")
        }
    } else {
        DispatchQueue.main.async {
            SVProgressHUD.show(withStatus: "处理中...")
        }
        if shootingMode == .CCShootingMode_Camera {
            // Photo mode: each "photo" is the first frame of a very short clip.
            if output == wideAngleCameraMovieOutput {
                print("wide Angle photo recorded: \(outputFileURL)")
                if let firstFrame = self.firstFrame(from: outputFileURL) {
                    imgs.add(firstFrame)
                } else {
                    print("wide Angle firstframe is lost....")
                }
            } else if output == ultraWideCameraMovieOutput {
                print("ultra Wide photo recorded: \(outputFileURL)")
                if let firstFrame = self.firstFrame(from: outputFileURL) {
                    imgs.add(firstFrame)
                } else {
                    print("ultra Wide firstFrame is lost....")
                }
            }
            // Composite only once frames from BOTH cameras have arrived.
            if(imgs.count == 2){
                compositeSpatialPhoto()
            } else {
                print("images 少于2张....")
            }
        } else if shootingMode == .CCShootingMode_Video {
            if output == wideAngleCameraMovieOutput {
                print("wide Angle video recorded: \(outputFileURL)")
                leftEyeVideoURL = outputFileURL
            } else if output == ultraWideCameraMovieOutput {
                print("ultra Wide video recorded: \(outputFileURL)")
                rightEyeVideoURL = outputFileURL
            }
            // No-op until both eye URLs are set (guarded inside createSpVideo).
            createSpVideo()
        }
    }
}
//MARK:
//MARK: - spatial-video composition
// Merges the two eye clips into one MV-HEVC spatial video and saves it to the
// photo library. Called once per finished output; the first call (only one eye
// URL set) is a no-op.
func createSpVideo(){
    guard let leftURL = leftEyeVideoURL,
          let rightURL = rightEyeVideoURL,
          let outputURL = outputVideoURL else { return }
    // Clear the stored URLs immediately so the NEXT recording session cannot
    // pair a fresh file with a stale one left over from this session (the old
    // code never reset them).
    leftEyeVideoURL = nil
    rightEyeVideoURL = nil
    let spatialVideoWriter = SpatialVideoWriter()
    Task {
        spatialVideoWriter.writeSpatialVideo(leftEyeVideoURL: leftURL, rightEyeVideoURL: rightURL, outputVideoURL: outputURL) { [self] success, error in
            DispatchQueue.main.async {
                SVProgressHUD.dismiss()
            }
            if success {
                print("空间视频生成成功")
                saveVideoToLibrary(videoURL: outputURL)
                DispatchQueue.main.async {
                    SVProgressHUD.showSuccess(withStatus: "空间视频成功保存到相册")
                }
            } else if let error = error {
                print("生成空间视频失败:\(error.localizedDescription)")
                DispatchQueue.main.async {
                    SVProgressHUD.showInfo(withStatus: "空间视频保存失败:\(error.localizedDescription)")
                }
            }
        }
    }
}
// Saves the composited spatial video file into the user's photo library.
private func saveVideoToLibrary(videoURL: URL) {
    PHPhotoLibrary.shared().performChanges({
        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: videoURL)
    }) { success, error in
        if success {
            print("保存成功")
        } else if let error = error {
            // Include the underlying error — the old log bound it and threw it away.
            print("保存失败: \(error.localizedDescription)")
        }
    }
}
//MARK:
// Extracts the frame at t = 0 from a movie file, rotated into display
// orientation. Returns nil (and logs) if the frame cannot be generated.
private func firstFrame(from videoURL: URL) -> UIImage? {
    let generator = AVAssetImageGenerator(asset: AVURLAsset(url: videoURL))
    generator.appliesPreferredTrackTransform = true
    let start = CMTime(seconds: 0.0, preferredTimescale: 1)
    do {
        return UIImage(cgImage: try generator.copyCGImage(at: start, actualTime: nil))
    } catch {
        print("Error generating first frame: \(error.localizedDescription)")
        return nil
    }
}
// One-to-one mapping from the UIKit orientation enum to the ImageIO one.
// Unknown future UIKit cases fall back to .up.
func convertOrigation(orientation:UIImage.Orientation) -> CGImagePropertyOrientation{
    switch orientation {
    case .up:            return .up
    case .down:          return .down
    case .left:          return .left
    case .right:         return .right
    case .upMirrored:    return .upMirrored
    case .downMirrored:  return .downMirrored
    case .leftMirrored:  return .leftMirrored
    case .rightMirrored: return .rightMirrored
    @unknown default:    return .up
    }
}
//
// Combines the two captured frames into a single HEIC stereo-pair ("spatial
// photo") with camera-extrinsics metadata, then saves it to the photo library.
// NOTE(review): the force unwraps (`as! UIImage`, `destination!`, `cgImage!`)
// assume both frames arrived and HEIC encoding is available — a missing frame
// would crash here. The output filename "aaa12.HEIC" looks like a leftover
// debug name; each capture overwrites the previous file.
func compositeSpatialPhoto(){
    let img1:UIImage = imgs[0] as! UIImage
    let img2:UIImage = imgs[1] as! UIImage
    let url = URL.documentsDirectory.appending(path:"aaa12.HEIC")
    // Two-image destination: index 0 = left eye, index 1 = right eye.
    let destination = CGImageDestinationCreateWithURL(url as CFURL, UTType.heic.identifier as CFString, 2, nil)!
    // NOTE(review): hard-coded to .left, clobbering the value tracked in
    // orientationDidChange() — presumably a temporary fix for landscape-only
    // capture; confirm before shipping.
    imageCGImagePropertyOrientation = .left
    // Left-eye properties: stereo-pair group membership + identity extrinsics.
    let properties1 = [
        kCGImagePropertyGroups: [
            kCGImagePropertyGroupIndex: 0,
            kCGImagePropertyGroupType: kCGImagePropertyGroupTypeStereoPair,
            kCGImagePropertyGroupImageIndexLeft: 0,
            kCGImagePropertyGroupImageIndexRight: 1,
        ],
        kCGImagePropertyTIFFOrientation:imageCGImagePropertyOrientation,
        kCGImagePropertyHEIFDictionary: [
            kIIOMetadata_CameraExtrinsicsKey: [
                kIIOCameraExtrinsics_CoordinateSystemID: 0,
                kIIOCameraExtrinsics_Position: [
                    0,
                    0,
                    0
                ],
                kIIOCameraExtrinsics_Rotation: [
                    1, 0, 0,
                    0, 1, 0,
                    0, 0, 1
                ]
            ]
        ]
    ] as [CFString : Any]
    // Right-eye properties: same group, offset along x by the stereo baseline
    // (-0.019238 — presumably metres, i.e. ~19 mm lens separation; verify
    // against the device's actual camera geometry).
    let properties2 = [
        kCGImagePropertyGroups: [
            kCGImagePropertyGroupIndex: 0,
            kCGImagePropertyGroupType: kCGImagePropertyGroupTypeStereoPair,
            kCGImagePropertyGroupImageIndexLeft: 0,
            kCGImagePropertyGroupImageIndexRight: 1,
        ],
        kCGImagePropertyTIFFOrientation:imageCGImagePropertyOrientation,
        kCGImagePropertyHEIFDictionary: [
            kIIOMetadata_CameraExtrinsicsKey: [
                kIIOCameraExtrinsics_CoordinateSystemID: 0,
                kIIOCameraExtrinsics_Position: [
                    -0.019238,
                    0,
                    0
                ],
                kIIOCameraExtrinsics_Rotation: [
                    1, 0, 0,
                    0, 1, 0,
                    0, 0, 1
                ]
            ]
        ]
    ] as [CFString : Any]
    // Orientation fix-up is currently disabled (fixOrientation is a pass-through).
    let leftImg = img1//fixOrientation(img1)
    let rightImg = img2//fixOrientation(img2)
    CGImageDestinationAddImage(destination, leftImg.cgImage!, properties1 as CFDictionary)
    CGImageDestinationAddImage(destination, rightImg.cgImage!, properties2 as CFDictionary)
    let rr = CGImageDestinationFinalize(destination)
    if rr == false {
        print("ee..")
    }
    // Debug read-back: dump the properties that actually landed in the file.
    let source = CGImageSourceCreateWithURL(url as CFURL, nil)!
    guard let properties22 = CGImageSourceCopyPropertiesAtIndex(source, 1, nil) as? [CFString: Any] else {
        return
    }
    print("ssss:\(properties22)")
    savePhoto(url)
}
//
// Intended to redraw an image so its pixels are physically upright (.up).
// NOTE(review): the unconditional `return image` on the first line DISABLES the
// whole routine — everything below it is unreachable dead code, kept from an
// earlier experiment. Remove the early return to re-enable, or delete the rest.
func fixOrientation(_ image: UIImage) -> UIImage {
    return image
    // ---- unreachable from here down ----
    // return UIImage(cgImage: image.cgImage!, scale: image.scale, orientation: imageOrientation)
    // No-op if the orientation is already correct
    guard image.imageOrientation != .up else { return image }
    // We need to calculate the proper transformation to make the image upright.
    // We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
    var transform = CGAffineTransform.identity
    switch image.imageOrientation {
    case .down, .downMirrored:
        transform = transform.translatedBy(x: image.size.width, y: image.size.height)
        transform = transform.rotated(by: .pi)
    case .left, .leftMirrored:
        transform = transform.translatedBy(x: image.size.width, y: 0)
        transform = transform.rotated(by: .pi / 2)
    case .right, .rightMirrored:
        transform = transform.translatedBy(x: 0, y: image.size.height)
        transform = transform.rotated(by: -.pi / 2)
    default:
        break
    }
    switch image.imageOrientation {
    case .upMirrored, .downMirrored:
        transform = transform.translatedBy(x: image.size.width, y: 0)
        transform = transform.scaledBy(x: -1, y: 1)
    case .leftMirrored, .rightMirrored:
        transform = transform.translatedBy(x: image.size.height, y: 0)
        transform = transform.scaledBy(x: -1, y: 1)
    default:
        break
    }
    // Now we draw the underlying CGImage into a new context, applying the transform
    // calculated above.
    guard let cgImage = image.cgImage,
          let colorSpace = cgImage.colorSpace,
          let context = CGContext(data: nil,
                                  width: Int(image.size.width),
                                  height: Int(image.size.height),
                                  bitsPerComponent: cgImage.bitsPerComponent,
                                  bytesPerRow: 0,
                                  space: colorSpace,
                                  bitmapInfo: cgImage.bitmapInfo.rawValue)
    else {
        return image
    }
    context.concatenate(transform)
    switch image.imageOrientation {
    case .left, .leftMirrored, .right, .rightMirrored:
        // Grr...
        context.draw(cgImage, in: CGRect(x: 0, y: 0, width: image.size.height, height: image.size.width))
    default:
        context.draw(cgImage, in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
    }
    // And now we just create a new UIImage from the drawing context
    guard let cgImageFixed = context.makeImage() else { return image }
    let fixedImage = UIImage(cgImage: cgImageFixed)
    return fixedImage
}
//
// Writes the composited spatial HEIC into the photo library and reports the
// result via SVProgressHUD.
func savePhoto(_ fileURL: URL) {
    PHPhotoLibrary.shared().performChanges({
        // Add the file as a photo-proxy resource on a new asset.
        PHAssetCreationRequest.forAsset().addResource(with: .photoProxy, fileURL: fileURL, options: nil)
    }) { success, error in
        DispatchQueue.main.async {
            SVProgressHUD.dismiss()
        }
        guard let error = error else {
            print("Photo saved to library successfully.")
            DispatchQueue.main.async {
                SVProgressHUD.showSuccess(withStatus: "空间图片成功保存到相册")
            }
            return
        }
        print("Error saving photo to library: \(error.localizedDescription)")
        DispatchQueue.main.async {
            SVProgressHUD.showInfo(withStatus: "空间图片保存失败: \(error.localizedDescription)")
        }
    }
}
}