// Source: VPCamera3/tdvideo/tdvideo/PlayContoller5.swift
// Snapshot: 2024-03-05 11:44:34 +08:00 (402 lines, 15 KiB)
// Note: the original web export flagged "ambiguous Unicode characters";
// verify no confusable characters remain in string literals.
//
// PlayContoller5.swift
// tdvideo
//
// Created by mac on 2024/2/2.
//
import AVFoundation
import AudioToolbox
import ImageIO
import Photos
import UIKit
import UniformTypeIdentifiers
/// Records a split-second clip from two rear cameras simultaneously,
/// extracts the first frame of each clip, and merges the pair into a
/// stereo-pair HEIC photo saved to the photo library.
class PlayContoller5: UIViewController, AVCaptureFileOutputRecordingDelegate {
// Multi-camera session driving both rear lenses at once.
var session = AVCaptureMultiCamSession()
var backCameraDeviceInput: AVCaptureDeviceInput?
// NOTE(review): despite the name, this input is also a *rear* camera
// (the wide-angle lens, `position: .back` in configureSession()) —
// presumably deliberate so the two rear lenses form a stereo pair.
var frontCameraDeviceInput: AVCaptureDeviceInput?
var backCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?
var frontCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?
var startRecordingButton: UIButton?
var backCameraMovieOutput: AVCaptureMovieFileOutput?
var frontCameraMovieOutput: AVCaptureMovieFileOutput?
// Declared but never read or written anywhere in this file; recording
// state is currently not tracked.
var isRecording = false
// First frames (UIImage) of the two clips, appended by the recording
// delegate and consumed by bbb() once both are present.
var imgs:NSMutableArray = NSMutableArray()
/// Entry point: prepares the capture graph first (so the preview layers
/// exist), then lays the record button on top of them.
override func viewDidLoad() {
    super.viewDidLoad()
    view.backgroundColor = .white
    // Start from an empty set of captured first-frames.
    imgs.removeAllObjects()
    configureSession()
    setupUI()
}
/// Assembles the multi-cam capture graph: two *rear* cameras (ultra-wide
/// plus wide-angle — despite the "front" variable names), a microphone,
/// one movie-file output per camera, and two half-screen preview layers.
/// Finally starts the session off the main thread.
///
/// NOTE(review): inputs and outputs are added with automatic connection
/// formation; with an AVCaptureMultiCamSession it is not obvious from this
/// code alone which camera feeds which output or preview layer — Apple's
/// multi-cam samples use addInputWithNoConnections/addConnection to pin
/// the routing explicitly. Confirm behavior on device.
private func configureSession() {
session.beginConfiguration()
// Commit on every exit path, including the early `return`s below.
defer {
session.commitConfiguration()
}
// Device types experimented with during development:
//builtInWideAngleCamera
//builtInWideAngleCamera
//builtInTrueDepthCamera
// First capture device: the rear ultra-wide lens.
guard let backCamera = AVCaptureDevice.default(.builtInUltraWideCamera, for: .video, position: .back) else {
print("Could not find the back camera")
return
}
do {
backCameraDeviceInput = try AVCaptureDeviceInput(device: backCamera)
guard let backCameraDeviceInput = backCameraDeviceInput,
session.canAddInput(backCameraDeviceInput) else {
print("Could not add back camera input")
return
}
session.addInput(backCameraDeviceInput)
} catch {
print("Could not create back camera device input: \(error)")
return
}
// Second capture device: NOTE(review) — named "frontCamera" but this is
// the rear wide-angle camera (`position: .back`), presumably so the two
// rear lenses form a stereo pair; confirm the naming is just legacy.
guard let frontCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else {
print("Could not find the main camera")
return
}
do {
frontCameraDeviceInput = try AVCaptureDeviceInput(device: frontCamera)
guard let frontCameraDeviceInput = frontCameraDeviceInput,
session.canAddInput(frontCameraDeviceInput) else {
print("Could not add front camera input")
return
}
session.addInput(frontCameraDeviceInput)
} catch {
print("Could not create front camera device input: \(error)")
return
}
// Microphone input so the recorded .mov files carry an audio track.
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
print("Could not find audio device")
return
}
do {
let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
guard session.canAddInput(audioDeviceInput) else {
print("Could not add audio input")
return
}
session.addInput(audioDeviceInput)
} catch {
print("Could not create audio device input: \(error)")
return
}
// One movie-file output per camera so both clips record simultaneously.
backCameraMovieOutput = AVCaptureMovieFileOutput()
guard let backCameraMovieOutput = backCameraMovieOutput,
session.canAddOutput(backCameraMovieOutput) else {
print("Could not add the back camera movie output")
return
}
session.addOutput(backCameraMovieOutput)
// Second movie output (see routing caveat in the doc comment above).
frontCameraMovieOutput = AVCaptureMovieFileOutput()
guard let frontCameraMovieOutput = frontCameraMovieOutput,
session.canAddOutput(frontCameraMovieOutput) else {
print("Could not add the front camera movie output")
return
}
session.addOutput(frontCameraMovieOutput)
// Side-by-side previews: left half and right half of the top of the screen.
// NOTE(review): both layers are initialized with the same session, so which
// camera each one displays depends on implicit connection formation.
backCameraVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
backCameraVideoPreviewLayer?.frame = CGRect(x: 0, y: 0, width: view.frame.size.width / 2, height: view.frame.size.height / 2)
if let backCameraVideoPreviewLayer = backCameraVideoPreviewLayer {
view.layer.addSublayer(backCameraVideoPreviewLayer)
}
frontCameraVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
frontCameraVideoPreviewLayer?.frame = CGRect(x: view.frame.size.width / 2, y: 0, width: view.frame.size.width / 2, height: view.frame.size.height / 2)
if let frontCameraVideoPreviewLayer = frontCameraVideoPreviewLayer {
view.layer.addSublayer(frontCameraVideoPreviewLayer)
}
// startRunning() blocks, so keep it off the main thread.
DispatchQueue.global().async {
self.session.startRunning()
}
}
/// Creates the full-width record button pinned 250 pt above the bottom edge.
private func setupUI() {
    let button = UIButton(type: .system)
    button.setTitle("Start Recording", for: .normal)
    button.setTitleColor(.brown, for: .normal)
    button.addTarget(self, action: #selector(toggleRecording(_:)), for: .touchUpInside)
    button.frame = CGRect(x: 0,
                          y: view.frame.size.height - 250,
                          width: view.frame.size.width,
                          height: 50)
    view.addSubview(button)
    startRecordingButton = button
}
/// Record-button action.
///
/// NOTE(review): despite the name this never toggles — `isRecording` is
/// never consulted, so a second tap while recording starts a new capture.
@objc private func toggleRecording(_ sender: UIButton) {
    startRecording()
}
/// Starts simultaneous recordings from both movie outputs into the
/// Documents directory, then schedules an automatic stop 0.1 s later —
/// just long enough to capture a first frame from each camera.
private func startRecording() {
    imgs.removeAllObjects()
    guard let backOutput = backCameraMovieOutput,
          let frontOutput = frontCameraMovieOutput else {
        print("Movie output not configured")
        return
    }
    // Play the system camera-shutter sound (ID 1108).
    AudioServicesPlaySystemSound(SystemSoundID(1108))
    // Timestamped file names keep successive captures from colliding.
    let stamp = String(Date().timeIntervalSince1970)
    let backURL = URL.documentsDirectory.appending(path: "back" + stamp + ".mov")
    let frontURL = URL.documentsDirectory.appending(path: "front" + stamp + ".mov")
    backOutput.startRecording(to: backURL, recordingDelegate: self)
    frontOutput.startRecording(to: frontURL, recordingDelegate: self)
    // Stop almost immediately — only the first frame of each clip is used.
    DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
        self.stopRecording()
    }
}
/// Stops both in-flight recordings; completion (and frame extraction)
/// happens in the AVCaptureFileOutputRecordingDelegate callback.
private func stopRecording() {
    guard let backOutput = backCameraMovieOutput,
          let frontOutput = frontCameraMovieOutput else {
        print("Movie output not configured")
        return
    }
    backOutput.stopRecording()
    frontOutput.stopRecording()
}
/// Delegate callback fired once per movie output when its file is finalized.
/// Extracts the first frame of each clip; once both frames are collected,
/// combines them into a stereo HEIC via `bbb()`.
///
/// Fix: each AVCaptureMovieFileOutput may invoke its delegate on its own
/// internal queue, and `NSMutableArray` is not thread-safe — so all access
/// to `imgs` (and the `count == 2` check) is serialized on the main queue.
///
/// NOTE(review): the left/right order in `imgs` still depends on which
/// output finishes first, which can swap the stereo eyes — confirm intended.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if let error = error {
        print("Video recording finished with error: \(error.localizedDescription)")
        return
    }
    DispatchQueue.main.async {
        if output == self.backCameraMovieOutput {
            print("Back camera video recorded: \(outputFileURL)")
            if let firstFrame = self.firstFrame(from: outputFileURL) {
                // self.saveImageToLibrary(image: firstFrame)
                self.imgs.add(firstFrame)
            }
        } else if output == self.frontCameraMovieOutput {
            print("Front camera video recorded: \(outputFileURL)")
            if let firstFrame = self.firstFrame(from: outputFileURL) {
                // self.saveImageToLibrary(image: firstFrame)
                self.imgs.add(firstFrame)
            }
        }
        // Both clips delivered a frame — build and save the stereo photo.
        if self.imgs.count == 2 {
            self.bbb()
        }
    }
}
/// Grabs the frame at t = 0 from the movie at `videoURL`, honoring the
/// track's preferred transform. Returns nil (and logs) on failure.
///
/// NOTE(review): default time tolerances apply, so the generator may return
/// a nearby keyframe rather than the exact first frame — harmless for the
/// 0.1 s clips recorded here.
private func firstFrame(from videoURL: URL) -> UIImage? {
    let generator = AVAssetImageGenerator(asset: AVURLAsset(url: videoURL))
    generator.appliesPreferredTrackTransform = true
    do {
        let frame = try generator.copyCGImage(at: CMTime(seconds: 0.0, preferredTimescale: 1),
                                              actualTime: nil)
        return UIImage(cgImage: frame)
    } catch {
        print("Error generating first frame: \(error.localizedDescription)")
        return nil
    }
}
/// Writes `image` to the user's photo library. Currently unused — the
/// per-camera calls in the recording delegate are commented out.
private func saveImageToLibrary(image: UIImage) {
    PHPhotoLibrary.shared().performChanges({
        PHAssetChangeRequest.creationRequestForAsset(from: image)
    }) { success, error in
        guard success else {
            if let error {
                print("Error saving image to library: \(error.localizedDescription)")
            }
            return
        }
        print("Image saved to library")
    }
}
/// Builds a stereo-pair HEIC from the two captured first frames and saves
/// it to the photo library.
///
/// `imgs[0]` becomes the left eye and `imgs[1]` the right eye; the HEIF
/// camera-extrinsics dictionaries record a ~19.238 mm horizontal baseline
/// between the two (virtual) cameras, with identity rotation.
///
/// Fixes vs. original: no force-unwraps/force-casts (bails out with a log
/// instead of crashing), removes two unused CGRect locals and an unused
/// UIImage read-back, checks the result of CGImageDestinationFinalize, and
/// no longer aborts the library save when the debug metadata read-back fails.
func bbb(){
    guard imgs.count >= 2,
          let img1 = imgs[0] as? UIImage,
          let img2 = imgs[1] as? UIImage else {
        print("Stereo pair incomplete; not writing HEIC")
        return
    }
    let url = URL.documentsDirectory.appending(path: "aaa12.HEIC")
    guard let destination = CGImageDestinationCreateWithURL(url as CFURL,
                                                            UTType.heic.identifier as CFString,
                                                            2,
                                                            nil) else {
        print("Could not create HEIC destination at \(url)")
        return
    }
    // Shared stereo-group description: image index 0 is the left eye,
    // index 1 the right eye.
    let stereoGroup: [CFString: Any] = [
        kCGImagePropertyGroupIndex: 0,
        kCGImagePropertyGroupType: kCGImagePropertyGroupTypeStereoPair,
        kCGImagePropertyGroupImageIndexLeft: 0,
        kCGImagePropertyGroupImageIndexRight: 1,
    ]
    // Extrinsics for a camera offset `x` metres along the stereo baseline.
    func extrinsics(x: Double) -> [CFString: Any] {
        [
            kIIOMetadata_CameraExtrinsicsKey: [
                kIIOCameraExtrinsics_CoordinateSystemID: 0,
                kIIOCameraExtrinsics_Position: [x, 0, 0],
                // Identity rotation: both cameras face the same direction.
                kIIOCameraExtrinsics_Rotation: [1, 0, 0,
                                                0, 1, 0,
                                                0, 0, 1],
            ]
        ]
    }
    let properties1: [CFString: Any] = [
        kCGImagePropertyGroups: stereoGroup,
        kCGImagePropertyHEIFDictionary: extrinsics(x: 0),
    ]
    let properties2: [CFString: Any] = [
        kCGImagePropertyGroups: stereoGroup,
        // Right camera sits 19.238 mm from the left along the x axis.
        kCGImagePropertyHEIFDictionary: extrinsics(x: -0.019238),
    ]
    // Frames from AVAssetImageGenerator are already .up, but normalize
    // defensively before dropping down to CGImage.
    guard let leftCG = fixOrientation(img1).cgImage,
          let rightCG = fixOrientation(img2).cgImage else {
        print("Missing CGImage backing for stereo frames")
        return
    }
    CGImageDestinationAddImage(destination, leftCG, properties1 as CFDictionary)
    CGImageDestinationAddImage(destination, rightCG, properties2 as CFDictionary)
    guard CGImageDestinationFinalize(destination) else {
        print("Failed to finalize stereo HEIC")
        return
    }
    // Debug: read back the right image's properties to confirm the metadata
    // round-tripped. Failure here is logged but does not block the save.
    if let source = CGImageSourceCreateWithURL(url as CFURL, nil),
       let rightProps = CGImageSourceCopyPropertiesAtIndex(source, 1, nil) as? [CFString: Any] {
        print(rightProps)
    } else {
        print("Could not read back HEIC properties for verification")
    }
    savePhoto(url)
}
/// Imports the finished HEIC at `fileURL` into the user's photo library.
func savePhoto(_ fileURL: URL) {
    PHPhotoLibrary.shared().performChanges({
        // NOTE(review): the resource is added as `.photoProxy`; confirm this
        // is intended over `.photo` for a finalized HEIC file.
        let request = PHAssetCreationRequest.forAsset()
        request.addResource(with: .photoProxy, fileURL: fileURL, options: nil)
    }) { success, error in
        guard let error else {
            print("Photo saved to library successfully.")
            return
        }
        print("Error saving photo to library: \(error.localizedDescription)")
    }
}
// MARK: - Orientation normalization
/// Returns a copy of `image` redrawn so its `imageOrientation` is `.up`,
/// baking any EXIF rotation/mirroring into the pixel data.
///
/// NOTE(review): the bitmap context is sized from `image.size` (points, not
/// pixels), so images with scale > 1 would be redrawn at 1x resolution —
/// confirm acceptable. The frames produced in this file come from
/// `UIImage(cgImage:)` (scale 1, orientation .up), so for the current call
/// sites this function is effectively a no-op.
func fixOrientation(_ image: UIImage) -> UIImage {
// No-op if the orientation is already correct
guard image.imageOrientation != .up else { return image }
// We need to calculate the proper transformation to make the image upright.
// We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
var transform = CGAffineTransform.identity
switch image.imageOrientation {
case .down, .downMirrored:
transform = transform.translatedBy(x: image.size.width, y: image.size.height)
transform = transform.rotated(by: .pi)
case .left, .leftMirrored:
transform = transform.translatedBy(x: image.size.width, y: 0)
transform = transform.rotated(by: .pi / 2)
case .right, .rightMirrored:
transform = transform.translatedBy(x: 0, y: image.size.height)
transform = transform.rotated(by: -.pi / 2)
default:
break
}
// Second pass: undo mirroring with a horizontal flip.
switch image.imageOrientation {
case .upMirrored, .downMirrored:
transform = transform.translatedBy(x: image.size.width, y: 0)
transform = transform.scaledBy(x: -1, y: 1)
case .leftMirrored, .rightMirrored:
transform = transform.translatedBy(x: image.size.height, y: 0)
transform = transform.scaledBy(x: -1, y: 1)
default:
break
}
// Now we draw the underlying CGImage into a new context, applying the transform
// calculated above.
guard let cgImage = image.cgImage,
let colorSpace = cgImage.colorSpace,
let context = CGContext(data: nil,
width: Int(image.size.width),
height: Int(image.size.height),
bitsPerComponent: cgImage.bitsPerComponent,
bytesPerRow: 0,
space: colorSpace,
bitmapInfo: cgImage.bitmapInfo.rawValue)
else {
return image
}
context.concatenate(transform)
switch image.imageOrientation {
case .left, .leftMirrored, .right, .rightMirrored:
// 90°-rotated cases: width/height are swapped when drawing the raw pixels.
context.draw(cgImage, in: CGRect(x: 0, y: 0, width: image.size.height, height: image.size.width))
default:
context.draw(cgImage, in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
}
// And now we just create a new UIImage from the drawing context
guard let cgImageFixed = context.makeImage() else { return image }
let fixedImage = UIImage(cgImage: cgImageFixed)
return fixedImage
}
}