//
// PlayContoller8.swift
// tdvideo
//
// Created by mac on 2024/2/18.
//
import Foundation
import AVKit
import VideoToolbox
import CoreImage
import ImageIO
import UIKit
import AVFoundation
import CoreMedia

// Plays an MV-HEVC spatial video and, on demand, re-renders each frame
// as a red-blue anaglyph built from the two eye layers.
class PlayContoller8: UIViewController {
    var player: AVPlayer?
    var playerLayer: AVPlayerLayer?
    var isRedFilterEnabled = false
    var asset: AVAsset?
    var assetReader: AVAssetReader?
    var output: AVAssetReaderTrackOutput?
    var selectedIndex: Int = 0  // 0 = spatial video, 1 = red-blue anaglyph
    var videoConver: VideoConvertor3 = VideoConvertor3()  // offline converter; unused in this playback path
    /// (Re)creates an AVAssetReader that vends both MV-HEVC layers
    /// (layer IDs 0 and 1) so left/right eye buffers can be pulled per frame.
    func loadVideo() async {
        do {
            // Cancel any in-flight read before starting over.
            if let reader = assetReader, reader.status == .reading {
                reader.cancelReading()
            }
            assetReader = try AVAssetReader(asset: asset!)
            guard let videoTrack = try await asset!.loadTracks(withMediaType: .video).first else { return }
            output = AVAssetReaderTrackOutput(
                track: videoTrack,
                outputSettings: [
                    AVVideoDecompressionPropertiesKey: [
                        kVTDecompressionPropertyKey_RequestedMVHEVCVideoLayerIDs: [0, 1] as CFArray,
                    ],
                ]
            )
            assetReader!.timeRange = CMTimeRange(start: .zero, duration: .positiveInfinity)
            assetReader!.add(output!)
            assetReader!.startReading()
        } catch {
            print("Error loading video: \(error)")
        }
    }
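    // Note on loadVideo(): AVAssetReader only moves forward, so this must be
    // called again before replaying or seeking backwards (the commented-out
    // loop handler in viewDidLoad below does exactly that).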
    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .black
        selectedIndex = 0
        let videoURL = Bundle.main.url(forResource: "IMG_0071", withExtension: "MOV")!
        asset = AVAsset(url: videoURL)
        Task {
            await loadVideo()
        }
        let playerItem = AVPlayerItem(asset: asset!)
        // Per-frame composition: pass frames through for spatial playback,
        // or rebuild each frame as an anaglyph from the two eye layers.
        playerItem.videoComposition = AVVideoComposition(asset: playerItem.asset) { [self] request in
            let compositionTime = request.compositionTime
            print(compositionTime.value)  // per-frame debug log
            if selectedIndex == 0 {
                // Spatial video: render the source frame unchanged.
                request.finish(with: request.sourceImage, context: nil)
            } else if selectedIndex == 1 {
                // Anaglyph: pull the left/right eye buffers for this timestamp.
                getImage(at: compositionTime) { [self] leftImage, rightImage in
                    if let leftImage, let rightImage {
                        let lastImg = getHonalanImg(leftImage: leftImage, rightImage: rightImage)
                        request.finish(with: lastImg, context: nil)
                    } else {
                        // No matching sample; fall back so the request still finishes.
                        request.finish(with: request.sourceImage, context: nil)
                    }
                }
            }
        }
        player = AVPlayer(playerItem: playerItem)
        playerLayer = AVPlayerLayer(player: player!)
        playerLayer?.frame = view.bounds
        view.layer.addSublayer(playerLayer!)
        player?.play()
        // Optional: loop playback by re-priming the reader when the item ends.
        // player!.actionAtItemEnd = .none
        // NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: playerItem, queue: nil) { [self] _ in
        //     Task {
        //         await loadVideo()
        //     }
        //     player?.seek(to: .zero)
        //     player?.play()
        // }
        let segmentedControl = UISegmentedControl(items: ["Spatial Video", "Red-Blue Anaglyph"])
        segmentedControl.frame = CGRect(x: 20, y: 700, width: 360, height: 45)
        segmentedControl.selectedSegmentIndex = 0
        self.view.addSubview(segmentedControl)
        segmentedControl.layer.borderWidth = 1.0
        segmentedControl.layer.borderColor = UIColor.blue.cgColor
        segmentedControl.tintColor = UIColor.blue
        let normalTextAttributes = [NSAttributedString.Key.foregroundColor: UIColor.white]
        let selectedTextAttributes = [NSAttributedString.Key.foregroundColor: UIColor.blue]
        segmentedControl.setTitleTextAttributes(normalTextAttributes, for: .normal)
        segmentedControl.setTitleTextAttributes(selectedTextAttributes, for: .selected)
        segmentedControl.addTarget(self, action: #selector(segmentedControlValueChanged(_:)), for: .valueChanged)
    }
    @objc func segmentedControlValueChanged(_ sender: UISegmentedControl) {
        selectedIndex = sender.selectedSegmentIndex
        print("Selected segment \(selectedIndex)")
    }
    /// Walks the reader output forward until it finds the sample whose presentation
    /// timestamp matches `time`, then splits it into left/right eye CIImages.
    /// Assumes composition requests arrive in presentation order, since the
    /// reader can only move forward. Calls `completion` exactly once.
    func getImage(at time: CMTime, completion: @escaping ((CIImage?, CIImage?) -> Void)) {
        guard let output else {
            completion(nil, nil)
            return
        }
        while let nextSampleBuffer = output.copyNextSampleBuffer() {
            let presentationTime = CMSampleBufferGetPresentationTimeStamp(nextSampleBuffer)
            if presentationTime == time {
                guard let taggedBuffers = nextSampleBuffer.taggedBuffers else { break }
                let leftEyeBuffer = taggedBuffers.first(where: {
                    $0.tags.first(matchingCategory: .stereoView) == .stereoView(.leftEye)
                })?.buffer
                let rightEyeBuffer = taggedBuffers.first(where: {
                    $0.tags.first(matchingCategory: .stereoView) == .stereoView(.rightEye)
                })?.buffer
                if let leftEyeBuffer,
                   let rightEyeBuffer,
                   case let .pixelBuffer(leftEyePixelBuffer) = leftEyeBuffer,
                   case let .pixelBuffer(rightEyePixelBuffer) = rightEyeBuffer {
                    completion(CIImage(cvPixelBuffer: leftEyePixelBuffer),
                               CIImage(cvPixelBuffer: rightEyePixelBuffer))
                    return  // success; skip the nil fallback below
                }
                break
            }
        }
        // No matching sample (or it lacked eye layers): report failure once.
        completion(nil, nil)
    }
    /// Builds a red-blue anaglyph: the left eye contributes only the red channel,
    /// the right eye only the blue channel, and the two are screen-blended.
    func getHonalanImg(leftImage: CIImage, rightImage: CIImage) -> CIImage {
        // CIColorMatrix expects one 4-component vector per output channel,
        // not a full 4x5 matrix in a single vector.
        let zero = CIVector(x: 0, y: 0, z: 0, w: 0)
        // Left eye: keep red, zero out green and blue.
        let redFilter = CIFilter(name: "CIColorMatrix")!
        redFilter.setValue(leftImage, forKey: kCIInputImageKey)
        redFilter.setValue(CIVector(x: 1, y: 0, z: 0, w: 0), forKey: "inputRVector")
        redFilter.setValue(zero, forKey: "inputGVector")
        redFilter.setValue(zero, forKey: "inputBVector")
        // Right eye: keep blue, zero out red and green.
        let blueFilter = CIFilter(name: "CIColorMatrix")!
        blueFilter.setValue(rightImage, forKey: kCIInputImageKey)
        blueFilter.setValue(zero, forKey: "inputRVector")
        blueFilter.setValue(zero, forKey: "inputGVector")
        blueFilter.setValue(CIVector(x: 0, y: 0, z: 1, w: 0), forKey: "inputBVector")
        guard let redOutputImage = redFilter.outputImage,
              let blueOutputImage = blueFilter.outputImage else {
            return leftImage  // fall back rather than force-unwrap
        }
        // Screen blend combines the two single-channel images without clipping.
        let compositeFilter = CIFilter(name: "CIScreenBlendMode")!
        compositeFilter.setValue(redOutputImage, forKey: kCIInputImageKey)
        compositeFilter.setValue(blueOutputImage, forKey: kCIInputBackgroundImageKey)
        return compositeFilter.outputImage ?? leftImage
    }
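    // A minimal debugging aid, not part of the original playback path: renders a
    // CIImage (e.g. one anaglyph frame) into a UIImage for inspection. Creating a
    // fresh CIContext per call is an assumption made for brevity.
    func renderForDebugging(_ image: CIImage) -> UIImage? {
        let context = CIContext()
        guard let cgImage = context.createCGImage(image, from: image.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }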
}
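// A minimal usage sketch (assumptions: "IMG_0071.MOV" is a spatial/MV-HEVC video
// bundled with the app, and the controller is shown from an existing UIKit screen):
//
//     let controller = PlayContoller8()
//     present(controller, animated: true)
//
// Switching the segmented control to "Red-Blue Anaglyph" re-renders each frame
// from the left/right eye layers via getHonalanImg(leftImage:rightImage:).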