//
//  MetalPlayer.swift
//  tdvideo
//
//  Created by aaa on 2024/1/19.
//

import AVFoundation
import MetalKit
import SwiftUI

/// A Metal view that presents a preview of an in-progress video conversion.
///
/// Frames arrive as `CVPixelBuffer`s, are combined on the GPU by the
/// "sideBySideEffect" compute kernel, and the result is rendered into the
/// view's drawable through a `CIContext`.
class MetalPlayer: MTKView {

    // MARK: - Properties

    // MARK: Private

    /// Color space used when rendering the composed image into the drawable.
    private let colorSpace = CGColorSpaceCreateDeviceRGB()

    /// Command queue used to submit GPU work for this view.
    /// Built from `metalDevice` so it always matches the device used for
    /// the pipeline state and texture cache.
    private lazy var commandQueue: MTLCommandQueue? = {
        return metalDevice.makeCommandQueue()
    }()

    /// CIContext used to write the composed texture into the view for preview.
    private lazy var context: CIContext = {
        return CIContext(
            mtlDevice: metalDevice,
            options: [CIContextOption.workingColorSpace: NSNull()]
        )
    }()

    /// The Metal device used for all pipeline, cache, and texture work.
    /// If no Metal device exists the app cannot render at all, so a crash
    /// here is a deliberate unrecoverable-configuration failure.
    private let metalDevice = MTLCreateSystemDefaultDevice()!

    /// Cache of reusable Metal textures backed by `CVPixelBuffer`s.
    private var textureCache: CVMetalTextureCache?

    /// Pool that vends writable output `CVPixelBuffer`s for composed frames.
    private var outputPixelBufferPool: CVPixelBufferPool?

    /// Compute pipeline running the "sideBySideEffect" kernel.
    private var computePipelineState: MTLComputePipelineState?

    /// The image that should be drawn into the view.
    /// Setting it triggers an immediate redraw.
    private var image: CIImage? {
        didSet { draw() }
    }

    // MARK: - Methods

    // MARK: Public

    /// Initializes this view with the provided coordinates and size.
    /// - Parameter frameRect: The coordinates and size of the rendered view.
    init(frame frameRect: CGRect) {
        super.init(
            frame: frameRect,
            device: metalDevice
        )
        setup(frameSize: frameRect.size)
    }

    /// Initializer required for `NSCoder` conformance.
    /// - Parameter aDecoder: The NSCoder used to create this view.
    required init(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        // Use the same device as the rest of the pipeline so the drawable,
        // command queue, pipeline state, and texture cache share one MTLDevice.
        device = metalDevice
        setup(frameSize: .zero)
    }

    // MARK: Private

    /// Configures this view and its properties, then creates the necessary caches.
    /// - Parameter frameSize: The desired size of this view.
    private func setup(frameSize: CGSize) {
        // The drawable's texture is written by CIContext, so it must not be
        // framebuffer-only; drawing is driven explicitly via `draw()`.
        framebufferOnly = false
        enableSetNeedsDisplay = false

        guard let defaultLibrary = metalDevice.makeDefaultLibrary() else {
            assertionFailure("Could not create default Metal device.")
            return
        }
        guard let kernelFunction = defaultLibrary.makeFunction(name: "sideBySideEffect") else {
            // Previously force-unwrapped; a missing kernel now fails loudly
            // in debug instead of crashing in release.
            assertionFailure("Could not find kernel function 'sideBySideEffect'.")
            return
        }
        do {
            computePipelineState = try metalDevice.makeComputePipelineState(function: kernelFunction)
        } catch {
            print("Could not create pipeline state: \(error)")
        }

        setupCache(
            outputRetainedBufferCountHint: 5,
            frameSize: frameSize
        )
    }

    /// Configures the caches needed to hold the textures being written.
    /// - Parameters:
    ///   - outputRetainedBufferCountHint: The ideal number of buffers to retain.
    ///   - frameSize: The desired size of this view and its textures.
    private func setupCache(
        outputRetainedBufferCountHint: Int,
        frameSize: CGSize
    ) {
        reset()

        guard let outputPixelBufferPool = createBufferPool(size: frameSize) else { return }
        self.outputPixelBufferPool = outputPixelBufferPool

        var metalTextureCache: CVMetalTextureCache?
        if CVMetalTextureCacheCreate(
            kCFAllocatorDefault,
            nil,
            metalDevice,
            nil,
            &metalTextureCache
        ) != kCVReturnSuccess {
            assertionFailure("Unable to allocate texture cache")
        } else {
            textureCache = metalTextureCache
        }
    }

    /// Resets the caches so they can be reconfigured.
    func reset() {
        outputPixelBufferPool = nil
        textureCache = nil
    }

    /// Renders the provided `CVPixelBuffer` images into the view.
    /// - Parameters:
    ///   - leftPixelBuffer: The left-eye preview image rendered into the view.
    ///   - rightPixelBuffer: The right-eye preview image rendered into the view.
    func render(
        leftPixelBuffer: CVPixelBuffer,
        rightPixelBuffer: CVPixelBuffer
    ) {
        // Guard instead of force-unwrapping: if setup failed we skip the
        // frame rather than crash.
        guard let outputPixelBufferPool, let computePipelineState else {
            print("Render failure: pipeline has not been configured. (\(self.description))")
            return
        }

        var newPixelBuffer: CVPixelBuffer?
        CVPixelBufferPoolCreatePixelBuffer(
            kCFAllocatorDefault,
            outputPixelBufferPool,
            &newPixelBuffer
        )
        guard let outputBuffer = newPixelBuffer else {
            print("Allocation failure: Could not get pixel buffer from pool. (\(self.description))")
            return
        }

        guard
            let leftInputTexture = makeTextureFromCVPixelBuffer(
                pixelBuffer: leftPixelBuffer,
                textureFormat: .bgra8Unorm
            ),
            let rightInputTexture = makeTextureFromCVPixelBuffer(
                pixelBuffer: rightPixelBuffer,
                textureFormat: .bgra8Unorm
            ),
            let outputTexture = makeTextureFromCVPixelBuffer(
                pixelBuffer: outputBuffer,
                textureFormat: .bgra8Unorm
            )
        else {
            return
        }

        // Set up the command queue, buffer, and encoder.
        guard
            let commandQueue = commandQueue,
            let commandBuffer = commandQueue.makeCommandBuffer(),
            let commandEncoder = commandBuffer.makeComputeCommandEncoder()
        else {
            print("Failed to create a Metal command queue.")
            if let textureCache {
                CVMetalTextureCacheFlush(textureCache, 0)
            }
            return
        }

        commandEncoder.label = "BlendGPU"
        commandEncoder.setComputePipelineState(computePipelineState)
        commandEncoder.setTexture(leftInputTexture, index: 0)
        commandEncoder.setTexture(rightInputTexture, index: 1)
        commandEncoder.setTexture(outputTexture, index: 2)

        // Set up the thread groups: ceil-divide the texture size by the
        // threadgroup size so every pixel is covered exactly once.
        let width = computePipelineState.threadExecutionWidth
        let height = computePipelineState.maxTotalThreadsPerThreadgroup / width
        let threadsPerThreadgroup = MTLSizeMake(width, height, 1)
        let threadgroupsPerGrid = MTLSize(
            width: (leftInputTexture.width + width - 1) / width,
            height: (leftInputTexture.height + height - 1) / height,
            depth: 1
        )
        commandEncoder.dispatchThreadgroups(threadgroupsPerGrid, threadsPerThreadgroup: threadsPerThreadgroup)
        commandEncoder.endEncoding()

        commandBuffer.commit()
        commandBuffer.waitUntilCompleted()

        self.image = CIImage(cvPixelBuffer: outputBuffer)
    }

    /// Renders the provided `CVPixelBuffer` image into the view.
    /// - Parameter pixelBuffer: The preview image rendered into the view.
    func render1(
        pixelBuffer: CVPixelBuffer
    ) {
        guard let outputPixelBufferPool, let computePipelineState else {
            print("Render failure: pipeline has not been configured. (\(self.description))")
            return
        }

        var newPixelBuffer: CVPixelBuffer?
        CVPixelBufferPoolCreatePixelBuffer(
            kCFAllocatorDefault,
            outputPixelBufferPool,
            &newPixelBuffer
        )
        guard let outputBuffer = newPixelBuffer else {
            print("Allocation failure: Could not get pixel buffer from pool. (\(self.description))")
            return
        }

        guard
            let inputTexture = makeTextureFromCVPixelBuffer(
                pixelBuffer: pixelBuffer,
                textureFormat: .bgra8Unorm
            ),
            let outputTexture = makeTextureFromCVPixelBuffer(
                pixelBuffer: outputBuffer,
                textureFormat: .bgra8Unorm
            )
        else {
            return
        }

        // Set up the command queue, buffer, and encoder.
        guard
            let commandQueue = commandQueue,
            let commandBuffer = commandQueue.makeCommandBuffer(),
            let commandEncoder = commandBuffer.makeComputeCommandEncoder()
        else {
            print("Failed to create a Metal command queue.")
            if let textureCache {
                CVMetalTextureCacheFlush(textureCache, 0)
            }
            return
        }

        commandEncoder.label = "BlendGPU"
        commandEncoder.setComputePipelineState(computePipelineState)
        commandEncoder.setTexture(inputTexture, index: 0)
        commandEncoder.setTexture(outputTexture, index: 2)

        // Set up the thread groups.
        // Fixed: previously this dispatched `inputTexture.width × height`
        // *threadgroups* (not threads), massively over-dispatching the kernel.
        // Ceil-divide by the threadgroup size, matching `render(...)`.
        let width = computePipelineState.threadExecutionWidth
        let height = computePipelineState.maxTotalThreadsPerThreadgroup / width
        let threadsPerThreadgroup = MTLSizeMake(width, height, 1)
        let threadgroupsPerGrid = MTLSize(
            width: (inputTexture.width + width - 1) / width,
            height: (inputTexture.height + height - 1) / height,
            depth: 1
        )
        commandEncoder.dispatchThreadgroups(threadgroupsPerGrid, threadsPerThreadgroup: threadsPerThreadgroup)
        commandEncoder.endEncoding()

        commandBuffer.commit()
        commandBuffer.waitUntilCompleted()

        self.image = CIImage(cvPixelBuffer: outputBuffer)
    }

    /// Draws `image` onto this view.
    /// - Parameter rect: The coordinates and size at which to draw the image.
    override func draw(_ rect: CGRect) {
        guard
            let image = image,
            let currentDrawable = currentDrawable,
            let commandBuffer = commandQueue?.makeCommandBuffer()
        else {
            return
        }

        let currentTexture = currentDrawable.texture
        let drawingBounds = CGRect(origin: .zero, size: drawableSize)

        // Scale the image to fill the drawable exactly.
        let scaleX = drawableSize.width / image.extent.width
        let scaleY = drawableSize.height / image.extent.height
        let scaledImage = image.transformed(by: CGAffineTransform(scaleX: scaleX, y: scaleY))

        context.render(
            scaledImage,
            to: currentTexture,
            commandBuffer: commandBuffer,
            bounds: drawingBounds,
            colorSpace: colorSpace
        )

        commandBuffer.present(currentDrawable)
        commandBuffer.commit()
    }

    /// Creates an `MTLTexture` from the provided `CVPixelBuffer` for use in shaders.
    /// - Parameters:
    ///   - pixelBuffer: The pixel buffer to convert into a texture.
    ///   - textureFormat: The pixel format of the texture.
    /// - Returns: The converted `MTLTexture`, if it could be created.
    private func makeTextureFromCVPixelBuffer(
        pixelBuffer: CVPixelBuffer,
        textureFormat: MTLPixelFormat
    ) -> MTLTexture? {
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)

        guard let textureCache else { return nil }

        // Create a Metal texture from the image buffer.
        var cvTextureOut: CVMetalTexture?
        CVMetalTextureCacheCreateTextureFromImage(
            kCFAllocatorDefault,
            textureCache,
            pixelBuffer,
            nil,
            textureFormat,
            width,
            height,
            0,
            &cvTextureOut
        )

        guard
            let cvTextureOut,
            let texture = CVMetalTextureGetTexture(cvTextureOut)
        else {
            // Flush so a failed wrap does not leave stale entries in the cache.
            CVMetalTextureCacheFlush(textureCache, 0)
            return nil
        }

        return texture
    }

    /// Creates a `CVPixelBufferPool` to vend pixel buffers for writing frames
    /// of the in-progress conversion.
    /// - Parameter size: The size of each pixel buffer.
    /// - Returns: The `CVPixelBufferPool`, if it could be created.
    private func createBufferPool(size: CGSize) -> CVPixelBufferPool? {
        let allocationThreshold = 5

        // Dimensions go through Int (not Float) so large sizes stay exact.
        let sourcePixelBufferAttributesDictionary = [
            kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA),
            kCVPixelBufferWidthKey as String: NSNumber(value: Int(size.width)),
            kCVPixelBufferHeightKey as String: NSNumber(value: Int(size.height)),
            kCVPixelBufferMetalCompatibilityKey as String: kCFBooleanTrue!,
            kCVPixelBufferIOSurfacePropertiesKey as String: [
                kCVPixelBufferIOSurfaceCoreAnimationCompatibilityKey: kCFBooleanTrue,
            ]
        ] as [String: Any]

        let poolAttributes = [kCVPixelBufferPoolMinimumBufferCountKey as String: allocationThreshold]
        var cvPixelBufferPool: CVPixelBufferPool?
        CVPixelBufferPoolCreate(
            kCFAllocatorDefault,
            poolAttributes as NSDictionary?,
            sourcePixelBufferAttributesDictionary as NSDictionary?,
            &cvPixelBufferPool
        )
        return cvPixelBufferPool
    }
}