【发布时间】:2020-12-24 03:32:43
【问题描述】:
我正在开发适用于 iOS 和 macOS 的视频过滤器,它从默认摄像头捕获视频输入,应用过滤器 (MPSImageGaussianBlur),并使用 MTKView 对其进行渲染。
它在 iOS 上运行良好(iPhone 6s 和 iPhone 11 上为 13),但我在 MacOS(MacBook Pro 上为 10.15)上只看到一个红屏,不知道为什么。代码按预期重复调用 captureOutput() 和 draw()。
这是 VS2CameraSession,它执行大部分工作。 (请注意,我按照CVMetalTextureCacheCreateTextureFromImage returns -6660 on macOS 10.13 的建议在 videoSettings 中添加了 kCVPixelBufferMetalCompatibilityKey 标志)
import AVFoundation
import MetalPerformanceShaders
/// Captures video frames from the default camera, wraps them in Metal textures
/// (via the delegate extension below), and blurs each frame into a drawable.
class VS2CameraSession: NSObject {
    let gpu = MTLCreateSystemDefaultDevice()!

    // A command queue is expensive to create; Apple recommends making one per
    // device and reusing it, not one per frame as the original code did.
    private lazy var commandQueue: MTLCommandQueue? = gpu.makeCommandQueue()

    // The blur kernel is stateless between frames, so build it once as well.
    private lazy var blurFilter = MPSImageGaussianBlur(device: gpu, sigma: 10.0)

    private let session = AVCaptureSession()
    private let camera = AVCaptureDevice.default(for: .video)
    private var textureCache: CVMetalTextureCache?
    private var texture: MTLTexture?

    /// Configures the capture pipeline (camera input → BGRA video-data output)
    /// and starts the session. Silently returns if any stage cannot be added.
    func startRunning() {
        CVMetalTextureCacheCreate(nil, nil, gpu, nil, &textureCache)
        guard let camera = camera,
              let input = try? AVCaptureDeviceInput(device: camera) else {
            return
        }
        guard session.canAddInput(input) else {
            return
        }
        session.addInput(input)

        let output = AVCaptureVideoDataOutput()
        output.alwaysDiscardsLateVideoFrames = true
        #if os(macOS)
        // On macOS the pixel buffers are not Metal compatible by default;
        // without this key CVMetalTextureCacheCreateTextureFromImage fails
        // with -6660. See:
        // https://stackoverflow.com/questions/46549906/cvmetaltexturecachecreatetexturefromimage-returns-6660-on-macos-10-13
        output.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
            kCVPixelBufferMetalCompatibilityKey as String: true
        ]
        #else
        output.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
        ]
        #endif
        output.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        // Guard before adding: addOutput raises an exception when it cannot
        // accept the output, instead of failing gracefully.
        guard session.canAddOutput(output) else {
            return
        }
        session.addOutput(output)
        session.startRunning()
    }

    /// Encodes a Gaussian blur from the most recent camera texture into the
    /// drawable's texture, then presents it. No-op until a frame has arrived.
    func draw(drawable: CAMetalDrawable?) {
        guard let texture = self.texture,
              let drawable = drawable,
              let commandQueue = commandQueue,
              let commandBuffer = commandQueue.makeCommandBuffer() else {
            return
        }
        blurFilter.encode(commandBuffer: commandBuffer, sourceTexture: texture, destinationTexture: drawable.texture)
        commandBuffer.present(drawable)
        commandBuffer.commit()
        self.texture = nil // frame consumed; wait for the next captured frame
    }
}
// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
extension VS2CameraSession: AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Called on the delegate queue for every captured frame; converts the
    /// frame's pixel buffer into an MTLTexture for the next draw() pass.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
              let textureCache = self.textureCache else {
            return
        }
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)
        var textureRef: CVMetalTexture?
        let status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer, nil,
                                                               .bgra8Unorm, width, height, 0, &textureRef)
        // Bail out instead of force-unwrapping: this call can fail (e.g. -6660
        // when the pixel buffer is not Metal compatible), leaving textureRef nil
        // — the original `textureRef!` would crash in that case.
        guard status == kCVReturnSuccess, let textureRef = textureRef else {
            return
        }
        texture = CVMetalTextureGetTexture(textureRef)
    }
}
这是 VS2CameraViewController,它使用 VS2CameraSession 来渲染它的视图。
import UIKit
import SwiftUI
import MetalKit
/// Hosts an MTKView that renders the filtered camera feed produced by
/// VS2CameraSession.
final class VS2CameraViewController: UIViewController {
    let cameraSession = VS2CameraSession()

    /// Builds the MTKView programmatically as the root view.
    /// (Intentionally does not call super.loadView() — we supply our own view.)
    override func loadView() {
        let metalView = MTKView()
        metalView.device = cameraSession.gpu
        metalView.delegate = self
        metalView.clearColor = MTLClearColorMake(1, 1, 1, 1)
        metalView.colorPixelFormat = MTLPixelFormat.bgra8Unorm
        // MPS writes into drawable.texture, so the drawable must be usable
        // as a compute/blit destination, not only as a render target.
        metalView.framebufferOnly = false
        self.view = metalView
    }

    override func viewDidLoad() {
        super.viewDidLoad() // was missing — lifecycle overrides must call super
        cameraSession.startRunning()
    }
}
// MARK: - MTKViewDelegate
extension VS2CameraViewController: MTKViewDelegate {
    /// No-op: drawable-size changes need no extra handling here.
    func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {}

    /// Delegates each render pass to the camera session.
    func draw(in view: MTKView) {
        cameraSession.draw(drawable: view.currentDrawable)
    }
}
// MARK: - SwiftUI bridge
extension VS2CameraViewController: UIViewControllerRepresentable {
    typealias UIViewControllerType = VS2CameraViewController

    /// Creates the controller when SwiftUI first instantiates this view.
    public func makeUIViewController(context: UIViewControllerRepresentableContext<VS2CameraViewController>) -> VS2CameraViewController {
        VS2CameraViewController()
    }

    /// Nothing to synchronize from SwiftUI state back into the controller.
    public func updateUIViewController(_ uiViewController: VS2CameraViewController, context: UIViewControllerRepresentableContext<VS2CameraViewController>) {
    }
}
完整的源代码可在https://github.com/snakajima/VideoShader2/tree/stack_overflow获得。
【问题讨论】:
标签: macos avfoundation metal avcapturesession cvpixelbuffer