在 MacOS 中使用 AVFoundation 和 Metal 时帧率非常慢

Very slow framerate with AVFoundation and Metal in MacOS

我正在尝试将 Apple 的 AVCamFilter 示例移植到 MacOS。过滤似乎有效,但通过 Metal 渲染处理后的图像使我的帧率慢到每帧需要几秒钟。我尝试了不同的方法,但被困了很长时间。

这是项目 AVCamFilterMacOS - 任何对 AVFoundation with Metal 有更好了解的人都可以告诉我哪里出了问题吗?我一直在阅读文档并练习让未处理的图像显示,以及将模型等其他东西渲染到 Metal 视图,但我似乎无法让处理过的 CMSampleBuffer 以合理的帧率渲染。

即使我跳过渲染器并将 videoPixelBuffer 直接发送到 Metal 视图,视图的性能也相当不稳定。

这是我在控制器中使用的一些相关渲染代码:

/// AVCaptureVideoDataOutput delegate callback: forwards every captured
/// frame straight into the filtering pipeline.
/// NOTE(review): the delegate does not own `sampleBuffer`; if any derived
/// texture outlives this call, the buffer must be retained (see answer below).
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    self.processVideo(sampleBuffer: sampleBuffer)
}

/// Filters one captured frame and hands the filtered pixel buffer to the
/// preview view. Returns early (dropping the frame) when rendering is
/// disabled or the sample buffer has no image/format description.
///
/// Fix: the pasted code had the Chinese word "如果" where the `if` keyword
/// belongs ("如果 !renderingEnabled { return }"), which does not compile;
/// the body's indentation was also restored.
func processVideo(sampleBuffer: CMSampleBuffer) {
    if !renderingEnabled { return }

    guard let videoPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
        let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) else {
            return
    }

    // Lazily prepare the filter the first time a frame arrives, once the
    // actual stream format is known.
    if !self.videoFilter.isPrepared {
        /*
         outputRetainedBufferCountHint is the number of pixel buffers the renderer retains. This value informs the renderer
         how to size its buffer pool and how many pixel buffers to preallocate. Allow 3 frames of latency to cover the dispatch_async call.
         */
        self.videoFilter.prepare(with: formatDescription, outputRetainedBufferCountHint: 3)
    }

    // Send the pixel buffer through the filter.
    guard let filteredBuffer = self.videoFilter.render(pixelBuffer: videoPixelBuffer) else {
        print("Unable to filter video buffer")
        return
    }

    // NOTE(review): `sampleBuffer` owns `videoPixelBuffer`; make sure the
    // sample buffer stays alive until any GPU work reading it has finished,
    // otherwise rendering behavior is undefined.
    self.previewView.pixelBuffer = filteredBuffer
}

并且来自渲染器:

/// Runs `pixelBuffer` through the compute filter and returns a freshly
/// allocated output buffer from the pool, or `nil` on any allocation or
/// encoding failure.
/// NOTE(review): the command buffer is committed asynchronously — the GPU
/// may still be reading `pixelBuffer` when this returns, so the caller must
/// keep the input (and its owning sample buffer) alive until completion.
func render(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer? {
    if !isPrepared {
        assertionFailure("Invalid state: Not prepared.")
        return nil
    }

    // Grab a recycled output buffer from the preallocated pool.
    var newPixelBuffer: CVPixelBuffer?
    CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &newPixelBuffer)
    guard let outputPixelBuffer = newPixelBuffer else {
        print("Allocation failure: Could not get pixel buffer from pool. (\(self.description))")
        return nil
    }
    // Wrap input and output buffers in zero-copy Metal textures via the cache.
    guard let inputTexture = makeTextureFromCVPixelBuffer(pixelBuffer: pixelBuffer, textureFormat: .bgra8Unorm),
        let outputTexture = makeTextureFromCVPixelBuffer(pixelBuffer: outputPixelBuffer, textureFormat: .bgra8Unorm) else {
            return nil
    }

    // Set up command queue, buffer, and encoder.
    guard let commandQueue = commandQueue,
        let commandBuffer = commandQueue.makeCommandBuffer(),
        let commandEncoder = commandBuffer.makeComputeCommandEncoder() else {
            print("Failed to create a Metal command queue.")
            // Drop stale cache entries so they don't pin their pixel buffers.
            CVMetalTextureCacheFlush(textureCache!, 0)
            return nil
    }

    commandEncoder.label = "Rosy Metal"
    commandEncoder.setComputePipelineState(computePipelineState!)
    commandEncoder.setTexture(inputTexture, index: 0)
    commandEncoder.setTexture(outputTexture, index: 1)

    // Set up the thread groups: one execution-width wide, tall enough to fill
    // the pipeline's max threadgroup size, with the grid rounded up so every
    // texel is covered.
    let width = computePipelineState!.threadExecutionWidth
    let height = computePipelineState!.maxTotalThreadsPerThreadgroup / width
    let threadsPerThreadgroup = MTLSizeMake(width, height, 1)
    let threadgroupsPerGrid = MTLSize(width: (inputTexture.width + width - 1) / width,
                                      height: (inputTexture.height + height - 1) / height,
                                      depth: 1)
    commandEncoder.dispatchThreadgroups(threadgroupsPerGrid, threadsPerThreadgroup: threadsPerThreadgroup)

    commandEncoder.endEncoding()
    // Commit without waiting; the returned buffer may still be being written
    // by the GPU when this function returns.
    commandBuffer.commit()
    return outputPixelBuffer
}

/// Wraps `pixelBuffer` in an `MTLTexture` backed by the same memory, using
/// the shared `CVMetalTextureCache`. On failure the cache is flushed and
/// `nil` is returned.
func makeTextureFromCVPixelBuffer(pixelBuffer: CVPixelBuffer, textureFormat: MTLPixelFormat) -> MTLTexture? {
    let bufferWidth = CVPixelBufferGetWidth(pixelBuffer)
    let bufferHeight = CVPixelBufferGetHeight(pixelBuffer)

    // Ask the cache for a zero-copy Metal wrapper around the pixel buffer.
    var cvMetalTexture: CVMetalTexture?
    CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                              textureCache,
                                              pixelBuffer,
                                              nil,
                                              textureFormat,
                                              bufferWidth,
                                              bufferHeight,
                                              0,
                                              &cvMetalTexture)

    if let cvMetalTexture = cvMetalTexture, let metalTexture = CVMetalTextureGetTexture(cvMetalTexture) {
        return metalTexture
    }

    // Drop any stale cache entries before reporting failure.
    CVMetalTextureCacheFlush(textureCache, 0)
    return nil
}

最后是 Metal 视图:

/// Draws the most recently delivered pixel buffer into the view's drawable,
/// applying the configured mirroring/rotation transform.
/// NOTE(review): `commandBuffer.commit()` is asynchronous; `cvTexture` (and
/// the pixel buffer it wraps) must stay valid until the GPU finishes, which
/// this method does not itself guarantee.
override func draw(_ rect: CGRect) {
    var pixelBuffer: CVPixelBuffer?
    var mirroring = false
    var rotation: Rotation = .rotate0Degrees

    // Snapshot the shared state on the sync queue so the capture thread can
    // keep publishing new frames while we render this one.
    syncQueue.sync {
        pixelBuffer = internalPixelBuffer
        mirroring = internalMirroring
        rotation = internalRotation
    }

    // Nothing to do until a drawable, a render pass, and a frame all exist.
    guard let drawable = currentDrawable,
        let currentRenderPassDescriptor = currentRenderPassDescriptor,
        let previewPixelBuffer = pixelBuffer else {
            return
    }

    // Create a Metal texture from the image buffer.
    let width = CVPixelBufferGetWidth(previewPixelBuffer)
    let height = CVPixelBufferGetHeight(previewPixelBuffer)

    // Lazily create the texture cache on first use.
    if textureCache == nil {
        createTextureCache()
    }
    var cvTextureOut: CVMetalTexture?
    CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                              textureCache!,
                                              previewPixelBuffer,
                                              nil,
                                              .bgra8Unorm,
                                              width,
                                              height,
                                              0,
                                              &cvTextureOut)
    guard let cvTexture = cvTextureOut, let texture = CVMetalTextureGetTexture(cvTexture) else {
        print("Failed to create preview texture")

        // Drop stale cache entries so they don't pin their pixel buffers.
        CVMetalTextureCacheFlush(textureCache!, 0)
        return
    }

    // Rebuild the vertex/texture-coordinate transform whenever the frame
    // geometry, view bounds, mirroring, or rotation changed.
    if texture.width != textureWidth ||
        texture.height != textureHeight ||
        self.bounds != internalBounds ||
        mirroring != textureMirroring ||
        rotation != textureRotation {
        setupTransform(width: texture.width, height: texture.height, mirroring: mirroring, rotation: rotation)
    }

    // Set up command buffer and encoder
    guard let commandQueue = commandQueue else {
        print("Failed to create Metal command queue")
        CVMetalTextureCacheFlush(textureCache!, 0)
        return
    }

    guard let commandBuffer = commandQueue.makeCommandBuffer() else {
        print("Failed to create Metal command buffer")
        CVMetalTextureCacheFlush(textureCache!, 0)
        return
    }

    guard let commandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: currentRenderPassDescriptor) else {
        print("Failed to create Metal command encoder")
        CVMetalTextureCacheFlush(textureCache!, 0)
        return
    }

    // Draw the frame as a full-screen textured quad (triangle strip of 4).
    commandEncoder.label = "Preview display"
    commandEncoder.setRenderPipelineState(renderPipelineState!)
    commandEncoder.setVertexBuffer(vertexCoordBuffer, offset: 0, index: 0)
    commandEncoder.setVertexBuffer(textCoordBuffer, offset: 0, index: 1)
    commandEncoder.setFragmentTexture(texture, index: 0)
    commandEncoder.setFragmentSamplerState(sampler, index: 0)
    commandEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
    commandEncoder.endEncoding()

    // Draw to the screen.
    commandBuffer.present(drawable)
    commandBuffer.commit()
}

所有这些代码都在链接项目中

捕获设备委托不拥有它们在回调中收到的样本缓冲区,因此接收方有责任确保在需要它们的内容时保留它们。该项目目前无法确保这一点。

相反,通过调用 CMSampleBufferGetImageBuffer 并将生成的像素缓冲区包装在纹理中,视图控制器允许释放样本缓冲区,这意味着未来对其相应像素缓冲区的操作是未定义的。

确保样本缓冲区存活得足够久以供处理的一种方法是,向相机视图控制器类添加一个私有成员,用来保留最近收到的样本缓冲区:

// Retains the most recently received sample buffer so the pixel buffer it
// owns stays valid while Metal textures created from it are still in use.
private var sampleBuffer: CMSampleBuffer!

然后在调用processVideo之前在captureOutput(...)方法中设置这个成员。您甚至不必进一步参考它;保留它的事实应该可以防止您看到的断断续续和不可预测的行为。

此解决方案可能并不完美,因为在捕获会话中断或其他暂停的情况下,它保留样本缓冲区的时间比绝对必要的时间长。您可以设计自己的方案来管理对象生命周期;重要的是确保根样本缓冲区对象一直存在,直到您处理完引用其内容的任何纹理。