Understanding Metal usage with AVCaptureSession video output
I am trying to understand the correct way to work with video output (CMPixelBuffer) using Metal.
As far as I understand, there is MTKView. Each CMPixelBuffer coming from the video output gets mapped onto something like a Metal Texture. So the final preview comes from the MTKView?
When I see the final result on the screen, is it:
1) CMSampleBuffer -> Metal -> CMSampleBuffer
or
2) CMSampleBuffer -> Metal -> MTKView
I am quite confused. Can someone clear this up?
I would encourage you to do this in two steps. First, get comfortable using Metal and an MTKView to draw anything at all on the screen. Second, once you know how to draw with Metal (creating and encoding into command buffers, presenting drawables, and so on), you can apply the texture you generate from the CMSampleBuffer to a full-screen quad or whatever other geometry you want.
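For that first step, here is a minimal sketch (not from the original answer) of an MTKView delegate callback that simply clears the view and presents the drawable each frame; it assumes a _commandQueue ivar has already been created from the device, and the names are only illustrative.
- (void)drawInMTKView:(MTKView *)view {
    // The render pass descriptor for the current drawable can be nil; bail out if so.
    MTLRenderPassDescriptor *passDescriptor = view.currentRenderPassDescriptor;
    if (passDescriptor == nil) { return; }
    id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
    // An empty render pass just clears the drawable to the view's clearColor.
    id<MTLRenderCommandEncoder> encoder = [commandBuffer renderCommandEncoderWithDescriptor:passDescriptor];
    [encoder endEncoding];
    [commandBuffer presentDrawable:view.currentDrawable];
    [commandBuffer commit];
}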
@McZonk has put together a camera capture example here that implements a simple version of this. It uses bi-planar YCbCr sample buffers, but you can also request BGRA-formatted sample buffers where available; those can be converted into a single MTLTexture with CVMetalTextureCacheCreateTextureFromImage and sampled directly in a shader with no further manual conversion.
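For comparison, a rough sketch of the bi-planar YCbCr path used in that example might look like the following; pixelBuffer and _textureCache stand for the same objects used in the BGRA code further down, the plane formats are the usual R8/RG8 choices, and error handling is omitted.
// Sketch only: bi-planar YCbCr input (e.g. kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange).
CVMetalTextureRef lumaRef = NULL;
CVMetalTextureRef chromaRef = NULL;
// Plane 0: luminance, a single 8-bit channel at full resolution.
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL,
    MTLPixelFormatR8Unorm,
    CVPixelBufferGetWidthOfPlane(pixelBuffer, 0), CVPixelBufferGetHeightOfPlane(pixelBuffer, 0), 0, &lumaRef);
// Plane 1: interleaved Cb/Cr, two 8-bit channels at half resolution.
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL,
    MTLPixelFormatRG8Unorm,
    CVPixelBufferGetWidthOfPlane(pixelBuffer, 1), CVPixelBufferGetHeightOfPlane(pixelBuffer, 1), 1, &chromaRef);
id<MTLTexture> lumaTexture = CVMetalTextureGetTexture(lumaRef);
id<MTLTexture> chromaTexture = CVMetalTextureGetTexture(chromaRef);
// A fragment shader would then sample both textures and apply the YCbCr -> RGB conversion matrix.
CFRelease(lumaRef);
CFRelease(chromaRef);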
The simplest, bare-bones minimum code needed to convert the sample buffers produced by the camera into a Metal texture:
// ViewController.h
@import UIKit;
@import AVFoundation;
@import CoreMedia;
#import <MetalKit/MetalKit.h>
#import <Metal/Metal.h>
#import <MetalPerformanceShaders/MetalPerformanceShaders.h>
@interface ViewController : UIViewController <MTKViewDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>
@property (retain, nonatomic) AVCaptureSession *avSession;
@end
#import "ViewController.h"
@interface ViewController () {
MTKView *_metalView;
id<MTLDevice> _device;
id<MTLCommandQueue> _commandQueue;
id<MTLTexture> _texture;
CVMetalTextureCacheRef _textureCache;
}
@property (strong, nonatomic) AVCaptureDevice *videoDevice;
@property (nonatomic) dispatch_queue_t sessionQueue;
@end
@implementation ViewController
- (void)viewDidLoad {
NSLog(@"%s", __PRETTY_FUNCTION__);
[super viewDidLoad];
_device = MTLCreateSystemDefaultDevice();
_commandQueue = [_device newCommandQueue]; // create the command queue once here rather than per frame
_metalView = [[MTKView alloc] initWithFrame:self.view.bounds];
[_metalView setContentMode:UIViewContentModeScaleAspectFit];
_metalView.device = _device;
_metalView.delegate = self;
_metalView.clearColor = MTLClearColorMake(1, 1, 1, 1);
_metalView.colorPixelFormat = MTLPixelFormatBGRA8Unorm;
_metalView.framebufferOnly = NO;
_metalView.autoResizeDrawable = NO;
CVMetalTextureCacheCreate(NULL, NULL, _device, NULL, &_textureCache);
[self.view addSubview:_metalView];
self.sessionQueue = dispatch_queue_create( "session queue", DISPATCH_QUEUE_SERIAL );
if ([self setupCamera]) {
[_avSession startRunning];
}
}
- (BOOL)setupCamera {
NSLog(@"%s", __PRETTY_FUNCTION__);
@try {
NSError * error;
_avSession = [[AVCaptureSession alloc] init];
[_avSession beginConfiguration];
[_avSession setSessionPreset:AVCaptureSessionPreset640x480];
// get list of devices; connect to front-facing camera
self.videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (self.videoDevice == nil) return FALSE;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.videoDevice error:&error];
[_avSession addInput:input];
dispatch_queue_t sampleBufferQueue = dispatch_queue_create("CameraMulticaster", DISPATCH_QUEUE_SERIAL);
AVCaptureVideoDataOutput * dataOutput = [[AVCaptureVideoDataOutput alloc] init];
[dataOutput setAlwaysDiscardsLateVideoFrames:YES];
[dataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];
[dataOutput setSampleBufferDelegate:self queue:sampleBufferQueue];
[_avSession addOutput:dataOutput];
[_avSession commitConfiguration];
} @catch (NSException *exception) {
NSLog(@"%s - %@", __PRETTY_FUNCTION__, exception.description);
return FALSE;
}
return TRUE;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
{
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
CVMetalTextureRef texture = NULL;
CVReturn status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, MTLPixelFormatBGRA8Unorm, width, height, 0, &texture);
if(status == kCVReturnSuccess)
{
_metalView.drawableSize = CGSizeMake(width, height);
_texture = CVMetalTextureGetTexture(texture);
CFRelease(texture);
}
}
}
- (void)drawInMTKView:(MTKView *)view {
// draw only when a new camera frame has produced a texture
if (_texture) {
id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
id<MTLTexture> drawingTexture = view.currentDrawable.texture;
// set up and encode the filter
MPSImageGaussianBlur *filter = [[MPSImageGaussianBlur alloc] initWithDevice:_device sigma:5];
[filter encodeToCommandBuffer:commandBuffer sourceTexture:_texture destinationTexture:drawingTexture];
// committing the drawing
[commandBuffer presentDrawable:view.currentDrawable];
[commandBuffer commit];
_texture = nil;
}
}
- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
}
@end
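As a side note, if you only want to display the camera image unmodified, the MPSImageGaussianBlur step in drawInMTKView: could in principle be replaced with a plain blit into the drawable (the view already has framebufferOnly set to NO). This is only a sketch under the same assumptions as the code above (matching BGRA pixel formats and drawableSize set to the camera frame size), not part of the original answer.
- (void)drawInMTKView:(MTKView *)view {
    if (_texture == nil || view.currentDrawable == nil) { return; }
    id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
    id<MTLBlitCommandEncoder> blitEncoder = [commandBuffer blitCommandEncoder];
    // Copy the camera texture directly into the drawable's texture.
    [blitEncoder copyFromTexture:_texture sourceSlice:0 sourceLevel:0
                    sourceOrigin:MTLOriginMake(0, 0, 0)
                      sourceSize:MTLSizeMake(_texture.width, _texture.height, 1)
                       toTexture:view.currentDrawable.texture
                destinationSlice:0 destinationLevel:0
               destinationOrigin:MTLOriginMake(0, 0, 0)];
    [blitEncoder endEncoding];
    [commandBuffer presentDrawable:view.currentDrawable];
    [commandBuffer commit];
    _texture = nil;
}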