UIImage uncompressed from AVCaptureStillImageOutput
This is the camera configuration I am currently trying:
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[session setSessionPreset:AVCaptureSessionPresetInputPriority];
AVCaptureDevice *videoDevice = [AVCamViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];

NSError *errorVideo;
AVCaptureDeviceFormat *deviceFormat = nil;
// Look for the 2592x1936 (5 MP) format among the device's supported formats
for (AVCaptureDeviceFormat *format in videoDevice.formats) {
    CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
    if (dim.width == 2592 && dim.height == 1936) {
        deviceFormat = format;
        break;
    }
}

[videoDevice lockForConfiguration:&errorVideo];
if (deviceFormat) {
    videoDevice.activeFormat = deviceFormat;
    if ([videoDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
        [videoDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
    }
    if ([videoDevice isAutoFocusRangeRestrictionSupported]) {
        [videoDevice setAutoFocusRangeRestriction:AVCaptureAutoFocusRangeRestrictionFar];
    }
}
[videoDevice unlockForConfiguration];

AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&errorVideo];
if ([session canAddInput:videoDeviceInput]) {
    [session addInput:videoDeviceInput];
}

AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([session canAddOutput:stillImageOutput]) {
    // Request uncompressed BGRA sample buffers rather than JPEG data
    [stillImageOutput setOutputSettings:@{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)}];
    [session addOutput:stillImageOutput];
}
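One robustness note on the configuration above: lockForConfiguration: returns a BOOL and can fail, so it is safer to guard the format change on its result rather than ignore it. A minimal sketch:

if ([videoDevice lockForConfiguration:&errorVideo]) {
    if (deviceFormat) {
        videoDevice.activeFormat = deviceFormat;
    }
    [videoDevice unlockForConfiguration];
} else {
    // The device could not be locked; errorVideo describes why.
    NSLog(@"lockForConfiguration failed: %@", errorVideo);
}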
This is how I try to get a UIImage from the CMSampleBuffer:
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
    if (imageDataSampleBuffer && !error) {
        dispatch_async(dispatch_get_main_queue(), ^{
            UIImage *image = [self imageFromSampleBuffer:imageDataSampleBuffer];
        });
    }
}];
This is Apple's sample code:
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context with the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
        bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create an image object from the Quartz image
    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    // Release the Quartz image
    CGImageRelease(quartzImage);

    return image;
}
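For what it's worth, this helper only works when the sample buffer really holds an uncompressed 32BGRA pixel buffer (which the kCVPixelBufferPixelFormatTypeKey output setting above requests). A quick guard at the top of the method can verify that assumption, as a sketch:

// At the top of imageFromSampleBuffer:, before touching the buffer:
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (imageBuffer == NULL ||
    CVPixelBufferGetPixelFormatType(imageBuffer) != kCVPixelFormatType_32BGRA) {
    // Either the buffer's image data is already gone, or it is not the
    // uncompressed BGRA format this conversion code expects.
    return nil;
}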
But the image is always nil.
After some debugging, I found that CMSampleBufferGetImageBuffer(sampleBuffer) always returns NULL.
Can anyone help?
This happens because the CMSampleBufferRef must be processed immediately: it is released again very quickly and efficiently, so if you defer the conversion (for example with dispatch_async), the buffer's image data is already gone by the time your block runs.
Here is my code for producing the image:
let connection = imageFileOutput.connectionWithMediaType(AVMediaTypeVideo)

if connection != nil {
    imageFileOutput.captureStillImageAsynchronouslyFromConnection(connection) { [weak self] (buffer, err) -> Void in
        if CMSampleBufferIsValid(buffer) {
            let imageDataJpeg = self?.imageFromSampleBuffer(buffer)
        } else {
            print(err)
        }
    }
}
As you can see, I convert it to an image while still inside the scope of the completion handler. Once it is an image, I send it off for processing.
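Applied to the Objective-C handler from the question, the same idea is to build the UIImage synchronously inside the completion handler and dispatch only the finished UIImage to the main queue. A sketch (handleCapturedImage: is a hypothetical stand-in for whatever processing comes next):

[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
    if (imageDataSampleBuffer && !error) {
        // Convert while the sample buffer is still valid; do NOT defer this
        // with dispatch_async, or the buffer may already be released.
        UIImage *image = [self imageFromSampleBuffer:imageDataSampleBuffer];
        dispatch_async(dispatch_get_main_queue(), ^{
            // Only the finished UIImage crosses the queue boundary.
            [self handleCapturedImage:image]; // hypothetical processing method
        });
    }
}];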