Convert a CMSampleBuffer into a UIImage
Here is a function that converts a CMSampleBuffer into a UIImage (code from Apple's documentation):
func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage {
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    var imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0)
    // Get the base address of the pixel buffer
    var baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
    // Get the number of bytes per row for the pixel buffer
    var bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
    // Get the pixel buffer width and height
    var width = CVPixelBufferGetWidth(imageBuffer)
    var height = CVPixelBufferGetHeight(imageBuffer)
    // Create a device-dependent RGB color space
    var colorSpace = CGColorSpaceCreateDeviceRGB()
    // Create a bitmap graphics context with the sample buffer data
    let bitmapInfo = CGBitmapInfo(CGImageAlphaInfo.NoneSkipLast.rawValue)
    var context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo)
    // Create a Quartz image from the pixel data in the bitmap graphics context
    var quartzImage = CGBitmapContextCreateImage(context)
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0)
    // Create an image object from the Quartz image
    var image = UIImage(CGImage: quartzImage)!
    return image
}
When I try to display the UIImage in a UIImageView, I get nothing.
Any ideas?
I just got this exact same thing working in my current project; here's how I got it to work (after a lot of googling and some trial and error). The fix is the bitmap info: the camera delivers 32BGRA frames, and those bytes read as little-endian 32-bit ARGB, so you need noneSkipFirst combined with byteOrder32Little:
let bitmapInfo = CGBitmapInfo(CGImageAlphaInfo.NoneSkipFirst.rawValue | CGBitmapInfo.ByteOrder32Little.rawValue)
Also, make sure you update the UIImageView on the main thread (you are most likely receiving the CMSampleBuffer on the camera session's queue), because UIKit can only be used from the main thread. Otherwise you may wait a very, very long time for the image to show up.
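For example, here is a minimal sketch of the capture delegate callback in current Swift syntax (the imageView outlet is an assumption; the signature is the standard AVCaptureVideoDataOutputSampleBufferDelegate one):
func captureOutput(_ output: AVCaptureOutput,
                   didOutput sampleBuffer: CMSampleBuffer,
                   from connection: AVCaptureConnection) {
    // This callback runs on the session's queue, not the main queue.
    let image = imageFromSampleBuffer(sampleBuffer: sampleBuffer)
    DispatchQueue.main.async {
        // UIKit work must happen on the main thread.
        self.imageView.image = image
    }
}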
Here is a Swift 3.0 solution that extends CMSampleBuffer with a computed property giving you an optional UIImage.
import AVFoundation
import UIKit

extension CMSampleBuffer {
    var uiImage: UIImage? {
        // Get the sample buffer's Core Video pixel buffer.
        guard let imageBuffer = CMSampleBufferGetImageBuffer(self) else { return nil }
        // Lock the base address while Core Graphics reads the pixels,
        // and balance the lock on every exit path.
        CVPixelBufferLockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))
        defer { CVPixelBufferUnlockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0)) }
        let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        let width = CVPixelBufferGetWidth(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        // BGRA frames read as little-endian 32-bit ARGB, so skip the leading alpha byte.
        let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.noneSkipFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue)
        guard let context = CGContext(data: baseAddress,
                                      width: width,
                                      height: height,
                                      bitsPerComponent: 8,
                                      bytesPerRow: bytesPerRow,
                                      space: colorSpace,
                                      bitmapInfo: bitmapInfo.rawValue) else { return nil }
        guard let cgImage = context.makeImage() else { return nil }
        return UIImage(cgImage: cgImage)
    }
}
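With that extension in place, the conversion becomes a one-liner inside the capture delegate (a sketch; the imageView outlet is assumed):
if let image = sampleBuffer.uiImage {
    DispatchQueue.main.async { self.imageView.image = image }
}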
@Zigglzworth You need to set kCVPixelFormatType_32BGRA on the AVCaptureVideoDataOutput:
let videoOutput = AVCaptureVideoDataOutput()
videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
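For context, here is a minimal sketch of wiring that output into a capture session (the session instance and queue label are assumptions, and self must conform to AVCaptureVideoDataOutputSampleBufferDelegate):
let videoOutput = AVCaptureVideoDataOutput()
videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
// Deliver frames on a background queue; hop to the main queue only for UI work.
videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera.frames"))
if session.canAddOutput(videoOutput) {
    session.addOutput(videoOutput)
}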