Convert CMSampleBuffer to UIImage
I am trying to convert the sampleBuffer to a UIImage and display it in an image view using colorspaceGray, but it shows up like the image below. I think there is a problem with the conversion. How do I convert the CMSampleBuffer?
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    print("buffered")
    let imageBuffer: CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
    CVPixelBufferLockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))
    let width: Int = CVPixelBufferGetWidth(imageBuffer)
    let height: Int = CVPixelBufferGetHeight(imageBuffer)
    let bytesPerRow: Int = CVPixelBufferGetBytesPerRow(imageBuffer)
    let lumaBuffer = CVPixelBufferGetBaseAddress(imageBuffer)
    //let planeCount: Int = CVPixelBufferGetPlaneCount(imageBuffer)
    let grayColorSpace: CGColorSpace = CGColorSpaceCreateDeviceGray()
    let context: CGContext = CGContext(data: lumaBuffer, width: width, height: height, bitsPerComponent: 8, bytesPerRow: bytesPerRow, space: grayColorSpace, bitmapInfo: CGImageAlphaInfo.none.rawValue)!
    let dstImageFilter: CGImage = context.makeImage()!
    let imageRect: CGRect = CGRect(x: 0, y: 0, width: width, height: height)
    context.draw(dstImageFilter, in: imageRect)
    let image = UIImage(cgImage: dstImageFilter)
    DispatchQueue.main.sync(execute: { () -> Void in
        self.imageTest.image = image
    })
}
It looks like the CMSampleBuffer is handing you RGBA data, and you are building a grayscale image directly out of it. You would either need to build a new buffer in which, for every pixel, you compute something like gray = (pixel.red + pixel.green + pixel.blue) / 3, or create a normal RGBA image from the data you receive and then convert that image to grayscale.
In your code, however, you are not doing any conversion at all. You use CVPixelBufferGetBaseAddress to get a raw pointer into the buffer, regardless of what kind of data it holds, and then you pass that same pointer when creating the image, as if the data you received were already grayscale.
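To illustrate the first option, here is a minimal sketch (my own, not part of the original answer) that averages the channels of a BGRA pixel buffer into a new grayscale byte buffer; the helper name and the assumption that the buffer is kCVPixelFormatType_32BGRA are mine:
import CoreVideo

// Hypothetical helper: average the B, G and R bytes of each 32BGRA pixel into one gray byte.
// Assumes the pixel buffer really is kCVPixelFormatType_32BGRA.
func grayscaleBytes(from pixelBuffer: CVPixelBuffer) -> [UInt8] {
    CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
    defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }

    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)
    let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
    let base = CVPixelBufferGetBaseAddress(pixelBuffer)!.assumingMemoryBound(to: UInt8.self)

    var gray = [UInt8](repeating: 0, count: width * height)
    for y in 0..<height {
        for x in 0..<width {
            let p = y * bytesPerRow + x * 4                  // 4 bytes per BGRA pixel
            let b = Int(base[p]), g = Int(base[p + 1]), r = Int(base[p + 2])
            gray[y * width + x] = UInt8((r + g + b) / 3)     // gray = (r + g + b) / 3
        }
    }
    return gray
}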
The conversion from the sample buffer to a UIImage itself is simple:
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
    let ciimage = CIImage(cvPixelBuffer: imageBuffer)
    let image = self.convert(cmage: ciimage)
}
// Convert CIImage to UIImage
func convert(cmage: CIImage) -> UIImage {
    let context = CIContext(options: nil)
    let cgImage = context.createCGImage(cmage, from: cmage.extent)!
    let image = UIImage(cgImage: cgImage)
    return image
}
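If you also want the grayscale display the question asked about, one option (a sketch of mine, not part of the original answer) is to desaturate the CIImage with the built-in CIColorControls filter before rendering it:
import UIKit
import CoreImage
import CoreMedia

// Sketch only: convert the sample buffer to a grayscale UIImage by running the
// CIImage through CIColorControls with saturation set to 0 before rendering.
func grayImage(from sampleBuffer: CMSampleBuffer) -> UIImage? {
    guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
    let ciimage = CIImage(cvPixelBuffer: imageBuffer)

    guard let filter = CIFilter(name: "CIColorControls") else { return nil }
    filter.setValue(ciimage, forKey: kCIInputImageKey)
    filter.setValue(0.0, forKey: kCIInputSaturationKey)   // saturation 0 -> grayscale

    let context = CIContext(options: nil)
    guard let output = filter.outputImage,
          let cgImage = context.createCGImage(output, from: output.extent) else { return nil }
    return UIImage(cgImage: cgImage)
}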
The solution above can now be improved further with the newer convenience initializer on UIImage. Below I outline a more modern solution that also corrects the image orientation. It avoids the CGImage conversion, which improves runtime performance.
func orientation() -> UIImage.Orientation {
    let curDeviceOrientation = UIDevice.current.orientation
    var exifOrientation: UIImage.Orientation
    switch curDeviceOrientation {
    case UIDeviceOrientation.portraitUpsideDown:  // Device oriented vertically, Home button on the top
        exifOrientation = .left
    case UIDeviceOrientation.landscapeLeft:       // Device oriented horizontally, Home button on the right
        exifOrientation = .upMirrored
    case UIDeviceOrientation.landscapeRight:      // Device oriented horizontally, Home button on the left
        exifOrientation = .down
    case UIDeviceOrientation.portrait:            // Device oriented vertically, Home button on the bottom
        exifOrientation = .up
    default:
        exifOrientation = .up
    }
    return exifOrientation
}
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    guard let imageBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
    let ciimage = CIImage(cvPixelBuffer: imageBuffer)
    let image = UIImage(ciImage: ciimage, scale: 1.0, orientation: orientation())
}
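The delegate above only builds the UIImage; as in the question's code, it still has to reach the image view on the main queue. A minimal follow-up, assuming the same imageTest outlet from the question, would be:
// Sketch: hand the converted frame to the UI; async avoids blocking the capture queue.
DispatchQueue.main.async {
    self.imageTest.image = image
}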