didOutputSampleBuffer drops frames
I am writing an app for capturing long-exposure images.
I use func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) to get the CMSampleBuffer and apply a CIFilter using CILightenBlendMode.
The problem is that the blending takes too long and frames end up being dropped.
I tried copying the buffer:
var copiedBuffer:CMSampleBuffer?
CMSampleBufferCreateCopy(nil, sampleBuffer, &copiedBuffer)
blendImages(copiedBuffer!)
But that did not help; frames are still being dropped.
Full code:
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    if CameraService.longExposureRunning {
        var copiedBuffer: CMSampleBuffer?
        CMSampleBufferCreateCopy(nil, sampleBuffer, &copiedBuffer)
        blendImages(copiedBuffer!)
    }
}

func captureOutput(captureOutput: AVCaptureOutput!, didDropSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    print("Dropped")
}
func blendImages(buffer: CMSampleBuffer) {
    let priority = DISPATCH_QUEUE_PRIORITY_DEFAULT
    dispatch_async(dispatch_get_global_queue(priority, 0)) {
        let pixelBuffer = CMSampleBufferGetImageBuffer(buffer)
        let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

        if let backgroundImage = self.lastImage {
            let blendEffect = CIFilter(name: "CILightenBlendMode")
            blendEffect?.setValue(backgroundImage, forKey: kCIInputBackgroundImageKey)
            blendEffect?.setValue(cameraImage, forKey: kCIInputImageKey)
            self.lastImage = blendEffect?.outputImage
            print("Blending")
        } else {
            self.lastImage = cameraImage
        }

        let filteredImage = UIImage(CIImage: self.lastImage!)

        dispatch_async(dispatch_get_main_queue()) {
            self.imageView.image = filteredImage
        }
    }
}
The most obvious thing I can think of is to check how you set up your outputs: make sure expectsMediaDataInRealTime is set to true on your AVAssetWriterInput.
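For reference, if you are writing the frames out with an AVAssetWriter (that part of the pipeline isn't shown in the question, so the writer-input setup below is only a hypothetical sketch and the output settings are placeholders), the flag would be set like this:

import AVFoundation

// Hypothetical writer-input setup -- the output settings are placeholders.
let videoSettings: [String: AnyObject] = [
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: 1920,
    AVVideoHeightKey: 1080
]
let writerInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo,
                                     outputSettings: videoSettings)

// Ask the input to accept media data as soon as it arrives; without this,
// appending buffers from a live capture session can stall and drop frames.
writerInput.expectsMediaDataInRealTime = true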
I suspect Core Image is concatenating all of your frames into one giant kernel. You may find CIImageAccumulator helps (there is a sketch of that approach after the code below), but I got your code working by forcing Core Image to render the chain and start afresh with each frame.
I changed the type of your lastImage variable to an optional UIImage and added a constant named context, which is a CIContext. With those in place, the code below works nicely.
Use
let context: CIContext = CIContext(options: [kCIContextUseSoftwareRenderer: false])
for GPU rather than CPU rendering.
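For completeness, the surrounding controller state the code relies on would be declared something like this. This is a minimal sketch; the class name CameraViewController is invented for illustration, and imageView is assumed to be an outlet to the preview image view the original code updates:

import UIKit
import CoreImage

// Minimal sketch of the controller state the answer assumes; property names follow the answer.
class CameraViewController: UIViewController {
    let context: CIContext = CIContext(options: [kCIContextUseSoftwareRenderer: false])
    var lastImage: UIImage?                      // changed from CIImage? to UIImage?
    @IBOutlet weak var imageView: UIImageView!

    // blendImages(_:) from the answer would live here.
}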
func blendImages(buffer: CMSampleBuffer) {
    let priority = DISPATCH_QUEUE_PRIORITY_DEFAULT
    dispatch_async(dispatch_get_global_queue(priority, 0)) {
        let pixelBuffer = CMSampleBufferGetImageBuffer(buffer)
        let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

        if let backgroundImage = self.lastImage {
            let blendEffect = CIFilter(name: "CILightenBlendMode")!
            blendEffect.setValue(CIImage(image: backgroundImage),
                                 forKey: kCIInputBackgroundImageKey)
            blendEffect.setValue(cameraImage,
                                 forKey: kCIInputImageKey)

            // Rendering to a CGImage here forces Core Image to flatten the chain,
            // so each frame starts from a freshly rendered background.
            let imageRef = self.context.createCGImage(
                blendEffect.outputImage!,
                fromRect: blendEffect.outputImage!.extent)
            self.lastImage = UIImage(CGImage: imageRef)
            print("Blending")
        } else {
            // First frame: just render it and keep it as the background.
            let imageRef = self.context.createCGImage(
                cameraImage,
                fromRect: cameraImage.extent)
            self.lastImage = UIImage(CGImage: imageRef)
        }

        let filteredImage = self.lastImage

        dispatch_async(dispatch_get_main_queue()) {
            self.imageView.image = filteredImage
        }
    }
}
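As an aside, the CIImageAccumulator route mentioned above could look roughly like the sketch below. The LightenAccumulator type and its accumulate method are hypothetical names invented for illustration; only CIImageAccumulator, CILightenBlendMode, and the Core Image keys come from the real API, and the accumulator is assumed to be created lazily from the first frame's extent.

import CoreImage

// Hypothetical helper -- not part of the original code.
final class LightenAccumulator {
    private var accumulator: CIImageAccumulator?

    // Blends each incoming frame into a running "lightest pixel wins" image.
    func accumulate(cameraImage: CIImage) -> CIImage {
        if accumulator == nil {
            // Lazily create the accumulator sized to the first frame and seed it.
            accumulator = CIImageAccumulator(extent: cameraImage.extent,
                                             format: kCIFormatARGB8)
            accumulator?.setImage(cameraImage)
            return cameraImage
        }

        let blend = CIFilter(name: "CILightenBlendMode")!
        blend.setValue(accumulator!.image(), forKey: kCIInputBackgroundImageKey)
        blend.setValue(cameraImage, forKey: kCIInputImageKey)
        accumulator!.setImage(blend.outputImage!)
        return accumulator!.image()
    }
}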
Funky effect!
Simon