CustomVideoCompositorClass - 绘制时出现 CGContext 错误(CGImage、CGRect)
CustomVideoCompositorClass - CGContext error on draw(CGImage, CGRect)
我已按照 说明实现视频(小)超过视频(大)。
获取资产,创建合成...像这样
...
let videoComposition = AVMutableVideoComposition()
videoComposition.customVideoCompositorClass = CustomComposition.self
...
主要问题是什么时候需要CGContext绘制cgimage:
class CustomComposition: NSObject, AVVideoCompositing {
var requiredPixelBufferAttributesForRenderContext: [String : Any] = [
kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32BGRA),
kCVPixelBufferOpenGLESCompatibilityKey as String : NSNumber(value: true),
kCVPixelBufferOpenGLCompatibilityKey as String : NSNumber(value: true)
]
var sourcePixelBufferAttributes: [String : Any]? = [
kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32BGRA),
kCVPixelBufferOpenGLESCompatibilityKey as String : NSNumber(value: true),
kCVPixelBufferOpenGLCompatibilityKey as String : NSNumber(value: true)
]
override init() {
super.init()
}
func startRequest(_ request: AVAsynchronousVideoCompositionRequest) {
guard let destination: CVPixelBuffer = request.renderContext.newPixelBuffer() else { return request.finish(with: NSError(domain: "", code: 3, userInfo: nil)) }
if request.sourceTrackIDs.count == 2 {
guard let front = request.sourceFrame(byTrackID: 2) else { return request.finish(with: NSError(domain: "", code: 1, userInfo: nil)) }
guard let back = request.sourceFrame(byTrackID: 1) else { return request.finish(with: NSError(domain: "", code: 2, userInfo: nil)) }
CVPixelBufferLockBaseAddress(front, .readOnly)
CVPixelBufferLockBaseAddress(back, .readOnly)
CVPixelBufferLockBaseAddress(destination, [])
renderFrontBuffer(front, back: back, to: destination)
CVPixelBufferUnlockBaseAddress(destination, [])
CVPixelBufferUnlockBaseAddress(back, .readOnly)
CVPixelBufferUnlockBaseAddress(front, .readOnly)
}
request.finish(withComposedVideoFrame: destination)
CVBufferRemoveAllAttachments(destination)
}
func renderFrontBuffer(_ front: CVPixelBuffer, back: CVPixelBuffer, to destination: CVPixelBuffer) {
var gc: CGContext? = nil
guard let frontImage: CGImage = createSourceImage(from: front) else { return }
guard let backImage: CGImage = createSourceImage(from: back) else { return }
let width: Int = CVPixelBufferGetWidth(destination) //Int(Config.renderSize.width)
let height: Int = CVPixelBufferGetHeight(destination)
let frame = CGRect(x: 0, y: 0, width: CGFloat(width), height: CGFloat(height))
let colorSpace = backImage.colorSpace!
gc = CGContext(data: CVPixelBufferGetBaseAddress(destination), width: width, height: height, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(destination), space: colorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
// MARK: - Place Back (big)
//gc?.draw(backImage, in: frame) // <- Problem: EXC_BAD_ACCESS
gc?.beginPath()
gc?.addRect(CGRect.init(x: 24, y: 64, width: 67, height: 120))
gc?.setFillColor(UIColor.yellow.cgColor)
gc?.fillPath()
// MARK: - Place Front (small)
//gc?.draw(frontImage, in: frame) // <- Problem: EXC_BAD_ACCESS
}
func createSourceImage(from buffer: CVPixelBuffer) -> CGImage? {
let width: Int = CVPixelBufferGetWidth(buffer)
let height: Int = CVPixelBufferGetHeight(buffer)
let stride: Int = CVPixelBufferGetBytesPerRow(buffer)
var data = CVPixelBufferGetBaseAddress(buffer)
let rgb = CGColorSpaceCreateDeviceRGB()
let provider = CGDataProvider(dataInfo: nil, data: &data, size: height * stride) { (_, _, _) in }
var image: CGImage? = nil
let last = CGBitmapInfo(rawValue: CGBitmapInfo.byteOrder32Big.rawValue | CGImageAlphaInfo.premultipliedLast.rawValue)
if let provider = provider {
image = CGImage(width: width, height: height, bitsPerComponent: 8, bitsPerPixel: 32, bytesPerRow: stride, space: rgb, bitmapInfo: last, provider: provider, decode: nil, shouldInterpolate: false, intent: .defaultIntent)
}
return image
}
func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) { }
}
当未调用 .draw 时,渲染类似这样的内容并具有两个视频的音频(没关系),否则调用时会立即出现 EXC_BAD_ACCESS
错误。
我已经尝试使用 Zombie Object 进行调试,但没有。
我也尝试保存cgimage的png,还是一样的错误
...
let uimg = UIImage(cgImage: frontImage)
let data = uimg.pngData() <- EXC_BAD_ACCESS
...
我的猜测是我在 createSourceImage
和 CGDataProvider
中遗漏了一些东西,或者在 requiredPixelBufferAttributesForRenderContext
或 sourcePixelBufferAttributes
数组中遗漏了一些东西。
或者如果有人有其他想法如何实现这个(视频上的视频),谢谢。
看看这个,你可以试试这个!
https://i.stack.imgur.com/jFJil.jpg
class MyCustomComposition: NSObject, AVVideoCompositing {
var sourcePixelBufferAttributes: [String : Any]? {
get {
return ["\(kCVPixelBufferPixelFormatTypeKey)": kCVPixelFormatType_32BGRA]
}
}
var requiredPixelBufferAttributesForRenderContext: [String : Any] {
get {
return ["\(kCVPixelBufferPixelFormatTypeKey)": kCVPixelFormatType_32BGRA]
}
}
func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) {
}
func startRequest(_ asyncVideoCompositionRequest: AVAsynchronousVideoCompositionRequest) {
let request = asyncVideoCompositionRequest
//Destination Buffer
let destination = request.renderContext.newPixelBuffer()
//Grab Frames
//My Video track id is 1 , 2 for back and front videos. Please Check your 'trackID' using 'request.sourceTrackIDs' then use it
if request.sourceTrackIDs.count != 2 { return }
guard let front = request.sourceFrame(byTrackID: 2) else { return request.finish(with: NSError(domain: "", code: 1, userInfo: nil)) }
guard let back = request.sourceFrame(byTrackID: 1) else { return request.finish(with: NSError(domain: "", code: 2, userInfo: nil)) }
CVPixelBufferLockBaseAddress(front, CVPixelBufferLockFlags.readOnly)
CVPixelBufferLockBaseAddress(back, CVPixelBufferLockFlags.readOnly)
CVPixelBufferLockBaseAddress(destination!, CVPixelBufferLockFlags.readOnly)
renderFromBuffer(destination: destination!, front: front, back: back)
CVPixelBufferUnlockBaseAddress(destination!, CVPixelBufferLockFlags.readOnly)
CVPixelBufferUnlockBaseAddress(back, CVPixelBufferLockFlags.readOnly)
CVPixelBufferUnlockBaseAddress(front, CVPixelBufferLockFlags.readOnly)
request.finish(withComposedVideoFrame: destination!)
}
private func renderFromBuffer(destination: CVPixelBuffer, front: CVPixelBuffer, back: CVPixelBuffer) {
let width = CVPixelBufferGetWidth(destination)
let height = CVPixelBufferGetHeight(destination)
let newContext = CGContext(data: CVPixelBufferGetBaseAddress(destination), width: width, height: height, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(destination), space: CGColorSpaceCreateDeviceRGB(), bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
let frontImage = createSourceImageFromReferance(buffer: front)
let backImage = createSourceImageFromReferance(buffer: back)
//DRAW 'BACK' IMAGE
newContext?.saveGState()
newContext?.translateBy(x: 0, y: CGFloat(height))
newContext?.scaleBy(x: 1.0, y: -1.0)
let backImagerect = CGRect(x: 0, y: 0, width: width, height: height)
newContext?.draw(backImage, in: backImagerect)
newContext?.scaleBy(x: 1.0, y: -1.0)
newContext?.translateBy(x: 0, y: -CGFloat(height / 2))
newContext?.restoreGState()
//DRAW 'FRONT' IMAGE AT CENTER
newContext?.saveGState()
newContext?.translateBy(x: 0, y: CGFloat(height))
newContext?.scaleBy(x: 1.0, y: -1.0)
let frontImageSize = CGSize(width: frontImage.width , height: frontImage.height)
let centerPoint = CGPoint(x: CGFloat(width / 2) - (frontImageSize.width / 2), y: CGFloat(height / 2) - (frontImageSize.height / 2))
let frontImagerect = CGRect(origin: centerPoint, size: frontImageSize)
//Clip Front Video using BezierPath
let bezierPath = UIBezierPath(roundedRect: frontImagerect, cornerRadius: 50)
newContext?.addPath(bezierPath.cgPath)
newContext?.clip()
newContext?.draw(frontImage, in: frontImagerect)
newContext?.scaleBy(x: 1.0, y: -1.0)
newContext?.translateBy(x: 0, y: -CGFloat(height / 2))
newContext?.restoreGState()
newContext?.flush()
}
private func createSourceImageFromReferance(buffer: CVPixelBuffer) -> CGImage {
let width = CVPixelBufferGetWidth(buffer)
let height = CVPixelBufferGetHeight(buffer)
let stride = CVPixelBufferGetBytesPerRow(buffer)
let data = CVPixelBufferGetBaseAddress(buffer)
let rgb = CGColorSpaceCreateDeviceRGB()
let releaseMaskImagePixelData: CGDataProviderReleaseDataCallback = { (info: UnsafeMutableRawPointer?, data: UnsafeRawPointer, size: Int) -> () in
// https://developer.apple.com/reference/coregraphics/cgdataproviderreleasedatacallback
return
}
let provider:CGDataProvider? = CGDataProvider(dataInfo: nil, data: data!, size: height * stride, releaseData: releaseMaskImagePixelData)
let last = CGBitmapInfo(rawValue: CGBitmapInfo.byteOrder32Big.rawValue | CGImageAlphaInfo.premultipliedLast.rawValue)
var image = CGImage(width: width, height: height, bitsPerComponent: 8, bitsPerPixel: 32, bytesPerRow: stride, space: rgb, bitmapInfo: last, provider: provider!, decode: nil, shouldInterpolate: false, intent: .defaultIntent)
var rect = CGRect(x: 0, y: 0, width: Int(width), height: Int(height))
let newContext = CGContext(data: nil, width: width, height: height, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(buffer), space: CGColorSpaceCreateDeviceRGB(), bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
newContext?.saveGState()
newContext?.translateBy(x: 0, y: CGFloat(height))
newContext?.scaleBy(x: 1.0, y: -1.0)
newContext?.draw(image!, in: rect)
newContext?.restoreGState()
let im = (newContext!.makeImage())!
newContext?.flush()
return im
}
}
为以上 class 创建如下合成:
let mainInstructionCompostion = AVMutableVideoComposition()
mainInstructionCompostion.frameDuration = CMTime(value: 1, timescale: 30)
mainInstructionCompostion.renderSize = videoInfo.videoSize
mainInstructionCompostion.instructions = [mainInstruction]
mainInstructionCompostion.customVideoCompositorClass = MyCustomComposition.self
我已按照
获取资产,创建合成...像这样
...
let videoComposition = AVMutableVideoComposition()
videoComposition.customVideoCompositorClass = CustomComposition.self
...
主要问题是什么时候需要CGContext绘制cgimage:
class CustomComposition: NSObject, AVVideoCompositing {
var requiredPixelBufferAttributesForRenderContext: [String : Any] = [
kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32BGRA),
kCVPixelBufferOpenGLESCompatibilityKey as String : NSNumber(value: true),
kCVPixelBufferOpenGLCompatibilityKey as String : NSNumber(value: true)
]
var sourcePixelBufferAttributes: [String : Any]? = [
kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32BGRA),
kCVPixelBufferOpenGLESCompatibilityKey as String : NSNumber(value: true),
kCVPixelBufferOpenGLCompatibilityKey as String : NSNumber(value: true)
]
override init() {
super.init()
}
func startRequest(_ request: AVAsynchronousVideoCompositionRequest) {
guard let destination: CVPixelBuffer = request.renderContext.newPixelBuffer() else { return request.finish(with: NSError(domain: "", code: 3, userInfo: nil)) }
if request.sourceTrackIDs.count == 2 {
guard let front = request.sourceFrame(byTrackID: 2) else { return request.finish(with: NSError(domain: "", code: 1, userInfo: nil)) }
guard let back = request.sourceFrame(byTrackID: 1) else { return request.finish(with: NSError(domain: "", code: 2, userInfo: nil)) }
CVPixelBufferLockBaseAddress(front, .readOnly)
CVPixelBufferLockBaseAddress(back, .readOnly)
CVPixelBufferLockBaseAddress(destination, [])
renderFrontBuffer(front, back: back, to: destination)
CVPixelBufferUnlockBaseAddress(destination, [])
CVPixelBufferUnlockBaseAddress(back, .readOnly)
CVPixelBufferUnlockBaseAddress(front, .readOnly)
}
request.finish(withComposedVideoFrame: destination)
CVBufferRemoveAllAttachments(destination)
}
func renderFrontBuffer(_ front: CVPixelBuffer, back: CVPixelBuffer, to destination: CVPixelBuffer) {
var gc: CGContext? = nil
guard let frontImage: CGImage = createSourceImage(from: front) else { return }
guard let backImage: CGImage = createSourceImage(from: back) else { return }
let width: Int = CVPixelBufferGetWidth(destination) //Int(Config.renderSize.width)
let height: Int = CVPixelBufferGetHeight(destination)
let frame = CGRect(x: 0, y: 0, width: CGFloat(width), height: CGFloat(height))
let colorSpace = backImage.colorSpace!
gc = CGContext(data: CVPixelBufferGetBaseAddress(destination), width: width, height: height, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(destination), space: colorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
// MARK: - Place Back (big)
//gc?.draw(backImage, in: frame) // <- Problem: EXC_BAD_ACCESS
gc?.beginPath()
gc?.addRect(CGRect.init(x: 24, y: 64, width: 67, height: 120))
gc?.setFillColor(UIColor.yellow.cgColor)
gc?.fillPath()
// MARK: - Place Front (small)
//gc?.draw(frontImage, in: frame) // <- Problem: EXC_BAD_ACCESS
}
func createSourceImage(from buffer: CVPixelBuffer) -> CGImage? {
let width: Int = CVPixelBufferGetWidth(buffer)
let height: Int = CVPixelBufferGetHeight(buffer)
let stride: Int = CVPixelBufferGetBytesPerRow(buffer)
var data = CVPixelBufferGetBaseAddress(buffer)
let rgb = CGColorSpaceCreateDeviceRGB()
let provider = CGDataProvider(dataInfo: nil, data: &data, size: height * stride) { (_, _, _) in }
var image: CGImage? = nil
let last = CGBitmapInfo(rawValue: CGBitmapInfo.byteOrder32Big.rawValue | CGImageAlphaInfo.premultipliedLast.rawValue)
if let provider = provider {
image = CGImage(width: width, height: height, bitsPerComponent: 8, bitsPerPixel: 32, bytesPerRow: stride, space: rgb, bitmapInfo: last, provider: provider, decode: nil, shouldInterpolate: false, intent: .defaultIntent)
}
return image
}
func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) { }
}
当未调用 .draw 时,渲染类似这样的内容并具有两个视频的音频(没关系),否则调用时会立即出现 EXC_BAD_ACCESS
错误。
我已经尝试使用 Zombie Object 进行调试,但没有。
我也尝试保存cgimage的png,还是一样的错误
...
let uimg = UIImage(cgImage: frontImage)
let data = uimg.pngData() <- EXC_BAD_ACCESS
...
我的猜测是我在 createSourceImage
和 CGDataProvider
中遗漏了一些东西,或者在 requiredPixelBufferAttributesForRenderContext
或 sourcePixelBufferAttributes
数组中遗漏了一些东西。
或者如果有人有其他想法如何实现这个(视频上的视频),谢谢。
看看这个,你可以试试这个!
https://i.stack.imgur.com/jFJil.jpg
class MyCustomComposition: NSObject, AVVideoCompositing {
var sourcePixelBufferAttributes: [String : Any]? {
get {
return ["\(kCVPixelBufferPixelFormatTypeKey)": kCVPixelFormatType_32BGRA]
}
}
var requiredPixelBufferAttributesForRenderContext: [String : Any] {
get {
return ["\(kCVPixelBufferPixelFormatTypeKey)": kCVPixelFormatType_32BGRA]
}
}
func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) {
}
func startRequest(_ asyncVideoCompositionRequest: AVAsynchronousVideoCompositionRequest) {
let request = asyncVideoCompositionRequest
//Destination Buffer
let destination = request.renderContext.newPixelBuffer()
//Grab Frames
//My Video track id is 1 , 2 for back and front videos. Please Check your 'trackID' using 'request.sourceTrackIDs' then use it
if request.sourceTrackIDs.count != 2 { return }
guard let front = request.sourceFrame(byTrackID: 2) else { return request.finish(with: NSError(domain: "", code: 1, userInfo: nil)) }
guard let back = request.sourceFrame(byTrackID: 1) else { return request.finish(with: NSError(domain: "", code: 2, userInfo: nil)) }
CVPixelBufferLockBaseAddress(front, CVPixelBufferLockFlags.readOnly)
CVPixelBufferLockBaseAddress(back, CVPixelBufferLockFlags.readOnly)
CVPixelBufferLockBaseAddress(destination!, CVPixelBufferLockFlags.readOnly)
renderFromBuffer(destination: destination!, front: front, back: back)
CVPixelBufferUnlockBaseAddress(destination!, CVPixelBufferLockFlags.readOnly)
CVPixelBufferUnlockBaseAddress(back, CVPixelBufferLockFlags.readOnly)
CVPixelBufferUnlockBaseAddress(front, CVPixelBufferLockFlags.readOnly)
request.finish(withComposedVideoFrame: destination!)
}
private func renderFromBuffer(destination: CVPixelBuffer, front: CVPixelBuffer, back: CVPixelBuffer) {
let width = CVPixelBufferGetWidth(destination)
let height = CVPixelBufferGetHeight(destination)
let newContext = CGContext(data: CVPixelBufferGetBaseAddress(destination), width: width, height: height, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(destination), space: CGColorSpaceCreateDeviceRGB(), bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
let frontImage = createSourceImageFromReferance(buffer: front)
let backImage = createSourceImageFromReferance(buffer: back)
//DRAW 'BACK' IMAGE
newContext?.saveGState()
newContext?.translateBy(x: 0, y: CGFloat(height))
newContext?.scaleBy(x: 1.0, y: -1.0)
let backImagerect = CGRect(x: 0, y: 0, width: width, height: height)
newContext?.draw(backImage, in: backImagerect)
newContext?.scaleBy(x: 1.0, y: -1.0)
newContext?.translateBy(x: 0, y: -CGFloat(height / 2))
newContext?.restoreGState()
//DRAW 'FRONT' IMAGE AT CENTER
newContext?.saveGState()
newContext?.translateBy(x: 0, y: CGFloat(height))
newContext?.scaleBy(x: 1.0, y: -1.0)
let frontImageSize = CGSize(width: frontImage.width , height: frontImage.height)
let centerPoint = CGPoint(x: CGFloat(width / 2) - (frontImageSize.width / 2), y: CGFloat(height / 2) - (frontImageSize.height / 2))
let frontImagerect = CGRect(origin: centerPoint, size: frontImageSize)
//Clip Front Video using BezierPath
let bezierPath = UIBezierPath(roundedRect: frontImagerect, cornerRadius: 50)
newContext?.addPath(bezierPath.cgPath)
newContext?.clip()
newContext?.draw(frontImage, in: frontImagerect)
newContext?.scaleBy(x: 1.0, y: -1.0)
newContext?.translateBy(x: 0, y: -CGFloat(height / 2))
newContext?.restoreGState()
newContext?.flush()
}
private func createSourceImageFromReferance(buffer: CVPixelBuffer) -> CGImage {
let width = CVPixelBufferGetWidth(buffer)
let height = CVPixelBufferGetHeight(buffer)
let stride = CVPixelBufferGetBytesPerRow(buffer)
let data = CVPixelBufferGetBaseAddress(buffer)
let rgb = CGColorSpaceCreateDeviceRGB()
let releaseMaskImagePixelData: CGDataProviderReleaseDataCallback = { (info: UnsafeMutableRawPointer?, data: UnsafeRawPointer, size: Int) -> () in
// https://developer.apple.com/reference/coregraphics/cgdataproviderreleasedatacallback
return
}
let provider:CGDataProvider? = CGDataProvider(dataInfo: nil, data: data!, size: height * stride, releaseData: releaseMaskImagePixelData)
let last = CGBitmapInfo(rawValue: CGBitmapInfo.byteOrder32Big.rawValue | CGImageAlphaInfo.premultipliedLast.rawValue)
var image = CGImage(width: width, height: height, bitsPerComponent: 8, bitsPerPixel: 32, bytesPerRow: stride, space: rgb, bitmapInfo: last, provider: provider!, decode: nil, shouldInterpolate: false, intent: .defaultIntent)
var rect = CGRect(x: 0, y: 0, width: Int(width), height: Int(height))
let newContext = CGContext(data: nil, width: width, height: height, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(buffer), space: CGColorSpaceCreateDeviceRGB(), bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
newContext?.saveGState()
newContext?.translateBy(x: 0, y: CGFloat(height))
newContext?.scaleBy(x: 1.0, y: -1.0)
newContext?.draw(image!, in: rect)
newContext?.restoreGState()
let im = (newContext!.makeImage())!
newContext?.flush()
return im
}
}
为以上 class 创建如下合成:
let mainInstructionCompostion = AVMutableVideoComposition()
mainInstructionCompostion.frameDuration = CMTime(value: 1, timescale: 30)
mainInstructionCompostion.renderSize = videoInfo.videoSize
mainInstructionCompostion.instructions = [mainInstruction]
mainInstructionCompostion.customVideoCompositorClass = MyCustomComposition.self