Rendering animated CALayer off-screen using CARenderer with MTLTexture
I want to render an animated NSView (or just its underlying CALayer) into a series of images without the view ever being displayed on screen. I figured out how to do it with CARenderer and MTLTexture, but there are some issues with the approach below.
This runs in a playground and stores the output in an Off-screen Render folder inside your Downloads:
import AppKit
import CoreImage
import CoreServices
import ImageIO
import Metal
import QuartzCore
import PlaygroundSupport

// Keep the playground's run loop alive so the timer-driven renders can fire.
PlaygroundPage.current.needsIndefiniteExecution = true
let view = NSView(frame: CGRect(x: 0, y: 0, width: 600, height: 400))
let circle = NSView(frame: CGRect(x: 0, y: 0, width: 50, height: 50))
circle.wantsLayer = true
circle.layer?.backgroundColor = NSColor.red.cgColor
circle.layer?.cornerRadius = 25
view.wantsLayer = true
view.addSubview(circle)
let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .rgba8Unorm, width: 600, height: 400, mipmapped: false)
textureDescriptor.usage = [MTLTextureUsage.shaderRead, .shaderWrite, .renderTarget]
let device = MTLCreateSystemDefaultDevice()!
let texture: MTLTexture = device.makeTexture(descriptor: textureDescriptor)!
let context = CIContext(mtlDevice: device)
let renderer = CARenderer(mtlTexture: texture)
renderer.layer = view.layer
renderer.bounds = view.frame
let outputURL: URL = try! FileManager.default.url(for: .downloadsDirectory, in: .userDomainMask, appropriateFor: nil, create: false).appendingPathComponent("Off-screen Render")
try? FileManager.default.removeItem(at: outputURL)
try! FileManager.default.createDirectory(at: outputURL, withIntermediateDirectories: true, attributes: nil)
var frameNumber: Int = 0
func render() {
    Swift.print("Rendering frame #\(frameNumber)…")

    renderer.beginFrame(atTime: CACurrentMediaTime(), timeStamp: nil)
    renderer.addUpdate(renderer.bounds)
    renderer.render()
    renderer.endFrame()

    let ciImage: CIImage = CIImage(mtlTexture: texture)!
    let cgImage: CGImage = context.createCGImage(ciImage, from: ciImage.extent)!
    let url: URL = outputURL.appendingPathComponent("frame-\(frameNumber).png")
    let destination: CGImageDestination = CGImageDestinationCreateWithURL(url as CFURL, kUTTypePNG, 1, nil)!
    CGImageDestinationAddImage(destination, cgImage, nil)
    guard CGImageDestinationFinalize(destination) else { fatalError() }

    frameNumber += 1
}
var timer: Timer?
NSAnimationContext.runAnimationGroup({ context in
    context.duration = 0.25
    view.animator().frame.origin = CGPoint(x: 550, y: 350)
}, completionHandler: {
    timer?.invalidate()
    render()
    Swift.print("Finished off-screen rendering of \(frameNumber) frames in \(outputURL.path)…")
})
// Render one frame immediately after the animation starts and one more after it completes.
// For the purposes of this demo a timer is used instead of a display link.
render()
timer = Timer.scheduledTimer(withTimeInterval: 1 / 30, repeats: true, block: { _ in render() })
The problems with the above code are shown in the attachments below:
The texture doesn't get cleared, so every next frame is drawn on top of the previous render. I'm aware that I could use replace(region:…), but suspect it would be inefficient compared to a render pass with a clear-color description. Is this true? Can a render pass be used with CARenderer?
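Here is roughly what I have in mind for the render-pass variant — a sketch only, with an assumed commandQueue created once from the same device as above:

// Sketch: clear the texture with an empty render pass before each frame.
// `commandQueue` is an assumed addition, created once next to `device`.
let commandQueue = device.makeCommandQueue()!

func clearTexture() {
    let passDescriptor = MTLRenderPassDescriptor()
    passDescriptor.colorAttachments[0].texture = texture
    passDescriptor.colorAttachments[0].loadAction = .clear    // discard the previous frame
    passDescriptor.colorAttachments[0].storeAction = .store   // keep the cleared pixels
    passDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0, green: 0, blue: 0, alpha: 0)

    let commandBuffer = commandQueue.makeCommandBuffer()!
    // An encoder with no draw calls still executes the load (clear) action.
    let encoder = commandBuffer.makeRenderCommandEncoder(descriptor: passDescriptor)!
    encoder.endEncoding()
    commandBuffer.commit()
    commandBuffer.waitUntilCompleted()
}

Calling clearTexture() at the top of render() would then wipe the previous frame before CARenderer draws the next one.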
The first frame (in a real project it's two or three frames) often comes out empty. I suspect this has to do with some asynchronous behaviour in CARenderer's rendering or in the CGImage construction with Core Image. How can this be avoided? Is there some kind of wait-until-rendered callback on the texture?
After talking to Apple Developer Technical Support, it turns out that:
Core Image defers the rendering until the client requests the access to the frame buffer, i.e. CVPixelBufferLockBaseAddress.
So, the solution is simply to call CVPixelBufferLockBaseAddress after calling CIContext.render, as shown below:
for frameNumber in 0 ..< frameCount {
    var pixelBuffer: CVPixelBuffer?
    guard let pixelBufferPool: CVPixelBufferPool = pixelBufferAdaptor.pixelBufferPool else { preconditionFailure() }
    precondition(CVPixelBufferPoolCreatePixelBuffer(nil, pixelBufferPool, &pixelBuffer) == kCVReturnSuccess)

    let ciImage = CIImage(cgImage: frameImage)
    context.render(ciImage, to: pixelBuffer!)

    // Locking the base address forces Core Image to finish the deferred render.
    precondition(CVPixelBufferLockBaseAddress(pixelBuffer!, []) == kCVReturnSuccess)
    defer { precondition(CVPixelBufferUnlockBaseAddress(pixelBuffer!, []) == kCVReturnSuccess) }

    // Sanity check: the buffer now contains non-zero pixel data.
    let bytes = UnsafeBufferPointer(start: CVPixelBufferGetBaseAddress(pixelBuffer!)!.assumingMemoryBound(to: UInt8.self), count: CVPixelBufferGetDataSize(pixelBuffer!))
    precondition(bytes.contains(where: { $0 != 0 }))

    while !input.isReadyForMoreMediaData { Thread.sleep(forTimeInterval: 10 / 1000) }
    precondition(pixelBufferAdaptor.append(pixelBuffer!, withPresentationTime: CMTime(seconds: Double(frameNumber) * frameRate, preferredTimescale: 600)))
}
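Transposed to the CARenderer/MTLTexture setup from the question, the same fix might look like the following sketch; the CVPixelBuffer creation is my addition, not part of the original answer:

// Sketch: force Core Image to finish before reading the rendered frame.
var pixelBuffer: CVPixelBuffer?
precondition(CVPixelBufferCreate(nil, texture.width, texture.height, kCVPixelFormatType_32BGRA, nil, &pixelBuffer) == kCVReturnSuccess)

let ciImage = CIImage(mtlTexture: texture)!
context.render(ciImage, to: pixelBuffer!)

// Locking the base address blocks until the deferred render is complete.
precondition(CVPixelBufferLockBaseAddress(pixelBuffer!, []) == kCVReturnSuccess)
// … read or copy the pixels here; the first frames should no longer be empty …
precondition(CVPixelBufferUnlockBaseAddress(pixelBuffer!, []) == kCVReturnSuccess)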
P.S. This is the same as the answer to another question — you may want to check it out for deeper insight into where and how this issue can occur when working with AVFoundation. Although the question is different, the solution is exactly the same.
I think you can use AVVideoCompositionCoreAnimationTool to render the view with its animation.
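For completeness, a minimal sketch of how AVVideoCompositionCoreAnimationTool is typically wired up — the source asset URL and layer sizes here are placeholders:

import AVFoundation
import QuartzCore

// Placeholder source video; the animated layer is composited on top of it.
let asset = AVAsset(url: URL(fileURLWithPath: "/path/to/source.mov"))

let parentLayer = CALayer()
let videoLayer = CALayer()
let animationLayer = CALayer()   // attach your CAAnimations to this layer
parentLayer.frame = CGRect(x: 0, y: 0, width: 600, height: 400)
videoLayer.frame = parentLayer.frame
animationLayer.frame = parentLayer.frame
parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(animationLayer)

// Note: animations must use beginTime = AVCoreAnimationBeginTimeAtZero,
// because a beginTime of 0 is interpreted as "now".
let composition = AVMutableVideoComposition(propertiesOf: asset)
composition.animationTool = AVVideoCompositionCoreAnimationTool(
    postProcessingAsVideoLayer: videoLayer, in: parentLayer)

let session = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)!
session.videoComposition = composition
// session.outputURL = …; session.exportAsynchronously { … }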