Pixelation artifact when exporting video using Metal
I have a very strange bug that only happens on random devices.
I am exporting a movie with AVAssetWriter while drawing with Metal shaders, and I get colored pixelation artifacts in the animated mp4 movie.
Video:
https://drive.google.com/file/d/1g6KyL18JqclOW1kVQkif3o3zaqylYaLk/view?usp=sharing
In the image/frame you can see the colored artifact pixels; it usually happens while the lines are moving (animated lines).
Any ideas?
Thanks
Initialization:
init?(outputURL url: URL, size: CGSize) {
    do {
        assetWriter = try AVAssetWriter(outputURL: url, fileType: .mp4)
    } catch let error {
        print(error)
        return nil
    }

    let compressionProperties = NSDictionary(dictionary: [
        AVVideoAverageBitRateKey: ClipSettings.bitrate
    ])

    let outputSettings: [String: Any]
    if #available(iOS 11.0, *), AVAssetExportSession.allExportPresets().contains(AVAssetExportPresetHEVCHighestQuality) {
        outputSettings = [AVVideoCodecKey: AVVideoCodecType.hevc,
                          AVVideoWidthKey: size.width,
                          AVVideoHeightKey: size.height,
                          AVVideoCompressionPropertiesKey: compressionProperties]
    } else {
        // Fallback on earlier versions
        outputSettings = [AVVideoCodecKey: AVVideoCodecType.h264,
                          AVVideoWidthKey: size.width,
                          AVVideoHeightKey: size.height,
                          AVVideoCompressionPropertiesKey: compressionProperties]
    }

    assetWriterVideoInput = AVAssetWriterInput(mediaType: .video, outputSettings: outputSettings)
    assetWriterVideoInput.expectsMediaDataInRealTime = false

    let sourcePixelBufferAttributes: [String: Any] = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
        kCVPixelBufferWidthKey as String: size.width,
        kCVPixelBufferHeightKey as String: size.height
    ]
    assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput,
                                                                       sourcePixelBufferAttributes: sourcePixelBufferAttributes)

    if assetWriter.canAdd(assetWriterVideoInput) {
        assetWriter.add(assetWriterVideoInput)
    } else {
        print("add failed")
    }
}
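For context, a minimal sketch of how a writer set up like this is typically started and finished around the frame appends. The startRecording/stopRecording names are illustrative and not from the original code; frameNumber and isRecording are assumed to be the same stored properties used below.

func startRecording() {
    // Start the writer and the session before appending any frames;
    // writeFrame(forTexture:time:podSticker:) then appends frames at
    // presentation times derived from frameNumber.
    guard assetWriter.startWriting() else {
        print("startWriting failed: \(String(describing: assetWriter.error))")
        return
    }
    assetWriter.startSession(atSourceTime: .zero)
    frameNumber = 0
    isRecording = true
}

func stopRecording(completion: @escaping () -> Void) {
    // Mark the input finished and let the writer flush the last samples.
    isRecording = false
    assetWriterVideoInput.markAsFinished()
    assetWriter.finishWriting(completionHandler: completion)
}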
Here is the method that writes the Metal texture into a video frame:
func writeFrame(forTexture texture: MTLTexture, time: TimeInterval, podSticker: PodStickerView) {
    if !isRecording {
        return
    }

    let fps: Int32 = 60
    let intervalDuration = CFTimeInterval(1.0 / Double(fps))
    let timescale: Float = 600
    let kTimescale: Int32 = Int32(timescale)
    let frameDuration = CMTimeMake(
        value: Int64(floor(timescale / Float(fps))),
        timescale: kTimescale
    )

    var waitTime = 300.0 // fixes dropped frames
    while !assetWriterVideoInput.isReadyForMoreMediaData {
        let waitInterval: TimeInterval = 0.001 * waitTime
        let maxDate = Date(timeIntervalSinceNow: waitInterval)
        RunLoop.current.run(until: maxDate)
        waitTime += 200.0 // add 200ms every time
    }

    guard let pixelBufferPool = assetWriterPixelBufferInput.pixelBufferPool else {
        print("Pixel buffer asset writer input did not have a pixel buffer pool available; cannot retrieve frame")
        return
    }

    var maybePixelBuffer: CVPixelBuffer? = nil
    let status = CVPixelBufferPoolCreatePixelBuffer(nil, pixelBufferPool, &maybePixelBuffer)
    if status != kCVReturnSuccess {
        print("Could not get pixel buffer from asset writer input; dropping frame...")
        return
    }
    guard let pixelBuffer = maybePixelBuffer else { return }

    CVPixelBufferLockBaseAddress(pixelBuffer, [])
    let pixelBufferBytes = CVPixelBufferGetBaseAddress(pixelBuffer)!

    // Use the bytes per row value from the pixel buffer since its stride may be rounded up to be 16-byte aligned
    let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
    let region = MTLRegionMake2D(0, 0, texture.width, texture.height)
    texture.getBytes(pixelBufferBytes, bytesPerRow: bytesPerRow, from: region, mipmapLevel: 0)

    let presentationTime = CMTimeMultiply(frameDuration, multiplier: Int32(frameNumber))
    Engine.renderTime = presentationTime.seconds

    // write video
    self.assetWriterPixelBufferInput.append(pixelBuffer, withPresentationTime: presentationTime)
    CVPixelBufferUnlockBaseAddress(pixelBuffer, [])
}
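One note on the method above: texture.getBytes reads the texture back on the CPU, so it only sees the finished frame once the GPU has completed the render pass into that texture. A minimal sketch of how the call could be driven from a command buffer completion handler; scheduleWrite is a hypothetical helper and not part of the original code.

func scheduleWrite(of texture: MTLTexture,
                   after commandBuffer: MTLCommandBuffer,
                   time: TimeInterval,
                   podSticker: PodStickerView) {
    // Register the handler before committing the command buffer; it fires
    // once the GPU has finished the frame, so getBytes sees the final pixels.
    commandBuffer.addCompletedHandler { [weak self] _ in
        self?.writeFrame(forTexture: texture, time: time, podSticker: podSticker)
    }
}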
I had to change the MTLTexture usage from .shaderRead to .shaderWrite.
According to Apple: "In iOS devices with GPU family 5, Metal doesn’t apply lossless compression to the given texture if you set this option."
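For illustration, a minimal sketch of what that usage change can look like where the render-target texture is created. The device, size, and pixel format here are assumptions; the relevant part is adding .shaderWrite to the usage flags.

let descriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .bgra8Unorm,
                                                          width: Int(size.width),
                                                          height: Int(size.height),
                                                          mipmapped: false)
// Including .shaderWrite in the usage keeps Metal from applying lossless
// compression to this texture on GPU family 5 devices (per the quote above),
// so the CPU read-back in writeFrame is not affected by that compression.
descriptor.usage = [.renderTarget, .shaderRead, .shaderWrite]
let texture = device.makeTexture(descriptor: descriptor)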