Converting UIImage to CIImage fails
I'm building a video-recording iOS app. I need to add a timestamp to the recorded video, so I convert each CMSampleBuffer to a UIImage, draw the timestamp on it, and then convert it back to a CMSampleBuffer. During the UIImage-to-CMSampleBuffer conversion I need to turn the UIImage into a CIImage, but that step fails. How do I correctly convert a UIImage to a CIImage?
func appendVideo(from sampleBuffer: CMSampleBuffer) {
    // - Guards
    guard let videoInput = assetWriter?.inputs.first(where: { $0.mediaType == .video }) else {
        print("video input not found")
        return
    }
    guard videoInput.isReadyForMoreMediaData else {
        print("video input not ready for more media data")
        return
    }
    // - Timestamp
    let sample: Sample = Sample(sampleBuffer: sampleBuffer)
    guard let ciImage = generateCIImage(from: sampleBuffer) else {
        print("CIImage creation from sampleBuffer failed")
        return
    }
    let uiImage = UIImage(ciImage: ciImage)
    guard let timestampAddedImage = self.addTimestamp(on: uiImage) else {
        fatalError("should not reach here")
    }
    ////////////////////////////////////
    // THIS PROCESS FAILS
    ////////////////////////////////////
    guard let timestampAddedCiImage = timestampAddedImage.ciImage else {
        print("UIImage to CIImage conversion failed")
        return
    }
    guard let timestampAddedCvpixelBuffer = generateCVPixelBuffer(from: timestampAddedCiImage) else {
        print("CVPixelBuffer creation from CIImage failed")
        return
    }
    guard let timestampAddedSampleBuffer = generateCMSampleBuffer(from: timestampAddedCvpixelBuffer, timingInfo: sample.timingInfo) else {
        print("CMSampleBuffer creation from CVPixelBuffer failed")
        return
    }
    DispatchQueue.main.sync { [weak self] in
        self?.compositeImageView.image = timestampAddedImage
    }
    print("append video")
    videoInput.append(timestampAddedSampleBuffer)
}
func addTimestamp(on image: UIImage) -> UIImage? {
    let imageRect = CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height)
    UIGraphicsBeginImageContextWithOptions(image.size, true, 0.0)
    image.draw(in: imageRect)
    // Text Attributes
    let textColor = UIColor.white
    let textFont = UIFont.systemFont(ofSize: FontSize.sizeL, weight: .bold)
    let textFontAttributes = [
        NSAttributedString.Key.font: textFont,
        NSAttributedString.Key.foregroundColor: textColor,
        NSAttributedString.Key.backgroundColor: UIColor(hex: ColorConstants.black, alpha: 0.4)
    ]
    // Timestamp to add to image
    let formatter = DateFormatter()
    formatter.dateFormat = "yyyy/MM/dd HH:mm:ss:SSS"
    let timestamp: NSString = formatter.string(from: Date()) as NSString
    let textRect = CGRect(x: 6.0, y: 6.0, width: image.size.width, height: 32)
    timestamp.draw(in: textRect, withAttributes: textFontAttributes)
    // New Image
    let newImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return newImage
}
// MARK: - CMSampleBuffer <-> Image Conversion
// ref: https://fromatom.hatenablog.com/entry/2019/10/28/172628
// CMSampleBuffer > CIImage
func generateCIImage(from sampleBuffer: CMSampleBuffer) -> CIImage? {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
        return nil
    }
    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
    return ciImage
}
func generateCVPixelBuffer(from ciImage: CIImage) -> CVPixelBuffer? {
    let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] as CFDictionary
    var pixelBuffer: CVPixelBuffer!
    let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(ciImage.extent.size.width), Int(ciImage.extent.size.height), kCVPixelFormatType_32BGRA, attrs, &pixelBuffer)
    guard status == kCVReturnSuccess else {
        print("CVPixelBufferCreate failed")
        return nil
    }
    let ciContext = CIContext()
    ciContext.render(ciImage, to: pixelBuffer, bounds: ciImage.extent, colorSpace: CGColorSpaceCreateDeviceRGB())
    CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
    return pixelBuffer
}
func generateCMSampleBuffer(from cvPixelBuffer: CVPixelBuffer, timingInfo: CMSampleTimingInfo) -> CMSampleBuffer? {
    var sampleBuffer: CMSampleBuffer?
    var timing: CMSampleTimingInfo = timingInfo
    var videoInfo: CMVideoFormatDescription!
    CMVideoFormatDescriptionCreateForImageBuffer(allocator: nil, imageBuffer: cvPixelBuffer, formatDescriptionOut: &videoInfo)
    CMSampleBufferCreateForImageBuffer(
        allocator: kCFAllocatorDefault,
        imageBuffer: cvPixelBuffer,
        dataReady: true,
        makeDataReadyCallback: nil,
        refcon: nil,
        formatDescription: videoInfo,
        sampleTiming: &timing,
        sampleBufferOut: &sampleBuffer
    )
    return sampleBuffer
}
private final class Sample {
    let timingInfo: CMSampleTimingInfo
    init(sampleBuffer: CMSampleBuffer) {
        let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        let duration = CMSampleBufferGetDuration(sampleBuffer)
        let decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
        timingInfo = CMSampleTimingInfo(duration: duration, presentationTimeStamp: presentationTimeStamp, decodeTimeStamp: decodeTimeStamp)
    }
}
A UIImage's ciImage property is only non-nil when the image was created directly from a CIImage; UIGraphicsGetImageFromCurrentImageContext() returns a CGImage-backed UIImage, so timestampAddedImage.ciImage is nil. Update your addTimestamp function like this and try again:
func addTimestamp(on image: UIImage) -> UIImage? {
    let imageRect = CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height)
    UIGraphicsBeginImageContextWithOptions(image.size, true, 0.0)
    image.draw(in: imageRect)
    // Text Attributes
    let textColor = UIColor.white
    let textFont = UIFont.systemFont(ofSize: FontSize.sizeL, weight: .bold)
    let textFontAttributes = [
        NSAttributedString.Key.font: textFont,
        NSAttributedString.Key.foregroundColor: textColor,
        NSAttributedString.Key.backgroundColor: UIColor(hex: ColorConstants.black, alpha: 0.4)
    ]
    // Timestamp to add to image
    let formatter = DateFormatter()
    formatter.dateFormat = "yyyy/MM/dd HH:mm:ss:SSS"
    let timestamp: NSString = formatter.string(from: Date()) as NSString
    let textRect = CGRect(x: 6.0, y: 6.0, width: image.size.width, height: 32)
    timestamp.draw(in: textRect, withAttributes: textFontAttributes)
    // New Image
    guard let newImage = UIGraphicsGetImageFromCurrentImageContext(),
          let cgImage = newImage.cgImage else { return nil }
    UIGraphicsEndImageContext()
    let ciImage = CIImage(cgImage: cgImage)
    return UIImage(ciImage: ciImage)
}
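If you prefer to leave addTimestamp untouched, another option is to normalize the conversion at the call site. The sketch below is only an illustration against the code in the question; the helper name makeCIImage is mine, not part of your project:

import UIKit
import CoreImage

// Hypothetical helper: obtain a CIImage from a UIImage regardless of
// whether the UIImage is backed by a CIImage or a CGImage.
func makeCIImage(from image: UIImage) -> CIImage? {
    if let ciImage = image.ciImage {
        // The UIImage was created with init(ciImage:).
        return ciImage
    }
    if let cgImage = image.cgImage {
        // Typical case for images rendered via UIGraphics contexts.
        return CIImage(cgImage: cgImage)
    }
    // Fallback; returns nil when the image has no usable backing store.
    return CIImage(image: image)
}

In appendVideo, the failing guard would then become guard let timestampAddedCiImage = makeCIImage(from: timestampAddedImage) else { ... }, and the rest of your pixel-buffer and sample-buffer code stays unchanged.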