iOS Swift VideoToolBox 解压缩帧 EXC_BAD_ADDRESS 错误
iOS Swift VideoToolBox decompress frame EXC_BAD_ADDRESS error
我试图将 CMSampleBuffer 转换为数据,以便通过网络传输并在接收端重建其副本。
我已经通过 VTCompressionSession 用 H.264 压缩了相机捕获的 CMSampleBuffer。
现在我构建了 CMSampleBuffer,创建了 VTDecompressionSession,但是在调用 VTDecompressionSessionDecodeFrame() 时出现内存错误,我不知道如何修复。
我在互联网上看到很多 h264 示例,但它们都使用旧的 Objective-C 语言。
我正在使用 Xcode 11.3 Swift 5.1 并已部署到 iOS 设备 运行 iOS 12.1.
// Extract the H.264 parameter sets (SPS at index 0, PPS at index 1)
// from the last encoded sample's format description.
// (Fixed transcription mojibake: "¶meterSetCount" was the HTML entity
// rendering of "&parameterSetCount".)
var sps, pps: UnsafePointer<UInt8>?
var spsSize = 0, ppsSize: Int = 0
var parameterSetCount = 0
var nalUnitHeaderLength: Int32 = 0
CMVideoFormatDescriptionGetH264ParameterSetAtIndex(CMSampleBufferGetFormatDescription(encodedSamples.last!)!, parameterSetIndex: 0, parameterSetPointerOut: &sps, parameterSetSizeOut: &spsSize, parameterSetCountOut: &parameterSetCount, nalUnitHeaderLengthOut: &nalUnitHeaderLength)
CMVideoFormatDescriptionGetH264ParameterSetAtIndex(CMSampleBufferGetFormatDescription(encodedSamples.last!)!, parameterSetIndex: 1, parameterSetPointerOut: &pps, parameterSetSizeOut: &ppsSize, parameterSetCountOut: &parameterSetCount, nalUnitHeaderLengthOut: &nalUnitHeaderLength)

// Copy the encoded frame's bytes out of the sample and into a new CMBlockBuffer.
let dataBuffer: CMBlockBuffer = {
    var buffer: CMBlockBuffer?
    let bufferData: Data = {
        let blockBuffer = CMSampleBufferGetDataBuffer(encodedSamples.last!)!
        var totalLength: Int = 0
        var data: UnsafeMutablePointer<Int8>?
        CMBlockBufferGetDataPointer(blockBuffer, atOffset: 0, lengthAtOffsetOut: nil, totalLengthOut: &totalLength, dataPointerOut: &data)
        return Data(bytes: data!, count: totalLength)
    }()
    var bbd = Data()
    bbd.append(bufferData)
    // NOTE(review): `&bbd` passes a pointer to the Data *struct* itself, not to
    // its byte storage — this is the root cause of the EXC_BAD_ADDRESS below
    // (see the accepted answer).
    let status = CMBlockBufferCreateWithMemoryBlock(allocator: kCFAllocatorDefault, memoryBlock: &bbd, blockLength: bbd.count, blockAllocator: kCFAllocatorDefault, customBlockSource: nil, offsetToData: 0, dataLength: bbd.count, flags: 0, blockBufferOut: &buffer)
    if status != kCMBlockBufferNoErr { print(status) }
    return buffer!
}()

// Rebuild a video format description from the SPS/PPS pointers.
let formatDes: CMFormatDescription = {
    let dataParamArray = [sps!, pps!]
    // NOTE(review): initializing an UnsafePointer from an Array yields a pointer
    // that is not guaranteed to stay valid past this expression — verify.
    let parameterSetPointers = UnsafePointer<UnsafePointer<UInt8>>(dataParamArray)
    var formatDescription: CMFormatDescription?
    CMVideoFormatDescriptionCreateFromH264ParameterSets(allocator: kCFAllocatorDefault, parameterSetCount: parameterSetCount, parameterSetPointers: parameterSetPointers, parameterSetSizes: UnsafePointer<Int>([spsSize, ppsSize]), nalUnitHeaderLength: nalUnitHeaderLength, formatDescriptionOut: &formatDescription)
    return formatDescription!
}()

// Copy the first timing entry from the encoded sample.
var timingInfo: CMSampleTimingInfo = {
    var t = CMSampleTimingInfo()
    CMSampleBufferGetSampleTimingInfoArray(encodedSamples.last!, entryCount: 1, arrayToFill: &t, entriesNeededOut: nil)
    return t
}()

// Wrap block buffer + format description + timing into a new CMSampleBuffer.
var sampleSize = CMBlockBufferGetDataLength(dataBuffer)
var sampleBuffer: CMSampleBuffer!
let status = CMSampleBufferCreate(allocator: kCFAllocatorDefault, dataBuffer: dataBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: formatDes, sampleCount: 1, sampleTimingEntryCount: 1, sampleTimingArray: &timingInfo, sampleSizeEntryCount: 1, sampleSizeArray: &sampleSize, sampleBufferOut: &sampleBuffer)
// NOTE(review): this mutates a bridged Swift *copy* of the attachments array,
// so the DependsOnOthers flag is likely never written back to the sample — verify.
var attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: true) as! Array<Dictionary<String, Any>>
attachmentsArray[0] = [kCMSampleAttachmentKey_DependsOnOthers as String: false]
if status != noErr { print(status) }

if #available(iOS 9, *) {
    // Create a decompression session that emits BGRA pixel buffers.
    var session: VTDecompressionSession?
    VTDecompressionSessionCreate(allocator: kCFAllocatorDefault, formatDescription: formatDes, decoderSpecification: nil, imageBufferAttributes: [kCVPixelBufferOpenGLESCompatibilityKey: true, kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA] as CFDictionary, outputCallback: nil, decompressionSessionOut: &session)
    print(VTDecompressionSessionCanAcceptFormatDescription(session!, formatDescription: formatDes))
    var info = VTDecodeInfoFlags()
    let flags = VTDecodeFrameFlags._EnableAsynchronousDecompression
    let status = VTDecompressionSessionDecodeFrame(session!, sampleBuffer: sampleBuffer, flags: flags, infoFlagsOut: &info) { status, infoFlags, imageBuffer, presentationTimeStamp, presentationDuration in
        print(imageBuffer!)
    } // EXC_BAD_ADDRESS HERE!!
    print(status, info)
    VTDecompressionSessionInvalidate(session!)
}
我找到问题了。 CMBlockBufferCreateWithMemoryBlock() 的参数 memoryBlock 不能使用 '&Data' 作为指针。
// Build a CMBlockBuffer that *owns* a copy of the encoded frame's bytes.
//
// Fix over the original answer: passing `(bbd as NSData).bytes` together with
// `blockAllocator: kCFAllocatorDefault` makes the block buffer deallocate
// memory it does not own when it is released, and the pointer can dangle once
// `bbd` goes away. Instead, let CoreMedia allocate the backing memory
// (memoryBlock: nil) and copy the bytes in with CMBlockBufferReplaceDataBytes.
let dataBuffer: CMBlockBuffer = {
    var buffer: CMBlockBuffer?
    let bufferData: Data = {
        let blockBuffer = CMSampleBufferGetDataBuffer(encodedSamples.last!)!
        var totalLength: Int = 0
        var data: UnsafeMutablePointer<Int8>?
        CMBlockBufferGetDataPointer(blockBuffer, atOffset: 0, lengthAtOffsetOut: nil, totalLengthOut: &totalLength, dataPointerOut: &data)
        return Data(bytes: data!, count: totalLength)
    }()
    var bbd = Data()
    bbd.append(bufferData)
    // memoryBlock: nil — CoreMedia allocates (and later frees) the block itself.
    let status = CMBlockBufferCreateWithMemoryBlock(allocator: kCFAllocatorDefault, memoryBlock: nil, blockLength: bbd.count, blockAllocator: kCFAllocatorDefault, customBlockSource: nil, offsetToData: 0, dataLength: bbd.count, flags: kCMBlockBufferAssureMemoryNowFlag, blockBufferOut: &buffer)
    if status != kCMBlockBufferNoErr { print(status) }
    // Copy the frame bytes into the buffer-owned memory.
    bbd.withUnsafeBytes { raw in
        _ = CMBlockBufferReplaceDataBytes(with: raw.baseAddress!, blockBuffer: buffer!, offsetIntoDestination: 0, dataLength: bbd.count)
    }
    return buffer!
}()
这是 Swift 语言中的一个非常罕见的问题。在尝试打印我创建的 CMBlockBuffer 中的数据后,我终于通过使用 UnsafeMutableRawPointer((bbd as NSData).bytes) 解决了这个问题。
我试图将 CMSampleBuffer 转换为数据,以便通过网络传输并在接收端重建其副本。
我已经通过 VTCompressionSession 用 H.264 压缩了相机捕获的 CMSampleBuffer。
现在我构建了 CMSampleBuffer,创建了 VTDecompressionSession,但是在调用 VTDecompressionSessionDecodeFrame() 时出现内存错误,我不知道如何修复。
我在互联网上看到很多 h264 示例,但它们都使用旧的 Objective-C 语言。
我正在使用 Xcode 11.3 Swift 5.1 并已部署到 iOS 设备 运行 iOS 12.1.
// Extract the H.264 parameter sets (SPS at index 0, PPS at index 1)
// from the last encoded sample's format description.
// (Fixed transcription mojibake: "¶meterSetCount" was the HTML entity
// rendering of "&parameterSetCount".)
var sps, pps: UnsafePointer<UInt8>?
var spsSize = 0, ppsSize: Int = 0
var parameterSetCount = 0
var nalUnitHeaderLength: Int32 = 0
CMVideoFormatDescriptionGetH264ParameterSetAtIndex(CMSampleBufferGetFormatDescription(encodedSamples.last!)!, parameterSetIndex: 0, parameterSetPointerOut: &sps, parameterSetSizeOut: &spsSize, parameterSetCountOut: &parameterSetCount, nalUnitHeaderLengthOut: &nalUnitHeaderLength)
CMVideoFormatDescriptionGetH264ParameterSetAtIndex(CMSampleBufferGetFormatDescription(encodedSamples.last!)!, parameterSetIndex: 1, parameterSetPointerOut: &pps, parameterSetSizeOut: &ppsSize, parameterSetCountOut: &parameterSetCount, nalUnitHeaderLengthOut: &nalUnitHeaderLength)

// Copy the encoded frame's bytes out of the sample and into a new CMBlockBuffer.
let dataBuffer: CMBlockBuffer = {
    var buffer: CMBlockBuffer?
    let bufferData: Data = {
        let blockBuffer = CMSampleBufferGetDataBuffer(encodedSamples.last!)!
        var totalLength: Int = 0
        var data: UnsafeMutablePointer<Int8>?
        CMBlockBufferGetDataPointer(blockBuffer, atOffset: 0, lengthAtOffsetOut: nil, totalLengthOut: &totalLength, dataPointerOut: &data)
        return Data(bytes: data!, count: totalLength)
    }()
    var bbd = Data()
    bbd.append(bufferData)
    // NOTE(review): `&bbd` passes a pointer to the Data *struct* itself, not to
    // its byte storage — this is the root cause of the EXC_BAD_ADDRESS below
    // (see the accepted answer).
    let status = CMBlockBufferCreateWithMemoryBlock(allocator: kCFAllocatorDefault, memoryBlock: &bbd, blockLength: bbd.count, blockAllocator: kCFAllocatorDefault, customBlockSource: nil, offsetToData: 0, dataLength: bbd.count, flags: 0, blockBufferOut: &buffer)
    if status != kCMBlockBufferNoErr { print(status) }
    return buffer!
}()

// Rebuild a video format description from the SPS/PPS pointers.
let formatDes: CMFormatDescription = {
    let dataParamArray = [sps!, pps!]
    // NOTE(review): initializing an UnsafePointer from an Array yields a pointer
    // that is not guaranteed to stay valid past this expression — verify.
    let parameterSetPointers = UnsafePointer<UnsafePointer<UInt8>>(dataParamArray)
    var formatDescription: CMFormatDescription?
    CMVideoFormatDescriptionCreateFromH264ParameterSets(allocator: kCFAllocatorDefault, parameterSetCount: parameterSetCount, parameterSetPointers: parameterSetPointers, parameterSetSizes: UnsafePointer<Int>([spsSize, ppsSize]), nalUnitHeaderLength: nalUnitHeaderLength, formatDescriptionOut: &formatDescription)
    return formatDescription!
}()

// Copy the first timing entry from the encoded sample.
var timingInfo: CMSampleTimingInfo = {
    var t = CMSampleTimingInfo()
    CMSampleBufferGetSampleTimingInfoArray(encodedSamples.last!, entryCount: 1, arrayToFill: &t, entriesNeededOut: nil)
    return t
}()

// Wrap block buffer + format description + timing into a new CMSampleBuffer.
var sampleSize = CMBlockBufferGetDataLength(dataBuffer)
var sampleBuffer: CMSampleBuffer!
let status = CMSampleBufferCreate(allocator: kCFAllocatorDefault, dataBuffer: dataBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: formatDes, sampleCount: 1, sampleTimingEntryCount: 1, sampleTimingArray: &timingInfo, sampleSizeEntryCount: 1, sampleSizeArray: &sampleSize, sampleBufferOut: &sampleBuffer)
// NOTE(review): this mutates a bridged Swift *copy* of the attachments array,
// so the DependsOnOthers flag is likely never written back to the sample — verify.
var attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: true) as! Array<Dictionary<String, Any>>
attachmentsArray[0] = [kCMSampleAttachmentKey_DependsOnOthers as String: false]
if status != noErr { print(status) }

if #available(iOS 9, *) {
    // Create a decompression session that emits BGRA pixel buffers.
    var session: VTDecompressionSession?
    VTDecompressionSessionCreate(allocator: kCFAllocatorDefault, formatDescription: formatDes, decoderSpecification: nil, imageBufferAttributes: [kCVPixelBufferOpenGLESCompatibilityKey: true, kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA] as CFDictionary, outputCallback: nil, decompressionSessionOut: &session)
    print(VTDecompressionSessionCanAcceptFormatDescription(session!, formatDescription: formatDes))
    var info = VTDecodeInfoFlags()
    let flags = VTDecodeFrameFlags._EnableAsynchronousDecompression
    let status = VTDecompressionSessionDecodeFrame(session!, sampleBuffer: sampleBuffer, flags: flags, infoFlagsOut: &info) { status, infoFlags, imageBuffer, presentationTimeStamp, presentationDuration in
        print(imageBuffer!)
    } // EXC_BAD_ADDRESS HERE!!
    print(status, info)
    VTDecompressionSessionInvalidate(session!)
}
我找到问题了。 CMBlockBufferCreateWithMemoryBlock() 的参数 memoryBlock 不能使用 '&Data' 作为指针。
// Build a CMBlockBuffer that *owns* a copy of the encoded frame's bytes.
//
// Fix over the original answer: passing `(bbd as NSData).bytes` together with
// `blockAllocator: kCFAllocatorDefault` makes the block buffer deallocate
// memory it does not own when it is released, and the pointer can dangle once
// `bbd` goes away. Instead, let CoreMedia allocate the backing memory
// (memoryBlock: nil) and copy the bytes in with CMBlockBufferReplaceDataBytes.
let dataBuffer: CMBlockBuffer = {
    var buffer: CMBlockBuffer?
    let bufferData: Data = {
        let blockBuffer = CMSampleBufferGetDataBuffer(encodedSamples.last!)!
        var totalLength: Int = 0
        var data: UnsafeMutablePointer<Int8>?
        CMBlockBufferGetDataPointer(blockBuffer, atOffset: 0, lengthAtOffsetOut: nil, totalLengthOut: &totalLength, dataPointerOut: &data)
        return Data(bytes: data!, count: totalLength)
    }()
    var bbd = Data()
    bbd.append(bufferData)
    // memoryBlock: nil — CoreMedia allocates (and later frees) the block itself.
    let status = CMBlockBufferCreateWithMemoryBlock(allocator: kCFAllocatorDefault, memoryBlock: nil, blockLength: bbd.count, blockAllocator: kCFAllocatorDefault, customBlockSource: nil, offsetToData: 0, dataLength: bbd.count, flags: kCMBlockBufferAssureMemoryNowFlag, blockBufferOut: &buffer)
    if status != kCMBlockBufferNoErr { print(status) }
    // Copy the frame bytes into the buffer-owned memory.
    bbd.withUnsafeBytes { raw in
        _ = CMBlockBufferReplaceDataBytes(with: raw.baseAddress!, blockBuffer: buffer!, offsetIntoDestination: 0, dataLength: bbd.count)
    }
    return buffer!
}()
这是 Swift 语言中的一个非常罕见的问题。在尝试打印我创建的 CMBlockBuffer 中的数据后,我终于通过使用 UnsafeMutableRawPointer((bbd as NSData).bytes) 解决了这个问题。