Recording Video Using AVCaptureVideoDataOutput in Swift 3
We have spent quite a lot of time on this without getting anywhere, so I decided to ask here.
We are using AVCaptureVideoDataOutput to get the pixel data of the live camera feed and process it in the captureOutput callback. But we also want to record video from that same data, and we would like to know whether such a recording will be as compressed as one made with AVCaptureMovieFileOutput.
For what it is worth, recording with AVCaptureMovieFileOutput works fine for us. The problem is that AVCaptureMovieFileOutput and AVCaptureVideoDataOutput cannot be used on the same session at the same time.
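For context, a minimal session setup for this kind of pipeline looks roughly like the sketch below (identifiers such as captureSession and sampleBufferQueue are illustrative, and self is assumed to be the object that implements AVCaptureVideoDataOutputSampleBufferDelegate):

import AVFoundation

let captureSession = AVCaptureSession()
captureSession.sessionPreset = AVCaptureSessionPresetHigh

// Camera input.
if let camera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo),
   let input = try? AVCaptureDeviceInput(device: camera),
   captureSession.canAddInput(input) {
    captureSession.addInput(input)
}

// Video data output; ask for 32-bit BGRA so the pixel code below can read the buffer.
let videoDataOutput = AVCaptureVideoDataOutput()
videoDataOutput.videoSettings =
    [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA)]
videoDataOutput.alwaysDiscardsLateVideoFrames = true

let sampleBufferQueue = DispatchQueue(label: "sampleBufferQueue")
videoDataOutput.setSampleBufferDelegate(self, queue: sampleBufferQueue)

if captureSession.canAddOutput(videoDataOutput) {
    captureSession.addOutput(videoDataOutput)
}
captureSession.startRunning()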
You can find our captureOutput function below:
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    // Lock the pixel buffer so its memory can be read safely.
    let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
    CVPixelBufferLockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))

    let baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
    let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
    videoWidth = CVPixelBufferGetWidth(imageBuffer)
    videoHeight = CVPixelBufferGetHeight(imageBuffer)

    // Wrap the raw buffer in a CGContext to get at the pixel bytes
    // (this assumes a 32-bit RGBA/BGRA pixel format on the output).
    let colorSpace = CGColorSpaceCreateDeviceRGB()
    let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedLast.rawValue)
    let context = CGContext(data: baseAddress, width: videoWidth, height: videoHeight,
                            bitsPerComponent: 8, bytesPerRow: bytesPerRow,
                            space: colorSpace, bitmapInfo: bitmapInfo.rawValue)
    let imageRef = context!.makeImage()

    CVPixelBufferUnlockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))

    let data = imageRef!.dataProvider!.data as! NSData
    let pixels = data.bytes.assumingMemoryBound(to: UInt8.self)

    /* What we do with the pixel data is irrelevant to the question, so the rest of the code is omitted to keep it simple. */
}
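As a side note, when the data output delivers kCVPixelFormatType_32BGRA, individual pixels can also be read straight from the locked base address instead of going through a CGImage. A small helper along those lines (pixelAt is a hypothetical function, not part of our code):

// Reads the colour components of one pixel; assumes a 32-bit BGRA buffer that
// the caller has already locked with CVPixelBufferLockBaseAddress.
func pixelAt(x: Int, y: Int, in pixelBuffer: CVPixelBuffer) -> (r: UInt8, g: UInt8, b: UInt8)? {
    guard let base = CVPixelBufferGetBaseAddress(pixelBuffer) else { return nil }
    let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
    let bytes = base.assumingMemoryBound(to: UInt8.self)
    let offset = y * bytesPerRow + x * 4          // 4 bytes per BGRA pixel
    return (r: bytes[offset + 2], g: bytes[offset + 1], b: bytes[offset])
}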
After living with this for a while, I figured out how to record video while still getting the pixel data for some basic analysis of the live feed.
First I set up the AVAssetWriter, calling the function below before actually telling it to record.
var sampleBufferGlobal: CMSampleBuffer?
let writerFileName = "tempVideoAsset.mov"
var presentationTime: CMTime!
var outputSettings = [String: Any]()
var videoWriterInput: AVAssetWriterInput!
var assetWriter: AVAssetWriter!

func setupAssetWriter() {
    // Remove any leftover file from a previous recording.
    eraseFile(fileToErase: writerFileName)

    // Use the timestamp of the most recent sample buffer as the session start time.
    presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBufferGlobal!)

    // H.264 output at the dimensions reported by captureOutput.
    outputSettings = [AVVideoCodecKey: AVVideoCodecH264,
                      AVVideoWidthKey: NSNumber(value: Float(videoWidth)),
                      AVVideoHeightKey: NSNumber(value: Float(videoHeight))] as [String: Any]

    videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)

    assetWriter = try? AVAssetWriter(outputURL: createFileURL(writerFileName), fileType: AVFileTypeQuickTimeMovie)
    assetWriter.add(videoWriterInput)
}
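One small addition worth making when the writer input is fed from a capture callback is expectsMediaDataInRealTime. Also, since outputSettings specifies AVVideoCodecH264, the written file is H.264-compressed, much like a recording made with AVCaptureMovieFileOutput. A minimal sketch (this line is not in the function above; it would go right after videoWriterInput is created):

// Inside setupAssetWriter, after creating videoWriterInput (sketch):
videoWriterInput.expectsMediaDataInRealTime = true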
I wrote another function to do the recording. In captureOutput I copy the sample buffer into sampleBufferGlobal (sampleBufferGlobal = sampleBuffer) and then call this function to record.
func writeVideoFromData() {
    // On the first call, start the writer and open a session at the first
    // buffer's presentation time.
    if assetWriter?.status == AVAssetWriterStatus.unknown {
        if assetWriter != nil {
            assetWriter?.startWriting()
            assetWriter?.startSession(atSourceTime: presentationTime)
        }
    }

    // On subsequent calls, append the latest sample buffer when the input can take it.
    if assetWriter?.status == AVAssetWriterStatus.writing {
        if videoWriterInput.isReadyForMoreMediaData {
            if videoWriterInput.append(sampleBufferGlobal!) == false {
                print("we have a problem writing video")
            }
        }
    }
}
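For clarity, the hook in captureOutput looks roughly like this (the isRecording flag is only for illustration and is not part of the code above):

// At the end of captureOutput (sketch):
sampleBufferGlobal = sampleBuffer
if isRecording {                      // hypothetical flag toggled by the record button
    writeVideoFromData()
}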
Then, to stop recording, I used the following function.
func stopAssetWriter() {
    // Stop accepting new buffers, then let the writer finish the file asynchronously.
    videoWriterInput.markAsFinished()
    assetWriter?.finishWriting(completionHandler: {
        if self.assetWriter?.status == AVAssetWriterStatus.failed {
            print("creating movie file failed")
        } else {
            print("creating movie file was a success")
            DispatchQueue.main.async(execute: { () -> Void in
                // Update the UI here if needed.
            })
        }
    })
}
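So the overall call order is: setupAssetWriter() once a first frame has arrived (so that sampleBufferGlobal, videoWidth and videoHeight are valid), writeVideoFromData() for every frame while recording, and stopAssetWriter() when recording ends. A condensed sketch of that flow (startRecording, stopRecording and isRecording are illustrative wrappers, not from the code above):

// Illustrative wrappers around the functions above (sketch):
func startRecording() {
    setupAssetWriter()                // requires sampleBufferGlobal and the dimensions to be set
    isRecording = true                // hypothetical flag checked in captureOutput
}

func stopRecording() {
    isRecording = false
    stopAssetWriter()                 // the finished file is at createFileURL(writerFileName)
}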