How can I capture photo and video from the same AVCaptureSession?
I'm trying to build a Snapchat-like app. From a single button I want to let the user both take a photo (on touch up inside) and record a video (on long press).
I'm using AVFoundation for this. The tricky part is that I can't get both to work properly in the same AVCaptureSession. I mean, I only have one preview layer for the two kinds of capture, so how do I trigger the right capture depending on how the user interacts with the record button? Has anyone already done something like this?
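For context, the gesture wiring itself is the easy part; I drive the two actions from one button with something like this (a minimal sketch only, where the `camera` property and `recordButton` outlet are illustrative names for a RecordCamera instance like the one below):

@IBOutlet weak var recordButton: UIButton!
var camera: RecordCamera!   // illustrative: holds the RecordCamera shown below

override func viewDidLoad() {
    super.viewDidLoad()
    // Short tap -> photo, long press -> video
    let tap = UITapGestureRecognizer(target: self, action: #selector(didTap))
    let longPress = UILongPressGestureRecognizer(target: self, action: #selector(didLongPress(_:)))
    recordButton.addGestureRecognizer(tap)
    recordButton.addGestureRecognizer(longPress)
}

func didTap() {
    camera.takePicture()
}

func didLongPress(_ recognizer: UILongPressGestureRecognizer) {
    switch recognizer.state {
    case .began:
        camera.startRecording()
    case .ended, .cancelled:
        camera.stopRecording()
    default:
        break
    }
}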
Here is a piece of my code:
import UIKit
import AVFoundation

protocol RecordCameraDelegate {
    func didSavedOutputFile(url: URL!, error: Error?)
    func didSavedImage(image: UIImage?)
}
// MARK: - Camera
class RecordCamera : NSObject {

    var videoLayer : AVCaptureVideoPreviewLayer!
    var delegate : RecordCameraDelegate!
    var capturedPhoto : UIImage?

    fileprivate var captureSession = AVCaptureSession()
    fileprivate var photoSession = AVCaptureSession()
    fileprivate var movieOutput = AVCaptureMovieFileOutput()
    fileprivate var cameraDevice : AVCaptureDevicePosition!
    fileprivate let stillImageOutput = AVCaptureStillImageOutput()

    // Devices
    fileprivate lazy var frontCameraDevice: AVCaptureDevice? = {
        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
        return devices.filter { $0.position == .front }.first
    }()

    fileprivate lazy var backCameraDevice: AVCaptureDevice? = {
        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
        return devices.filter { $0.position == .back }.first
    }()

    fileprivate lazy var micDevice: AVCaptureDevice? = {
        return AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
    }()
    fileprivate var tempFilePath: URL = {
        // Build a proper file URL; check and remove by path, not by absoluteString
        let tempURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("bighug").appendingPathExtension("mp4")
        if FileManager.default.fileExists(atPath: tempURL.path) {
            do {
                try FileManager.default.removeItem(at: tempURL)
            } catch { print("Can't remove previous temp file: \(error)") }
        }
        return tempURL
    }()
    // MARK: - Initialization
    init(view: UIView, cameraPosition: AVCaptureDevicePosition = .front) {
        super.init()
        cameraDevice = cameraPosition
        // Video
        self.configureToRecord(view: view)
        // Photo
        self.configureToCapturePhoto()
    }
    func configureToRecord(view: UIView? = nil) {
        captureSession.beginConfiguration()
        defer {
            // commit & start session
            captureSession.commitConfiguration()
            if !captureSession.isRunning { captureSession.startRunning() }
        }
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        // Start configuration
        if !captureSession.isRunning {
            // layer
            if let validView = view {
                videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                videoLayer.frame = validView.bounds
                validView.layer.addSublayer(videoLayer)
            }
            // add camera input (front or back); deviceInputFrom(device:) may return nil
            let camera = cameraDevice == .front ? frontCameraDevice : backCameraDevice
            if let cameraInput = deviceInputFrom(device: camera), captureSession.canAddInput(cameraInput) {
                captureSession.addInput(cameraInput)
            }
        }
        // add mic input
        if let micInput = deviceInputFrom(device: micDevice), captureSession.canAddInput(micInput) {
            captureSession.addInput(micInput)
        }
        // Output
        movieOutput.movieFragmentInterval = kCMTimeInvalid
        // Remove previous output
        if let existingOutput = captureSession.outputs.first as? AVCaptureOutput {
            captureSession.removeOutput(existingOutput)
        }
        // Add Movie Output
        if captureSession.canAddOutput(movieOutput) {
            captureSession.addOutput(movieOutput)
        }
    }
    func configureToCapturePhoto() {
        photoSession.beginConfiguration()
        defer { photoSession.commitConfiguration() }
        photoSession.sessionPreset = AVCaptureSessionPresetPhoto
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if #available(iOS 10.0, *) {
            // Add Photo Output
            let cameraOutput = AVCapturePhotoOutput()
            if photoSession.canAddOutput(cameraOutput) {
                photoSession.addOutput(cameraOutput)
            }
        } else {
            // Add Photo Output
            if photoSession.canAddOutput(stillImageOutput) {
                photoSession.addOutput(stillImageOutput)
            }
        }
    }
    func takePicture() {
        if #available(iOS 10.0, *) {
            guard let cameraOutput = photoSession.outputs.first as? AVCapturePhotoOutput else { return }
            // Capture Picture
            let settings = AVCapturePhotoSettings()
            guard let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first else { return }
            let previewFormat = [
                kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                kCVPixelBufferWidthKey as String: 828,
                kCVPixelBufferHeightKey as String: 828
            ]
            settings.previewPhotoFormat = previewFormat
            cameraOutput.capturePhoto(with: settings, delegate: self)
        } else {
            if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
                stillImageOutput.captureStillImageAsynchronously(from: videoConnection) { (imageDataSampleBuffer, error) -> Void in
                    let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer)
                    //UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData!)!, nil, nil, nil)
                    guard let validData = imageData else { self.delegate?.didSavedImage(image: nil); return }
                    self.capturedPhoto = UIImage(data: validData)
                }
            }
        }
    }
    // MARK: - Record Methods
    func startRecording() {
        // Take picture
        print("Camera started recording")
        self.takePicture()
        // Start recording
        movieOutput.startRecording(
            toOutputFileURL: tempFilePath,
            recordingDelegate: self
        )
    }

    func stopRecording() {
        print("Camera stopped recording")
        movieOutput.stopRecording()
    }
    // MARK: - Modes
    func cameraMode() {
        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }
        let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? []
        // Front
        if cameraDevice == .front {
            // compare by device: a freshly created input is never `contains`-equal to an existing one
            let hasFrontInput = inputs.contains { ($0 as? AVCaptureDeviceInput)?.device == frontCameraDevice }
            if !hasFrontInput, let validFrontInput = deviceInputFrom(device: frontCameraDevice) {
                captureSession.addInput(validFrontInput)
            }
        }
        // Back
        if cameraDevice == .back {
            let hasBackInput = inputs.contains { ($0 as? AVCaptureDeviceInput)?.device == backCameraDevice }
            if !hasBackInput, let validBackInput = deviceInputFrom(device: backCameraDevice) {
                captureSession.addInput(validBackInput)
            }
        }
        print("Record Camera --> Set VIDEO Mode")
    }

    func audioMode() {
        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }
        let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? []
        // Remove camera inputs, keep the mic
        for input in inputs {
            if let deviceInput = input as? AVCaptureDeviceInput,
                deviceInput.device == backCameraDevice || deviceInput.device == frontCameraDevice {
                captureSession.removeInput(deviceInput)
            }
        }
        print("Record Camera --> Set AUDIO Mode")
    }
    // MARK: - Util methods
    fileprivate func deviceInputFrom(device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
        guard let validDevice = device else { return nil }
        do {
            return try AVCaptureDeviceInput(device: validDevice)
        } catch let outError {
            print("Device setup error occurred: \(String(describing: outError))")
            return nil
        }
    }
    func swipeCamera() {
        cameraDevice = cameraDevice == .front ? .back : .front
        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }
        let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? []
        // Remove the input of the camera we are switching away from
        for input in inputs {
            if let deviceInput = input as? AVCaptureDeviceInput {
                if deviceInput.device == backCameraDevice && cameraDevice == .front {
                    captureSession.removeInput(deviceInput)
                    photoSession.removeInput(deviceInput)
                    break
                } else if deviceInput.device == frontCameraDevice && cameraDevice == .back {
                    captureSession.removeInput(deviceInput)
                    photoSession.removeInput(deviceInput)
                    break
                }
            }
        }
        // Front
        if cameraDevice == .front {
            let hasFrontInput = inputs.contains { ($0 as? AVCaptureDeviceInput)?.device == frontCameraDevice }
            if !hasFrontInput, let validFrontInput = deviceInputFrom(device: frontCameraDevice) {
                captureSession.addInput(validFrontInput)
                photoSession.addInput(validFrontInput)
                print("Record Camera --> Swipe to Front Camera")
            }
        }
        // Back
        if cameraDevice == .back {
            let hasBackInput = inputs.contains { ($0 as? AVCaptureDeviceInput)?.device == backCameraDevice }
            if !hasBackInput, let validBackInput = deviceInputFrom(device: backCameraDevice) {
                captureSession.addInput(validBackInput)
                photoSession.addInput(validBackInput)
                print("Record Camera --> Swipe to Back Camera")
            }
        }
    }
}
// MARK: - Capture Output
extension RecordCamera : AVCaptureFileOutputRecordingDelegate {

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
        // Not implemented
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        guard error == nil else {
            // Recording failed (e.g. stopped almost immediately): fall back to the still photo
            if let photo = capturedPhoto {
                delegate?.didSavedImage(image: photo)
            }
            return
        }
        delegate?.didSavedOutputFile(url: outputFileURL, error: error)
    }
}
@available(iOS 10.0, *)
extension RecordCamera : AVCapturePhotoCaptureDelegate {

    func capture(_ captureOutput: AVCapturePhotoOutput, didCapturePhotoForResolvedSettings resolvedSettings: AVCaptureResolvedPhotoSettings) {
        print("Picture taken")
    }

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        guard error == nil else {
            print("Failed Capturing Picture: \(String(describing: error!.localizedDescription))")
            capturedPhoto = nil
            //self.delegate.didSavedImage(image: nil)
            return
        }
        if let sampleBuffer = photoSampleBuffer, let previewBuffer = previewPhotoSampleBuffer,
            let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
            print("Photo Saved!")
            capturedPhoto = UIImage(data: imageData)
            //self.delegate.didSavedImage(image: image)
        }
    }
}
I built almost the same feature you need.
I created and configured a single capture session. For the video output I used the AVCaptureVideoDataOutput class, for audio the AVCaptureAudioDataOutput class, and for photos I used AVCaptureStillImageOutput.
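A minimal sketch of that single-session setup (the property names here are illustrative, and `self` is assumed to implement both sample buffer delegate protocols):

// One session feeding video, audio and still image outputs side by side
let session = AVCaptureSession()
let videoOutput = AVCaptureVideoDataOutput()
let audioOutput = AVCaptureAudioDataOutput()
let stillImageOutput = AVCaptureStillImageOutput()
let sampleBufferQueue = DispatchQueue(label: "camera.sampleBuffer")

func configureSession() {
    session.beginConfiguration()
    session.sessionPreset = AVCaptureSessionPresetHigh
    // Camera and microphone inputs
    if let camera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo),
        let cameraInput = try? AVCaptureDeviceInput(device: camera),
        session.canAddInput(cameraInput) {
        session.addInput(cameraInput)
    }
    if let mic = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio),
        let micInput = try? AVCaptureDeviceInput(device: mic),
        session.canAddInput(micInput) {
        session.addInput(micInput)
    }
    // self must conform to AVCaptureVideoDataOutputSampleBufferDelegate
    // and AVCaptureAudioDataOutputSampleBufferDelegate
    videoOutput.setSampleBufferDelegate(self, queue: sampleBufferQueue)
    audioOutput.setSampleBufferDelegate(self, queue: sampleBufferQueue)
    let outputs: [AVCaptureOutput] = [videoOutput, audioOutput, stillImageOutput]
    for output in outputs where session.canAddOutput(output) {
        session.addOutput(output)
    }
    session.commitConfiguration()
    session.startRunning()
}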
I use an AVAssetWriter to record the video and audio, because I need to perform custom video manipulation. The recording itself happens in the AVCaptureVideoDataOutputSampleBufferDelegate method.
That delegate method looks like this.
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    if !isRecordingVideo {
        return
    }
    if captureOutput == self.videoOutput {
        assetVideoWriterQueue.async {
            // Start the writer session on the first video frame
            if self.shouldStartWritingSession {
                self.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
                self.shouldStartWritingSession = false
            }
            if self.assetWriterInputCamera.isReadyForMoreMediaData {
                self.assetWriterInputCamera.append(sampleBuffer)
            }
        }
    }
    if captureOutput == self.audioOutput {
        assetAudioWriterQueue.async {
            // Only append audio once a video frame has started the writer session
            let shouldStartWritingSession = self.shouldStartWritingSession
            if self.assetWriterInputMicrophone.isReadyForMoreMediaData && shouldStartWritingSession == false {
                self.assetWriterInputMicrophone.append(sampleBuffer)
            }
            if shouldStartWritingSession {
                print("In audioOutput and CANNOT Record")
            }
        }
    }
}
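The snippet above references assetWriter, assetWriterInputCamera and assetWriterInputMicrophone without showing their setup; a rough sketch of what that setup typically looks like (the codec and format values here are illustrative, not my exact settings):

// Assumed properties, matching the delegate code above:
// var assetWriter: AVAssetWriter!
// var assetWriterInputCamera: AVAssetWriterInput!
// var assetWriterInputMicrophone: AVAssetWriterInput!
// var shouldStartWritingSession = false

func prepareAssetWriter(outputURL: URL) throws {
    assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileTypeQuickTimeMovie)

    // Video track: H.264; the dimensions are illustrative
    let videoSettings: [String : Any] = [
        AVVideoCodecKey: AVVideoCodecH264,
        AVVideoWidthKey: 1280,
        AVVideoHeightKey: 720
    ]
    assetWriterInputCamera = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
    assetWriterInputCamera.expectsMediaDataInRealTime = true

    // Audio track: AAC; the values are illustrative
    let audioSettings: [String : Any] = [
        AVFormatIDKey: kAudioFormatMPEG4AAC,
        AVNumberOfChannelsKey: 1,
        AVSampleRateKey: 44100.0
    ]
    assetWriterInputMicrophone = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
    assetWriterInputMicrophone.expectsMediaDataInRealTime = true

    if assetWriter.canAdd(assetWriterInputCamera) { assetWriter.add(assetWriterInputCamera) }
    if assetWriter.canAdd(assetWriterInputMicrophone) { assetWriter.add(assetWriterInputMicrophone) }

    // startSession(atSourceTime:) is called later, on the first video buffer
    if assetWriter.startWriting() {
        shouldStartWritingSession = true
    }
}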
My still image capture looks like this:
func captureStillImage(_ completion: @escaping ((Bool, UIImage?) -> Void)) {
    guard self.state == .running else {
        completion(false, nil)
        return
    }
    backgroundQueue.async {
        let connection = self.stillImageOutput.connection(withMediaType: AVMediaTypeVideo)
        self.stillImageOutput.captureStillImageAsynchronously(from: connection, completionHandler: { (buffer, error) in
            defer {
                self.state = .running
            }
            guard let buffer = buffer, let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer) else {
                DispatchQueue.main.async {
                    completion(false, nil)
                }
                return
            }
            let image = UIImage(data: imageData)
            DispatchQueue.main.async {
                completion(true, image)
            }
        })
    }
}
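Stopping a recording then comes down to marking the writer inputs finished and letting the writer complete asynchronously; roughly like this (again a sketch, under the same assumed properties as above):

func stopRecordingVideo(_ completion: @escaping (URL?) -> Void) {
    isRecordingVideo = false
    assetVideoWriterQueue.async {
        self.assetWriterInputCamera.markAsFinished()
        self.assetWriterInputMicrophone.markAsFinished()
        self.assetWriter.finishWriting {
            // Hand the finished file back on the main queue
            let url = self.assetWriter.status == .completed ? self.assetWriter.outputURL : nil
            DispatchQueue.main.async { completion(url) }
        }
    }
}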
You can find out more about how to use an asset writer here on Stack Overflow.
For example, you may be familiar with