Image capture is slower than the flash when capturing photos on iPhone using AVFoundation
We are building a camera app with flash, rear camera, and photo capture using current Swift and the AVFoundation API. The code needs to support iOS 10 and above.
The problem we are running into is that the flash fires but its light is not captured in the photo (essentially the flash fires slightly before the photo is captured, or the capture lags slightly behind the flash), which makes our flash feature useless.
Here is our camera capture code:
//Function to capture the image from the camera session -> this gets called from the ViewController Outlet action OnCapture
func capture() throws {
    guard captureSession.isRunning else {
        throw CameraRuntimeError.captureSessionIsMissing
    }
    let settings = AVCapturePhotoSettings()
    if getCurrentCamera().isFlashAvailable {
        settings.flashMode = self.flashMode
    }
    self.photoOutput?.capturePhoto(with: settings, delegate: self)
}
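For context, the outlet action mentioned in the comment calls capture() roughly like this (a simplified sketch only; the instance name cameraFunctions and the error handling are placeholders, not our real ViewController code):

import UIKit

class ViewController: UIViewController {
    // Assumption: CameraFunctions owns the capture session and photo output
    let cameraFunctions = CameraFunctions()

    @IBAction func OnCapture(_ sender: Any) {
        do {
            try cameraFunctions.capture()
        } catch {
            // Placeholder error handling; the real app reports this differently
            print("Capture failed: \(error)")
        }
    }
}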
And here is the delegate:
extension CameraFunctions: AVCapturePhotoCaptureDelegate {
    private static let failedToConvertToJPEGErrorCode = "JPEGERROR"
    private static let failedToCaptureImage = "CAMERROR"

    // Pre-iOS 11 callback (deprecated in iOS 11); we use it because we still support iOS 10
    public func photoOutput(_ captureOutput: AVCapturePhotoOutput,
                            didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?,
                            previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?,
                            resolvedSettings: AVCaptureResolvedPhotoSettings,
                            bracketSettings: AVCaptureBracketedStillImageSettings?, error: Swift.Error?) {
        if error != nil {
            onPhotoCaptured(StringResult(error: ServicesError(CameraFunctions.failedToCaptureImage, error!.localizedDescription)))
        }
        if let buffer = photoSampleBuffer, let data = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: buffer, previewPhotoSampleBuffer: nil) {
            let encodedString = //DO ENCODING OF THE PHOTO
            onPhotoCaptured(encodedString)
        } else {
            onPhotoCaptured(StringResult(error: ServicesError(CameraFunctions.failedToConvertToJPEGErrorCode, CameraRuntimeError.failedToConvertImageToJPEG.localizedDescription)))
        }
        closeCaptureSession()
    }
}
onPhotoCaptured lives in the ViewController.
Please let us know if we are doing something wrong.
Setting a prepared photo settings array on the photo output solved the problem:
func capture(_ delegate: AVCapturePhotoCaptureDelegate, _ onError: @escaping (Error) -> Void) throws {
    guard captureSession.isRunning else {
        throw CameraRuntimeError.captureSessionIsMissing
    }
    let settings: AVCapturePhotoSettings
    if #available(iOS 11.0, *) {
        settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        settings.isAutoStillImageStabilizationEnabled = true
    } else {
        settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])
    }
    if getCurrentCamera().isFlashAvailable {
        settings.flashMode = self.flashMode
    }
    //This statement did the magic
    self.photoOutput?.setPreparedPhotoSettingsArray([settings]) { (suc: Bool, err: Error?) -> Void in
        if suc {
            self.photoOutput?.capturePhoto(with: settings, delegate: delegate)
        }
        if err != nil {
            onError(err!)
        }
    }
}
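Our understanding of why this works: setPreparedPhotoSettingsArray(_:completionHandler:) asks the photo output to allocate and prepare the capture pipeline for those settings ahead of time, so by the time capturePhoto(with:delegate:) runs there is no remaining setup delay to push the exposure behind the flash.

If the deployment target ever moves to iOS 11+, the newer AVCapturePhoto-based delegate callback can replace the deprecated sample-buffer one shown in the question. A minimal sketch only; the base64 encoding and the reuse of our error codes are placeholders, not a drop-in for the real app:

// Assumes this sits in the same file as the delegate extension above,
// so the private error code constants remain visible.
@available(iOS 11.0, *)
extension CameraFunctions {
    public func photoOutput(_ output: AVCapturePhotoOutput,
                            didFinishProcessingPhoto photo: AVCapturePhoto,
                            error: Error?) {
        if let error = error {
            onPhotoCaptured(StringResult(error: ServicesError(CameraFunctions.failedToCaptureImage, error.localizedDescription)))
        } else if let data = photo.fileDataRepresentation() {
            // Placeholder encoding; substitute whatever encoding the app actually needs
            onPhotoCaptured(data.base64EncodedString())
        } else {
            onPhotoCaptured(StringResult(error: ServicesError(CameraFunctions.failedToConvertToJPEGErrorCode, CameraRuntimeError.failedToConvertImageToJPEG.localizedDescription)))
        }
        closeCaptureSession()
    }
}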