SwiftUI AVCapturePhotoOutput Does Not Work

I have a simple SwiftUI app (SwiftUI life cycle) in which I'm trying to take a photo automatically using AVFoundation. Eventually I'll do this based on a condition or a timer, but for this example I just want to take a photo at launch and display it (not a preview layer). No user action should be required.

I clearly don't understand the correct setup and capture sequence.

import SwiftUI
import AVFoundation

struct ContentView: View {

    let dataStore = DataStore.shared

    @State private var captureSession = AVCaptureSession()
    @State private var backCamera : AVCaptureDevice?
    @State private var frontCamera : AVCaptureDevice?

    @State private var currentCamera : AVCaptureDevice?
    @State private var photoOutput : AVCapturePhotoOutput?

    @State private var capturedImage: UIImage?

    var body: some View {
        VStack {
            Text("Take a Photo Automatically")
                .padding()

            ZStack {
                RoundedRectangle(cornerRadius: 0)
                    .stroke(Color.blue, lineWidth: 4)
                    .frame(width: 320, height: 240, alignment: .center)

                Image(uiImage: dataStore.capturedImage)
            }
            
            Spacer()
        }
        .onAppear {
        
            if UIImagePickerController.isSourceTypeAvailable(.camera) {
                self.setupCaptureSession()
                self.setupDevices()
                self.setupInputOutput()
                self.startRunningCaptureSession()
            } else {
                print("No Camera is Available")
            }
        }
    }

    func setupCaptureSession() {
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
    }//setupCaptureSession

    func setupDevices() {
    
        let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: .video, position: .unspecified)
    
        let devices = deviceDiscoverySession.devices
        for device in devices {
            if device.position == AVCaptureDevice.Position.back {
                backCamera = device
            } else if device.position == AVCaptureDevice.Position.front {
                frontCamera = device
            }//if else
        }//for in
    
        currentCamera = frontCamera
    
    }//setupDevices

    func setupInputOutput() {
    
        do {
            //you only get here if there is a camera ( ! ok )
            let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
            captureSession.addInput(captureDeviceInput)
            photoOutput = AVCapturePhotoOutput()
            photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: {(success, error) in
            })
            captureSession.addOutput(photoOutput!)
            captureSession.commitConfiguration()

        } catch {
            print("Error creating AVCaptureDeviceInput:", error)
        }
    
    }//setupInputOutput

    func startRunningCaptureSession() {
        let settings = AVCapturePhotoSettings()
    
        captureSession.startRunning()
        photoOutput?.capturePhoto(with: settings, delegate: PhotoDelegate())
    
    }//startRunningCaptureSession

}//struct

class PhotoDelegate: NSObject, AVCapturePhotoCaptureDelegate {

    let dataStore = DataStore.shared

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {

        guard let data = photo.fileDataRepresentation(),
              let image = UIImage(data: data) else {
                return
        }

        dataStore.capturedImage = image
    }
}//photo delegate

class DataStore {
    static let shared = DataStore()
    private init() {}

    @Published var capturedImage: UIImage = UIImage()
}//dataStore
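
One thing worth checking regardless of the capture logic (my note, not something from the question): the target's Info.plist needs an NSCameraUsageDescription entry (the app is terminated on first camera access without one), and the user must have granted camera access, or the capture produces nothing. A sketch of an up-front check; the helper name is mine:

import AVFoundation

// Sketch: confirm camera authorization before touching the session.
// Requires an NSCameraUsageDescription entry in the target's Info.plist.
func withCameraPermission(_ proceed: @escaping () -> Void) {
    switch AVCaptureDevice.authorizationStatus(for: .video) {
    case .authorized:
        proceed()
    case .notDetermined:
        AVCaptureDevice.requestAccess(for: .video) { granted in
            if granted { DispatchQueue.main.async(execute: proceed) }
        }
    default:
        print("Camera access denied or restricted")
    }
}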

Any guidance would be appreciated. Xcode 12.5.1, iOS 14.5.

Second attempt, adding an example:

Later edit: after correcting my approach with the changes Philip Dukhov supplied, I still had a problem - the image was still so dark that objects in it were unrecognizable. After many tries, it appears the camera needs a very short settling time before capture. Although it doesn't seem like a good programming plan, I added a slight delay before the capture. As little as 0.1 seconds appears to be enough. The capture now starts like this:

func startRunningCaptureSession() {
    let settings = AVCapturePhotoSettings()
    captureSession.startRunning()

    // Don't know why this is needed - but it works. In a small number of
    // tests, delays as short as 0.1 s all worked; 0.2 s leaves some margin.
    DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
        self.photoOutput?.capturePhoto(with: settings, delegate: self)
    }
            
}//startRunningCaptureSession
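
A possible alternative to the fixed delay (an untested sketch of mine, assuming the capture code lives in a class that is also the capture delegate, as in the corrected version above): trigger the capture from the AVCaptureSessionDidStartRunning notification that AVFoundation posts once startRunning() has taken effect. Exposure may still need a moment to settle after that, so this may not remove the need for a delay entirely:

var sessionObserver: NSObjectProtocol?

func startRunningCaptureSession() {
    // Capture when the session reports it is running, instead of after
    // an arbitrary delay. Keep the token so the observer can be removed.
    sessionObserver = NotificationCenter.default.addObserver(
        forName: .AVCaptureSessionDidStartRunning,
        object: captureSession,
        queue: .main
    ) { [weak self] _ in
        guard let self = self else { return }
        self.photoOutput?.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
    }
    captureSession.startRunning()
}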

If you know a better way - please let me know.

The main problem is that you create a PhotoDelegate but never store it. In iOS, a delegate is usually held as a weak reference to prevent a retain cycle, so your delegate is deallocated as soon as startRunningCaptureSession() returns, and the photo callback is never delivered.
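
For illustration, the smallest possible fix is to keep a strong reference so the delegate survives until the capture callback arrives (a sketch against the question's code; the property name is mine):

// Sketch: store the delegate so it is not deallocated immediately.
@State private var photoDelegate = PhotoDelegate()

func startRunningCaptureSession() {
    let settings = AVCapturePhotoSettings()
    captureSession.startRunning()
    // Pass the stored delegate instead of a throwaway PhotoDelegate().
    photoOutput?.capturePhoto(with: settings, delegate: photoDelegate)
}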

Storing the delegate in another property of the view, as sketched above, is enough to fix it, but I suggest creating a model class instead. When you find yourself doing work that has nothing to do with the view itself, that's a sign it belongs elsewhere, for example in an ObservableObject. That object can also serve as the capture delegate, so you no longer need a separate delegate object or the DataStore singleton - reaching for a singleton here was another sign that something was off.

class CaptureModel: NSObject, ObservableObject {
    let captureSession = AVCaptureSession()
    var backCamera: AVCaptureDevice?
    var frontCamera: AVCaptureDevice?
    var photoOutput: AVCapturePhotoOutput?
    var currentCamera: AVCaptureDevice?
    @Published
    var capturedImage: UIImage?

    override init() {
        super.init()
        setupCaptureSession()
        setupDevices()
        setupInputOutput()
    }

    func setupCaptureSession() {
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
    }//setupCaptureSession

    func setupDevices() {

        let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: .video, position: .unspecified)

        let devices = deviceDiscoverySession.devices
        for device in devices {
            if device.position == AVCaptureDevice.Position.back {
                backCamera = device
            } else if device.position == AVCaptureDevice.Position.front {
                frontCamera = device
            }//if else
        }//for in

        currentCamera = frontCamera

    }//setupDevices

    func setupInputOutput() {

        do {
            //you only get here if there is a camera ( ! ok )
            let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
            // beginConfiguration()/commitConfiguration() must be paired;
            // batch the input/output changes between them.
            captureSession.beginConfiguration()
            captureSession.addInput(captureDeviceInput)
            photoOutput = AVCapturePhotoOutput()
            photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: {(success, error) in
            })
            captureSession.addOutput(photoOutput!)
            captureSession.commitConfiguration()

        } catch {
            print("Error creating AVCaptureDeviceInput:", error)
        }

    }//setupInputOutput

    func startRunningCaptureSession() {
        let settings = AVCapturePhotoSettings()

        captureSession.startRunning()
        photoOutput?.capturePhoto(with: settings, delegate: self)
    }//startRunningCaptureSession

    func stopRunningCaptureSession() {
        captureSession.stopRunning()
    }//stopRunningCaptureSession
}

extension CaptureModel: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard let data = photo.fileDataRepresentation(),
              let image = UIImage(data: data) else {
            return
        }
        // Delegate callbacks are not guaranteed to arrive on the main
        // queue, so publish to SwiftUI from the main queue.
        DispatchQueue.main.async {
            self.capturedImage = image
        }
    }
}

struct ContentView: View {
    @StateObject
    var model = CaptureModel()

    var body: some View {
        VStack {
            Text("Take a Photo Automatically")
                .padding()

            ZStack {
                RoundedRectangle(cornerRadius: 0)
                    .stroke(Color.blue, lineWidth: 4)
                    .frame(width: 320, height: 240, alignment: .center)

                model.capturedImage.map { capturedImage in
                    Image(uiImage: capturedImage)
                }
            }

            Spacer()
        }
            .onAppear {
                if UIImagePickerController.isSourceTypeAvailable(.camera) {
                    model.startRunningCaptureSession()
                } else {
                    print("No Camera is Available")
                }
            }
            .onDisappear {
                model.stopRunningCaptureSession()
            }
    }
}//struct
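
One further note (mine, not part of the original answer): startRunning() is a blocking call, and Apple's guidance is to call it off the main thread so the UI stays responsive. A sketch using a private serial queue inside CaptureModel:

private let sessionQueue = DispatchQueue(label: "capture.session.queue")

func startRunningCaptureSession() {
    // startRunning() blocks until the session starts; keep it (and the
    // capture trigger) on a serial background queue.
    sessionQueue.async { [weak self] in
        guard let self = self else { return }
        self.captureSession.startRunning()
        self.photoOutput?.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
    }
}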