AVCaptureMetadataOutputObjectsDelegate 未收到回调

AVCaptureMetadataOutputObjectsDelegate not receiving callback

我正在制作二维码扫描仪。当所有代码都写在 ViewController 内的一个地方时,我的代码可以正常工作,但是当我将它模块化之后,我就再也收不到 AVCaptureMetadataOutputObjectsDelegate 的回调了。
import Foundation
import UIKit
import AVFoundation

/// Assembles and owns the pieces of an AVFoundation capture pipeline
/// (session, camera input, metadata output, preview layer) used for
/// QR/barcode scanning.
///
/// NOTE(review): `AVCaptureMetadataOutput` does not retain its delegate, so
/// the caller must keep a strong reference to whatever owns the delegate
/// (and to this source) or metadata callbacks will never arrive.
class CameraSource : NSObject {

    private var session : AVCaptureSession?
    private var inputDevice : AVCaptureDeviceInput?
    private var videoPreviewLayer : AVCaptureVideoPreviewLayer?
    private var captureMetadataOutput : AVCaptureMetadataOutput?

    /// Replaces any previous metadata output with a fresh instance.
    func setCaptureMetadataOutput() {
        // Direct assignment releases the old output; the former explicit
        // `= nil` before this line was a dead store and has been removed.
        self.captureMetadataOutput = AVCaptureMetadataOutput()
    }

    func getCaptureMetadataOutput() -> AVCaptureMetadataOutput? {
        return self.captureMetadataOutput
    }

    func setInputDevice(inputDevice : AVCaptureDeviceInput?) {
        self.inputDevice = inputDevice
    }

    func getInputDevice() -> AVCaptureDeviceInput? {
        return self.inputDevice
    }

    func setSession(session : AVCaptureSession?) {
        self.session = session
    }

    func getSession() -> AVCaptureSession? {
        return self.session
    }

    /// Registers `delegate` for the given metadata object types; callbacks
    /// are delivered on the main queue.
    /// - Precondition: `setCaptureMetadataOutput()` has already been called.
    func setMetadataObjects(metaObjects : [AVMetadataObject.ObjectType], delegate : AVCaptureMetadataOutputObjectsDelegate) {
        assert(self.captureMetadataOutput != nil)
        self.captureMetadataOutput!.setMetadataObjectsDelegate(delegate, queue: DispatchQueue.main)
        self.captureMetadataOutput!.metadataObjectTypes = metaObjects
    }

    /// Creates the preview layer for the current session and fixes its
    /// orientation. (Method name keeps the original "Viewo" typo so existing
    /// callers keep compiling.)
    func initViewoPreviewLayer(videoGravity : AVLayerVideoGravity, orientation : AVCaptureVideoOrientation) {
        assert(session != nil)

        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: session!)
        videoPreviewLayer!.videoGravity = videoGravity
        videoPreviewLayer!.connection!.videoOrientation = orientation
    }

    /// Installs the preview layer as a sublayer sized to `imageView`.
    func addVideoLayerToImageView(imageView : UIImageView) {
        assert(self.videoPreviewLayer != nil)

        imageView.layer.addSublayer(self.videoPreviewLayer!)
        self.videoPreviewLayer!.frame = imageView.bounds
    }

    /// Starts the capture session.
    /// NOTE(review): `startRunning()` blocks its calling thread — consider
    /// dispatching to a background queue; confirm against caller usage.
    func startSession() {
        assert(session != nil)
        self.session!.startRunning()
    }


    /*==========================================================================
    STATIC FUNCTIONS
    ==========================================================================*/

    /// - Warning: force-unwraps; crashes on hardware without a back camera.
    static func getBackCamera() -> AVCaptureDevice {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .back)!
    }

    /// - Warning: force-unwraps; crashes on hardware without a front camera.
    static func getFrontCamera() -> AVCaptureDevice {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .front)!
    }

    /// Returns `true` when at least one video-capable capture device exists.
    static func isCameraAvailable() -> Bool {
        if #available(iOS 10.0, *) {
            let devices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                           mediaType: AVMediaType.video,
                                                           position: .unspecified).devices
            return !devices.isEmpty
        } else {
            // Deprecated API, kept only for pre-iOS 10 deployment targets.
            return !AVCaptureDevice.devices(for: AVMediaType.video).isEmpty
        }
    }


    /*==========================================================================
    CAMERA BUILDER CLASS
    ==========================================================================*/

    /// Step-by-step builder: createSession → setSessionPreset →
    /// attachInputDevice → addOutputToSessionForMetaData (returns the source).
    class Builder {

        var cameraSource : CameraSource

        init() {
            cameraSource = CameraSource()
        }

        /// Installs a brand-new session, discarding any existing one.
        func createSession() -> Builder {
            // Assigning the new session directly releases the old one; the
            // former `if != nil { setSession(nil) }` dance was a dead store.
            cameraSource.setSession(session: AVCaptureSession())
            return self
        }

        func setSessionPreset(preset : AVCaptureSession.Preset) -> Builder {
            assert(cameraSource.getSession() != nil)

            cameraSource.getSession()!.sessionPreset = preset
            return self
        }

        /// Wraps `camera` in a device input and attaches it to the session.
        /// - Throws: `AppErrorCode.cameraError` when the input cannot be built
        ///   or the session is missing.
        func attachInputDevice(camera : AVCaptureDevice) throws -> Builder {

            try self.prepareInputDevice(camera: camera)
            try self.addInputToSession()

            assert(cameraSource.inputDevice != nil)
            return self
        }

        /// Attaches a metadata output and completes the build.
        /// - Throws: `AppErrorCode.cameraError` when the session rejects the output.
        func addOutputToSessionForMetaData() throws -> CameraSource {
            cameraSource.setCaptureMetadataOutput()

            assert(cameraSource.getSession() != nil && cameraSource.getCaptureMetadataOutput() != nil)

            if !cameraSource.getSession()!.canAddOutput(cameraSource.getCaptureMetadataOutput()!) {
                throw AppErrorCode.cameraError("Unable to attach output to camera session")
            }
            cameraSource.getSession()!.addOutput(cameraSource.getCaptureMetadataOutput()!)

            return self.cameraSource
        }

        /*==========================================================================
        BUILDER PRIVATE FUNCTIONS
        ==========================================================================*/

        /// Builds an `AVCaptureDeviceInput` for `camera` and stores it.
        private func prepareInputDevice(camera : AVCaptureDevice) throws {
            do {
                let inputDevice = try AVCaptureDeviceInput(device: camera)
                cameraSource.setInputDevice(inputDevice: inputDevice)

            } catch let error as NSError {
                print(error.localizedDescription)
                throw AppErrorCode.cameraError("Unable to attach input to camera session")
            }
        }

        /// Adds the stored input to the session; session must already exist.
        private func addInputToSession() throws {
            if(cameraSource.getSession() == nil) {
                throw AppErrorCode.cameraError("Unable to create camera session")
            }

            assert(cameraSource.getInputDevice() != nil && cameraSource.getSession()!.canAddInput(cameraSource.getInputDevice()!))

            cameraSource.getSession()!.addInput(cameraSource.getInputDevice()!)
        }

    }


}

我的二维码扫描器(QRScanner)代码如下:

import UIKit
import Foundation
import AVFoundation

/// Receives the decoded QR payload (or "No Data" for an empty frame).
/// Class-bound (`AnyObject`) so `QRScanner` can hold its delegate weakly
/// and avoid a retain cycle with the owning view controller.
protocol QRScannerDelegate : AnyObject {
    func scannedData(_ scannedString : String)
}

/// Facade over `CameraSource` that configures the pipeline for QR codes.
///
/// IMPORTANT: callers must keep a strong reference to the `QRScanner`
/// instance (e.g. a stored property on the view controller). A temporary
/// `QRScanner()` is deallocated as soon as the call chain ends, and because
/// the metadata output does not retain its delegate, no callbacks arrive.
class QRScanner : NSObject {

    private var cameraSource : CameraSource?

    // Weak to break the cycle ViewController → QRScanner → delegate.
    weak var delegate : QRScannerDelegate?

    /// Builds the camera pipeline (back camera, `.photo` preset) and
    /// registers `self` as the metadata-output delegate for `.qr` objects.
    /// - Throws: `AppErrorCode.cameraError` when any build step fails.
    func prepareCamera (delegate : QRScannerDelegate) throws -> QRScanner {
        do {
            self.delegate = delegate
            self.cameraSource = try CameraSource
                .Builder()
                .createSession()
                .setSessionPreset(preset: .photo)
                .attachInputDevice(camera: CameraSource.getBackCamera())
                .addOutputToSessionForMetaData()

            // `self` already conforms via the extension; the explicit
            // `as AVCaptureMetadataOutputObjectsDelegate` upcast was redundant.
            self.cameraSource!.setMetadataObjects(metaObjects: [.qr], delegate: self)

        } catch let err as NSError {
            print(err.localizedDescription)
            self.cameraSource = nil
            throw AppErrorCode.cameraError("Unable to process camera with one or more issue")
        }

        return self
    }

    /// Creates the preview layer; camera must have been prepared first.
    func initViewoPreviewLayer(videoGravity : AVLayerVideoGravity, orientation : AVCaptureVideoOrientation) -> QRScanner{
        assert(cameraSource != nil)

        self.cameraSource!.initViewoPreviewLayer(videoGravity: videoGravity, orientation: orientation)
        return self
    }

    /// Installs the preview layer into `imageView`.
    func addVideoLayerToImageView(imageView : UIImageView) -> QRScanner{
        assert(cameraSource != nil)

        self.cameraSource!.addVideoLayerToImageView(imageView: imageView)
        return self
    }

    /// Starts capturing; delegate callbacks begin after this.
    func startSession() {
        assert(cameraSource != nil)
        self.cameraSource!.startSession()
    }
}

// MARK: - AVCaptureMetadataOutputObjectsDelegate
extension QRScanner : AVCaptureMetadataOutputObjectsDelegate {
    /// Delivered on the main queue (see `setMetadataObjects`) whenever the
    /// session recognizes metadata objects in the video stream. Forwards the
    /// first QR payload to `delegate`, or "No Data" for an empty frame.
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {

        print("Delegate called")

        // Nothing recognized in this frame.
        guard let firstObject = metadataObjects.first else {
            self.delegate?.scannedData("No Data")
            return
        }

        // Conditional cast instead of the original `as!`: a non-readable
        // metadata object (e.g. a detected face) would otherwise crash.
        guard let readable = firstObject as? AVMetadataMachineReadableCodeObject,
              readable.type == AVMetadataObject.ObjectType.qr,
              let scannedValue = readable.stringValue else { return }

        print("Scanner Getting data: \(scannedValue)")
        self.delegate?.scannedData(scannedValue)
    }
}

我已经在我的 ViewController 中实现了 QRScannerDelegate,但我没有收到任何数据。此外,我甚至没有收到 AVCaptureMetadataOutputObjectsDelegate 的回调。

我尝试将 ViewController 实例作为 AVCaptureMetadataOutputObjectsDelegate 传递,然后我收到了带有扫描信息的回调。

So My question is why is this happening?

1) 当我把一个普通的 class 实例作为 AVCaptureMetadataOutputObjectsDelegate 传入时,我收不到回调。但是,

2) 当我将 UIViewController 实例作为 AVCaptureMetadataOutputObjectsDelegate 传递时,我可以获得回调。

更新

这就是我从 ViewController 中调用 prepareCamera 的方式:
// BUG demonstrated here: `QRScanner()` is a temporary — nothing retains it
// after viewDidAppear returns, so it is deallocated and the metadata-output
// delegate chain goes away: no scan callbacks are ever delivered.
override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        do {

            try QRScanner().prepareCamera(delegate: self)
                    .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
                    .addVideoLayerToImageView(imageView: self.qrScannerImageView)
                    .startSession()

        } catch {

            print("Some Camera Error")

        }
        self.createOverlay()
    }

如果不知道你是怎么调用 prepareCamera 的,很难百分之百确定,因为正是它触发了 setMetadataObjectsDelegate。但在我看来,你很可能没有在 ViewController 中保持对 QRScanner 的强引用(也就是没有把它保存为实例变量)。这也解释了为什么当你的 ViewController 本身作为 AVCaptureMetadataOutputObjectsDelegate 时回调能够被触发——因为 ViewController 一直留在内存中。

还值得注意的是,如果 ViewController 是您的 QRScannerDelegate,您需要将委托定义为 weak var delegate : QRScannerDelegate? 以防止内存泄漏。

编辑:进行如下更改:

// Before: the QRScanner is a temporary created inside viewDidAppear, so it
// does not outlive this call and cannot receive capture callbacks.
override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        do {

            try QRScanner().prepareCamera(delegate: self)
                    .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
                    .addVideoLayerToImageView(imageView: self.qrScannerImageView)
                    .startSession()

        } catch {

            print("Some Camera Error")

        }
        self.createOverlay()
    }

// After: the scanner lives in a stored property of the view controller, so
// it survives past viewDidAppear and the delegate callbacks are delivered.
var qrScanner = QRScanner()
override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        do {

            try self.qrScanner.prepareCamera(delegate: self)
                    .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
                    .addVideoLayerToImageView(imageView: self.qrScannerImageView)
                    .startSession()

        } catch {

            print("Some Camera Error")

        }
        self.createOverlay()
    }

并改变

// Before: no class constraint — a property of this protocol type cannot be
// declared `weak`, so the delegate reference would be strong (leak risk).
protocol QRScannerDelegate {
    func scannedData(_ scannedString : String)
}

// After: class-bound protocol, so `weak var delegate : QRScannerDelegate?`
// compiles. (Modern Swift prefers `: AnyObject` over the legacy `: class`.)
protocol QRScannerDelegate: class {
    func scannedData(_ scannedString : String)
}

这样才能把委托声明为 weak(弱引用)。

AVCaptureMetadataOutputObjectsDelegate 很难,但您可以用它做一些非常酷的事情!所以坚持下去。

我提取了一些我前一段时间写的 QRScanner 代码,如果你想检查它,我把它放在一个要点中供你查看。它比您拥有的要精简一些,但您可能会发现它很有用。 https://gist.github.com/aChase55/733ea89af1bfa80c65971d3bc691f0b2