captureOutput not being called from delegate

So I realize there are several questions about this already, but I feel like I've been through all of them and still can't figure out what I'm doing wrong or differently. I'm calling this class from a function that I know fires after viewDidLoad. What I want to do is take the video feed from a given AVCaptureDevice or camera ID and pass it into a WebView as a series of evaluateJavaScript calls. I'll probably need to optimize that later, but right now I'm having trouble getting captureOutput called at all. I shouldn't need a preview in order to capture output, right? I've confirmed that the permissions are correct and that the code reaches the point where the sample buffer delegate gets set. Any ideas?
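
For context on the pieces that aren't shown here: VideoSampleListener is just a one-method protocol, and the WebView side looks roughly like the sketch below (WebViewFrameSink and the receiveFrame JavaScript function are simplified placeholders, not the exact code from my project):

import Foundation
import WebKit

protocol VideoSampleListener: AnyObject {
    func receivedVideoSample(imageData: String)
}

// Example conformance that pushes each base64-encoded JPEG frame into the page.
final class WebViewFrameSink: VideoSampleListener {
    weak var webView: WKWebView?

    init(webView: WKWebView) {
        self.webView = webView
    }

    func receivedVideoSample(imageData: String) {
        // captureOutput fires on a background queue; evaluateJavaScript must run on the main thread.
        DispatchQueue.main.async {
            self.webView?.evaluateJavaScript("receiveFrame('\(imageData)')", completionHandler: nil)
        }
    }
}

The plugin class itself: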

import AVFoundation
import UIKit

class CameraPlugin:
NSObject,
AVCaptureVideoDataOutputSampleBufferDelegate,
AVCaptureMetadataOutputObjectsDelegate,
AVCapturePhotoCaptureDelegate {

private var capturePhotoCompletion: ((Result<UIImage, Error>) -> ())?
private var scanBarcodeCompletion: ((Result<String, Error>) -> ())?
let captureSession = AVCaptureSession()
private var videoSampleListener: VideoSampleListener?


func startStreamingCamera(cameraId: String?, camera: AVCaptureDevice?, listener: VideoSampleListener) {
    self.videoSampleListener = listener
    var inputCam = camera
    if let cameraId = cameraId {
        inputCam = self.retrieveVideoCaptureDeviceFromId(id: cameraId)
        if inputCam == nil {
            return
        }
    } else if inputCam == nil {
        return
    }
    
    self.haveCaptureDeviceAccess(type: .video) { granted in
        if granted {
            do {
                let captureDeviceInput = try AVCaptureDeviceInput(device: inputCam!)
                let captureDeviceOutput = AVCaptureVideoDataOutput()
                
                guard
                    self.captureSession.canAddInput(captureDeviceInput),
                    self.captureSession.canAddOutput(captureDeviceOutput)
                else {
                    return
                }
                
                self.captureSession.addInput(captureDeviceInput)
                self.captureSession.addOutput(captureDeviceOutput)
                
                // Apple's docs require a serial queue for sample buffer delivery.
                captureDeviceOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera-plugin.video-output"))

                // No frames are delivered until the capture session is running.
                self.captureSession.startRunning()
            } catch {
                // Creating the AVCaptureDeviceInput failed; error handling omitted here.
            }
        } else {
            // Camera access was not granted.
        }
    }
}


/*
 * Converts captured video frame to a jpeg image
 */
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
        return
    }
    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
    guard let cgImage = CIContext().createCGImage(ciImage, from: ciImage.extent) else {
        return
    }
    let image = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
    
    guard let imageData = image.jpegData(compressionQuality: 0.7)?.base64EncodedString() else {
        return
    }
    
    videoSampleListener?.receivedVideoSample(imageData: imageData)
}

func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {

}

private func configurePhotoSettings() -> AVCapturePhotoSettings {
    let settings = AVCapturePhotoSettings()
    settings.isHighResolutionPhotoEnabled = true
    return settings
}

private func retrieveBarcodeMetadataObjectTypes() -> [AVMetadataObject.ObjectType] {
    return [
        AVMetadataObject.ObjectType.upce,
        AVMetadataObject.ObjectType.code39,
        AVMetadataObject.ObjectType.code39Mod43,
        AVMetadataObject.ObjectType.ean13,
        AVMetadataObject.ObjectType.ean8,
        AVMetadataObject.ObjectType.code93,
        AVMetadataObject.ObjectType.code128,
        AVMetadataObject.ObjectType.pdf417,
        AVMetadataObject.ObjectType.qr,
        AVMetadataObject.ObjectType.aztec,
        AVMetadataObject.ObjectType.interleaved2of5,
        AVMetadataObject.ObjectType.itf14,
        AVMetadataObject.ObjectType.dataMatrix
    ]
}

private func haveCaptureDeviceAccess(type: AVMediaType, completion: @escaping (Bool) -> ()) {
    switch AVCaptureDevice.authorizationStatus(for: type) {
    case .denied:
        completion(false)
        
    case .notDetermined:
        AVCaptureDevice.requestAccess(for: type) { granted in
            completion(granted)
        }
    
    default:
        completion(true)
    }
}

func retrieveVideoCaptureDeviceFromId(id: String) -> AVCaptureDevice? {
    return self.retrieveAvailableVideoCaptureDevices().first(where: { device in device.uniqueID == id })
}

func retrieveAvailableVideoCaptureDevices() -> [AVCaptureDevice] {
    let discoverySession = AVCaptureDevice.DiscoverySession(
        deviceTypes: self.retrievePlatformDeviceTypes(),
        mediaType: .video,
        position: .unspecified
    )
    return discoverySession.devices
}

private func retrievePlatformDeviceTypes() -> [AVCaptureDevice.DeviceType] {
    var deviceTypes: [AVCaptureDevice.DeviceType] = [
        .builtInDualCamera,
        .builtInWideAngleCamera,
        .builtInTelephotoCamera
    ]
    if #available(iOS 11.1, *) {
        deviceTypes += [
            .builtInTrueDepthCamera
        ]
    }
    if #available(iOS 13.0, *) {
        deviceTypes += [
            .builtInDualWideCamera,
            .builtInTripleCamera,
            .builtInUltraWideCamera
        ]
    }
    return deviceTypes
}

}

Calling code:

guard let videoCaptureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {
    return
}
let camPlugin = CameraPlugin()
camPlugin.startStreamingCamera(cameraId: nil, camera: videoCaptureDevice, listener: self)

Update: There isn't actually anything wrong with the code here. The problem I was having was that the instance of the class described above was being deallocated.
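
In other words: in the calling code above, camPlugin is only a local constant, so it (and its capture session) gets released as soon as that function returns, and the delegate never fires. Keeping a strong reference fixes it. A minimal sketch, assuming the stream is started from a view controller (CameraViewController and viewDidAppear are just placeholders for wherever you kick things off):

import AVFoundation
import UIKit

class CameraViewController: UIViewController, VideoSampleListener {

    // Strong reference: storing the plugin in a property keeps it (and its
    // AVCaptureSession) alive after the function that starts the stream returns.
    private let camPlugin = CameraPlugin()

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else {
            return
        }
        camPlugin.startStreamingCamera(cameraId: nil, camera: videoCaptureDevice, listener: self)
    }

    func receivedVideoSample(imageData: String) {
        // Forward the base64 JPEG frame to the WebView here (see the sketch near the top).
    }
}

Any owner works, as long as something holds onto the plugin for the lifetime of the stream.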