iOS Swift - AVCaptureSession - 根据帧速率捕获帧

iOS Swift - AVCaptureSession - Capture frames respecting frame rate

我正在尝试构建一个应用程序,该应用程序将从相机捕获帧并使用 OpenCV 处理它们,然后再将这些帧保存到设备,但要以特定的帧速率进行捕获。

我目前卡在这样一个问题上:AVCaptureVideoDataOutputSampleBufferDelegate 似乎不遵守 AVCaptureDevice.activeVideoMinFrameDuration 和 AVCaptureDevice.activeVideoMaxFrameDuration 的设置。

captureOutput 运行速度远高于上述设置所显示的每秒 2 帧。

您是否知道如何在有或没有委托的情况下实现这一目标?

ViewController:

// Standard view-lifecycle hook; no additional setup is done here.
// Capture-session setup is deferred to viewDidAppear so the view's
// final frame is available for laying out the preview layer.
override func viewDidLoad() {
    super.viewDidLoad()

}

// Kick off the capture pipeline once the view is on screen.
// NOTE: the original override omitted the required call to
// super.viewDidAppear(_:) — UIKit documentation states subclasses
// must call super at some point in their implementation.
override func viewDidAppear(animated: Bool) {
    super.viewDidAppear(animated)
    setupCaptureSession()
}

// Configures an AVCaptureSession that delivers 720p frames from the
// back camera to this view controller at 2 fps, and attaches a
// preview layer to the left half of the screen.
//
// BUG FIX: the original code set activeVideoMinFrameDuration /
// activeVideoMaxFrameDuration BEFORE the device input was added to
// the session. Per the header comment in AVCaptureDevice.h, iOS
// resets activeVideoMinFrameDuration to its default when the
// device's AVCaptureDeviceInput is added to a session — so the 2 fps
// setting was silently discarded and captureOutput fired at the full
// device rate. Configuration must happen AFTER addInput (and after
// any sessionPreset change).
func setupCaptureSession() {

    let session : AVCaptureSession = AVCaptureSession()
    session.sessionPreset = AVCaptureSessionPreset1280x720

    let videoDevices : [AVCaptureDevice] = AVCaptureDevice.devices() as! [AVCaptureDevice]

    for device in videoDevices {
        if device.position == AVCaptureDevicePosition.Back {
            let captureDevice : AVCaptureDevice = device

            do {
                let input : AVCaptureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)

                // Add the input FIRST; adding it resets any previously
                // configured frame durations on the device.
                if session.canAddInput(input) {
                    session.addInput(input)
                }

                // Now lock the device and pin both min and max frame
                // duration to 1/2 s, i.e. a fixed 2 fps capture rate.
                try captureDevice.lockForConfiguration()
                captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, 2)
                captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, 2)
                captureDevice.unlockForConfiguration()

                let output : AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()

                // Deliver sample buffers on a dedicated serial queue so
                // per-frame processing never blocks the main thread.
                let dispatch_queue : dispatch_queue_t = dispatch_queue_create("streamoutput", nil)
                output.setSampleBufferDelegate(self, queue: dispatch_queue)

                session.addOutput(output)

                session.startRunning()

                // Preview occupies the left half of the view; the
                // processed output (previewMat) takes the right half.
                let previewLayer = AVCaptureVideoPreviewLayer(session: session)
                previewLayer.connection.videoOrientation = .LandscapeRight

                let previewBounds : CGRect = CGRectMake(0,0,self.view.frame.width/2,self.view.frame.height+20)
                previewLayer.backgroundColor = UIColor.blackColor().CGColor
                previewLayer.frame = previewBounds
                previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                self.imageView.layer.addSublayer(previewLayer)

                self.previewMat.frame = CGRectMake(previewBounds.width, 0, previewBounds.width, previewBounds.height)

            } catch let error {
                // Don't swallow failures silently — surface them so a
                // missing camera permission or busy device is visible.
                print("Capture session setup failed: \(error)")
            }
            break
        }
    }

}

// AVCaptureVideoDataOutputSampleBufferDelegate callback — invoked once
// per delivered video frame on the serial queue registered in
// setupCaptureSession. Converts the sample buffer to a UIImage and
// hands it (with the output view) to the OpenCV wrapper.
// NOTE(review): `wrapper`, `getUiImageFromBuffer` and `previewMat` are
// defined elsewhere in this class — their exact semantics are not
// visible here.
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    self.wrapper.processBuffer(self.getUiImageFromBuffer(sampleBuffer), self.previewMat)
}

所以我找到问题了。

在 AVCaptureDevice.h 中,activeVideoMinFrameDuration 属性上方的注释部分指出:

On iOS, the receiver's activeVideoMinFrameDuration resets to its default value under the following conditions:

  • The receiver's activeFormat changes
  • The receiver's AVCaptureDeviceInput's session's sessionPreset changes
  • The receiver's AVCaptureDeviceInput is added to a session

最后一个要点导致了我的问题,所以执行以下操作为我解决了问题:

        do {

            let input : AVCaptureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)

            if session.canAddInput(input) {
                try session.addInput(input)
            }

            try captureDevice.lockForConfiguration()
            captureDevice.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: 2)
            captureDevice.activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: 2)
            captureDevice.unlockForConfiguration()

            let output : AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()

            let dispatch_queue : dispatch_queue_t = dispatch_queue_create("streamoutput", nil)
            output.setSampleBufferDelegate(self, queue: dispatch_queue)

            session.addOutput(output)