How can I call captureOutput at 240fps?

I want to save images at a high frame rate. Since I/O is expensive it doesn't have to be exactly 240fps, but it should be above 120fps.

What I did was set up the capture device in viewDidLoad, then record a timestamp in captureOutput and check the rate.

But I noticed that captureOutput is always called at 30fps. Can you tell me where I went wrong?

Thanks for taking the time to answer!

Here is my code and the result:

//
//  ViewController.swift
//  CustomCamera
//
//  Created by chunibyo on 2021/3/8.
//

import UIKit
import AVFoundation
import Vision
import VideoToolbox

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    
    @IBOutlet weak var captureButton: UIButton!
    let sessionQueue = DispatchQueue(label: "Session Queue")
    var status = false
    private var MyCaptureDevice: AVCaptureDevice?
    
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        
        captureButton.layer.cornerRadius = captureButton.frame.width / 2
        captureButton.layer.masksToBounds = true
        captureButton.layer.zPosition = 10
        
        guard let captureDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back) else {return}

        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else {return}
        
        // Search the device's formats for one that supports 240fps
        for vFormat in captureDevice.formats {
            // Supported frame-rate ranges for this format
            let ranges = vFormat.videoSupportedFrameRateRanges as [AVFrameRateRange]
            let frameRates = ranges[0]
            if frameRates.maxFrameRate == 240 {
                // Lock the device, apply the format, and pin the frame duration to 1/240 s
                try? captureDevice.lockForConfiguration()
                captureDevice.activeFormat = vFormat as AVCaptureDevice.Format
                captureDevice.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: Int32(240))
                captureDevice.activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: Int32(240))
                captureDevice.videoZoomFactor = captureDevice.minAvailableVideoZoomFactor
                captureDevice.unlockForConfiguration()
            }
        }
        
        let captureSession = AVCaptureSession()
        // captureSession.sessionPreset = .photo
        captureSession.addInput(input)
        captureSession.startRunning()
        
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        view.layer.addSublayer(previewLayer)
        previewLayer.frame = view.frame
        
        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
        dataOutput.alwaysDiscardsLateVideoFrames = true
        captureSession.addOutput(dataOutput)
        
        print(captureDevice.minAvailableVideoZoomFactor)
        print(captureDevice.maxAvailableVideoZoomFactor)
        
        MyCaptureDevice = captureDevice
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        print(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)))
//        if !status { return }
//        guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
//        guard let uiImage = UIImage(pixelBuffer: pixelBuffer) else { return }
//
//        sessionQueue.async {
//            guard let captureDevice = self.MyCaptureDevice else { return }
//            if captureDevice.videoZoomFactor >= (captureDevice.maxAvailableVideoZoomFactor - 0.2) { return }
//            UIImageWriteToSavedPhotosAlbum(uiImage, nil, nil, nil)
//            try? captureDevice.lockForConfiguration()
//            captureDevice.videoZoomFactor += 0.1
//            captureDevice.unlockForConfiguration()
//        }
    }

    @IBAction func captureControl(_ sender: UIButton) {
        DispatchQueue.main.async {
            if self.status {
                self.captureButton.backgroundColor = .white
                print("stop")
                self.status = !self.status
            }
            else {
                self.captureButton.backgroundColor = .red
                print("recording...")
                self.status = !self.status
            }
            
        }
    }
}

extension UIImage {
    public convenience init?(pixelBuffer: CVPixelBuffer) {
        var cgImage: CGImage?
        VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
        guard let _cgImage = cgImage else { return nil }
        self.init(cgImage: _cgImage)
    }
}
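
As an aside, here is the kind of minimal helper I use to turn those printed timestamps into an fps estimate (logFPS and lastPTS are my own hypothetical names, not part of the project above; they would live in the ViewController and be called from captureOutput on the video queue):

    // Hypothetical helper: estimate the delivered frame rate from the
    // gap between consecutive presentation timestamps.
    private var lastPTS: CMTime?

    func logFPS(_ sampleBuffer: CMSampleBuffer) {
        let pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        if let last = lastPTS {
            let delta = CMTimeGetSeconds(CMTimeSubtract(pts, last))
            if delta > 0 {
                print(String(format: "delta %.4fs ~ %.1f fps", delta, 1.0 / delta))
            }
        }
        lastPTS = pts
    }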

Console log (Stack Overflow won't let me post images, sorry).

With logging only, captureOutput is called at 240fps.

With captureOutput saving each frame to the photo album, the rate drops to roughly 70~100fps.
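
The bottleneck is the save path: UIImageWriteToSavedPhotosAlbum performs a Photos-library write per frame. If the goal is sustained high-frame-rate capture to disk, one alternative worth trying (my own sketch, not what the code below does) is to append the sample buffers to an AVAssetWriter and extract frames from the movie afterwards:

import AVFoundation

// Hypothetical sketch: record raw sample buffers to a .mov file
// instead of saving individual UIImages per frame.
final class FrameRecorder {
    private let writer: AVAssetWriter
    private let input: AVAssetWriterInput
    private var started = false

    // width/height should match the active format's dimensions.
    init(url: URL, width: Int, height: Int) throws {
        writer = try AVAssetWriter(outputURL: url, fileType: .mov)
        input = AVAssetWriterInput(mediaType: .video, outputSettings: [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: width,
            AVVideoHeightKey: height
        ])
        input.expectsMediaDataInRealTime = true
        writer.add(input)
    }

    // Call from captureOutput(_:didOutput:from:).
    func append(_ sampleBuffer: CMSampleBuffer) {
        if !started {
            writer.startWriting()
            writer.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
            started = true
        }
        if input.isReadyForMoreMediaData {
            input.append(sampleBuffer)
        }
    }

    func finish(completion: @escaping () -> Void) {
        input.markAsFinished()
        writer.finishWriting(completionHandler: completion)
    }
}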

The following code gets the 240fps logs:

//
//  ViewController.swift
//  CustomCamera
//
//  Created by chunibyo on 2021/3/8.
//

import UIKit
import AVFoundation
import Vision
import VideoToolbox

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    
    @IBOutlet weak var captureButton: UIButton!
    let sessionQueue = DispatchQueue(label: "Session Queue")
    var status = false
    var zoomStatus = 1
    private var MyCaptureDevice: AVCaptureDevice?
    
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        
        captureButton.layer.cornerRadius = captureButton.frame.width / 2
        captureButton.layer.masksToBounds = true
        captureButton.layer.zPosition = 10
        
        
        guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {return}

        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else {return}
        
        let captureSession = AVCaptureSession()
        // captureSession.sessionPreset = .photo
        captureSession.addInput(input)
        
        // Search the device's formats for one that supports 240fps.
        // Note: this now runs AFTER addInput, so the session no longer
        // overrides the chosen format.
        for vFormat in captureDevice.formats {
            // Supported frame-rate ranges for this format
            let ranges = vFormat.videoSupportedFrameRateRanges as [AVFrameRateRange]
            let frameRates = ranges[0]
            if frameRates.maxFrameRate == 240 {
                // Lock the device, apply the format, and pin the frame duration to 1/240 s
                try? captureDevice.lockForConfiguration()
                captureDevice.activeFormat = vFormat as AVCaptureDevice.Format
                captureDevice.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: Int32(240))
                captureDevice.activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: Int32(240))
                captureDevice.videoZoomFactor = captureDevice.minAvailableVideoZoomFactor
                captureDevice.unlockForConfiguration()
            }
        }
        
        captureSession.startRunning()
        
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        view.layer.addSublayer(previewLayer)
        previewLayer.frame = view.frame
        
        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
        dataOutput.alwaysDiscardsLateVideoFrames = true
        captureSession.addOutput(dataOutput)
        
        print(captureDevice.minAvailableVideoZoomFactor)
        print(captureDevice.maxAvailableVideoZoomFactor)
        
        MyCaptureDevice = captureDevice
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        print(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)))
//        if !status { return }
//        guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
//        guard let uiImage = UIImage(pixelBuffer: pixelBuffer) else { return }
//        UIImageWriteToSavedPhotosAlbum(uiImage, nil, nil, nil)
//
//        guard let captureDevice = self.MyCaptureDevice else { return }
//        if self.zoomStatus == 1 && captureDevice.videoZoomFactor >= CGFloat(Int32(captureDevice.maxAvailableVideoZoomFactor * 0.6)) { self.zoomStatus = -1
//        }
//        else if self.zoomStatus == -1 && captureDevice.videoZoomFactor <= (captureDevice.minAvailableVideoZoomFactor + 1.0) {
//            self.zoomStatus = 1
//        }
//        UIImageWriteToSavedPhotosAlbum(uiImage, nil, nil, nil)
//        try? captureDevice.lockForConfiguration()
//        captureDevice.videoZoomFactor += (0.1 * CGFloat(self.zoomStatus))
//        captureDevice.unlockForConfiguration()
    }

    @IBAction func captureControl(_ sender: UIButton) {
        DispatchQueue.main.async {
            if self.status {
                self.captureButton.backgroundColor = .white
                print("stop")
                self.status = !self.status
            }
            else {
                self.captureButton.backgroundColor = .red
                print("recording...")
                self.status = !self.status
            }
            
        }
    }
}

extension UIImage {
    public convenience init?(pixelBuffer: CVPixelBuffer) {
        var cgImage: CGImage?
        VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
        guard let _cgImage = cgImage else { return nil }
        self.init(cgImage: _cgImage)
    }
}
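
In case it helps anyone else: as far as I can tell, the difference that matters is the order of configuration. In the broken version I set activeFormat before the input was attached to any session, and when addInput ran later the session applied its default preset to the device, quietly reverting the format to a 30fps one. In the working version activeFormat is set after addInput, which switches the session's preset to .inputPriority so it stops managing the device's format. Condensed, the order that works looks like this (the format search is my own rewrite of the loop above):

let session = AVCaptureSession()
session.beginConfiguration()
session.addInput(input)  // attach the input FIRST

// Pick any format whose ranges reach 240fps
if let format = captureDevice.formats.first(where: { candidate in
    candidate.videoSupportedFrameRateRanges.contains { $0.maxFrameRate >= 240 }
}) {
    try? captureDevice.lockForConfiguration()
    captureDevice.activeFormat = format  // session preset becomes .inputPriority
    captureDevice.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: 240)
    captureDevice.activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: 240)
    captureDevice.unlockForConfiguration()
}

session.commitConfiguration()
session.startRunning()

The device choice may also matter: the broken version asked for .builtInDualCamera, and on my understanding the high-frame-rate formats are exposed by the plain wide-angle camera, which is what AVCaptureDevice.default(for: .video) returns in the working version.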