Swift iOS activeVideoMinFrameDuration not working
I have code that captures preview frames in the app for image processing. For this I need to control the frame rate of the preview, but the code that sets the frame rate has no effect on the preview stream. Here is the code:
import UIKit
import AVFoundation
import Starscream
import SwiftyJSON

class ViewController: UIViewController, WebSocketDelegate {
    var captureSession: AVCaptureSession?
    var rearCamera: AVCaptureDevice?
    var rearCameraInput: AVCaptureDeviceInput?
    var videoPreviewOutput: AVCaptureVideoDataOutput?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var socket: WebSocket!
    let server = WebSocketServer()
    var isConnected = false
    var json: JSON = JSON()
    var json1: JSON = JSON()
    var jsonAction: JSON = JSON()
    var captureDevice: AVCaptureDevice?
    var hr_raw1 = [Int]()
    var time_raw1 = [Int]()
    var bpm_list = [Double]()
    var time_list = [Int]()
    var frames1 = [Int]()
    var start_time: Int = -1
    var current_time: Int = 0
    var last_time: Int = 0
    var init_stamp: Int = -1
    var current_stamp: Int = 0
    var last_stamp: Int = 0

    @IBOutlet var bpmText: UILabel!
    @IBOutlet var fpsText: UILabel!

    override func viewDidLoad() {
        super.viewDidLoad()
        var request = URLRequest(url: URL(string: "wss://jznm58xz5i.execute-api.ap-south-1.amazonaws.com/production")!)
        request.timeoutInterval = 10
        socket = WebSocket(request: request)
        socket.delegate = self
        socket.connect()

        self.captureSession = AVCaptureSession()
        self.captureSession?.sessionPreset = .inputPriority

        let session = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back)
        self.rearCamera = session.devices.first

        if let rearCamera = self.rearCamera {
            // we try to create the input from the found camera
            self.rearCameraInput = try? AVCaptureDeviceInput(device: rearCamera)
            if let rearCameraInput = rearCameraInput {
                // always make sure the AVCaptureSession can accept the selected input
                if ((captureSession?.canAddInput(rearCameraInput)) != nil) {
                    // add the input to the current session
                    captureSession?.addInput(rearCameraInput)
                }
            }
        }

        do {
            try rearCamera?.lockForConfiguration()
            try? rearCamera?.setTorchModeOn(level: 1.0)
            rearCamera?.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: 30)
            rearCamera?.activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: 30)
            rearCamera?.unlockForConfiguration()
        } catch {
        }

        if let captureSession = captureSession {
            // create the preview layer with the configuration you want
            self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            self.videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
            self.videoPreviewLayer?.connection?.videoOrientation = .portrait
            // then add the layer to your current view
            view.layer.insertSublayer(self.videoPreviewLayer!, at: 0)
            self.videoPreviewLayer?.frame = view.frame
        }

        self.videoPreviewOutput = AVCaptureVideoDataOutput()
        self.videoPreviewOutput!.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sample buffer"))
        // always make sure the AVCaptureSession can accept the selected output
        if ((captureSession?.canAddOutput(self.videoPreviewOutput!)) != nil) {
            // add the output to the current session
            captureSession?.addOutput(self.videoPreviewOutput!)
        }

        self.captureSession?.commitConfiguration()
        self.captureSession?.startRunning()
    }
The torch also turns off immediately after the session starts, and I don't understand why that happens.
Use this to check whether the desired FPS is supported before applying it:
extension AVCaptureDevice {
    func configureFPS(_ inputFrameRate: Int) {
        var isFPSSupported = false
        do {
            // check the device's active format for a frame rate range that contains the requested FPS
            let supportedFrameRange = activeFormat.videoSupportedFrameRateRanges
            for range in supportedFrameRange {
                if range.maxFrameRate >= Double(inputFrameRate) && range.minFrameRate <= Double(inputFrameRate) {
                    isFPSSupported = true
                    break
                }
            }
            if isFPSSupported {
                try lockForConfiguration()
                activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: Int32(inputFrameRate))
                activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: Int32(inputFrameRate))
                unlockForConfiguration()
            }
        } catch {
            print("lockForConfiguration error: \(error.localizedDescription)")
        }
    }
}
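For example, a minimal usage sketch based on the question's setup (the names rearCamera and captureSession come from the code above; the exact placement and the torch workaround are assumptions, not a confirmed fix): call configureFPS(_:) after the input has been added and just before starting the session, and re-apply the torch once the session is running, since starting the session can reset the torch state.

    // sketch: apply the frame rate, then start the session
    if let rearCamera = self.rearCamera {
        rearCamera.configureFPS(30)   // only takes effect if the activeFormat supports 30 fps
    }

    self.captureSession?.startRunning()

    // hypothetical workaround for the torch switching off: re-enable it after
    // the session is running, because starting the session can reset it
    if let rearCamera = self.rearCamera, rearCamera.hasTorch {
        try? rearCamera.lockForConfiguration()
        try? rearCamera.setTorchModeOn(level: 1.0)
        rearCamera.unlockForConfiguration()
    }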