Scale of AVCaptureVideoPreviewLayer
I am currently working on a QR scanning view in a Swift app, and I want to center the video preview in the middle of my view. The view looks like this:
The (white) view is called ScanView, and I want the image preview to be the same size as ScanView and centered on it.
Code snippet:
Thanks for your help!
Here is a working solution:
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureMetadataOutputObjectsDelegate {

    @IBOutlet weak var innerView: UIView!

    var session: AVCaptureSession?
    var input: AVCaptureDeviceInput?
    var previewLayer: AVCaptureVideoPreviewLayer?

    override func viewDidLoad() {
        super.viewDidLoad()
        createSession()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // innerView has its final size here, so resize the preview layer to match it
        self.previewLayer?.frame.size = self.innerView.frame.size
    }

    private func createSession() {
        do {
            self.session = AVCaptureSession()
            if let device = AVCaptureDevice.default(for: AVMediaType.video) {
                self.input = try AVCaptureDeviceInput(device: device)
                self.session?.addInput(self.input!)
                self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session!)
                self.previewLayer?.frame.size = self.innerView.frame.size
                self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
                self.innerView.layer.addSublayer(self.previewLayer!)

                //______ 1. solution with video camera ______//
                let videoOutput = AVCaptureVideoDataOutput()
                videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
                if self.session?.canAddOutput(videoOutput) == true {
                    self.session?.addOutput(videoOutput)
                }

                //______ 2. solution with QR code ______//
                let metadataOutput = AVCaptureMetadataOutput()
                metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
                if self.session?.canAddOutput(metadataOutput) == true {
                    self.session?.addOutput(metadataOutput)
                    // metadataObjectTypes must be set after the output has been added to the session
                    metadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
                }

                self.session?.startRunning()
            }
        } catch let error {
            print("Failed to set up capture session: \(error)")
        }
    }

    //MARK: AVCaptureVideoDataOutputSampleBufferDelegate
    public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
            let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)
            // awesome stuff here
        }
    }

    //MARK: AVCaptureMetadataOutputObjectsDelegate
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // handle detected QR codes here
    }
}
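To actually read the scanned code, the metadata delegate callback can pull the string value out of the detected objects. A minimal sketch of what that method might look like (how you handle the decoded string is up to you):

    //MARK: AVCaptureMetadataOutputObjectsDelegate
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        for object in metadataObjects {
            if let qrObject = object as? AVMetadataMachineReadableCodeObject,
               qrObject.type == .qr,
               let payload = qrObject.stringValue {
                print("Scanned QR code: \(payload)") // replace with your own handling
            }
        }
    }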
Requirements:
- Set Privacy - Camera Usage Description (NSCameraUsageDescription) in Info.plist.
- innerView must be initialized; I did it in a Storyboard with the following constraints (a code sketch of equivalent constraints follows below):
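For reference, a hypothetical sketch of equivalent constraints set in code rather than in the Storyboard (the 200-point square size is a placeholder, since the actual values come from the Storyboard):

import UIKit

class ScanViewController: UIViewController {

    // Same role as the innerView outlet above; created in code here for illustration.
    let innerView = UIView()

    override func viewDidLoad() {
        super.viewDidLoad()
        innerView.backgroundColor = .white
        innerView.translatesAutoresizingMaskIntoConstraints = false
        view.addSubview(innerView)

        // Center innerView in its superview and give it a fixed square size.
        NSLayoutConstraint.activate([
            innerView.centerXAnchor.constraint(equalTo: view.centerXAnchor),
            innerView.centerYAnchor.constraint(equalTo: view.centerYAnchor),
            innerView.widthAnchor.constraint(equalToConstant: 200),
            innerView.heightAnchor.constraint(equalToConstant: 200)
        ])
    }
}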
Here is the result:
I had the same problem as Philip Dz. I finally solved it by moving the setupVideo() function from viewDidLoad to viewDidAppear:
- calling setupVideo() in viewDidLoad
- calling setupVideo() in viewDidAppear:
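A minimal sketch of that arrangement (assuming a setupVideo() helper similar to createSession() above; only the parts relevant to the timing are shown):

import UIKit
import AVFoundation

class ScannerViewController: UIViewController {

    @IBOutlet weak var innerView: UIView!

    private let session = AVCaptureSession()
    private var previewLayer: AVCaptureVideoPreviewLayer?
    private var isVideoSetUp = false

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // Auto Layout has finished by now, so innerView.frame is final and the
        // preview layer gets the correct size. In viewDidLoad the frame still has
        // its Storyboard placeholder value, which is what caused the wrong scale.
        guard !isVideoSetUp else { return }
        setupVideo()
        isVideoSetUp = true
    }

    private func setupVideo() {
        guard let device = AVCaptureDevice.default(for: .video),
              let input = try? AVCaptureDeviceInput(device: device) else { return }
        session.addInput(input)

        let layer = AVCaptureVideoPreviewLayer(session: session)
        layer.frame = innerView.bounds
        layer.videoGravity = .resizeAspectFill
        innerView.layer.addSublayer(layer)
        previewLayer = layer

        session.startRunning()
    }
}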
Maybe I am a bit late, but I just implemented a QR scanner and, depending on the device it runs on, the video stream can be zoomed. This is achieved through the AVCaptureDevice.videoZoomFactor property. So, to improve the user experience of a small, square QR scanner, you can slightly modify the code above by inserting the following line before self.session?.startRunning():
device.videoZoomFactor = min(YOUR_ZOOM_FACTOR_VALUE, device.activeFormat.videoMaxZoomFactor)
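Note that videoZoomFactor can only be changed while the device configuration is locked, so the line above needs to be wrapped roughly like this (the zoom value 2.0 is just a placeholder):

                // Before self.session?.startRunning(): zoom in a bit so small QR codes
                // fill more of the frame. The device must be locked for configuration
                // before videoZoomFactor can be changed.
                do {
                    try device.lockForConfiguration()
                    device.videoZoomFactor = min(2.0, device.activeFormat.videoMaxZoomFactor)
                    device.unlockForConfiguration()
                } catch {
                    print("Could not lock device for configuration: \(error)")
                }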