使用 AVFoundation 在 Swift 3.0 中扫描条形码或二维码
Scanning Barcode or QR code in Swift 3.0 using AVFoundation
我正在关注此 tutorial 并尝试将代码从 Swift 2.0 转换为 3.0。但是当我启动该应用程序时,该应用程序无法运行!我的意思是,什么都没有发生!这是我的代码:
ViewController:
// Hosts the scan result; receives codes from BarcodeViewController via BarcodeDelegate.
class ViewController: UIViewController ,BarcodeDelegate {
// Wires self up as the scanner's delegate just before the segue presents it.
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
let barcodeViewController: BarcodeViewController = segue.destination as! BarcodeViewController
barcodeViewController.delegate = self
}
// Delegate callback: shows the scanned barcode string.
// NOTE(review): `codeTextView` is not declared in this snippet — presumably an
// @IBOutlet omitted from the quoted code; confirm against the full project.
func barcodeReaded(barcode: String) {
codeTextView.text = barcode
print(barcode)
}
}
条码VC:
import AVFoundation
// Hands a scanned barcode string back to the presenting controller.
protocol BarcodeDelegate {
func barcodeReaded(barcode: String)
}
// Scanner screen from the question (Swift 2-style code being ported to Swift 3).
// NOTE(review): the class's closing brace is missing from the quoted snippet.
class BarcodeViewController: UIViewController,AVCaptureMetadataOutputObjectsDelegate {
var delegate: BarcodeDelegate?
var captureSession: AVCaptureSession!
var code: String?
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
print("works")
self.captureSession = AVCaptureSession();
// Swift 3-era API; later deprecated in favor of AVCaptureDevice.default(for: .video).
let videoCaptureDevice: AVCaptureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
do {
let videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
if self.captureSession.canAddInput(videoInput) {
self.captureSession.addInput(videoInput)
} else {
print("Could not add video input")
}
let metadataOutput = AVCaptureMetadataOutput()
if self.captureSession.canAddOutput(metadataOutput) {
self.captureSession.addOutput(metadataOutput)
// Metadata callbacks are delivered on the main queue.
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [AVMetadataObjectTypeQRCode,AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypePDF417Code]
} else {
print("Could not add metadata output")
}
let previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
previewLayer?.frame = self.view.layer.bounds
self.view.layer .addSublayer(previewLayer!)
self.captureSession.startRunning()
} catch let error as NSError {
print("Error while creating vide input device: \(error.localizedDescription)")
}
}
// I think this method is never called!
// NOTE(review): in Swift 3 the delegate method is
// captureOutput(_:didOutputMetadataObjects:from:); this method is `private` and keeps
// the Swift 2 argument labels, so AVFoundation never matches the selector — which is
// why "nothing happens" when a code is in front of the camera.
private func captureOutput(captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [AnyObject]!, fromConnection connection: AVCaptureConnection!) {
// This is the delegate's method that is called when a code is read.
for metadata in metadataObjects {
let readableObject = metadata as! AVMetadataMachineReadableCodeObject
let code = readableObject.stringValue
// If the code is not empty, it is ready and we call our delegate to pass it on.
if code!.isEmpty {
print("is empty")
}else {
self.captureSession.stopRunning()
self.dismiss(animated: true, completion: nil)
self.delegate?.barcodeReaded(barcode: code!)
}
}
}
这是输出:
2016-09-17 18:10:26.000919 BarcodeScaning[2610:674253] [MC] System group container for systemgroup.com.apple.configurationprofiles path is /private/var/containers/Shared/SystemGroup/systemgroup.com.apple.configurationprofiles
2016-09-17 18:10:26.007782 BarcodeScaning[2610:674253] [MC] Reading from public effective user settings.
您需要将 NSCameraUsageDescription 添加到 Info.plist 文件中才能使其正常工作!
只需在 info.plist 中添加一行并在 NSCameraUsageDescription 中输入新创建的行并添加一个字符串 用于通知用户为什么需要在您的应用中访问相机。
这应该可以解决问题!
第一步需要声明对任何用户私有数据类型的访问权限,这是 iOS10 中的新要求。您可以通过将使用密钥添加到您应用的 Info.plist
中来实现带有目的字符串。
因为如果您使用以下框架之一并且未能声明使用,您的应用程序将在首次访问时崩溃:
Contacts, Calendar, Reminders, Photos, Bluetooth Sharing, Microphone, Camera, Location, Health, HomeKit, Media Library, Motion, CallKit, Speech Recognition, SiriKit, TV Provider.
为避免崩溃,您需要将建议的密钥添加到 Info.plist
:
然后系统在要求用户允许访问时显示目的字符串:
有关它的更多信息,您可以阅读这篇文章:
我对您的 BarcodeViewController
做了一些修改,使其正常工作,如下所示:
BarcodeViewController
import UIKit
import AVFoundation
// Hands a scanned barcode string back to the presenting controller.
protocol BarcodeDelegate {
func barcodeReaded(barcode: String)
}
// Working Swift 3 version of the scanner from the accepted answer.
class BarcodeViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
var delegate: BarcodeDelegate?
// NOTE(review): `videoCaptureDevice` and `device` both grab the default video device,
// and `device`/`output` appear unused in this snippet — confirm before trimming.
var videoCaptureDevice: AVCaptureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
var device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
var output = AVCaptureMetadataOutput()
var previewLayer: AVCaptureVideoPreviewLayer?
var captureSession = AVCaptureSession()
var code: String?
override func viewDidLoad() {
super.viewDidLoad()
self.view.backgroundColor = UIColor.clear
self.setupCamera()
}
// Builds the capture pipeline: camera input -> preview layer -> metadata output.
private func setupCamera() {
let input = try? AVCaptureDeviceInput(device: videoCaptureDevice)
if self.captureSession.canAddInput(input) {
self.captureSession.addInput(input)
}
self.previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
if let videoPreviewLayer = self.previewLayer {
videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
videoPreviewLayer.frame = self.view.bounds
view.layer.addSublayer(videoPreviewLayer)
}
let metadataOutput = AVCaptureMetadataOutput()
if self.captureSession.canAddOutput(metadataOutput) {
self.captureSession.addOutput(metadataOutput)
// Metadata callbacks are delivered on the main queue.
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [AVMetadataObjectTypeQRCode, AVMetadataObjectTypeEAN13Code]
} else {
print("Could not add metadata output")
}
}
// Start/stop the session only while the view is actually on screen.
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if (captureSession.isRunning == false) {
captureSession.startRunning();
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession.isRunning == true) {
captureSession.stopRunning();
}
}
// Correct Swift 3 delegate signature — this is why this version receives callbacks
// while the question's `private` Swift 2-style method did not.
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
// This is the delegate's method that is called when a code is read
for metadata in metadataObjects {
let readableObject = metadata as! AVMetadataMachineReadableCodeObject
let code = readableObject.stringValue
self.dismiss(animated: true, completion: nil)
self.delegate?.barcodeReaded(barcode: code!)
print(code!)
}
}
}
重要的一点是在 viewWillAppear(:)
和 viewWillDisappear(:)
方法中声明全局变量并启动和停止 captureSession
。在您之前的代码中,我认为它根本没有被调用,因为它从未进入处理条形码的方法。
希望对你有所帮助。
// Swift 3 signature for the metadata delegate callback (a different answer's snippet).
// NOTE(review): `captureSession` and the delegate's `gotQRCode(code:)` are declared
// elsewhere in that answer's class and are not shown here.
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
print("caught QR code")
for metadata in metadataObjects {
let readableObject = metadata as! AVMetadataMachineReadableCodeObject
let code = readableObject.stringValue
if code!.isEmpty {
print("is empty")
} else {
self.captureSession.stopRunning()
self.dismiss(animated: true, completion: nil)
self.delegate?.gotQRCode(code: code!)
}
}
}
看起来方法的签名在 Swift 3 中发生了一些变化。这是正确的版本
此处更新为 Swift 4 版本:没有强制解包、使用弱协议(weak delegate)、在后台线程中执行昂贵的操作,并包含其他改进。
注意 AVCaptureMetadataOutputObjectsDelegate
的方法从
captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!)
到
metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection)
import UIKit
import AVFoundation
// Class-bound so the controller can hold the delegate weakly and avoid a retain cycle.
protocol BarcodeDelegate: class {
func barcodeRead(barcode: String)
}
// Swift 4 version: no force unwraps, weak delegate, session work kept off the main thread.
class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
weak var delegate: BarcodeDelegate?
var output = AVCaptureMetadataOutput()
var previewLayer: AVCaptureVideoPreviewLayer!
var captureSession = AVCaptureSession()
override func viewDidLoad() {
super.viewDidLoad()
setupCamera()
}
// startRunning()/stopRunning() are blocking calls, so they are dispatched off main.
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
DispatchQueue.global(qos: .userInitiated).async {
if !self.captureSession.isRunning {
self.captureSession.startRunning()
}
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
DispatchQueue.global(qos: .userInitiated).async {
if self.captureSession.isRunning {
self.captureSession.stopRunning()
}
}
}
// Configures input, metadata output, and preview layer; returns quietly if no camera
// (e.g. simulator) or if input creation fails.
fileprivate func setupCamera() {
guard let device = AVCaptureDevice.default(for: .video),
let input = try? AVCaptureDeviceInput(device: device) else {
return
}
DispatchQueue.global(qos: .userInitiated).async {
if self.captureSession.canAddInput(input) {
self.captureSession.addInput(input)
}
let metadataOutput = AVCaptureMetadataOutput()
if self.captureSession.canAddOutput(metadataOutput) {
self.captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: .global(qos: .userInitiated))
// Only request types the output actually supports on this device.
if Set([.qr, .ean13]).isSubset(of: metadataOutput.availableMetadataObjectTypes) {
metadataOutput.metadataObjectTypes = [.qr, .ean13]
}
} else {
print("Could not add metadata output")
}
self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
self.previewLayer.videoGravity = .resizeAspectFill
// Layer geometry must be touched on the main thread.
DispatchQueue.main.async {
self.previewLayer.frame = self.view.bounds
self.view.layer.addSublayer(self.previewLayer)
}
}
}
// Swift 4 delegate method (renamed from captureOutput(_:didOutputMetadataObjects:from:)).
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
// This is the delegate's method that is called when a code is read
for metadata in metadataObjects {
if let readableObject = metadata as? AVMetadataMachineReadableCodeObject,
let code = readableObject.stringValue {
dismiss(animated: true)
delegate?.barcodeRead(barcode: code)
print(code)
}
}
}
}
Swift4 中的条码扫描器适用于所有代码类型
下面根据iOS中的条码扫描,分享一些心得。
- 将条码扫描器逻辑与视图逻辑分开,
- 在 .plist 文件中添加条目
- 设置
exposurePointOfInterest
和focusPointOfInterest
- 使用正确转换的 CGRect
设置 rectOfInterests
- 设置
focusMode
和exposureMode
- 在更改相机捕获设置时使用
lockForConfiguration
正确锁定 captureDevice
在 .plist 文件中添加条目
在 Info.plist 文件中添加以下代码以允许您的应用程序访问 iPhone 的相机:
<key>NSCameraUsageDescription</key>
<string>Allow access to camera</string>
设置 exposurePointOfInterest 和 focusPointOfInterest
exposurePointOfInterest
和 focusPointOfInterest
允许更好的扫描质量,更快地将相机聚焦在屏幕的中心点上。
设置 rectOfInterests
这 属性 让相机只聚焦在屏幕的一部分。这样可以更快地扫描代码,只关注屏幕中央显示的代码 - 这是有用的,而后台几乎没有其他代码可用。
设置对焦模式和曝光模式
属性应设置如下:
device.focusMode = .continuousAutoFocus
device.exposureMode = .continuousAutoExposure
这允许连续对焦并设置曝光以扫描代码。
演示
在这里你可以找到实现这个想法的现成项目:
https://github.com/lukszar/QuickScanner
我正在关注此 tutorial 并尝试将代码从 Swift 2.0 转换为 3.0。但是当我启动该应用程序时,该应用程序无法运行!我的意思是,什么都没有发生!这是我的代码:
ViewController:
// Hosts the scan result; receives codes from BarcodeViewController via BarcodeDelegate.
class ViewController: UIViewController ,BarcodeDelegate {
// Wires self up as the scanner's delegate just before the segue presents it.
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
let barcodeViewController: BarcodeViewController = segue.destination as! BarcodeViewController
barcodeViewController.delegate = self
}
// Delegate callback: shows the scanned barcode string.
// NOTE(review): `codeTextView` is not declared in this snippet — presumably an
// @IBOutlet omitted from the quoted code; confirm against the full project.
func barcodeReaded(barcode: String) {
codeTextView.text = barcode
print(barcode)
}
}
条码VC:
import AVFoundation
// Hands a scanned barcode string back to the presenting controller.
protocol BarcodeDelegate {
func barcodeReaded(barcode: String)
}
// Scanner screen from the question (Swift 2-style code being ported to Swift 3).
// NOTE(review): the class's closing brace is missing from the quoted snippet.
class BarcodeViewController: UIViewController,AVCaptureMetadataOutputObjectsDelegate {
var delegate: BarcodeDelegate?
var captureSession: AVCaptureSession!
var code: String?
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
print("works")
self.captureSession = AVCaptureSession();
// Swift 3-era API; later deprecated in favor of AVCaptureDevice.default(for: .video).
let videoCaptureDevice: AVCaptureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
do {
let videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
if self.captureSession.canAddInput(videoInput) {
self.captureSession.addInput(videoInput)
} else {
print("Could not add video input")
}
let metadataOutput = AVCaptureMetadataOutput()
if self.captureSession.canAddOutput(metadataOutput) {
self.captureSession.addOutput(metadataOutput)
// Metadata callbacks are delivered on the main queue.
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [AVMetadataObjectTypeQRCode,AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypePDF417Code]
} else {
print("Could not add metadata output")
}
let previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
previewLayer?.frame = self.view.layer.bounds
self.view.layer .addSublayer(previewLayer!)
self.captureSession.startRunning()
} catch let error as NSError {
print("Error while creating vide input device: \(error.localizedDescription)")
}
}
// I think this method is never called!
// NOTE(review): in Swift 3 the delegate method is
// captureOutput(_:didOutputMetadataObjects:from:); this method is `private` and keeps
// the Swift 2 argument labels, so AVFoundation never matches the selector — which is
// why "nothing happens" when a code is in front of the camera.
private func captureOutput(captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [AnyObject]!, fromConnection connection: AVCaptureConnection!) {
// This is the delegate's method that is called when a code is read.
for metadata in metadataObjects {
let readableObject = metadata as! AVMetadataMachineReadableCodeObject
let code = readableObject.stringValue
// If the code is not empty, it is ready and we call our delegate to pass it on.
if code!.isEmpty {
print("is empty")
}else {
self.captureSession.stopRunning()
self.dismiss(animated: true, completion: nil)
self.delegate?.barcodeReaded(barcode: code!)
}
}
}
这是输出:
2016-09-17 18:10:26.000919 BarcodeScaning[2610:674253] [MC] System group container for systemgroup.com.apple.configurationprofiles path is /private/var/containers/Shared/SystemGroup/systemgroup.com.apple.configurationprofiles 2016-09-17 18:10:26.007782 BarcodeScaning[2610:674253] [MC] Reading from public effective user settings.
您需要将 NSCameraUsageDescription 添加到 Info.plist 文件中才能使其正常工作!
只需在 info.plist 中添加一行并在 NSCameraUsageDescription 中输入新创建的行并添加一个字符串 用于通知用户为什么需要在您的应用中访问相机。
这应该可以解决问题!
第一步需要声明对任何用户私有数据类型的访问权限,这是 iOS10 中的新要求。您可以通过将使用密钥添加到您应用的 Info.plist
中来实现带有目的字符串。
因为如果您使用以下框架之一并且未能声明使用,您的应用程序将在首次访问时崩溃:
Contacts, Calendar, Reminders, Photos, Bluetooth Sharing, Microphone, Camera, Location, Health, HomeKit, Media Library, Motion, CallKit, Speech Recognition, SiriKit, TV Provider.
为避免崩溃,您需要将建议的密钥添加到 Info.plist
:
然后系统在要求用户允许访问时显示目的字符串:
有关它的更多信息,您可以阅读这篇文章:
我对您的 BarcodeViewController
做了一些修改,使其正常工作,如下所示:
BarcodeViewController
import UIKit
import AVFoundation
// Hands a scanned barcode string back to the presenting controller.
protocol BarcodeDelegate {
func barcodeReaded(barcode: String)
}
// Working Swift 3 version of the scanner from the accepted answer.
class BarcodeViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
var delegate: BarcodeDelegate?
// NOTE(review): `videoCaptureDevice` and `device` both grab the default video device,
// and `device`/`output` appear unused in this snippet — confirm before trimming.
var videoCaptureDevice: AVCaptureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
var device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
var output = AVCaptureMetadataOutput()
var previewLayer: AVCaptureVideoPreviewLayer?
var captureSession = AVCaptureSession()
var code: String?
override func viewDidLoad() {
super.viewDidLoad()
self.view.backgroundColor = UIColor.clear
self.setupCamera()
}
// Builds the capture pipeline: camera input -> preview layer -> metadata output.
private func setupCamera() {
let input = try? AVCaptureDeviceInput(device: videoCaptureDevice)
if self.captureSession.canAddInput(input) {
self.captureSession.addInput(input)
}
self.previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
if let videoPreviewLayer = self.previewLayer {
videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
videoPreviewLayer.frame = self.view.bounds
view.layer.addSublayer(videoPreviewLayer)
}
let metadataOutput = AVCaptureMetadataOutput()
if self.captureSession.canAddOutput(metadataOutput) {
self.captureSession.addOutput(metadataOutput)
// Metadata callbacks are delivered on the main queue.
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [AVMetadataObjectTypeQRCode, AVMetadataObjectTypeEAN13Code]
} else {
print("Could not add metadata output")
}
}
// Start/stop the session only while the view is actually on screen.
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if (captureSession.isRunning == false) {
captureSession.startRunning();
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession.isRunning == true) {
captureSession.stopRunning();
}
}
// Correct Swift 3 delegate signature — this is why this version receives callbacks
// while the question's `private` Swift 2-style method did not.
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
// This is the delegate's method that is called when a code is read
for metadata in metadataObjects {
let readableObject = metadata as! AVMetadataMachineReadableCodeObject
let code = readableObject.stringValue
self.dismiss(animated: true, completion: nil)
self.delegate?.barcodeReaded(barcode: code!)
print(code!)
}
}
}
重要的一点是在 viewWillAppear(:)
和 viewWillDisappear(:)
方法中声明全局变量并启动和停止 captureSession
。在您之前的代码中,我认为它根本没有被调用,因为它从未进入处理条形码的方法。
希望对你有所帮助。
// Swift 3 signature for the metadata delegate callback (a different answer's snippet).
// NOTE(review): `captureSession` and the delegate's `gotQRCode(code:)` are declared
// elsewhere in that answer's class and are not shown here.
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
print("caught QR code")
for metadata in metadataObjects {
let readableObject = metadata as! AVMetadataMachineReadableCodeObject
let code = readableObject.stringValue
if code!.isEmpty {
print("is empty")
} else {
self.captureSession.stopRunning()
self.dismiss(animated: true, completion: nil)
self.delegate?.gotQRCode(code: code!)
}
}
}
看起来方法的签名在 Swift 3 中发生了一些变化。这是正确的版本
此处更新为 Swift 4 版本:没有强制解包、使用弱协议(weak delegate)、在后台线程中执行昂贵的操作,并包含其他改进。
注意 AVCaptureMetadataOutputObjectsDelegate
的方法从
captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!)
到
metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection)
import UIKit
import AVFoundation
// Class-bound so the controller can hold the delegate weakly and avoid a retain cycle.
protocol BarcodeDelegate: class {
func barcodeRead(barcode: String)
}
// Swift 4 version: no force unwraps, weak delegate, session work kept off the main thread.
class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
weak var delegate: BarcodeDelegate?
var output = AVCaptureMetadataOutput()
var previewLayer: AVCaptureVideoPreviewLayer!
var captureSession = AVCaptureSession()
override func viewDidLoad() {
super.viewDidLoad()
setupCamera()
}
// startRunning()/stopRunning() are blocking calls, so they are dispatched off main.
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
DispatchQueue.global(qos: .userInitiated).async {
if !self.captureSession.isRunning {
self.captureSession.startRunning()
}
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
DispatchQueue.global(qos: .userInitiated).async {
if self.captureSession.isRunning {
self.captureSession.stopRunning()
}
}
}
// Configures input, metadata output, and preview layer; returns quietly if no camera
// (e.g. simulator) or if input creation fails.
fileprivate func setupCamera() {
guard let device = AVCaptureDevice.default(for: .video),
let input = try? AVCaptureDeviceInput(device: device) else {
return
}
DispatchQueue.global(qos: .userInitiated).async {
if self.captureSession.canAddInput(input) {
self.captureSession.addInput(input)
}
let metadataOutput = AVCaptureMetadataOutput()
if self.captureSession.canAddOutput(metadataOutput) {
self.captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: .global(qos: .userInitiated))
// Only request types the output actually supports on this device.
if Set([.qr, .ean13]).isSubset(of: metadataOutput.availableMetadataObjectTypes) {
metadataOutput.metadataObjectTypes = [.qr, .ean13]
}
} else {
print("Could not add metadata output")
}
self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
self.previewLayer.videoGravity = .resizeAspectFill
// Layer geometry must be touched on the main thread.
DispatchQueue.main.async {
self.previewLayer.frame = self.view.bounds
self.view.layer.addSublayer(self.previewLayer)
}
}
}
// Swift 4 delegate method (renamed from captureOutput(_:didOutputMetadataObjects:from:)).
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
// This is the delegate's method that is called when a code is read
for metadata in metadataObjects {
if let readableObject = metadata as? AVMetadataMachineReadableCodeObject,
let code = readableObject.stringValue {
dismiss(animated: true)
delegate?.barcodeRead(barcode: code)
print(code)
}
}
}
}
Swift4 中的条码扫描器适用于所有代码类型
下面根据iOS中的条码扫描,分享一些心得。
- 将条码扫描器逻辑与视图逻辑分开,
- 在 .plist 文件中添加条目
- 设置
exposurePointOfInterest
和focusPointOfInterest
- 使用正确转换的 CGRect 设置 rectOfInterests
- 设置
focusMode
和exposureMode
- 在更改相机捕获设置时使用
lockForConfiguration
正确锁定 captureDevice
在 .plist 文件中添加条目
在 Info.plist 文件中添加以下代码以允许您的应用程序访问 iPhone 的相机:
<key>NSCameraUsageDescription</key>
<string>Allow access to camera</string>
设置 exposurePointOfInterest 和 focusPointOfInterest
exposurePointOfInterest
和 focusPointOfInterest
允许更好的扫描质量,更快地将相机聚焦在屏幕的中心点上。
设置 rectOfInterests
这 属性 让相机只聚焦在屏幕的一部分。这样可以更快地扫描代码,只关注屏幕中央显示的代码 - 这是有用的,而后台几乎没有其他代码可用。
设置对焦模式和曝光模式 属性应设置如下:
device.focusMode = .continuousAutoFocus
device.exposureMode = .continuousAutoExposure
这允许连续对焦并设置曝光以扫描代码。
演示
在这里你可以找到实现这个想法的现成项目: https://github.com/lukszar/QuickScanner