Xcode 9/Swift 4 AVCaptureMetadataOutput setMetadataObjectTypes: use availableMetadataObjectTypes
There seem to be a lot of questions similar to the one I'm running into:
AVmetadata changes with swift 4 xcode 9
AVCaptureMetadataOutput setMetadataObjectTypes unsupported type found
There is also an Apple bug report dealing with AVFoundation:
https://forums.developer.apple.com/thread/86810#259270
But none of them really seem to be the answer for me.
My code ran great in Swift 3, but errors only in Swift 4. Applying the solutions from the links above changed nothing at all.
Code:
import UIKit
import AVFoundation

class BarCodeScanViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    weak var delegate: FlowControllerDelegate?

    var captureSession: AVCaptureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer()

    override func viewDidLoad() {
        super.viewDidLoad()

        view.backgroundColor = UIColor.black
        captureSession = AVCaptureSession()

        // Grab the default video capture device (the back camera on most devices).
        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }

        let videoInput: AVCaptureDeviceInput
        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            return
        }

        if captureSession.canAddInput(videoInput) {
            captureSession.addInput(videoInput)
        } else {
            failed()
            return
        }

        // let captureMetadataOutput = AVCaptureMetadataOutput()
        let metadataOutput = AVCaptureMetadataOutput()

        if captureSession.canAddOutput(metadataOutput) {
            captureSession.addOutput(metadataOutput)

            // Check status of camera permissions
            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)

            // metadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.upce]
            metadataOutput.metadataObjectTypes = [.ean8, .ean13, .pdf417, .upce]
        } else {
            failed()
            return
        }

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(previewLayer)

        captureSession.startRunning()
    }

    func failed() {
        let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
        ac.addAction(UIAlertAction(title: "OK", style: .default))
        present(ac, animated: true)
        // captureSession = nil
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        if captureSession.isRunning == false {
            captureSession.startRunning()
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        if captureSession.isRunning == true {
            captureSession.stopRunning()
        }

        super.viewWillDisappear(animated)
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
        captureSession.stopRunning()

        if let metadataObject = metadataObjects.first {
            guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
            guard let stringValue = readableObject.stringValue else { return }

            AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
            found(code: stringValue)
        }

        dismiss(animated: true)
    }

    func found(code: String) {
        print(code)
    }

    override var prefersStatusBarHidden: Bool {
        return true
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }
}
When I build this code in Xcode 8 with Swift 3, it works fine. When I run it in Xcode 9 with Swift 4, it crashes when setting the metadata object types:
metadataOutput.metadataObjectTypes = [.ean8, .ean13, .pdf417, .upce]
In both cases I am building to an iOS 11 device that has not previously had a beta on it.
I have tried the "__" to see if it was the Apple bug mentioned above.
If I comment out that line, the code runs, but nothing is captured.
Has Apple introduced some other bug? Has anyone else run into this problem?
Any help would be greatly appreciated.
Thanks
More information for clarity:
Leevi Graham is correct that Apple changed the stack without proper documentation, which made it look like there was a bug.
To clarify the comments that helped me:
The delegate callback has changed from:
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!)
to:
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection)
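For reference, here is a minimal sketch of what adopting the new callback looks like in the controller above; the body simply mirrors the original captureOutput(_:didOutputMetadataObjects:from:) implementation:
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    // Stop the session so the same code is not reported repeatedly.
    captureSession.stopRunning()

    if let metadataObject = metadataObjects.first {
        guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject,
              let stringValue = readableObject.stringValue else { return }

        // Vibrate as feedback and hand the scanned string off.
        AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
        found(code: stringValue)
    }

    dismiss(animated: true)
}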
However, the real issue I was running into is that you no longer set a long list of types for metadataObjectTypes. You now simply set all of the available types:
metadataOutput.metadataObjectTypes = metadataOutput.availableMetadataObjectTypes
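In the viewDidLoad above, that means the output configuration ends up looking something like this (a sketch; note that availableMetadataObjectTypes is only populated after the output has been added to a session with a video input, so it is set after addOutput(_:)):
if captureSession.canAddOutput(metadataOutput) {
    captureSession.addOutput(metadataOutput)

    metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)

    // Ask for every type the session can currently deliver instead of a hard-coded list.
    metadataOutput.metadataObjectTypes = metadataOutput.availableMetadataObjectTypes
} else {
    failed()
    return
}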
So...
This is really an API issue. Several radars have been filed for it, and Apple has since updated their AVFoundation documentation to address it.