
App Only Crashes for iPhone 6 and iPad on iOS 11 Using Vision & Machine Learning API

I made a real-time translation app that identifies an object and translates it using the user's camera. It works fine on my iPhone 6s and doesn't crash in any simulator, but when I run it on an iPhone 6, it crashes whenever I try to segue to the camera feed. Apple also said that it crashes on the iPad as well.

Do some devices simply not support the Vision API, or is there a problem with my code?

import UIKit
import AVKit
import Vision

var lang = ""
var lang2 = ""


class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCapturePhotoCaptureDelegate {



    @IBAction func screenshotB(_ sender: Any) {
        //screenshot camera screen view
    }
    @IBOutlet weak var screenshotBOutlet: UIButton!
    @IBOutlet weak var swirlyGuy: UIActivityIndicatorView!
    @IBOutlet weak var title1: UILabel!
    @IBOutlet weak var settingsButtonOutlet: UIButton!
    @IBOutlet weak var launchScreen: UIViewX!
    @IBOutlet weak var launchScreenLogo: UIImageView!

    func stopSwirlyGuy(){
        swirlyGuy.stopAnimating()
    }
    let identifierLabel: UILabel = {
        let label = UILabel()
        label.backgroundColor = UIColor(red: 0, green: 0, blue:0, alpha: 0.4)
        label.textColor = .white
        label.textAlignment = .center
        label.translatesAutoresizingMaskIntoConstraints = false
        return label
    }()

    @IBAction func prepareForUnwind(segue: UIStoryboardSegue) {
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        launchScreen.alpha = 1
        launchScreenLogo.alpha = 1
        swirlyGuy.startAnimating()


        // start up the camera

        let captureSession = AVCaptureSession()
        captureSession.sessionPreset = .hd4K3840x2160

        guard let captureDevice = AVCaptureDevice.default(for: .video) else { return }
        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else { return }
        captureSession.addInput(input)

        captureSession.startRunning()

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        view.layer.addSublayer(previewLayer)
        previewLayer.frame = view.frame

        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
        captureSession.addOutput(dataOutput)



        setupIdentifierConfidenceLabel()
        setupSettingsButton()
        setupTitle()
        setupSwirlyGuy()
        setupScreenshot()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        UIView.animate(withDuration: 1.5) {
            self.launchScreen.alpha = 0
            self.launchScreenLogo.alpha = 0
        }

    }
    fileprivate func setupSettingsButton() {

        view.addSubview(settingsButtonOutlet)
    }

    fileprivate func setupScreenshot() {

        view.addSubview(screenshotBOutlet)
    }

    fileprivate func setupSwirlyGuy() {

        view.addSubview(swirlyGuy)
    }

    fileprivate func setupTitle() {

        view.addSubview(title1)
    }

    fileprivate func setupIdentifierConfidenceLabel() {
        view.addSubview(identifierLabel)
        identifierLabel.bottomAnchor.constraint(equalTo: view.bottomAnchor).isActive = true
        identifierLabel.leftAnchor.constraint(equalTo: view.leftAnchor).isActive = true
        identifierLabel.rightAnchor.constraint(equalTo: view.rightAnchor).isActive = true
        identifierLabel.heightAnchor.constraint(equalToConstant: 100).isActive = true
        identifierLabel.numberOfLines = 0
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        //        print("Camera was able to capture a frame:", Date())

        guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        // Load the Core ML model (note: this is re-created on every frame;
        // it could be built once and stored in a property instead)
        guard let model = try? VNCoreMLModel(for: Resnet50().model) else { return }
        let request = VNCoreMLRequest(model: model) { (finishedReq, err) in

            //perhaps check the err

            //            print(finishedReq.results)

            guard let results = finishedReq.results as? [VNClassificationObservation] else { return }

            guard let firstObservation = results.first else { return }

            print(firstObservation.identifier, firstObservation.confidence)

            let x = (firstObservation.confidence)

            let y = (x * 10000).rounded() / 10000

            let z = (firstObservation.identifier)

            let s = (self.translateSpanish(object1: firstObservation.identifier))

            let f = (self.translateFrench(object1: firstObservation.identifier))
            //      var lang = ""
            //      var lang2 = ""

            if language == "English" {
                lang = z
            } else if language == "Spanish" {
                lang = s
            } else {
                lang = f
            }

            if language2 == "Spanish" {
                lang2 = s
            } else if language2 == "English" {
                lang2 = z
            } else {
                lang2 = f
            }

            DispatchQueue.main.async {
                self.identifierLabel.text = "\(lang)" + " = " + "\(lang2) \n \(y * 100)% accuracy"
                self.stopSwirlyGuy()
            }
        }

        try? VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:]).perform([request])
    }

    // Translation functions omitted for brevity
}

This is the code for the view controller that leads into the main screen, where the camera feed and Vision processing take place.

import UIKit

class FirstLaunchViewController: UIViewController {

    @IBOutlet weak var title1: UILabelX!
    @IBOutlet weak var logo1: UIImageView!
    @IBOutlet weak var description1: UILabel!
    @IBOutlet weak var buttonOutlet: UIButtonX!
    @IBOutlet weak var initialBackground: UIViewX!
    @IBOutlet weak var initialLogo: UIImageView!

    @IBAction func toVC(_ sender: Any) {
        UserDefaults.standard.set(false, forKey: "name")
        performSegue(withIdentifier: "toMain", sender: self)
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        initialLogo.alpha = 1
        initialBackground.alpha = 1
        title1.alpha = 0
        logo1.alpha = 0
        description1.alpha = 0
        buttonOutlet.alpha = 0

        // Do any additional setup after loading the view.
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        UIView.animate(withDuration: 1.5, animations: {
            self.initialLogo.alpha = 0
            self.initialBackground.alpha = 0
        }) { _ in
            self.initialBackgroundGone()
        }
    }

    func initialBackgroundGone() {
        UIView.animate(withDuration: 1.5, animations: {
            self.title1.alpha = 1
        }) { _ in
            self.showBackgroundAgain()
        }
    }

    func showBackgroundAgain() {
        UIView.animate(withDuration: 1.3, animations: {
            self.logo1.alpha = 1
        }) { _ in
            self.showTitle()
        }
    }

    func showTitle() {
        UIView.animate(withDuration: 1.5, animations: {
            self.description1.alpha = 1
        }) { _ in
            self.showEverythingElse()
        }
    }

    func showEverythingElse() {
        UIView.animate(withDuration: 3.5) {
            self.buttonOutlet.alpha = 1
        }
    }
}

There is a lot of code here, but I think your problem comes from the video preset you are using, since the iPhone 6 does not support 4K video recording.

When setting the session preset, you should check that it is supported by all the devices you target:

if captureSession.canSetSessionPreset(.hd4K3840x2160) {
    captureSession.sessionPreset = .hd4K3840x2160
} else {
    captureSession.sessionPreset = .high // or any other preset that suits your needs
}
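
More generally, you can keep a list of presets ordered from most to least desirable and take the first one the session accepts. Here is a minimal sketch of that idea; the helper name and the fallback order are my own choices, not part of the original answer:

import AVFoundation

// Returns the first preset in the preferred list that the session supports.
// The order below is an assumption: 4K first, then 1080p, 720p, and finally
// .high, which every device supports.
func bestSupportedPreset(for session: AVCaptureSession) -> AVCaptureSession.Preset {
    let preferred: [AVCaptureSession.Preset] = [
        .hd4K3840x2160,   // not supported on iPhone 6, so it falls through here
        .hd1920x1080,
        .hd1280x720,
        .high
    ]
    return preferred.first(where: session.canSetSessionPreset) ?? .high
}

// Usage:
// captureSession.sessionPreset = bestSupportedPreset(for: captureSession)

A lower preset also means smaller frames arriving in captureOutput, which tends to help older devices keep up with the per-frame Vision work.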