How to make the camera background darker outside the rectOfInterest in Swift 5, Xcode 11

I want to build a barcode scanner app. Since I'm new to Swift and Xcode, I managed to get help from other Stack Overflow posts (and this one) and created a page where I can scan a barcode inside the rectOfInterestArea (with corner marks) instead of the full camera view. However, I'm struggling to make the rest of the view darker than the rectOfInterestArea: my entire view gets darkened instead. I'm not sure what I'm doing wrong. Here is the result:


I'm basically trying to achieve something like this:

Below is my code:

import AVFoundation
import UIKit

class TestViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {


    var captureSession: AVCaptureSession!
    var previewLayer: AVCaptureVideoPreviewLayer!
    var rectOfInterestArea = UIView()
    var darkView = UIView()

    var scanRect:CGRect = CGRect(x: 0, y: 0, width: 0, height: 0)
    let metadataOutput = AVCaptureMetadataOutput()


    override func viewDidLoad() {
        super.viewDidLoad()


        captureSession = AVCaptureSession()

        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
        let videoInput: AVCaptureDeviceInput

        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            return
        }

        if (captureSession.canAddInput(videoInput)) {
            captureSession.addInput(videoInput)
        } else {
            failed()
            return
        }

        if (captureSession.canAddOutput(metadataOutput)) {
            captureSession.addOutput(metadataOutput)

            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            metadataOutput.metadataObjectTypes = [.qr]
        } else {
            failed()
            return
        }

        // 300×300 scan area, horizontally centered, 150 pt from the top
        let size = 300
        let screenWidth = self.view.frame.size.width
        let xPos = (CGFloat(screenWidth) / CGFloat(2)) - (CGFloat(size) / CGFloat(2))
        scanRect = CGRect(x: Int(xPos), y: 150, width: size, height: size)

        rectOfInterestArea.frame = scanRect

        view.addSubview(rectOfInterestArea)

        print(rectOfInterestArea.frame.size.height, " ", rectOfInterestArea.frame.size.width )

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill

        view.layer.addSublayer(previewLayer)

        print(previewLayer.frame.size.height, " ", previewLayer.frame.size.width )

        // re-added so the corner view sits above the preview layer
        view.addSubview(rectOfInterestArea)


        captureSession.startRunning()
        // convert scanRect from the preview layer's coordinate space to the
        // metadata output's normalized coordinate space
        metadataOutput.rectOfInterest = previewLayer.metadataOutputRectConverted(fromLayerRect: scanRect)


    }


    func failed() {
        let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
        ac.addAction(UIAlertAction(title: "OK", style: .default))
        present(ac, animated: true)
        captureSession = nil
    }



    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        self.rectOfInterestArea.layer.addSublayer(self.createFrame())
        if (captureSession?.isRunning == false) {
            captureSession.startRunning()
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        if (captureSession?.isRunning == true) {
            captureSession.stopRunning()
        }
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        captureSession.stopRunning()

        if let metadataObject = metadataObjects.first {
            guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
            guard let stringValue = readableObject.stringValue else { return }

            found(code: stringValue)
        }

        dismiss(animated: true)
    }

    func found(code: String) {
        print(code)
    }

    override var prefersStatusBarHidden: Bool {
        return true
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }

    // Draws the four corner marks. Note that the path below mixes width and
    // height, which only works because the scan area is square (300×300).
    func createFrame() -> CAShapeLayer {
        let height: CGFloat = self.rectOfInterestArea.frame.size.height
        let width: CGFloat = self.rectOfInterestArea.frame.size.width
        print(height, " ", width)
        //let h = previewLayer.frame.size.height
        let path = UIBezierPath()
        path.move(to: CGPoint(x: 5, y: 50))
        path.addLine(to: CGPoint(x: 5, y: 5))
        path.addLine(to: CGPoint(x: 50, y: 5))
        path.move(to: CGPoint(x: height - 55, y: 5))
        path.addLine(to: CGPoint(x: height - 5, y: 5))
        path.addLine(to: CGPoint(x: height - 5, y: 55))
        path.move(to: CGPoint(x: 5, y: width - 55))
        path.addLine(to: CGPoint(x: 5, y: width - 5))
        path.addLine(to: CGPoint(x: 55, y: width - 5))
        path.move(to: CGPoint(x: width - 55, y: height - 5))
        path.addLine(to: CGPoint(x: width - 5, y: height - 5))
        path.addLine(to: CGPoint(x: width - 5, y: height - 55))
        let shape = CAShapeLayer()
        shape.path = path.cgPath
        shape.strokeColor = UIColor.white.cgColor
        shape.lineWidth = 5
        shape.fillColor = UIColor.clear.cgColor
        return shape
    }

}

Thanks for any help!

// Build a path that covers the full bounds and also contains the scan rect
let path = CGMutablePath()
path.addRect(bounds)
path.addRect(rectOfInterest)

let maskLayer = CAShapeLayer()
maskLayer.path = path
maskLayer.fillColor = UIColor.black.withAlphaComponent(0.6).cgColor
// .evenOdd leaves the inner rect unfilled, so only the outside is dimmed
maskLayer.fillRule = .evenOdd

addSublayer(maskLayer)

Add the mask layer before you add the corner (edge) layer.
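
Applied to the TestViewController from the question, that could look roughly like the following. This is a minimal sketch under my own assumptions, not code from the question or the repo: createDimmingLayer() is a name I made up, and it reuses the scanRect property that is already set in viewDidLoad.

func createDimmingLayer() -> CAShapeLayer {
    // Outer rect: the whole preview area; inner rect: the scan area.
    let path = CGMutablePath()
    path.addRect(view.bounds)
    path.addRect(scanRect)

    let dimLayer = CAShapeLayer()
    dimLayer.path = path
    dimLayer.fillColor = UIColor.black.withAlphaComponent(0.6).cgColor
    // With .evenOdd, the region covered by both rects (the scan area)
    // stays clear, so only the outside gets the dark fill.
    dimLayer.fillRule = .evenOdd
    return dimLayer
}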

Take a look at this repo for my implementation.
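
In viewDidLoad, the dimming layer would then go right after the preview layer and before the corner view is re-added, so the corner marks stay on top (again a sketch, not necessarily how the repo does it):

view.layer.addSublayer(previewLayer)
view.layer.addSublayer(createDimmingLayer())  // dims everything outside scanRect
view.addSubview(rectOfInterestArea)           // corner marks stay on top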