How to get camera calibration data from the TrueDepth camera on iOS?

Goal: get depth data & calibration data from the TrueDepth camera for computer vision tasks.

I'm confused because, for example, Apple says:

To use depth data for computer vision tasks, use the data in the cameraCalibrationData property to rectify the depth data.

I tried that and got nil, and then while searching Stack Overflow I read:

cameraCalibrationData is always nil in photo, you have to get it from photo.depthData. As long as you're requesting depth data, you'll get the calibration data.
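
In code, that advice looks roughly like the sketch below inside the capture delegate (assuming depth delivery is enabled on the output and in the photo settings; the print statements are just illustrative):

func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    // photo.cameraCalibrationData stays nil here; the calibration travels with the depth data
    guard let depthData = photo.depthData,
          let calibration = depthData.cameraCalibrationData else { return }

    print(calibration.intrinsicMatrix)                     // 3x3 simd matrix: fx, fy, ox, oy
    print(calibration.intrinsicMatrixReferenceDimensions)  // pixel dimensions the intrinsics refer to
    print(calibration.lensDistortionCenter)
}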

So when I try print(photo.depthData) to get the depth and calibration data, my output is:

Optional(hdis 640x480 (high/abs) 
calibration:
{intrinsicMatrix: [2735.35 0.00 2017.75 | 0.00 2735.35 1518.51 | 0.00 0.00 1.00], 
extrinsicMatrix: [1.00 0.00 0.00 0.00 | 0.00 1.00 0.00 0.00 | 0.00 0.00 1.00 0.00] pixelSize:0.001 mm, 
distortionCenter:{2017.75,1518.51}, 
ref:{4032x3024}})

^ But what about the depth data??

Below is my full code:

Note: I'm new to Xcode and I'm used to writing computer vision code in Python, so I apologize in advance for the messy code.

import AVFoundation
import UIKit
import Photos

class ViewController: UIViewController {

    var session: AVCaptureSession?
    let output = AVCapturePhotoOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()
    
    // MARK: - Permission check
    private func checkCameraPermissions() {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { [weak self] granted in
                guard granted else { return }
                DispatchQueue.main.async { self?.setUpCamera() }
            }
        case .restricted:
            break
        case .denied:
            break
        case .authorized:
            setUpCamera()
        @unknown default:
            break
        }
    }
    
    
    // MARK: - camera SETUP
    private func setUpCamera() {
        let session = AVCaptureSession()
        if let captureDevice = AVCaptureDevice.default(.builtInTrueDepthCamera, for: AVMediaType.depthData, position: .unspecified) {
            do {
                let input = try AVCaptureDeviceInput(device: captureDevice)
       
                if session.canAddInput(input) {
                    session.beginConfiguration()
                    session.sessionPreset = .photo
                    session.addInput(input)
                    session.commitConfiguration()
                }
                if session.canAddOutput(output) {
                    session.beginConfiguration()
                    session.addOutput(output)
                    session.commitConfiguration()
                }
                output.isDepthDataDeliveryEnabled = true

                previewLayer.videoGravity = .resizeAspectFill
                previewLayer.session = session
                
                session.startRunning()
                self.session = session
            }
            catch {
                print(error)
            }
        }
    }
    
    
    //MARK: - UI Button
    private let shutterButton: UIButton = {
        let button = UIButton(frame: CGRect(x: 0, y: 0, width: 100, height: 100))
        button.layer.cornerRadius = 50
        button.layer.borderWidth = 10
        button.layer.borderColor = UIColor.white.cgColor
        return button
    }()
    
    //MARK: - Video Preview Setup
    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .black
        view.layer.insertSublayer(previewLayer, at: 0)
        view.addSubview(shutterButton)
        checkCameraPermissions()
        shutterButton.addTarget(self, action: #selector(didTapTakePhoto), for: .touchUpInside)
    }
    
    //MARK: - Video Preview Setup
    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        previewLayer.frame = view.bounds
        shutterButton.center = CGPoint(x: view.frame.size.width/2, y: view.frame.size.height - 100)
    }
    
    //MARK: - Running and Stopping the Session
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        session?.startRunning() // optional chaining: the session may not exist yet if permission is still pending
    }
    
    //MARK: - Running and Stopping the Session
    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        session?.stopRunning()
    }
    
    //MARK: - taking a photo
    @objc private func didTapTakePhoto() {
        let photoSettings = AVCapturePhotoSettings()
        photoSettings.isDepthDataDeliveryEnabled = true
        photoSettings.isDepthDataFiltered = true
        output.capturePhoto(with: photoSettings, delegate: self)
    }
}
extension ViewController: AVCapturePhotoCaptureDelegate {

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        
        guard let data = photo.fileDataRepresentation() else {
            return
        }
        print(data)
        print(photo.depthData)
    
        let image = UIImage(data: data)
        session?.stopRunning()
    
        // ADDING the IMAGE onto the UI
        let imageView = UIImageView(image: image)
        imageView.contentMode = .scaleAspectFill
        imageView.frame = view.bounds
        view.addSubview(imageView)
        
        
        // saving photo to library
        PHPhotoLibrary.requestAuthorization { status in
            guard status == .authorized else { return }
            
            PHPhotoLibrary.shared().performChanges({
                let creationRequest = PHAssetCreationRequest.forAsset()
                creationRequest.addResource(with: .photo, data: data, options: nil)
            }, completionHandler: { _, error in
                if error != nil {
                    print("error")
                }
            })
        }
    }
}

What Vision needs is a CVPixelBuffer (among other options), and you can get one from photo.depthData.depthDataMap:

guard let depthData = photo.depthData else { return } // photo.depthData is Optional; nil unless depth delivery was enabled
let depthBuffer = depthData.depthDataMap // CVPixelBuffer (orientation needs to be handled separately)

if depthData.depthDataQuality == .low {
    print("Low depth quality...")
}

if depthData.depthDataAccuracy == .relative {
    print("Depth data not accurate (relative)")
}
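
Continuing from the depthData above, here is a sketch of reading individual depth values. It assumes you convert to 32-bit float first, since the TrueDepth stream printed in the question is half-float disparity ("hdis"):

// Convert disparity/half-float data to 32-bit depth so the values are in meters.
let floatDepth = depthData.converting(toDepthDataType: kCVPixelFormatType_DepthFloat32)
let buffer = floatDepth.depthDataMap

CVPixelBufferLockBaseAddress(buffer, .readOnly)
defer { CVPixelBufferUnlockBaseAddress(buffer, .readOnly) }

let width = CVPixelBufferGetWidth(buffer)
let height = CVPixelBufferGetHeight(buffer)
let rowBytes = CVPixelBufferGetBytesPerRow(buffer)
let base = CVPixelBufferGetBaseAddress(buffer)!

// Depth (in meters) at the center pixel
let row = base.advanced(by: (height / 2) * rowBytes)
let centerDepth = row.assumingMemoryBound(to: Float32.self)[width / 2]
print("Depth at center: \(centerDepth) m")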
    

To get a UIImage from the CVPixelBuffer, see this answer.
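
If you just need a quick preview, one common route goes through Core Image; this is only a sketch (the helper name is illustrative), and the linked answer covers normalizing the float depth values properly for display:

import CoreImage
import UIKit

func uiImage(from pixelBuffer: CVPixelBuffer) -> UIImage? {
    // CVPixelBuffer -> CIImage -> CGImage -> UIImage
    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
    let context = CIContext()
    guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return nil }
    return UIImage(cgImage: cgImage)
}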