ARKit 图像识别黑白

ARKit image recognized black and white

我在识别图像时遇到问题。正如您从我附上的视频中看到的那样,将所需的图片框起来并给我黑白图像。

这是我正在使用的代码,我的目标是识别特定绘画的图像并在 AR 中以颜色查看它。

PS:识别图片以PNG格式上传到颜色资源

Watch this video.

import UIKit
import SceneKit
import ARKit

class ViewController: UIViewController, ARSCNViewDelegate, ARSessionDelegate {
    // MARK: - IBOutlets

    @IBOutlet weak var sessionInfoLabel: UILabel!
    @IBOutlet weak var sceneView: ARSCNView!

    // MARK: - View Life Cycle

    /// - Tag: StartARSession
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        guard ARWorldTrackingConfiguration.isSupported else {
            fatalError("""
                ARKit is not available on this device. For apps that require ARKit
                for core functionality, use the `arkit` key in the key in the
                `UIRequiredDeviceCapabilities` section of the Info.plist to prevent
                the app from installing. (If the app can't be installed, this error
                can't be triggered in a production scenario.)
                In apps where AR is an additive feature, use `isSupported` to
                determine whether to show UI for launching AR experiences.
            """) // For details, see https://developer.apple.com/documentation/arkit
        }

        // Start the view's AR session with a configuration that uses the rear camera,
        // device position and orientation tracking, and plane detection.
        let configuration = ARWorldTrackingConfiguration()
        configuration.planeDetection = [.horizontal, .vertical]
        sceneView.session.run(configuration)

        let scanningPanel = UIImageView()
        scanningPanel.backgroundColor = UIColor(white: 0.33, alpha: 0.6)
        scanningPanel.layer.masksToBounds = true
        scanningPanel.frame = CGRect(x: -2, y: self.sceneView.frame.height-270, width: 178, height: 50)
        scanningPanel.layer.cornerRadius = 10

        let scanInfo = UILabel(frame: CGRect(x: 8, y: self.sceneView.frame.height-268, width: 160, height: 45))
        scanInfo.textAlignment = .left
        scanInfo.font = scanInfo.font.withSize(15)
        scanInfo.textColor = UIColor.white
        scanInfo.text = "SCAN A SURFACE"

        self.sceneView.addSubview(scanningPanel)
        self.sceneView.addSubview(scanInfo)


        // Set a delegate to track the number of plane anchors for providing UI feedback.
        sceneView.session.delegate = self

        // Prevent the screen from being dimmed after a while as users will likely
        // have long periods of interaction without touching the screen or buttons.
        UIApplication.shared.isIdleTimerDisabled = true

        // Show debug UI to view performance metrics (e.g. frames per second).
        sceneView.showsStatistics = true

        resetTracking()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        // Pause the view's AR session.
        sceneView.session.pause()
    }

    // MARK: - ARSCNViewDelegate

    var session: ARSession {
        return sceneView.session
    }

    enum MessageType {
        case trackingStateEscalation
        case contentPlacement

        static var all: [MessageType] = [
            .trackingStateEscalation,
            .contentPlacement
        ]
    }

    func cancelScheduledMessage(for messageType: MessageType) {
        timers[messageType]?.invalidate()
        timers[messageType] = nil
    }

    @IBOutlet weak private var messagePanel: UIVisualEffectView!

    private var timers: [MessageType: Timer] = [:]
    private var messageHideTimer: Timer?

    private let displayDuration: TimeInterval = 6

    @IBOutlet weak private var messageLabel: UILabel!

    private func setMessageHidden(_ hide: Bool, animated: Bool) {
        // The panel starts out hidden, so show it before animating opacity.
        messagePanel.isHidden = false

        guard animated else {
            messagePanel.alpha = hide ? 0 : 1
            return
        }

        UIView.animate(withDuration: 0.2, delay: 0, options: [.beginFromCurrentState], animations: {
            self.messagePanel.alpha = hide ? 0 : 1
        }, completion: nil)
    }

    func showMessage(_ text: String, autoHide: Bool = true) {
        // Cancel any previous hide timer.
        messageHideTimer?.invalidate()

        messageLabel.text = text

        // Make sure status is showing.
        setMessageHidden(false, animated: true)

        if autoHide {
            messageHideTimer = Timer.scheduledTimer(withTimeInterval: displayDuration, repeats: false, block: { [weak self] _ in
                self?.setMessageHidden(true, animated: true)
            })
        }
    }

    func scheduleMessage(_ text: String, inSeconds seconds: TimeInterval, messageType: MessageType) {
        cancelScheduledMessage(for: messageType)

        let timer = Timer.scheduledTimer(withTimeInterval: seconds, repeats: false, block: { [weak self] timer in
            self?.showMessage(text)
            timer.invalidate()
        })
        timers[messageType] = timer
    }


    func resetTracking() {

        guard let referenceImages = ARReferenceImage.referenceImages(inGroupNamed: "AR Resources", bundle: nil) else {
            fatalError("Missing expected asset catalog resources.")
        }

        let configuration = ARWorldTrackingConfiguration()
        configuration.detectionImages = referenceImages
        session.run(configuration, options: [.resetTracking, .removeExistingAnchors])

        //statusViewController.scheduleMessage("Look around to detect images", inSeconds: 7.5, messageType: .contentPlacement)
    }

    let updateQueue = DispatchQueue(label: Bundle.main.bundleIdentifier! +
        ".serialSceneKitQueue")


    /// - Tag: PlaceARContent
    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
        // Place content only for anchors found by plane detection.
        guard let imageAnchor = anchor as? ARImageAnchor else { return }
        let referenceImage = imageAnchor.referenceImage
        updateQueue.async {

            // Create a SceneKit plane to visualize the plane anchor using its position and extent.
            let plane = SCNPlane(width: referenceImage.physicalSize.width,
                                 height: referenceImage.physicalSize.height)

            let planeNode = SCNNode(geometry: plane)

            planeNode.geometry?.materials.first?.diffuse.contents = UIImage(named: "Crocefissione")

            plane.firstMaterial?.diffuse.contents = UIImage(named: "Crocefissione")

            //planeNode.geometry?.firstMaterial?.diffuse.contents = ARReferenceImage.referenceImages(inGroupNamed: "AR Resources", bundle: nil)

            // `SCNPlane` is vertically oriented in its local coordinate space, so
            // rotate the plane to match the horizontal orientation of `ARPlaneAnchor`.
            planeNode.eulerAngles.x = -.pi / 2
            planeNode.runAction(self.imageHighlightAction)

            // Make the plane visualization semitransparent to clearly show real-world placement.
            // Add the plane visualization to the ARKit-managed node so that it tracks
            // changes in the plane anchor as plane estimation continues.
            node.addChildNode(planeNode)
        }

        /// - Tag: UpdateARContent
        func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
            // Update content only for plane anchors and nodes matching the setup created in `renderer(_:didAdd:for:)`.
            guard let planeAnchor = anchor as?  ARPlaneAnchor,
                let planeNode = node.childNodes.first,
                let plane = planeNode.geometry as? SCNPlane
                else { return }

            // Plane estimation may shift the center of a plane relative to its anchor's transform.
            planeNode.simdPosition = float3(planeAnchor.center.x, 0, planeAnchor.center.z)

            // Plane estimation may also extend planes, or remove one plane to merge its extent into another.
            plane.width = CGFloat(planeAnchor.extent.x)
            plane.height = CGFloat(planeAnchor.extent.z)
        }

        // MARK: - ARSessionDelegate

        func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
            guard let frame = session.currentFrame else { return }
            //updateSessionInfoLabel(for: frame, trackingState: frame.camera.trackingState)
        }

        func session(_ session: ARSession, didRemove anchors: [ARAnchor]) {
            guard let frame = session.currentFrame else { return }
            //updateSessionInfoLabel(for: frame, trackingState: frame.camera.trackingState)
        }

        func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) {
            //updateSessionInfoLabel(for: session.currentFrame!, trackingState: camera.trackingState)
        }

        // MARK: - ARSessionObserver

        func sessionWasInterrupted(_ session: ARSession) {
            // Inform the user that the session has been interrupted, for example, by presenting an overlay.
            sessionInfoLabel.text = "Session was interrupted"
        }

        func sessionInterruptionEnded(_ session: ARSession) {
            // Reset tracking and/or remove existing anchors if consistent tracking is required.
            sessionInfoLabel.text = "Session interruption ended"
            resetTracking()
        }

        func session(_ session: ARSession, didFailWithError error: Error) {
            // Present an error message to the user.
            sessionInfoLabel.text = "Session failed: \(error.localizedDescription)"
            resetTracking()
        }

        // MARK: - Private methods

        func updateSessionInfoLabel(for frame: ARFrame, trackingState: ARCamera.TrackingState) {
            // Update the UI to provide feedback on the state of the AR experience.
            let message: String

            switch trackingState {
            case .normal where frame.anchors.isEmpty:
                // No planes detected; provide instructions for this app's AR interactions.
                message = "Move the device around to detect horizontal surfaces."

            case .notAvailable:
                message = "Tracking unavailable."

            case .limited(.excessiveMotion):
                message = "Tracking limited - Move the device more slowly."

            case .limited(.insufficientFeatures):
                message = "Tracking limited - Point the device at an area with visible surface detail, or improve lighting conditions."

            case .limited(.initializing):
                message = "Initializing AR session."

            default:
                // No feedback needed when tracking is normal and planes are visible.
                // (Nor when in unreachable limited-tracking states.)
                message = ""
            }
        }

        func resetTrackin() {
            let configuration = ARWorldTrackingConfiguration()
            configuration.planeDetection = .horizontal
            sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
        }
    }
    var imageHighlightAction: SCNAction {
        return .sequence([
            .wait(duration: 100.25),
            .fadeOpacity(to: 0, duration: 0.25),
            .fadeOpacity(to: 0, duration: 0.25),
            .fadeOpacity(to: 0, duration: 0.25),
            .fadeOut(duration: 0.5),
            .removeFromParentNode()
            ])
    }
}

我很感激这已经很晚了,我相信现在你已经解决了这个问题,但是尽管如此,在回答另一个问题时 ,我遇到了和你一样的问题。

查看您的代码,我假设当您设置以下内容时:

plane.firstMaterial?.diffuse.contents = UIImage(named: "Crocefissione")

那个Crocefissione指的是你实际的ARReferenceImage?

如果是这样,那么我相信幕后发生了一些魔法,Xcode 将这些转换为 GrayScale,这似乎是 ARKit 能够检测到 ARReferenceImage.

如果我错了,请告诉我^_______^.

无论如何,一个可能的解决方案是做这样的事情。

将每个 ARReferenceImage's 复制到 Assets Catalogue 并给它一个 prefix 例如颜色(您的图片名称):

然后您需要稍微更改函数以显示正确着色的图像,例如:

 plane.firstMaterial?.diffuse.contents = UIImage(named: "ColourCrocefissione")