ARKit detecting intersection between planes

I am using ARKit (with SceneKit) and am trying to find a way to get the intersection between the ARReferenceImage and the horizontal ARPlaneDetection, so that I can show a 3D character on the surface directly in front of the detected image, e.g. spawning it inside the red circle shown in the image below.

At the moment I can get the character to spawn in front of the detected image, but the character floats in mid-air instead of standing on the surface.

//Take the translation (position) from the detected anchor's transform
let realWorldPositon = SCNVector3Make(anchor.transform.columns.3.x, anchor.transform.columns.3.y, anchor.transform.columns.3.z)

//Cast a segment from the scene's root node position to the anchor's position
let hitTest = self.sceneView.scene.rootNode.hitTestWithSegment(from: self.sceneView.scene.rootNode.worldPosition, to: realWorldPositon, options: nil)

//Place the node at the hit point's x/z, with y fixed at 0
overlayNode.position = SCNVector3Make((hitTest.first?.worldCoordinates.x)!, 0, (hitTest.first?.worldCoordinates.z)!)
self.sceneView.scene.rootNode.addChildNode(overlayNode)

Any help would be greatly appreciated, thanks!

Example project

I think you are correct in using the hitTestWithSegment function to detect the intersection between the ARImageAnchor and the ARPlaneAnchor.
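
Just to illustrate the idea in isolation, here is a minimal sketch of that segment test (this is only an illustration of mine, with a made-up helper name, and is not part of the project below):

import ARKit
import SceneKit

//A Minimal Sketch (Illustration Only) Of hitTestWithSegment(from:to:options:)
//Which Finds Where The Line From An ImageAnchor To A PlaneAnchor Crosses Existing Geometry
func intersectionPoint(in sceneView: ARSCNView,
                       imageAnchor: ARImageAnchor,
                       planeAnchor: ARPlaneAnchor) -> SCNVector3? {

    //1. The World Position Of Each Anchor Lives In Column 3 Of Its Transform
    let from = SCNVector3(imageAnchor.transform.columns.3.x,
                          imageAnchor.transform.columns.3.y,
                          imageAnchor.transform.columns.3.z)

    let to = SCNVector3(planeAnchor.transform.columns.3.x,
                        planeAnchor.transform.columns.3.y,
                        planeAnchor.transform.columns.3.z)

    //2. The Segment Test Only Hits SCNNode Geometry, So The Plane Anchor Needs To Be
    //Visualized With An SCNPlane (As In The Code Below) For A Result To Be Returned
    return sceneView.scene.rootNode.hitTestWithSegment(from: from, to: to, options: nil).first?.worldCoordinates
}

That is also why, in the code below, the semi-transparent SCNPlane stays attached to the detected plane anchor until the model has been placed: without any geometry there would be nothing for the segment to hit.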

Rather than trying to explain every step of my attempted answer, I have provided fully commented code, so it should be fairly self-explanatory.

My example works fairly well (although it is certainly not perfect) and will definitely need some tweaking.

For example, you would need to determine more accurately the distance from the ARReferenceImage to the ARPlaneAnchor, and so on.
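
One possible way of measuring that distance (just a sketch of mine, not something the code below actually does) is to compare the translation columns of the two anchor transforms:

import ARKit
import simd

//A Rough Sketch (Assumption Of Mine) For Measuring How Far The Detected Image Is From A Plane Anchor
func distance(from imageAnchor: ARImageAnchor, to planeAnchor: ARPlaneAnchor) -> Float {

    //1. The Translation Components Live In Column 3 Of Each Anchor's 4x4 Transform
    let imagePosition = simd_float3(imageAnchor.transform.columns.3.x,
                                    imageAnchor.transform.columns.3.y,
                                    imageAnchor.transform.columns.3.z)

    let planePosition = simd_float3(planeAnchor.transform.columns.3.x,
                                    planeAnchor.transform.columns.3.y,
                                    planeAnchor.transform.columns.3.z)

    //2. Return The Straight Line Distance Between The Two Anchors
    return simd_distance(imagePosition, planePosition)
}

You could then use something like this to decide how far in front of the ARReferenceImage the model should actually be positioned.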

I was able to place the model (a Pokémon) at the correct level and fairly close to the front of the ARReferenceImage, although it does need adjusting.

Having said that, I think this should be a fairly decent base for you to start refining the code and getting more accurate results.

It is worth noting, however, that I have only allowed one ARPlaneAnchor to be detected (simply for the sake of simplicity), and I have assumed that you will be detecting the plane in front of your image marker.

I haven't taken rotation or anything like that into account. And of course, based on the scenario you proposed, it also assumes that your image will be placed on a table or some other flat surface.
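
If you did want to factor in rotation, one possible approach (again just a sketch of mine; the code below does not do this) would be to borrow the image anchor's rotation about the Y axis for the model:

import ARKit
import SceneKit

//A Sketch (Assumption Of Mine) For Aligning A Model With The Detected Image's Heading
func alignModel(_ model: SCNNode, with imageAnchor: ARImageAnchor) {

    //1. Let SceneKit Decompose The Anchor's 4x4 Transform For Us
    let helperNode = SCNNode()
    helperNode.simdTransform = imageAnchor.transform

    //2. Keep Only The Rotation Around The Y Axis So The Model Stays Upright On The Plane
    model.eulerAngles.y = helperNode.eulerAngles.y
}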

Anyway, here is my answer (hopefully it should be fairly self-explanatory):

import UIKit
import SceneKit
import ARKit

//-----------------------
//MARK: ARSCNViewDelegate
//-----------------------

extension ViewController: ARSCNViewDelegate{

    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {

        //1. If We Have Detected Our ImageTarget Then Create A Plane To Visualize It
        if let currentImageAnchor = anchor as? ARImageAnchor {

            createReferenceImagePlaneForNode(currentImageAnchor, node: node)
            allowTracking = true

        }

        //2. If We Have Detected A Horizontal Plane Then Create One
        if let currentPlaneAnchor = anchor as? ARPlaneAnchor{

            if planeNode == nil && !createdModel{ createReferencePlaneForNode(currentPlaneAnchor, node: node) }
        }

    }

    func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {

        //1. Check To See Whether An ARPlaneAnchor Has Been Updated
        guard let anchor = anchor as? ARPlaneAnchor,
              //2. Check It Is Our PlaneNode
              let existingPlane = planeNode,
              //3. Get The Geometry Of The PlaneNode
              let planeGeometry = existingPlane.geometry as? SCNPlane else { return }

        //4. Adjust Its Size & Position
        planeGeometry.width = CGFloat(anchor.extent.x)
        planeGeometry.height = CGFloat(anchor.extent.z)

        planeNode?.position = SCNVector3Make(anchor.center.x, 0.01, anchor.center.z)
    }

    func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {

        //1. Detect The Intersection Of The ARPlaneAnchor & ARImageAnchor
        if allowTracking { detectIntersectionOfImageTarget() }

    }

}

//---------------------------------------
//MARK: Model Generation & Identification
//---------------------------------------

extension ViewController {

    /// Detects If We Have Intersected A Valid Image Target
    func detectIntersectionOfImageTarget(){

        //If We Haven't Created Our Model Then Check To See If We Have Detected An Existing Plane
        if !createdModel{

            //a. Perform A HitTest On The Center Of The Screen For Any Existing Planes
            guard let planeHitTest = self.augmentedRealityView.hitTest(screenCenter, types: .existingPlaneUsingExtent).first,
                let planeAnchor = planeHitTest.anchor as? ARPlaneAnchor else { return }

            //b. Get The Transform Of The ARPlaneAnchor
            let x = planeAnchor.transform.columns.3.x
            let y = planeAnchor.transform.columns.3.y
            let z = planeAnchor.transform.columns.3.z

            //c. Create The Anchor's Vector
            let anchorVector = SCNVector3(x, y, z)

            //d. Perform Another HitTest From The ImageAnchor Vector To The Anchor's Vector
            if let _ = self.augmentedRealityView.scene.rootNode.hitTestWithSegment(from: imageAnchorVector, to: anchorVector, options: nil).first?.node {

                //a. If We Haven't Created The Model Then Place It As Soon As An Intersection Occurs
                if createdModel == false{

                    //b. Load The Model At The ImageAnchor's X & Z, Using The PlaneAnchor's Y
                    loadModelAtVector(SCNVector3(imageAnchorVector.x, y, imageAnchorVector.z))

                    createdModel = true

                    planeNode?.removeFromParentNode()

                }
            }
        }
    }

}

class ViewController: UIViewController {

    //1. Reference To Our ImageTarget Bundle
    let AR_BUNDLE = "AR Resources"

    //2. Vector To Store The Position Of Our Detected Image
    var imageAnchorVector: SCNVector3!

    //3. Variables To Allow Tracking & To Determine Whether Our Model Has Been Placed
    var allowTracking = false
    var createdModel = false

    //4. Create A Reference To Our ARSCNView In Our Storyboard Which Displays The Camera Feed
    @IBOutlet weak var augmentedRealityView: ARSCNView!

    //5. Create Our ARWorld Tracking Configuration
    let configuration = ARWorldTrackingConfiguration()

    //6. Create Our Session
    let augmentedRealitySession = ARSession()

    //7. ARReference Images
    lazy var staticReferenceImages: Set<ARReferenceImage> = {

        let images = ARReferenceImage.referenceImages(inGroupNamed: AR_BUNDLE, bundle: nil)
        return images!

    }()

    //8. Screen Center Reference
    var screenCenter: CGPoint!

    //9. PlaneNode
    var planeNode: SCNNode?

    //--------------------
    //MARK: View LifeCycle
    //--------------------

    override func viewDidLoad() {

        super.viewDidLoad()

        //1. Get Reference To The Center Of The Screen For RayCasting
        DispatchQueue.main.async { self.screenCenter = CGPoint(x: self.view.bounds.width/2, y: self.view.bounds.height/2) }

        //2. Setup Our ARSession
        setupARSessionWithStaticImages()


    }

    override func didReceiveMemoryWarning() { super.didReceiveMemoryWarning() }

    //---------------------------------
    //MARK: ARImageAnchor Vizualization
    //---------------------------------

    /// Creates An SCNPlane For Visualizing The Detected ARImageAnchor
    ///
    /// - Parameters:
    ///   - imageAnchor: ARImageAnchor
    ///   - node: SCNNode
    func createReferenceImagePlaneForNode(_ imageAnchor: ARImageAnchor, node: SCNNode){

        //1. Get The Target's Width & Height
        let width = imageAnchor.referenceImage.physicalSize.width
        let height = imageAnchor.referenceImage.physicalSize.height

        //2. Create A Plane Geometry To Cover The ARImageAnchor
        let planeNode = SCNNode()
        let planeGeometry = SCNPlane(width: width, height: height)
        planeGeometry.firstMaterial?.diffuse.contents = UIColor.white
        planeNode.opacity = 0.5
        planeNode.geometry = planeGeometry

        //3. Rotate The PlaneNode To Horizontal
        planeNode.eulerAngles.x = -.pi/2

        //4. The Node Is Centered In The Anchor (0,0,0)
        node.addChildNode(planeNode)

        //5. Store The Vector Of The ARImageAnchor
        imageAnchorVector = SCNVector3(imageAnchor.transform.columns.3.x, imageAnchor.transform.columns.3.y, imageAnchor.transform.columns.3.z)

        let fadeOutAction = SCNAction.fadeOut(duration: 5)
        planeNode.runAction(fadeOutAction)

    }

    //-------------------------
    //MARK: Plane Visualization
    //-------------------------

    /// Creates An SCNPlane For Visualizing The Detected ARPlaneAnchor
    ///
    /// - Parameters:
    ///   - anchor: ARPlaneAnchor
    ///   - node: SCNNode
    func createReferencePlaneForNode(_ anchor: ARPlaneAnchor, node: SCNNode){

        //1. Get The Anchor's Width & Height
        let width = CGFloat(anchor.extent.x)
        let height = CGFloat(anchor.extent.z)

        //2. Create A Plane Geometry To Cover The ARPlaneAnchor
        planeNode = SCNNode()
        let planeGeometry = SCNPlane(width: width, height: height)
        planeGeometry.firstMaterial?.diffuse.contents = UIColor.white
        planeNode?.opacity = 0.5
        planeNode?.geometry = planeGeometry

        //3. Rotate The PlaneNode To Horizontal
        planeNode?.eulerAngles.x = -.pi/2

        //4. The Node Is Centered In The Anchor (0,0,0)
        node.addChildNode(planeNode!)

    }

    //-------------------
    //MARK: Model Loading
    //-------------------


    /// Loads Our Model Based On The Resulting Vector Of Our ARAnchor
    ///
    /// - Parameter worldVector: SCNVector3
    func loadModelAtVector(_ worldVector: SCNVector3) {

        let modelPath = "ARModels.scnassets/Scatterbug.scn"

        //1. Get The Reference To Our SCNScene & Get The Model Root Node
        guard let model = SCNScene(named: modelPath),
              let pokemonModel = model.rootNode.childNode(withName: "RootNode", recursively: false) else { return }

        //2. Scale The Scatterbug
        pokemonModel.scale = SCNVector3(0.003, 0.003, 0.003)

        //3. Position It At The Resulting World Vector & Add It To Our Scene
        pokemonModel.position = worldVector
        augmentedRealityView.scene.rootNode.addChildNode(pokemonModel)


    }

    //---------------
    //MARK: ARSession
    //---------------

    /// Sets Up The ARSession With Our Static ARReferenceImages
    func setupARSessionWithStaticImages(){

        //1. Set Our Configuration
        configuration.detectionImages = staticReferenceImages
        configuration.planeDetection = .horizontal

        //2. Run The Configuration
        augmentedRealitySession.run(configuration, options: [.resetTracking, .removeExistingAnchors])

        //3. Set The Session & Delegate
        augmentedRealityView?.session = augmentedRealitySession
        self.augmentedRealityView?.delegate = self

    }

}

Hopefully it will point you in the right direction...