ARKit – 如何实现 SCNRenderer().audioListener?
ARKit – How to implement SCNRenderer().audioListener?
我正在尝试在我的应用程序中实现这个新的定位音频。
我使用 tapGesture 在我的场景中插入了一个无人机,并将 sound.mp3 附加到它上面
/// Places a clone of `node` at the tapped location on a detected horizontal
/// plane and attaches a looping positional audio source to it.
/// - Parameters:
///   - node: Template node to clone (created once at app start-up).
///   - recognizer: The tap gesture that triggered the placement.
///   - view: The AR scene view that received the tap.
func tapToPlace(node: SCNNode, recognizer: UITapGestureRecognizer, view : ARSCNView){
    debugPrint("tap")
    guard gameState == .placeObject else { return }
    // Gesture hit-testing, raycasting and scene-graph mutation must run on
    // the main thread — the original `DispatchQueue.global().async` was a
    // threading bug and a likely cause of flaky behavior.
    DispatchQueue.main.async {
        let tappedPoint = recognizer.location(in: view)
        guard let query = view.raycastQuery(from: tappedPoint,
                                            allowing: .existingPlaneGeometry,
                                            alignment: .horizontal),
              let worldTransform = view.session.raycast(query).first?.worldTransform // simd_float4x4
        else { return }
        let newNode = node.clone() // duplicate the node created at app start-up
        // Place it at the raycast hit (translation column of the transform).
        newNode.position = SCNVector3(worldTransform.columns.3.x,
                                      worldTransform.columns.3.y,
                                      worldTransform.columns.3.z)
        // Set up positional audio. NOTE(review): positional audio requires a
        // MONO file; a stereo file plays as ambient and ignores `isPositional`.
        guard let audio = SCNAudioSource(fileNamed: "sound.mp3") else {
            debugPrint("sound.mp3 not found in bundle") // fail soft, not crash
            return
        }
        audio.loops = true
        audio.volume = 0.3
        audio.rate = 0.1
        audio.isPositional = true
        audio.shouldStream = false
        audio.load()
        newNode.addAudioPlayer(SCNAudioPlayer(source: audio))
        view.scene.rootNode.addChildNode(newNode)
    }
}
阅读苹果文档,看起来需要实现这个 audioListener: SCNNode
我该怎么做?
我试过以下方法:
获取摄像头当前位置。
/// Returns the camera's current world transform.
/// - Parameter arView: The AR view whose session is queried.
/// - Returns: The camera transform, or the identity matrix when no frame is
///   available yet. The original implicitly-unwrapped optional crashed
///   whenever `currentFrame` was nil (e.g. during the first frames of a session).
func trackCameraLocation(arView: ARSCNView) -> simd_float4x4 {
    // `currentFrame` is nil until the session delivers its first ARFrame.
    return arView.session.currentFrame?.camera.transform ?? matrix_identity_float4x4
}
我用这个里面的方法做了update frame,为了得到准确的用户位置。
/// ARSessionDelegate callback, fired once per rendered frame.
/// Uses the delivered `frame` directly instead of re-reading
/// `session.currentFrame` through a helper — cheaper, and `frame.camera`
/// is guaranteed non-nil here, unlike `currentFrame`.
func session(_ session: ARSession, didUpdate frame: ARFrame) {
    cameraLocation = frame.camera.transform
}
获得相机位置后,在 didAdd 节点方法中我尝试设置 audioListener..
/// Called when SceneKit adds a node for a new ARAnchor.
/// The original created a brand-new listener node on every call, never added
/// it to the scene, and never updated its position afterwards — so the audio
/// listener was a stale orphan and positional audio stayed silent.
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
    // `pointOfView` is the camera node ARKit moves every frame; making it the
    // audio listener keeps spatial audio tracking the user's actual position.
    renderer.audioListener = renderer.pointOfView
}
但是..没有任何作用..听不到任何声音..我只看到我家地板上的无人机静音。
寻求一些帮助或解释如何实现 ARKit 的这个新特性。
在此先感谢您的帮助。
这里是我放置音频文件的地方:
试试这个解决方案。它适用于 VR 应用程序,以及 AR 应用程序。
import SceneKit
// MARK: - SCNSceneRendererDelegate
extension ViewController: SCNSceneRendererDelegate {
    /// Per-frame render callback: repositions the listener node and keeps the
    /// renderer pointed at it so positional audio is computed relative to it.
    func renderer(_ renderer: SCNSceneRenderer,
                  updateAtTime time: TimeInterval) {
        var location = listener.position
        location.z = -20 // change listener's position here
        listener.position = location
        renderer.audioListener = self.listener
    }
}
...
/// Minimal SceneKit view controller demonstrating positional audio:
/// a sphere carries an `audioNode` that plays a mono source, and a separate
/// `listener` node is handed to the renderer each frame by the delegate.
class ViewController: UIViewController {
    let scene = SCNScene()
    let audioNode = SCNNode()
    let listener = SCNNode()

    override func viewDidLoad() {
        super.viewDidLoad()
        // The original used `as! SCNView`; fail gracefully instead of crashing
        // when the storyboard view is of a different class.
        guard let sceneView = self.view as? SCNView else { return }
        sceneView.scene = self.scene
        sceneView.backgroundColor = .black
        sceneView.delegate = self

        // Visible marker at the sound's origin.
        let node = SCNNode()
        node.geometry = SCNSphere(radius: 0.05)
        node.position = SCNVector3(0, 0, -2)
        self.scene.rootNode.addChildNode(node)

        // MONO AUDIO required for positional playback. The original chained
        // force-unwraps (`path!`, `SCNAudioSource(url:)!`) crashed whenever
        // the resource was missing from the bundle.
        guard let url = Bundle.main.url(forResource: "art.scnassets/audio",
                                        withExtension: "mp3"),
              let source = SCNAudioSource(url: url) else {
            debugPrint("art.scnassets/audio.mp3 missing from bundle")
            return
        }
        source.isPositional = true
        source.shouldStream = false
        source.load()

        let player = SCNAudioPlayer(source: source)
        node.addChildNode(audioNode)
        // THE LOCATION OF THIS LINE IS IMPORTANT:
        // attach the player only after `audioNode` is in the scene graph.
        audioNode.addAudioPlayer(player)
        audioNode.addChildNode(self.listener)
    }
}
我正在尝试在我的应用程序中实现这个新的定位音频。
我使用 tapGesture 在我的场景中插入了一个无人机,并将 sound.mp3 附加到它上面
/// Places a clone of `node` at the tapped location on a detected horizontal
/// plane and attaches a looping positional audio source to it.
/// - Parameters:
///   - node: Template node to clone (created once at app start-up).
///   - recognizer: The tap gesture that triggered the placement.
///   - view: The AR scene view that received the tap.
func tapToPlace(node: SCNNode, recognizer: UITapGestureRecognizer, view : ARSCNView){
    debugPrint("tap")
    guard gameState == .placeObject else { return }
    // Gesture hit-testing, raycasting and scene-graph mutation must run on
    // the main thread — the original `DispatchQueue.global().async` was a
    // threading bug and a likely cause of flaky behavior.
    DispatchQueue.main.async {
        let tappedPoint = recognizer.location(in: view)
        guard let query = view.raycastQuery(from: tappedPoint,
                                            allowing: .existingPlaneGeometry,
                                            alignment: .horizontal),
              let worldTransform = view.session.raycast(query).first?.worldTransform // simd_float4x4
        else { return }
        let newNode = node.clone() // duplicate the node created at app start-up
        // Place it at the raycast hit (translation column of the transform).
        newNode.position = SCNVector3(worldTransform.columns.3.x,
                                      worldTransform.columns.3.y,
                                      worldTransform.columns.3.z)
        // Set up positional audio. NOTE(review): positional audio requires a
        // MONO file; a stereo file plays as ambient and ignores `isPositional`.
        guard let audio = SCNAudioSource(fileNamed: "sound.mp3") else {
            debugPrint("sound.mp3 not found in bundle") // fail soft, not crash
            return
        }
        audio.loops = true
        audio.volume = 0.3
        audio.rate = 0.1
        audio.isPositional = true
        audio.shouldStream = false
        audio.load()
        newNode.addAudioPlayer(SCNAudioPlayer(source: audio))
        view.scene.rootNode.addChildNode(newNode)
    }
}
阅读苹果文档,看起来需要实现这个 audioListener: SCNNode
我该怎么做?
我试过以下方法:
获取摄像头当前位置。
/// Returns the camera's current world transform.
/// - Parameter arView: The AR view whose session is queried.
/// - Returns: The camera transform, or the identity matrix when no frame is
///   available yet. The original implicitly-unwrapped optional crashed
///   whenever `currentFrame` was nil (e.g. during the first frames of a session).
func trackCameraLocation(arView: ARSCNView) -> simd_float4x4 {
    // `currentFrame` is nil until the session delivers its first ARFrame.
    return arView.session.currentFrame?.camera.transform ?? matrix_identity_float4x4
}
我用这个里面的方法做了update frame,为了得到准确的用户位置。
/// ARSessionDelegate callback, fired once per rendered frame.
/// Uses the delivered `frame` directly instead of re-reading
/// `session.currentFrame` through a helper — cheaper, and `frame.camera`
/// is guaranteed non-nil here, unlike `currentFrame`.
func session(_ session: ARSession, didUpdate frame: ARFrame) {
    cameraLocation = frame.camera.transform
}
获得相机位置后,在 didAdd 节点方法中我尝试设置 audioListener..
/// Called when SceneKit adds a node for a new ARAnchor.
/// The original created a brand-new listener node on every call, never added
/// it to the scene, and never updated its position afterwards — so the audio
/// listener was a stale orphan and positional audio stayed silent.
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
    // `pointOfView` is the camera node ARKit moves every frame; making it the
    // audio listener keeps spatial audio tracking the user's actual position.
    renderer.audioListener = renderer.pointOfView
}
但是..没有任何作用..听不到任何声音..我只看到我家地板上的无人机静音。
寻求一些帮助或解释如何实现 ARKit 的这个新特性。
在此先感谢您的帮助。
这里是我放置音频文件的地方:
试试这个解决方案。它适用于 VR 应用程序,以及 AR 应用程序。
import SceneKit
// MARK: - SCNSceneRendererDelegate
extension ViewController: SCNSceneRendererDelegate {
    /// Per-frame render callback: repositions the listener node and keeps the
    /// renderer pointed at it so positional audio is computed relative to it.
    func renderer(_ renderer: SCNSceneRenderer,
                  updateAtTime time: TimeInterval) {
        var location = listener.position
        location.z = -20 // change listener's position here
        listener.position = location
        renderer.audioListener = self.listener
    }
}
...
/// Minimal SceneKit view controller demonstrating positional audio:
/// a sphere carries an `audioNode` that plays a mono source, and a separate
/// `listener` node is handed to the renderer each frame by the delegate.
class ViewController: UIViewController {
    let scene = SCNScene()
    let audioNode = SCNNode()
    let listener = SCNNode()

    override func viewDidLoad() {
        super.viewDidLoad()
        // The original used `as! SCNView`; fail gracefully instead of crashing
        // when the storyboard view is of a different class.
        guard let sceneView = self.view as? SCNView else { return }
        sceneView.scene = self.scene
        sceneView.backgroundColor = .black
        sceneView.delegate = self

        // Visible marker at the sound's origin.
        let node = SCNNode()
        node.geometry = SCNSphere(radius: 0.05)
        node.position = SCNVector3(0, 0, -2)
        self.scene.rootNode.addChildNode(node)

        // MONO AUDIO required for positional playback. The original chained
        // force-unwraps (`path!`, `SCNAudioSource(url:)!`) crashed whenever
        // the resource was missing from the bundle.
        guard let url = Bundle.main.url(forResource: "art.scnassets/audio",
                                        withExtension: "mp3"),
              let source = SCNAudioSource(url: url) else {
            debugPrint("art.scnassets/audio.mp3 missing from bundle")
            return
        }
        source.isPositional = true
        source.shouldStream = false
        source.load()

        let player = SCNAudioPlayer(source: source)
        node.addChildNode(audioNode)
        // THE LOCATION OF THIS LINE IS IMPORTANT:
        // attach the player only after `audioNode` is in the scene graph.
        audioNode.addAudioPlayer(player)
        audioNode.addChildNode(self.listener)
    }
}