I'm trying to implement this new positional audio feature in my application.
With a tap gesture I insert a drone into my scene, and I attach the sound.mp3 file to it:
/// Places a clone of `node` at the tapped location on a detected horizontal
/// plane and attaches a looping positional audio source to it.
/// - Parameters:
///   - node: Template node created at app start-up; it is cloned, not reused.
///   - recognizer: The tap recognizer that fired.
///   - view: The AR scene view used for raycasting and scene insertion.
func tapToPlace(node: SCNNode, recognizer: UITapGestureRecognizer, view: ARSCNView) {
    debugPrint("tap")
    guard gameState == .placeObject else { return }

    // UIKit and SceneKit objects must be accessed on the main thread —
    // the original `DispatchQueue.global().async` dispatch was a data race.
    DispatchQueue.main.async {
        let tappedPoint = recognizer.location(in: view)
        guard let query = view.raycastQuery(from: tappedPoint,
                                            allowing: .existingPlaneGeometry,
                                            alignment: .horizontal),
              let result = view.session.raycast(query).first else { return }

        let worldTransform = result.worldTransform // simd_float4x4
        let newNode = node.clone() // duplicate the node created at app start-up
        newNode.position = SCNVector3(worldTransform.columns.3.x,
                                      worldTransform.columns.3.y,
                                      worldTransform.columns.3.z) // place at tapped position

        // Set up positional audio. NOTE: the file should be MONO —
        // SceneKit does not spatialize stereo sources.
        guard let audio = SCNAudioSource(fileNamed: "sound.mp3") else {
            // Was a force-unwrap; a missing asset now fails loudly in debug
            // instead of crashing in release.
            assertionFailure("sound.mp3 missing from bundle")
            return
        }
        audio.loops = true
        audio.volume = 0.3
        audio.rate = 0.1
        audio.isPositional = true
        audio.shouldStream = false // streamed audio is not spatialized
        audio.load()

        newNode.addAudioPlayer(SCNAudioPlayer(source: audio))
        view.scene.rootNode.addChildNode(newNode)
    }
}
Reading Apple's documentation, it looks like I need to implement an audioListener: SCNNode.
How can I do this?
I have tried the following approach:
I get the camera's current location:
/// Returns the camera's current world transform, or the identity matrix when
/// no `ARFrame` is available yet (e.g. before the session produced a frame).
/// - Parameter arView: The AR view whose session is queried.
/// - Returns: The camera transform as a `simd_float4x4`.
///
/// The original stored the transform in an implicitly unwrapped optional and
/// crashed on return whenever `currentFrame` was nil; the identity fallback
/// keeps the non-optional signature while removing the crash.
func trackCameraLocation(arView: ARSCNView) -> simd_float4x4 {
    return arView.session.currentFrame?.camera.transform ?? matrix_identity_float4x4
}
I call this inside the session's didUpdate frame method, in order to have an accurate, up-to-date user location.
/// ARSession delegate callback, fired once per captured frame.
func session(_ session: ARSession, didUpdate frame: ARFrame) {
    // Cache the latest camera pose each frame so other callbacks
    // (e.g. node-placement delegates) can read an up-to-date position.
    let latestPose = trackCameraLocation(arView: sceneView)
    cameraLocation = latestPose
}
Once I have the camera location, inside the didAdd node method I tried to set the audioListener:
/// Called when SceneKit adds a node for a newly detected ARAnchor;
/// installs the spatial-audio listener.
///
/// The original version created a throw-away `SCNNode` frozen at the camera
/// position captured at anchor time, so the listener never followed the
/// device afterwards. Using the renderer's `pointOfView` — the node that
/// ARKit keeps synchronized with the camera — makes the listener track the
/// user automatically, with no per-frame bookkeeping.
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
    if renderer.audioListener == nil {
        renderer.audioListener = renderer.pointOfView
    }
}
But nothing works — I can't hear any audio; I just see my drone sitting silently on the floor of my house.
I'm looking for some help or an explanation of how to implement this new feature of ARKit.
Thanks in advance for the help.
Here is where I put my audio file:
Try this solution. It works in a VR app as well as in an AR app.
import SceneKit
extension ViewController: SCNSceneRendererDelegate {

    /// Per-frame render callback: repositions the audio listener.
    func renderer(_ renderer: SCNSceneRenderer,
                  updateAtTime time: TimeInterval) {
        listener.position.z = -20 // change listener's position here

        // Assign the listener node only once — the original re-set the
        // `audioListener` property on every rendered frame, which is
        // redundant work at 60 fps.
        if renderer.audioListener !== self.listener {
            renderer.audioListener = self.listener
        }
    }
}
...
class ViewController: UIViewController {

    let scene = SCNScene()
    let audioNode = SCNNode() // carries the positional audio player
    let listener = SCNNode()  // moved by the render delegate each frame

    override func viewDidLoad() {
        super.viewDidLoad()

        // The storyboard's root view is expected to be an SCNView; a wrong
        // view class here is a setup bug and should fail fast.
        let sceneView = self.view as! SCNView
        sceneView.scene = self.scene
        sceneView.backgroundColor = .black
        sceneView.delegate = self

        // Visible marker at the sound's origin.
        let node = SCNNode()
        node.geometry = SCNSphere(radius: 0.05)
        node.position = SCNVector3(0, 0, -2)
        self.scene.rootNode.addChildNode(node)

        // Build the URL directly instead of force-unwrapping a path string.
        // NOTE: the audio file must be MONO for positional playback.
        guard let url = Bundle.main.url(forResource: "art.scnassets/audio",
                                        withExtension: "mp3"),
              let source = SCNAudioSource(url: url) else {
            assertionFailure("Missing or unreadable audio asset art.scnassets/audio.mp3")
            return
        }
        source.isPositional = true
        source.shouldStream = false // streamed audio is not spatialized
        source.load()

        let player = SCNAudioPlayer(source: source)
        node.addChildNode(audioNode)
        // THE LOCATION OF THIS LINE IS IMPORTANT:
        // attach the player only after the audio node is in the scene graph.
        audioNode.addAudioPlayer(player)
        audioNode.addChildNode(self.listener)
    }
}