I have a Three.js panorama viewer to which I'm trying to add 3D positional sound, following this tutorial: http://www.html5rocks.com/en/tutorials/webaudio/positional_audio/
The camera stays in one place and only rotates, while the sounds are placed at fixed positions in the scene. I want each sound's volume to depend on whether or not the camera is facing its source.
Here is how the sounds are created (the position argument is a THREE.Vector3):
function playSound(buffer, looping, position, volume) {
    var sound = {};

    // Signal chain: source -> per-sound gain -> panner -> master gain
    sound.source = context.createBufferSource();
    sound.volume = context.createGain();
    sound.panner = context.createPanner();

    sound.source.buffer = buffer;
    if (looping)
        sound.source.loop = true;
    sound.volume.gain.value = volume;

    sound.source.connect(sound.volume);
    sound.volume.connect(sound.panner);
    sound.panner.connect(mainVolume);

    // Keep the scene position and mirror it onto the panner
    sound.position = position;
    sound.panner.setPosition(position.x, position.y, position.z);

    sound.source.start();

    sceneSounds.push(sound);
}
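For completeness, this is roughly how I call it once a buffer has been decoded (the file path and position here are just placeholders):
var request = new XMLHttpRequest();
request.open('GET', 'sounds/ambience.mp3', true); // placeholder path
request.responseType = 'arraybuffer';
request.onload = function () {
    context.decodeAudioData(request.response, function (buffer) {
        // Looping ambient sound placed somewhere on the panorama sphere
        playSound(buffer, true, new THREE.Vector3(0, 0, -512), 1.0);
    });
};
request.send();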
This part works; the sounds play correctly.
In the render loop:
lat = Math.max(-85, Math.min(85, lat));
phi = THREE.Math.degToRad(90 - lat);
theta = THREE.Math.degToRad(lon);
target.x = 512 * Math.sin(phi) * Math.cos(theta);
target.y = 512 * Math.cos(phi);
target.z = 512 * Math.sin(phi) * Math.sin(theta);
camera.lookAt(target);
if(sceneSounds.length > 0)
updateSceneSounds( target );
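If I understand the tutorial correctly, the panner only gives a direction-dependent result when the AudioListener is kept in sync with the camera, so I assume I also need something like this in the render loop (untested sketch; in my scene the camera sits at the origin, so target is already the viewing direction):
var dir = target.clone().normalize();
context.listener.setPosition(camera.position.x, camera.position.y, camera.position.z);
// Forward vector followed by the up vector
context.listener.setOrientation(dir.x, dir.y, dir.z, 0, 1, 0);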
updateSceneSounds receives the camera's target and, for now, only logs the distance from each sound to it:
function updateSceneSounds(target) {
    for (var i = 0; i < sceneSounds.length; i++) {
        var sound = sceneSounds[i];
        var distance = sound.position.distanceTo(target);
        console.log(distance);
    }
}
The logged distances all fall in the 400-500 range. Since target is just a point on a sphere of radius 512 around the camera, the distance from a sound to that point mixes the sound's own distance with the camera's orientation, so it doesn't seem like the right value to drive the volume with.
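Alternatively, if I control the gain manually instead of relying on the panner, I was thinking of basing each sound's volume on the angle between the viewing direction and the direction to the sound, along these lines (rough, untested sketch; the 0.1-1.0 range is arbitrary and, again, this assumes the camera is at the origin):
function updateSceneSounds(target) {
    var viewDir = target.clone().normalize();
    for (var i = 0; i < sceneSounds.length; i++) {
        var sound = sceneSounds[i];
        var soundDir = sound.position.clone().normalize();
        // 1 = directly in front of the camera, -1 = directly behind it
        var facing = viewDir.dot(soundDir);
        // Map [-1, 1] to an audible gain range
        sound.volume.gain.value = THREE.Math.mapLinear(facing, -1, 1, 0.1, 1.0);
    }
}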
Any suggestions or tips on the best approach to tackle this issue?