<!doctype html>
<!--
Copyright 2018 The Immersive Web Community Group
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-->
<html>
<head>
<meta charset='utf-8'>
<meta name='viewport' content='width=device-width, initial-scale=1, user-scalable=no'>
<meta name='mobile-web-app-capable' content='yes'>
<meta name='apple-mobile-web-app-capable' content='yes'>
<!-- Origin Trial Token, feature = WebXR Device API, origin = https://immersive-web.github.io, expires = 2018-08-28 -->
<meta http-equiv="origin-trial" data-feature="WebXR Device API" data-expires="2018-08-28" content="AnNpu7ceXvLew05ccD8Zr1OZsdZiB2hLQKK82kTTMDwF7oRKtP3QEJ4RzkeHrmB8Sq0vSV6ZNmszpBCZ0I8p9gAAAABceyJvcmlnaW4iOiJodHRwczovL2ltbWVyc2l2ZS13ZWIuZ2l0aHViLmlvOjQ0MyIsImZlYXR1cmUiOiJXZWJYUkRldmljZSIsImV4cGlyeSI6MTUzNTQxNDQwMH0=">
<title>Positional Audio</title>
<link href='css/common.css' rel='stylesheet'/>
<!--The polyfill is not needed for browsers that have native API support,
but is linked by these samples for wider compatibility.-->
<!--script src='https://cdn.jsdelivr.net/npm/webxr-polyfill@latest/build/webxr-polyfill.js'></script-->
<script src='js/webxr-polyfill.js'></script>
<script src="https://cdn.jsdelivr.net/npm/resonance-audio/build/resonance-audio.min.js"></script>
<script src='js/webxr-button.js'></script>
</head>
<body>
<header>
<details open>
<summary>Positional Audio</summary>
<p>
This sample demonstrates playing audio that sounds as if it originates
at a specific point in space. Audio will begin playing when you
enter XR.
<a class="back" href="./">Back</a>
</p>
</details>
</header>
<script type="module">
import {Scene} from './js/cottontail/src/scenes/scene.js';
import {Renderer, createWebGLContext} from './js/cottontail/src/core/renderer.js';
import {Gltf2Node} from './js/cottontail/src/nodes/gltf2.js';
import {QueryArgs} from './js/cottontail/src/util/query-args.js';
import {FallbackHelper} from './js/cottontail/src/util/fallback-helper.js';
import {ButtonNode} from './js/cottontail/src/nodes/button.js';
import {UrlTexture} from './js/cottontail/src/core/texture.js';
import {mat4, vec3} from './js/cottontail/src/math/gl-matrix.js';
// If requested, initialize the WebXR polyfill
if (QueryArgs.getBool('allowPolyfill', false)) {
let polyfill = new WebXRPolyfill();
}
// Temporary
let hideStats = QueryArgs.getBool('hideStats', false);
const DEFAULT_HEIGHT = 1.5;
const ANALYSER_FFT_SIZE = 1024;
// XR globals.
let xrButton = null;
let xrImmersiveRefSpace = null;
let xrNonImmersiveRefSpace = null;
// WebGL scene globals.
let gl = null;
let renderer = null;
let scene = new Scene();
if (hideStats) {
scene.enableStats(false);
}
scene.addNode(new Gltf2Node({url: '../media/gltf/garage/garage.gltf'}));
scene.standingStats(true);
let playButton = null;
let playTexture = new UrlTexture('../media/textures/play-button.png');
let pauseTexture = new UrlTexture('../media/textures/pause-button.png');
let stereo = new Gltf2Node({url: '../media/gltf/stereo/stereo.gltf'});
// FIXME: Temporary workaround: add the node (hidden) so it gets
// initialized and can be cloned per audio source later.
stereo.visible = false;
scene.addNode(stereo);
// Audio scene globals
let audioContext = new AudioContext();
let resonance = new ResonanceAudio(audioContext);
resonance.output.connect(audioContext.destination);
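// Start suspended: browser autoplay policies generally require a user
// gesture before audio can play, and this sample only starts playback
// once the user enters XR or clicks the play button.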
audioContext.suspend();
// TODO: This is crashing in recent versions of Resonance for me, and I'm
// not sure why. It does run successfully without it, though.
// Rough room dimensions in meters (estimated from the model in Blender).
/*let roomDimensions = {
width : 6,
height : 3,
depth : 6
};
// Simplified view of the materials that make up the scene.
let roomMaterials = {
left : 'plywood-panel', // Garage walls
right : 'plywood-panel',
front : 'plywood-panel',
back : 'metal', // To account for the garage door
down : 'polished-concrete-or-tile', // garage floor
up : 'wood-ceiling'
};
resonance.setRoomProperties(roomDimensions, roomMaterials);*/
function createAudioSource(options) {
// Create a Resonance source and set its position in space.
let source = resonance.createSource();
let pos = options.position;
source.setPosition(pos[0], pos[1], pos[2]);
// Connect an analyser. This is only for visualization of the audio, and
// in most cases you won't want it.
let analyser = audioContext.createAnalyser();
analyser.fftSize = ANALYSER_FFT_SIZE;
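// Stash a custom property on the analyser node to hold a running
// loudness average (see getLoudnessScale below).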
analyser.lastRMSdB = 0;
return fetch(options.url)
.then((response) => response.arrayBuffer())
.then((buffer) => audioContext.decodeAudioData(buffer))
.then((decodedBuffer) => {
let bufferSource = createBufferSource(
source, decodedBuffer, analyser);
return {
buffer: decodedBuffer,
bufferSource: bufferSource,
source: source,
analyser: analyser,
position: pos,
rotateY: options.rotateY,
node: null
};
});
}
function createBufferSource(source, buffer, analyser) {
// Create a buffer source. This will need to be recreated every time
// we wish to start the audio, see
// https://developer.mozilla.org/en-US/docs/Web/API/AudioBufferSourceNode
let bufferSource = audioContext.createBufferSource();
bufferSource.loop = true;
bufferSource.connect(source.input);
bufferSource.connect(analyser);
bufferSource.buffer = buffer;
return bufferSource;
}
// Reusable scratch buffer for time-domain samples from the analyser.
let fftBuffer = new Float32Array(ANALYSER_FFT_SIZE);
/**
 * Returns a floating point value that represents the loudness of the audio
 * stream, appropriate for scaling an object with.
 * @param {AnalyserNode} analyser
 * @return {Number} loudness scalar.
 */
function getLoudnessScale(analyser) {
analyser.getFloatTimeDomainData(fftBuffer);
let sum = 0;
for (let i = 0; i < fftBuffer.length; ++i)
sum += fftBuffer[i] * fftBuffer[i];
// Calculate RMS and convert it to DB for perceptual loudness.
let rms = Math.sqrt(sum / fftBuffer.length);
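// 10 / Math.LN10 * Math.log(rms) is equivalent to 10 * log10(rms); the
// +30 offset shifts typical signal levels into a roughly 0-30 range
// (negative results are clamped to 0 below).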
let db = 30 + 10 / Math.LN10 * Math.log(rms <= 0 ? 0.0001 : rms);
// Exponential moving average with an alpha of 0.525. Experimentally determined.
analyser.lastRMSdB += 0.525 * ((db < 0 ? 0 : db) - analyser.lastRMSdB);
// Scaling by 1/30 is also experimentally determined. The max prevents
// objects from disappearing entirely.
return Math.max(0.3, analyser.lastRMSdB / 30.0);
}
let audioSources = [];
function updateAudioNodes() {
if (!stereo)
return;
for (let source of audioSources) {
if (!source.node) {
source.node = stereo.clone();
source.node.visible = true;
source.node.selectable = true;
scene.addNode(source.node);
}
let node = source.node;
let matrix = node.matrix;
// Move the node to the right location.
mat4.identity(matrix);
mat4.translate(matrix, matrix, source.position);
mat4.rotateY(matrix, matrix, source.rotateY);
// Scale it based on loudness of the audio channel
let scale = getLoudnessScale(source.analyser);
mat4.scale(matrix, matrix, [scale, scale, scale]);
}
}
function playAudio() {
if (audioContext.state == 'running')
return;
audioContext.resume();
for (let source of audioSources) {
source.bufferSource.start(0);
}
if (playButton) {
playButton.iconTexture = pauseTexture;
}
}
function pauseAudio() {
if (audioContext.state == 'suspended')
return;
for (let source of audioSources) {
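// AudioBufferSourceNodes are single-use: once stopped they cannot be
// restarted, so create a fresh one for the next call to playAudio().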
source.bufferSource.stop(0);
source.bufferSource = createBufferSource(
source.source, source.buffer, source.analyser);
}
audioContext.suspend();
if (playButton) {
playButton.iconTexture = playTexture;
}
}
window.addEventListener('blur', () => {
// As a general rule you should mute any sounds your page is playing
// whenever the page loses focus.
pauseAudio();
});
function initXR() {
xrButton = new XRDeviceButton({
onRequestSession: onRequestSession,
onEndSession: onEndSession
});
document.querySelector('header').appendChild(xrButton.domElement);
if (navigator.xr) {
navigator.xr.supportsSessionMode('immersive-vr').then(() => {
xrButton.enabled = true;
});
// Load multiple audio sources.
Promise.all([
createAudioSource({
url: '../media/sound/guitar.ogg',
position: [0, DEFAULT_HEIGHT, -1],
rotateY: 0
}),
createAudioSource({
url: '../media/sound/drums.ogg',
position: [-1, DEFAULT_HEIGHT, 0],
rotateY: Math.PI * 0.5
}),
createAudioSource({
url: '../media/sound/perc.ogg',
position: [1, DEFAULT_HEIGHT, 0],
rotateY: Math.PI * -0.5
}),
]).then((sources) => {
audioSources = sources;
// Once the audio is loaded, create a button that toggles the
// audio state when clicked.
playButton = new ButtonNode(playTexture, () => {
if (audioContext.state == 'running') {
pauseAudio();
} else {
playAudio();
}
});
playButton.translation = [0, 1.2, -0.65];
scene.addNode(playButton);
});
let outputCanvas = document.createElement('canvas');
let ctx = outputCanvas.getContext('xrpresent');
navigator.xr.requestSession({ outputContext: ctx })
.then((session) => {
document.body.appendChild(outputCanvas);
onSessionStarted(session);
});
} else {
initFallback();
}
}
function initFallback() {
initGL();
document.body.appendChild(gl.canvas);
let fallbackHelper = new FallbackHelper(scene, gl);
fallbackHelper.emulateStage = true;
}
function initGL() {
if (gl)
return;
gl = createWebGLContext({
xrCompatible: true
});
renderer = new Renderer(gl);
scene.setRenderer(renderer);
scene.inputRenderer.setControllerMesh(new Gltf2Node({url: '../media/gltf/controller/controller.gltf'}));
}
function onRequestSession() {
// Set up a mirror canvas
let mirrorCanvas = document.createElement('canvas');
let ctx = mirrorCanvas.getContext('xrpresent');
mirrorCanvas.setAttribute('id', 'mirror-canvas');
document.body.appendChild(mirrorCanvas);
navigator.xr.requestSession({ mode: 'immersive-vr', outputContext: ctx }).then((session) => {
xrButton.setSession(session);
onSessionStarted(session);
});
}
function onSessionStarted(session) {
session.addEventListener('end', onSessionEnded);
session.addEventListener('selectstart', onSelectStart);
session.addEventListener('selectend', onSelectEnd);
session.addEventListener('select', (ev) => {
let refSpace = ev.frame.session.mode.startsWith('immersive') ?
xrImmersiveRefSpace :
xrNonImmersiveRefSpace;
scene.handleSelect(ev.inputSource, ev.frame, refSpace);
});
initGL();
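// An XRWebGLLayer associates the (XR-compatible) GL context with the
// session so the scene can render into the session's framebuffer.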
session.updateRenderState({ baseLayer: new XRWebGLLayer(session, gl) });
session.requestReferenceSpace({ type: 'stationary', subtype: 'floor-level' }).then((refSpace) => {
if (session.mode.startsWith('immersive')) {
xrImmersiveRefSpace = refSpace;
} else {
xrNonImmersiveRefSpace = refSpace;
}
session.requestAnimationFrame(onXRFrame);
});
}
function onEndSession(session) {
session.end();
}
function onSessionEnded(event) {
if (event.session.mode.startsWith('immersive')) {
document.body.removeChild(document.querySelector('#mirror-canvas'));
xrButton.setSession(null);
// Stop the audio playback when we exit XR.
pauseAudio();
}
}
let draggingSource = null;
let draggingInput = null;
let draggingTransform = mat4.create();
function hitTest(inputSource, frame, refSpace) {
let inputPose = frame.getInputPose(inputSource, refSpace);
if (!inputPose) {
return;
}
if (inputPose.targetRay) {
let hitResult = scene.hitTest(inputPose.targetRay);
if (hitResult) {
for (let source of audioSources) {
if (hitResult.node == source.node) {
draggingSource = source;
draggingInput = inputSource;
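// Record the node's transform relative to the controller:
// draggingTransform = inverse(targetRayTransform) * nodeMatrix.
// Reapplying it against the ray's current transform each frame keeps
// the node "attached" to the controller while dragging.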
mat4.invert(draggingTransform, inputPose.targetRay.transformMatrix);
mat4.multiply(draggingTransform, draggingTransform, source.node.matrix);
return true;
}
}
}
}
return false;
}
function onSelectStart(ev) {
let refSpace = ev.frame.session.mode.startsWith('immersive') ?
xrImmersiveRefSpace :
xrNonImmersiveRefSpace;
hitTest(ev.inputSource, ev.frame, refSpace);
}
function onSelectEnd(ev) {
draggingSource = null;
draggingInput = null;
}
let tmpMatrix = mat4.create();
function onXRFrame(t, frame) {
let session = frame.session;
let refSpace = session.mode.startsWith('immersive') ?
xrImmersiveRefSpace :
xrNonImmersiveRefSpace;
let pose = frame.getViewerPose(refSpace);
scene.startFrame();
session.requestAnimationFrame(onXRFrame);
scene.updateInputSources(frame, refSpace);
if (draggingSource) {
let draggingPose = frame.getInputPose(draggingInput, refSpace);
if (draggingPose) {
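// Compute the dragged node's new world transform as
// targetRayTransform * draggingTransform, then transform the origin
// through it to extract the node's new world-space position.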
let pos = draggingSource.position;
mat4.multiply(tmpMatrix, draggingPose.targetRay.transformMatrix, draggingTransform);
vec3.transformMat4(pos, [0, 0, 0], tmpMatrix);
draggingSource.source.setPosition(pos[0], pos[1], pos[2]);
}
}
updateAudioNodes();
scene.drawXRFrame(frame, pose);
if (pose) {
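// Resonance's setListenerFromMatrix() expects a three.js-style
// Matrix4 (an object with a column-major `elements` array), so wrap
// the pose matrix accordingly.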
resonance.setListenerFromMatrix({ elements: pose.poseModelMatrix });
}
scene.endFrame();
}
// Start the XR application.
initXR();
</script>
</body>
</html>