<!doctype html>
<!--
Copyright 2018 The Immersive Web Community Group
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-->
<html>
<head>
<meta charset='utf-8'>
<meta name='viewport' content='width=device-width, initial-scale=1, user-scalable=no'>
<meta name='mobile-web-app-capable' content='yes'>
<meta name='apple-mobile-web-app-capable' content='yes'>
<!-- Origin Trial Token, feature = WebXR Device API, origin = https://immersive-web.github.io, expires = 2018-08-28 -->
<meta http-equiv="origin-trial" data-feature="WebXR Device API" data-expires="2018-08-28" content="AnNpu7ceXvLew05ccD8Zr1OZsdZiB2hLQKK82kTTMDwF7oRKtP3QEJ4RzkeHrmB8Sq0vSV6ZNmszpBCZ0I8p9gAAAABceyJvcmlnaW4iOiJodHRwczovL2ltbWVyc2l2ZS13ZWIuZ2l0aHViLmlvOjQ0MyIsImZlYXR1cmUiOiJXZWJYUkRldmljZSIsImV4cGlyeSI6MTUzNTQxNDQwMH0=">
<title>Spectator Mode</title>
<link href='css/common.css' rel='stylesheet'/>
<!--The polyfill is not needed for browsers that have native API support,
but is linked by these samples for wider compatibility.-->
<!--script src='https://cdn.jsdelivr.net/npm/webxr-polyfill@latest/build/webxr-polyfill.js'></script-->
<script src='js/webxr-polyfill.js'></script>
<script src='js/webxr-button.js'></script>
</head>
<body>
<header>
<details open>
<summary>Spectator Mode</summary>
<p>
This sample demonstrates how to render a separate, third-person view of
the scene to an external monitor, if one is available. It is not
applicable to mobile or standalone devices.
<a class="back" href="./">Back</a>
</p>
</details>
</header>
<main style='text-align: center;'>
</main>
<script type="module">
import {Scene} from './js/cottontail/src/scenes/scene.js';
import {Renderer, createWebGLContext} from './js/cottontail/src/core/renderer.js';
import {Gltf2Node} from './js/cottontail/src/nodes/gltf2.js';
import {QueryArgs} from './js/cottontail/src/util/query-args.js';
import {FallbackHelper} from './js/cottontail/src/util/fallback-helper.js';
import {mat4} from './js/cottontail/src/math/gl-matrix.js';
// If requested, initialize the WebXR polyfill
if (QueryArgs.getBool('allowPolyfill', false)) {
let polyfill = new WebXRPolyfill();
}
// XR globals.
let xrButton = null;
let xrImmersiveRefSpace = null;
let xrNonImmersiveRefSpace = null;
// WebGL scene globals.
let outputCanvas = null;
let gl = null;
let renderer = null;
let scene = new Scene();
scene.addNode(new Gltf2Node({url: '../media/gltf/garage/garage.gltf'}));
scene.standingStats(true);
// Indicates if we are currently presenting a spectator view of the
// scene to an external display.
let spectatorMode = false;
let spectatorButton = null;
let spectatorProjectionMatrix = mat4.create();
let spectatorViewMatrix = mat4.create();
let headset = null;
let mainElement = document.querySelector('main');
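// Checks for WebXR support, adds the session button to the header, and
// starts a non-immersive session that renders to the page's output canvas.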
function initXR() {
xrButton = new XRDeviceButton({
onRequestSession: onRequestSession,
onEndSession: onEndSession
});
document.querySelector('header').appendChild(xrButton.domElement);
if (navigator.xr) {
navigator.xr.supportsSessionMode('immersive-vr').then(() => {
xrButton.enabled = true;
});
outputCanvas = document.createElement('canvas');
navigator.xr.requestSession()
.then((session) => {
document.body.appendChild(outputCanvas);
session.updateRenderState({
outputContext: outputCanvas.getContext('xrpresent')
});
onSessionStarted(session);
});
} else {
initFallback();
}
}
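// If WebXR isn't available at all, render the scene directly to a WebGL
// canvas on the page using the fallback helper.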
function initFallback() {
initGL();
document.body.appendChild(gl.canvas);
let fallbackHelper = new FallbackHelper(scene, gl);
fallbackHelper.emulateStage = true;
}
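// Lazily creates the XR-compatible WebGL context, renderer, and controller
// mesh shared by both the XR and fallback paths.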
function initGL() {
if (gl)
return;
gl = createWebGLContext({
xrCompatible: true
});
renderer = new Renderer(gl);
scene.setRenderer(renderer);
scene.inputRenderer.setControllerMesh(new Gltf2Node({url: '../media/gltf/controller/controller.gltf'}));
// Because we may be rendering to the gl context's backbuffer now, we
// need to start monitoring resize events to manage its size.
window.addEventListener('resize', onResize);
}
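// Called when the XR button is clicked; requests an immersive VR session.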
function onRequestSession() {
navigator.xr.requestSession({ mode: 'immersive-vr' }).then((session) => {
xrButton.setSession(session);
onSessionStarted(session);
// When the exclusive session starts, add a button to the page that
// users can click to start spectator mode. It's important not to begin
// presenting the spectator view right away, because on some devices
// that may mean doing additional work that the user can't see. By
// requiring a button click first you ensure that the user can actually
// still access the 2D page.
spectatorButton = document.createElement('button');
spectatorButton.innerHTML = 'Enable spectator mode';
spectatorButton.addEventListener('click', onEnableSpectatorMode);
mainElement.appendChild(spectatorButton);
// Hide the non-exclusive session's output canvas so that we can see
// the button.
outputCanvas.style.display = 'none';
});
}
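// Shared setup for both the immersive and non-immersive sessions: create
// the WebGL layer, request a floor-level reference space, and start the
// frame loop.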
function onSessionStarted(session) {
session.addEventListener('end', onSessionEnded);
initGL();
session.updateRenderState({ baseLayer: new XRWebGLLayer(session, gl) });
session.requestReferenceSpace({ type: 'stationary', subtype: 'floor-level' }).then((refSpace) => {
if (session.mode.startsWith('immersive')) {
xrImmersiveRefSpace = refSpace;
} else {
xrNonImmersiveRefSpace = refSpace;
}
session.requestAnimationFrame(onXRFrame);
});
}
function onEndSession(session) {
session.end();
}
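// Handles a session's 'end' event. Only the immersive session needs
// cleanup here, since the spectator view and button only exist while it
// is active.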
function onSessionEnded(event) {
if (event.session.mode.startsWith('immersive')) {
xrButton.setSession(null);
// When we exit the exclusive session, stop presenting the spectator
// view and remove the WebGL canvas from the document.
if (spectatorMode) {
spectatorMode = false;
document.body.removeChild(gl.canvas);
}
// Remove the spectator button if needed.
if (spectatorButton) {
mainElement.removeChild(spectatorButton);
spectatorButton = null;
}
// Show the non-exclusive session's output canvas again.
outputCanvas.style.display = '';
}
}
// Called when the user clicks the button to enable spectator mode.
function onEnableSpectatorMode() {
spectatorMode = true;
// Append the WebGL context's canvas to the page so that we can render
// directly to it.
document.body.appendChild(gl.canvas);
onResize();
// Remove the spectator button, since it's no longer needed. You could
// alternatively change the button's function to disable spectator mode.
if (spectatorButton) {
mainElement.removeChild(spectatorButton);
spectatorButton = null;
}
// Load up a mesh that we can use to visualize the headset's pose.
if (!headset) {
headset = new Gltf2Node({url: '../media/gltf/headset/headset.gltf'});
scene.addNode(headset);
}
}
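// Called on window resize (and when spectator mode is first enabled) to
// size the canvas's drawing buffer.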
function onResize () {
if (spectatorMode) {
// The spectator view does take time to render, and can impact the
// performance of the in-headset view. To help mitigate that, we'll
// draw the spectator view at half the native resolution.
gl.canvas.width = (gl.canvas.offsetWidth * window.devicePixelRatio) / 2.0;
gl.canvas.height = (gl.canvas.offsetHeight * window.devicePixelRatio) / 2.0;
}
}
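// Per-frame callback for both sessions. Draws the regular XR views first,
// then, if spectator mode is enabled, an additional third-person view to
// the WebGL canvas's default backbuffer.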
function onXRFrame(t, frame) {
let session = frame.session;
let refSpace = session.mode.startsWith('immersive') ?
xrImmersiveRefSpace :
xrNonImmersiveRefSpace;
let pose = frame.getViewerPose(refSpace);
scene.startFrame();
session.requestAnimationFrame(onXRFrame);
scene.updateInputSources(frame, refSpace);
scene.drawXRFrame(frame, pose);
// If spectator mode is active, draw a third-person view of the scene to
// the WebGL context's default backbuffer.
if (spectatorMode) {
// Bind the WebGL context's default framebuffer, so that the rendered
// content shows up in the canvas element.
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
// Clear the framebuffer
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Set the viewport to the whole canvas
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
// Set up a sensible projection matrix for the canvas
mat4.perspective(spectatorProjectionMatrix, Math.PI*0.4, gl.drawingBufferWidth / gl.drawingBufferHeight, session.renderState.depthNear, session.renderState.depthFar);
// Set up a view matrix that gives us a third-person view of the scene.
// In this case it's positioned in a corner of the garage scene near the
// ceiling, looking down at the user.
mat4.identity(spectatorViewMatrix);
mat4.translate(spectatorViewMatrix, spectatorViewMatrix, [-1.75, 2.0, 1.75]);
mat4.rotateY(spectatorViewMatrix, spectatorViewMatrix, Math.PI * -0.25);
mat4.rotateX(spectatorViewMatrix, spectatorViewMatrix, Math.PI * -0.15);
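// The transform built above places the spectator camera in the scene;
// inverting it yields the view matrix that the renderer expects.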
mat4.invert(spectatorViewMatrix, spectatorViewMatrix);
// Update the headset's pose to match the user's and make it visible
// for this draw.
if (headset) {
headset.visible = true;
headset.matrix = pose.transform.matrix;
}
// Draw the spectator view of the scene.
scene.draw(spectatorProjectionMatrix, spectatorViewMatrix);
// Ensure the headset isn't visible in the VR view.
if (headset) {
headset.visible = false;
}
}
scene.endFrame();
}
// Start the XR application.
initXR();
</script>
</body>
</html>