blob: a38d2d0262c354ec80f878c53d09d37aa8100cda [file] [log] [blame]
<!doctype html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<!-- Origin Trial Token, feature = WebVR (For Chrome M62+), origin = https://webvr.info, expires = 2018-09-10 -->
<meta http-equiv="origin-trial" data-feature="WebVR (For Chrome M62+)" data-expires="2018-09-10" content="AhQcOrbjvS0+50wwuqtAidzraKNfZj8Bj159g2+2LsT5QRHe9IeebCl5ApORwd3oGxfKzl5H8s5K3aTMNzC+5gsAAABPeyJvcmlnaW4iOiJodHRwczovL3dlYnZyLmluZm86NDQzIiwiZmVhdHVyZSI6IldlYlZSMS4xTTYyIiwiZXhwaXJ5IjoxNTM2NjAxNDEzfQ==">
<title>08 - Dynamic Resolution</title>
<!--
This sample demonstrates how to efficiently adjust the resolution of your
WebVR scene on the fly using the layer bounds. Based on sample 4b.
-->
<style>
body {
background-color: black;
}
#canvas-clip, #webgl-canvas {
box-sizing: border-box;
height: 100%;
left: 0;
margin: 0;
position: absolute;
top: 0;
width: 100%;
}
#canvas-clip.presenting {
overflow: hidden;
bottom: 0;
right: 0;
margin: auto;
}
</style>
<!-- This entire block is only to facilitate dynamically enabling and
disabling the WebVR polyfill, and is not necessary for most WebVR apps.
If you want to use the polyfill in your app, just include the js file and
everything will work the way you want it to by default. -->
<script src="js/third-party/webvr-polyfill.js"></script>
<script src="js/third-party/wglu/wglu-url.js"></script>
<script>
// Activate the polyfill only when the 'polyfill' query argument asks for it.
if (WGLUUrl.getBool('polyfill', false)) {
  var polyfillConfig = {
    // Always provide a polyfilled CardboardVRDisplay on mobile: when no
    // native API is available at all, and also when the native API is
    // present but reports no VRDisplays.
    PROVIDE_MOBILE_VRDISPLAY: true,
    // Polyfill performance tweaks.
    DIRTY_SUBMIT_FRAME_BINDINGS: true,
    BUFFER_SCALE: 0.75
  };
  var polyfill = new WebVRPolyfill(polyfillConfig);
}
</script>
<!-- End sample polyfill enabling logic -->
<script src="js/third-party/gl-matrix-min.js"></script>
<script src="js/third-party/wglu/wglu-program.js"></script>
<script src="js/third-party/wglu/wglu-stats.js"></script>
<script src="js/third-party/wglu/wglu-texture.js"></script>
<script src="js/vr-cube-sea.js"></script>
<script src="js/vr-samples-util.js"></script>
</head>
<body>
<div id="canvas-clip">
<canvas id="webgl-canvas"></canvas>
</div>
<script>
/* global mat4, VRCubeSea, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
(function () {
"use strict";
// Presentation state shared across the handlers below.
var vrDisplay = null;
var frameData = null;
// Scratch matrices used by the non-presenting (mono) render path.
var projectionMat = mat4.create();
var viewMat = mat4.create();
var vrPresentButton = null;
// ================================
// WebVR-specific code begins here.
// ================================
// WebGL setup. These are null whenever the GL context is unavailable
// (before init and after a context-lost event).
var gl = null;
var cubeSea = null;
var stats = null;
// Handles GPU context loss: cancel the default so the context can be
// restored later, and drop every GL-backed resource.
function onContextLost( event ) {
  event.preventDefault();
  console.log( 'WebGL Context Lost.' );
  // These are rebuilt from scratch in onContextRestored.
  cubeSea = null;
  stats = null;
  gl = null;
}
// Rebuilds all GL state after the context comes back. The
// preserveDrawingBuffer flag mirrors what the initial setup used.
function onContextRestored( event ) {
  console.log( 'WebGL Context Restored.' );
  var hasExternalDisplay = vrDisplay ? vrDisplay.capabilities.hasExternalDisplay : false;
  initWebGL(hasExternalDisplay);
}
// Grab the render canvas and listen for context loss/restoration so the
// sample survives GPU resets.
var webglCanvas = document.getElementById("webgl-canvas");
webglCanvas.addEventListener( 'webglcontextlost', onContextLost, false );
webglCanvas.addEventListener( 'webglcontextrestored', onContextRestored, false );
// Wrapper div that is resized to a single eye while presenting
// (see onVRPresentChange).
var canvasClip = document.getElementById("canvas-clip");
// Creates the WebGL context plus all GL-backed scene resources, then kicks
// off the render loop. `preserveDrawingBuffer` should be true when the
// canvas is mirrored to an external display. Shows an error and returns
// early if no context can be created.
function initWebGL (preserveDrawingBuffer) {
  var glAttribs = {
    alpha: false,
    preserveDrawingBuffer: preserveDrawingBuffer
  };
  var useWebgl2 = WGLUUrl.getBool('webgl2', false);
  var contextTypes = useWebgl2 ? ["webgl2"] : ["webgl", "experimental-webgl"];
  // Plain indexed loop: for-in over an array would also visit any
  // enumerable properties someone added to Array.prototype.
  for (var i = 0; i < contextTypes.length; ++i) {
    gl = webglCanvas.getContext(contextTypes[i], glAttribs);
    if (gl)
      break;
  }
  if (!gl) {
    var webglType = (useWebgl2 ? "WebGL 2" : "WebGL");
    VRSamplesUtil.addError("Your browser does not support " + webglType + ".");
    return;
  }
  gl.clearColor(0.1, 0.2, 0.3, 1.0);
  gl.enable(gl.DEPTH_TEST);
  gl.enable(gl.CULL_FACE);
  var textureLoader = new WGLUTextureLoader(gl);
  var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
  cubeSea = new VRCubeSea(gl, texture);
  var enablePerformanceMonitoring = WGLUUrl.getBool(
      'enablePerformanceMonitoring', false);
  stats = new WGLUStats(gl, enablePerformanceMonitoring);
  // Size the canvas to the current layout before the first frame renders.
  window.addEventListener("resize", onResize, false);
  onResize();
  window.requestAnimationFrame(onAnimationFrame);
}
// Starts VR presentation. Must run inside a user gesture handler; on
// failure a transient error message is shown.
function onVRRequestPresent () {
  var layers = [{ source: webglCanvas }];
  vrDisplay.requestPresent(layers).then(function () {
    // Success: the vrdisplaypresentchange handler takes it from here.
  }, function (err) {
    var errMsg = "requestPresent failed.";
    if (err && err.message) {
      errMsg += "<br/>" + err.message;
    }
    VRSamplesUtil.addError(errMsg, 2000);
  });
}
// Stops VR presentation, restoring full-resolution rendering first.
function onVRExitPresent () {
  // Ignore exit requests that arrive when nothing is presenting.
  if (!vrDisplay.isPresenting) {
    return;
  }
  resolutionMultiplier = 1.0;
  vrDisplay.exitPresent().then(function () {
    // Nothing to do on success.
  }, function () {
    VRSamplesUtil.addError("exitPresent failed.", 2000);
  });
}
// Keeps the page UI in sync with the display's presentation state. Only
// displays with an external (desktop) screen get UI updates here: the clip
// div is sized to one eye so the mirrored canvas shows a single view.
function onVRPresentChange () {
  if (vrDisplay.capabilities.hasExternalDisplay) {
    // Swap which present/exit button is showing.
    VRSamplesUtil.removeButton(vrPresentButton);
    if (vrDisplay.isPresenting) {
      vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
      canvasClip.classList.add("presenting");
      var leftEye = vrDisplay.getEyeParameters("left");
      canvasClip.style.width = (leftEye.renderWidth/2) + "px";
      canvasClip.style.height = (leftEye.renderHeight/2) + "px";
    } else {
      vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
      canvasClip.classList.remove("presenting");
      canvasClip.style.width = "";
      canvasClip.style.height = "";
      webglCanvas.style.width = "";
      webglCanvas.style.height = "";
    }
  }
  // Resize AFTER the container div has been updated.
  onResize();
}
// Feature-detect WebVR and wire up the matching UI / render path.
if (navigator.getVRDisplays) {
  frameData = new VRFrameData();
  navigator.getVRDisplays().then(function (displays) {
    if (!displays.length) {
      initWebGL(false);
      VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
      return;
    }
    // Use the most recently connected display.
    vrDisplay = displays[displays.length - 1];
    if (vrDisplay.capabilities.canPresent) {
      vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
      // For the benefit of automated testing. Safe to ignore.
      if (WGLUUrl.getBool('canvasClickPresents', false))
        webglCanvas.addEventListener("click", onVRRequestPresent, false);
    }
    window.addEventListener('vrdisplaypresentchange', onVRPresentChange, false);
    window.addEventListener('vrdisplayactivate', onVRRequestPresent, false);
    window.addEventListener('vrdisplaydeactivate', onVRExitPresent, false);
    initWebGL(vrDisplay.capabilities.hasExternalDisplay);
  }, function () {
    VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
  });
} else if (navigator.getVRDevices) {
  // Legacy (pre-1.0) WebVR API only.
  initWebGL(false);
  VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
} else {
  initWebGL(false);
  VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
}
// Sizes the canvas backing store: per-eye render dimensions while
// presenting, CSS size times devicePixelRatio otherwise.
function onResize () {
  var presenting = vrDisplay && vrDisplay.isPresenting;
  if (presenting) {
    var leftEye = vrDisplay.getEyeParameters("left");
    var rightEye = vrDisplay.getEyeParameters("right");
    // Side-by-side stereo layout: twice the widest eye, tallest eye high.
    webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
    webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
  } else {
    var scale = window.devicePixelRatio;
    webglCanvas.width = webglCanvas.offsetWidth * scale;
    webglCanvas.height = webglCanvas.offsetHeight * scale;
  }
}
// Resets the background to a random dim color on each click/tap.
function onClick () {
  if (!gl)
    return;
  var r = Math.random() * 0.5;
  var g = Math.random() * 0.5;
  var b = Math.random() * 0.5;
  gl.clearColor(r, g, b, 1.0);
}
// Register for mouse restricted events while in VR
// (e.g. mouse no longer available on desktop 2D view).
// Responds by taking pointer lock on the canvas where supported.
function onDisplayPointerRestricted() {
  var canLock = webglCanvas && webglCanvas.requestPointerLock;
  if (canLock) {
    webglCanvas.requestPointerLock();
  }
}
// Register for mouse unrestricted events while in VR
// (e.g. mouse once again available on desktop 2D view).
// Releases pointer lock, but only if this canvas currently holds it.
function onDisplayPointerUnrestricted() {
  var lockedElement = document.pointerLockElement;
  var lockedToCanvas = lockedElement && lockedElement === webglCanvas;
  if (lockedToCanvas && document.exitPointerLock) {
    document.exitPointerLock();
  }
}
// Route clicks (including VR controller clicks) to onClick.
VRSamplesUtil.addVRClickListener(onClick);
webglCanvas.addEventListener("click", onClick, false);
// Pointer-lock handoff while presenting on a desktop display.
window.addEventListener('vrdisplaypointerrestricted', onDisplayPointerRestricted);
window.addEventListener('vrdisplaypointerunrestricted', onDisplayPointerUnrestricted);
// How large our frame should be in relation to the recommended render
// target size.
var resolutionMultiplier = 1.0;
// Per-eye pixel dimensions, recomputed by adjustResolution each update.
var eyeWidth, eyeHeight;
// Back-dated by a second so the first adjustResolution call runs
// immediately instead of waiting out the throttle window.
var lastAdjustment = Date.now()-1000;
// Oscillates the rendered resolution and tells the presenting display which
// sub-rectangle of the canvas is in use via layer bounds. Called once per
// presented frame; internally throttled to ten updates per second. Updates
// the module-level eyeWidth/eyeHeight/resolutionMultiplier values.
function adjustResolution() {
  var t = Date.now();
  // Update the resolution at most every tenth of a second.
  if (t - lastAdjustment < 100)
    return;
  lastAdjustment = t;
  // Modify the resolution we are rendering at over time on a sin wave.
  // In the real world this would probably be based on scene complexity.
  // Oscillates between 1.0 and 0.5.
  resolutionMultiplier = (Math.sin(t / 1000) * 0.25) + 0.75;
  eyeWidth = Math.floor(webglCanvas.width * 0.5 * resolutionMultiplier);
  eyeHeight = Math.floor(webglCanvas.height * resolutionMultiplier);
  // Layer bounds are described in UV space, so 0.0 to 1.0.
  var boundsWidth = eyeWidth / webglCanvas.width;
  var boundsHeight = eyeHeight / webglCanvas.height;
  // Tell the presenting display about the new texture bounds. This
  // ensures it only picks up the parts of the texture we're going to be
  // rendering to and avoids the need to resize the WebGL canvas, which
  // can be a slow operation. Because we're already presenting when we
  // call requestPresent again it only updates the VRLayer information and
  // doesn't require a user gesture.
  vrDisplay.requestPresent([{
    source: webglCanvas,
    leftBounds: [0.0, 0.0, boundsWidth, boundsHeight],
    rightBounds: [boundsWidth, 0.0, boundsWidth, boundsHeight],
  }]).then(null, function (err) {
    // Log bounds-update failures rather than leaving the promise
    // rejection unhandled (which produces console warnings and can
    // trigger unhandledrejection handlers).
    console.error('Updating layer bounds failed.', err);
  });
  if (vrDisplay.capabilities.hasExternalDisplay) {
    // To ensure our mirrored content also shows up correctly we'll scale
    // the canvas display size to be scaled appropriately such that it
    // continues to only show one eye.
    webglCanvas.style.width = (1.0/resolutionMultiplier) * 200 + "%";
    webglCanvas.style.height = (1.0/resolutionMultiplier) * 100 + "%";
  }
}
// Per-frame render callback. While presenting, draws side-by-side stereo
// views into the dynamically sized per-eye viewports; otherwise draws a
// single mono view scheduled via window.requestAnimationFrame.
function onAnimationFrame (t) {
  // do not attempt to render if there is no available WebGL context
  if (!gl || !stats || !cubeSea) {
    return;
  }
  stats.begin();
  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
  if (vrDisplay) {
    // Schedule the next frame on the display so it runs at headset rate.
    vrDisplay.requestAnimationFrame(onAnimationFrame);
    vrDisplay.getFrameData(frameData);
    if (vrDisplay.isPresenting) {
      // Recompute eyeWidth/eyeHeight and the layer bounds before drawing.
      adjustResolution();
      // Note that the viewports use the eyeWidth/height rather than the
      // canvas width and height.
      gl.viewport(0, webglCanvas.height-eyeHeight, eyeWidth, eyeHeight);
      cubeSea.render(frameData.leftProjectionMatrix, frameData.leftViewMatrix, stats, t);
      gl.viewport(eyeWidth, webglCanvas.height-eyeHeight, eyeWidth, eyeHeight);
      cubeSea.render(frameData.rightProjectionMatrix, frameData.rightViewMatrix, stats, t);
      vrDisplay.submitFrame();
    } else {
      // Display present but not presenting: draw one eye full-canvas.
      gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
      mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
      cubeSea.render(projectionMat, frameData.leftViewMatrix, stats, t);
      stats.renderOrtho();
    }
  } else {
    window.requestAnimationFrame(onAnimationFrame);
    // No VRDisplay found. Render a mono view with an identity pose.
    gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
    mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
    mat4.identity(viewMat);
    cubeSea.render(projectionMat, viewMat, stats, t);
    stats.renderOrtho();
  }
  stats.end();
}
})();
</script>
</body>
</html>