Merge branch 'main' of github.com:cabanier/webxr-samples-fork into cabanier-main
diff --git a/js/render/nodes/cube-sea-occlusion.js b/js/render/nodes/cube-sea-occlusion.js
new file mode 100644
index 0000000..0e6f580
--- /dev/null
+++ b/js/render/nodes/cube-sea-occlusion.js
@@ -0,0 +1,358 @@
+// Copyright 2018 The Immersive Web Community Group
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+// SOFTWARE.
+
+import {Material} from '../core/material.js';
+import {Node} from '../core/node.js';
+import {ExternalTexture} from '../core/texture.js';
+import {UrlTexture} from '../core/texture.js';
+import {BoxBuilder} from '../geometry/box-builder.js';
+import {mat4} from '../math/gl-matrix.js';
+
/**
 * Material for the cube sea.
 *
 * Samplers:
 *  - baseColor:  the cube texture.
 *  - depthColor: a sampler2DArray with one real-world depth slice per view,
 *    bound at render time through the external texture named "scene_depth".
 *
 * In the multiview path the fragment shader compares each fragment's depth
 * against the real-world depth map and fades out virtual fragments that lie
 * behind real geometry (soft occlusion, blurred with a 5x5 kernel).
 */
class CubeSeaMaterial extends Material {
  constructor() {
    super();

    this.baseColor = this.defineSampler('baseColor');
    this.depthColor = this.defineSampler('depthColor');
  }

  get materialName() {
    return 'CUBE_SEA';
  }

  // Single-view vertex shader: directional + ambient lighting.
  get vertexSource() {
    return `
    attribute vec3 POSITION;
    attribute vec2 TEXCOORD_0;
    attribute vec3 NORMAL;

    varying vec2 vTexCoord;
    varying vec3 vLight;

    const vec3 lightDir = vec3(0.75, 0.5, 1.0);
    const vec3 ambientColor = vec3(0.5, 0.5, 0.5);
    const vec3 lightColor = vec3(0.75, 0.75, 0.75);

    vec4 vertex_main(mat4 proj, mat4 view, mat4 model) {
      vec3 normalRotated = vec3(model * vec4(NORMAL, 0.0));
      float lightFactor = max(dot(normalize(lightDir), normalRotated), 0.0);
      vLight = ambientColor + (lightColor * lightFactor);
      vTexCoord = TEXCOORD_0;
      return proj * view * model * vec4(POSITION, 1.0);
    }`;
  }

  // Multiview vertex shader: same lighting, per-view projection selected by
  // gl_ViewID_OVR.
  get vertexSourceMultiview() {
    return `#version 300 es
    #extension GL_OVR_multiview2 : require
    #define NUM_VIEWS 2
    layout(num_views=NUM_VIEWS) in;
    #define VIEW_ID gl_ViewID_OVR
    in vec3 POSITION;
    in vec2 TEXCOORD_0;
    in vec3 NORMAL;

    out vec2 vTexCoord;
    out vec3 vLight;

    const vec3 lightDir = vec3(0.75, 0.5, 1.0);
    const vec3 ambientColor = vec3(0.5, 0.5, 0.5);
    const vec3 lightColor = vec3(0.75, 0.75, 0.75);

    vec4 vertex_main(mat4 left_proj, mat4 left_view, mat4 right_proj, mat4 right_view, mat4 model) {
      vec3 normalRotated = vec3(model * vec4(NORMAL, 0.0));
      float lightFactor = max(dot(normalize(lightDir), normalRotated), 0.0);
      vLight = ambientColor + (lightColor * lightFactor);
      vTexCoord = TEXCOORD_0;
      return (VIEW_ID == 0u) ? left_proj * left_view * model * vec4(POSITION, 1.0) :
                               right_proj * right_view * model * vec4(POSITION, 1.0);
    }`;
  }

  // Multiview fragment shader with depth-based soft occlusion.
  get fragmentSourceMultiview() {
    return `#version 300 es
    #extension GL_OVR_multiview2 : require
    #define VIEW_ID gl_ViewID_OVR
    precision highp float;
    precision highp sampler2DArray;
    uniform sampler2D baseColor;
    uniform sampler2DArray depthColor;
    in vec2 vTexCoord;
    in vec3 vLight;

    // Real-world depth for this view, converted to millimeters.
    // Assumes the depth texture stores meters (GPU float depth) -- TODO confirm.
    float Depth_GetCameraDepthInMillimeters(const sampler2DArray depthTexture,
                                            const vec2 depthUv) {
      return texture(depthColor, vec3(depthUv.x, depthUv.y, VIEW_ID)).r * 1000.0;
    }

    float Depth_GetVirtualSceneDepthMillimeters(const sampler2D depthTexture,
                                                const vec2 depthUv, float zNear,
                                                float zFar) {
      // Determine the depth of the virtual scene fragment in millimeters.
      const float kMetersToMillimeters = 1000.0;
      // This value was empirically chosen to correct errors with objects appearing
      // to phase through the floor. In millimeters.
      const float kBias = -80.0;
      float ndc = 2.0 * texture(depthTexture, depthUv).x - 1.0;
      return 2.0 * zNear * zFar / (zFar + zNear - ndc * (zFar - zNear)) *
                 kMetersToMillimeters +
             kBias;
    }

    float Depth_GetOcclusion(const sampler2DArray depthTexture, const vec2 depthUv,
                             float assetDepthMm) {
      float depthMm = Depth_GetCameraDepthInMillimeters(depthTexture, depthUv);

      // Instead of a hard z-buffer test, allow the asset to fade into the
      // background along a 2 * kDepthTolerancePerMm * assetDepthMm
      // range centered on the background depth.
      const float kDepthTolerancePerMm = 0.01;
      return clamp(1.0 -
                       0.5 * (depthMm - assetDepthMm) /
                           (kDepthTolerancePerMm * assetDepthMm) +
                       0.5, 0.0, 1.0);
    }

    float Depth_GetBlurredOcclusionAroundUV(const sampler2DArray depthTexture,
                                            const vec2 uv, float assetDepthMm) {
      // Kernel used:
      // 0   4   7   4   0
      // 4   16  26  16  4
      // 7   26  41  26  7
      // 4   16  26  16  4
      // 0   4   7   4   0
      const float kKernelTotalWeights = 269.0;
      float sum = 0.0;

      const float kOcclusionBlurAmount = 0.01;
      vec2 blurriness =
          vec2(kOcclusionBlurAmount, kOcclusionBlurAmount /** u_DepthAspectRatio*/);

      float current = 0.0;

      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(-1.0, -2.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(+1.0, -2.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(-1.0, +2.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(+1.0, +2.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(-2.0, +1.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(+2.0, +1.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(-2.0, -1.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(+2.0, -1.0) * blurriness, assetDepthMm);
      sum += current * 4.0;

      current = 0.0;
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(-2.0, -0.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(+2.0, +0.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(+0.0, +2.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(-0.0, -2.0) * blurriness, assetDepthMm);
      sum += current * 7.0;

      current = 0.0;
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(-1.0, -1.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(+1.0, -1.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(-1.0, +1.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(+1.0, +1.0) * blurriness, assetDepthMm);
      sum += current * 16.0;

      current = 0.0;
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(+0.0, +1.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(-0.0, -1.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(-1.0, -0.0) * blurriness, assetDepthMm);
      current += Depth_GetOcclusion(
          depthTexture, uv + vec2(+1.0, +0.0) * blurriness, assetDepthMm);
      sum += current * 26.0;

      sum += Depth_GetOcclusion(depthTexture, uv, assetDepthMm) * 41.0;

      return sum / kKernelTotalWeights;
    }

    vec4 fragment_main() {
      // TODO(review): eye-buffer size is hard-coded (1680x1760). This should
      // be supplied as a uniform so other resolutions sample correctly.
      vec2 depthUv = vec2(gl_FragCoord.x/1680.0, gl_FragCoord.y/1760.0);

      vec4 o_FragColor = vec4(vLight, 1) * texture(baseColor, vTexCoord);
      if (o_FragColor.a == 0.0) {
        // There's no sense in calculating occlusion for a fully transparent pixel.
        return o_FragColor;
      }

      // NOTE(review): gl_FragCoord.z is non-linear window-space depth, not
      // meters; Depth_GetVirtualSceneDepthMillimeters above linearizes
      // properly but needs zNear/zFar uniforms. Kept as-is -- verify.
      float assetDepthMm = gl_FragCoord.z * 1000.0;

      float occlusion = Depth_GetBlurredOcclusionAroundUV(
          depthColor, depthUv, assetDepthMm);

      // Erode the occlusion mask and widen the transition band so occluder
      // edges fade smoothly instead of cutting off hard.
      float objectMaskEroded = pow(occlusion, 10.0);
      float occlusionTransition =
          clamp(occlusion * (2.0 - objectMaskEroded), 0.0, 1.0);

      // Clamp the maximum amount of occlusion applied.
      float kMaxOcclusion = 1.0;
      occlusionTransition = min(occlusionTransition, kMaxOcclusion);

      // Fix: blend with the smoothed transition value. It was previously
      // computed but unused; the raw occlusion was applied instead.
      return o_FragColor * (1.0 - occlusionTransition);
    }`;
  }

  // Fallback (non-multiview, GLSL ES 1.00) fragment shader: no occlusion.
  // Fix: removed the sampler2DArray precision statement and depthColor
  // uniform, which are invalid in GLSL ES 1.00 and were unused here.
  get fragmentSource() {
    return `
    precision highp float;
    uniform sampler2D baseColor;
    varying vec2 vTexCoord;
    varying vec3 vLight;

    vec4 fragment_main() {
      return vec4(vLight, 1.0) * texture2D(baseColor, vTexCoord);
    }`;
  }
}
+
/**
 * Scene node that renders a grid "sea" of lit, textured cubes plus four
 * spinning "hero" cubes around the origin. This variant wires a depth
 * texture (external texture "scene_depth") into CubeSeaMaterial so the
 * multiview shader can occlude cubes against real-world depth.
 *
 * Options:
 *   cubeCount  - cubes per grid axis (default 10).
 *   cubeScale  - scale factor for cube size (default 1.0).
 *   halfOnly   - draw only cubes with x >= 0 (default false).
 *   autoRotate - spin the whole sea; for automated testing (default false).
 *   imageUrl   - cube texture URL.
 */
export class CubeSeaNode extends Node {
  constructor(options = {}) {
    super();

    // Test variables

    // Number and size of the static cubes. Warning, large values
    // don't render right due to overflow of the int16 indices.
    this.cubeCount = options.cubeCount || 10;
    this.cubeScale = options.cubeScale || 1.0;

    // Draw only half the world cubes.
    this.halfOnly = !!options.halfOnly;

    // Automatically spin the world cubes. Intended for automated testing,
    // not recommended for viewing in a headset.
    this.autoRotate = !!options.autoRotate;

    this._texture = new UrlTexture(options.imageUrl || 'media/textures/cube-sea.png');
    this._material = new CubeSeaMaterial();
    this._material.baseColor.texture = this._texture;
    // The depth sampler is resolved at draw time by name from the renderer's
    // external-texture table (populated via renderer.addExternalTexture).
    this._material.depthColor.texture = new ExternalTexture("scene_depth");

    this._renderPrimitive = null;
  }

  // Called when the node is attached to a renderer; (re)builds all geometry.
  onRendererChanged(renderer) {
    this._renderPrimitive = null;

    let boxBuilder = new BoxBuilder();

    // Build the spinning "hero" cubes
    boxBuilder.pushCube([0, 0.25, -0.8], 0.1);
    boxBuilder.pushCube([0.8, 0.25, 0], 0.1);
    boxBuilder.pushCube([0, 0.25, 0.8], 0.1);
    boxBuilder.pushCube([-0.8, 0.25, 0], 0.1);

    let heroPrimitive = boxBuilder.finishPrimitive(renderer);

    this.heroNode = renderer.createMesh(heroPrimitive, this._material);

    // NOTE(review): rebuildCubes reads this._renderer (set by the Node base
    // class), not the renderer argument -- assumed set by this point; verify.
    this.rebuildCubes(boxBuilder);

    this.cubeSeaNode = new Node();
    this.cubeSeaNode.addRenderPrimitive(this._renderPrimitive);

    this.addNode(this.cubeSeaNode);
    this.addNode(this.heroNode);

    return this.waitForComplete();
  }

  // Regenerates the cube-sea grid geometry. Reuses the existing render
  // primitive when one exists so attached nodes keep rendering.
  rebuildCubes(boxBuilder) {
    if (!this._renderer) {
      return;
    }

    if (!boxBuilder) {
      boxBuilder = new BoxBuilder();
    } else {
      boxBuilder.clear();
    }

    let size = 0.4 * this.cubeScale;

    // Build the cube sea
    let halfGrid = this.cubeCount * 0.5;
    for (let x = 0; x < this.cubeCount; ++x) {
      for (let y = 0; y < this.cubeCount; ++y) {
        for (let z = 0; z < this.cubeCount; ++z) {
          let pos = [x - halfGrid, y - halfGrid, z - halfGrid];
          // Only draw cubes on one side. Useful for testing variable render
          // cost that depends on view direction.
          if (this.halfOnly && pos[0] < 0) {
            continue;
          }

          // Don't place a cube in the center of the grid.
          if (pos[0] == 0 && pos[1] == 0 && pos[2] == 0) {
            continue;
          }

          boxBuilder.pushCube(pos, size);
        }
      }
    }

    if (this.cubeCount > 12) {
      // Each cube has 6 sides with 2 triangles and 3 indices per triangle, so
      // the total number of indices needed is cubeCount^3 * 36. This exceeds
      // the short index range past 12 cubes.
      boxBuilder.indexType = 5125; // gl.UNSIGNED_INT
    }
    let cubeSeaPrimitive = boxBuilder.finishPrimitive(this._renderer);

    if (!this._renderPrimitive) {
      this._renderPrimitive = this._renderer.createRenderPrimitive(cubeSeaPrimitive, this._material);
    } else {
      this._renderPrimitive.setPrimitive(cubeSeaPrimitive);
    }
  }

  // Per-frame animation: optionally spin the sea, always spin the hero cubes.
  onUpdate(timestamp, frameDelta) {
    if (this.autoRotate) {
      mat4.fromRotation(this.cubeSeaNode.matrix, timestamp / 500, [0, -1, 0]);
    }
    mat4.fromRotation(this.heroNode.matrix, timestamp / 2000, [0, 1, 0]);
  }
}
diff --git a/layers-samples/proj-multiview-occlusion.html b/layers-samples/proj-multiview-occlusion.html
new file mode 100644
index 0000000..c2ecf36
--- /dev/null
+++ b/layers-samples/proj-multiview-occlusion.html
@@ -0,0 +1,301 @@
+<!doctype html>
+<!--
+Copyright 2021 The Immersive Web Community Group
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+-->
+<html>
+
+<head>
+ <meta charset='utf-8'>
+ <meta name='viewport' content='width=device-width, initial-scale=1, user-scalable=no'>
+ <meta name='mobile-web-app-capable' content='yes'>
+ <meta name='apple-mobile-web-app-capable' content='yes'>
+ <link rel='icon' type='image/png' sizes='32x32' href='../favicon-32x32.png'>
+ <link rel='icon' type='image/png' sizes='96x96' href='../favicon-96x96.png'>
+ <link rel='stylesheet' href='../css/common.css'>
+
+ <title>Projection Layer with Occlusion</title>
+</head>
+
+<body>
+ <header>
+ <details open>
+      <summary>Projection Layer with Multiview and Occlusion</summary>
+ <p>
+ This sample demonstrates use of WebXR depth sensing to do simple occlusion.
+ <a class="back" href="./">Back</a>
+ </p>
+ <p id="mv-status"></p>
+      <input type="checkbox" id="do_antialias" checked>antialias
+ </details>
+ </header>
+ <script type="module">
+ import { WebXRButton } from '../js/util/webxr-button.js';
+ import { Scene, WebXRView } from '../js/render/scenes/scene.js';
+ import { Renderer, createWebGLContext } from '../js/render/core/renderer.js';
+ import { CubeSeaNode } from '../js/render/nodes/cube-sea-occlusion.js';
+ import { InlineViewerHelper } from '../js/util/inline-viewer-helper.js';
+ import { QueryArgs } from '../js/util/query-args.js';
+
+ // If requested, use the polyfill to provide support for mobile devices
+ // and devices which only support WebVR.
+ import WebXRPolyfill from '../js/third-party/webxr-polyfill/build/webxr-polyfill.module.js';
+ if (QueryArgs.getBool('usePolyfill', true)) {
+ let polyfill = new WebXRPolyfill();
+ }
+
+ // XR globals.
+ let do_antialias = document.getElementById('do_antialias');
+ let xrButton = null;
+ let xrImmersiveRefSpace = null;
+ let inlineViewerHelper = null;
+ let xrGLFactory = null;
+ let xrFramebuffer = null;
+
+ // WebGL scene globals.
+ let gl = null;
+ let renderer = null;
+ let scene = new Scene();
+ let is_multisampled_supported = false;
+ let samples = 1;
+ let mv_ext = null;
+ let depthStencilTex = null;
+
+ scene.addNode(new CubeSeaNode({ imageUrl: '../media/textures/cube-sea.png' }));
+ scene.enableStats(false);
+
+ function initXR() {
+ xrButton = new WebXRButton({
+ onRequestSession: onRequestSession,
+ onEndSession: onEndSession
+ });
+ document.querySelector('header').appendChild(xrButton.domElement);
+
+ if (navigator.xr) {
+ navigator.xr.isSessionSupported('immersive-ar').then((supported) => {
+ let mvCompat = testMultiViewCompatibility();
+ if (!mvCompat) {
+ document.querySelector('#mv-status').textContent = "❌ - Multiview Unsupported";
+ } else {
+ document.querySelector('#mv-status').textContent = "✔️- Multiview Supported";
+ }
+ xrButton.enabled = supported && mvCompat;
+ });
+
+ navigator.xr.requestSession('inline').then(onSessionStarted);
+ }
+ }
+
+ function testMultiViewCompatibility() {
+ let tempWebGLContext = createWebGLContext({
+ xrCompatible: true,
+ webgl2: true
+ });
+
+ return tempWebGLContext.getExtension('OCULUS_multiview') != null;
+ }
+
+ function initGL() {
+ if (gl)
+ return;
+
+ gl = createWebGLContext({
+ xrCompatible: true,
+ webgl2: true
+ });
+ document.body.appendChild(gl.canvas);
+
+ samples = gl.getParameter(gl.MAX_SAMPLES);
+
+ mv_ext = gl.getExtension('OCULUS_multiview');
+ if (mv_ext) {
+ console.log("OCULUS_multiview extension is supported");
+ is_multisampled_supported = true;
+ }
+ else {
+ console.log("OCULUS_multiview extension is NOT supported");
+ }
+ if (!mv_ext) {
+ mv_ext = gl.getExtension('OVR_multiview2');
+ if (mv_ext) {
+ console.log("OVR_multiview2 extension is supported");
+ }
+ else {
+ console.log("Neither OCULUS_multiview nor OVR_multiview2 extension is NOT supported");
+ }
+ }
+
+ function onResize() {
+ gl.canvas.width = gl.canvas.clientWidth * window.devicePixelRatio;
+ gl.canvas.height = gl.canvas.clientHeight * window.devicePixelRatio;
+ }
+ window.addEventListener('resize', onResize);
+ onResize();
+
+ // Set up a non-black clear color so that we can see if something renders wrong.
+ gl.clearColor(0, 0, 0, 0);
+ }
+
+ function onRequestSession() {
+ return navigator.xr.requestSession('immersive-ar', { requiredFeatures: ['layers','depth-sensing'], optionalFeatures: [] }).then((session) => {
+ xrButton.setSession(session);
+ session.isImmersive = true;
+ onSessionStarted(session);
+ });
+ }
+
+ function onVisibilityChange(event) {
+ console.log("Visibility change for "
+ + (event.session.isImmersive ? "immersive" : "non-immersive")
+ + " session: "
+ + event.session.visibilityState);
+ }
+
      // Configures rendering for a newly started session. Immersive sessions
      // get a multiview renderer drawing into a layers-API projection layer
      // via our own framebuffer; inline sessions get a plain XRWebGLLayer.
      function onSessionStarted(session) {
        session.addEventListener('end', onSessionEnded);
        session.addEventListener('visibilitychange', onVisibilityChange);

        initGL();

        if (session.isImmersive) {
          renderer = new Renderer(gl, true /* multiview */);
          renderer.useDepth = true;

          scene.setRenderer(renderer);


          xrFramebuffer = gl.createFramebuffer();
          xrGLFactory = new XRWebGLBinding(session, gl);
          let layer = xrGLFactory.createProjectionLayer({
            textureType: "texture-array",
            depthFormat: gl.DEPTH_COMPONENT24,
            // NOTE(review): 'useDepthSorting' is not in the published layers
            // spec -- presumably a fork/experimental option; verify.
            useDepthSorting: true,
          });
          session.updateRenderState({ layers: [layer] });
        } else {
          renderer = new Renderer(gl, false /* multiview */);
          renderer.useDepth = false;

          scene.setRenderer(renderer);

          let glLayer = new XRWebGLLayer(session, gl);
          session.updateRenderState({ baseLayer: glLayer });
        }

        // Immersive content is world-anchored ('local'); inline preview is
        // head-anchored ('viewer').
        let refSpaceType = session.isImmersive ? 'local' : 'viewer';
        session.requestReferenceSpace(refSpaceType).then((refSpace) => {
          if (session.isImmersive) {
            xrImmersiveRefSpace = refSpace;
          } else {
            // In most samples moving forward we'll use this helper class rather
            // than the reference space directly to inject the necessary logic
            // for looking around an inline session with mouse and touch input.
            inlineViewerHelper = new InlineViewerHelper(gl.canvas, refSpace);
          }
          session.requestAnimationFrame(onXRFrame);
        });
      }
+
      // Called by the WebXRButton; ending the session fires onSessionEnded.
      function onEndSession(session) {
        session.end();
      }
+
+ function onSessionEnded(event) {
+ if (event.session.isImmersive) {
+ xrButton.setSession(null);
+ }
+ }
+
      // Per-frame callback. For immersive sessions, attaches the projection
      // layer's texture array to our framebuffer with multiview (optionally
      // multisampled), publishes the real-world depth texture to the
      // renderer, and draws all views in one pass.
      function onXRFrame(t, frame) {
        let session = frame.session;
        let refSpace = session.isImmersive ?
          xrImmersiveRefSpace :
          inlineViewerHelper.referenceSpace;
        let pose = frame.getViewerPose(refSpace);

        scene.startFrame();

        session.requestAnimationFrame(onXRFrame);

        if (pose) {
          let glLayer = null;

          if (session.isImmersive) {
            gl.bindFramebuffer(gl.FRAMEBUFFER, xrFramebuffer);
          } else {
            glLayer = session.renderState.baseLayer;
            gl.bindFramebuffer(gl.FRAMEBUFFER, glLayer.framebuffer);
            gl.disable(gl.SCISSOR_TEST);
            gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
          }

          let views = [];
          for (let view of pose.views) {
            let viewport = null;
            if (session.isImmersive) {
              // NOTE(review): per the Depth Sensing spec getDepthInformation
              // returns null when unavailable; 'isValid' looks like a
              // fork-specific property -- verify before relying on it.
              const depthData = xrGLFactory.getDepthInformation(view);
              if (depthData.isValid) {
                // Expose the depth texture under the name the cube-sea
                // material's ExternalTexture("scene_depth") binds to.
                renderer.addExternalTexture('scene_depth', depthData.texture, true);
              }

              glLayer = xrGLFactory.getViewSubImage(session.renderState.layers[0], view);
              viewport = glLayer.viewport;
              glLayer.framebuffer = xrFramebuffer;
              gl.bindFramebuffer(gl.FRAMEBUFFER, xrFramebuffer);
              if (views.length == 0) { // for multiview we need to set fbo only once
                // Attach both layers (views) of the color texture array;
                // use the multisampled variant when available and enabled.
                if (!is_multisampled_supported || !do_antialias.checked)
                  mv_ext.framebufferTextureMultiviewOVR(gl.DRAW_FRAMEBUFFER, gl.COLOR_ATTACHMENT0, glLayer.colorTexture, 0, 0, 2);
                else
                  mv_ext.framebufferTextureMultisampleMultiviewOVR(gl.DRAW_FRAMEBUFFER, gl.COLOR_ATTACHMENT0, glLayer.colorTexture, 0, samples, 0, 2);

                // Use the layer's depth texture when it provides one;
                // otherwise lazily allocate our own 2-layer depth array.
                if (glLayer.depthStencilTexture === null) {
                  if (depthStencilTex === null) {
                    console.log("MaxViews = " + gl.getParameter(mv_ext.MAX_VIEWS_OVR));
                    depthStencilTex = gl.createTexture();
                    gl.bindTexture(gl.TEXTURE_2D_ARRAY, depthStencilTex);
                    gl.texStorage3D(gl.TEXTURE_2D_ARRAY, 1, gl.DEPTH_COMPONENT24, viewport.width, viewport.height, 2);
                  }
                } else {
                  depthStencilTex = glLayer.depthStencilTexture;
                }
                if (!is_multisampled_supported || !do_antialias.checked)
                  mv_ext.framebufferTextureMultiviewOVR(gl.DRAW_FRAMEBUFFER, gl.DEPTH_ATTACHMENT, depthStencilTex, 0, 0, 2);
                else
                  mv_ext.framebufferTextureMultisampleMultiviewOVR(gl.DRAW_FRAMEBUFFER, gl.DEPTH_ATTACHMENT, depthStencilTex, 0, samples, 0, 2);

                gl.disable(gl.SCISSOR_TEST);
                gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
              }
            } else {
              viewport = glLayer.getViewport(view);
            }
            views.push(new WebXRView(view, glLayer, viewport));
          }

          scene.drawViewArray(views);
        }

        scene.endFrame();
      }
+
+ initXR();
+ </script>
+</body>
+
+</html>