<!DOCTYPE html>
<html>
<body>
<canvas id="canvas" width="10" height="10"></canvas>
<video id="video" autoplay></video>
<script src="third_party/blackframe.js"></script>
<script type="module" src="third_party/three.js/three.module.js"></script>
<script>
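// Dimensions requested from getUserMedia() and used for the canvas captures.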
const WIDTH = 320;
const HEIGHT = 240;

function nextFrame(t) {
  return new Promise(resolve => {
    // requestAnimationFrame(resolve) would also work here, but since the
    // workload is relatively light it would fire at a full ~60Hz; setTimeout()
    // lets callers pace frames to |t| milliseconds instead.
    setTimeout(resolve, t);
  });
}
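
// Clears |canvas| in green and blue on alternating frames, at roughly
// |framerate| Hz. Note that the recursive call is not awaited, so the returned
// Promise resolves after the first green/blue cycle while drawing continues.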
async function drawAlternatingColours(canvas, framerate) {
  const GREEN = [0, 1, 0, 1];
  const BLUE = [0, 0, 1, 1];

  const context = canvas.getContext('webgl', {alpha: true});
  context.clearColor(GREEN[0], GREEN[1], GREEN[2], GREEN[3]);
  context.clear(context.COLOR_BUFFER_BIT);
  await nextFrame(1000 / framerate);
  context.clearColor(BLUE[0], BLUE[1], BLUE[2], BLUE[3]);
  context.clear(context.COLOR_BUFFER_BIT);
  await nextFrame(1000 / framerate);

  drawAlternatingColours(canvas, framerate);
}
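
// Opens a getUserMedia() camera stream of |width|x|height|, maps it as a
// texture onto a rotating three.js plane, and renders the scene into |canvas|.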
async function renderGetUserMediaWithPerspective(canvas, width, height) {
  const constraints = {
    audio: false,
    video: {
      width: width,
      height: height
    }
  };
  const stream = await navigator.mediaDevices.getUserMedia(constraints);
  const video = document.createElement('video');
  video.autoplay = true;
  video.srcObject = stream;

  const THREE = await import('/third_party/three.js/three.module.js');

  const camera = new THREE.PerspectiveCamera(75, width / height, 0.1, 1000);
  const scene = new THREE.Scene();

  const texture = new THREE.VideoTexture(video);
  texture.needsUpdate = true;
  texture.minFilter = THREE.LinearFilter;
  texture.magFilter = THREE.LinearFilter;
  texture.format = THREE.RGBFormat;
  texture.crossOrigin = 'anonymous';

  const material = new THREE.MeshBasicMaterial({
    map: texture,
    side: THREE.DoubleSide  /* For |texture| to be rendered when facing away. */
  });

  const geometry = new THREE.PlaneGeometry(width / height, 1);

  const mesh = new THREE.Mesh(geometry, material);
  mesh.position.z = -1;
  scene.add(mesh);

  const renderer = new THREE.WebGLRenderer({
    antialias: true,
    canvas: canvas
  });
  // setPixelRatio() expects a device pixel ratio, not an aspect ratio; use 1
  // so the drawing buffer stays at exactly |width|x|height|.
  renderer.setPixelRatio(1);
  renderer.setSize(width, height, false);

  animate(mesh, scene, camera, renderer);
}
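
// Rotates |mesh| one step around its x axis, renders |scene| and reschedules
// itself on the next animation frame.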
function animate(mesh, scene, camera, renderer) {
  // Math.PI/60 delta when rendered at 60fps (expected) gives a 2 second period.
  mesh.rotation.x += Math.PI / 60;
  renderer.render(scene, camera);
  requestAnimationFrame(() => animate(mesh, scene, camera, renderer));
}
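
// Draws |video| into |context| and inspects the pixels via blackframe.js,
// retrying for up to 10 frames; returns true if the data still looks all
// black.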
async function asyncIsBlackFrame(context, video, width, height) {
  let i = 10;
  let frameIsBlack = true;
  do {
    i--;
    await nextFrame(1000 / 60);
    // |video| might not have presented any frames yet, in which case
    // drawImage() produces all-black data; try again for a few frames.
    context.drawImage(video, 0, 0, width, height);
    const imageData = context.getImageData(0, 0, width, height);
    frameIsBlack = isBlackFrame(imageData.data, imageData.data.length);
  } while (i > 0 && frameIsBlack);
  return frameIsBlack;
}
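
// Captures a MediaStream from the page's <canvas>, plays it back in the
// <video> element and, if |validate| is set, checks the stream's dimensions
// and that the captured frames are not all black.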
async function captureFromCanvasAndInspect(validate) {
  const canvas = document.getElementById('canvas');

  // Calling captureStream() with no parameters forces a new capture every
  // time |canvas| is modified.
  const stream = canvas.captureStream();
  if (stream.getVideoTracks().length !== 1) {
    throw new DOMException('Wrong getVideoTracks() length',
                           'InvalidStateError');
  }

  const video = document.getElementById('video');
  video.srcObject = stream;
  await video.play();

  if (!validate) {
    return;
  }

  const mediaStreamSettings = stream.getVideoTracks()[0].getSettings();
  video.width = mediaStreamSettings.width;
  video.height = mediaStreamSettings.height;

  if (isNaN(video.width) || isNaN(video.height) || video.width === 0 ||
      video.height === 0) {
    throw new DOMException(
        `Bad stream dimensions ${video.width}x${video.height}`);
  }
  if (video.width !== canvas.width || video.height !== canvas.height) {
    throw new DOMException(
        `Unexpected capture resolution, got ${video.width}x${video.height}, ` +
        `expected ${canvas.width}x${canvas.height}`);
  }

  // Draw the <video> contents in an offscreen canvas to validate the pixels.
  const width = Math.max(video.width / 8, 128);
  const height = Math.max(video.height / 8, 128);
  const context = new OffscreenCanvas(width, height).getContext('2d');
  if (await asyncIsBlackFrame(context, video, width, height)) {
    throw new DOMException('Captured data is considered all black');
  }
}
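
// Entry point: paints alternating colours on the canvas, then captures and
// validates the resulting stream.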
async function captureFromCanvasWithAlternatingColoursAndInspect(validate) {
  const canvas = document.getElementById('canvas');
  const FRAMERATE = 24;
  canvas.width = WIDTH;
  canvas.height = HEIGHT;
  await drawAlternatingColours(canvas, FRAMERATE);
  await captureFromCanvasAndInspect(validate);
}
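
// Entry point: renders a rotating getUserMedia() feed on the canvas, then
// captures and validates the resulting stream.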
async function captureFromCanvasWithVideoAndInspect(validate) {
  const canvas = document.getElementById('canvas');
  await renderGetUserMediaWithPerspective(canvas, WIDTH, HEIGHT);
  await captureFromCanvasAndInspect(validate);
}
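
// The entry points above are normally driven by an external test harness. As
// a minimal local sketch (an assumption, not part of the original setup),
// appending ?selftest to the URL runs the alternating-colours check
// standalone and logs the outcome.
if (new URLSearchParams(location.search).has('selftest')) {
  captureFromCanvasWithAlternatingColoursAndInspect(/*validate=*/true)
      .then(() => console.log('selftest: PASS'))
      .catch(e => console.error(`selftest: FAIL: ${e}`));
}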
</script>
</body>
</html>