8. Advances in Web Technology
<video>
<canvas>
WebRTC’s getUserMedia();
WebGL / three.js, video tag, canvas tag, OpenCV + emscripten
WebVR
Copyright © Hirokazu Egashira. All rights reserved.
28. Usage
<script src='webxr-polyfill.js'></script>
<!-- or use a link to a CDN -->
<script src='https://cdn.jsdelivr.net/npm/webxr-polyfill@latest/build/webxr-polyfill.js'>
</script>
using a build tool (like browserify or webpack)
npm install --save webxr-polyfill
var polyfill = new WebXRPolyfill();
include it as a script tag, or use a CDN
Using
using script tags
import WebXRPolyfill from 'webxr-polyfill';
const polyfill = new WebXRPolyfill();
In a modular ES6 world
29.
30. Copyright © Hirokazu Egashira. All rights reserved.
<!--The polyfill is not needed for browser that have native API support,
but is linked by these samples for wider compatibility.-->
<script src='https://cdn.jsdelivr.net/npm/webxr-polyfill@latest/build/webxr-polyfill.js'></script>
<body>
<header>
<details open>
<summary>Viewport Scaling</summary>
<p>
<a class="back" href="./">Back</a>
</p>
</details>
</header>
</body>
31. Copyright © Hirokazu Egashira. All rights reserved.
(function () {
'use strict';
// If requested, initialize the WebXR polyfill
if (QueryArgs.getBool('allowPolyfill', false)) {
var polyfill = new WebXRPolyfill();
}
// XR globals.
let xrButton = null;
let xrExclusiveFrameOfRef = null;
let xrNonExclusiveFrameOfRef = null;
// WebGL scene globals.
let gl = null;
let renderer = null;
let scene = new Scene();
scene.addNode(new Gltf2Node({url: 'media/gltf/camp/
camp.gltf'}));
scene.addNode(new SkyboxNode({url: 'media/textures/
eilenriede-park-2k.png'}));
scene.standingStats(true);
// Entry point: creates the XR button UI, then — if an XRDevice exists —
// enables the button for exclusive sessions and starts an inline
// ("magic window") session; otherwise falls back to plain WebGL.
function initXR() {
  xrButton = new XRDeviceButton({
    onRequestSession,
    onEndSession
  });
  document.querySelector('header').appendChild(xrButton.domElement);

  // No WebXR at all: render without any XR device.
  if (!navigator.xr) {
    initFallback();
    return;
  }

  navigator.xr.requestDevice().then((device) => {
    // Only enable the button if the device supports exclusive sessions.
    device.supportsSession({exclusive: true}).then(() => {
      xrButton.setDevice(device);
    });

    // Immediately start a non-exclusive session for inline presentation.
    const outputCanvas = document.createElement('canvas');
    const ctx = outputCanvas.getContext('xrpresent');
    device.requestSession({ outputContext: ctx }).then((session) => {
      document.body.appendChild(outputCanvas);
      onSessionStarted(session);
    });
  }).catch(() => {
    // requestDevice rejected (no device): use the non-XR fallback.
    initFallback();
  });
}
32. Copyright © Hirokazu Egashira. All rights reserved.
// Fired from initXR() when no XRDevice is available: renders the scene
// into an ordinary in-page WebGL canvas via the FallbackHelper.
function initFallback() {
  initGL();
  document.body.appendChild(gl.canvas);
  const helper = new FallbackHelper(scene, gl);
  helper.emulateStage = true; // pretend we have a stage so the scene sits at floor height
}
// Fired via the XRDeviceButton created in initXR(): starts an exclusive
// session and mirrors the headset view into a canvas added to the page.
function onRequestSession(device) {
  // Set up a mirror canvas to show the headset view on the 2D page.
  const mirrorCanvas = document.createElement('canvas');
  mirrorCanvas.setAttribute('id', 'mirror-canvas');
  const mirrorCtx = mirrorCanvas.getContext('xrpresent');
  document.body.appendChild(mirrorCanvas);

  device.requestSession({ exclusive: true, outputContext: mirrorCtx })
      .then((session) => {
        xrButton.setSession(session);
        onSessionStarted(session);
      });
}
// Fired from initFallback() and onSessionStarted(): lazily creates the
// shared WebGL context and renderer. No-op once the context exists.
function initGL(compatibleDevice) {
  if (gl) {
    return;
  }
  gl = createWebGLContext({ compatibleXRDevice: compatibleDevice });
  renderer = new Renderer(gl);
  scene.setRenderer(renderer);
}
33. Copyright © Hirokazu Egashira. All rights reserved.
// Fired from initXR() and onRequestSession() once a session is live.
function onSessionStarted(session) {
  session.addEventListener('end', onSessionEnded);

  initGL(session.device);
  session.baseLayer = new XRWebGLLayer(session, gl);

  // Get a stage frame of reference, which will align the user's physical
  // floor with Y=0 and can provide boundaries that indicate where the
  // user can safely walk. If the system can't natively provide stage
  // coordinates (for example, with a 3DoF device) then it will return an
  // emulated stage, where the view is translated up by a static height so
  // that the scene still renders in approximately the right place.
  // (FIX: the word "so" was stranded on its own line in the transcription,
  // breaking the comment and the parse.)
  session.requestFrameOfReference('stage').then((frameOfRef) => {
    if (session.exclusive) {
      xrExclusiveFrameOfRef = frameOfRef;
      // Visualize the stage bounds while in an exclusive session.
      let boundsRenderer = new BoundsRenderer();
      boundsRenderer.stageBounds = frameOfRef.bounds;
      scene.addNode(boundsRenderer);
    } else {
      xrNonExclusiveFrameOfRef = frameOfRef;
    }
    session.requestAnimationFrame(onXRFrame);
  });
}
// Fired via the XRDeviceButton's onEndSession callback (wired in initXR()).
// Only requests shutdown; cleanup happens in onSessionEnded through the
// session's 'end' event registered in onSessionStarted().
function onEndSession(session) {
session.end();
}
// Fired via the session 'end' event registered in onSessionStarted().
function onSessionEnded(event) {
  if (event.session.exclusive) {
    // Remove the mirror canvas created in onRequestSession() and reset the
    // button so a new session can be started.
    // (FIX: the '#mirror-canvas' selector string was split across two lines
    // in the transcription — a syntax error — and is rejoined here.)
    document.body.removeChild(document.querySelector('#mirror-canvas'));
    xrButton.setSession(null);
  }
}
// Per-frame callback scheduled in onSessionStarted() via
// session.requestAnimationFrame(), and re-scheduled here each frame.
function onXRFrame(t, frame) {
  let session = frame.session;
  // Exclusive and non-exclusive sessions keep separate frames of reference.
  let frameOfRef = session.exclusive ?
      xrExclusiveFrameOfRef :
      xrNonExclusiveFrameOfRef;
  let pose = frame.getDevicePose(frameOfRef);

  scene.startFrame();

  session.requestAnimationFrame(onXRFrame);

  // Every XR frame uses basically the same render loop, so for the sake
  // of keeping the sample code focused on the interesting bits most
  // samples after this one will start using this helper function to hide
  // away the majority of the rendering logic.
  // (FIX: the words "sake" and "hide" were stranded on their own lines in
  // the transcription, breaking the comments and the parse.)
  scene.drawXRFrame(frame, pose);

  scene.endFrame();
}