This repository has been archived by the owner on Nov 9, 2023. It is now read-only.

Update codelab to work in Chrome Canary/Dev version 73.0.3679.0 #20

Open · wants to merge 1 commit into master
README.md: 4 changes (2 additions & 2 deletions)
@@ -1,7 +1,7 @@
# Building an augmented reality application with the WebXR Device API

-> ### 🚨 Will not work in latest Chrome Canary 🚨
-> The WebXR Device API is undergoing a lot of changes currently. This codelab will only work in Chrome Canary/Dev versions 70-72.
+> ### The WebXR Device API is undergoing a lot of changes currently.
+> This codelab will work in Chrome Canary/Dev version 73.0.3679.0.

This code has the resources you need for the codelab [Building an augmented reality application with the WebXR Device API](https://codelabs.developers.google.com/codelabs/ar-with-webxr/#0).

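The README note above pins the codelab to a single Canary/Dev build because the method names changed between releases (the renames are visible in the app.js diffs below). A minimal sketch, not part of this PR, of how the codelab could tolerate both names at runtime; `ensureXRCompatible` is an illustrative helper, and `gl`/`session` are the codelab's WebGL context and XRSession:

```js
// Illustrative sketch only (not part of this PR): call whichever WebGL
// compatibility method the running Chrome build exposes.
async function ensureXRCompatible(gl, session) {
  if (typeof gl.makeXRCompatible === 'function') {
    // Name used by Chrome Canary/Dev 73.0.3679.0, as adopted in this PR.
    await gl.makeXRCompatible(session.device);
  } else if (typeof gl.setCompatibleXRDevice === 'function') {
    // Name used by Chrome Canary/Dev 70-72, as in the original codelab.
    await gl.setCompatibleXRDevice(session.device);
  } else {
    throw new Error('No WebXR compatibility method found on this context.');
  }
}
```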
final/app.js: 22 changes (15 additions & 7 deletions)
@@ -129,7 +129,8 @@ class App {

// Ensure that the context we want to write to is compatible
// with our XRDevice
-await this.gl.setCompatibleXRDevice(this.session.device);
+//API change: setCompatibleXRDevice() -> makeXRCompatible()
+await this.gl.makeXRCompatible(this.session.device);

// Set our session's baseLayer to an XRWebGLLayer
// using our new renderer's context
@@ -176,7 +177,11 @@ class App {
this.reticle = new Reticle(this.session, this.camera);
this.scene.add(this.reticle);

-this.frameOfRef = await this.session.requestFrameOfReference('eye-level');
+//API change: requestFrameOfReference() -> requestReferenceSpace()
+this.frameOfRef = await this.session.requestReferenceSpace({
+  type: 'stationary',
+  subtype: 'eye-level',
+});
this.session.requestAnimationFrame(this.onXRFrame);

window.addEventListener('click', this.onClick);
@@ -188,7 +193,8 @@
*/
onXRFrame(time, frame) {
let session = frame.session;
-let pose = frame.getDevicePose(this.frameOfRef);
+//API change: getDevicePose() -> getViewerPose()
+let pose = frame.getViewerPose(this.frameOfRef);

// Update the reticle's position
this.reticle.update(this.frameOfRef);
@@ -210,14 +216,16 @@
// Our XRFrame has an array of views. In the VR case, we'll have
// two views, one for each eye. In mobile AR, however, we only
// have one view.
-for (let view of frame.views) {
+//API change: frame.views -> pose.views
+for (let view of pose.views) {
const viewport = session.baseLayer.getViewport(view);
this.renderer.setSize(viewport.width, viewport.height);

// Set the view matrix and projection matrix from XRDevicePose
// and XRView onto our THREE.Camera.
this.camera.projectionMatrix.fromArray(view.projectionMatrix);
-const viewMatrix = new THREE.Matrix4().fromArray(pose.getViewMatrix(view));
+//API change: pose.getViewMatrix(view) -> view.viewMatrix
+const viewMatrix = new THREE.Matrix4().fromArray(view.viewMatrix);
this.camera.matrix.getInverse(viewMatrix);
this.camera.updateMatrixWorld(true);

@@ -263,8 +271,8 @@ class App {
const origin = new Float32Array(ray.origin.toArray());
const direction = new Float32Array(ray.direction.toArray());
const hits = await this.session.requestHitTest(origin,
-direction,
-this.frameOfRef);
+direction,
+this.frameOfRef);

// If we found at least one hit...
if (hits.length) {
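Taken together, the final/app.js hunks above boil down to the following frame-loop shape. This is a standalone sketch of the 73-era API as used in this diff, not code from the PR; `runFrameLoop` and `drawView` are illustrative names, with `drawView` standing in for the codelab's three.js rendering of each view:

```js
// Minimal sketch (not part of this PR) of the renamed session/frame APIs
// used in the diff above.
async function runFrameLoop(session, drawView) {
  // requestFrameOfReference('eye-level') -> requestReferenceSpace({...})
  const frameOfRef = await session.requestReferenceSpace({
    type: 'stationary',
    subtype: 'eye-level',
  });

  function onXRFrame(time, frame) {
    // Queue up the next frame.
    session.requestAnimationFrame(onXRFrame);

    // getDevicePose(frameOfRef) -> getViewerPose(frameOfRef)
    const pose = frame.getViewerPose(frameOfRef);
    if (!pose) return;

    // frame.views -> pose.views: one view in mobile AR, two in VR.
    for (const view of pose.views) {
      const viewport = session.baseLayer.getViewport(view);
      drawView(view, viewport);
    }
  }

  session.requestAnimationFrame(onXRFrame);
}
```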
step-05/app.js: 22 changes (15 additions & 7 deletions)
@@ -121,7 +121,8 @@ class App {

// Ensure that the context we want to write to is compatible
// with our XRDevice
-await this.gl.setCompatibleXRDevice(this.session.device);
+//API change: setCompatibleXRDevice() -> makeXRCompatible()
+await this.gl.makeXRCompatible(this.session.device);

// Set our session's baseLayer to an XRWebGLLayer
// using our new renderer's context
@@ -152,7 +153,11 @@ class App {
this.reticle = new Reticle(this.session, this.camera);
this.scene.add(this.reticle);

-this.frameOfRef = await this.session.requestFrameOfReference('eye-level');
+//API change: requestFrameOfReference() -> requestReferenceSpace()
+this.frameOfRef = await this.session.requestReferenceSpace({
+  type: 'stationary',
+  subtype: 'eye-level',
+});
this.session.requestAnimationFrame(this.onXRFrame);

window.addEventListener('click', this.onClick);
@@ -164,7 +169,8 @@
*/
onXRFrame(time, frame) {
let session = frame.session;
-let pose = frame.getDevicePose(this.frameOfRef);
+//API change: getDevicePose() -> getViewerPose()
+let pose = frame.getViewerPose(this.frameOfRef);

// Update the reticle's position
this.reticle.update(this.frameOfRef);
@@ -186,14 +192,16 @@
// Our XRFrame has an array of views. In the VR case, we'll have
// two views, one for each eye. In mobile AR, however, we only
// have one view.
-for (let view of frame.views) {
+//API change: frame.views -> pose.views
+for (let view of pose.views) {
const viewport = session.baseLayer.getViewport(view);
this.renderer.setSize(viewport.width, viewport.height);

// Set the view matrix and projection matrix from XRDevicePose
// and XRView onto our THREE.Camera.
this.camera.projectionMatrix.fromArray(view.projectionMatrix);
-const viewMatrix = new THREE.Matrix4().fromArray(pose.getViewMatrix(view));
+//API change: pose.getViewMatrix(view) -> view.viewMatrix
+const viewMatrix = new THREE.Matrix4().fromArray(view.viewMatrix);
this.camera.matrix.getInverse(viewMatrix);
this.camera.updateMatrixWorld(true);

@@ -233,8 +241,8 @@ class App {
const origin = new Float32Array(ray.origin.toArray());
const direction = new Float32Array(ray.direction.toArray());
const hits = await this.session.requestHitTest(origin,
-direction,
-this.frameOfRef);
+direction,
+this.frameOfRef);

// If we found at least one hit...
if (hits.length) {
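The requestHitTest() hunk above appears to be a whitespace-only change, but it shows the transitional three-argument signature the codelab relies on. A minimal sketch of that call, assuming a ray already expressed in the 'eye-level' reference space; `castRay` is an illustrative name and not part of the PR:

```js
// Illustrative sketch (not part of this PR): the transitional hit-test API
// takes flat Float32Arrays for the ray origin and direction plus the
// reference space requested earlier, and resolves to an array of hits.
async function castRay(session, frameOfRef, rayOrigin, rayDirection) {
  const origin = new Float32Array(rayOrigin);        // e.g. [0, 0, 0]
  const direction = new Float32Array(rayDirection);  // e.g. [0, 0, -1]
  const hits = await session.requestHitTest(origin, direction, frameOfRef);
  // The codelab places content at the first hit, if any.
  return hits.length ? hits[0] : null;
}
```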
step-06/app.js: 22 changes (15 additions & 7 deletions)
@@ -125,7 +125,8 @@ class App {

// Ensure that the context we want to write to is compatible
// with our XRDevice
-await this.gl.setCompatibleXRDevice(this.session.device);
+//API change: setCompatibleXRDevice() -> makeXRCompatible()
+await this.gl.makeXRCompatible(this.session.device);

// Set our session's baseLayer to an XRWebGLLayer
// using our new renderer's context
@@ -161,7 +162,11 @@ class App {
this.reticle = new Reticle(this.session, this.camera);
this.scene.add(this.reticle);

-this.frameOfRef = await this.session.requestFrameOfReference('eye-level');
+//API change: requestFrameOfReference() -> requestReferenceSpace()
+this.frameOfRef = await this.session.requestReferenceSpace({
+  type: 'stationary',
+  subtype: 'eye-level',
+});
this.session.requestAnimationFrame(this.onXRFrame);

window.addEventListener('click', this.onClick);
@@ -173,7 +178,8 @@
*/
onXRFrame(time, frame) {
let session = frame.session;
-let pose = frame.getDevicePose(this.frameOfRef);
+//API change: getDevicePose() -> getViewerPose()
+let pose = frame.getViewerPose(this.frameOfRef);

// Update the reticle's position
this.reticle.update(this.frameOfRef);
@@ -195,14 +201,16 @@
// Our XRFrame has an array of views. In the VR case, we'll have
// two views, one for each eye. In mobile AR, however, we only
// have one view.
-for (let view of frame.views) {
+//API change: frame.views -> pose.views
+for (let view of pose.views) {
const viewport = session.baseLayer.getViewport(view);
this.renderer.setSize(viewport.width, viewport.height);

// Set the view matrix and projection matrix from XRDevicePose
// and XRView onto our THREE.Camera.
this.camera.projectionMatrix.fromArray(view.projectionMatrix);
-const viewMatrix = new THREE.Matrix4().fromArray(pose.getViewMatrix(view));
+//API change: pose.getViewMatrix(view) -> view.viewMatrix
+const viewMatrix = new THREE.Matrix4().fromArray(view.viewMatrix);
this.camera.matrix.getInverse(viewMatrix);
this.camera.updateMatrixWorld(true);

@@ -248,8 +256,8 @@ class App {
const origin = new Float32Array(ray.origin.toArray());
const direction = new Float32Array(ray.direction.toArray());
const hits = await this.session.requestHitTest(origin,
-direction,
-this.frameOfRef);
+direction,
+this.frameOfRef);

// If we found at least one hit...
if (hits.length) {
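On the rendering side, the per-view matrices now come from the XRView itself rather than from the pose. A minimal sketch of that camera update, assuming three.js is available as THREE and the camera has matrixAutoUpdate = false, as in the codelab (Matrix4.getInverse() was later removed from three.js in favor of invert(), but the version this codelab targets still provides it):

```js
// Minimal sketch (not part of this PR) of the per-view camera update shown
// in the hunks above.
function applyXRViewToCamera(view, camera) {
  // The projection matrix still comes straight from the view.
  camera.projectionMatrix.fromArray(view.projectionMatrix);

  // pose.getViewMatrix(view) -> view.viewMatrix: a world-to-view matrix,
  // so invert it to obtain the camera's world transform.
  const viewMatrix = new THREE.Matrix4().fromArray(view.viewMatrix);
  camera.matrix.getInverse(viewMatrix);
  camera.updateMatrixWorld(true);
}
```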
work/app.js: 18 changes (13 additions & 5 deletions)
@@ -120,7 +120,8 @@ class App {

// Ensure that the context we want to write to is compatible
// with our XRDevice
-await this.gl.setCompatibleXRDevice(this.session.device);
+//API change: setCompatibleXRDevice() -> makeXRCompatible()
+await this.gl.makeXRCompatible(this.session.device);

// Set our session's baseLayer to an XRWebGLLayer
// using our new renderer's context
@@ -138,7 +139,11 @@ class App {
this.camera = new THREE.PerspectiveCamera();
this.camera.matrixAutoUpdate = false;

-this.frameOfRef = await this.session.requestFrameOfReference('eye-level');
+//API change: requestFrameOfReference() -> requestReferenceSpace()
+this.frameOfRef = await this.session.requestReferenceSpace({
+  type: 'stationary',
+  subtype: 'eye-level',
+});
this.session.requestAnimationFrame(this.onXRFrame);
}

@@ -148,7 +153,8 @@
*/
onXRFrame(time, frame) {
let session = frame.session;
-let pose = frame.getDevicePose(this.frameOfRef);
+//API change: getDevicePose() -> getViewerPose()
+let pose = frame.getViewerPose(this.frameOfRef);

// Queue up the next frame
session.requestAnimationFrame(this.onXRFrame);
@@ -160,14 +166,16 @@
// Our XRFrame has an array of views. In the VR case, we'll have
// two views, one for each eye. In mobile AR, however, we only
// have one view.
-for (let view of frame.views) {
+//API change: frame.views -> pose.views
+for (let view of pose.views) {
const viewport = session.baseLayer.getViewport(view);
this.renderer.setSize(viewport.width, viewport.height);

// Set the view matrix and projection matrix from XRDevicePose
// and XRView onto our THREE.Camera.
this.camera.projectionMatrix.fromArray(view.projectionMatrix);
-const viewMatrix = new THREE.Matrix4().fromArray(pose.getViewMatrix(view));
+//API change: pose.getViewMatrix(view) -> view.viewMatrix
+const viewMatrix = new THREE.Matrix4().fromArray(view.viewMatrix);
this.camera.matrix.getInverse(viewMatrix);
this.camera.updateMatrixWorld(true);
