diff --git a/README.md b/README.md index 08ed2ea..358902f 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Building an augmented reality application with the WebXR Device API -> ### 🚨 Will not work in latest Chrome Canary 🚨 -> The WebXR Device API is undergoing a lot of changes currently. This codelab will only work in Chrome Canary/Dev versions 70-72. +> ### The WebXR Device API is undergoing a lot of changes currently. +> This codelab will work in Chrome Canary/Dev version 73.0.3679.0 This code has the resources you need for the codelab [Building an augmented reality application with the WebXR Device API](https://codelabs.developers.google.com/codelabs/ar-with-webxr/#0). diff --git a/final/app.js b/final/app.js index c73548e..c7ad30f 100644 --- a/final/app.js +++ b/final/app.js @@ -129,7 +129,8 @@ class App { // Ensure that the context we want to write to is compatible // with our XRDevice - await this.gl.setCompatibleXRDevice(this.session.device); + //API change: setCompatibleXRDevice() -> makeXRCompatible() + await this.gl.makeXRCompatible(this.session.device); // Set our session's baseLayer to an XRWebGLLayer // using our new renderer's context @@ -176,7 +177,11 @@ class App { this.reticle = new Reticle(this.session, this.camera); this.scene.add(this.reticle); - this.frameOfRef = await this.session.requestFrameOfReference('eye-level'); + //API change: requestFrameOfReference() -> requestReferenceSpace() + this.frameOfRef = await this.session.requestReferenceSpace({ + type: 'stationary', + subtype: 'eye-level', + }); this.session.requestAnimationFrame(this.onXRFrame); window.addEventListener('click', this.onClick); @@ -188,7 +193,8 @@ class App { */ onXRFrame(time, frame) { let session = frame.session; - let pose = frame.getDevicePose(this.frameOfRef); + //API change: getDevicePose() -> getViewerPose() + let pose = frame.getViewerPose(this.frameOfRef); // Update the reticle's position this.reticle.update(this.frameOfRef); @@ -210,14 +216,16 @@ class App { 
// Our XRFrame has an array of views. In the VR case, we'll have // two views, one for each eye. In mobile AR, however, we only // have one view. - for (let view of frame.views) { + //API change: frame.views -> pose.views + for (let view of pose.views) { const viewport = session.baseLayer.getViewport(view); this.renderer.setSize(viewport.width, viewport.height); // Set the view matrix and projection matrix from XRDevicePose // and XRView onto our THREE.Camera. this.camera.projectionMatrix.fromArray(view.projectionMatrix); - const viewMatrix = new THREE.Matrix4().fromArray(pose.getViewMatrix(view)); + //API change: pose.getViewMatrix(view) -> view.viewMatrix + const viewMatrix = new THREE.Matrix4().fromArray(view.viewMatrix); this.camera.matrix.getInverse(viewMatrix); this.camera.updateMatrixWorld(true); @@ -263,8 +271,8 @@ class App { const origin = new Float32Array(ray.origin.toArray()); const direction = new Float32Array(ray.direction.toArray()); const hits = await this.session.requestHitTest(origin, - direction, - this.frameOfRef); + direction, + this.frameOfRef); // If we found at least one hit... 
if (hits.length) { diff --git a/step-05/app.js b/step-05/app.js index b8f9cd1..e175d92 100644 --- a/step-05/app.js +++ b/step-05/app.js @@ -121,7 +121,8 @@ class App { // Ensure that the context we want to write to is compatible // with our XRDevice - await this.gl.setCompatibleXRDevice(this.session.device); + //API change: setCompatibleXRDevice() -> makeXRCompatible() + await this.gl.makeXRCompatible(this.session.device); // Set our session's baseLayer to an XRWebGLLayer // using our new renderer's context @@ -152,7 +153,11 @@ class App { this.reticle = new Reticle(this.session, this.camera); this.scene.add(this.reticle); - this.frameOfRef = await this.session.requestFrameOfReference('eye-level'); + //API change: requestFrameOfReference() -> requestReferenceSpace() + this.frameOfRef = await this.session.requestReferenceSpace({ + type: 'stationary', + subtype: 'eye-level', + }); this.session.requestAnimationFrame(this.onXRFrame); window.addEventListener('click', this.onClick); @@ -164,7 +169,8 @@ class App { */ onXRFrame(time, frame) { let session = frame.session; - let pose = frame.getDevicePose(this.frameOfRef); + //API change: getDevicePose() -> getViewerPose() + let pose = frame.getViewerPose(this.frameOfRef); // Update the reticle's position this.reticle.update(this.frameOfRef); @@ -186,14 +192,16 @@ class App { // Our XRFrame has an array of views. In the VR case, we'll have // two views, one for each eye. In mobile AR, however, we only // have one view. - for (let view of frame.views) { + //API change: frame.views -> pose.views + for (let view of pose.views) { const viewport = session.baseLayer.getViewport(view); this.renderer.setSize(viewport.width, viewport.height); // Set the view matrix and projection matrix from XRDevicePose // and XRView onto our THREE.Camera. 
this.camera.projectionMatrix.fromArray(view.projectionMatrix); - const viewMatrix = new THREE.Matrix4().fromArray(pose.getViewMatrix(view)); + //API change: pose.getViewMatrix(view) -> view.viewMatrix + const viewMatrix = new THREE.Matrix4().fromArray(view.viewMatrix); this.camera.matrix.getInverse(viewMatrix); this.camera.updateMatrixWorld(true); @@ -233,8 +241,8 @@ class App { const origin = new Float32Array(ray.origin.toArray()); const direction = new Float32Array(ray.direction.toArray()); const hits = await this.session.requestHitTest(origin, - direction, - this.frameOfRef); + direction, + this.frameOfRef); // If we found at least one hit... if (hits.length) { diff --git a/step-06/app.js b/step-06/app.js index 40832b7..91343c6 100644 --- a/step-06/app.js +++ b/step-06/app.js @@ -125,7 +125,8 @@ class App { // Ensure that the context we want to write to is compatible // with our XRDevice - await this.gl.setCompatibleXRDevice(this.session.device); + //API change: setCompatibleXRDevice() -> makeXRCompatible() + await this.gl.makeXRCompatible(this.session.device); // Set our session's baseLayer to an XRWebGLLayer // using our new renderer's context @@ -161,7 +162,11 @@ class App { this.reticle = new Reticle(this.session, this.camera); this.scene.add(this.reticle); - this.frameOfRef = await this.session.requestFrameOfReference('eye-level'); + //API change: requestFrameOfReference() -> requestReferenceSpace() + this.frameOfRef = await this.session.requestReferenceSpace({ + type: 'stationary', + subtype: 'eye-level', + }); this.session.requestAnimationFrame(this.onXRFrame); window.addEventListener('click', this.onClick); @@ -173,7 +178,8 @@ class App { */ onXRFrame(time, frame) { let session = frame.session; - let pose = frame.getDevicePose(this.frameOfRef); + //API change: getDevicePose() -> getViewerPose() + let pose = frame.getViewerPose(this.frameOfRef); // Update the reticle's position this.reticle.update(this.frameOfRef); @@ -195,14 +201,16 @@ class App { // Our 
XRFrame has an array of views. In the VR case, we'll have // two views, one for each eye. In mobile AR, however, we only // have one view. - for (let view of frame.views) { + //API change: frame.views -> pose.views + for (let view of pose.views) { const viewport = session.baseLayer.getViewport(view); this.renderer.setSize(viewport.width, viewport.height); // Set the view matrix and projection matrix from XRDevicePose // and XRView onto our THREE.Camera. this.camera.projectionMatrix.fromArray(view.projectionMatrix); - const viewMatrix = new THREE.Matrix4().fromArray(pose.getViewMatrix(view)); + //API change: pose.getViewMatrix(view) -> view.viewMatrix + const viewMatrix = new THREE.Matrix4().fromArray(view.viewMatrix); this.camera.matrix.getInverse(viewMatrix); this.camera.updateMatrixWorld(true); @@ -248,8 +256,8 @@ class App { const origin = new Float32Array(ray.origin.toArray()); const direction = new Float32Array(ray.direction.toArray()); const hits = await this.session.requestHitTest(origin, - direction, - this.frameOfRef); + direction, + this.frameOfRef); // If we found at least one hit... 
if (hits.length) { diff --git a/work/app.js b/work/app.js index 5dde817..be36097 100644 --- a/work/app.js +++ b/work/app.js @@ -120,7 +120,8 @@ class App { // Ensure that the context we want to write to is compatible // with our XRDevice - await this.gl.setCompatibleXRDevice(this.session.device); + //API change: setCompatibleXRDevice() -> makeXRCompatible() + await this.gl.makeXRCompatible(this.session.device); // Set our session's baseLayer to an XRWebGLLayer // using our new renderer's context @@ -138,7 +139,11 @@ class App { this.camera = new THREE.PerspectiveCamera(); this.camera.matrixAutoUpdate = false; - this.frameOfRef = await this.session.requestFrameOfReference('eye-level'); + //API change: requestFrameOfReference() -> requestReferenceSpace() + this.frameOfRef = await this.session.requestReferenceSpace({ + type: 'stationary', + subtype: 'eye-level', + }); this.session.requestAnimationFrame(this.onXRFrame); } @@ -148,7 +153,8 @@ class App { */ onXRFrame(time, frame) { let session = frame.session; - let pose = frame.getDevicePose(this.frameOfRef); + //API change: getDevicePose() -> getViewerPose() + let pose = frame.getViewerPose(this.frameOfRef); // Queue up the next frame session.requestAnimationFrame(this.onXRFrame); @@ -160,14 +166,16 @@ class App { // Our XRFrame has an array of views. In the VR case, we'll have // two views, one for each eye. In mobile AR, however, we only // have one view. - for (let view of frame.views) { + //API change: frame.views -> pose.views + for (let view of pose.views) { const viewport = session.baseLayer.getViewport(view); this.renderer.setSize(viewport.width, viewport.height); // Set the view matrix and projection matrix from XRDevicePose // and XRView onto our THREE.Camera. 
this.camera.projectionMatrix.fromArray(view.projectionMatrix); - const viewMatrix = new THREE.Matrix4().fromArray(pose.getViewMatrix(view)); + //API change: pose.getViewMatrix(view) -> view.viewMatrix + const viewMatrix = new THREE.Matrix4().fromArray(view.viewMatrix); this.camera.matrix.getInverse(viewMatrix); this.camera.updateMatrixWorld(true);