<!doctype html>
<html>
<head>
  <meta charset='utf-8'>
  <meta name='viewport' content='width=device-width, initial-scale=1, user-scalable=no'>
  <meta name='mobile-web-app-capable' content='yes'>
  <meta name='apple-mobile-web-app-capable' content='yes'>
  <link rel='stylesheet' href='css/style.css'>
  <title>WebXR</title>
</head>
<body>
  <div id="overlay">
    <div class="info-area">
      <!-- populated by checkXR() when WebXR is unavailable -->
      <div id="warning"></div>
      <div id="info"></div>
      <button id="xr-button" disabled>XR not found</button>
    </div>
  </div>
  <script type="module">
    import * as THREE from './three.js/build/three.module.js';
    import { GLTFLoader } from './three.js/examples/jsm/loaders/GLTFLoader.js';

    let renderer = null;
    let scene = null;
    let camera = null;
    let model = null;
    let mixer = null;
    let action = null;
    let reticle = null;
    let lastFrame = Date.now();
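
    // Build the three.js scene, camera, renderer and the reticle used to
    // visualize hit test results; called once the XR session and its WebGL
    // context exist.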
    const initScene = (gl, session) => {
      scene = new THREE.Scene();
      camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);

      // load our glTF model
      const loader = new GLTFLoader();
      loader.load(
        'models/wheel.glb',
        (gltf) => {
          model = gltf.scene;
          model.scale.set(0.1, 0.1, 0.1);
          model.castShadow = true;
          model.receiveShadow = true;
          mixer = new THREE.AnimationMixer(model);
          action = mixer.clipAction(gltf.animations[0]);
          action.setLoop(THREE.LoopRepeat, 0);
        },
        () => {}, // progress callback (unused)
        (error) => console.error(error)
      );

      // white point light placed above and in front of the scene origin
      const light = new THREE.PointLight(0xffffff, 2, 100);
      light.position.z = 1;
      light.position.y = 5;
      scene.add(light);

      // create and configure the three.js renderer with XR support
      renderer = new THREE.WebGLRenderer({
        antialias: true,
        alpha: true,
        autoClear: true,
        context: gl,
      });
      renderer.setPixelRatio(window.devicePixelRatio);
      renderer.setSize(window.innerWidth, window.innerHeight);
      renderer.xr.enabled = true;
      renderer.xr.setReferenceSpaceType('local');
      renderer.xr.setSession(session);

      // simple ring mesh used as a reticle to indicate detected surfaces
      reticle = new THREE.Mesh(
        new THREE.RingBufferGeometry(0.15, 0.2, 32).rotateX(-Math.PI / 2),
        new THREE.MeshPhongMaterial({ color: 0x0fff00 })
      );
      // we will update its matrix directly from the WebXR hit test pose matrix
      reticle.matrixAutoUpdate = false;
      reticle.visible = false;
      scene.add(reticle);
    };
    // button to start the XR experience
    const xrButton = document.getElementById('xr-button');
    // to display debug information
    const info = document.getElementById('info');
    // to control the XR session
    let xrSession = null;
    // reference space used within an application https://developer.mozilla.org/en-US/docs/Web/API/XRSession/requestReferenceSpace
    let xrRefSpace = null;
    // for hit testing against detected surfaces
    let xrHitTestSource = null;
    // canvas WebGL context used for rendering
    let gl = null;
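
    // Flow: checkXR()/checkSupportedState() detect WebXR support and enable the
    // button, onButtonClicked() requests an 'immersive-ar' session,
    // onSessionStarted() sets up rendering and hit testing, and onXRFrame()
    // runs once per frame until the session ends.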
    function checkXR() {
      if (!window.isSecureContext) {
        document.getElementById("warning").innerText = "WebXR unavailable. Please use a secure context (HTTPS).";
      }
      if (navigator.xr) {
        navigator.xr.addEventListener('devicechange', checkSupportedState);
        checkSupportedState();
      } else {
        document.getElementById("warning").innerText = "WebXR is unavailable in this browser.";
      }
    }
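
    // Enable the AR button only when this device can create an 'immersive-ar' session.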
    function checkSupportedState() {
      navigator.xr.isSessionSupported('immersive-ar').then((supported) => {
        if (supported) {
          xrButton.innerHTML = 'Enter AR';
          xrButton.addEventListener('click', onButtonClicked);
        } else {
          xrButton.innerHTML = 'AR not found';
        }
        xrButton.disabled = !supported;
      });
    }
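
    // Request a new immersive AR session (with DOM overlay and hit testing),
    // or end the current one if it is already running.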
    function onButtonClicked() {
      if (!xrSession) {
        navigator.xr.requestSession('immersive-ar', {
          optionalFeatures: ['dom-overlay'],
          requiredFeatures: ['local', 'hit-test'],
          domOverlay: { root: document.getElementById('overlay') }
        }).then(onSessionStarted, onRequestSessionError);
      } else {
        xrSession.end();
      }
    }
    function onSessionStarted(session) {
      xrSession = session;
      xrButton.innerHTML = 'Exit AR';

      // show which type of DOM overlay got enabled (if any)
      if (session.domOverlayState) {
        info.innerHTML = 'DOM Overlay type: ' + session.domOverlayState.type;
      }

      // create a canvas element and WebGL context for rendering
      session.addEventListener('end', onSessionEnded);
      let canvas = document.createElement('canvas');
      gl = canvas.getContext('webgl', { xrCompatible: true });
      session.updateRenderState({ baseLayer: new XRWebGLLayer(session, gl) });

      // here we ask for a 'viewer' reference space, since we will be casting a ray
      // from the viewer towards a detected surface; the ray/surface intersection
      // results will be delivered through xrHitTestSource
      session.requestReferenceSpace('viewer').then((refSpace) => {
        session.requestHitTestSource({ space: refSpace }).then((hitTestSource) => {
          xrHitTestSource = hitTestSource;
        });
      });

      session.requestReferenceSpace('local').then((refSpace) => {
        xrRefSpace = refSpace;
        session.requestAnimationFrame(onXRFrame);
      });

      document.getElementById("overlay").addEventListener('click', placeObject);

      // initialize the three.js scene
      initScene(gl, session);
    }
    function onRequestSessionError(ex) {
      info.innerHTML = "Failed to start AR session.";
      console.error(ex.message);
    }

    function onSessionEnded(event) {
      xrSession = null;
      xrButton.innerHTML = 'Enter AR';
      info.innerHTML = '';
      gl = null;
      if (xrHitTestSource) xrHitTestSource.cancel();
      xrHitTestSource = null;
    }
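
    // Tap handler: anchor the model at the reticle position and stop hit
    // testing, since this demo only places a single object.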
    function placeObject() {
      if (reticle.visible && model) {
        reticle.visible = false;
        xrHitTestSource.cancel();
        xrHitTestSource = null;

        // place the object right where the reticle was
        const pos = new THREE.Vector3();
        reticle.getWorldPosition(pos);
        scene.remove(reticle);
        model.position.set(pos.x, pos.y, pos.z);
        scene.add(model);

        // start the object animation right away
        toggleAnimation();

        // from now on, tapping the overlay toggles the animation instead of placing another object
        document.getElementById("overlay").removeEventListener('click', placeObject);
        document.getElementById("overlay").addEventListener('click', toggleAnimation);
      }
    }
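
    // Tap handler used after placement: start or stop the model's animation.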
    function toggleAnimation() {
      if (action.isRunning()) {
        action.stop();
        action.reset();
      } else {
        action.play();
      }
    }

    // utility function to advance the animation mixer by the elapsed time
    function updateAnimation() {
      let dt = (Date.now() - lastFrame) / 1000;
      lastFrame = Date.now();
      if (mixer) {
        mixer.update(dt);
      }
    }
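
    // Per-frame callback: update the reticle from the latest hit test result,
    // advance the animation, and render the scene into the XR framebuffer.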
    function onXRFrame(t, frame) {
      let session = frame.session;
      session.requestAnimationFrame(onXRFrame);

      if (xrHitTestSource) {
        // obtain hit test results by casting a ray from the center of the device screen
        // into the AR view; results indicate that the ray intersected one or more detected surfaces
        const hitTestResults = frame.getHitTestResults(xrHitTestSource);
        if (hitTestResults.length) {
          // obtain a pose at the intersection point, expressed in the local reference space
          const pose = hitTestResults[0].getPose(xrRefSpace);
          // place the reticle at the intersection point
          reticle.matrix.fromArray(pose.transform.matrix);
          reticle.visible = true;
        }
      } else {
        // hide the reticle once hit testing is no longer active
        reticle.visible = false;
      }

      // update the object animation
      updateAnimation();

      // bind the framebuffer provided by WebXR before letting three.js render
      gl.bindFramebuffer(gl.FRAMEBUFFER, session.renderState.baseLayer.framebuffer);
      // render the scene
      renderer.render(scene, camera);
    }
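
    // Kick off WebXR feature detection as soon as the module loads.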
    checkXR();
  </script>
</body>
</html>