1-Cube.html
<!doctype html>
<html>
<head>
  <meta charset="UTF-8">
  <meta name="viewport"
        content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
  <title>Cube Demo</title>

  <!-- three.js -->
  <script src="https://unpkg.com/three@0.126.0/build/three.js"></script>
</head>
<body>

<!-- Starting an immersive WebXR session requires user interaction.
    We start this one with a simple button. -->
<button onclick="activateXR()">Start Cube Demo</button>
<script>
async function activateXR() {
  // Add a canvas element and initialize a WebGL context that is compatible with WebXR.
  const canvas = document.createElement("canvas");
  document.body.appendChild(canvas);
  const gl = canvas.getContext("webgl", {xrCompatible: true});

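  // Create a basic three.js scene to hold the demo content.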
  const scene = new THREE.Scene();

// The cube will have a different color on each side.
const materials = [
  new THREE.MeshBasicMaterial({color: 0xff0000}),
  new THREE.MeshBasicMaterial({color: 0x0000ff}),
  new THREE.MeshBasicMaterial({color: 0x00ff00}),
  new THREE.MeshBasicMaterial({color: 0xff00ff}),
  new THREE.MeshBasicMaterial({color: 0x00ffff}),
  new THREE.MeshBasicMaterial({color: 0xffff00})
];

// Create the cube and add it to the demo scene.
const cube = new THREE.Mesh(new THREE.BoxGeometry(0.2, 0.2, 0.2), materials);
cube.position.set(0, 0, -1);
scene.add(cube);

// Set up the WebGLRenderer, which handles rendering to the session's base layer.
const renderer = new THREE.WebGLRenderer({
  alpha: true,
  preserveDrawingBuffer: true,
  canvas: canvas,
  context: gl
});
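// Let the WebXR base layer manage clearing of its framebuffer instead of three.js.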
renderer.autoClear = false;

// The API directly updates the camera matrices.
// Disable matrix auto updates so three.js doesn't attempt
// to handle the matrices independently.
const camera = new THREE.PerspectiveCamera();
camera.matrixAutoUpdate = false;

// Initialize a WebXR session using "immersive-ar".
const session = await navigator.xr.requestSession("immersive-ar");
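// Render into an XRWebGLLayer backed by our WebGL context.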
session.updateRenderState({
  baseLayer: new XRWebGLLayer(session, gl)
});

// A 'local' reference space has a native origin that is located
// near the viewer's position at the time the session was created.
const referenceSpace = await session.requestReferenceSpace('local');

// Create a render loop that allows us to draw on the AR view.
const onXRFrame = (time, frame) => {
  // Queue up the next draw request.
  session.requestAnimationFrame(onXRFrame);

  // Bind the graphics framebuffer to the baseLayer's framebuffer.
  gl.bindFramebuffer(gl.FRAMEBUFFER, session.renderState.baseLayer.framebuffer);

  // Retrieve the pose of the device.
  // XRFrame.getViewerPose can return null while the session attempts to establish tracking.
  const pose = frame.getViewerPose(referenceSpace);
  if (pose) {
    // In mobile AR, we only have one view.
    const view = pose.views[0];

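    // Size the renderer to the viewport WebXR allocated for this view.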
    const viewport = session.renderState.baseLayer.getViewport(view);
    renderer.setSize(viewport.width, viewport.height);

    // Use the view's transform matrix and projection matrix to configure the THREE.camera.
    camera.matrix.fromArray(view.transform.matrix);
    camera.projectionMatrix.fromArray(view.projectionMatrix);
    camera.updateMatrixWorld(true);

    // Render the scene with THREE.WebGLRenderer.
    renderer.render(scene, camera);
  }
};
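// Kick off the render loop with the first frame request.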
session.requestAnimationFrame(onXRFrame);

}
</script>
</body>
</html>