<!doctype html>
<html>
<head>
  <meta charset="UTF-8">
  <meta name="viewport"
        content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
  <title>Hit to Place Demo</title>

  <!-- three.js -->
  <script src="https://unpkg.com/three@0.126.0/build/three.js"></script>

  <script src="https://unpkg.com/three@0.126.0/examples/js/loaders/GLTFLoader.js"></script>
</head>
<body>

<!-- Starting an immersive WebXR session requires user interaction.
    We start this one with a simple button. -->
<button onclick="activateXR()">Start Hit to Place Demo</button>
<script>
async function activateXR() {
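  // Added guard (not part of the original demo): bail out early if the browser
  // doesn't expose WebXR or can't create an immersive AR session.
  if (!navigator.xr || !(await navigator.xr.isSessionSupported("immersive-ar"))) {
    alert("WebXR 'immersive-ar' sessions are not supported on this device.");
    return;
  }
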
  // Add a canvas element and initialize a WebGL context that is compatible with WebXR.
  const canvas = document.createElement("canvas");
  document.body.appendChild(canvas);
  const gl = canvas.getContext("webgl", {xrCompatible: true});

  const scene = new THREE.Scene();

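  // A directional light so the glTF models loaded below are actually lit.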
  const directionalLight = new THREE.DirectionalLight(0xffffff, 1.0);
  directionalLight.position.set(10, 15, 10);
  scene.add(directionalLight);

  // Set up the WebGLRenderer, which handles rendering to the session's base layer.
  const renderer = new THREE.WebGLRenderer({
    alpha: true,
    preserveDrawingBuffer: true,
    canvas: canvas,
    context: gl
  });
  renderer.autoClear = false;

  // The API directly updates the camera matrices.
  // Disable matrix auto updates so three.js doesn't attempt
  // to handle the matrices independently.
  const camera = new THREE.PerspectiveCamera();
  camera.matrixAutoUpdate = false;

  // Initialize a WebXR session using "immersive-ar".
  const session = await navigator.xr.requestSession("immersive-ar", {requiredFeatures: ['hit-test']});
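  // The session renders into an XRWebGLLayer backed by our WebGL context;
  // in an AR session that layer is composited over the device's camera view.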
  session.updateRenderState({
    baseLayer: new XRWebGLLayer(session, gl)
  });

  // A 'local' reference space has a native origin that is located
  // near the viewer's position at the time the session was created.
  const referenceSpace = await session.requestReferenceSpace('local');
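  // ('local' keeps placed objects fixed in the world, while the 'viewer' space below follows the camera.)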

  // Create another XRReferenceSpace that has the viewer as the origin.
  const viewerSpace = await session.requestReferenceSpace('viewer');
  // Perform hit testing using the viewer as origin.
  const hitTestSource = await session.requestHitTestSource({ space: viewerSpace });
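  // Note (added): with the 'viewer' reference space and no offset ray, the hit
  // test ray is cast from the center of the view, so the first result each frame
  // is the real-world surface the camera is currently pointed at.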




  const loader = new THREE.GLTFLoader();
  let reticle;
  loader.load("https://immersive-web.github.io/webxr-samples/media/gltf/reticle/reticle.gltf", function(gltf) {
    reticle = gltf.scene;
    reticle.visible = false;
    scene.add(reticle);
  });

  let flower;
  loader.load("https://immersive-web.github.io/webxr-samples/media/gltf/sunflower/sunflower.gltf", function(gltf) {
    flower = gltf.scene;
  });

  // "select" fires on the session's primary input action; in handheld AR that is a tap on the screen.
  session.addEventListener("select", (event) => {
    // Only place a flower once both models have loaded and the reticle is on a real surface.
    if (flower && reticle && reticle.visible) {
      const clone = flower.clone();
      clone.position.copy(reticle.position);
      scene.add(clone);
    }
  });

  // Create a render loop that allows us to draw on the AR view.
  const onXRFrame = (time, frame) => {
    // Queue up the next draw request.
    session.requestAnimationFrame(onXRFrame);

    // Bind the graphics framebuffer to the baseLayer's framebuffer.
    gl.bindFramebuffer(gl.FRAMEBUFFER, session.renderState.baseLayer.framebuffer);

    // Retrieve the pose of the device.
    // XRFrame.getViewerPose can return null while the session attempts to establish tracking.
    const pose = frame.getViewerPose(referenceSpace);
    if (pose) {
      // In mobile AR, we only have one view.
      const view = pose.views[0];

      const viewport = session.renderState.baseLayer.getViewport(view);
      renderer.setSize(viewport.width, viewport.height);

      // Use the view's transform matrix and projection matrix to configure the three.js camera.
      camera.matrix.fromArray(view.transform.matrix);
      camera.projectionMatrix.fromArray(view.projectionMatrix);
      camera.updateMatrixWorld(true);

      const hitTestResults = frame.getHitTestResults(hitTestSource);
      if (hitTestResults.length > 0 && reticle) {
        const hitPose = hitTestResults[0].getPose(referenceSpace);
        reticle.visible = true;
        reticle.position.set(hitPose.transform.position.x, hitPose.transform.position.y, hitPose.transform.position.z);
        reticle.updateMatrixWorld(true);
      }
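      else if (reticle) {
        // Added (not part of the original demo): hide the reticle again whenever
        // the hit test stops finding a real-world surface.
        reticle.visible = false;
      }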

      // Render the scene with THREE.WebGLRenderer.
      renderer.render(scene, camera);
    }
  };
  // Start the loop with the session's requestAnimationFrame so callbacks stay
  // in sync with the XR device and receive an XRFrame each tick.
  session.requestAnimationFrame(onXRFrame);

}
</script>
</body>
</html>