<!doctype html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <title>Hit to Place Demo</title>

  <!-- three.js -->
  <script src="https://unpkg.com/three@0.126.0/build/three.js"></script>
  <script src="https://unpkg.com/three@0.126.0/examples/js/loaders/GLTFLoader.js"></script>
</head>
<body>

<!-- Starting an immersive WebXR session requires user interaction.
    We start this one with a simple button. -->
<button type="button" onclick="activateXR()">Start Hit to Place Demo</button>
<script>
async function activateXR() {
  // Starts an immersive-ar WebXR session, shows a reticle on detected
  // surfaces, and places a clone of the loaded model wherever the user taps.
  // Guard: WebXR is unavailable on non-HTTPS origins and unsupported browsers.
  if (!navigator.xr) {
    alert("WebXR is not supported by this browser.");
    return;
  }

  // Add a canvas element and initialize a WebGL context that is compatible with WebXR.
  const canvas = document.createElement("canvas");
  document.body.appendChild(canvas);
  const gl = canvas.getContext("webgl", {xrCompatible: true});

  const scene = new THREE.Scene();

  // A single directional light so the glTF materials are not rendered black.
  const directionalLight = new THREE.DirectionalLight(0xffffff, 1.0);
  directionalLight.position.set(10, 15, 10);
  scene.add(directionalLight);

  // Set up the WebGLRenderer, which handles rendering to the session's base layer.
  const renderer = new THREE.WebGLRenderer({
    alpha: true,
    preserveDrawingBuffer: true,
    canvas: canvas,
    context: gl
  });
  renderer.autoClear = false;

  // The WebXR API supplies the camera matrices directly each frame.
  // Disable matrix auto updates so three.js doesn't attempt
  // to handle the matrices independently.
  const camera = new THREE.PerspectiveCamera();
  camera.matrixAutoUpdate = false;

  // Initialize a WebXR session using "immersive-ar" with hit testing enabled.
  const session = await navigator.xr.requestSession("immersive-ar", {requiredFeatures: ['hit-test']});
  session.updateRenderState({
    baseLayer: new XRWebGLLayer(session, gl)
  });

  // A 'local' reference space has a native origin that is located
  // near the viewer's position at the time the session was created.
  const referenceSpace = await session.requestReferenceSpace('local');

  // Create another XRReferenceSpace that has the viewer as the origin,
  // and perform hit testing from it (i.e. from the center of the screen).
  const viewerSpace = await session.requestReferenceSpace('viewer');
  const hitTestSource = await session.requestHitTestSource({ space: viewerSpace });

  const loader = new THREE.GLTFLoader();

  // Reticle shown on detected real-world surfaces. Loaded asynchronously,
  // so it may still be undefined for the first few frames / taps.
  let reticle;
  loader.load("https://immersive-web.github.io/webxr-samples/media/gltf/reticle/reticle.gltf", function(gltf) {
    reticle = gltf.scene;
    reticle.visible = false;
    scene.add(reticle);
  });

  // Model that gets cloned and placed on each tap. Also loaded asynchronously.
  let pineCone;
  // Replace the URL below with the URL of the glTF model to place.
  loader.load("https://transfer.hft-stuttgart.de/gitlab/22raya1mpg/argumented-reality/-/raw/master/public/scene.gltf", function(gltf) {
    pineCone = gltf.scene;
    pineCone.scale.set(0.15, 0.15, 0.15); // Adjust scale as needed.
  });

  session.addEventListener("select", () => {
    // Bug fix: also require the reticle to exist and be visible. The original
    // code dereferenced `reticle.position` before the reticle glTF finished
    // loading (TypeError), and placed clones even when no surface had been
    // hit, dropping them at a stale or default position.
    if (pineCone && reticle && reticle.visible) {
      const clone = pineCone.clone();
      clone.position.copy(reticle.position);
      scene.add(clone);
    }
  });

  // Create a render loop that allows us to draw on the AR view.
  const onXRFrame = (time, frame) => {
    // Queue up the next draw request.
    session.requestAnimationFrame(onXRFrame);

    // Bind the graphics framebuffer to the baseLayer's framebuffer.
    gl.bindFramebuffer(gl.FRAMEBUFFER, session.renderState.baseLayer.framebuffer);

    // Retrieve the pose of the device.
    // XRFrame.getViewerPose can return null while the session attempts to establish tracking.
    const pose = frame.getViewerPose(referenceSpace);
    if (pose) {
      // In mobile AR, we only have one view.
      const view = pose.views[0];

      const viewport = session.renderState.baseLayer.getViewport(view);
      renderer.setSize(viewport.width, viewport.height);

      // Use the view's transform matrix and projection matrix to configure the THREE.camera.
      camera.matrix.fromArray(view.transform.matrix);
      camera.projectionMatrix.fromArray(view.projectionMatrix);
      camera.updateMatrixWorld(true);

      // Position the reticle on the nearest hit surface, or hide it.
      const hitTestResults = frame.getHitTestResults(hitTestSource);
      if (reticle) {
        if (hitTestResults.length > 0) {
          const hitPose = hitTestResults[0].getPose(referenceSpace);
          reticle.visible = true;
          reticle.position.set(hitPose.transform.position.x,
                               hitPose.transform.position.y,
                               hitPose.transform.position.z);
          reticle.updateMatrixWorld(true);
        } else {
          // Bug fix: hide the reticle when no surface is currently hit,
          // instead of leaving it frozen at its last known position.
          reticle.visible = false;
        }
      }

      // Render the scene with THREE.WebGLRenderer.
      renderer.render(scene, camera);
    }
  };
  session.requestAnimationFrame(onXRFrame);
}
</script>
</body>
</html>