<!doctype html>
<html>
<head>
  <meta charset="UTF-8">
  <meta name="viewport"
        content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
  <title>Hit to Place Demo</title>

  <!-- three.js -->
  <script src="https://unpkg.com/three@0.126.0/build/three.js"></script>
  <script src="https://unpkg.com/three@0.126.0/examples/js/loaders/GLTFLoader.js"></script>
</head>
<body>

<!-- Starting an immersive WebXR session requires user interaction.
    We start this one with a simple button. -->
<button onclick="activateXR()">Tap to Place AR Demo</button>
<script>
async function activateXR() {
  // Add a canvas element and initialize a WebGL context that is compatible with WebXR.
  const canvas = document.createElement("canvas");
  document.body.appendChild(canvas);
  const gl = canvas.getContext("webgl", {xrCompatible: true});
Rahman's avatar
Rahman committed
25
26

  const scene = new THREE.Scene();
Rahman's avatar
Rahman committed
27

Rahman's avatar
Rahman committed
28
29
30
  const directionalLight = new THREE.DirectionalLight(0xffffff, 1.0);
  directionalLight.position.set(10, 15, 10);
  scene.add(directionalLight);
Rahman's avatar
Rahman committed
31

Rahman's avatar
Rahman committed
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
// Set up the WebGLRenderer, which handles rendering to the session's base layer.
const renderer = new THREE.WebGLRenderer({
  alpha: true,
  preserveDrawingBuffer: true,
  canvas: canvas,
  context: gl
});
renderer.autoClear = false;

// The API directly updates the camera matrices.
// Disable matrix auto updates so three.js doesn't attempt
// to handle the matrices independently.
const camera = new THREE.PerspectiveCamera();
camera.matrixAutoUpdate = false;

// Initialize a WebXR session using "immersive-ar".
Rahman's avatar
Rahman committed
48
const session = await navigator.xr.requestSession("immersive-ar", {requiredFeatures: ['hit-test']});
Rahman's avatar
Rahman committed
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
session.updateRenderState({
  baseLayer: new XRWebGLLayer(session, gl)
});

// A 'local' reference space has a native origin that is located
// near the viewer's position at the time the session was created.
const referenceSpace = await session.requestReferenceSpace('local');

// Create another XRReferenceSpace that has the viewer as the origin.
const viewerSpace = await session.requestReferenceSpace('viewer');
// Perform hit testing using the viewer as origin.
const hitTestSource = await session.requestHitTestSource({ space: viewerSpace });


const loader = new THREE.GLTFLoader();
let reticle;
loader.load("https://immersive-web.github.io/webxr-samples/media/gltf/reticle/reticle.gltf", function(gltf) {
  reticle = gltf.scene;
  reticle.visible = false;
  scene.add(reticle);
})

let pineCone;
  // Replace the URL with new url
Rahman's avatar
Rahman committed
73
  loader.load("https://transfer.hft-stuttgart.de/gitlab/22raya1mpg/argumented-reality/-/raw/master/public/scene.gltf", function(gltf) {
Rahman's avatar
Rahman committed
74
    pineCone = gltf.scene;
Rahman's avatar
Rahman committed
75
    pineCone.scale.set(0.15, 0.15, 0.15); // Adjust scale as needed
Rahman's avatar
Rahman committed
76
77
78
79
80
81
82
83
84
85
  });

  session.addEventListener("select", (event) => {
    if (pineCone) {
      const clone = pineCone.clone();
      clone.position.copy(reticle.position);
      scene.add(clone);
    }
  });

Rahman's avatar
Rahman committed
86
87
88
// Create a render loop that allows us to draw on the AR view.
const onXRFrame = (time, frame) => {
  // Queue up the next draw request.
Rahman's avatar
Rahman committed
89
  session.requestAnimationFrame(onXRFrame);
Rahman's avatar
Rahman committed
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122

  // Bind the graphics framebuffer to the baseLayer's framebuffer
  gl.bindFramebuffer(gl.FRAMEBUFFER, session.renderState.baseLayer.framebuffer)

  // Retrieve the pose of the device.
  // XRFrame.getViewerPose can return null while the session attempts to establish tracking.
  const pose = frame.getViewerPose(referenceSpace);
  if (pose) {
    // In mobile AR, we only have one view.
    const view = pose.views[0];

    const viewport = session.renderState.baseLayer.getViewport(view);
    renderer.setSize(viewport.width, viewport.height)

    // Use the view's transform matrix and projection matrix to configure the THREE.camera.
    camera.matrix.fromArray(view.transform.matrix)
    camera.projectionMatrix.fromArray(view.projectionMatrix);
    camera.updateMatrixWorld(true);

    const hitTestResults = frame.getHitTestResults(hitTestSource);
    if (hitTestResults.length > 0 && reticle) {
      const hitPose = hitTestResults[0].getPose(referenceSpace);
      reticle.visible = true;
      reticle.position.set(hitPose.transform.position.x, hitPose.transform.position.y, hitPose.transform.position.z)
      reticle.updateMatrixWorld(true);
    }  

    // Render the scene with THREE.WebGLRenderer.
    renderer.render(scene, camera)
  }
}
session.requestAnimationFrame(onXRFrame);

Rahman's avatar
Rahman committed
123
124
125
126
}
</script>
</body>
</html>