<!doctype html>
<html>
<head>
  <meta charset="UTF-8">
  <meta name="viewport"
        content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
  <title>Tap to Place Demo</title>

  <!-- three.js -->
  <script src="https://unpkg.com/three@0.126.0/build/three.js"></script>

  <script src="https://unpkg.com/three@0.126.0/examples/js/loaders/GLTFLoader.js"></script>
</head>
<body>

<!-- Starting an immersive WebXR session requires user interaction.
    We start this one with a simple button. -->
<button onclick="activateXR()">Start Tap to Place Demo</button>
<script>
async function activateXR() {
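  // Not in the original demo: a minimal guard, assuming the page may also be
  // opened on devices or browsers without AR support. isSessionSupported() is a
  // standard method of the WebXR Device API.
  if (!navigator.xr || !(await navigator.xr.isSessionSupported("immersive-ar"))) {
    alert("This browser or device does not support immersive-ar WebXR sessions.");
    return;
  }
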
  // Add a canvas element and initialize a WebGL context that is compatible with WebXR.
  const canvas = document.createElement("canvas");
  document.body.appendChild(canvas);
  const gl = canvas.getContext("webgl", {xrCompatible: true});

  const scene = new THREE.Scene();

  const directionalLight = new THREE.DirectionalLight(0xffffff, 1.0);
  directionalLight.position.set(10, 15, 10);
  scene.add(directionalLight);

// Set up the WebGLRenderer, which handles rendering to the session's base layer.
const renderer = new THREE.WebGLRenderer({
  alpha: true,
  preserveDrawingBuffer: true,
  canvas: canvas,
  context: gl
});
renderer.autoClear = false;

// The API directly updates the camera matrices.
// Disable matrix auto updates so three.js doesn't attempt
// to handle the matrices independently.
const camera = new THREE.PerspectiveCamera();
camera.matrixAutoUpdate = false;

// Initialize a WebXR session using "immersive-ar".
const session = await navigator.xr.requestSession("immersive-ar", {requiredFeatures: ['hit-test']});
session.updateRenderState({
  baseLayer: new XRWebGLLayer(session, gl)
});

// A 'local' reference space has a native origin that is located
// near the viewer's position at the time the session was created.
const referenceSpace = await session.requestReferenceSpace('local');

// Create another XRReferenceSpace that has the viewer as the origin.
const viewerSpace = await session.requestReferenceSpace('viewer');
// Perform hit testing using the viewer as origin.
const hitTestSource = await session.requestHitTestSource({ space: viewerSpace });

const loader = new THREE.GLTFLoader();
let reticle;
loader.load("https://immersive-web.github.io/webxr-samples/media/gltf/reticle/reticle.gltf", function(gltf) {
  reticle = gltf.scene;
  reticle.visible = false;
  scene.add(reticle);
});

let flower;
loader.load("https://transfer.hft-stuttgart.de/gitlab/22azah1mpg/ar_civ2/-/raw/master/public/model.gltf", function(gltf) {
  flower = gltf.scene;
});
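
// Not in the original file: THREE.GLTFLoader.load() also accepts onProgress and
// onError callbacks, which helps diagnose cases where a model URL (such as the
// raw GitLab link above) cannot be fetched. A minimal sketch, with "url" standing
// in for either model address:
//
//   loader.load(url, (gltf) => { flower = gltf.scene; },
//               undefined,
//               (error) => console.error("Could not load the model:", error));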

// Place a copy of the model at the reticle whenever the user taps the screen.
session.addEventListener("select", (event) => {
  // Only place a copy once the model has loaded and the reticle has been
  // positioned by a successful hit test.
  if (flower && reticle && reticle.visible) {
    const clone = flower.clone();
    clone.position.copy(reticle.position);
    // Set the desired scale for the icon.
    const iconScale = 0.001; // Adjust the scale value as needed
    clone.scale.set(iconScale, iconScale, iconScale);
    scene.add(clone);
  }
});

// Create a render loop that allows us to draw on the AR view.
const onXRFrame = (time, frame) => {
  // Queue up the next draw request.
  session.requestAnimationFrame(onXRFrame);

  // Bind the graphics framebuffer to the baseLayer's framebuffer.
  gl.bindFramebuffer(gl.FRAMEBUFFER, session.renderState.baseLayer.framebuffer);

  // Retrieve the pose of the device.
  // XRFrame.getViewerPose can return null while the session attempts to establish tracking.
  const pose = frame.getViewerPose(referenceSpace);
  if (pose) {
    // In mobile AR, we only have one view.
    const view = pose.views[0];

    const viewport = session.renderState.baseLayer.getViewport(view);
    renderer.setSize(viewport.width, viewport.height);

    // Use the view's transform matrix and projection matrix to configure the three.js camera.
    camera.matrix.fromArray(view.transform.matrix);
    camera.projectionMatrix.fromArray(view.projectionMatrix);
    camera.updateMatrixWorld(true);

    // Move the reticle to the latest hit-test result, if there is one.
    const hitTestResults = frame.getHitTestResults(hitTestSource);
    if (hitTestResults.length > 0 && reticle) {
      const hitPose = hitTestResults[0].getPose(referenceSpace);
      reticle.visible = true;
      reticle.position.set(hitPose.transform.position.x, hitPose.transform.position.y, hitPose.transform.position.z);
      reticle.updateMatrixWorld(true);
    }

    // Render the scene with THREE.WebGLRenderer.
    renderer.render(scene, camera);
  }
};
session.requestAnimationFrame(onXRFrame);
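
// Not part of the original demo: an optional cleanup step. "end" is a standard
// XRSession event fired when the session shuts down (for example, when the user
// exits the AR view); here we simply remove the canvas that was added above.
session.addEventListener("end", () => {
  document.body.removeChild(canvas);
});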

}
</script>
</body>
</html>