Commit ec65330d authored by Rahman

Update index.html

parent 4c6e77a7
Pipeline #9755 passed in 7 seconds
@@ -8,7 +8,6 @@
<!-- three.js -->
<script src="https://unpkg.com/three@0.126.0/build/three.js"></script>
<script src="https://unpkg.com/three@0.126.0/examples/js/loaders/GLTFLoader.js"></script>
</head>
<body>
@@ -29,67 +28,64 @@ async function activateXR() {
directionalLight.position.set(10, 15, 10);
scene.add(directionalLight);
// Set up the WebGLRenderer, which handles rendering to the session's base layer.
const renderer = new THREE.WebGLRenderer({
alpha: true,
preserveDrawingBuffer: true,
canvas: canvas,
context: gl
});
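// Editor's sketch (assumption, not part of this commit): the renderer above and the
// XRWebGLLayer created below both need an XR-compatible WebGL context. Elsewhere in
// this page, `canvas` and `gl` are presumably set up along these lines:
//
//   const canvas = document.createElement("canvas");
//   document.body.appendChild(canvas);
//   const gl = canvas.getContext("webgl", { xrCompatible: true });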
renderer.autoClear = false;
// The API directly updates the camera matrices.
// Disable matrix auto updates so three.js doesn't attempt
// to handle the matrices independently.
const camera = new THREE.PerspectiveCamera();
camera.matrixAutoUpdate = false;
// Initialize a WebXR session using "immersive-ar".
const session = await navigator.xr.requestSession("immersive-ar", {requiredFeatures: ['hit-test']});
session.updateRenderState({
baseLayer: new XRWebGLLayer(session, gl)
});
// A 'local' reference space has a native origin that is located
// near the viewer's position at the time the session was created.
const referenceSpace = await session.requestReferenceSpace('local');
// Create another XRReferenceSpace that has the viewer as the origin.
const viewerSpace = await session.requestReferenceSpace('viewer');
// Perform hit testing using the viewer as origin.
const hitTestSource = await session.requestHitTestSource({ space: viewerSpace });
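// Editor's sketch (assumption, not part of this commit): a hit-test source can be
// released once it is no longer needed, for example when the XR session ends:
//
//   session.addEventListener("end", () => hitTestSource.cancel());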
const loader = new THREE.GLTFLoader();
let reticle;
loader.load("https://immersive-web.github.io/webxr-samples/media/gltf/reticle/reticle.gltf", function(gltf) {
reticle = gltf.scene;
reticle.visible = false;
scene.add(reticle);
});
let beeModel;
loader.load("https://example.com/path/to/your/bee_model.gltf", function(gltf) {
beeModel = gltf.scene;
});
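// Editor's sketch (assumption, not part of this commit): the URL above is still a
// placeholder, and GLTFLoader.load also accepts optional onProgress and onError
// callbacks, which make a model that never appears much easier to diagnose:
//
//   loader.load(
//     "https://example.com/path/to/your/bee_model.gltf",  // placeholder URL from above
//     (gltf) => { beeModel = gltf.scene; },
//     undefined,                                           // onProgress, not needed here
//     (error) => console.error("Failed to load the bee model:", error)
//   );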
session.addEventListener("select", (event) => {
if (beeModel && reticle) {
const clone = beeModel.clone();
clone.position.copy(reticle.position);
scene.add(clone);
}
});
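// Editor's sketch (assumption, not part of this commit): the clone copies only the
// reticle's position; if the reticle were also oriented from the hit pose (see the
// sketch in the render loop below), the clone could copy that rotation as well:
//
//   clone.quaternion.copy(reticle.quaternion);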
// Create a render loop that allows us to draw on the AR view.
const onXRFrame = (time, frame) => {
// Queue up the next draw request.
session.requestAnimationFrame(onXRFrame);
// Bind the graphics framebuffer to the baseLayer's framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, session.renderState.baseLayer.framebuffer);
// Retrieve the pose of the device.
// XRFrame.getViewerPose can return null while the session attempts to establish tracking.
@@ -99,10 +95,10 @@ const onXRFrame = (time, frame) => {
const view = pose.views[0];
const viewport = session.renderState.baseLayer.getViewport(view);
renderer.setSize(viewport.width, viewport.height);
// Use the view's transform matrix and projection matrix to configure the THREE.camera.
camera.matrix.fromArray(view.transform.matrix);
camera.projectionMatrix.fromArray(view.projectionMatrix);
camera.updateMatrixWorld(true);
@@ -110,21 +106,15 @@ const onXRFrame = (time, frame) => {
if (hitTestResults.length > 0 && reticle) {
const hitPose = hitTestResults[0].getPose(referenceSpace);
reticle.visible = true;
reticle.position.set(hitPose.transform.position.x, hitPose.transform.position.y, hitPose.transform.position.z);
reticle.updateMatrixWorld(true);
}
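// Editor's sketch (assumption, not part of this commit): the hit pose also carries an
// orientation; applying it would align the reticle with the detected surface:
//
//   reticle.quaternion.set(
//     hitPose.transform.orientation.x,
//     hitPose.transform.orientation.y,
//     hitPose.transform.orientation.z,
//     hitPose.transform.orientation.w
//   );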
// Render the scene with THREE.WebGLRenderer.
renderer.render(scene, camera);
}
};
session.requestAnimationFrame(onXRFrame);
}
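// Editor's sketch (assumption, not shown in this diff): activateXR() has to be called
// from a user gesture, and immersive-ar support can be checked first. For example,
// assuming a <button id="start-ar"> exists elsewhere in the page:
//
//   if (navigator.xr) {
//     navigator.xr.isSessionSupported("immersive-ar").then((supported) => {
//       if (supported) {
//         document.getElementById("start-ar").addEventListener("click", () => activateXR());
//       }
//     });
//   }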
</script>
</body>