
How to animate glTF models in augmented reality in Three.js

Hello, I'm new to Three.js and I'm trying to show an animated glTF/GLB model in my AR project. There are a lot of examples of how to animate glTF models in a 3D environment, but I can't get it to work in my AR project.

I can import the 3D model and show it in AR, but the model refuses to animate.

I would appreciate some help.

<!DOCTYPE html>
<html>
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0" />
    <title>WebXR 3d model demo</title>
    <!-- three.js -->
    <script src="https://unpkg.com/three@0.126.0/build/three.js"></script>
    <script src="https://unpkg.com/three@0.126.0/examples/js/loaders/GLTFLoader.js"></script>
  </head>
  <body>
    <!-- Starting an immersive WebXR session -->
    <button onclick="activateXR()">Start AR</button>
    <script>
      async function activateXR() {
        // Add a canvas element and initialize a WebGL context that is compatible with WebXR.
        const canvas = document.createElement("canvas");
        document.body.appendChild(canvas);
        const gl = canvas.getContext("webgl", { xrCompatible: true });

        // Step 2
        const scene = new THREE.Scene();
        let clock = new THREE.Clock();
        let mixer;

        var hemiLight = new THREE.HemisphereLight(0xffffff, 0x444444);
        hemiLight.position.set(0, 300, 0);
        scene.add(hemiLight);

        var dirLight = new THREE.DirectionalLight(0xffffff);
        dirLight.position.set(75, 300, 0);
        scene.add(dirLight);

        // Step 3
        // Set up the WebGLRenderer, which handles rendering to the session's base layer.
        const renderer = new THREE.WebGLRenderer({
          alpha: true,
          preserveDrawingBuffer: true,
          canvas: canvas,
          context: gl,
        });
        renderer.autoClear = false;

        // The API directly updates the camera matrices.
        // Disable matrix auto updates so three.js doesn't attempt
        // to handle the matrices independently.
        const camera = new THREE.PerspectiveCamera();
        camera.matrixAutoUpdate = false;

        // Step 4
        // Initialize a WebXR session using "immersive-ar".
        const session = await navigator.xr.requestSession("immersive-ar", {
          requiredFeatures: ["hit-test"],
        });
        session.updateRenderState({
          baseLayer: new XRWebGLLayer(session, gl),
        });

        // A 'local' reference space has a native origin that is located
        // near the viewer's position at the time the session was created.
        const referenceSpace = await session.requestReferenceSpace("local");

        // Create another XRReferenceSpace that has the viewer as the origin.
        const viewerSpace = await session.requestReferenceSpace("viewer");
        // Perform hit testing using the viewer as origin.
        const hitTestSource = await session.requestHitTestSource({
          space: viewerSpace,
        });

        // Use the model loader from the previous step to load a targeting reticle and the key model from the web.
        const loader = new THREE.GLTFLoader();
        let reticle;
        loader.load(
          "https://immersive-web.github.io/webxr-samples/media/gltf/reticle/reticle.gltf",
          function (gltf) {
            reticle = gltf.scene;
            reticle.visible = false;
            scene.add(reticle);
          }
        );

        let key;
        loader.load("key/key.glb", function (gltf) {
          const model = gltf.scene;
          mixer = new THREE.AnimationMixer(model);
          mixer.clipAction(gltf.animations[0]).play();
          key = gltf.scene;
        });
        animate();

        // XRSession receives select events when the user completes a primary action.
        // In an AR session, this corresponds to a tap on the screen.
        session.addEventListener("select", (event) => {
          if (key) {
            const clone = key.clone();
            clone.position.copy(reticle.position);
            scene.add(clone);
          }
        });

        // Step 5
        // Create a render loop that allows us to draw on the AR view.
        const onXRFrame = (time, frame) => {
          // Queue up the next draw request.
          session.requestAnimationFrame(onXRFrame);

          // Bind the graphics framebuffer to the baseLayer's framebuffer.
          gl.bindFramebuffer(
            gl.FRAMEBUFFER,
            session.renderState.baseLayer.framebuffer
          );

          // Retrieve the pose of the device.
          // XRFrame.getViewerPose can return null while the session attempts to establish tracking.
          const pose = frame.getViewerPose(referenceSpace);
          if (pose) {
            // In mobile AR, we only have one view.
            const view = pose.views[0];
            const viewport = session.renderState.baseLayer.getViewport(view);
            renderer.setSize(viewport.width, viewport.height);

            // Use the view's transform matrix and projection matrix to configure the THREE.camera.
            camera.matrix.fromArray(view.transform.matrix);
            camera.projectionMatrix.fromArray(view.projectionMatrix);
            camera.updateMatrixWorld(true);

            const hitTestResults = frame.getHitTestResults(hitTestSource);
            if (hitTestResults.length > 0 && reticle) {
              const hitPose = hitTestResults[0].getPose(referenceSpace);
              reticle.visible = true;
              reticle.position.set(
                hitPose.transform.position.x,
                hitPose.transform.position.y,
                hitPose.transform.position.z
              );
              reticle.updateMatrixWorld(true);
            }

            // Render the scene with THREE.WebGLRenderer.
            renderer.render(scene, camera);
          }
        };

        function animate() {
          requestAnimationFrame(animate);
          const delta = clock.getDelta();
          renderer.render(scene, camera);
        }

        session.requestAnimationFrame(onXRFrame);
      }
    </script>
  </body>
</html>

The code and the glb file: https://github.com/weewautersmaxim/arDemo

In your animate function, it looks like you need something along the lines of:

if (mixer) {
    mixer.update(delta);
}

Otherwise the animation mixer doesn't know that time is passing.
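
Applied to the animate function from the question, a minimal sketch of the fix looks like this (delta comes from the THREE.Clock the question already creates, and mixer is the variable assigned in the load callback):

    function animate() {
      requestAnimationFrame(animate);
      const delta = clock.getDelta();
      // Advance all running animation actions by the elapsed time.
      if (mixer) {
        mixer.update(delta);
      }
      renderer.render(scene, camera);
    }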

I was finally able to solve my issue. First, I load the model inside the select event listener. I also had to change the animation code inside the load function, and the animate function needed extra code as well.
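
As a rough sketch of what those changes could look like (this reuses the names from the question's code — loader, reticle, scene, mixer — and is only a reconstruction; the exact final version is in the repository linked below), the model is loaded inside the select listener, the mixer is created and started inside the load callback, and the animate function gains the mixer.update(delta) call from the answer above:

    session.addEventListener("select", (event) => {
      if (reticle) {
        // Load a fresh copy of the model at the reticle position on each tap.
        loader.load("key/key.glb", function (gltf) {
          const model = gltf.scene;
          model.position.copy(reticle.position);
          scene.add(model);
          // Create and start the animation inside the load callback,
          // so the mixer is bound to the model that was actually placed.
          mixer = new THREE.AnimationMixer(model);
          mixer.clipAction(gltf.animations[0]).play();
        });
      }
    });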

If anyone is interested in the final code, here is a link to my GitHub repository. Everything is explained there.

https://github.com/Maxim-Weewauters/WebXR_Animations
