Hello, I'm new to Three.js and I'm trying to display an animated gltf/glb model in an AR project. There are plenty of examples showing how to animate a gltf model in a regular 3D environment, but I can't get it to work in an AR project.

I can import the 3D model and display it in AR, but the model refuses to animate.

Any help would be greatly appreciated.
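
For reference, this is the pattern the non-AR examples use to drive a glb animation (a minimal sketch; assume scene, camera, and renderer already exist, and "model.glb" is a placeholder path):

const clock = new THREE.Clock();
let mixer;

const loader = new THREE.GLTFLoader();
loader.load("model.glb", (gltf) => {
  scene.add(gltf.scene);
  // Bind the mixer to the loaded scene graph and start the first clip.
  mixer = new THREE.AnimationMixer(gltf.scene);
  mixer.clipAction(gltf.animations[0]).play();
});

function animate() {
  requestAnimationFrame(animate);
  // Advancing the mixer by the frame delta is what actually plays the animation.
  if (mixer) mixer.update(clock.getDelta());
  renderer.render(scene, camera);
}
animate();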

<!DOCTYPE html>
<html>
  <head>
    <meta charset="UTF-8" />
    <meta
      name="viewport"
      content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0"
    />
    <title>WebXR 3D model demo</title>
    <!-- three.js -->
    <script src="https://unpkg.com/three@0.126.0/build/three.js"></script>
    <script src="https://unpkg.com/three@0.126.0/examples/js/loaders/GLTFLoader.js"></script>
  </head>
  <body>
    <!-- Starting an immersive WebXR session -->
    <button onclick="activateXR()">Start AR</button>
    <script>
      async function activateXR() {
        // Add a canvas element and initialize a WebGL context that is compatible with WebXR.
        const canvas = document.createElement("canvas");
        document.body.appendChild(canvas);
        const gl = canvas.getContext("webgl", { xrCompatible: true });

        // Step 2
        const scene = new THREE.Scene();
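        // Clock for frame deltas and a mixer to drive the glb's animation clips.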
        let clock = new THREE.Clock();
        let mixer;

        const hemiLight = new THREE.HemisphereLight(0xffffff, 0x444444);
        hemiLight.position.set(0, 300, 0);
        scene.add(hemiLight);

        const dirLight = new THREE.DirectionalLight(0xffffff);
        dirLight.position.set(75, 300, 0);
        scene.add(dirLight);

        // Step 3

        // Set up the WebGLRenderer, which handles rendering to the session's base layer.
        const renderer = new THREE.WebGLRenderer({
          alpha: true,
          preserveDrawingBuffer: true,
          canvas: canvas,
          context: gl,
        });
        renderer.autoClear = false;

        // The API directly updates the camera matrices.
        // Disable matrix auto updates so three.js doesn't attempt
        // to handle the matrices independently.
        const camera = new THREE.PerspectiveCamera();
        camera.matrixAutoUpdate = false;

        // Step 4

        // Initialize a WebXR session using "immersive-ar".
        const session = await navigator.xr.requestSession("immersive-ar", {
          requiredFeatures: ["hit-test"],
        });
        session.updateRenderState({
          baseLayer: new XRWebGLLayer(session, gl),
        });

        // A 'local' reference space has a native origin that is located
        // near the viewer's position at the time the session was created.
        const referenceSpace = await session.requestReferenceSpace("local");

        // Create another XRReferenceSpace that has the viewer as the origin.
        const viewerSpace = await session.requestReferenceSpace("viewer");
        // Perform hit testing using the viewer as origin.
        const hitTestSource = await session.requestHitTestSource({
          space: viewerSpace,
        });

        // Use GLTFLoader to load the targeting reticle from the web and the key model locally.
        const loader = new THREE.GLTFLoader();
        let reticle;
        loader.load(
          "https://immersive-web.github.io/webxr-samples/media/gltf/reticle/reticle.gltf",
          function (gltf) {
            reticle = gltf.scene;
            reticle.visible = false;
            scene.add(reticle);
          }
        );

        let key;
        loader.load("key/key.glb", function (gltf) {
          const model = gltf.scene;

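          // Bind an AnimationMixer to the model and start its first animation clip.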
          mixer = new THREE.AnimationMixer(model);
          mixer.clipAction(gltf.animations[0]).play();

          key = gltf.scene;
        });
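        // Start a window-level render loop (this runs outside the XR session's own frame loop).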
        animate();

        // XRSession receives "select" events when the user completes a primary
        // action; in an AR session this corresponds to a tap on the screen.

        session.addEventListener("select", (event) => {
          if (key) {
            const clone = key.clone();
            clone.position.copy(reticle.position);
            scene.add(clone);
          }
        });

        // Step 5
        // Create a render loop that allows us to draw on the AR view.
        const onXRFrame = (time, frame) => {
          // Queue up the next draw request.
          session.requestAnimationFrame(onXRFrame);

          // Bind the graphics framebuffer to the baseLayer's framebuffer
          gl.bindFramebuffer(
            gl.FRAMEBUFFER,
            session.renderState.baseLayer.framebuffer
          );

          // Retrieve the pose of the device.
          // XRFrame.getViewerPose can return null while the session attempts to establish tracking.
          const pose = frame.getViewerPose(referenceSpace);
          if (pose) {
            // In mobile AR, we only have one view.
            const view = pose.views[0];

            const viewport = session.renderState.baseLayer.getViewport(view);
            renderer.setSize(viewport.width, viewport.height);

            // Use the view's transform matrix and projection matrix to configure the THREE.camera.
            camera.matrix.fromArray(view.transform.matrix);
            camera.projectionMatrix.fromArray(view.projectionMatrix);
            camera.updateMatrixWorld(true);

            const hitTestResults = frame.getHitTestResults(hitTestSource);
            if (hitTestResults.length > 0 && reticle) {
              const hitPose = hitTestResults[0].getPose(referenceSpace);
              reticle.visible = true;
              reticle.position.set(
                hitPose.transform.position.x,
                hitPose.transform.position.y,
                hitPose.transform.position.z
              );
              reticle.updateMatrixWorld(true);
            }

            // Render the scene with THREE.WebGLRenderer.
            renderer.render(scene, camera);
          }
        };
        function animate() {
          requestAnimationFrame(animate);

          const delta = clock.getDelta();

          renderer.render(scene, camera);
        }
        session.requestAnimationFrame(onXRFrame);
      }
    </script>
  </body>
</html>

Code + glb file: https://github.com/weewautersmaxim/arDemo

2 Answers