
I am trying to reproduce the three.js panorama dualfisheye example using Three.js r71.

I need to stick with r71 because eventually I will use this code in the Autodesk Forge viewer, which is based on Three.js r71.

I have made some progress, but I need help figuring out the UV mapping.

If you compare the result of the three.js panorama dualfisheye example (same link) with the snippet below, it is clear that something is wrong.

    var camera, scene, renderer;

    var isUserInteracting = false,
      onMouseDownMouseX = 0, onMouseDownMouseY = 0,
      lon = 0, onMouseDownLon = 0,
      lat = 0, onMouseDownLat = 0,
      phi = 0, theta = 0,
      distance = 500;

    init();
    animate();

    function init() {

      var container, mesh;

      container = document.getElementById('container');

      camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 1, 2000);

      scene = new THREE.Scene();
 
      // var geometry = new THREE.SphereBufferGeometry( 500, 60, 40 ).toNonIndexed();
      var geometry = new THREE.SphereGeometry(500, 60, 40);
      // invert the geometry on the x-axis so that all of the faces point inward
      // geometry.scale( - 1, 1, 1 );
      geometry.applyMatrix(new THREE.Matrix4().makeScale(-1, 1, 1));

      /*
      // Remap UVs
     
      // var normals = geometry.attributes.normal.array;
      var normals = [];
      geometry.faces.forEach(element => {
        normals.push(element.normal)
      });
      var uvs = geometry.faceVertexUvs
      // var uvs = geometry.attributes.uv.array;

      for (var i = 0, l = normals.length / 3; i < l; i++) {

        var x = normals[i * 3 + 0];
        var y = normals[i * 3 + 1];
        var z = normals[i * 3 + 2];

        if (i < l / 2) {

          var correction = (x == 0 && z == 0) ? 1 : (Math.acos(y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
          uvs[i * 2 + 0] = x * (404 / 1920) * correction + (447 / 1920);
          uvs[i * 2 + 1] = z * (404 / 1080) * correction + (582 / 1080);

        } else {

          var correction = (x == 0 && z == 0) ? 1 : (Math.acos(- y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
          uvs[i * 2 + 0] = - x * (404 / 1920) * correction + (1460 / 1920);
          uvs[i * 2 + 1] = z * (404 / 1080) * correction + (582 / 1080);

        }

      }

      */
      // geometry.rotateZ( - Math.PI / 2 );
      geometry.applyMatrix(new THREE.Matrix4().makeRotationZ(Math.PI / 2))

      THREE.ImageUtils.crossOrigin = '';
      var texture = THREE.ImageUtils.loadTexture('https://threejs.org/examples/textures/ricoh_theta_s.jpg');

      this.texture = texture;
      texture.format = THREE.RGBFormat;

      var material = new THREE.MeshBasicMaterial({ map: texture });
      material.map.repeat.set(1, 1);
      material.map.offset.set(0, 0);

      mesh = new THREE.Mesh(geometry, material);
      scene.add(mesh);

      renderer = new THREE.WebGLRenderer();
      renderer.setPixelRatio(window.devicePixelRatio);
      renderer.setSize(window.innerWidth, window.innerHeight);
      container.appendChild(renderer.domElement);

      document.addEventListener('mousedown', onDocumentMouseDown, false);
      document.addEventListener('mousemove', onDocumentMouseMove, false);
      document.addEventListener('mouseup', onDocumentMouseUp, false);
      document.addEventListener('wheel', onDocumentMouseWheel, false);

      window.addEventListener('resize', onWindowResize, false);

    }

    function onWindowResize() {

      camera.aspect = window.innerWidth / window.innerHeight;
      camera.updateProjectionMatrix();

      renderer.setSize(window.innerWidth, window.innerHeight);

    }

    function onDocumentMouseDown(event) {

      event.preventDefault();

      isUserInteracting = true;

      onPointerDownPointerX = event.clientX;
      onPointerDownPointerY = event.clientY;

      onPointerDownLon = lon;
      onPointerDownLat = lat;

    }

    function onDocumentMouseMove(event) {

      if (isUserInteracting === true) {

        lon = (onPointerDownPointerX - event.clientX) * 0.1 + onPointerDownLon;
        lat = (onPointerDownPointerY - event.clientY) * 0.1 + onPointerDownLat;

      }

    }

    function onDocumentMouseUp(event) {

      isUserInteracting = false;

    }

    function onDocumentMouseWheel(event) {

      distance += event.deltaY * 0.05;

      distance = THREE.Math.clamp(distance, 400, 1000);

    }

    function animate() {

      requestAnimationFrame(animate);
      update();

    }

    function update() {

      if (isUserInteracting === false) {

        lon += 0.1;

      }

      lat = Math.max(- 85, Math.min(85, lat));
      phi = THREE.Math.degToRad(90 - lat);
      theta = THREE.Math.degToRad(lon - 180);

      camera.position.x = distance * Math.sin(phi) * Math.cos(theta);
      camera.position.y = distance * Math.cos(phi);
      camera.position.z = distance * Math.sin(phi) * Math.sin(theta);

      camera.lookAt(scene.position);

      renderer.render(scene, camera);

    }
    body {
      background-color: #000000;
      margin: 0px;
      overflow: hidden;
    }
    <script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/71/three.js"></script>
    <div id="container"></div>

Thank you for your time.


1 Answer


With Geometry (as opposed to BufferGeometry) the normals are per face (face.vertexNormals) and are Vector3s. The UVs are an array of arrays of Vector2s:

someVector2 = geometry.faceVertexUvs[setNdx][faceNdx][vertexNdx]
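
For example, here is a minimal sketch (my own illustration, not taken from the snippet below) of reading one face's vertex normals and writing its UVs, assuming geometry is a THREE.Geometry such as the r71 SphereGeometry used here:

    // Minimal sketch: access pattern for per-face vertex normals and UVs
    // on a THREE.Geometry (r71). faceIndex is just a hypothetical face to touch.
    var faceIndex = 0;
    var face = geometry.faces[faceIndex];                  // THREE.Face3
    var uvsForFace = geometry.faceVertexUvs[0][faceIndex]; // [Vector2, Vector2, Vector2]

    for (var v = 0; v < 3; ++v) {
      var normal = face.vertexNormals[v];                  // THREE.Vector3
      // placeholder mapping: squash the normal into the 0..1 UV range
      uvsForFace[v].x = normal.x * 0.5 + 0.5;
      uvsForFace[v].y = normal.z * 0.5 + 0.5;
    }

    // If the UVs change after the mesh has already been rendered,
    // flag them so the renderer re-uploads them.
    geometry.uvsNeedUpdate = true;

The corrected version of the question's snippet then becomes: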

    var camera, scene, renderer;

    var isUserInteracting = false,
      onMouseDownMouseX = 0, onMouseDownMouseY = 0,
      lon = 0, onMouseDownLon = 0,
      lat = 0, onMouseDownLat = 0,
      phi = 0, theta = 0,
      distance = 500;

    init();
    animate();

    function init() {

      var container, mesh;

      container = document.getElementById('container');

      camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 1, 2000);

      scene = new THREE.Scene();
 
      var geometry = new THREE.SphereGeometry(500, 60, 40);
      // invert the geometry on the x-axis so that all of the faces point inward
      geometry.applyMatrix(new THREE.Matrix4().makeScale(-1, 1, 1));

      // Remap UVs
     
      var uvs = geometry.faceVertexUvs[0];
      geometry.faces.forEach((face, ndx) => {
        const faceUVs = uvs[ndx];
        for (var i = 0; i < 3; ++i) {
          const faceNormal = face.vertexNormals[i];
          var x = faceNormal.x;
          var y = faceNormal.y;
          var z = faceNormal.z;


          if (ndx < geometry.faces.length / 2) {

            var correction = (x == 0 && z == 0) ? 1 : (Math.acos(y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
            faceUVs[i].x = x * (404 / 1920) * correction + (447 / 1920);
            faceUVs[i].y = z * (404 / 1080) * correction + (582 / 1080);

          } else {

            var correction = (x == 0 && z == 0) ? 1 : (Math.acos(- y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
            faceUVs[i].x = - x * (404 / 1920) * correction + (1460 / 1920);
            faceUVs[i].y = z * (404 / 1080) * correction + (582 / 1080);

          }
        }

      });
      
      geometry.applyMatrix(new THREE.Matrix4().makeRotationZ(Math.PI / 2))

      THREE.ImageUtils.crossOrigin = '';
      var texture = THREE.ImageUtils.loadTexture('https://threejs.org/examples/textures/ricoh_theta_s.jpg');

      this.texture = texture;
      texture.format = THREE.RGBFormat;

      var material = new THREE.MeshBasicMaterial({ map: texture });
      material.map.repeat.set(1, 1);
      material.map.offset.set(0, 0);

      mesh = new THREE.Mesh(geometry, material);
      scene.add(mesh);

      renderer = new THREE.WebGLRenderer();
      renderer.setPixelRatio(window.devicePixelRatio);
      renderer.setSize(window.innerWidth, window.innerHeight);
      container.appendChild(renderer.domElement);

      document.addEventListener('mousedown', onDocumentMouseDown, false);
      document.addEventListener('mousemove', onDocumentMouseMove, false);
      document.addEventListener('mouseup', onDocumentMouseUp, false);
      document.addEventListener('wheel', onDocumentMouseWheel, false);

      window.addEventListener('resize', onWindowResize, false);

    }

    function onWindowResize() {

      camera.aspect = window.innerWidth / window.innerHeight;
      camera.updateProjectionMatrix();

      renderer.setSize(window.innerWidth, window.innerHeight);

    }

    function onDocumentMouseDown(event) {

      event.preventDefault();

      isUserInteracting = true;

      onPointerDownPointerX = event.clientX;
      onPointerDownPointerY = event.clientY;

      onPointerDownLon = lon;
      onPointerDownLat = lat;

    }

    function onDocumentMouseMove(event) {

      if (isUserInteracting === true) {

        lon = (onPointerDownPointerX - event.clientX) * 0.1 + onPointerDownLon;
        lat = (onPointerDownPointerY - event.clientY) * 0.1 + onPointerDownLat;

      }

    }

    function onDocumentMouseUp(event) {

      isUserInteracting = false;

    }

    function onDocumentMouseWheel(event) {

      distance += event.deltaY * 0.05;

      distance = THREE.Math.clamp(distance, 400, 1000);

    }

    function animate() {

      requestAnimationFrame(animate);
      update();

    }

    function update() {

      if (isUserInteracting === false) {

        lon += 0.1;

      }

      lat = Math.max(- 85, Math.min(85, lat));
      phi = THREE.Math.degToRad(90 - lat);
      theta = THREE.Math.degToRad(lon - 180);

      camera.position.x = distance * Math.sin(phi) * Math.cos(theta);
      camera.position.y = distance * Math.cos(phi);
      camera.position.z = distance * Math.sin(phi) * Math.sin(theta);

      camera.lookAt(scene.position);

      renderer.render(scene, camera);

    }
body {
      background-color: #000000;
      margin: 0px;
      overflow: hidden;
    }
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/71/three.js"></script>
    <div id="container"></div>

Let me point out that I worked this out by running the sample, opening devtools in Chrome, setting breakpoints, and inspecting the variables.

Here are the UVs:

[devtools screenshot of the uvs structure]

And here are the vertex normals:

[devtools screenshot of the vertexNormals structure]
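
If you want to reproduce that inspection, here is a quick sketch (assuming the same r71 SphereGeometry as above) that logs the same structures from the console or from a breakpoint:

    // Quick sketch: dump the structures that were inspected in devtools,
    // assuming three.js r71 is loaded and using the same sphere as above.
    var geometry = new THREE.SphereGeometry(500, 60, 40);

    console.log(geometry.faces.length);            // number of THREE.Face3 faces
    console.log(geometry.faces[0].vertexNormals);  // [Vector3, Vector3, Vector3]
    console.log(geometry.faceVertexUvs[0][0]);     // [Vector2, Vector2, Vector2] for face 0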

Answered on 2018-10-03T12:59:31.360