具有多个纹理的Three.js点

时间:2018-07-10 19:18:20

标签: javascript three.js webgl

我正在Three.js场景中工作,在该场景中我想使用许多具有不同纹理的点。但是,到目前为止,我还无法在片段着色器中更改纹理。尽管以下场景中的点应在纹理0(一只猫)和1(一只狗)之间交替,但所有点都显示相同的纹理:

/**
* Generate a scene object with a background color
**/

function getScene() {
  // Build the scene and give it a light grey backdrop.
  var sceneObj = new THREE.Scene();
  sceneObj.background = new THREE.Color(0xaaaaaa);
  return sceneObj;
}

/**
* Generate the camera to be used in the scene. Camera args:
*   [0] field of view: identifies the portion of the scene
*     visible at any time (in degrees)
*   [1] aspect ratio: identifies the aspect ratio of the
*     scene in width/height
*   [2] near clipping plane: objects closer than the near
*     clipping plane are culled from the scene
*   [3] far clipping plane: objects farther than the far
*     clipping plane are culled from the scene
**/

function getCamera() {
  // 75° field of view, window aspect ratio, near/far clipping planes.
  var winAspect = window.innerWidth / window.innerHeight;
  var cam = new THREE.PerspectiveCamera(75, winAspect, 0.1, 10000);
  // Pull the camera far back on -z so the whole point grid is visible.
  cam.position.set(0, 1, -6000);
  return cam;
}

/**
* Generate the renderer to be used in the scene
**/

function getRenderer() {
  // WebGL renderer with antialiasing enabled.
  var webglRenderer = new THREE.WebGLRenderer({antialias: true});
  webglRenderer.setPixelRatio(window.devicePixelRatio); // retina support
  webglRenderer.setSize(window.innerWidth, window.innerHeight); // fill window
  document.body.appendChild(webglRenderer.domElement); // mount the canvas
  return webglRenderer;
}

/**
* Generate the controls to be used in the scene
* @param {obj} camera: the three.js camera for the scene
* @param {obj} renderer: the three.js renderer for the scene
**/

function getControls(camera, renderer) {
  // Trackball controls bound to the renderer's canvas element.
  var trackball = new THREE.TrackballControls(camera, renderer.domElement);
  trackball.zoomSpeed = 0.4;
  trackball.panSpeed = 0.4;
  return trackball;
}

/**
* Generate the points for the scene
* @param {obj} scene: the current scene object
**/

function addPoints(scene) {
  // this geometry builds a blueprint and many copies of the blueprint
  var geometry  = new THREE.InstancedBufferGeometry();

  // the blueprint is a single vertex at the origin; every instance
  // offsets it by its own per-instance `translation` attribute
  geometry.addAttribute( 'position',
    new THREE.BufferAttribute( new Float32Array( [0, 0, 0] ), 3));

  // add data for each observation
  var n = 10000; // number of observations
  var translation = new Float32Array( n * 3 ); // x, y, z per instance
  var texture = new Float32Array( n ); // texture index per instance
  var translationIterator = 0;

  // alternate instances between texture 0 (cat) and texture 1 (dog)
  for (var i=0; i<n; i++) {
    texture[i] = i % 2;
  }

  // lay the points out on a grid in the z = 10 plane;
  // translationIterator always equals i here, so the switch keys off
  // which component (x, y, or z) of the current instance is being filled
  for (var i=0; i<n*3; i++) {
    switch (translationIterator % 3) {
      case 0:
        translation[translationIterator++] = ((i * 50) % 10000) - 5000;
        break;
      case 1:
        translation[translationIterator++] = Math.floor((i / 160) * 50) - 5000;
        break;
      case 2:
        translation[translationIterator++] = 10;
        break;
    }
  }

  // per-instance attributes: the final argument (1) advances the
  // attribute once per instance rather than once per vertex
  geometry.addAttribute( 'translation',
    new THREE.InstancedBufferAttribute( translation, 3, 1 ) );

  geometry.addAttribute( 'textureIndex',
    new THREE.InstancedBufferAttribute( texture, 1, 1 ) );

  // RawShaderMaterial injects no built-in uniforms/attributes, so the
  // shaders below declare everything themselves; 'tv' marks the uniform
  // as an array of textures
  var loader = new THREE.TextureLoader();
  var material = new THREE.RawShaderMaterial({
    uniforms: {
      textures: {
        'type': 'tv',
        'value': [
          loader.load('https://s3.amazonaws.com/duhaime/blog/tsne-webgl/assets/cat.jpg'),
          loader.load('https://s3.amazonaws.com/duhaime/blog/tsne-webgl/assets/dog.jpg'),
        ],
      }
    },
    vertexShader: document.getElementById('vertex-shader').textContent,
    fragmentShader: document.getElementById('fragment-shader').textContent,
  });
  var mesh = new THREE.Points(geometry, material);
  mesh.frustumCulled = false; // prevent the mesh from being clipped on drag
  scene.add(mesh);
}

/**
* Render!
**/

// Draw one frame, schedule the next, and keep the trackball controls in
// sync. Reads the module-level `renderer`, `scene`, `camera`, and
// `controls` globals.
function render() {
  requestAnimationFrame(render); // schedule the next frame first
  renderer.render(scene, camera);
  controls.update(); // trackball controls require a per-frame update
} // fixed: dropped the stray `;` after the function declaration

/**
* Main
**/

// Bootstrap: build the scene graph, camera, renderer, and controls,
// add the instanced points, then start the render loop. These globals
// are read by render() each frame.
var scene = getScene();
var camera = getCamera();
var renderer = getRenderer();
var controls = getControls(camera, renderer);
addPoints(scene);
render();
html, body { width: 100%; height: 100%; background: #000; }
body { margin: 0; overflow: hidden; }
canvas { width: 100%; height: 100%; }
<html>
<body>
  <script src='https://cdnjs.cloudflare.com/ajax/libs/three.js/88/three.min.js'></script>
  <script src='https://rawgit.com/YaleDHLab/pix-plot/master/assets/js/trackball-controls.js'></script>

    <script type='x-shader/x-vertex' id='vertex-shader'>
    /**
    * The vertex shader's main() function must define `gl_Position`,
    * which describes the position of each vertex in screen coordinates.
    *
    * To do so, we can use the following variables defined by Three.js:
    *   attribute vec3 position - stores each vertex's position in world space
    *   attribute vec2 uv - sets each vertex's the texture coordinates
    *   uniform mat4 projectionMatrix - maps camera space into screen space
    *   uniform mat4 modelViewMatrix - combines:
    *     model matrix: maps a point's local coordinate space into world space
    *     view matrix: maps world space into camera space
    *
    * `attributes` can vary from vertex to vertex and are defined as arrays
    *   with length equal to the number of vertices. Each index in the array
    *   is an attribute for the corresponding vertex. Each attribute must
    *   contain n_vertices * n_components, where n_components is the length
    *   of the given datatype (e.g. for a vec2, n_components = 2; for a float,
    *   n_components = 1)
    * `uniforms` are constant across all vertices
    * `varyings` are values passed from the vertex to the fragment shader
    *
    * For the full list of uniforms defined by three, see:
    *   https://threejs.org/docs/#api/renderers/webgl/WebGLProgram
    **/

    // set float precision
    precision mediump float;

    // specify geometry uniforms
    uniform mat4 modelViewMatrix;
    uniform mat4 projectionMatrix;

    // to get the camera attributes:
    uniform vec3 cameraPosition;

    // blueprint attributes
    attribute vec3 position; // sets the blueprint's vertex positions

    // instance attributes
    attribute vec3 translation; // x y translation offsets for an instance
    attribute float textureIndex; // idx of texture in sampler

    // specify the varyings to pass to instances
    varying float vTextureIndex;

    void main() {
      // pass varyings to fragment shader
      // NOTE(review): this assignment is commented out, so vTextureIndex is
      // never written; the fragment shader reads it as 0.0 and every point
      // samples textures[0]. This is the bug the question demonstrates.
      //vTextureIndex = textureIndex;

      // set point position
      vec3 pos = position + translation;
      vec4 projected = projectionMatrix * modelViewMatrix * vec4(pos, 1.0);
      gl_Position = projected;

      // use the delta between the point position and camera position to size point
      // NOTE(review): `projected` is in clip space while cameraPosition is in
      // world space, so this distance mixes coordinate systems — presumably an
      // intentional approximation for point sizing; verify.
      float xDelta = pow(projected[0] - cameraPosition[0], 2.0);
      float yDelta = pow(projected[1] - cameraPosition[1], 2.0);
      float zDelta = pow(projected[2] - cameraPosition[2], 2.0);
      float delta  = pow(xDelta + yDelta + zDelta, 0.5);
      gl_PointSize = 50000.0 / delta;
    }
    </script>

    <script type='x-shader/x-fragment' id='fragment-shader'>
    /**
    * The fragment shader's main() function must define `gl_FragColor`,
    * which describes the pixel color of each pixel on the screen.
    *
    * To do so, we can use uniforms passed into the shader and varyings
    * passed from the vertex shader.
    *
    * Attempting to read a varying not generated by the vertex shader will
    * throw a warning but won't prevent shader compiling.
    **/

    precision highp float;

    // array of two textures the point instances sample from
    uniform sampler2D textures[2];

    // declare received varyings
    // NOTE(review): the vertex shader above never assigns vTextureIndex
    // (the assignment is commented out), so it always reads as 0.0 here.
    varying float vTextureIndex; // instance uv offsets

    void main() {
      int texIdx = int(vTextureIndex);
      // NOTE(review): the vec2(0.0, 0.0) term is a no-op; the expression
      // only flips gl_PointCoord vertically.
      vec2 uv = vec2(0.0, 0.0) + vec2( gl_PointCoord.x, 1.0 - gl_PointCoord.y );
      if (texIdx == 0) {
        gl_FragColor = texture2D(textures[0], uv );
      } else if (texIdx == 1) {
        gl_FragColor = texture2D(textures[1], uv );
      }
      // NOTE(review): if texIdx were neither 0 nor 1, gl_FragColor would be
      // left undefined — harmless here since the attribute is always i % 2.
    }
    </script>
</body>
</html>

有人看到我想念的东西吗?非常感谢其他人在这个问题上可以提供的见识!

1 个答案:

答案 0 :(得分:0)

如果其他人遇到和我一样的问题:上面的示例其实是一个更大场景的简化版本。在那个场景中,我传入的属性名为 texture,但在顶点着色器中声明的属性名却是 textureIndex。随后我把 textureIndex 作为 varying 传给片段着色器,它的值始终为 0。这个故事的教训是——如果读取一个从未真正传入顶点着色器的属性,其值会静默地变为 0,而不会报错。如果它能抛出错误就好了。

为了在上面的简化示例中重现这个问题,我注释掉了顶点着色器中对 varying 的赋值。下面是修正后的代码:

  /**
  * Generate a scene object with a background color
  **/

  function getScene() {
    // Scene container with a light grey clear color.
    var result = new THREE.Scene();
    result.background = new THREE.Color(0xaaaaaa);
    return result;
  }

  /**
  * Generate the camera to be used in the scene. Camera args:
  *   [0] field of view: identifies the portion of the scene
  *     visible at any time (in degrees)
  *   [1] aspect ratio: identifies the aspect ratio of the
  *     scene in width/height
  *   [2] near clipping plane: objects closer than the near
  *     clipping plane are culled from the scene
  *   [3] far clipping plane: objects farther than the far
  *     clipping plane are culled from the scene
  **/

  function getCamera() {
    // Perspective camera: 75° fov, window aspect, 0.1/10000 clip planes.
    var ratio = window.innerWidth / window.innerHeight;
    var perspectiveCam = new THREE.PerspectiveCamera(75, ratio, 0.1, 10000);
    // Back the camera far out on -z so the full grid of points is in view.
    perspectiveCam.position.set(0, 1, -6000);
    return perspectiveCam;
  }

  /**
  * Generate the renderer to be used in the scene
  **/

  function getRenderer() {
    // Antialiased WebGL renderer sized to the window.
    var glRenderer = new THREE.WebGLRenderer({antialias: true});
    glRenderer.setPixelRatio(window.devicePixelRatio); // retina support
    glRenderer.setSize(window.innerWidth, window.innerHeight);
    document.body.appendChild(glRenderer.domElement); // mount the canvas
    return glRenderer;
  }

  /**
  * Generate the controls to be used in the scene
  * @param {obj} camera: the three.js camera for the scene
  * @param {obj} renderer: the three.js renderer for the scene
  **/

  function getControls(camera, renderer) {
    // Attach trackball controls to the renderer's canvas.
    var tb = new THREE.TrackballControls(camera, renderer.domElement);
    tb.zoomSpeed = 0.4;
    tb.panSpeed = 0.4;
    return tb;
  }

  /**
  * Generate the points for the scene
  * @param {obj} scene: the current scene object
  **/

  function addPoints(scene) {
    // this geometry builds a blueprint and many copies of the blueprint
    var geometry  = new THREE.InstancedBufferGeometry();

    // the blueprint is a single vertex at the origin; every instance
    // offsets it by its own per-instance `translation` attribute
    geometry.addAttribute( 'position',
      new THREE.BufferAttribute( new Float32Array( [0, 0, 0] ), 3));

    // add data for each observation
    var n = 10000; // number of observations
    var translation = new Float32Array( n * 3 ); // x, y, z per instance
    var texture = new Float32Array( n ); // texture index per instance
    var translationIterator = 0;

    // alternate instances between texture 0 (cat) and texture 1 (dog)
    for (var i=0; i<n; i++) {
      texture[i] = i % 2;
    }

    // lay the points out on a grid in the z = 10 plane;
    // translationIterator always equals i here, so the switch keys off
    // which component (x, y, or z) of the current instance is being filled
    for (var i=0; i<n*3; i++) {
      switch (translationIterator % 3) {
        case 0:
          translation[translationIterator++] = ((i * 50) % 10000) - 5000;
          break;
        case 1:
          translation[translationIterator++] = Math.floor((i / 160) * 50) - 5000;
          break;
        case 2:
          translation[translationIterator++] = 10;
          break;
      }
    }

    // per-instance attributes: the final argument (1) advances the
    // attribute once per instance rather than once per vertex
    geometry.addAttribute( 'translation',
      new THREE.InstancedBufferAttribute( translation, 3, 1 ) );

    geometry.addAttribute( 'textureIndex',
      new THREE.InstancedBufferAttribute( texture, 1, 1 ) );

    // RawShaderMaterial injects no built-in uniforms/attributes, so the
    // shaders below declare everything themselves; 'tv' marks the uniform
    // as an array of textures
    var loader = new THREE.TextureLoader();
    var material = new THREE.RawShaderMaterial({
      uniforms: {
        textures: {
          'type': 'tv',
          'value': [
            loader.load('https://s3.amazonaws.com/duhaime/blog/tsne-webgl/assets/cat.jpg'),
            loader.load('https://s3.amazonaws.com/duhaime/blog/tsne-webgl/assets/dog.jpg'),
          ],
        }
      },
      vertexShader: document.getElementById('vertex-shader').textContent,
      fragmentShader: document.getElementById('fragment-shader').textContent,
    });
    var mesh = new THREE.Points(geometry, material);
    mesh.frustumCulled = false; // prevent the mesh from being clipped on drag
    scene.add(mesh);
  }

  /**
  * Render!
  **/

  // Draw one frame, schedule the next, and keep the trackball controls
  // in sync. Reads the module-level `renderer`, `scene`, `camera`, and
  // `controls` globals.
  function render() {
    requestAnimationFrame(render); // schedule the next frame first
    renderer.render(scene, camera);
    controls.update(); // trackball controls require a per-frame update
  } // fixed: dropped the stray `;` after the function declaration

  /**
  * Main
  **/

  // Bootstrap: build the scene graph, camera, renderer, and controls,
  // add the instanced points, then start the render loop. These globals
  // are read by render() each frame.
  var scene = getScene();
  var camera = getCamera();
  var renderer = getRenderer();
  var controls = getControls(camera, renderer);
  addPoints(scene);
  render();
<html>
<head>
  <style>
  html, body { width: 100%; height: 100%; background: #000; }
  body { margin: 0; overflow: hidden; }
  canvas { width: 100%; height: 100%; }
  </style>
</head>
<body>
  <script src='https://cdnjs.cloudflare.com/ajax/libs/three.js/88/three.min.js'></script>
  <script src='https://rawgit.com/YaleDHLab/pix-plot/master/assets/js/trackball-controls.js'></script>

    <script type='x-shader/x-vertex' id='vertex-shader'>
    /**
    * The vertex shader's main() function must define `gl_Position`,
    * which describes the position of each vertex in screen coordinates.
    *
    * To do so, we can use the following variables defined by Three.js:
    *   attribute vec3 position - stores each vertex's position in world space
    *   attribute vec2 uv - sets each vertex's the texture coordinates
    *   uniform mat4 projectionMatrix - maps camera space into screen space
    *   uniform mat4 modelViewMatrix - combines:
    *     model matrix: maps a point's local coordinate space into world space
    *     view matrix: maps world space into camera space
    *
    * `attributes` can vary from vertex to vertex and are defined as arrays
    *   with length equal to the number of vertices. Each index in the array
    *   is an attribute for the corresponding vertex. Each attribute must
    *   contain n_vertices * n_components, where n_components is the length
    *   of the given datatype (e.g. for a vec2, n_components = 2; for a float,
    *   n_components = 1)
    * `uniforms` are constant across all vertices
    * `varyings` are values passed from the vertex to the fragment shader
    *
    * For the full list of uniforms defined by three, see:
    *   https://threejs.org/docs/#api/renderers/webgl/WebGLProgram
    **/

    // set float precision
    precision mediump float;

    // specify geometry uniforms
    uniform mat4 modelViewMatrix;
    uniform mat4 projectionMatrix;

    // to get the camera attributes:
    uniform vec3 cameraPosition;

    // blueprint attributes
    attribute vec3 position; // sets the blueprint's vertex positions

    // instance attributes
    attribute vec3 translation; // x y translation offsets for an instance
    attribute float textureIndex; // idx of texture in sampler

    // specify the varyings to pass to instances
    varying float vTextureIndex;

    void main() {
      // pass varyings to fragment shader
      // (this assignment is the fix: without it the fragment shader reads
      // vTextureIndex as 0.0 and every point samples textures[0])
      vTextureIndex = textureIndex;

      // set point position
      vec3 pos = position + translation;
      vec4 projected = projectionMatrix * modelViewMatrix * vec4(pos, 1.0);
      gl_Position = projected;

      // use the delta between the point position and camera position to size point
      // NOTE(review): `projected` is in clip space while cameraPosition is in
      // world space, so this distance mixes coordinate systems — presumably an
      // intentional approximation for point sizing; verify.
      float xDelta = pow(projected[0] - cameraPosition[0], 2.0);
      float yDelta = pow(projected[1] - cameraPosition[1], 2.0);
      float zDelta = pow(projected[2] - cameraPosition[2], 2.0);
      float delta  = pow(xDelta + yDelta + zDelta, 0.5);
      gl_PointSize = 50000.0 / delta;
    }
    </script>

    <script type='x-shader/x-fragment' id='fragment-shader'>
    /**
    * The fragment shader's main() function must define `gl_FragColor`,
    * which describes the pixel color of each pixel on the screen.
    *
    * To do so, we can use uniforms passed into the shader and varyings
    * passed from the vertex shader.
    *
    * Attempting to read a varying not generated by the vertex shader will
    * throw a warning but won't prevent shader compiling.
    **/

    precision highp float;

    // array of two textures the point instances sample from; GLSL ES 1.0
    // requires constant indices into sampler arrays, hence the if/else below
    uniform sampler2D textures[2];

    // declare received varyings
    varying float vTextureIndex; // per-instance texture selector (0.0 or 1.0)

    void main() {
      // round rather than truncate so interpolation noise (e.g. 0.999...)
      // cannot select the wrong texture
      int texIdx = int(vTextureIndex + 0.5);
      // flip Y: gl_PointCoord's origin is at the top-left of the sprite
      // (the previous `vec2(0.0, 0.0) +` term was a no-op and is removed)
      vec2 uv = vec2( gl_PointCoord.x, 1.0 - gl_PointCoord.y );
      if (texIdx == 0) {
        gl_FragColor = texture2D(textures[0], uv );
      } else if (texIdx == 1) {
        gl_FragColor = texture2D(textures[1], uv );
      } else {
        // defensive fallback: never leave gl_FragColor undefined
        gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);
      }
    }
    </script>
</body>
</html>