如何在Angular Project中包含THREE.LensFlareElement?

时间:2019-01-27 06:08:19

标签: three.js angular7

我尝试在场景中使用LensFlareElement,但是每次尝试加载场景时,都会收到以下消息:

  

THREE.LensFlare 已移至 /examples/js/objects/Lensflare.js

我用 npm install three 安装了 Three.js,并按如下方式导入:

import * as THREE from 'three';

那么我有什么办法可以使用typescript和angular7到达js文件中的代码?

亲切问候

1 个答案:

答案 0 :(得分:0)

因此,在Three.js讨论论坛上发帖之后,@Mugen87分享了一个有关如何将示例脚本转换为模块的示例:

How do I include THREE.LensFlareElement in Angular Project? on discourse.three.org

如何转换模块:(处理步骤)

  1. 采用原始示例代码
  2. 在文件顶部添加 import * as THREE from 'three';
  3. 删除每处以 THREE. 开头的赋值声明,并在末尾添加一个 export { TheExampleIWant }…就我而言:export { Lensflare, LensflareElement }

注意:请注意,我仅使用Mugen提供的解决方案对此进行了测试,而未测试其他示例。我可以想象有一些示例将它们转换成模块并不是那么容易,但是也许我错了。

基本上,转换后的示例与提供的原始示例非常相似。

转换后的示例如下所示: (信用归功于Mugen87,他为我提供了此示例)

import * as THREE from 'three';

/**
 * Lensflare — a special Mesh that draws a screen-space lens flare made up of
 * one or more LensflareElement sprites.
 *
 * Converted from three.js examples/js/objects/Lensflare.js into an ES module
 * (credit: Mugen87). Inherits from THREE.Mesh via the prototype wiring that
 * follows this function.
 *
 * Usage: construct, then register sprites with addElement(); all drawing
 * happens inside the onBeforeRender() hook using renderBufferDirect().
 */
function Lensflare() {

  // The base mesh itself is an invisible quad (opacity 0); the visible flare
  // sprites are drawn manually in onBeforeRender().
  THREE.Mesh.call(this, LensflareGeometry, new THREE.MeshBasicMaterial({ opacity: 0, transparent: true }));

  this.type = 'Lensflare';
  this.frustumCulled = false; // position is evaluated in screen space, so normal frustum culling would be wrong
  this.renderOrder = Infinity; // draw after everything else in the scene

  //

  // Scratch vectors reused every frame to avoid per-frame allocations.
  var positionScreen = new THREE.Vector3();
  var positionView = new THREE.Vector3();

  // textures

  // 16x16 RGB scratch texture: stores the framebuffer pixels under the flare
  // position so they can be restored after the occlusion probe is drawn.
  var tempMap = new THREE.DataTexture(new Uint8Array(16 * 16 * 3), 16, 16, THREE.RGBFormat);
  tempMap.minFilter = THREE.NearestFilter;
  tempMap.magFilter = THREE.NearestFilter;
  tempMap.wrapS = THREE.ClampToEdgeWrapping;
  tempMap.wrapT = THREE.ClampToEdgeWrapping;
  tempMap.needsUpdate = true;

  // 16x16 RGB occlusion map: captures how much of the depth-tested probe quad
  // survived; sampled by the element shader to fade occluded flares.
  var occlusionMap = new THREE.DataTexture(new Uint8Array(16 * 16 * 3), 16, 16, THREE.RGBFormat);
  occlusionMap.minFilter = THREE.NearestFilter;
  occlusionMap.magFilter = THREE.NearestFilter;
  occlusionMap.wrapS = THREE.ClampToEdgeWrapping;
  occlusionMap.wrapT = THREE.ClampToEdgeWrapping;
  occlusionMap.needsUpdate = true;

  // material

  var geometry = LensflareGeometry;

  // material1a: depth-TESTED flat magenta quad — the occlusion probe.
  var material1a = new THREE.RawShaderMaterial({
    uniforms: {
      'scale': { value: null },
      'screenPosition': { value: null }
    },
    vertexShader: [

      'precision highp float;',

      'uniform vec3 screenPosition;',
      'uniform vec2 scale;',

      'attribute vec3 position;',

      'void main() {',

      ' gl_Position = vec4( position.xy * scale + screenPosition.xy, screenPosition.z, 1.0 );',

      '}'

    ].join('\n'),
    fragmentShader: [

      'precision highp float;',

      'void main() {',

      ' gl_FragColor = vec4( 1.0, 0.0, 1.0, 1.0 );',

      '}'

    ].join('\n'),
    depthTest: true,
    depthWrite: false,
    transparent: false
  });

  // material1b: depth test OFF — textured quad used to paint the saved scene
  // pixels (tempMap) back over the probe area.
  var material1b = new THREE.RawShaderMaterial({
    uniforms: {
      'map': { value: tempMap },
      'scale': { value: null },
      'screenPosition': { value: null }
    },
    vertexShader: [

      'precision highp float;',

      'uniform vec3 screenPosition;',
      'uniform vec2 scale;',

      'attribute vec3 position;',
      'attribute vec2 uv;',

      'varying vec2 vUV;',

      'void main() {',

      ' vUV = uv;',

      ' gl_Position = vec4( position.xy * scale + screenPosition.xy, screenPosition.z, 1.0 );',

      '}'

    ].join('\n'),
    fragmentShader: [

      'precision highp float;',

      'uniform sampler2D map;',

      'varying vec2 vUV;',

      'void main() {',

      ' gl_FragColor = texture2D( map, vUV );',

      '}'

    ].join('\n'),
    depthTest: false,
    depthWrite: false,
    transparent: false
  });

  // the following object is used for occlusionMap generation

  var mesh1 = new THREE.Mesh(geometry, material1a);

  //

  // LensflareElement instances registered via addElement().
  var elements = [];

  var shader = LensflareElement.Shader;

  // material2: draws each flare sprite, additively blended and modulated by
  // the visibility derived from occlusionMap (see LensflareElement.Shader).
  var material2 = new THREE.RawShaderMaterial({
    uniforms: {
      'map': { value: null },
      'occlusionMap': { value: occlusionMap },
      'color': { value: new THREE.Color(0xffffff) },
      'scale': { value: new THREE.Vector2() },
      'screenPosition': { value: new THREE.Vector3() }
    },
    vertexShader: shader.vertexShader,
    fragmentShader: shader.fragmentShader,
    blending: THREE.AdditiveBlending,
    transparent: true,
    depthWrite: false
  });

  var mesh2 = new THREE.Mesh(geometry, material2);

  /**
   * Registers a flare sprite to be drawn every frame.
   * @param {LensflareElement} element - texture/size/distance/color descriptor.
   */
  this.addElement = function (element) {

    elements.push(element);

  };

  //

  // More per-frame scratch objects.
  var scale = new THREE.Vector2();
  var screenPositionPixels = new THREE.Vector2();
  var validArea = new THREE.Box2();
  var viewport = new THREE.Vector4();

  // Called by the renderer just before this mesh would be drawn; performs the
  // whole flare pipeline: occlusion probe -> restore -> draw elements.
  this.onBeforeRender = function (renderer, scene, camera) {

    viewport.copy(renderer.getCurrentViewport());

    var invAspect = viewport.w / viewport.z;
    var halfViewportWidth = viewport.z / 2.0;
    var halfViewportHeight = viewport.w / 2.0;

    // Probe quad is 16px tall in NDC-relative units.
    var size = 16 / viewport.w;
    scale.set(size * invAspect, size);

    // Area where a full 16x16 pixel block can be copied without clipping.
    validArea.min.set(viewport.x, viewport.y);
    validArea.max.set(viewport.x + (viewport.z - 16), viewport.y + (viewport.w - 16));

    // calculate position in screen space

    positionView.setFromMatrixPosition(this.matrixWorld);
    positionView.applyMatrix4(camera.matrixWorldInverse);

    if (positionView.z > 0) return; // lensflare is behind the camera

    positionScreen.copy(positionView).applyMatrix4(camera.projectionMatrix);

    // horizontal and vertical coordinate of the lower left corner of the pixels to copy

    screenPositionPixels.x = viewport.x + (positionScreen.x * halfViewportWidth) + halfViewportWidth - 8;
    screenPositionPixels.y = viewport.y + (positionScreen.y * halfViewportHeight) + halfViewportHeight - 8;

    // screen cull

    if (validArea.containsPoint(screenPositionPixels)) {

      // save current RGB to temp texture

      renderer.copyFramebufferToTexture(screenPositionPixels, tempMap);

      // render pink quad

      // NOTE(review): 'uniforms' is re-declared with var twice more below;
      // hoisting makes all three declarations one binding.
      var uniforms = material1a.uniforms;
      uniforms["scale"].value = scale;
      uniforms["screenPosition"].value = positionScreen;

      renderer.renderBufferDirect(camera, null, geometry, material1a, mesh1, null);

      // copy result to occlusionMap

      renderer.copyFramebufferToTexture(screenPositionPixels, occlusionMap);

      // restore graphics

      var uniforms = material1b.uniforms;
      uniforms["scale"].value = scale;
      uniforms["screenPosition"].value = positionScreen;

      // NOTE(review): mesh1 is reused here with material1b — matches the
      // upstream example; only geometry/material matter for this direct draw.
      renderer.renderBufferDirect(camera, null, geometry, material1b, mesh1, null);

      // render elements

      var vecX = - positionScreen.x * 2;
      var vecY = - positionScreen.y * 2;

      for (var i = 0, l = elements.length; i < l; i++) {

        var element = elements[i];

        var uniforms = material2.uniforms;

        uniforms["color"].value.copy(element.color);
        uniforms["map"].value = element.texture;
        // Elements are spread along the line through screen center, scaled by
        // each element's 'distance'.
        uniforms["screenPosition"].value.x = positionScreen.x + vecX * element.distance;
        uniforms["screenPosition"].value.y = positionScreen.y + vecY * element.distance;

        // NOTE(review): re-declares the outer 'size'/'invAspect' vars (same
        // binding via hoisting) with per-element values.
        var size = element.size / viewport.w;
        var invAspect = viewport.w / viewport.z;

        uniforms["scale"].value.set(size * invAspect, size);

        material2.needsUpdate = true;

        renderer.renderBufferDirect(camera, null, geometry, material2, mesh2, null);

      }

    }

  };

  /**
   * Releases GPU resources owned by this flare: the three materials, both
   * scratch textures, and every registered element's texture.
   */
  this.dispose = function () {

    material1a.dispose();
    material1b.dispose();
    material2.dispose();

    tempMap.dispose();
    occlusionMap.dispose();

    for (var i = 0, l = elements.length; i < l; i++) {

      elements[i].texture.dispose();

    }

  };

}

// Prototype wiring: Lensflare is-a THREE.Mesh. Object.assign creates the same
// enumerable own properties that individual assignments would.
Lensflare.prototype = Object.assign(Object.create(THREE.Mesh.prototype), {
  constructor: Lensflare,
  isLensflare: true
});

//

/**
 * A single flare sprite rendered by Lensflare.
 *
 * @param {THREE.Texture} texture - sprite texture (disposed by Lensflare.dispose()).
 * @param {number} [size=1] - sprite size in pixels. NOTE: falls back via `||`,
 *   so an explicit 0 also becomes 1 (kept for upstream compatibility).
 * @param {number} [distance=0] - 0 places the sprite at the light source, 1 at
 *   the opposite side of the screen center.
 * @param {THREE.Color} [color=white] - tint color.
 */
function LensflareElement(texture, size, distance, color) {
  // BUG FIX: the parameter was written as `color?` — TypeScript optional-
  // parameter syntax, which is a SyntaxError in a plain JavaScript function.
  // Optionality in JS comes from the `||` fallbacks below; the `?` is removed.

  this.texture = texture;
  this.size = size || 1;
  this.distance = distance || 0;
  this.color = color || new THREE.Color(0xffffff);

}

/**
 * Shared shader descriptor for flare sprites; Lensflare builds its element
 * RawShaderMaterial (material2) from this. Uniform values here are
 * placeholders (null) — Lensflare supplies the real values.
 *
 * The vertex stage samples the 16x16 occlusionMap at nine probe points and
 * derives a per-sprite visibility factor passed to the fragment stage.
 */
LensflareElement.Shader = {

  uniforms: {

    'map': { value: null },
    'occlusionMap': { value: null },
    'color': { value: null },
    'scale': { value: null },
    'screenPosition': { value: null }

  },

  // RawShaderMaterial: the source declares its own precision and attributes.
  vertexShader: [

    'precision highp float;',

    'uniform vec3 screenPosition;',
    'uniform vec2 scale;',

    'uniform sampler2D occlusionMap;',

    'attribute vec3 position;',
    'attribute vec2 uv;',

    'varying vec2 vUV;',
    'varying float vVisibility;',

    'void main() {',

    '   vUV = uv;',

    '   vec2 pos = position.xy;',

    // Nine-tap sample of the occlusion probe (corners, edges, center).
    '   vec4 visibility = texture2D( occlusionMap, vec2( 0.1, 0.1 ) );',
    '   visibility += texture2D( occlusionMap, vec2( 0.5, 0.1 ) );',
    '   visibility += texture2D( occlusionMap, vec2( 0.9, 0.1 ) );',
    '   visibility += texture2D( occlusionMap, vec2( 0.9, 0.5 ) );',
    '   visibility += texture2D( occlusionMap, vec2( 0.9, 0.9 ) );',
    '   visibility += texture2D( occlusionMap, vec2( 0.5, 0.9 ) );',
    '   visibility += texture2D( occlusionMap, vec2( 0.1, 0.9 ) );',
    '   visibility += texture2D( occlusionMap, vec2( 0.1, 0.5 ) );',
    '   visibility += texture2D( occlusionMap, vec2( 0.5, 0.5 ) );',

    // NOTE(review): the probe quad is magenta (1,0,1), so r/b are high where
    // the probe survived the depth test and g comes from the underlying
    // scene; visibility rises where the flare is unoccluded — confirm against
    // the upstream example if modifying.
    '   vVisibility =        visibility.r / 9.0;',
    '   vVisibility *= 1.0 - visibility.g / 9.0;',
    '   vVisibility *=       visibility.b / 9.0;',

    '   gl_Position = vec4( ( pos * scale + screenPosition.xy ).xy, screenPosition.z, 1.0 );',

    '}'

  ].join('\n'),

  fragmentShader: [

    'precision highp float;',

    'uniform sampler2D map;',
    'uniform vec3 color;',

    'varying vec2 vUV;',
    'varying float vVisibility;',

    'void main() {',

    // Sprite texture, faded by occlusion and tinted by the element color.
    '   vec4 texture = texture2D( map, vUV );',
    '   texture.a *= vVisibility;',
    '   gl_FragColor = texture;',
    '   gl_FragColor.rgb *= color;',

    '}'

  ].join('\n')

};

var count = 0;

/**
 * LensflareGeometry: a single two-triangle unit quad shared by every draw
 * call in this module (probe, restore, and element passes).
 * Vertex layout is interleaved x, y, z, u, v (stride of 5 floats).
 */
var LensflareGeometry = (function () {

  var vertexData = new Float32Array([
    -1, -1, 0, 0, 0,
     1, -1, 0, 1, 0,
     1,  1, 0, 1, 1,
    -1,  1, 0, 0, 1
  ]);

  var interleaved = new THREE.InterleavedBuffer(vertexData, 5);
  var quad = new THREE.BufferGeometry();

  quad.setIndex([0, 1, 2, 0, 2, 3]);
  quad.addAttribute('position', new THREE.InterleavedBufferAttribute(interleaved, 3, 0, false));
  quad.addAttribute('uv', new THREE.InterleavedBufferAttribute(interleaved, 2, 3, false));

  // NOTE(review): looks like leftover debug code — tags the index attribute
  // with a serial number; nothing in this module reads it back. Kept to
  // preserve behavior exactly.
  quad.index.temp = count++;

  return quad;

})();

export { Lensflare, LensflareElement };

导入转换后的模块后,我能够使lensflare正常工作。

希望这可以帮助任何不知道如何转换模块的用户

亲切的问候