我正在尝试制作一款可以模拟长时间曝光摄影的应用。我的想法是从网络摄像头抓取当前帧并将其合成到画布上。随着时间的推移,照片将“曝光”,变得越来越明亮。(参见 http://www.chromeexperiments.com/detail/light-paint-live-mercury/?f=)
我有一个完美运行的着色器。这就像在photoshop中的'添加'混合模式。问题是我不能让它回收前一帧。
我原以为只需设置 renderer.autoClear = false; 就能实现,
但在这种情况下,该设置似乎不起作用。
这是使用THREE.EffectComposer应用着色器的代码。
// Sets up the Three.js pipeline that composites webcam frames onto a canvas
// to simulate a long exposure.
// NOTE(review): assumes #user-stream and #webcam exist in the DOM and that
// THREE.EffectComposer, addShader, and FrameImpulse are loaded — confirm at call site.
onWebcamInit: function () {
// Size the renderer/camera from the on-page video element's CSS dimensions.
var $stream = $("#user-stream"),
width = $stream.width(),
height = $stream.height(),
near = .1,
far = 10000;
this.renderer = new THREE.WebGLRenderer();
this.renderer.setSize(width, height);
// Intent: keep the previous frame so exposures accumulate (this alone is
// not sufficient — see the answers below about ping-pong render targets).
this.renderer.autoClear = false;
this.scene = new THREE.Scene();
// Orthographic camera centered on the origin, one world unit per pixel.
this.camera = new THREE.OrthographicCamera(width / -2, width / 2, height / 2, height / -2, near, far);
this.scene.add(this.camera);
this.$el.append(this.renderer.domElement);
// frameTexture samples the live <video>; compositeTexture samples the
// renderer's own canvas (the feedback source).
this.frameTexture = new THREE.Texture(document.querySelector("#webcam"));
this.compositeTexture = new THREE.Texture(this.renderer.domElement);
this.composer = new THREE.EffectComposer(this.renderer);
// same effect with or without this line
// this.composer.addPass(new THREE.RenderPass(this.scene, this.camera));
// Additive-blend shader pass; 'exposure' weights how fast light accumulates.
var addEffect = new THREE.ShaderPass(addShader);
addEffect.uniforms[ 'exposure' ].value = .5;
addEffect.uniforms[ 'frameTexture' ].value = this.frameTexture;
addEffect.renderToScreen = true;
this.composer.addPass(addEffect);
// Full-screen quad textured with the canvas snapshot.
this.plane = new THREE.Mesh(new THREE.PlaneGeometry(width, height, 1, 1), new THREE.MeshBasicMaterial({map: this.compositeTexture}));
this.scene.add(this.plane);
this.frameTexture.needsUpdate = true;
this.compositeTexture.needsUpdate = true;
// Drive renderFrame once per animation frame.
new FrameImpulse(this.renderFrame);
},
renderFrame: function () {
this.frameTexture.needsUpdate = true;
this.compositeTexture.needsUpdate = true;
this.composer.render();
}
这是着色器。没什么好看的。
// Shader definition for the additive 'exposure' pass.
// tDiffuse is filled in by EffectComposer with the previous pass's output.
uniforms: {
"tDiffuse": { type: "t", value: null },
"frameTexture": { type: "t", value: null },
"exposure": { type: "f", value: 1.0 }
},
// Pass-through vertex shader: forwards UVs and projects the quad.
vertexShader: [
"varying vec2 vUv;",
"void main() {",
"vUv = uv;",
"gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
"}"
].join("\n"),
// Photoshop-style 'add' blend: sum the new webcam frame with the
// accumulated image, mixed by 'exposure' (alpha forced to 1.0).
fragmentShader: [
"uniform sampler2D frameTexture;",
"uniform sampler2D tDiffuse;",
"uniform float exposure;",
"varying vec2 vUv;",
"void main() {",
"vec4 n = texture2D(frameTexture, vUv);",
"vec4 o = texture2D(tDiffuse, vUv);",
"vec3 sum = n.rgb + o.rgb;",
"gl_FragColor = vec4(mix(o.rgb, sum.rgb, exposure), 1.0);",
"}"
].join("\n")
答案 0 :(得分:1)
要实现这种反馈效果,您必须在两个单独的 WebGLRenderTarget
实例之间交替写入。否则,帧缓冲区会被覆盖。不完全确定为什么会发生这种情况……但这就是解决方案。
INIT:
// Two ping-pong render targets: one holds the previous composite while the
// other receives the new frame, then they are swapped each render.
// (WebGLRenderTarget copies these option values, so sharing the object is safe.)
var targetOptions = { minFilter: THREE.LinearFilter, magFilter: THREE.NearestFilter, format: THREE.RGBFormat };
this.rt1 = new THREE.WebGLRenderTarget(512, 512, targetOptions);
this.rt2 = new THREE.WebGLRenderTarget(512, 512, targetOptions);
渲染:
// Draw to the screen, then capture the same scene into rt1 (no clear).
this.renderer.render(this.scene, this.camera);
this.renderer.render(this.scene, this.camera, this.rt1, false);
// Ping-pong: the target just written becomes the feedback source.
var previous = this.rt1;
this.rt1 = this.rt2;
this.rt2 = previous;
// Feed last frame's result back into the shader for the next pass.
this.shaders.add.uniforms.tDiffuse.value = this.rt2;
答案 1 :(得分:1)
这实际上等同于 posit labs 的答案,但我使用了一个更简化的解决方案——我创建了一个只包含我想要回收的 ShaderPass 的 EffectComposer,然后在每次渲染时交换该合成器(composer)的 renderTarget。
初始化:
// Exchange the composer's internal read/write render targets so the
// previous frame's output becomes the next frame's input (ping-pong).
THREE.EffectComposer.prototype.swapTargets = function() {
var swap = this.renderTarget1;
this.renderTarget1 = this.renderTarget2;
this.renderTarget2 = swap;
};
...
// Build a dedicated composer whose render target we will ping-pong.
composer = new THREE.EffectComposer(renderer,
new THREE.WebGLRenderTarget(512, 512, { minFilter: THREE.LinearFilter, magFilter: THREE.NearestFilter, format: THREE.RGBFormat })
);
// Passing 'frameTexture' as the textureID tells ShaderPass to write the
// previous pass's result into that uniform instead of tDiffuse.
var addEffect = new THREE.ShaderPass(addShader, 'frameTexture');
addEffect.renderToScreen = true;
// BUG FIX: the original called `this.composer.addPass(...)`, but the
// composer above is assigned to the bare `composer` variable, so
// `this.composer` would be undefined here.
composer.addPass(addEffect);
渲染:
// Render the feedback pass, then swap targets so this frame's output
// becomes next frame's input.
composer.render();
composer.swapTargets();
然后,辅助EffectComposer可以从两个renderTargets中选择一个并将其推送到屏幕或进一步转换。
另请注意,在初始化ShaderPass时,我将“frameTexture”声明为textureID。这让ShaderPass知道使用前一个Pass的结果更新frameTexture uniform。
答案 2 :(得分:0)
试试这个:
// preserveDrawingBuffer keeps the canvas contents between frames instead of
// letting the browser discard them, so the previous frame can be re-sampled.
this.renderer = new THREE.WebGLRenderer( { preserveDrawingBuffer: true } );