My problem is that I want to have just one WebGLRenderer, but many camera views that can be placed into many separate canvases. In the example below I have 9 views in 1 canvas, each with a unique camera, scene and mesh, which are then drawn onto their own canvas using the ctx.drawImage method. This approach works, but drawImage is so slow that it can't even hit 10 fps, let alone 60+ fps.
Is there a way around this that doesn't involve the slow drawImage method, or some way to speed the whole process up?
Thanks for your help; example code is below.
<!doctype html>
<html lang="en">
<head>
    <meta charset="utf-8">
    <title>Three.js Test</title>
    <style>
        body {
            margin: 0;
            padding: 0;
            overflow: hidden;
        }
    </style>
    <!--[if lt IE 9]>
    <script src="http://html5shiv.googlecode.com/svn/trunk/html5.js"></script>
    <![endif]-->
</head>
<body>
<script src="./three.min.js"></script>
<script>
var renderer;
var windowWidth, windowHeight;
var numberMeshes = 1;
var dpr = window.devicePixelRatio || 1;
var viewDemensions = {x: 3, y: 3};
var views = [];

init();
animate();

function init() {
    for (var i = 0; i < viewDemensions.x; i++) {
        for (var j = 0; j < viewDemensions.y; j++) {
            var obj = {};
            obj.left = i/viewDemensions.x;
            obj.bottom = j/viewDemensions.y;
            obj.width = 1/viewDemensions.x;
            obj.height = 1/viewDemensions.y;
            obj.canvas = document.createElement('canvas');
            obj.context = obj.canvas.getContext('2d');
            document.body.appendChild(obj.canvas);
            var camera = new THREE.PerspectiveCamera(75, 100/100, 1, 10000);
            camera.position.z = 1000;
            obj.camera = camera;
            var scene = new THREE.Scene();
            var geometry = new THREE.SphereGeometry(100, 10, 10);
            obj.meshes = [];
            for (var k = 0; k < numberMeshes; k++) {
                var material = new THREE.MeshBasicMaterial({ color: 0xffffff*Math.random(), wireframe: true });
                var mesh = new THREE.Mesh(geometry, material);
                var scale = 2*Math.random();
                mesh.scale.set(scale, scale, scale);
                scene.add(mesh);
                obj.meshes.push(mesh);
            }
            obj.scene = scene;
            views.push(obj);
        }
    }
    renderer = new THREE.WebGLRenderer({
        preserveDrawingBuffer: true
    });
    // document.body.appendChild(renderer.domElement);
}

function updateSize() {
    if (windowWidth != window.innerWidth || windowHeight != window.innerHeight) {
        windowWidth = window.innerWidth;
        windowHeight = window.innerHeight;
        renderer.setSize(windowWidth, windowHeight);
    }
}

function animate() {
    updateSize();
    for (var i = 0; i < views.length; i++) {
        var view = views[i];
        var left = Math.floor(view.left*windowWidth) * dpr;
        var bottom = Math.floor(view.bottom*windowHeight) * dpr;
        var width = Math.floor(view.width*windowWidth) * dpr;
        var height = Math.floor(view.height*windowHeight) * dpr;
        view.canvas.width = width;
        view.canvas.height = height;
        view.canvas.style.width = Math.floor(view.width*windowWidth) + 'px';
        view.canvas.style.height = Math.floor(view.height*windowHeight) + 'px';
        view.context.scale(dpr, dpr);
        view.camera.aspect = width/height;
        view.camera.updateProjectionMatrix();
        renderer.setViewport(left, bottom, width, height);
        renderer.setScissor(left, bottom, width, height);
        renderer.enableScissorTest(true);
        renderer.setClearColor(new THREE.Color().setRGB(0.5, 0.5, 0.5));
        for (var j = 0; j < numberMeshes; j++) {
            view.meshes[j].rotation.x += 0.03*Math.random();
            view.meshes[j].rotation.y += 0.05*Math.random();
        }
        renderer.render(view.scene, view.camera);
        view.context.drawImage(renderer.domElement, left, bottom, width, height, 0, 0, view.width*windowWidth, view.height*windowHeight);
    }
    requestAnimationFrame(animate);
}
</script>
</body>
</html>
Answer 0 (score: 1)
After some discussion in the comments: using FBOs for the different views, and then using those textures as inputs to quads positioned in the different views, might suit your case. Please check. Note that this does not involve drawing to a buffer, then reading the pixels back, and then applying them to a canvas.
EDIT1: Added pseudocode using Three.js
// Create offscreen target
rtTexture = new THREE.WebGLRenderTarget(window.innerWidth, window.innerHeight, ..);
// Create screen material (a custom shader, or a plain textured material) and mesh
mtlScreen = new THREE.ShaderMaterial({ uniforms: { tDiffuse: { type: "t", value: rtTexture } }, ..});
mtl = new THREE.MeshBasicMaterial({ map: rtTexture });
mesh = new THREE.Mesh(plane, mtl);
scene.add(mesh);
// Now render to offscreen first, then to display
renderer.render(sceneRTT, cameraRTT, rtTexture, ..);
renderer.render(scene, camera);
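Mapped onto the multi-view case from the question, this reads as: render each view's scene into its own target, then composite the resulting textures as quads in one visible scene, so no pixels ever leave the GPU. A hedged sketch under that reading (the names compositeScene/compositeCamera, the 256x256 target size, and the quad layout are illustrative and not from the original answer; views[], windowWidth and windowHeight are as in the question's code, and the renderer's canvas is assumed to be on the page at full window size):

// One render target per view, plus one textured quad per view in a
// shared composite scene laid out in window pixels
var compositeScene = new THREE.Scene();
var compositeCamera = new THREE.OrthographicCamera(0, windowWidth, windowHeight, 0, -1, 1);

views.forEach(function (view) {
    view.target = new THREE.WebGLRenderTarget(256, 256);
    var quad = new THREE.Mesh(
        new THREE.PlaneGeometry(view.width*windowWidth, view.height*windowHeight),
        new THREE.MeshBasicMaterial({ map: view.target })
    );
    // PlaneGeometry is centred on its origin, so offset by half a cell
    quad.position.set(
        (view.left + view.width/2) * windowWidth,
        (view.bottom + view.height/2) * windowHeight,
        0
    );
    compositeScene.add(quad);
});

function render() {
    views.forEach(function (view) {
        renderer.render(view.scene, view.camera, view.target, true); // offscreen pass
    });
    renderer.render(compositeScene, compositeCamera); // one visible pass
    requestAnimationFrame(render);
}
render();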
See the standard three.js example for the full code - https://github.com/prabindh/three.js/blob/master/examples/webgl_rtt.html - and I have posted a short slide deck at http://www.slideshare.net/prabindh/render-to-texture-with-threejs
Approach With GLES2:
Quick setup of the FBOs with GLES2 (trivial changes for WebGL):
glGenFramebuffers(NUM_FBO, fboId);
glGenTextures(NUM_FBO, fboTextureId);
glGenTextures(1, &regularTextureId);
Then set up for drawing to the offscreen buffer:
GL_CHECK(glBindTexture(GL_TEXTURE_2D, 0));
GL_CHECK(glBindTexture(GL_TEXTURE_2D, fboTextureId[i]));
GL_CHECK(glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, globals->inTextureWidth, globals->inTextureHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL));
GL_CHECK(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR));
GL_CHECK(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR));
GL_CHECK(glBindFramebuffer(GL_FRAMEBUFFER, fboId[i]));
GL_CHECK(glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, fboTextureId[i], 0));
Then draw to the offscreen buffer:
//Bind regular texture
GL_CHECK(glBindTexture(GL_TEXTURE_2D, 0));
GL_CHECK(glBindTexture(GL_TEXTURE_2D, regularTextureId));
add_texture(globals->inTextureWidth, globals->inTextureHeight, globals->textureData, globals->inPixelFormat);
GL_CHECK(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR));
GL_CHECK(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR));
//Draw with regular draw calls to FBO
GL_CHECK(_test17(globals, numObjectsPerSide, 1));
Now use this as a texture input and draw to the regular display:
GL_CHECK(glBindTexture(GL_TEXTURE_2D, fboTextureId[i]));
GL_CHECK(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR));
GL_CHECK(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR));
//draw to display buffer
//Now get back display framebuffer and unbind the FBO
GL_CHECK(glBindFramebuffer(GL_FRAMEBUFFER, 0));
https://github.com/prabindh/sgxperf/blob/master/sgxperf_test17.cpp
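Porting the snippets above to WebGL is nearly line for line. A minimal sketch, assuming a WebGLRenderingContext named gl already exists and that width and height hold the offscreen size (WebGL creates one object per call rather than using the glGen*(n, ids) form, and there is no GL_CHECK macro):

// Create the offscreen colour texture
var fboTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, fboTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0,
              gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);

// Create the FBO and attach the texture as its colour buffer
var fbo = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
                        gl.TEXTURE_2D, fboTexture, 0);

// ...issue regular draw calls here; they now land in the FBO...

// Rebind the display framebuffer, then draw a quad sampling fboTexture
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, fboTexture);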
Answer 1 (score: 0)
Here's the solution I came up with. Not as professional as prabindh's answer, but it was the only way I could figure out to solve the problem.
Basically I render each distinct view/scene to a WebGLRenderTarget on the single renderer, then use readPixels to copy the pixels onto that view's canvas. I tried to get web workers to help the performance along, but couldn't get them to play nicely with the image writing. My code is below.
If this is completely different from what you meant, prabindh, I'd be glad to hear how to do this better and/or more correctly for my current use case.
var renderer;
var gl;
var times = [];
var windowWidth, windowHeight;
var numberMeshes = 10;
var dpr = window.devicePixelRatio || 1;
var viewDemensions = {x: 2, y: 2};
var views = [];

init();
animate();

function init() {
    renderer = new THREE.WebGLRenderer({preserveDrawingBuffer: true});
    renderer.autoClear = false;
    gl = renderer.getContext();
    for (var i = 0; i < viewDemensions.x; i++) {
        for (var j = 0; j < viewDemensions.y; j++) {
            var obj = {};
            obj.left = i/viewDemensions.x;
            obj.bottom = j/viewDemensions.y;
            obj.width = 1/viewDemensions.x;
            obj.height = 1/viewDemensions.y;
            obj.canvas = document.createElement('canvas');
            obj.context = obj.canvas.getContext('2d');
            document.body.appendChild(obj.canvas);
            var camera = new THREE.PerspectiveCamera(75, 100/100, 1, 10000);
            camera.position.z = 1000;
            obj.camera = camera;
            var scene = new THREE.Scene();
            var geometry = new THREE.SphereGeometry(100, 10, 10);
            obj.meshes = [];
            for (var k = 0; k < numberMeshes; k++) {
                var material = new THREE.MeshBasicMaterial({ color: 0xffffff*Math.random(), wireframe: true });
                var mesh = new THREE.Mesh(geometry, material);
                var scale = 2*Math.random();
                mesh.scale.set(scale, scale, scale);
                scene.add(mesh);
                obj.meshes.push(mesh);
            }
            obj.scene = scene;
            obj.widthVal = 100;
            obj.heightVal = 100;
            obj.imageData = obj.context.getImageData(0, 0, obj.widthVal, obj.heightVal);
            obj.pixels = new Uint8Array(obj.imageData.data.length);
            // obj.ww = new Worker("ww.js");
            // obj.frames = [];
            // obj.prevFrame = 0;
            // obj.ww.onmessage = function (event) {
            //     var i = event.data.i;
            //     var imageData = event.data.imageData;
            //     views[i].context.putImageData(imageData, 0, 0);
            // };
            obj.target = new THREE.WebGLRenderTarget(100, 100, { minFilter: THREE.LinearFilter, magFilter: THREE.NearestFilter, format: THREE.RGBFormat });
            views.push(obj);
        }
    }
}

function updateSize() {
    if (windowWidth != window.innerWidth || windowHeight != window.innerHeight) {
        windowWidth = window.innerWidth;
        windowHeight = window.innerHeight;
    }
}

function animate() {
    updateSize();
    var i, j;
    var view, width, height;
    var sWidth, sHeight;
    var mesh;
    for (i = 0; i < views.length; i++) {
        view = views[i];
        // if (!view.lock) {
        for (j = 0; j < view.meshes.length; j++) {
            mesh = view.meshes[j];
            mesh.rotation.x += 0.03;
            mesh.rotation.y += 0.05;
        }
        sWidth = Math.floor(view.width*windowWidth);
        sHeight = Math.floor(view.height*windowHeight);
        width = sWidth * dpr;
        height = sHeight * dpr;
        var same = true;
        if (view.widthVal != width || view.heightVal != height) {
            same = false;
            view.widthVal = width;
            view.heightVal = height;
        }
        view.canvas.width = width;
        view.canvas.height = height;
        view.canvas.style.width = sWidth + 'px';
        view.canvas.style.height = sHeight + 'px';
        view.context.scale(dpr, dpr);
        view.camera.aspect = width/height;
        view.camera.updateProjectionMatrix();
        renderer.setSize(sWidth, sHeight);
        view.target.width = width;
        view.target.height = height;
        renderer.render(view.scene, view.camera, view.target, true);
        if (!same) {
            // Reallocate the pixel buffers only when the view is resized
            view.imageData = view.context.createImageData(width, height);
            view.pixels = new Uint8Array(view.imageData.data.length);
        }
        // readPixels reads from the currently bound framebuffer, which
        // after the render-to-target call above is this view's target
        gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, view.pixels);
        // view.ww.postMessage({imageData: imageData, pixels: pixels, i: i});
        view.imageData.data.set(view.pixels);
        view.context.putImageData(view.imageData, 0, 0);
    }
    requestAnimationFrame(animate);
}