I need to run the three.js WebGLRenderer on a server, but I keep finding conflicting answers. Some people say it is impossible, others say they tried to get it working, and then the discussion just trails off without a conclusion.
Is it possible, and if so, what is the approach? Can mock-browser be combined with node-gl?
[EDIT] Solution added below
Answer 0 (score: 1)
You can try headless-gl, but if your code needs them you will also have to pull in a few other libraries to mock the DOM, the Image tag (for texture loading) and the Canvas tag and/or a Canvas2D context.
Otherwise you can shell out to a browser running on top of OSMesa, or try headless Chromium.
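For the headless-Chromium route, a rough sketch with puppeteer could look like the following. The page URL, viewport size and output path are placeholders, the page itself is assumed to set up the three.js scene, and on a GPU-less server you may additionally need to force software GL (e.g. SwiftShader):

// Hypothetical sketch: screenshot a page that renders the scene with WebGLRenderer
// inside headless Chromium. URL, size and output path are assumptions.
const puppeteer = require('puppeteer');

(async () => {
    const browser = await puppeteer.launch();              // headless by default
    const page = await browser.newPage();
    await page.setViewport({ width: 400, height: 400 });
    await page.goto('http://localhost:8080/scene.html');   // page that runs the WebGLRenderer
    await page.screenshot({ path: 'scene.png' });          // grab the rendered frame
    await browser.close();
})();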
Answer 1 (score: 1)
This is how I solved the problem myself. Depending on the size of the scene and its objects it can take a while; in my case I only wanted to return a fairly small rendering of the object, and it still took about 400 ms to respond with a 400x400 px PNG. Hope this helps someone out there!
Server.js
var THREE = require("three.js");        // note: on current npm the maintained package is "three"

// Create a mocked DOM so three.js has document/window objects to work with
var MockBrowser = require('mock-browser').mocks.MockBrowser;
var mock = new MockBrowser();
var document = mock.getDocument();
var window = mock.getWindow();

// REST API
var express = require('express');
var app = express();
var bodyParser = require('body-parser');
var router = express.Router();

var gl = require('gl')(1, 1);           // headless-gl context (actual size comes from the render target)
var pngStream = require('three-png-stream');

var port = process.env.PORT || 8080;

router.get('/render', function(req, res) {
    var width = 400;                    // output size of the PNG
    var height = 400;

    var scene = new THREE.Scene();
    var camera = new THREE.PerspectiveCamera(75, width / height, 0.1, 1000);
    var renderer = new THREE.WebGLRenderer({ context: gl });
    scene.add(camera);
    renderer.setSize(width, height);
    renderer.setClearColor(0xFFFFFF, 1);

    /*...
    Add your objects & lights to the scene
    ...*/

    // Render into an offscreen target and stream it back as a PNG
    var target = new THREE.WebGLRenderTarget(width, height);
    renderer.render(scene, camera, target);

    res.setHeader('Content-Type', 'image/png');
    pngStream(renderer, target).pipe(res);
});

app.use('/api', router);
app.listen(port);
console.log('Server active on port: ' + port);
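To try the endpoint, request /api/render and write the response to a file; a minimal client sketch (host and port assumed to match the server above):

// Hypothetical test client: fetch the rendered PNG from the server above.
var http = require('http');
var fs = require('fs');

http.get('http://localhost:8080/api/render', function(res) {
    res.pipe(fs.createWriteStream('render.png'));   // save the streamed PNG
});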
Answer 2 (score: 1)
I managed to do it. The trick is to render with the CanvasRenderer onto a node-canvas and then pull the PNG image data out of it.
var fs = require("fs");

var ratio = 16 / 9.0;
var canvasWidth = 500;
var canvasHeight = 500;

// Minimal window/document shims so three.js can run outside a browser
var window = {
    innerWidth: canvasWidth,
    innerHeight: canvasHeight
};

var document = {
    createElement: function(name) {
        // three.js only asks for canvas elements here; back them with node-canvas
        var Canvas = require('canvas');
        return new Canvas(canvasWidth, canvasHeight);
    },
    createElementNS: function(name) {
        var Canvas = require('canvas');
        return new Canvas(canvasWidth, canvasHeight);
    }
};

var THREE = require("./threejs/three.js");
// CanvasRenderer and its helpers live outside the three.js core, so load them in
eval(fs.readFileSync("threejs/additionalRenderers.js").toString());
eval(fs.readFileSync("threejs/SceneUtils.js").toString());

const EventEmitter = require('events');
//var OS = new ShereOS()

class ThreeClient extends EventEmitter {
    constructor() {
        super();
        var self = this;
        this.appId = 667;
        self.loaded = false;
        this.bgColor = '#282c34';
        this.textColor = '#fff';
        this.tildeColor = '#0000ff';
        this.selectColor = '#ffffff';
        this.width = 500;
        this.height = 500;

        // Software renderer that draws onto the mocked node-canvas
        this.renderer = new THREE.CanvasRenderer();
        this.renderer.setSize(this.width, this.height);

        this.camera = new THREE.PerspectiveCamera(75, this.width / this.height, 0.001, 3000);
        this.camera.position.z = 2;

        this.scene = new THREE.Scene();
        this.scene.background = new THREE.Color( 0xECF8FF );
        this.scene.add( new THREE.HemisphereLight( 0x606060, 0x404040 ) );

        this.light = new THREE.DirectionalLight( 0xffffff );
        this.light.position.set( 1, 1, 1 ).normalize();
        this.scene.add( this.light );
        //console.log(this.scene.children)

        this.updated = false;

        /*
        var geometry = new THREE.SphereGeometry( 0.1, 32, 32 );
        var material = new THREE.MeshBasicMaterial( {color: 0xFF0000} );
        this.sphere = new THREE.Mesh( geometry, material );
        this.scene.add( this.sphere );
        */
    }

    getTexture() {
        // Render one frame, strip the data-URL prefix and write the PNG to disk
        this.renderer.render(this.scene, this.camera);
        var data = this.renderer.domElement.toDataURL().substr("data:image/png;base64,".length);
        var buf = Buffer.from(data, 'base64');
        fs.writeFileSync('image.png', buf);
        //return this.renderer.domElement.toDataURL().substr("data:image/png;base64,".length);
    }
}

var THREEClient = new ThreeClient();
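With the class above, a single frame can be rendered and written to disk by calling getTexture() on the instance (with only the lights in the scene you just get the cleared background, so add some geometry first):

THREEClient.getTexture();   // renders the scene and writes image.png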