I am trying to reproduce the three.js panorama dualfisheye example using Three.js r71.
I need to stick with r71 because I will eventually use this code with the Autodesk Forge Viewer, which is based on Three.js r71.
I have made some progress, but I am getting an error message in the browser's JavaScript console that says: RENDER WARNING: there is no texture bound to the unit 0
var camera, scene, renderer;

var isUserInteracting = false,
    onMouseDownMouseX = 0, onMouseDownMouseY = 0,
    lon = 0, onMouseDownLon = 0,
    lat = 0, onMouseDownLat = 0,
    phi = 0, theta = 0,
    distance = 500;

init();
animate();

function init() {
    var container, mesh;
    container = document.getElementById('container');
    camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 1, 2000);
    scene = new THREE.Scene();
    // var geometry = new THREE.SphereBufferGeometry( 500, 60, 40 ).toNonIndexed();
    var geometry = new THREE.SphereGeometry(500, 60, 40);
    // invert the geometry on the x-axis so that all of the faces point inward
    // geometry.scale( - 1, 1, 1 );
    geometry.applyMatrix(new THREE.Matrix4().makeScale(-1, 1, 1));
    // Remap UVs
    // var normals = geometry.attributes.normal.array;
    var normals = [];
    geometry.faces.forEach(element => {
        normals.push(element.normal)
    });
    var uvs = geometry.faceVertexUvs
    // var uvs = geometry.attributes.uv.array;
    for (var i = 0, l = normals.length / 3; i < l; i++) {
        var x = normals[i * 3 + 0];
        var y = normals[i * 3 + 1];
        var z = normals[i * 3 + 2];
        if (i < l / 2) {
            var correction = (x == 0 && z == 0) ? 1 : (Math.acos(y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
            uvs[i * 2 + 0] = x * (404 / 1920) * correction + (447 / 1920);
            uvs[i * 2 + 1] = z * (404 / 1080) * correction + (582 / 1080);
        } else {
            var correction = (x == 0 && z == 0) ? 1 : (Math.acos(- y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
            uvs[i * 2 + 0] = - x * (404 / 1920) * correction + (1460 / 1920);
            uvs[i * 2 + 1] = z * (404 / 1080) * correction + (582 / 1080);
        }
    }
    geometry.applyMatrix(new THREE.Matrix4().makeRotationZ(-Math.PI / 2))
    // geometry.rotateZ( - Math.PI / 2 );
    //
    // var texture = new THREE.TextureLoader().load( 'ricoh_theta_s.jpg' );
    var texture = new THREE.TextureLoader('https://preview.ibb.co/hZXYmz/ricoh_theta_s.jpg');
    this.texture = texture;
    texture.format = THREE.RGBFormat;
    var material = new THREE.MeshBasicMaterial({ map: texture });
    material.map.repeat = { x: 0, y: 0 }
    material.map.offset = { x: 0, y: 0 };
    mesh = new THREE.Mesh(geometry, material);
    scene.add(mesh);
    renderer = new THREE.WebGLRenderer();
    renderer.setPixelRatio(window.devicePixelRatio);
    renderer.setSize(window.innerWidth, window.innerHeight);
    container.appendChild(renderer.domElement);
    document.addEventListener('mousedown', onDocumentMouseDown, false);
    document.addEventListener('mousemove', onDocumentMouseMove, false);
    document.addEventListener('mouseup', onDocumentMouseUp, false);
    document.addEventListener('wheel', onDocumentMouseWheel, false);
    //
    window.addEventListener('resize', onWindowResize, false);
}

function onWindowResize() {
    camera.aspect = window.innerWidth / window.innerHeight;
    camera.updateProjectionMatrix();
    renderer.setSize(window.innerWidth, window.innerHeight);
}

function onDocumentMouseDown(event) {
    event.preventDefault();
    isUserInteracting = true;
    onPointerDownPointerX = event.clientX;
    onPointerDownPointerY = event.clientY;
    onPointerDownLon = lon;
    onPointerDownLat = lat;
}

function onDocumentMouseMove(event) {
    if (isUserInteracting === true) {
        lon = (onPointerDownPointerX - event.clientX) * 0.1 + onPointerDownLon;
        lat = (onPointerDownPointerY - event.clientY) * 0.1 + onPointerDownLat;
    }
}

function onDocumentMouseUp(event) {
    isUserInteracting = false;
}

function onDocumentMouseWheel(event) {
    distance += event.deltaY * 0.05;
    distance = THREE.Math.clamp(distance, 400, 1000);
}

function animate() {
    // requestAnimationFrame(animate);
    update();
}

function update() {
    if (isUserInteracting === false) {
        lon += 0.1;
    }
    lat = Math.max(- 85, Math.min(85, lat));
    phi = THREE.Math.degToRad(90 - lat);
    theta = THREE.Math.degToRad(lon - 180);
    camera.position.x = distance * Math.sin(phi) * Math.cos(theta);
    camera.position.y = distance * Math.cos(phi);
    camera.position.z = distance * Math.sin(phi) * Math.sin(theta);
    camera.lookAt(scene.position);
    renderer.render(scene, camera);
}
body {
    background-color: #000000;
    margin: 0px;
    overflow: hidden;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/71/three.js"></script>
<div id="container"></div>
Thank you for your time.
Answer 0 (score: 1)
There are quite a few issues with the code.
The texture loading code is wrong for r71. It should be something like this:
THREE.ImageUtils.crossOrigin = '';
var texture = THREE.ImageUtils.loadTexture('https://preview.ibb.co/hZXYmz/ricoh_theta_s.jpg');
IIRC, THREE r71 did not pre-initialize textures with renderable content, so you need to wait for the texture to load:
var texture = THREE.ImageUtils.loadTexture(
    'https://preview.ibb.co/hZXYmz/ricoh_theta_s.jpg',
    undefined,
    animate); // call animate after texture has loaded
and remove the call to animate at the top of the script.

That will get rid of the warning, but continuing on:
The code sets the repeat to 0
material.map.repeat = { x: 0, y: 0 };
material.map.offset = { x: 0, y: 0 };
Setting the repeat to 0 means you will only ever see the first pixel of the texture, because all of the UVs get multiplied by 0.
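To see why, here is a rough sketch in plain JavaScript of the effective UV transform the material applies (illustrative only, not the actual r71 shader code; the function name is made up):

function transformUV(u, v, repeat, offset) {
    // three.js effectively computes uv * repeat + offset per coordinate
    return [u * repeat.x + offset.x, v * repeat.y + offset.y];
}

// With repeat 0,0 every UV collapses to the same point, so the whole
// sphere samples a single texel:
transformUV(0.37, 0.82, { x: 0, y: 0 }, { x: 0, y: 0 }); // -> [0, 0]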
The code sets repeat and offset incorrectly.
The correct way to set repeat and offset is:
material.map.repeat.set(1, 1);
material.map.offset.set(0, 0);
Setting them the other way happens to work, but only by luck. Those two settings are THREE.Vector2 objects. Code that uses repeat and offset could change at any time to call methods on THREE.Vector2, or to pass repeat and offset to functions that expect a THREE.Vector2, so it is best not to replace them.
Note, though, that there is no reason to set them at all. The defaults are 1 1 for repeat and 0 0 for offset.
The code only renders once
requestAnimationFrame has been commented out. Textures load asynchronously, so you will not see the texture for the first few frames. You either need to wait for the texture to load before rendering, render again after it has finished loading, or render continuously so the texture is picked up once it loads.
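As one illustration of the "render again after it has loaded" option, here is a minimal sketch using the r71 loadTexture onLoad callback (assuming renderer, scene, and camera are already set up as in the code above):

var texture = THREE.ImageUtils.loadTexture(
    'https://preview.ibb.co/hZXYmz/ricoh_theta_s.jpg',
    undefined,
    function () {
        // the texture is ready now, so a single render will show it
        renderer.render(scene, camera);
    });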
The code is using a cross-origin image
This is not actually an error, only a warning. WebGL cannot use cross-origin images unless the server itself gives permission. The one your code links to does grant that permission, but I am not sure whether you knew that or just got lucky. Most images from servers that are not your own are unlikely to work.
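If you want to check whether a particular host grants that permission, one rough way (my own addition, not part of the original answer) is to request the image in CORS mode and see whether the request succeeds:

fetch('https://preview.ibb.co/hZXYmz/ricoh_theta_s.jpg', { mode: 'cors' })
    .then(function () { console.log('server allows cross-origin use'); })
    .catch(function () { console.log('blocked: WebGL cannot use this image cross-origin'); });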
The code's UV math is wrong
You should ask that as a separate question. It is commented out below so that I could see the texture.
var camera, scene, renderer;

var isUserInteracting = false,
    onMouseDownMouseX = 0, onMouseDownMouseY = 0,
    lon = 0, onMouseDownLon = 0,
    lat = 0, onMouseDownLat = 0,
    phi = 0, theta = 0,
    distance = 500;

init();

function init() {
    var container, mesh;
    container = document.getElementById('container');
    camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 1, 2000);
    scene = new THREE.Scene();
    // var geometry = new THREE.SphereBufferGeometry( 500, 60, 40 ).toNonIndexed();
    var geometry = new THREE.SphereGeometry(500, 60, 40);
    // invert the geometry on the x-axis so that all of the faces point inward
    // geometry.scale( - 1, 1, 1 );
    geometry.applyMatrix(new THREE.Matrix4().makeScale(-1, 1, 1));
    // Remap UVs
    // var normals = geometry.attributes.normal.array;
    var normals = [];
    geometry.faces.forEach(element => {
        normals.push(element.normal)
    });
    var uvs = geometry.faceVertexUvs
    // var uvs = geometry.attributes.uv.array;
    for (var i = 0, l = normals.length / 3; i < l; i++) {
        var x = normals[i * 3 + 0];
        var y = normals[i * 3 + 1];
        var z = normals[i * 3 + 2];
        if (i < l / 2) {
            var correction = (x == 0 && z == 0) ? 1 : (Math.acos(y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
            // uvs[i * 2 + 0] = x * (404 / 1920) * correction + (447 / 1920);
            // uvs[i * 2 + 1] = z * (404 / 1080) * correction + (582 / 1080);
        } else {
            var correction = (x == 0 && z == 0) ? 1 : (Math.acos(- y) / Math.sqrt(x * x + z * z)) * (2 / Math.PI);
            // uvs[i * 2 + 0] = - x * (404 / 1920) * correction + (1460 / 1920);
            // uvs[i * 2 + 1] = z * (404 / 1080) * correction + (582 / 1080);
        }
    }
    geometry.applyMatrix(new THREE.Matrix4().makeRotationZ(-Math.PI / 2))
    // geometry.rotateZ( - Math.PI / 2 );
    //
    THREE.ImageUtils.crossOrigin = '';
    var texture = THREE.ImageUtils.loadTexture('https://preview.ibb.co/hZXYmz/ricoh_theta_s.jpg', undefined, animate);
    var material = new THREE.MeshBasicMaterial({ map: texture });
    material.map.repeat.set(1, 1);
    material.map.offset.set(0, 0);
    mesh = new THREE.Mesh(geometry, material);
    scene.add(mesh);
    renderer = new THREE.WebGLRenderer();
    renderer.setPixelRatio(window.devicePixelRatio);
    renderer.setSize(window.innerWidth, window.innerHeight);
    container.appendChild(renderer.domElement);
    document.addEventListener('mousedown', onDocumentMouseDown, false);
    document.addEventListener('mousemove', onDocumentMouseMove, false);
    document.addEventListener('mouseup', onDocumentMouseUp, false);
    document.addEventListener('wheel', onDocumentMouseWheel, false);
    //
    window.addEventListener('resize', onWindowResize, false);
}

function onWindowResize() {
    camera.aspect = window.innerWidth / window.innerHeight;
    camera.updateProjectionMatrix();
    renderer.setSize(window.innerWidth, window.innerHeight);
}

function onDocumentMouseDown(event) {
    event.preventDefault();
    isUserInteracting = true;
    onPointerDownPointerX = event.clientX;
    onPointerDownPointerY = event.clientY;
    onPointerDownLon = lon;
    onPointerDownLat = lat;
}

function onDocumentMouseMove(event) {
    if (isUserInteracting === true) {
        lon = (onPointerDownPointerX - event.clientX) * 0.1 + onPointerDownLon;
        lat = (onPointerDownPointerY - event.clientY) * 0.1 + onPointerDownLat;
    }
}

function onDocumentMouseUp(event) {
    isUserInteracting = false;
}

function onDocumentMouseWheel(event) {
    distance += event.deltaY * 0.05;
    distance = THREE.Math.clamp(distance, 400, 1000);
}

function animate() {
    requestAnimationFrame(animate);
    update();
}

function update() {
    if (isUserInteracting === false) {
        lon += 0.1;
    }
    lat = Math.max(- 85, Math.min(85, lat));
    phi = THREE.Math.degToRad(90 - lat);
    theta = THREE.Math.degToRad(lon - 180);
    camera.position.x = distance * Math.sin(phi) * Math.cos(theta);
    camera.position.y = distance * Math.cos(phi);
    camera.position.z = distance * Math.sin(phi) * Math.sin(theta);
    camera.lookAt(scene.position);
    renderer.render(scene, camera);
}
body {
    background-color: #000000;
    margin: 0px;
    overflow: hidden;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/71/three.js"></script>
<div id="container"></div>
Answer 1 (score: 0)
For those who are just looking for an answer about the warning
RENDER WARNING: there is no texture bound to the unit 0
Chrome issues it when a draw call samples a texture unit that has no texture bound to it.
Source and further links: https://github.com/NASAWorldWind/WebWorldWind/issues/302#issuecomment-346188472
The workaround is to always bind a texture to the shader's sampler, even if the shader does not use it.
As gman suggests in his longer answer, binding a 1px white texture when there is "no texture" is good practice: it takes up hardly any space or bandwidth, and shader code can multiply another color by it without changing that color.
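A minimal sketch of that workaround in raw WebGL, assuming gl is an existing WebGLRenderingContext (the variable names are illustrative):

// create a 1x1 opaque white texture once, up front
var whiteTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, whiteTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE,
    new Uint8Array([255, 255, 255, 255]));

// later, before any draw call whose shader declares a sampler on unit 0
// but has no real texture to bind yet:
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, whiteTexture);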