Okay, let me start by saying that I'm pretty new to OpenGL as it relates to Android; I've been reading up on it for a while, but I can't get past this hurdle when it comes to actually coding.
Right now I'm trying to write a class that loads a texture, stored as a .png file in my drawables folder, onto an .obj model I created in Blender. I did a UV unwrap of the model in Blender and then used that unwrap as the guide for the .png file.
The problem at the moment is that I can get a texture onto the model, but it comes out as a single solid color, which does seem to come from the texture file. Obviously I don't know enough about UV texturing in Blender, but there are also so many different OpenGL libraries, and so much changes between PC and Android, that I'm having trouble figuring out where to even look.
I'd really appreciate any help. Here is some of the relevant code; I'll post more as needed:
From TextureLoader:
public Texture getTexture(GL10 gl, final int ref) throws IOException {
    Texture tex = (Texture) table.get(ref);
    if (tex != null) {
        return tex;
    }
    Log.i("Textures:", "Loading texture: " + ref);
    tex = getTexture(gl, ref,
            GL10.GL_TEXTURE_2D, // target
            GL10.GL_RGBA,       // dst pixel format
            GL10.GL_LINEAR,     // min filter (unused)
            GL10.GL_NEAREST);   // mag filter
    table.put(ref, tex);
    return tex;
}
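For reference, this is roughly how I end up calling it once the GL surface exists (a sketch; the loader field and the R.drawable name here are placeholders, not my exact renderer code):

public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    try {
        // placeholder resource id; my real texture is a .png in res/drawable
        texture = textureLoader.getTexture(gl, R.drawable.model_texture);
    } catch (IOException e) {
        Log.e("Textures:", "Could not load texture", e);
    }
}

And the overload it delegates to: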
public Texture getTexture(GL10 gl, final int ref,
        int target,
        int dstPixelFormat,
        int minFilter,
        int magFilter) throws IOException {
    if (!sReady) {
        throw new RuntimeException("Texture Loader not prepared");
    }
    int srcPixelFormat = 0;

    // create the texture ID for this texture
    int id = createID(gl);
    Texture texture = new Texture(target, id);

    // bind this texture
    gl.glBindTexture(target, id);

    Bitmap bitmap = loadImage(ref);
    texture.setWidth(bitmap.getWidth());
    texture.setHeight(bitmap.getHeight());

    if (bitmap.hasAlpha()) {
        srcPixelFormat = GL10.GL_RGBA;
    } else {
        srcPixelFormat = GL10.GL_RGB;
    }

    // convert that image into a byte buffer of texture data
    ByteBuffer textureBuffer = convertImageData(bitmap);

    if (target == GL10.GL_TEXTURE_2D) {
        gl.glTexParameterf(target, GL10.GL_TEXTURE_MIN_FILTER, minFilter);
        gl.glTexParameterf(target, GL10.GL_TEXTURE_MAG_FILTER, magFilter);
    }

    // upload the bitmap to the currently bound texture
    GLUtils.texImage2D(target, 0, bitmap, 0);
    /*gl.glTexImage2D(target,
            0,
            dstPixelFormat,
            get2Fold(bitmap.getWidth()),
            get2Fold(bitmap.getHeight()),
            0,
            srcPixelFormat,
            GL10.GL_UNSIGNED_BYTE,
            textureBuffer);*/

    bitmap.recycle();
    return texture;
}
/**
 * Get the smallest power of 2 that is greater than or equal to the given number
 *
 * @param fold The target number
 * @return The power of 2 (e.g. get2Fold(200) == 256, get2Fold(256) == 256)
 */
private int get2Fold(int fold) {
    int ret = 2;
    while (ret < fold) {
        ret *= 2;
    }
    return ret;
}
/**
 * Convert the bitmap into a buffer of texture data
 *
 * @param bitmap The image to convert to a texture
 * @return A buffer containing the data
 */
private ByteBuffer convertImageData(Bitmap bitmap) {
    ByteBuffer imageBuffer = null;
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    // compress() writes PNG-encoded bytes, not raw pixels, into the stream
    bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
    byte[] data = stream.toByteArray();
    imageBuffer = ByteBuffer.allocateDirect(data.length);
    imageBuffer.order(ByteOrder.nativeOrder());
    imageBuffer.put(data, 0, data.length);
    imageBuffer.flip();
    return imageBuffer;
}
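Just to be clear about that buffer: since the glTexImage2D call above is commented out, convertImageData isn't actually used at the moment. My understanding is that if I switched back to that call, the buffer would need raw pixel data rather than PNG-compressed bytes, something like this (a sketch, not code I'm currently running):

private ByteBuffer getRawPixels(Bitmap bitmap) {
    // allocate enough room for every row of uncompressed pixels
    ByteBuffer pixelBuffer = ByteBuffer.allocateDirect(bitmap.getRowBytes() * bitmap.getHeight());
    pixelBuffer.order(ByteOrder.nativeOrder());
    // copyPixelsToBuffer writes the raw pixels, unlike compress()
    bitmap.copyPixelsToBuffer(pixelBuffer);
    pixelBuffer.position(0);
    return pixelBuffer;
}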
/**
 * Creates an integer buffer to hold the specified number of ints
 * - strictly a utility method
 *
 * @param size how many ints to contain
 * @return created IntBuffer
 */
protected IntBuffer createIntBuffer(int size) {
    ByteBuffer temp = ByteBuffer.allocateDirect(4 * size);
    temp.order(ByteOrder.nativeOrder());
    return temp.asIntBuffer();
}
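createID (used in getTexture above) isn't pasted here; it's essentially just a wrapper around glGenTextures using that helper, roughly like this (a sketch; it may differ slightly from my actual method):

private int createID(GL10 gl) {
    IntBuffer idBuffer = createIntBuffer(1);
    gl.glGenTextures(1, idBuffer); // ask GL for one unused texture name
    return idBuffer.get(0);
}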
private Bitmap loadImage(int ref) {
    Bitmap bitmap = null;
    Matrix flip = new Matrix();
    flip.postScale(1f, -1f); // flip the bitmap vertically
    // This will tell the BitmapFactory to not scale based on the device's pixel density:
    BitmapFactory.Options opts = new BitmapFactory.Options();
    opts.inScaled = false;
    Bitmap temp = BitmapFactory.decodeResource(sContext.getResources(), ref, opts);
    bitmap = Bitmap.createBitmap(temp, 0, 0, temp.getWidth(), temp.getHeight(), flip, true);
    temp.recycle();
    return bitmap;
}
From Texture:
public void bind(GL10 gl) {
    gl.glBindTexture(target, textureID);
    gl.glEnable(GL10.GL_TEXTURE_2D);
}
And where it gets called:
public void render() {
    // Clear the screen and depth buffer
    gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
    gl.glEnable(GL10.GL_LIGHTING);
    gl.glPushMatrix();
    gl.glTranslatef(0.0f, -1.2f, z); // Move down 1.2 units and into the screen
    gl.glRotatef(xrot, 1.0f, 0.0f, 0.0f); // X
    gl.glRotatef(yrot, 0.0f, 1.0f, 0.0f); // Y
    texture.bind(gl);
    model.draw(gl);
    gl.glPopMatrix();
}
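In case the problem is on the model side rather than in the loader, this is my rough understanding of what model.draw(gl) has to do for the UVs to be used at all (a sketch with placeholder buffers and counts, not my actual Model class):

public void draw(GL10 gl) {
    gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
    gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
    // vertexBuffer holds 3 floats per vertex, texCoordBuffer holds 2 floats per vertex (the UVs from the .obj)
    gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
    gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, texCoordBuffer);
    gl.glDrawArrays(GL10.GL_TRIANGLES, 0, vertexCount);
    gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
    gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
}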