I am working on a 2D-texture-based volume rendering project, and I run into a problem when I try to apply alpha testing and blending in my program. The program reads 2D frames from a file and builds one texture per frame. Here is my code:
//
// VolumeRendering.cpp
// Volume_Rendering
//
// Created by HOBBY on 4/5/14.
// Copyright (c) 2014 Yihao Jiang. All rights reserved.
//
#include <GLTools.h>
#include <GL/glew.h>
#include <OpenGL/gl.h>
#include <GLUT/glut.h>
#include <cstdio>
#include <fstream>
#include "VolumeRendering.h"
#include <GLMatrixStack.h>
#include <GLFrustum.h>
#include <GLGeometryTransform.h>
int m_uImageCount;
int m_uImageWidth;
int m_uImageHeight;
GLuint* m_puTextureIDs;
GLMatrixStack modelViewMatrix;
GLMatrixStack projectionMatrix;
GLFrame cameraFrame;
GLFrame objectFrame;
GLFrustum viewFrustum;
GLBatch myBatch;
GLGeometryTransform transformPipeline;
GLShaderManager shaderManager;
void ChangeSize(int w, int h)
{
    glViewport(0, 0, w, h);
    //viewFrustum.SetPerspective(35.0f, float(w) / float(h), 1.0f, 500.0f);
    viewFrustum.SetOrthographic(-1.0f, 1.0f, -1.0f, 1.0f, -2.0f, 2.0f);
    projectionMatrix.LoadMatrix(viewFrustum.GetProjectionMatrix());
    transformPipeline.SetMatrixStacks(modelViewMatrix, projectionMatrix);
}
void SetupRC()
{
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    shaderManager.InitializeStockShaders();
    glEnable(GL_DEPTH_TEST);
    // Discard fragments whose alpha is not greater than the reference value.
    glEnable(GL_ALPHA_TEST);
    glAlphaFunc(GL_GREATER, 0.5f);
    const char* filePath = "/Users/WensarHobby/Documents/Codes/workspace/Volume_Rendering/Volume_Rendering/head256x256x109";
    if(!InitTextures2D(filePath))
    {
        printf("InitTextures2D error\n");
    }
}
bool InitTextures2D(const char* filePath)
{
    std::fstream myFile;
    myFile.open(filePath, std::ios::in | std::ios::binary);
    if(!myFile.is_open())
    {
        return false;
    }

    // Dimensions of the raw volume: 109 slices of 256 x 256 luminance bytes.
    m_uImageCount = 109;
    m_uImageWidth = 256;
    m_uImageHeight = 256;

    // Holds the texture IDs
    m_puTextureIDs = new GLuint[m_uImageCount];

    // Holds the luminance buffer and the converted RGBA buffer
    char* chBuffer = new char[m_uImageWidth * m_uImageHeight];
    char* chRGBABuffer = new char[m_uImageWidth * m_uImageHeight * 4];
    glGenTextures(m_uImageCount, m_puTextureIDs);

    // Read each frame and construct the texture
    for( int nIndx = 0; nIndx < m_uImageCount; ++nIndx )
    {
        // Read the frame
        myFile.read(chBuffer, m_uImageWidth*m_uImageHeight);

        // Convert the data to RGBA data.
        // Here we are simply putting the same value to R, G, B and A channels.
        // Usually for raw data, the alpha value will
        // be constructed by a threshold value given by the user
        for( int nPixel = 0; nPixel < m_uImageWidth*m_uImageHeight; ++nPixel )
        {
            chRGBABuffer[nPixel*4]   = chBuffer[nPixel];
            chRGBABuffer[nPixel*4+1] = chBuffer[nPixel];
            chRGBABuffer[nPixel*4+2] = chBuffer[nPixel];
            //printf("%i ", chBuffer[nPixel]);
            // Treat the luminance byte as unsigned when thresholding.
            if( (unsigned char)chBuffer[nPixel] < 20 )
            {
                chRGBABuffer[nPixel*4+3] = 0;
            }
            else
            {
                chRGBABuffer[nPixel*4+3] = (char)255;
            }
        }

        // Set the properties of the texture.
        glBindTexture( GL_TEXTURE_2D, m_puTextureIDs[nIndx] );
        glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_uImageWidth, m_uImageHeight, 0,
                     GL_RGBA, GL_UNSIGNED_BYTE, (GLvoid *) chRGBABuffer);
        glBindTexture( GL_TEXTURE_2D, 0 );
    }

    delete[] chBuffer;
    delete[] chRGBABuffer;
    return true;
}
void SpecialKeys(int key, int x, int y)
{
    glutPostRedisplay();
}
void RenderScene(void)
{
    static GLfloat vLightPos[] = { 1.0f, 1.0f, 0.0f };
    static GLfloat vWhite[] = { 1.0f, 1.0f, 1.0f, 1.0f };

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);

    modelViewMatrix.PushMatrix();
    M3DMatrix44f mCamera;
    cameraFrame.GetCameraMatrix(mCamera);
    modelViewMatrix.MultMatrix(mCamera);
    // M3DMatrix44f mObjectFrame;
    // objectFrame.GetMatrix(mObjectFrame);
    // modelViewMatrix.MultMatrix(mObjectFrame);

    // Draw the slices back to front, one textured quad per frame.
    for(int nIndx = m_uImageCount - 1; nIndx >= 0; nIndx--)
    {
        glBindTexture(GL_TEXTURE_2D, m_puTextureIDs[nIndx]);
        MakeQuads(nIndx);
        shaderManager.UseStockShader(GLT_SHADER_TEXTURE_POINT_LIGHT_DIFF,
                                     transformPipeline.GetModelViewMatrix(),
                                     transformPipeline.GetProjectionMatrix(),
                                     vLightPos, vWhite, 0);
        myBatch.Draw();
        myBatch.Reset();
    }
    modelViewMatrix.PopMatrix();

    glutSwapBuffers();
}
void MakeQuads(int quads_index)
{
    // Depth of this slice in [-1, 1]; use floating-point division so the
    // slices are actually spread through the volume instead of collapsing
    // onto a single plane (integer division would always give 0 here).
    GLfloat z = 1.0f - 2.0f * (GLfloat)quads_index / (GLfloat)m_uImageCount;

    myBatch.Begin(GL_QUADS, 4, 1);
    myBatch.Normal3f(0.0f, 0.0f, -1.0f);
    myBatch.MultiTexCoord2f(0, 0.0f, 0.0f);
    myBatch.Vertex3f(-1.0f, -1.0f, z);
    myBatch.Normal3f(0.0f, 0.0f, -1.0f);
    myBatch.MultiTexCoord2f(0, 1.0f, 0.0f);
    myBatch.Vertex3f(1.0f, -1.0f, z);
    myBatch.Normal3f(0.0f, 0.0f, -1.0f);
    myBatch.MultiTexCoord2f(0, 1.0f, 1.0f);
    myBatch.Vertex3f(1.0f, 1.0f, z);
    myBatch.Normal3f(0.0f, 0.0f, -1.0f);
    myBatch.MultiTexCoord2f(0, 0.0f, 1.0f);
    myBatch.Vertex3f(-1.0f, 1.0f, z);
    myBatch.End();
}
void ShutdownRC(void)
{
    glDeleteTextures(m_uImageCount, m_puTextureIDs);
    delete[] m_puTextureIDs;
}
int main(int argc, char* argv[])
{
    gltSetWorkingDirectory(argv[0]);

    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH | GLUT_STENCIL);
    glutInitWindowSize(400, 400);
    glutCreateWindow("Volume_Rendering");
    glutReshapeFunc(ChangeSize);
    glutSpecialFunc(SpecialKeys);
    glutDisplayFunc(RenderScene);

    GLenum err = glewInit();
    if (GLEW_OK != err) {
        fprintf(stderr, "GLEW Error: %s\n", glewGetErrorString(err));
        return 1;
    }

    SetupRC();
    glutMainLoop();
    ShutdownRC();
    return 0;
}
As you can see, in the InitTextures2D function I convert the raw data to RGBA data. If a pixel's luminance value is below 20, I assume it is black and set its alpha value to 0 (transparent). For every other pixel the alpha value is set to 255 (opaque). So it seems to me that, since a pixel's alpha value can only be 0 or 255, the final result should always look the same no matter what value I pass as the second parameter of glAlphaFunc(GL_GREATER, reference_value). I ran some tests, and unfortunately the results are completely different.
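To spell out my assumption, this is roughly the comparison I expect the alpha test to perform (just an illustration, not code from the program):

// Illustration only: the comparison I expect GL_ALPHA_TEST with GL_GREATER to do.
// A byte alpha of 0 or 255 normalizes to 0.0f or 1.0f, so any reference value
// strictly between 0.0 and 1.0 should accept and reject exactly the same fragments.
bool PassesAlphaTest(unsigned char alphaByte, float referenceValue)
{
    float alpha = alphaByte / 255.0f;  // 0 -> 0.0f, 255 -> 1.0f
    return alpha > referenceValue;     // GL_GREATER comparison
}
// PassesAlphaTest(0, 0.1f)   == PassesAlphaTest(0, 0.99f)   -> both false
// PassesAlphaTest(255, 0.1f) == PassesAlphaTest(255, 0.99f) -> both true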
Result with glAlphaFunc(GL_GREATER, 0.1f): (screenshot omitted)
Result with glAlphaFunc(GL_GREATER, 0.5f): (screenshot omitted)
Result with glAlphaFunc(GL_GREATER, 0.99f): (screenshot omitted)
Can anyone tell me why this happens?
Answer (score: 1)
"So it seems to me that, since a pixel's alpha value can only be 0 or 255"

That is not quite right. What is true is that the alpha values of your texels will be either 0 or 255. However, you are using the GL_LINEAR texture filter, and you are not mapping your texels 1:1 to output pixels either, so you are actually sampling between texels, and when sampling across a boundary between "transparent" and "opaque" texels you can get any alpha value between 0 and 255.
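If you want the result to be independent of the reference value, one option (a minimal sketch against your InitTextures2D code) is to sample with nearest-neighbor filtering, so the alpha that reaches the alpha test is always exactly 0 or 255:

// In InitTextures2D, replace the linear filters with nearest-neighbor ones
// so the sampled alpha is never an interpolated in-between value.
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);

If you keep GL_LINEAR, the soft edge is expected, and the glAlphaFunc reference value then controls how much of that interpolated edge survives the alpha test, which is exactly the difference you are seeing between 0.1f, 0.5f and 0.99f.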