I am rendering camera data on a GLSurfaceView. Everything works, except that the image is slightly stretched in landscape mode. I have read http://www.learnopengles.com/understanding-opengls-matrices and tried the solution described there; see the "Adjusting to the screen's aspect ratio" section of that page. My attempt is shown below. The code comes from my Renderer class, which implements the GLSurfaceView.Renderer interface:
private float[] vertices;
private float[] verticesFrontCameraPortrait = new float[]{1.f, -1.f, -1.f, -1.f, -1.f, 1.f, 1.f, 1.f};
private float[] verticesFrontCamera_90_Degree_Right = new float[]{-1.f, -1.f, -1.f, 1.f, 1.f, 1.f, 1.f, -1.f};
private float[] verticesFrontCamera_270_Degree_right = new float[]{1.f, 1.f, 1.f, -1.f, -1.f, -1.f, -1.f, 1.f};

@Override
public void onSurfaceChanged(GL10 gl10, int width, int height) {
    GLES20.glViewport(0, 0, width, height);
    float aspectRatio = (float) width / (float) height;
    Matrix.orthoM(vertices, 0, -aspectRatio, aspectRatio, -1, 1, -1, 1);
}
My vertices variable holds a reference to one of the three vertex arrays above, depending on the device configuration. That part works perfectly. The problem appears when I try to do the work shown in the onSurfaceChanged(GL10 gl10, int width, int height) method: it crashes my app with an ArrayIndexOutOfBoundsException. But if I leave out these two lines:
float aspectRatio = (float) width / (float) height;
Matrix.orthoM(vertices, 0, -aspectRatio, aspectRatio, -1, 1, -1, 1);
everything works fine, but the video/camera image is stretched in landscape mode. I do not want a stretched image rendered on my GLSurfaceView.
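For reference, android.opengl.Matrix.orthoM writes a full 4x4 projection matrix, i.e. 16 floats, into its first argument, while my vertices arrays each hold only 8 floats. A minimal sketch of the usual setup, with a dedicated 16-element target array that is then uploaded to the vertex shader, would look like this (the u_projection uniform name is only an assumption for illustration; the vertex_shader source is not shown here):

// sketch: give orthoM its own 16-element matrix instead of the vertex array
private final float[] projectionMatrix = new float[16];

@Override
public void onSurfaceChanged(GL10 gl10, int width, int height) {
    GLES20.glViewport(0, 0, width, height);
    float aspectRatio = (float) width / (float) height;
    Matrix.orthoM(projectionMatrix, 0, -aspectRatio, aspectRatio, -1, 1, -1, 1);
}

// then in onDrawFrame, after glUseProgram, upload it (assumes the vertex
// shader declares "uniform mat4 u_projection" -- a hypothetical name)
int projectionLocation = GLES20.glGetUniformLocation(shaderProgram, "u_projection");
GLES20.glUniformMatrix4fv(projectionLocation, 1, false, projectionMatrix, 0);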
Here is my complete Renderer class.
import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.Log;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

/**
 * IMPORTANT - PLEASE READ BEFORE CHANGING
 *
 * This class renders an NV21 (YV12) image byte array in a GLSurfaceView using OpenGL.
 * The NV21 image format has 12 bits per pixel: 8 bits for luminance (Y) and 4 bits
 * for chrominance (UV). So the yBuffer size equals the number of pixels and the
 * uvBuffer size is half the number of pixels. The y texture height and width also
 * match the image height and width. The first 2/3 of the input image array are Y
 * values and the last 1/3 are UV values ordered v0u0v1u1... (alternating V and U
 * values). The GL_LUMINANCE and GL_LUMINANCE_ALPHA formats are therefore used to
 * upload the yBuffer and uvBuffer respectively, and the fragment shader reads the
 * U value from the alpha channel and the V value from the red channel (the green or
 * blue channel would give the same result). The uv texture height and width are each
 * 1/2 of the original image height and width (so 1/4 of the pixels).
 *
 * yTexture and uvTexture must use GL_TEXTURE0 + 1 (GL_TEXTURE1) and GL_TEXTURE0 + 2
 * (GL_TEXTURE2). If GL_TEXTURE0 is used for the yTexture, it does not work on some
 * devices.
 */
public class VideoCallImageRenderer implements GLSurfaceView.Renderer {
private final String TAG = "VideoCallImageRenderer";
private int cameraType; // 0 for back camera, 1 for front camera, 2 for opponent camera
private Context context;
private int shaderProgram;
short[] indices = new short[]{0, 1, 2, 0, 2, 3};
// y texture handle
private int[] yTexture = new int[1];
// uv texture handle
private int[] uvTexture = new int[1];
// texture coordinate and vertices buffers
private FloatBuffer texCoordBuffer, vertexBuffer;
// indices buffer
private ShortBuffer indexBuffer;
// y and uv texture buffers
private ByteBuffer yBuffer, uvBuffer;
// image height and width
private int width = 0, height = 0;
// true when a valid image data is set. default value false.
private boolean render = false;
// position attribute location handle in vertex shader
private int positionLocation;
// texture coordinate attribute location handle in vertex shader
private int textureCoordinateLocation;
// y_texture sampler2D location handle in fragment shader
private int yTextureLocation;
// uv_texture sampler2D location handle in fragment shader
private int uvTextureLocation;
private final float bytePerPixel = 1.5f;
/* These vertex values are changed to rotate the image. Explanation:
   let  1.f,  1.f = A
       -1.f,  1.f = B
       -1.f, -1.f = C
        1.f, -1.f = D
   so ABCD = back camera in its normal state;
   to rotate 90 degrees right the order becomes DABC,
   to rotate 90 degrees left the order becomes BCDA.
private float[] vertices = new float[]{
1.f, -1.f,
-1.f, -1.f,
-1.f, 1.f,
1.f, 1.f
};
*/
private float[] vertices;
private float[] verticesPortrait;
private float[] vertices_90_Degree_Right;
private float[] vertices_270_Degree_Right;
private int currentDeviceOrientation = -1;
private float[] verticesFrontCameraPortrait = new float[]{1.f, -1.f, -1.f, -1.f, -1.f, 1.f, 1.f, 1.f};
private float[] verticesFrontCamera_90_Degree_Right = new float[]{-1.f, -1.f, -1.f, 1.f, 1.f, 1.f, 1.f, -1.f,};
private float[] verticesFrontCamera_270_Degree_right = new float[]{1.f, 1.f, 1.f, -1.f, -1.f, -1.f, -1.f, 1.f,};
private float[] verticesBackCameraPortrait = new float[]{1.f, 1.f, -1.f, 1.f, -1.f, -1.f, 1.f, -1.f};
private float[] verticesBackCamera_90_Degree_Right = new float[]{1.f, -1.f, 1.f, 1.f, -1.f, 1.f, -1.f, -1.f,};
private float[] verticesBackCamera_270_Degree_Right = new float[]{-1.f, 1.f, -1.f, -1.f, 1.f, -1.f, 1.f, 1.f,};
private float[] verticesOpponentCamera = new float[]{1.f, 1.f, 1.f, -1.f, -1.f, -1.f, -1.f, 1.f};
private float[] verticeOpponentCamera_90_Degree_Right = new float[]{1.f, -1.f, -1.f, -1.f, -1.f, 1.f, 1.f, 1.f,};
private float[] verticeOpponentCamera_270_Degree_Right = new float[]{-1.f, 1.f, 1.f, 1.f, 1.f, -1.f, -1.f, -1.f,};
private float[] texCoords = new float[]{
0.f, 0.f,
0.f, 1.f,
1.f, 1.f,
1.f, 0.f
};
public VideoCallImageRenderer(Context context, int cameraType) {
this.context = context;
this.cameraType = cameraType;
if (cameraType == 0) {
verticesPortrait = verticesBackCameraPortrait;
vertices_90_Degree_Right = verticesBackCamera_90_Degree_Right;
vertices_270_Degree_Right = verticesBackCamera_270_Degree_Right;
} else if (cameraType == 1) {
verticesPortrait = verticesFrontCameraPortrait;
vertices_90_Degree_Right = verticesFrontCamera_90_Degree_Right;
vertices_270_Degree_Right = verticesFrontCamera_270_Degree_right;
} else {
verticesPortrait = verticesOpponentCamera;
vertices_90_Degree_Right = verticeOpponentCamera_90_Degree_Right;
vertices_270_Degree_Right = verticeOpponentCamera_270_Degree_Right;
}
// initialize texture coordinate buffer
ByteBuffer tcbb = ByteBuffer.allocateDirect(texCoords.length * 4);
tcbb.order(ByteOrder.nativeOrder());
texCoordBuffer = tcbb.asFloatBuffer();
texCoordBuffer.put(texCoords);
texCoordBuffer.position(0);
// initialize vertices buffer
vertices = verticesPortrait;
ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
vbb.order(ByteOrder.nativeOrder());
vertexBuffer = vbb.asFloatBuffer();
vertexBuffer.put(vertices);
vertexBuffer.position(0);
// initialize indices buffer
ByteBuffer ibb = ByteBuffer.allocateDirect(indices.length * 2);
ibb.order(ByteOrder.nativeOrder());
indexBuffer = ibb.asShortBuffer();
indexBuffer.put(indices);
indexBuffer.position(0);
}
public void resetVertex(float[] new_vertices) {
Constants.debugLog(TAG, "resetVertex");
ByteBuffer vbb = ByteBuffer.allocateDirect(new_vertices.length * 4);
vbb.order(ByteOrder.nativeOrder());
vertexBuffer = vbb.asFloatBuffer();
vertexBuffer.put(new_vertices);
vertexBuffer.position(0);
}
public void setImageBuffer(final byte[] imageBytes, int height, int width, int iDeviceOrientation) {
// reinitialize texture buffers if width or height changes
Constants.debugLog(TAG, "setImageBuffer device Orientation == " + iDeviceOrientation);
if (currentDeviceOrientation == -1) {
currentDeviceOrientation = iDeviceOrientation;
}
if (iDeviceOrientation != currentDeviceOrientation) {
currentDeviceOrientation = iDeviceOrientation;
switch (currentDeviceOrientation) {
case 0:
vertices = verticesPortrait;
resetVertex(vertices);
break;
case 1:
vertices = vertices_90_Degree_Right;
resetVertex(vertices);
break;
case 3:
vertices = vertices_270_Degree_Right;
resetVertex(vertices);
break;
}
}
final boolean resolutionChanged = this.width != width || this.height != height;
if (resolutionChanged) {
this.width = width;
this.height = height;
final int numberOfPixels = this.height * this.width;
this.yBuffer = ByteBuffer.allocateDirect(numberOfPixels);
this.yBuffer.order(ByteOrder.nativeOrder());
this.uvBuffer = ByteBuffer.allocateDirect(numberOfPixels / 2);
this.uvBuffer.order(ByteOrder.nativeOrder());
}
this.render = updateYUVBuffers(imageBytes);
}
private boolean updateYUVBuffers(final byte[] imageBytes) {
final int numberOfPixels = this.height * this.width;
final int numberOfExpectedBytes = (int) (numberOfPixels * this.bytePerPixel);
// bail out if the frame is missing or does not match the expected NV21 size
if (imageBytes == null || imageBytes.length != numberOfExpectedBytes) {
return false;
}
// put image bytes into texture buffers
yBuffer.put(imageBytes, 0, numberOfPixels);
yBuffer.position(0);
uvBuffer.put(imageBytes, numberOfPixels, numberOfPixels / 2);
uvBuffer.position(0);
return true;
}
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
createShader();
positionLocation = GLES20.glGetAttribLocation(shaderProgram, "a_position");
textureCoordinateLocation = GLES20.glGetAttribLocation(shaderProgram, "a_texCoord");
// generate y texture
yTextureLocation = GLES20.glGetUniformLocation(shaderProgram, "y_texture");
GLES20.glGenTextures(1, yTexture, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + 1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yTexture[0]);
// generate uv texture
uvTextureLocation = GLES20.glGetUniformLocation(shaderProgram, "uv_texture");
GLES20.glGenTextures(1, uvTexture, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + 2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, uvTexture[0]);
// clear display color
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
}
@Override
public void onSurfaceChanged(GL10 gl10, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float aspectRatio = (float) width / (float) height;
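// NOTE: Matrix.orthoM fills a 4x4 matrix (16 floats) starting at the given
// offset, but "vertices" holds only 8 floats, so the call below overflows it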
Matrix.orthoM(vertices, 0, -aspectRatio, aspectRatio, -1, 1, -1, 1);
}
@Override
public void onDrawFrame(GL10 gl10) {
// clear display
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
if (render) {
GLES20.glUseProgram(shaderProgram);
GLES20.glVertexAttribPointer(positionLocation, 2,
GLES20.GL_FLOAT, false,
0, vertexBuffer);
GLES20.glVertexAttribPointer(textureCoordinateLocation, 2, GLES20.GL_FLOAT,
false,
0, texCoordBuffer);
GLES20.glEnableVertexAttribArray(positionLocation);
GLES20.glEnableVertexAttribArray(textureCoordinateLocation);
// create and update y texture
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + 1);
GLES20.glUniform1i(yTextureLocation, 1);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, this.width,
this.height, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, this.yBuffer);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
// create and update uv texture
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + 2);
GLES20.glUniform1i(uvTextureLocation, 2);
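// each GL_LUMINANCE_ALPHA texel holds one VU pair covering a 2x2 block of
// image pixels, hence the half width and half height here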
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE_ALPHA, this.width / 2,
this.height / 2, 0, GLES20.GL_LUMINANCE_ALPHA, GLES20.GL_UNSIGNED_BYTE, this.uvBuffer);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
// render image
GLES20.glDrawElements(GLES20.GL_TRIANGLES, indices.length,
GLES20.GL_UNSIGNED_SHORT, indexBuffer);
GLES20.glDisableVertexAttribArray(positionLocation);
GLES20.glDisableVertexAttribArray(textureCoordinateLocation);
}
}
void createShader() {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER,
CallConstants.readRawTextFile(context, R.raw.vertex_shader));
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER,
CallConstants.readRawTextFile(context, R.raw.fragment_shader));
shaderProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(shaderProgram, vertexShader);
GLES20.glAttachShader(shaderProgram, fragmentShader);
GLES20.glLinkProgram(shaderProgram);
GLES20.glUseProgram(shaderProgram);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(shaderProgram, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e("Render", "Could not link program: ");
Log.e("Render", GLES20.glGetProgramInfoLog(shaderProgram));
GLES20.glDeleteProgram(shaderProgram);
shaderProgram = 0;
}
// free up no longer needed shader resources
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(fragmentShader);
}
public int loadShader(int type, String shaderCode) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
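To make the Y/UV sampling described in the class comment concrete, here is a minimal sketch of a fragment shader consistent with it, written as a Java string constant. It is only an illustration with assumed names (v_texCoord) and common BT.601-style conversion constants; my actual R.raw.fragment_shader is not shown above and may differ:

// hypothetical sketch only -- not the shader shipped in R.raw.fragment_shader;
// Y comes from the GL_LUMINANCE texture, V from the red channel and U from the
// alpha channel of the GL_LUMINANCE_ALPHA texture, as the class comment describes
static final String FRAGMENT_SHADER_SKETCH =
        "precision mediump float;\n" +
        "uniform sampler2D y_texture;\n" +
        "uniform sampler2D uv_texture;\n" +
        "varying vec2 v_texCoord;\n" +
        "void main() {\n" +
        "    float y = texture2D(y_texture, v_texCoord).r;\n" +
        "    float v = texture2D(uv_texture, v_texCoord).r - 0.5;\n" +
        "    float u = texture2D(uv_texture, v_texCoord).a - 0.5;\n" +
        "    float r = y + 1.370705 * v;\n" +
        "    float g = y - 0.337633 * u - 0.698001 * v;\n" +
        "    float b = y + 1.732446 * u;\n" +
        "    gl_FragColor = vec4(r, g, b, 1.0);\n" +
        "}";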