使用视频流作为开放GL ES 2.0纹理

时间:2015-05-04 10:44:24

标签: android opengl-es-2.0 google-cardboard

我正在尝试通过将Open GL ES纹理设置为android TextureView来捕获视频并将其显示在屏幕上。由于我使用的是Google Cardboard,因此我无法使用SurfaceTextureListener并按照this tutorial实施<core_glBindTexture:572>: GL_INVALID_OPERATION

我已按照the Android documentation关于如何初始化Open GL ES 2.0并使用它,以及this tutorial关于纹理。

将2放在一起我得到一个空白屏幕,偶尔在控制台窗口中得到onSurfaceCreated()

我不知道的那么多新概念让我感到不知所措,我无法调试或只是理解这两种方法是否可以像这样使用。这是我的绘图代码,它在MainActivity类的onEyeDraw()中初始化,并从package com.example.rich.test3; import android.hardware.Camera; import android.opengl.GLES20; import android.view.TextureView; import java.nio.ShortBuffer; import java.nio.FloatBuffer; import java.nio.ByteBuffer; import java.nio.ByteOrder; /** * Created by rich on 03/05/2015. */ public class Square { private java.nio.FloatBuffer vertexBuffer; private java.nio.ShortBuffer drawListBuffer; private final java.nio.FloatBuffer mCubeTextureCoordinates; float color[] = { 1.f, 1.f, 1.f, 1.0f }; private final String vertexShaderCode = "attribute vec4 vPosition;" + "attribute vec2 a_TexCoordinate;" + "varying vec2 v_TexCoordinate;" + "void main() {" + " gl_Position = vPosition;" + " v_TexCoordinate = a_TexCoordinate;" + "}"; private final String fragmentShaderCode = "precision mediump float;" + "uniform vec4 vColor;" + "uniform sampler2D u_Texture;" + "varying vec2 v_TexCoordinate;" + "void main() {" + "gl_FragColor = (texture2D(u_Texture, v_TexCoordinate));" + "}"; // number of coordinates per vertex in this array static final int COORDS_PER_VERTEX = 3; static float squareCoords[] = { -0.5f, -0.5f, 0.0f, // bottom left 0.5f, -0.5f, 0.0f, // bottom right -0.5f, 0.5f, 0.0f, // top left 0.5f, 0.5f, 0.0f}; // top right private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices private int mProgram; private int mPositionHandle; private int mColorHandle; private int mTextureUniformHandle; private int mTextureCoordinateHandle; private final int mTextureCoordinateDataSize = 2; private final int vertexCount = squareCoords.length / COORDS_PER_VERTEX; private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex private int mTextureDataHandle; float textureCoordinates[] = {0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f }; Camera _camera; TextureView _textureView; int[] textures; android.graphics.SurfaceTexture _surface; public Square() { ByteBuffer bb = ByteBuffer.allocateDirect( // (# of coordinate values * 4 bytes per float) squareCoords.length * 4); bb.order(ByteOrder.nativeOrder()); vertexBuffer = bb.asFloatBuffer(); vertexBuffer.put(squareCoords); vertexBuffer.position(0); // initialize byte buffer for the draw list ByteBuffer dlb = ByteBuffer.allocateDirect( // (# of coordinate values * 2 bytes per short) drawOrder.length * 2); dlb.order(ByteOrder.nativeOrder()); drawListBuffer = dlb.asShortBuffer(); drawListBuffer.put(drawOrder); drawListBuffer.position(0); mCubeTextureCoordinates = ByteBuffer.allocateDirect(textureCoordinates.length * 4) .order(ByteOrder.nativeOrder()).asFloatBuffer(); mCubeTextureCoordinates.put(textureCoordinates).position(0); // create empty OpenGL ES Program mProgram = GLES20.glCreateProgram(); textures = new int[1]; GLES20.glGenTextures(1, textures, 0); _surface = new android.graphics.SurfaceTexture(textures[0]); _camera = Camera.open(); Camera.Size previewSize = _camera.getParameters().getPreviewSize(); try { _camera.setPreviewTexture(_surface); } catch (java.io.IOException ex) { // Console.writeLine (ex.Message); } final int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER); GLES20.glShaderSource(vertexShaderHandle, vertexShaderCode); GLES20.glCompileShader(vertexShaderHandle); final int[] compileStatus = new int[1]; GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0); if (compileStatus[0] == 0) { //do check here } final int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER); GLES20.glShaderSource(fragmentShaderHandle, fragmentShaderCode); GLES20.glCompileShader(fragmentShaderHandle); GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0); if (compileStatus[0] == 0) { //do check here } GLES20.glAttachShader(mProgram, vertexShaderHandle); GLES20.glAttachShader(mProgram, fragmentShaderHandle); GLES20.glBindAttribLocation(mProgram, 0, "a_Position"); GLES20.glBindAttribLocation(mProgram, 0, "a_TexCoordinate"); GLES20.glLinkProgram(mProgram); final int[] linkStatus = new int[1]; GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linkStatus, 0); if (linkStatus[0] == 0) { //do check here } GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]); mTextureDataHandle = textures[0]; // Set filtering GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); } public void draw() { _surface.updateTexImage(); GLES20.glUseProgram(mProgram); mTextureUniformHandle = GLES20.glGetUniformLocation(mProgram, "u_Texture"); mPositionHandle = GLES20.glGetAttribLocation(mProgram, "a_Position"); mColorHandle = GLES20.glGetAttribLocation(mProgram, "a_Color"); mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgram, "a_TexCoordinate"); GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle); GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer); GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false, 0, mCubeTextureCoordinates); GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle); GLES20.glEnableVertexAttribArray(mPositionHandle); GLES20.glUniform1i(mTextureUniformHandle, 0); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount); GLES20.glDisableVertexAttribArray(mPositionHandle); } } 中绘制,这是Cardboard的绘制函数。

<%= f.input :radio, as: :select, include_blank: false, collection: ['si','no'] %>

2 个答案:

答案 0 :(得分:7)

渲染SurfaceTexture纹理对象时,需要使用GL_TEXTURE_EXTERNAL_OES纹理目标:

  

纹理对象使用GL_TEXTURE_EXTERNAL_OES纹理目标,该目标由GL_OES_EGL_image_external OpenGL ES扩展定义。这限制了纹理的使用方式。每次绑定纹理时,它必须绑定到GL_TEXTURE_EXTERNAL_OES目标而不是GL_TEXTURE_2D目标。此外,任何从纹理中采样的OpenGL ES 2.0着色器必须使用例如&#34; #extension GL_OES_EGL_image_external:require&#34;来声明其对此扩展的使用。指示。此类着色器还必须使用samplerExternalOES GLSL采样器类型访问纹理。

因此,您需要像这样更改片段着色器,添加#extension声明并将纹理统一为samplerExternalOES

private final String fragmentShaderCode =
    "#extension GL_OES_EGL_image_external : require\n" +
    "precision mediump float;" +
    "uniform vec4 vColor;" +
    "uniform samplerExternalOES u_Texture;" +
    "varying vec2 v_TexCoordinate;" +
    "void main() {" +
            "gl_FragColor = (texture2D(u_Texture, v_TexCoordinate));" +
    "}";

同样在你的draw()函数中,绑定纹理如下:

GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureDataHandle);

答案 1 :(得分:4)

您无法使用普通纹理渲染相机或视频预览,您必须使用GL_TEXTURE_EXTERNAL_OES扩展名。我有同样的问题,我在github上找到了一个完整的工作解决方案。该项目的名称是android_instacam。

Here您会找到要学习的源代码。如果您想直接在自己的设备上看到它,请转到Play商店here