在Google Cardboard SurfaceTexture

时间:2015-06-02 17:15:07

标签: android video opengl-es google-cardboard

我正在使用gstreamer检索视频供稿(从RaspPi发送),我需要将其显示在Google Cardboard中。

我的工作基于tutorial-3 example of gstreamer。通过提供我的Surface(从SurfaceView.SurfaceHolder.getSurface()检索,我设法将我的视频显示到SurfaceView中,但我现在需要将其与Google Cardboard连接。

如果我没有弄错,Google Cardboard依赖于某些SurfaceTexture。所以我认为使用Surface(SurfaceTexture)构造函数从SurfaceTexture中简单地获取Surface会很容易。

问题是它根本不起作用。我的Google纸板应用程序Cardboard Pasthrough example,我还没有触及OpenGL代码,因为我对此一无所知。

在调试时,我发现我正在使用的代码存在(至少)一个问题。看来就行了

GLES20.glActiveTexture(GL_TEXTURE_EXTERNAL_OES);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture);
由于GL_TEXTURE_EXTERNAL_OES不在glActiveTexture方法的所需范围内(需要GL从0到GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS),

会给我带来一些麻烦。 这是我的日志:

GLConsumer  W  [unnamed-12520-0] bindTextureImage: clearing GL error: 0x500
Adreno-ES20  W  <core_glActiveTexture:348>: GL_INVALID_ENUM

那么现在有什么用?

gstreamer似乎收到了视频供稿,它尝试更新Surface(我收到一些有关正在更新的SurfaceTexture的onFrameAvailable的通知,并且只发送错误日志) 。但是,屏幕保持黑色,好像什么都没有更新。

以下是我的代码中最有趣的部分:

@Override
public void onCreate(Bundle savedInstanceState)
{
    super.onCreate(savedInstanceState);
    setContentView(R.layout.main);
    CardboardView cardboardView = (CardboardView) findViewById(R.id.cardboard_view);
    cardboardView.setRenderer(this);
    setCardboardView(cardboardView);

    // Initialize GStreamer and warn if it fails
    try {
        GStreamer.init(this);
    } catch (Exception e) {
    //Catch e...
    }
    mCamera = new float[16];
    mView = new float[16];
    mHeadView = new float[16];
    //gstreamer stuff
    nativeInit();
}


@Override
public void onSurfaceCreated(EGLConfig eglConfig) {
            Log.d(TAG, "onSurfaceCreated start");
    GLES20.glClearColor(0.5f, 0.1f, 0.1f, 0.5f);
    ByteBuffer bb = ByteBuffer.allocateDirect(squareVertices.length * 4);
    bb.order(ByteOrder.nativeOrder());
    vertexBuffer = bb.asFloatBuffer();
    vertexBuffer.put(squareVertices);
    vertexBuffer.position(0);


    ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
    dlb.order(ByteOrder.nativeOrder());
    drawListBuffer = dlb.asShortBuffer();
    drawListBuffer.put(drawOrder);
    drawListBuffer.position(0);


    ByteBuffer bb2 = ByteBuffer.allocateDirect(textureVertices.length * 4);
    bb2.order(ByteOrder.nativeOrder());
    textureVerticesBuffer = bb2.asFloatBuffer();
    textureVerticesBuffer.put(textureVertices);
    textureVerticesBuffer.position(0);

    int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
    int fragmentShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);

    mProgram = GLES20.glCreateProgram();             // create empty OpenGL ES Program
    GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to program
    GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
    GLES20.glLinkProgram(mProgram);
    checkGLError("Problem on line "+new Throwable().getStackTrace()[0].getLineNumber());
    Log.d(TAG, "Surface created");
    texture = createTexture();
    initSurface(texture);
}
static private int createTexture()
{
    Log.d(TAG + "_cardboard", "createTexture");

    int[] texture = new int[1];

    GLES20.glGenTextures(1,texture, 0);
    checkGLError("GenTextures Problem on line "+new Throwable().getStackTrace()[0].getLineNumber());

    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture[0]);
    checkGLError("BindTextures Problem on line "+new Throwable().getStackTrace()[0].getLineNumber());

    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_MIN_FILTER,GL10.GL_LINEAR);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
    checkGLError("Problem on line "+new Throwable().getStackTrace()[0].getLineNumber());
    return texture[0];
}
//Give the surface to gstreamer.
private void initSurface(int texture) {
    mSurface = new SurfaceTexture(texture);
    mSurface.setOnFrameAvailableListener(this);
    Log.d(TAG, "OnFrameAvailableListener set");

    Surface toto = new Surface(mSurface);
    nativeSurfaceInit(toto);
    toto.release();

}

//When we need to render
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    Log.d(TAG, "onFrameAvailable");
    this.getCardboardView().requestRender();

}

//Display to cardboard
@Override
public void onNewFrame(HeadTransform headTransform) {

    headTransform.getHeadView(mHeadView, 0);

    // Build the camera matrix and apply it to the ModelView.
    Matrix.setLookAtM(mCamera, 0, 0.0f, 0.0f, 0.01f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f);

    float[] mtx = new float[16];
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    mSurface.updateTexImage();
    mSurface.getTransformMatrix(mtx);

    float[] test = new float[3];
    headTransform.getEulerAngles(test, 0);

    //if(networkThread != null){
    //    networkThread.setRegValue(test);
    //}
}

@Override
public void onDrawEye(Eye eye) {
    // Log.d(TAG, "onDrawEye");

    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    GLES20.glUseProgram(mProgram);
    Log.d(TAG, "trying to access " + GL_TEXTURE_EXTERNAL_OES +" out of " + GLES20.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS);
    GLES20.glActiveTexture(GL_TEXTURE_EXTERNAL_OES);
   // checkGLError("Problem on line "+new Throwable().getStackTrace()[0].getLineNumber());
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture);
   // checkGLError("Problem on line "+new Throwable().getStackTrace()[0].getLineNumber());


    mPositionHandle = GLES20.glGetAttribLocation(mProgram, "position");
    GLES20.glEnableVertexAttribArray(mPositionHandle);
   // checkGLError("Problem on line "+new Throwable().getStackTrace()[0].getLineNumber());
    GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
            false, vertexStride, vertexBuffer);
   // checkGLError("Problem on line "+new Throwable().getStackTrace()[0].getLineNumber());


    mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
    GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
    GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
            false, vertexStride, textureVerticesBuffer);

    mColorHandle = GLES20.glGetAttribLocation(mProgram, "s_texture");


    GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
            GLES20.GL_UNSIGNED_SHORT, drawListBuffer);


    // Disable vertex array
    GLES20.glDisableVertexAttribArray(mPositionHandle);

    GLES20.glDisableVertexAttribArray(mTextureCoordHandle);
    Matrix.multiplyMM(mView, 0, eye.getEyeView(), 0, mCamera, 0);

}

有关更多代码,请参阅以下两个主要文件:https://gist.github.com/MagicMicky/4caa3ac669215652e40f

编辑:尝试使用gstreamer上的相机应用程序时,logcat中显示的错误与我之前描述的相同。所以这可能不重要......

0 个答案:

没有答案