TextureView使用OpenGl显示空白屏幕

时间:2019-03-01 11:49:42

标签: java android opengl-es grafika

我正在尝试在Android应用程序中使用open gl。我遵循了这份文档https://developer.android.com/training/graphics/opengl/draw,并且能够使用GlSurfaceView绘制三角形和一些复杂的动画。 现在,我想在列表中使用它们,但是GLSurfaceView无法做到这一点,所以我切换到TextureView。这是展示如何在Grafika中进行操作的实现 https://github.com/google/grafika/blob/master/app/src/main/java/com/android/grafika/TextureViewGLActivity.java

现在,当我将代码放入doAnimation()时,它不起作用.. 我要附加一些代码

公共类“三角形” {     私有静态最终String TAG =“ Triangle”;

private final String vertexShaderCode =
        // This matrix member variable provides a hook to manipulate
        // the coordinates of the objects that use this vertex shader
        "uniform mat4 uMVPMatrix;" +
        "attribute vec4 vPosition;" +
        "void main() {" +
        // the matrix must be included as a modifier of gl_Position
        // Note that the uMVPMatrix factor *must be first* in order
        // for the matrix multiplication product to be correct.
        "  gl_Position = uMVPMatrix * vPosition;" +
        "}";
private final String fragmentShaderCode =
        "precision mediump float;" +
        "uniform vec4 vColor;" +
        "void main() {" +
        "  gl_FragColor = vColor;" +
        "}";
private int mProgram;





//for draw function
private int mPositionHandle;
private int mColorHandle;
private final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
// Use to access and set the view transformation
private int mMVPMatrixHandle;




private FloatBuffer vertexBuffer;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float triangleCoords[] = {   // in counterclockwise order:
        0.0f,  0.622008459f, 0.0f, // top
        -0.5f, -0.311004243f, 0.0f, // bottom left
        0.5f, -0.311004243f, 0.0f  // bottom right
};
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.63671875f, 0.76953125f, 0.22265625f, 1.0f };


public Triangle() {
    // initialize vertex byte buffer for shape coordinates
    ByteBuffer bb = ByteBuffer.allocateDirect(
            // (number of coordinate values * 4 bytes per float)
            triangleCoords.length * 4);
    // use the device hardware's native byte order
    bb.order(ByteOrder.nativeOrder());
    // create a floating point buffer from the ByteBuffer
    vertexBuffer = bb.asFloatBuffer();
    // add the coordinates to the FloatBuffer
    vertexBuffer.put(triangleCoords);
    // set the buffer to read the first coordinate
    vertexBuffer.position(0);





    setUpShaders();
}


private void setUpShaders(){

    int vertexShader = OpenGlUtil.loadShader(GLES20.GL_VERTEX_SHADER,
            vertexShaderCode);
    int fragmentShader = OpenGlUtil.loadShader(GLES20.GL_FRAGMENT_SHADER,
            fragmentShaderCode);
    // create empty OpenGL ES Program
    mProgram = GLES20.glCreateProgram();
    // add the vertex shader to program
    GLES20.glAttachShader(mProgram, vertexShader);
    // add the fragment shader to program
    GLES20.glAttachShader(mProgram, fragmentShader);
    // creates OpenGL ES program executables
    GLES20.glLinkProgram(mProgram);

    GlUtil.checkGlError("glGenTextures");

}





public void draw(float[] mvpMatrix) {
    // Add program to OpenGL ES environment
    GLES20.glUseProgram(mProgram);
    GlUtil.checkGlError("glUseProgram");


    // get handle to vertex shader's vPosition member
    mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
    GlUtil.checkGlError("glGetAttribLocation");

    // Enable a handle to the triangle vertices
    GLES20.glEnableVertexAttribArray(mPositionHandle);
    GlUtil.checkGlError("glEnableVertexAttribArray");

    // Prepare the triangle coordinate data
    GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
            GLES20.GL_FLOAT, false,
            vertexStride, vertexBuffer);
    GlUtil.checkGlError("glVertexAttribPointer");


    // get handle to fragment shader's vColor member
    mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
    // Set color for drawing the triangle
    GLES20.glUniform4fv(mColorHandle, 1, color, 0);
    GlUtil.checkGlError("glUniform4fv");



    // get handle to shape's transformation matrix
    mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
    // Pass the projection and view transformation to the shader
    GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
    GlUtil.checkGlError("glUniformMatrix4fv");



    // Draw the triangle
    GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);
    GlUtil.checkGlError("glDrawArrays");
    // Disable vertex array
    GLES20.glDisableVertexAttribArray(mPositionHandle);
    GlUtil.checkGlError("glDisableVertexAttribArray");
}



}

这是纹理视图类

public class TestTextureView extends TextureView {

private static final String TAG = "TestTextureView";

private Renderer mRenderer;
private static volatile boolean sReleaseInCallback = true;

public TestTextureView(Context context) {
    super(context);
    init(context);
}

public TestTextureView(Context context, AttributeSet attrs) {
    super(context, attrs);
    init(context);
}

public TestTextureView(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    init(context);
}

@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
public TestTextureView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
    super(context, attrs, defStyleAttr, defStyleRes);
    init(context);
}


private void init(Context context){
    // Start up the Renderer thread.  It'll sleep until the TextureView is ready.
    mRenderer = new Renderer(context);
    mRenderer.start();


    // Set the Renderer for drawing on the GLSurfaceView
    setSurfaceTextureListener(mRenderer);

}

public void onDestroy(){
    mRenderer.halt();
}

public Renderer getmRenderer() {
    return mRenderer;
}

/**
 * Handles GL rendering and SurfaceTexture callbacks.
 * <p>
 * We don't create a Looper, so the SurfaceTexture-by-way-of-TextureView callbacks
 * happen on the UI thread.
 */
public static class Renderer extends Thread implements SurfaceTextureListener {
    private Object mLock = new Object();        // guards mSurfaceTexture, mDone
    private SurfaceTexture mSurfaceTexture;
    private EglCore mEglCore;
    private boolean mDone;


    // mMVPMatrix is an abbreviation for "Model View Projection Matrix"
    private final float[] mMVPMatrix = new float[16];
    private final float[] mProjectionMatrix = new float[16];
    private final float[] mViewMatrix = new float[16];


    private int starshipType=Config.TYPE_STARSHIP_LIFE,numMoons=1,type_phase=1;
    private int level=1;

    private Bitmap newProfileBitmap,newVesselBitmap;

    //for life
    private ArrayList<Shape> list;
    private String name;
    private Image image,imageStar,imageStar2,imageStar3;
    private CircleImage circleImage,planet;
    float degrees=0,degrees2=120,degrees3=240;
    float shrinkingDegrees=0,animScale=0;


    private float[] star1Array,star2Array,star3Array;
    private float[] vesselArray,profileArray,planetArray;

    private float baseTranslation=1;
    private Context context;



    public Renderer(Context context) {
        this.context = context;

    }


    /**
     * Tells the thread to stop running.
     */
    public void halt() {
        synchronized (mLock) {
            mDone = true;
            mLock.notify();
        }
    }

    @Override   // will be called on UI thread
    public void onSurfaceTextureAvailable(SurfaceTexture st, int width, int height) {
        Log.d(TAG, "onSurfaceTextureAvailable(" + width + "x" + height + ")");
        synchronized (mLock) {
            mSurfaceTexture = st;
            mLock.notify();

        }


    }

    @Override   // will be called on UI thread
    public void onSurfaceTextureSizeChanged(SurfaceTexture st, int width, int height) {
        Log.d(TAG, "onSurfaceTextureSizeChanged(" + width + "x" + height + ")");
        // TODO: ?
        GLES20.glViewport(0, 0, width, height);


        float ratio = (float) width / height;
        // this projection matrix is applied to object coordinates
        // in the onDrawFrame() method
        Matrix.frustumM(mProjectionMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
    }

    @Override   // will be called on UI thread
    public boolean onSurfaceTextureDestroyed(SurfaceTexture st) {
        Log.d(TAG, "onSurfaceTextureDestroyed");

        // We set the SurfaceTexture reference to null to tell the Renderer thread that
        // it needs to stop.  The renderer might be in the middle of drawing, so we want
        // to return false here so that the caller doesn't try to release the ST out
        // from under us.
        //
        // In theory.
        //
        // In 4.4, the buffer queue was changed to be synchronous, which means we block
        // in dequeueBuffer().  If the renderer has been running flat out and is currently
        // sleeping in eglSwapBuffers(), it's going to be stuck there until somebody
        // tears down the SurfaceTexture.  So we need to tear it down here to ensure
        // that the renderer thread will break.  If we don't, the thread sticks there
        // forever.
        //
        // The only down side to releasing it here is we'll get some complaints in logcat
        // when eglSwapBuffers() fails.
        synchronized (mLock) {
            mSurfaceTexture = null;
        }
        if (sReleaseInCallback) {
            Log.i(TAG, "Allowing TextureView to release SurfaceTexture");
        }
        return sReleaseInCallback;
    }

    @Override   // will be called on UI thread
    public void onSurfaceTextureUpdated(SurfaceTexture st) {
        //Log.d(TAG, "onSurfaceTextureUpdated");
    }

    @Override
    public void run() {
        while (true) {
            SurfaceTexture surfaceTexture = null;

            // Latch the SurfaceTexture when it becomes available.  We have to wait for
            // the TextureView to create it.
            synchronized (mLock) {
                while (!mDone && (surfaceTexture = mSurfaceTexture) == null) {
                    try {
                        mLock.wait();
                    } catch (InterruptedException ie) {
                        throw new RuntimeException(ie);     // not expected
                    }
                }
                if (mDone) {
                    break;
                }
            }
            Log.d(TAG, "Got surfaceTexture=" + surfaceTexture);

            // Create an EGL surface for our new SurfaceTexture.  We're not on the same
            // thread as the SurfaceTexture, which is a concern for the *consumer*, which
            // wants to call updateTexImage().  Because we're the *producer*, i.e. the
            // one generating the frames, we don't need to worry about being on the same
            // thread.
            mEglCore = new EglCore(null, EglCore.FLAG_TRY_GLES3);
            WindowSurface windowSurface = new WindowSurface(mEglCore, mSurfaceTexture);
            windowSurface.makeCurrent();


            //setEGLContextClientVersion();


            // Render frames until we're told to stop or the SurfaceTexture is destroyed.
            doAnimation(windowSurface);

            windowSurface.release();
            mEglCore.release();
            if (!sReleaseInCallback) {
                Log.i(TAG, "Releasing SurfaceTexture in renderer thread");
                surfaceTexture.release();
            }
        }

        Log.d(TAG, "Renderer thread exiting");
    }

    /**
     * Draws updates as fast as the system will allow.
     * <p>
     * In 4.4, with the synchronous buffer queue queue, the frame rate will be limited.
     * In previous (and future) releases, with the async queue, many of the frames we
     * render may be dropped.
     * <p>
     * The correct thing to do here is use Choreographer to schedule frame updates off
     * of vsync, but that's not nearly as much fun.
     */
    private void doAnimation(WindowSurface eglSurface) {
        final int BLOCK_WIDTH = 80;
        final int BLOCK_SPEED = 2;
        float clearColor = 0.0f;
        int xpos = -BLOCK_WIDTH / 2;
        int xdir = BLOCK_SPEED;
        int width = eglSurface.getWidth();
        int height = eglSurface.getHeight();

        Log.d(TAG, "Animating " + width + "x" + height + " EGL surface");

        while (true) {
            // Check to see if the TextureView's SurfaceTexture is still valid.
            synchronized (mLock) {
                SurfaceTexture surfaceTexture = mSurfaceTexture;
                if (surfaceTexture == null) {
                    Log.d(TAG, "doAnimation exiting");
                    return;
                }
            }
            //Log.d(TAG, "doAnimation: loading new frame");

/*
            //ORIGINAL CODE IN GRAFIKA ----------------------------------------------
            // Still alive, render a frame.
            GLES20.glClearColor(clearColor, clearColor, clearColor, 1.0f);
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

            GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
            GLES20.glScissor(xpos, height / 4, BLOCK_WIDTH, height / 2);
            GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            GLES20.glDisable(GLES20.GL_SCISSOR_TEST);


            // Publish the frame.  If we overrun the consumer, frames will be dropped,
            // so on a sufficiently fast device the animation will run at faster than
            // the display refresh rate.
            //
            // If the SurfaceTexture has been destroyed, this will throw an exception.
            eglSurface.swapBuffers();

            // Advance state
            clearColor += 0.015625f;
            if (clearColor > 1.0f) {
                clearColor = 0.0f;
            }
            xpos += xdir;
            if (xpos <= -BLOCK_WIDTH / 2 || xpos >= width - BLOCK_WIDTH / 2) {
                Log.d(TAG, "change direction");
                xdir = -xdir;
            }
*/

            //MY CODE -----------------------------------------

            onDrawFrame();
            //eglSurface.swapBuffers();
            if (!eglSurface.swapBuffers()) {
                Log.e(TAG, "cannot swap buffers!");
            }
            GlUtil.checkGlError("cannot swap buffers");
        }
    }


    boolean created=false;
    Triangle triangle;
    private void setUpObjects(){

        image=new Image(context, -1);
        imageStar=new Image(context,R.drawable.moon1);
        imageStar2=new Image(context,R.drawable.moon2);
        imageStar3=new Image(context,R.drawable.moon3);
        circleImage=new CircleImage(context,R.drawable.profile);
        planet=new CircleImage(context,R.drawable.earth);

        triangle=new Triangle();
        setInitialSettings();
    }
    private void setInitialSettings(){
        // Set the background color to black ( rgba ).
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0f);

        GLES20.glClearDepthf(1.0f);
        GLES20.glEnable( GLES20.GL_DEPTH_TEST );
        GLES20.glDepthFunc( GLES20.GL_LEQUAL );
        //https://stackoverflow.com/questions/3388294/opengl-question-about-the-usage-of-gldepthmask/3390094#3390094
        GLES20.glDepthMask( true );
    }
    private void onDrawFrame(){

        //Log.d(TAG, "onDrawFrame: ");
        if(!created){
            setUpObjects();
            created=true;
        }

        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT|GLES20.GL_DEPTH_BUFFER_BIT);

        // Set the camera position (View matrix)
        Matrix.setLookAtM(mViewMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
        // Calculate the projection and view transformation
        Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);

        triangle.draw(mProjectionMatrix);

    }


}
}

0 个答案:

没有答案