Rendering a SurfaceTexture to a Unity Texture2D

Date: 2016-02-05 15:02:20

Tags: c# android unity3d opengl-es texture2d

I have asked similar questions before, but they were never really cleared up, so now I would like suggestions about what I am doing wrong in my code.

What I am trying to do is render a SurfaceTexture from an Android plugin into a Unity Texture2D.

Here is my Unity code:

using System;
using UnityEngine;
using UnityEngine.UI;

public class AndroidHandler : MonoBehaviour {

    [SerializeField]
    private RawImage _rawImage;

    private Texture2D _inputTexture;
    private AndroidJavaObject androidStreamerObj;
    private System.IntPtr _nativePtr;

    void Start () {

        _rawImage.material.SetTextureScale("_MainTex", new Vector2(-1, -1));
        InitAndroidStreamerObject();
    }

    private void InitAndroidStreamerObject()
    {
        androidStreamerObj = new AndroidJavaObject("makeitbetter.figazzz.com.vitamiousing7.AndroidStreamer");

        Int32 texPtr = androidStreamerObj.Call <Int32> ("GetTexturePtr");
        Debug.Log("texture pointer? " + texPtr);
        Texture2D nativeTexture = Texture2D.CreateExternalTexture (128, 128, TextureFormat.RGBA32 , false, false, new System.IntPtr(texPtr));

        _rawImage.texture = nativeTexture;
    }

    public void StartStream()
    {
        string streamLink = "rtmp://live.hkstv.hk.lxdns.com/live/hks"; //"rtsp://wowzaec2demo.streamlock.net/vod/mp4:BigBuckBunny_115k.mov"; //"rtmp://live.hkstv.hk.lxdns.com/live/hks";
        androidStreamerObj.Call("LaunchStream", streamLink);
    }

    void Update()
    {
        androidStreamerObj.Call("DrawFrame");
    }
}

As you can see, it is very basic. I ask my Android plugin to create an OpenGL texture, and I use the pointer to that freshly created texture to allocate a Texture2D in Unity.

And here is my Android plugin code:

package makeitbetter.figazzz.com.vitamiousing7;

import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;
import android.widget.FrameLayout;

import com.unity3d.player.UnityPlayer;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

// Vitamio imports (assuming the usual io.vov.vitamio packages; the VideoView used
// below must be a build that exposes the custom setSurfaceToPlayer() method).
import io.vov.vitamio.MediaPlayer;
import io.vov.vitamio.Vitamio;
import io.vov.vitamio.widget.MediaController;
import io.vov.vitamio.widget.VideoView;

public class AndroidStreamer {

    private final int FLOAT_SIZE_BYTES = 4;
    private final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
    private final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
    private final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;

    private Activity _currActivity;
    private VideoView _streamConnection;

    private Surface _cachedSurface;
    private SurfaceTexture _cachedSurfaceTexture;

    private Boolean isNewFrame = false;

    //open gl
    private int texWidth = 128;
    private int texHeight = 128;
    private float[] mMVPMatrix = new float[16];
    private float[] mSTMatrix = new float[16];

    private int glProgram;
    private int muMVPMatrixHandle;
    private int muSTMatrixHandle;
    private int maPositionHandle;
    private int maTextureHandle;
    private int unityTextureID = -1;
    private int mTextureId = -1;   //surface texture id
    private int idFBO = -1;
    private int idRBO = -1;

    private final float[] mTriangleVerticesData = {
            // X, Y, Z, U, V
            -1.0f, -1.0f, 0, 0.f, 0.f,
            1.0f, -1.0f, 0, 1.f, 0.f,
            -1.0f,  1.0f, 0, 0.f, 1.f,
            1.0f,  1.0f, 0, 1.f, 1.f,
    };

    private FloatBuffer mTriangleVertices;

    private final String vertexShaderCode =
            "uniform mat4 uMVPMatrix;\n" +
                    "uniform mat4 uSTMatrix;\n" +
                    "attribute vec4 aPosition;\n" +
                    "attribute vec4 aTextureCoord;\n" +
                    "varying vec2 vTextureCoord;\n" +
                    "void main() {\n" +
                    "    gl_Position = uMVPMatrix * aPosition;\n" +
                    "    vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                    "}\n";

    private final String fragmentShaderCode =
            "#extension GL_OES_EGL_image_external : require\n" +
                    "precision mediump float;\n" +      // highp here doesn't seem to matter
                    "varying vec2 vTextureCoord;\n" +
                    "uniform samplerExternalOES sTexture;\n" +
                    "void main() {\n" +
                    "    gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
                    "}\n";

    public AndroidStreamer() {
        Log.d("Unity", "AndroidStreamer was initialized");

        _currActivity = UnityPlayer.currentActivity;
        Vitamio.isInitialized(_currActivity);

        _currActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                _streamConnection = new VideoView(_currActivity);
                _currActivity.addContentView(_streamConnection, new FrameLayout.LayoutParams(100, 100));
            }
        });

        mTriangleVertices = ByteBuffer.allocateDirect(
                mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        mTriangleVertices.put(mTriangleVerticesData).position(0);
        Matrix.setIdentityM(mSTMatrix, 0);

        initShaderProgram();
    }

    private void initShaderProgram()
    {
        Log.d("Unity", "initShaderProgram");
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);

        glProgram = GLES20.glCreateProgram();
        GLES20.glAttachShader(glProgram, vertexShader);
        checkGlError("glAttachVertexShader");
        GLES20.glAttachShader(glProgram, fragmentShader);
        checkGlError("glAttachFragmentShader");
        GLES20.glLinkProgram(glProgram);
        checkGlError("glLinkProgram");

        maPositionHandle = GLES20.glGetAttribLocation(glProgram, "aPosition");
        checkLocation(maPositionHandle, "aPosition");
        maTextureHandle = GLES20.glGetAttribLocation(glProgram, "aTextureCoord");
        checkLocation(maTextureHandle, "aTextureCoord");

        muMVPMatrixHandle = GLES20.glGetUniformLocation(glProgram, "uMVPMatrix");
        checkLocation(muMVPMatrixHandle, "uMVPMatrix");
        muSTMatrixHandle = GLES20.glGetUniformLocation(glProgram, "uSTMatrix");
        checkLocation(muSTMatrixHandle, "uSTMatrix");
    }

    private int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        if (shader != 0) {
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e("Unity", "Could not compile shader " + shaderType + ":");
                Log.e("Unity", GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
        }
        return shader;
    }

    private void checkLocation(int location, String label) {
        if (location < 0) {
            throw new RuntimeException("Unable to locate '" + label + "' in program");
        }
    }

    private void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e("Unity", op + ": glError " + error);
            throw new RuntimeException(op + ": glError " + error);
        }
    }

    private void checkFrameBufferStatus()
    {
        int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
        checkGlError("glCheckFramebufferStatus");
        switch (status)
        {
            case GLES20.GL_FRAMEBUFFER_COMPLETE:
                Log.d("Unity", "complete");
                break;
            case GLES20.GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT:
                Log.e("Unity", "incomplete attachment");
                break;
            case GLES20.GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT:
                Log.e("Unity", "incomplete missing attachment");
                break;
            case GLES20.GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS:
                Log.e("Unity", "incomplete dimensions");
                break;
            case GLES20.GL_FRAMEBUFFER_UNSUPPORTED:
                Log.e("Unity", "framebuffer unsupported");
                break;

            default : Log.d("Unity", "default");
        }
    }

    private void initGLTexture()
    {
        Log.d("Unity", "initGLTexture");
        int textures[] = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        checkGlError("glGenTextures initGLTexture");
        mTextureId = textures[0];

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        checkGlError("glActiveTexture initGLTexture");
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
        checkGlError("glBindTexture initGLTexture");

        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        checkGlError("glTexParameterf initGLTexture");
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        checkGlError("glTexParameterf initGLTexture");
    }

    public int GetTexturePtr()
    {
        Bitmap bitmap = Bitmap.createBitmap(texWidth, texHeight, Bitmap.Config.ARGB_8888);
        for(int x = 0; x < texWidth; x++)
        {
            for (int y = 0; y < texHeight; y++)
            {
                bitmap.setPixel(x, y, Color.argb(155, 255, 50, 255));
            }
        }
        Log.d("Unity", "Bitmap is: " + bitmap);

        ByteBuffer buffer = ByteBuffer.allocate(bitmap.getByteCount());
        bitmap.copyPixelsToBuffer(buffer);
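        // Note: this buffer is filled from the bitmap but never uploaded; the
        // glTexImage2D call below passes null data (GLUtils.texImage2D is commented out).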

        //GLES20.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
        //checkGlError("glEnable GetTexturePtr");

        int textures[] = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        checkGlError("0");
        unityTextureID = textures[0];

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        checkGlError("1");
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, unityTextureID);
        checkGlError("2");

        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, texWidth, texHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
        checkGlError("12");
        //GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
        //checkGlError("3");
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        checkGlError("4");
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        checkGlError("5");
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        checkGlError("6");
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        checkGlError("7");
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        checkGlError("8");

        setupBuffers();
        Log.d("Unity", "texture id returned: " + unityTextureID);

        return unityTextureID;
    }

    private void setupBuffers()
    {
        Log.d("Unity", "setupBuffers");

        //framebuffer
        int buffers[] = new int[1];
        GLES20.glGenFramebuffers(1, buffers, 0);
        checkGlError("9");
        idFBO = buffers[0];
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, idFBO);
        checkGlError("10");

        //render buffer
        int rbuffers[] = new int[1];
        GLES20.glGenRenderbuffers(1, rbuffers, 0);
        checkGlError("glGenRenderBuffers setupBuffers");
        idRBO = rbuffers[0];
        GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, idRBO);
        checkGlError("glBindRenderBuffer setupBuffers");
        GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_RGBA4, texWidth, texHeight);
        checkGlError("glRenderBufferStorage setupBuffers");

        GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_RENDERBUFFER, idRBO);
        checkGlError("glFramebufferRenderbuffer setupBuffers");

        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, unityTextureID, 0);
        checkGlError("glFrameBufferTexture2D");

        checkFrameBufferStatus();

        GLES20.glClearColor(1.0f, 0.5f, 0.0f, 1.0f);
        checkGlError("glClearColor setupBuffers");
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        checkGlError("glClear setupBuffers");
    }

    public void DrawFrame()
    {
        if(isNewFrame && mSTMatrix != null) {

            int[] testBuffer = new int[1];
            GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, testBuffer, 0);

            Log.d("Unity", "DrawFrame binded = " + testBuffer[0] + " idFBO = " + idFBO);

            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, idFBO);
            checkGlError("glBindFrameBuffer DrawFrame");

            GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);
            checkGlError("glClearColor DrawFrame");
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            checkGlError("glClear DrawFrame");

            GLES20.glUseProgram(glProgram);
            checkGlError("glUseProgram DrawFrame");

            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            checkGlError("glActiveTexture DrawFrame");
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
            checkGlError("glBindTexture DrawFrame");

            mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
            GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
            checkGlError("glVertexAttribPointer DrawFrame");
            GLES20.glEnableVertexAttribArray(maTextureHandle);
            checkGlError("glEnableVertexAttribArray DrawFrame");

            Matrix.setIdentityM(mMVPMatrix, 0);
            GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
            checkGlError("glUniformMatrix4fv MVP onFrameAvailable");
            GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
            checkGlError("glUniformMatrix4fv ST onFrameAvailable");

            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            checkGlError("glDrawArrays onFrameAvailable");

            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
            checkGlError("glBindFrameBuffer 0 onFrameAvailable");
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
            checkGlError("glBindTexture onFrameAvailable");

            isNewFrame = false;
        }
    }

    public void LaunchStream(String streamLink) {

        final String path = streamLink; //"http://dlqncdn.miaopai.com/stream/MVaux41A4lkuWloBbGUGaQ__.mp4"; //"rtmp://live.hkstv.hk.lxdns.com/live/hks";
        Log.i("Unity", "hop hop1 = " + path);

        _currActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {

                _streamConnection.setVideoPath(path);
                _streamConnection.setMediaController(new MediaController(_currActivity));
                _streamConnection.requestFocus();

                _streamConnection.setOnErrorListener(new MediaPlayer.OnErrorListener() {
                    @Override
                    public boolean onError(MediaPlayer mp, int what, int extra) {
                        Log.i("Unity", "some error, I don't know. what = " + what + " extra = " + extra);
                        return false;
                    }
                });

                _streamConnection.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {

                    @Override
                    public void onPrepared(MediaPlayer mediaPlayer) {
                        // optional need Vitamio 4.0
                        Log.i("Unity", "hop hop5");
                        mediaPlayer.setPlaybackSpeed(1.0f);

                    }
                });

                initGLTexture();

                _cachedSurfaceTexture = new SurfaceTexture(mTextureId);
                _cachedSurfaceTexture.setDefaultBufferSize(texWidth, texHeight);

                _cachedSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
                    @Override
                    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                        synchronized (this) {
                            surfaceTexture.updateTexImage();

                            mSTMatrix = new float[16];
                            surfaceTexture.getTransformMatrix(mSTMatrix);
                            isNewFrame = true;
                        }
                    }
                });

                _cachedSurface = new Surface(_cachedSurfaceTexture);
                _streamConnection.setSurfaceToPlayer(_cachedSurface);

                Log.i("Unity", "You're the best around!");
            }
        });
    }
}

I decided to include all of the Android plugin code so that my situation is as clear as possible. Basically, what I am trying to do is:

1) From the Unity side I call the method "GetTexturePtr". It creates a GL_TEXTURE_2D texture, which I apply to the Unity Texture2D. On the Android side I also set up frame and render buffers so I can change the color of this texture. That part works fine, because the texture gets filled with color exactly as expected.

2) Then I call the method "LaunchStream", which creates a GL_TEXTURE_EXTERNAL_OES texture (in the "initGLTexture()" method), and this texture is attached to the SurfaceTexture.

3) Also, in Unity's Update() method I call the Android method "DrawFrame()", which should update my Unity texture according to the SurfaceTexture changes.

Right now I get glError 1282 on GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId); and, of course, the texture just stays filled with the green clear color from:

GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);
            checkGlError("glClearColor DrawFrame");

What am I doing wrong?

2 Answers:

Answer 0 (score: 3)

Few people know this trick. I would like to give you a short introduction, and I think you can figure out the rest:

  1. First you need an ImageReader. It can wrap the Surface you want to read from, and it has a callback, ImageReader.OnImageAvailableListener, that invokes your code as soon as an image is ready (a Java-side sketch of this setup follows at the end of this answer).
  2. Use ImageReader.acquireLatestImage() to get an Image.
  3. Use Image.getHardwareBuffer() to get a HardwareBuffer.
  4. Pass the HardwareBuffer to a JNI function and update the texture there:

    //Target your texture
    glBindTexture(GL_TEXTURE_2D, textureName);
    // Get native AHardwareBuffer
    AHardwareBuffer *hwbuffer = AHardwareBuffer_fromHardwareBuffer(env, hardwareBuffer);
    // Create EGLClientBuffer from the AHardwareBuffer.
    EGLClientBuffer native_buffer = eglGetNativeClientBufferANDROID(hwbuffer);
    // Destroy last created EGLImageKHR
    if (cachedImages.find(textureName) != cachedImages.end()){
        eglDestroyImageKHR(eglGetCurrentDisplay(), cachedImages[textureName]);
    }
    // Begin to make new EGLImageKHR
    EGLImageKHR image {EGL_NO_IMAGE_KHR};
    EGLint attrs[] = {
            EGL_IMAGE_PRESERVED_KHR,
            EGL_TRUE,
            EGL_NONE,
    };
    // Create EGLImage from EGLClientBuffer.
    image = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID, native_buffer, attrs);
    if (image == EGL_NO_IMAGE_KHR) {
        LOGE("Failed to create EGLImage.");
        return false;
    }
    // Cache the image
    cachedImages[textureName] = image;
    // Get glEGLImageTargetTexture2DOES
    if (!isGlEGLImageTargetTexture2DOESInited) {
        glEGLImageTargetTexture2DOES = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC) eglGetProcAddress("glEGLImageTargetTexture2DOES");
        isGlEGLImageTargetTexture2DOESInited = true;
    }
    if(glEGLImageTargetTexture2DOES == NULL){
        LOGE("Error: Failed to find glEGLImageTargetTexture2DOES at %s:%in", __FILE__, __LINE__);
        return false;
    }
    // Allocate the OpenGL texture using the EGLImage.
    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, image);
    //Not GL_TEXTURE_EXTERNAL_OES
    //glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, image);
    
    glBindTexture(GL_TEXTURE_2D, 0);
    
  5. Now you have updated textureName, the texture you created earlier in your code (whether from native code, Android EGL, or Unity).

The whole process looks like this:

  1. The ImageReader's callback sets an image-ready flag.
  2. Unity's Update() checks whether an image is ready.
  3. The texture is updated with the code above.
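
For reference, here is a minimal Java-side sketch of steps 1-3 (the code above only covers the JNI side). The class name, the frameReady flag, the RGBA_8888/maxImages choices and the nativeUpdateTexture bridge are illustrative assumptions rather than part of the answer above, and Image.getHardwareBuffer() requires API level 28+:

    import android.graphics.PixelFormat;
    import android.hardware.HardwareBuffer;
    import android.media.Image;
    import android.media.ImageReader;
    import android.os.Handler;
    import android.view.Surface;

    public class FrameGrabber {
        private final ImageReader reader =
                ImageReader.newInstance(128, 128, PixelFormat.RGBA_8888, 2);
        private volatile boolean frameReady = false;

        public FrameGrabber(Handler listenerHandler) {
            // Step 1: the callback only flags that a new image is ready.
            reader.setOnImageAvailableListener(r -> frameReady = true, listenerHandler);
        }

        // Hand this Surface to the video player instead of a SurfaceTexture-backed one.
        public Surface getSurface() {
            return reader.getSurface();
        }

        // Called from Unity's Update(): steps 2-4.
        public void drawFrame(int textureName) {
            if (!frameReady) {
                return;
            }
            try (Image image = reader.acquireLatestImage()) {      // step 2
                if (image == null) {
                    return;
                }
                HardwareBuffer buffer = image.getHardwareBuffer(); // step 3
                nativeUpdateTexture(textureName, buffer);          // step 4: the JNI code above
                buffer.close();
            }
            frameReady = false;
        }

        // Hypothetical JNI bridge to the native function shown in this answer.
        private static native boolean nativeUpdateTexture(int textureName, HardwareBuffer buffer);
    }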

Answer 1 (score: 0)

You can't call surfaceTexture.updateTexImage() in onFrameAvailable; call it in DrawFrame() instead.
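
Applied to the code in the question, the relevant parts of AndroidStreamer would look roughly like this (a sketch that reuses the question's field names, and assumes multithreaded rendering is disabled so DrawFrame() is invoked on a thread with a current GL context):

    // Sketch: keep onFrameAvailable free of GL calls and move updateTexImage() into DrawFrame().
    private void attachSurfaceTexture() {
        _cachedSurfaceTexture = new SurfaceTexture(mTextureId);
        _cachedSurfaceTexture.setDefaultBufferSize(texWidth, texHeight);
        _cachedSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                // No GL work here: this callback may run on a thread without a GL context.
                isNewFrame = true;
            }
        });
    }

    public void DrawFrame() {
        if (!isNewFrame) {
            return;
        }
        // We are on the thread Unity called us from, which has a current GL context.
        _cachedSurfaceTexture.updateTexImage();
        _cachedSurfaceTexture.getTransformMatrix(mSTMatrix);
        isNewFrame = false;

        // ...then bind idFBO and run the existing drawing code from the question...
    }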

In Unity3D:

void Update()
{
    androidStreamerObj.Call("DrawFrame");
    GL.InvalidateState(); // ADD it
}