VideoPlayback with object recognition (Vuforia)

Date: 2017-04-28 15:41:53

Tags: augmented-reality vuforia

I am trying to play a video on a real object target. I am using the renderFrame function from the sample that plays a video on an image target, but when I use it with a real object the video is not displayed. Playback does start, though: you can hear the video's audio.

public void renderFrame(State state, float[] projectionMatrix)
{
    // Renders video background replacing Renderer.DrawVideoBackground()
    mSampleAppRenderer.renderVideoBackground();

    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    if(tappingProjectionMatrix == null)
    {
        tappingProjectionMatrix = new Matrix44F();
        tappingProjectionMatrix.setData(projectionMatrix);
    }

    // Did we find any trackables this frame?
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable:
        TrackableResult trackableResult = state.getTrackableResult(tIdx);

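        // Note: unlike the image-target sample, the trackable is cast to
        // an ObjectTarget here, since the target comes from object
        // recognition rather than an image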
        ObjectTarget imageTarget = (ObjectTarget) trackableResult
            .getTrackable();

        int currentTarget;

        // We store the modelview matrix to be used later by the tap
        // calculation
        if (imageTarget.getName().compareTo("grafik1") == 0)
            currentTarget = VideoPlayback.STONES;
        else
            currentTarget = VideoPlayback.CHIPS;

        modelViewMatrix[currentTarget] = Tool
            .convertPose2GLMatrix(trackableResult.getPose());

        isTracking[currentTarget] = true;

        targetPositiveDimensions[currentTarget] = imageTarget.getSize();
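        // (assumption: for an object target, getSize() returns the 3D
        // bounding box of the scanned object, not an image width/height)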


        // If the movie is ready to start playing or it has reached the end
        // of playback we render the keyframe
        if ((currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.READY)
            || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.REACHED_END)
            || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.NOT_READY)
            || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.ERROR)) {


            float[] modelViewMatrixKeyframe = Tool.convertPose2GLMatrix(
                    trackableResult.getPose()).getData();


            float[] modelViewProjectionKeyframe = new float[16];

            Log.d("size", "width " + targetPositiveDimensions[currentTarget].getData()[0] + "  height " + targetPositiveDimensions[currentTarget].getData()[1]);

            // Scale the keyframe quad by a hardcoded factor (the original
            // image-target sample scales by targetPositiveDimensions here)
            Matrix.scaleM(modelViewMatrixKeyframe, 0,
                    0.1235f,
                    0.1235f,
                    0.1235f);

            Matrix.multiplyMM(modelViewProjectionKeyframe, 0,
                    projectionMatrix, 0, modelViewMatrixKeyframe, 0);

            GLES20.glUseProgram(keyframeShaderID);

            // Prepare for rendering the keyframe
            GLES20.glVertexAttribPointer(keyframeVertexHandle, 3,
                    GLES20.GL_FLOAT, false, 0, quadVertices);
            GLES20.glVertexAttribPointer(keyframeTexCoordHandle, 2,
                    GLES20.GL_FLOAT, false, 0, quadTexCoords);

            GLES20.glEnableVertexAttribArray(keyframeVertexHandle);
            GLES20.glEnableVertexAttribArray(keyframeTexCoordHandle);

            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);

            // The first loaded texture from the assets folder is the
            // keyframe
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,
                    mTextures.get(0).mTextureID[0]);
            GLES20.glUniformMatrix4fv(keyframeMVPMatrixHandle, 1, false,
                    modelViewProjectionKeyframe, 0);
            GLES20.glUniform1i(keyframeTexSampler2DHandle, 0);

            // Render
            GLES20.glDrawElements(GLES20.GL_TRIANGLES, NUM_QUAD_INDEX,
                    GLES20.GL_UNSIGNED_SHORT, quadIndices);

            GLES20.glDisableVertexAttribArray(keyframeVertexHandle);
            GLES20.glDisableVertexAttribArray(keyframeTexCoordHandle);

            GLES20.glUseProgram(0);
        }

        SampleUtils.checkGLError("VideoPlayback renderFrame");
    }

    GLES20.glDisable(GLES20.GL_DEPTH_TEST);


    Renderer.getInstance().end();

}
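
For comparison, the image-target sample does not hardcode the keyframe scale: it scales the quad by the measured target size. I also suspect the quad may need to be translated, since I do not know where the pose origin of an object target sits relative to the object. The following is only a sketch of what I think might be needed; keyframeQuadAspectRatio is a field from the sample, while the half-depth translation is purely my guess:

    // Assumption: the object target's pose origin may sit at the base of
    // the scanning-grid bounding box rather than at its center, so move
    // the quad in front of the object first (the half-depth offset is a
    // guess, not a value from the sample)
    Matrix.translateM(modelViewMatrixKeyframe, 0,
            0.0f, 0.0f,
            targetPositiveDimensions[currentTarget].getData()[2] * 0.5f);

    // The image-target sample then scales the unit quad by the measured
    // target width, keeping the keyframe's aspect ratio
    float quadWidth = targetPositiveDimensions[currentTarget].getData()[0];
    Matrix.scaleM(modelViewMatrixKeyframe, 0,
            quadWidth,
            quadWidth * keyframeQuadAspectRatio[currentTarget],
            quadWidth);

With the hardcoded 0.1235f scale, the quad could simply end up too small or inside the object, which would explain why the audio plays while nothing is visible.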

I would really appreciate any help.

0 Answers