需要在ARCORE中播放视频

时间:2017-12-05 15:40:10

标签: android android-layout augmented-reality arcore

正如我们所知,在 ARCore 中点击检测到的水平平面时可以放置 3D 对象。我需要的是:当用户点击平面(Plane)表面时,显示的是视频而不是 3D 对象。视频的外观和观感应当像 3D 对象一样锚定在平面上,以预览尺寸显示,而不是全屏播放。

在 ARCore 示例中,布局目前是一个包含 SurfaceView 的 RelativeLayout。因此,为了显示视频,我使用了 SurfaceView 并给它附加了 MediaPlayer。

/**
 * Sets up the GL program, the external (OES) video texture and the MediaPlayer.
 * Must be called on the GL thread (it issues GLES calls).
 *
 * Playback is started exclusively from the OnPreparedListener: the original
 * version also called mMediaPlayer.start() unconditionally at the end, which
 * double-started the player and — when prepare() threw IOException — called
 * start() on a player that was never prepared (IllegalStateException).
 */
public void onsurfacecreatedvideo(){
    // Compile and link the shader program used to draw the video quad.
    mProgram = createProgram(mVertexShader, mFragmentShader);
    if (mProgram == 0) {
        return;
    }
    // Resolve attribute/uniform locations; fail fast if the shaders do not
    // declare the expected names.
    maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
    checkGlError("glGetAttribLocation aPosition");
    if (maPositionHandle == -1) {
        throw new RuntimeException("Could not get attrib location for aPosition");
    }
    maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
    checkGlError("glGetAttribLocation aTextureCoord");
    if (maTextureHandle == -1) {
        throw new RuntimeException("Could not get attrib location for aTextureCoord");
    }

    muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
    checkGlError("glGetUniformLocation uMVPMatrix");
    if (muMVPMatrixHandle == -1) {
        throw new RuntimeException("Could not get attrib location for uMVPMatrix");
    }

    muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
    checkGlError("glGetUniformLocation uSTMatrix");
    if (muSTMatrixHandle == -1) {
        throw new RuntimeException("Could not get attrib location for uSTMatrix");
    }

    // Generate the texture that will receive decoded video frames.
    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);

    mTextureID = textures[0];
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
    checkGlError("glBindTexture mTextureID");

    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
            GLES20.GL_NEAREST);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
            GLES20.GL_LINEAR);

    /*
     * Create the SurfaceTexture that will feed this textureID,
     * and pass it to the MediaPlayer.
     */
    mSurface = new SurfaceTexture(mTextureID);
    mSurface.setOnFrameAvailableListener(this);

    Surface surface = new Surface(mSurface);
    mMediaPlayer.setSurface(surface);
    mMediaPlayer.setScreenOnWhilePlaying(true);
    // The MediaPlayer holds its own reference to the surface; release ours.
    surface.release();

    // Start playback only once the player reports it is prepared.
    mMediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
        @Override
        public void onPrepared(MediaPlayer mp) {
            Log.i(TAG,"ONPREPArED abhilash");
            setVideoSize();
            mp.start();
        }
    });

    // No frame has been produced yet for the freshly created SurfaceTexture.
    synchronized(this) {
        updateSurface = false;
    }

    try {
        mMediaPlayer.prepare();
    } catch (IOException t) {
        // Leave the player stopped; do NOT attempt start() on failure.
        Log.e(TAG, "media player prepare failed");
    }
}

/**
 * Draws the current video frame onto a full quad using the external OES
 * texture. Must run on the GL thread. {@code updateSurface} is set by the
 * frame-available callback (on another thread), hence the synchronization.
 */
void ondrawvideo(){
    // Latch the newest decoded frame into the OES texture, if one arrived.
    synchronized(this) {
        if (updateSurface) {
            mSurface.updateTexImage();
            // Texture transform supplied by SurfaceTexture (crop/flip).
            mSurface.getTransformMatrix(mSTMatrix);
            updateSurface = false;
        }
    }

    /////////////
    // Clear to opaque green before drawing the quad.
    GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
    GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);


    GLES20.glUseProgram(mProgram);
    checkGlError("glUseProgram");

    // Bind the video texture to unit 0 (the sampler default).
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);



    // Interleaved vertex buffer: positions then UVs, per-vertex stride.
    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maPosition");
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    checkGlError("glEnableVertexAttribArray maPositionHandle");

    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
    // NOTE(review): size 3 for a texture-coordinate attribute looks odd if the
    // UVs are 2 floats per vertex — verify against the TRIANGLE_VERTICES layout
    // (defined elsewhere in this file); the last vertex may read out of bounds.
    GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maTextureHandle");
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    checkGlError("glEnableVertexAttribArray maTextureHandle");

    // Identity MVP: the quad is drawn in clip space, not anchored in the world.
    Matrix.setIdentityM(mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    checkGlError("glDrawArrays");
    // glFinish blocks until GL completes — expensive; presumably intentional
    // here to keep the frame in sync. TODO confirm it is required.
    GLES20.glFinish();

}


        // Visualize planes.
        mPlaneRenderer.drawPlanes(mSession.getAllPlanes(), frame.getPose(), projmtx);


        // Visualize anchors created by touch.
        float scaleFactor = 1.0f;
        for (PlaneAttachment planeAttachment : mTouches) {
            ondrawvideo();
            if (!planeAttachment.isTracking()) {
                continue;
            }


            // Get the current combined pose of an Anchor and Plane in world space. The Anchor
            // and Plane poses are updated during calls to session.update() as ARCore refines
            // its estimate of the world.
            planeAttachment.getPose().toMatrix(mAnchorMatrix, 0);

            // Update and draw the model and its shadow.
            mVirtualObject.updateModelMatrix(mAnchorMatrix, scaleFactor);
            mVirtualObjectShadow.updateModelMatrix(mAnchorMatrix, scaleFactor);
            mVirtualObject.draw(viewmtx, projmtx, lightIntensity);
            mVirtualObjectShadow.draw(viewmtx, projmtx, lightIntensity);
        }

    } catch (Throwable t) {
        // Avoid crashing the application due to unhandled exceptions.
        Log.e(TAG, "Exception on the OpenGL thread", t);
    }
}

目前我的输出是这样的。当我点击平面(Plane)表面时,它显示如下:

enter image description here

正如你在下图中所看到的,我需要实现这样的效果:视频应当恰好在这个 bugdroid 模型所标记的位置播放,并且不能占满全屏——它应该只占用与 bugdroid 图像相同的大小:

enter image description here

1 个答案:

答案 0 :(得分:4)

我通过创建一个名为MovieClipRenderer的新类来完成此操作 - 该类以HelloAR示例中的ObjectRenderer类为模型。这将创建四边形几何体,并从四边形中的媒体播放器渲染纹理。四边形锚定在一个平面上,因此当用户环顾四周时它不会移动。

为了测试,我使用了以下的股票电影:https://www.videvo.net/video/chicken-on-green-screen/3435/ 并将其添加到src/main/assets

然后我将渲染器的成员变量添加到HelloArActivity

  private final MovieClipRenderer mMovieClipRenderer = new MovieClipRenderer();

在 onSurfaceCreated() 中,我与其他渲染器一起初始化了这个渲染器:
 mMovieClipRenderer.createOnGlThread();

为了进行测试,我稍微修改了命中测试代码,让用户在平面(Plane)上的第一次点击创建一个电影锚点(Anchor):

if (mMovieAnchor == null) {
    mMovieAnchor = hit.createAnchor();
} else {
    mAnchors.add(hit.createAnchor());
}

然后在onDrawFrame()的底部我检查了锚点并开始播放它:

    if (mMovieAnchor != null) {
        // Draw chickens!
        if (!mMovieClipRenderer.isStarted()) {
            mMovieClipRenderer.play("chicken.mp4", this);
        }
        mMovieAnchor.getPose().toMatrix(mAnchorMatrix,0);
        mMovieClipRenderer.update(mAnchorMatrix, 0.25f);
        mMovieClipRenderer.draw(mMovieAnchor.getPose(), viewmtx, projmtx);
    }

渲染类非常长,但它是非常标准的GLES代码,用于创建OES纹理并初始化视频播放器,创建四边形的顶点并加载绘制OES纹理的片段着色器。

/**
 * Renders a movie clip with a green screen aware shader.
 * <p>
 * The quad geometry is anchored to an ARCore plane; the video is decoded by a
 * {@link MediaPlayer} into a {@code GL_TEXTURE_EXTERNAL_OES} texture and keyed
 * against the green background in the fragment shader.
 * <p>
 * Copyright 2018 Google LLC
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
public class MovieClipRenderer implements
     SurfaceTexture.OnFrameAvailableListener {
  private static final String TAG = MovieClipRenderer.class.getSimpleName();

  // Quad geometry
  private static final int COORDS_PER_VERTEX = 3;
  private static final int TEXCOORDS_PER_VERTEX = 2;
  private static final int FLOAT_SIZE = 4;
  // Triangle-strip order: bottom-left, top-left, bottom-right, top-right.
  private static final float[] QUAD_COORDS = new float[]{
          -1.0f, -1.0f, 0.0f,
          -1.0f, +1.0f, 0.0f,
          +1.0f, -1.0f, 0.0f,
          +1.0f, +1.0f, 0.0f,
  };

  private static final float[] QUAD_TEXCOORDS = new float[]{
          0.0f, 1.0f,
          0.0f, 0.0f,
          1.0f, 1.0f,
          1.0f, 0.0f,
  };

  // Shader for a flat quad.
  private static final String VERTEX_SHADER =
      "uniform mat4 u_ModelViewProjection;\n\n" +
      "attribute vec4 a_Position;\n" +
      "attribute vec2 a_TexCoord;\n\n" +
      "varying vec2 v_TexCoord;\n\n" +
      "void main() {\n" +
      "   gl_Position = u_ModelViewProjection * vec4(a_Position.xyz, 1.0);\n" +
      "   v_TexCoord = a_TexCoord;\n" +
      "}";

  // The fragment shader samples the video texture, blending to
  //  transparent for the green screen
  //  color.  The color was determined by sampling a screenshot
  //  of the video in an image editor.
  // NOTE: GLSL ES 1.00 does not allow the 'f' suffix on float literals
  // (e.g. "0.4f"); the suffixed form compiles only on lenient drivers,
  // so plain literals are used here for portability.
  private static final String FRAGMENT_SHADER =
      "#extension GL_OES_EGL_image_external : require\n" +
      "\n" +
      "precision mediump float;\n" +
      "varying vec2 v_TexCoord;\n" +
      "uniform samplerExternalOES sTexture;\n" +
      "\n" +
      "void main() {\n" +
      "    //TODO make this a uniform variable - " +
      " but this is the color of the background. 17ad2b\n" +
      "  vec3 keying_color = vec3(23.0/255.0, 173.0/255.0, 43.0/255.0);\n" +
      "  float thresh = 0.4; // 0 - 1.732\n" +
      "  float slope = 0.2;\n" +
      "  vec3 input_color = texture2D(sTexture, v_TexCoord).rgb;\n" +
      "  float d = abs(length(abs(keying_color.rgb - input_color.rgb)));\n" +
      "  float edge0 = thresh * (1.0 - slope);\n" +
      "  float alpha = smoothstep(edge0,thresh,d);\n" +
      "  gl_FragColor = vec4(input_color, alpha);\n" +
      "}";

  // Geometry data in GLES friendly data structure.
  private FloatBuffer mQuadVertices;
  private FloatBuffer mQuadTexCoord;

  // Shader program id and parameters.
  private int mQuadProgram;
  private int mQuadPositionParam;
  private int mQuadTexCoordParam;
  private int mModelViewProjectionUniform;
  private int mTextureId = -1;

  // Matrix for the location and perspective of the quad.
  private float[] mModelMatrix = new float[16];

  // Media player, texture and other bookkeeping.
  private MediaPlayer player;
  private SurfaceTexture videoTexture;
  private boolean frameAvailable = false;  // set by onFrameAvailable (other thread)
  private boolean started = false;         // play() has been called
  private boolean done;                    // playback finished or errored
  private boolean prepared;                // MediaPlayer reached Prepared state
  private static Handler handler;          // main-thread handler for player creation


  // Lock used for waiting if the player was not yet created.
  private final Object lock = new Object();

  /**
   * Update the model matrix based on the location and scale to draw the quad.
   *
   * @param modelMatrix  anchor pose as a 4x4 column-major matrix
   * @param scaleFactor  uniform scale applied to the unit quad
   */
  public void update(float[] modelMatrix, float scaleFactor) {
    float[] scaleMatrix = new float[16];
    Matrix.setIdentityM(scaleMatrix, 0);
    scaleMatrix[0] = scaleFactor;
    scaleMatrix[5] = scaleFactor;
    scaleMatrix[10] = scaleFactor;
    Matrix.multiplyMM(mModelMatrix, 0, modelMatrix, 0, scaleMatrix, 0);
  }

  /**
   * Initialize the GLES objects.
   * This is called from the GL render thread to make sure
   * it has access to the EGLContext.
   */
  public void createOnGlThread() {

    // 1 texture to hold the video frame.
    int textures[] = new int[1];
    GLES20.glGenTextures(1, textures, 0);
    mTextureId = textures[0];
    int mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
    GLES20.glBindTexture(mTextureTarget, mTextureId);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_WRAP_S,
       GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_WRAP_T,
       GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_MIN_FILTER,
       GLES20.GL_NEAREST);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_MAG_FILTER,
       GLES20.GL_NEAREST);

    // The SurfaceTexture feeds decoded frames into the OES texture above.
    videoTexture = new SurfaceTexture(mTextureId);
    videoTexture.setOnFrameAvailableListener(this);

    // Make a quad to hold the movie
    ByteBuffer bbVertices = ByteBuffer.allocateDirect(
         QUAD_COORDS.length * FLOAT_SIZE);
    bbVertices.order(ByteOrder.nativeOrder());
    mQuadVertices = bbVertices.asFloatBuffer();
    mQuadVertices.put(QUAD_COORDS);
    mQuadVertices.position(0);

    int numVertices = 4;
    ByteBuffer bbTexCoords = ByteBuffer.allocateDirect(
            numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
    bbTexCoords.order(ByteOrder.nativeOrder());
    mQuadTexCoord = bbTexCoords.asFloatBuffer();
    mQuadTexCoord.put(QUAD_TEXCOORDS);
    mQuadTexCoord.position(0);

    int vertexShader = loadGLShader(TAG, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
    int fragmentShader = loadGLShader(TAG,
         GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER);

    mQuadProgram = GLES20.glCreateProgram();
    GLES20.glAttachShader(mQuadProgram, vertexShader);
    GLES20.glAttachShader(mQuadProgram, fragmentShader);
    GLES20.glLinkProgram(mQuadProgram);
    GLES20.glUseProgram(mQuadProgram);

    ShaderUtil.checkGLError(TAG, "Program creation");

    mQuadPositionParam = GLES20.glGetAttribLocation(mQuadProgram, "a_Position");
    mQuadTexCoordParam = GLES20.glGetAttribLocation(mQuadProgram, "a_TexCoord");
    mModelViewProjectionUniform = GLES20.glGetUniformLocation(
            mQuadProgram, "u_ModelViewProjection");

    ShaderUtil.checkGLError(TAG, "Program parameters");

    Matrix.setIdentityM(mModelMatrix, 0);

    initializeMediaPlayer();
  }

  /**
   * Draws the movie quad for the current frame.
   *
   * @param pose              pose of the anchor (kept for API compatibility;
   *                          the matrix set via {@link #update} positions the quad)
   * @param cameraView        camera view matrix
   * @param cameraPerspective camera projection matrix
   */
  public void draw(Pose pose, float[] cameraView, float[] cameraPerspective) {
    if (done || !prepared) {
      return;
    }
    // Latch the newest decoded frame into the OES texture.
    synchronized (this) {
      if (frameAvailable) {
        videoTexture.updateTexImage();
        frameAvailable = false;
      }
    }

    float[] modelView = new float[16];
    float[] modelViewProjection = new float[16];
    Matrix.multiplyMM(modelView, 0, cameraView, 0, mModelMatrix, 0);
    Matrix.multiplyMM(modelViewProjection, 0, cameraPerspective, 0, modelView, 0);

    ShaderUtil.checkGLError(TAG, "Before draw");

    // Blending is required for the green-screen keying (alpha from the shader).
    GLES20.glEnable(GLES20.GL_BLEND);
    GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
    GLES20.glUseProgram(mQuadProgram);

    // Set the vertex positions.
    GLES20.glVertexAttribPointer(
            mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
            false, 0, mQuadVertices);
    // Set the texture coordinates.
    GLES20.glVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
            GLES20.GL_FLOAT, false, 0, mQuadTexCoord);

    // Enable vertex arrays
    GLES20.glEnableVertexAttribArray(mQuadPositionParam);
    GLES20.glEnableVertexAttribArray(mQuadTexCoordParam);
    GLES20.glUniformMatrix4fv(mModelViewProjectionUniform, 1, false,
                              modelViewProjection, 0);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    // Disable vertex arrays and restore blend state so other renderers
    // (planes, objects) are not affected by this draw.
    GLES20.glDisableVertexAttribArray(mQuadPositionParam);
    GLES20.glDisableVertexAttribArray(mQuadTexCoordParam);
    GLES20.glDisable(GLES20.GL_BLEND);

    ShaderUtil.checkGLError(TAG, "Draw");
  }

  /**
   * Creates the MediaPlayer on the main thread (MediaPlayer callbacks need a
   * Looper thread); {@link #play} waits on {@code lock} until it exists.
   */
  private void initializeMediaPlayer() {
    if (handler == null)
      handler = new Handler(Looper.getMainLooper());

    handler.post(new Runnable() {
      @Override
      public void run() {
        synchronized (lock) {
          player = new MediaPlayer();
          // Wake any thread blocked in play() waiting for the player.
          lock.notifyAll();
        }
      }
    });
  }

  @Override
  public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    // Called on an arbitrary thread; just flag the frame for the GL thread.
    synchronized (this) {
      frameAvailable = true;
    }
  }

  /**
   * Starts asynchronous playback of a movie from the app's assets.
   *
   * @param filename asset-relative path of the movie file
   * @param context  context used to open the asset
   * @return true if playback preparation was started, false on failure
   */
  public boolean play(final String filename, Context context)
                     throws FileNotFoundException {
    // Wait for the player to be created.
    if (player == null) {
      synchronized (lock) {
        while (player == null) {
          try {
            lock.wait();
          } catch (InterruptedException e) {
            return false;
          }
        }
      }
    }

    player.reset();
    done = false;

    player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
      @Override
      public void onPrepared(MediaPlayer mp) {
        prepared = true;
        mp.start();
      }
    });
    player.setOnErrorListener(new MediaPlayer.OnErrorListener() {
      @Override
      public boolean onError(MediaPlayer mp, int what, int extra) {
        done = true;
        Log.e("VideoPlayer",
            String.format("Error occured: %d, %d\n", what, extra));
        return false;
      }
    });

    player.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
      @Override
      public void onCompletion(MediaPlayer mp) {
        done = true;
      }
    });

    player.setOnInfoListener(new MediaPlayer.OnInfoListener() {
      @Override
      public boolean onInfo(MediaPlayer mediaPlayer, int i, int i1) {
        return false;
      }
    });

    try {
      AssetManager assets = context.getAssets();
      AssetFileDescriptor descriptor = assets.openFd(filename);
      player.setDataSource(descriptor.getFileDescriptor(),
                           descriptor.getStartOffset(),
                           descriptor.getLength());
      player.setSurface(new Surface(videoTexture));
      // Prepare off the caller's thread; playback starts in onPrepared.
      player.prepareAsync();
      synchronized (this) {
        started = true;
      }
    } catch (IOException e) {
      Log.e(TAG, "Exception preparing movie", e);
      return false;
    }

    return true;
  }

  /** Returns true once {@link #play} has successfully begun preparation. */
  public synchronized boolean isStarted() {
    return started;
  }

  /**
   * Compiles a shader, throwing if compilation fails.
   *
   * @throws RuntimeException if the shader does not compile
   */
  static int loadGLShader(String tag, int type, String code) {
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, code);
    GLES20.glCompileShader(shader);

    // Get the compilation status.
    final int[] compileStatus = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

    // If the compilation failed, delete the shader.
    if (compileStatus[0] == 0) {
      Log.e(tag, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
      GLES20.glDeleteShader(shader);
      shader = 0;
    }

    if (shader == 0) {
      throw new RuntimeException("Error creating shader.");
    }

    return shader;
  }
}