Goal: I need to get a new buffer every 32 milliseconds.
I took this sample as the base project:
https://github.com/google-ar/arcore-android-sdk/tree/master/samples/hello_ar_java
I made changes to the project because I need to update the AR object data every 32 milliseconds.
So in my view I have this method:
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config)
{
    GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f);

    // Prepare the rendering objects. This involves reading shaders, so may throw an IOException.
    try
    {
        // Create the texture and pass it to ARCore session to be filled during update().
        backgroundRenderer.createOnGlThread(/*context=*/ this);
        planeRenderer.createOnGlThread(/*context=*/ this, "models/trigrid.png");
        pointCloudRenderer.createOnGlThread(/*context=*/ this);

        virtualObject.setObjAssetName("models/andy.obj");
        virtualObject.setDiffuseTextureAssetName("models/andy.png");
        virtualObject.setContext(this);
        virtualObject.setMaterialProperties(0.0f, 2.0f, 0.5f, 6.0f);
        virtualObject.configure();
    }
    catch (IOException e)
    {
        Log.e(TAG, "Failed to read an asset file", e);
    }
}
and this one:
@Override
public void onDrawFrame(GL10 gl)
{
    // Clear screen to notify driver it should not load any pixels from previous frame.
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    if (session == null)
    {
        return;
    }

    // Notify ARCore session that the view size changed so that the perspective matrix and
    // the video background can be properly adjusted.
    displayRotationHelper.updateSessionIfNeeded(session);

    try
    {
        session.setCameraTextureName(backgroundRenderer.getTextureId());

        // Obtain the current frame from ARSession. When the configuration is set to
        // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
        // camera framerate.
        Frame frame = session.update();
        Camera camera = frame.getCamera();

        // Handle one tap per frame.
        handleTap(frame, camera);

        // If frame is ready, render camera preview image to the GL surface.
        backgroundRenderer.draw(frame);

        // If not tracking, don't draw 3D objects, show tracking failure reason instead.
        if (camera.getTrackingState() == TrackingState.PAUSED)
        {
            messageSnackbarHelper.showMessage(this, TrackingStateHelper.getTrackingFailureReasonString(camera));
            return;
        }

        // Get projection matrix.
        float[] projmtx = new float[16];
        camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

        // Get camera matrix and draw.
        float[] viewmtx = new float[16];
        camera.getViewMatrix(viewmtx, 0);

        // Compute lighting from average intensity of the image.
        // The first three components are color scaling factors.
        // The last one is the average pixel intensity in gamma space.
        final float[] colorCorrectionRgba = new float[4];
        frame.getLightEstimate().getColorCorrection(colorCorrectionRgba, 0);

        // Visualize tracked points.
        // Use try-with-resources to automatically release the point cloud.
        try (PointCloud pointCloud = frame.acquirePointCloud())
        {
            pointCloudRenderer.update(pointCloud);
            pointCloudRenderer.draw(viewmtx, projmtx);
        }

        // No tracking error at this point. If we detected any plane, then hide the
        // message UI, otherwise show searchingPlane message.
        if (hasTrackingPlane())
        {
            messageSnackbarHelper.hide(this);
        }
        else
        {
            messageSnackbarHelper.showMessage(this, SEARCHING_PLANE_MESSAGE);
        }

        // Visualize planes.
        planeRenderer.drawPlanes(session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);

        // Visualize anchors created by touch.
        float scaleFactor = 1.0f;
        for (ColoredAnchor coloredAnchor : anchors)
        {
            if (coloredAnchor.anchor.getTrackingState() != TrackingState.TRACKING)
            {
                continue;
            }

            // Get the current pose of an Anchor in world space. The Anchor pose is updated
            // during calls to session.update() as ARCore refines its estimate of the world.
            coloredAnchor.anchor.getPose().toMatrix(anchorMatrix, 0);

            // Update and draw the model and its shadow.
            virtualObject.updateModelMatrix(anchorMatrix, scaleFactor);
            virtualObject.draw(viewmtx, projmtx, colorCorrectionRgba, coloredAnchor.color);
        }
    }
    catch (Throwable t)
    {
        // Avoid crashing the application due to unhandled exceptions.
        Log.e(TAG, "Exception on the OpenGL thread", t);
    }
}
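Note the comment above about UpdateMode.BLOCKING: with the default configuration, session.update() throttles onDrawFrame() to the camera frame rate (roughly 30 fps, i.e. about 33 ms per frame). As a side note, a minimal sketch of switching the session to the non-blocking mode via the standard ARCore Config API (this is my illustration, not code from my project):

// Sketch: make session.update() return immediately with the latest camera
// image instead of blocking until a new camera frame is available.
Config config = new Config(session);
config.setUpdateMode(Config.UpdateMode.LATEST_CAMERA_IMAGE);
session.configure(config);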
But that is not the most interesting part. In ObjectRenderer we have the method configure(), which we call once from the view's onSurfaceCreated(); here we just set up the object:

public void configure() throws IOException
{
    vertexShader = ShaderUtil.loadGLShader(TAG, mContext, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_NAME);
    fragmentShader = ShaderUtil.loadGLShader(TAG, mContext, GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_NAME);

    program = GLES20.glCreateProgram();
    GLES20.glAttachShader(program, vertexShader);
    GLES20.glAttachShader(program, fragmentShader);
    GLES20.glLinkProgram(program);
    GLES20.glUseProgram(program);
    ShaderUtil.checkGLError(TAG, "Program creation");

    modelViewUniform = GLES20.glGetUniformLocation(program, "u_ModelView");
    modelViewProjectionUniform = GLES20.glGetUniformLocation(program, "u_ModelViewProjection");
    positionAttribute = GLES20.glGetAttribLocation(program, "a_Position");
    normalAttribute = GLES20.glGetAttribLocation(program, "a_Normal");
    texCoordAttribute = GLES20.glGetAttribLocation(program, "a_TexCoord");
    textureUniform = GLES20.glGetUniformLocation(program, "u_Texture");
    lightingParametersUniform = GLES20.glGetUniformLocation(program, "u_LightingParameters");
    materialParametersUniform = GLES20.glGetUniformLocation(program, "u_MaterialParameters");
    colorCorrectionParameterUniform = GLES20.glGetUniformLocation(program, "u_ColorCorrectionParameters");
    colorUniform = GLES20.glGetUniformLocation(program, "u_ObjColor");
    ShaderUtil.checkGLError(TAG, "Program parameters");

    // Read the texture.
    Bitmap textureBitmap = BitmapFactory.decodeStream(mContext.getAssets().open(mDiffuseTextureAssetName));
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glGenTextures(textures.length, textures, 0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, textureBitmap, 0);
    GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
    textureBitmap.recycle();
    ShaderUtil.checkGLError(TAG, "Texture loading");

    // Read the obj file.
    InputStream objInputStream = mContext.getAssets().open(mObjAssetName);
    obj = ObjReader.read(objInputStream);

    // Prepare the Obj so that its structure is suitable for
    // rendering with OpenGL:
    // 1. Triangulate it
    // 2. Make sure that texture coordinates are not ambiguous
    // 3. Make sure that normals are not ambiguous
    // 4. Convert it to single-indexed data
    obj = ObjUtils.convertToRenderable(obj);
}
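One thing configure() does not do is check the result of glLinkProgram(); if linking fails, the later glGetUniformLocation() calls silently return -1. A small hedged addition using standard GLES20 calls (my suggestion, not part of the sample):

// Optional sanity check right after GLES20.glLinkProgram(program):
final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] == GLES20.GL_FALSE)
{
    Log.e(TAG, "Program link failed: " + GLES20.glGetProgramInfoLog(program));
    GLES20.glDeleteProgram(program);
}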
We also have the method obtainARObjectData():

public void obtainARObjectData()
{
    // OpenGL does not use Java arrays. ByteBuffers are used instead to provide data in a format
    // that OpenGL understands.

    // Obtain the data from the OBJ, as direct buffers:
    IntBuffer wideIndices = ObjData.getFaceVertexIndices(obj, 3);
    FloatBuffer vertices = ObjData.getVertices(obj);
    FloatBuffer texCoords = ObjData.getTexCoords(obj, 2);
    // FloatBuffer normals = ObjData.getNormals(obj);

    // Convert int indices to shorts for GL ES 2.0 compatibility
    ShortBuffer indices = ByteBuffer.allocateDirect(2 * wideIndices.limit()).order(ByteOrder.nativeOrder()).asShortBuffer();
    while (wideIndices.hasRemaining())
    {
        indices.put((short) wideIndices.get());
    }
    indices.rewind();

    int[] buffers = new int[2];
    GLES20.glGenBuffers(2, buffers, 0);
    vertexBufferId = buffers[0];
    indexBufferId = buffers[1];

    // Load vertex buffer
    verticesBaseAddress = 0;
    texCoordsBaseAddress = verticesBaseAddress + 4 * vertices.limit();
    normalsBaseAddress = texCoordsBaseAddress + 4 * texCoords.limit();
    final int totalBytes = normalsBaseAddress; // + 4 * normals.limit();

    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId);
    GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, totalBytes, null, GLES20.GL_STATIC_DRAW);
    GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, verticesBaseAddress, 4 * vertices.limit(), vertices);
    GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, texCoordsBaseAddress, 4 * texCoords.limit(), texCoords);
    // GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, normalsBaseAddress, 4 * normals.limit(), normals);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

    // Load index buffer
    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, indexBufferId);
    indexCount = indices.limit();
    GLES20.glBufferData(GLES20.GL_ELEMENT_ARRAY_BUFFER, 2 * indexCount, indices, GLES20.GL_STATIC_DRAW);
    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0);
    ShaderUtil.checkGLError(TAG, "OBJ buffer load");

    Matrix.setIdentityM(modelMatrix, 0);
}
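Note that every call to obtainARObjectData() runs glGenBuffers() again, so each refresh allocates a brand-new pair of buffer objects and the old ones are never deleted. A minimal sketch of reusing the IDs instead, assuming the vertexBufferId/indexBufferId fields above and that the data size does not change between refreshes (my illustration, not the sample's code):

// Sketch: create the buffer objects once, then refill the existing store.
if (vertexBufferId == 0) // hypothetical convention: 0 means "not created yet"
{
    int[] buffers = new int[2];
    GLES20.glGenBuffers(2, buffers, 0);
    vertexBufferId = buffers[0];
    indexBufferId = buffers[1];
    // Allocate the data store once here, e.g. glBufferData(..., totalBytes, null, ...).
}
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId);
// glBufferSubData overwrites the existing store without reallocating it.
GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, verticesBaseAddress, 4 * vertices.limit(), vertices);
GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, texCoordsBaseAddress, 4 * texCoords.limit(), texCoords);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);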
So, finally, we have the most interesting method, draw():

public void draw(float[] cameraView, float[] cameraPerspective, float[] colorCorrectionRgba, float[] objColor)
{
    requestRefreshARData();
    ShaderUtil.checkGLError(TAG, "Before draw");

    // Build the ModelView and ModelViewProjection matrices
    // for calculating object position and light.
    Matrix.multiplyMM(modelViewMatrix, 0, cameraView, 0, modelMatrix, 0);
    Matrix.multiplyMM(modelViewProjectionMatrix, 0, cameraPerspective, 0, modelViewMatrix, 0);

    GLES20.glUseProgram(program);
    ShaderUtil.checkGLError(TAG, "Before draw");

    // Set the lighting environment properties.
    GLES20.glUniform4fv(colorCorrectionParameterUniform, 1, colorCorrectionRgba, 0);

    // Set the object color property.
    GLES20.glUniform4fv(colorUniform, 1, objColor, 0);

    // Set the object material properties.
    GLES20.glUniform4f(materialParametersUniform, ambient, diffuse, specular, specularPower);

    // Attach the object texture.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
    GLES20.glUniform1i(textureUniform, 0);

    // Set the vertex attributes.
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId);
    GLES20.glVertexAttribPointer(positionAttribute, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, verticesBaseAddress);
    GLES20.glVertexAttribPointer(normalAttribute, 3, GLES20.GL_FLOAT, false, 0, normalsBaseAddress);
    GLES20.glVertexAttribPointer(texCoordAttribute, 2, GLES20.GL_FLOAT, false, 0, texCoordsBaseAddress);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

    // Set the ModelView and ModelViewProjection matrices in the shader.
    GLES20.glUniformMatrix4fv(modelViewUniform, 1, false, modelViewMatrix, 0);
    GLES20.glUniformMatrix4fv(modelViewProjectionUniform, 1, false, modelViewProjectionMatrix, 0);

    // Enable vertex arrays
    GLES20.glEnableVertexAttribArray(positionAttribute);
    GLES20.glEnableVertexAttribArray(normalAttribute);
    GLES20.glEnableVertexAttribArray(texCoordAttribute);

    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, indexBufferId);
    GLES20.glDrawElements(GLES20.GL_TRIANGLES, indexCount, GLES20.GL_UNSIGNED_SHORT, 0);
    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0);
    ShaderUtil.checkGLError(TAG, "After draw");
}
and the method requestRefreshARData(), which we call from draw() above. If I implement this method like this:
private boolean mFlag = true;

private void requestRefreshARData()
{
    if (mFlag)
    {
        obtainARObjectData();
        mFlag = false;
    }
}
everything works fine, but then the object data is refreshed only once, on the first call of draw(), while I need it to be updated every 32 milliseconds. So, just for test purposes, I changed this method so that obtainARObjectData() is triggered every 32 milliseconds instead of only once, and now I get this result: the object flickers on screen.
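For reference, a minimal sketch of the kind of 32 ms trigger I mean; surfaceView, the Handler field, and the delay constant are illustrative names, not my exact test code. GL calls must stay on the GL thread, hence GLSurfaceView.queueEvent():

// Hypothetical sketch: re-run obtainARObjectData() on the GL thread every 32 ms.
private final Handler refreshHandler = new Handler(Looper.getMainLooper());
private final Runnable refreshTask = new Runnable()
{
    @Override
    public void run()
    {
        // queueEvent() hands the work to the GLSurfaceView's render thread,
        // where the OpenGL context is current.
        surfaceView.queueEvent(() -> obtainARObjectData());
        refreshHandler.postDelayed(this, 32);
    }
};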
What did I expect? I expected my AR object to be updated every 32 milliseconds (I know that for now the object is updated with the same data; I will swap in a different data set later on), and that the user would see it on screen without any flickering.
I googled a lot and learned that updating the data from another thread is not a good solution because of the OpenGL context... and that OpenGL has a single buffer, so now, when I try to update the buffer and draw the object on screen at the same time, I get this flickering effect (because the buffer update is heavy for OpenGL).
What am I doing wrong? Why is the object flickering?
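Regarding the single-buffer point above: one common pattern to avoid drawing from a buffer that is currently being refilled is to keep two vertex buffer objects and ping-pong between them. A minimal hedged sketch with illustrative names (vbos, readIndex), not code from my project:

// Sketch: ping-pong between two VBOs so draw() never sources vertex data
// from the buffer that is currently being overwritten.
private final int[] vbos = new int[2]; // created once with GLES20.glGenBuffers(2, vbos, 0)
private int readIndex = 0;             // draw() binds vbos[readIndex]

private void refreshBackBuffer(FloatBuffer vertices)
{
    int writeIndex = 1 - readIndex;
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbos[writeIndex]);
    GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, 4 * vertices.limit(), vertices, GLES20.GL_DYNAMIC_DRAW);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
    readIndex = writeIndex; // the next draw() picks up the freshly filled buffer
}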