我是OpenGL和ARCore的新手,我使用Google ARCore Sample作为创建应用程序的基础。我能够对3D对象进行放大和缩小,但我无法用另一个3D对象替换当前的3D对象。在加载新对象之前,我已经完成了平面检测。
基本上,我想重新加载onSurfaceCreated(GL10 gl, EGLConfig config)
但我不知道怎么做。
这是我的代码
public class MyARActivity extends BaseActivity<MyActivityArBinding> implements GLSurfaceView.Renderer {
    // FIX: the original tag was copied from DashboardActivity, so every log line from
    // this activity was attributed to the wrong class.
    private static final String TAG = MyARActivity.class.getSimpleName();

    // --- AR / GL state ---
    private int mWidth;                      // GL surface width in px (set in onSurfaceChanged)
    private int mHeight;                     // GL surface height in px
    private boolean capturePicture = false;  // raised by onCaptureClick, consumed in onDrawFrame
    private boolean installRequested;        // true once the ARCore install flow has been started
    private boolean moving;                  // finger currently down (set by the touch listener)
    float[] projmtx = new float[16];         // camera projection matrix, refreshed every frame
    float[] viewmtx = new float[16];         // camera view matrix, refreshed every frame
    private Session session;                 // ARCore session; null until onResume succeeds
    private GestureDetector gestureDetector;
    private Snackbar messageSnackbar;        // "searching for surfaces" message; null when hidden
    private DisplayRotationHelper displayRotationHelper;
    private final BackgroundRenderer backgroundRenderer = new BackgroundRenderer();
    private ObjectRenderer virtualObject;        // current 3D model; created on the GL thread
    private ObjectRenderer virtualObjectShadow;  // optional shadow model (never assigned in this file)
    private final PlaneRenderer planeRenderer = new PlaneRenderer();
    private PointCloudRenderer pointCloud = new PointCloudRenderer();
    private ScaleGestureDetector scaleGestureDetector;
    private MyScaleGestureDetector_1 myScaleGestureDetector;
    // Temporary matrix allocated here to reduce number of allocations for each frame.
    private float[] anchorMatrix = new float[16];
    // Tap handling and UI.
    private ArrayBlockingQueue<MotionEvent> queuedSingleTaps = new ArrayBlockingQueue<>(16);
    private ArrayList<Anchor> anchors = new ArrayList<>();  // at most one anchor is kept at a time
    private int[] m3DCharacters = new int[]{R.drawable.cat, R.drawable.old_man, R.drawable.bat};
    private SQLiteHelper sqlHelper;
    private boolean isUpdate;  // true once the user has swapped the model for another one
    /** Standard Activity entry point: hides the shared header and bootstraps the AR view. */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setHeaderVisible(false);  // full-screen AR: the common BaseActivity header is not wanted
        doDefaults();
    }
    /** One-time initialisation: binds the layout presenter, grabs the DB helper, sets up AR. */
    private void doDefaults() {
        binding.setPresenter(this);
        sqlHelper = SQLiteHelper.getInstance(this);
        initAR();
    }
    /**
     * Wires gesture handling (tap to place, pinch to scale) and configures the
     * GLSurfaceView this activity renders into.
     */
    @SuppressLint("ClickableViewAccessibility")
    private void initAR() {
        displayRotationHelper = new DisplayRotationHelper(this);
        myScaleGestureDetector = new MyScaleGestureDetector_1();
        scaleGestureDetector = new ScaleGestureDetector(this, myScaleGestureDetector);
        // Set up tap listener.
        gestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener() {
            @Override
            public boolean onSingleTapUp(MotionEvent e) {
                // Only queue a placement tap while no model is anchored yet; later
                // touches are routed through onSecondTouch() in the listener below.
                if (anchors.size() <= 0) {
                    onSingleTap(e);
                }
                return true;
            }

            @Override
            public boolean onDown(MotionEvent e) {
                return true;
            }
        });
        binding.surfaceView.setOnTouchListener(
                new View.OnTouchListener() {
                    @Override
                    public boolean onTouch(View v, MotionEvent event) {
                        switch (event.getAction()) {
                            case MotionEvent.ACTION_DOWN:
                                moving = true;
                                DebugHelper.log("ACTION_DOWN");
                                break;
                            case MotionEvent.ACTION_UP:
                                DebugHelper.log("ACTION_UP");
                                moving = false;
                                DebugHelper.log("SF at ACTION_UP::::", String.valueOf(myScaleGestureDetector.getScaleFactor()));
                                break;
                            case MotionEvent.ACTION_MOVE:
                                // Once a model is placed, drags either re-place it
                                // (single finger) or scale it (multi-touch).
                                if (anchors.size() > 0) {
                                    onSecondTouch(event);
                                }
                                break;
                        }
                        return gestureDetector.onTouchEvent(event);
                    }
                });
        // Set up renderer.
        binding.surfaceView.setPreserveEGLContextOnPause(true);
        binding.surfaceView.setEGLContextClientVersion(2);  // shaders target GLES 2.0
        binding.surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
        binding.surfaceView.setRenderer(this);
        binding.surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
        installRequested = false;
    }
    /**
     * Reads back the current GL frame, converts it to an Android bitmap, stamps a
     * watermark and stores the result. Uses glReadPixels, so it must run on the GL
     * thread -- which holds, because it is only called from onDrawFrame().
     */
    private void onSavePicture() {
        setLoading(true, "Capturing image...");
        Log.e("SavePicture Called", "Yes");
        int pixelData[] = new int[mWidth * mHeight];
        // Read the pixels from the current GL frame.
        IntBuffer buf = IntBuffer.wrap(pixelData);
        buf.position(0);
        GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
        // Convert the pixel data from RGBA byte order to the ARGB ints Android wants,
        // and flip vertically (GL rows start at the bottom, Bitmap rows at the top).
        int bitmapData[] = new int[pixelData.length];
        for (int i = 0; i < mHeight; i++) {
            for (int j = 0; j < mWidth; j++) {
                int p = pixelData[i * mWidth + j];
                int b = (p & 0x00ff0000) >> 16;  // channel at bits 16-23 moves down to 0-7
                int r = (p & 0x000000ff) << 16;  // channel at bits 0-7 moves up to 16-23
                int ga = p & 0xff00ff00;         // alpha and green keep their positions
                bitmapData[(mHeight - i - 1) * mWidth + j] = ga | r | b;  // vertical flip
            }
        }
        // Create a bitmap.
        Bitmap capturedBitmap = Bitmap.createBitmap(bitmapData, mWidth, mHeight, Bitmap.Config.ARGB_8888);
        Bitmap waterBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.watermark_camar);
        Bitmap waterMarkBitmap = ImageUtil.getInstance().createWaterMaskRightBottom(this, capturedBitmap, waterBitmap, 10, 5);
        sqlHelper.saveToInternalStorage(this, waterMarkBitmap);
        setLoading(false, "");
    }
/**
* To capture the current AR Frame
*/
public void onCaptureClick() {
if (!ExternalPermissionHelper.hasExtStoragePermission(MyARActivity.this)) {
ExternalPermissionHelper.requestExtStoragePermission(MyARActivity.this);
return;
}
capturePicture = true;
}
    /** Opens the horizontal character-selection strip (rebuilt on every click). */
    public void onCharClick() {
        createCharacterScrollView();
    }
    /** Closes the entire AR view by finishing the activity. */
    public void closeScreen() {
        finish();
    }
    /**
     * Rebuilds the horizontal character picker. Each cell shows one drawable from
     * m3DCharacters and, when clicked, loads the matching 3D model together with
     * its per-model scale-gesture limits.
     */
    private void createCharacterScrollView() {
        try {
            binding.LinearTopSlider.setVisibility(View.VISIBLE);
            LayoutInflater inflater = LayoutInflater.from(this);
            // Start from a clean, fully scrolled-back list every time.
            binding.charContainer.removeAllViews();
            binding.horizontalChar.scrollTo(0, 0);
            for (int i = 0; i < m3DCharacters.length; i++) {
                View cell = inflater.inflate(R.layout.item_character, null);
                ImageView imgGroup = cell.findViewById(R.id.imgChar);
                View view = cell.findViewById(R.id.view);
                //String name = "cartoon" + (i + 1);
                imgGroup.setImageResource(m3DCharacters[i]); //getResourceId(name)
                // Divider view is hidden after the last cell.
                view.setVisibility(i < m3DCharacters.length - 1 ? View.VISIBLE : View.GONE);
                binding.charContainer.addView(cell);
                cell.setTag(i);
                cell.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        final int position = (int) view.getTag();
                        // Arguments: scale factor, pinch min, pinch max, asset-relative .obj path.
                        switch (position) {
                            case 0:
                                loadObject(0.28f, 3.5f, 0.28f, "cat/cat.obj");
                                break;
                            case 1:
                                loadObject(0.0085f, 5f, 1f, "man/muro.obj");
                                break;
                            case 2:
                                loadObject(0.0009f, 2.75f, 0.25f, "bloodwing/bloodwing.obj");
                                break;
                        }
                    }
                });
            }
            binding.charContainer.setVisibility(View.VISIBLE);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
private int getResourceId(String drawableName) {
return getResources().getIdentifier(drawableName, "drawable", getPackageName());
}
private void onSecondTouch(MotionEvent e) {
Log.e("Second Touch", "Executed");
if (e.getPointerCount() > 1) {
scaleGestureDetector.onTouchEvent(e);
} else {
queuedSingleTaps.offer(e);
}
}
    /**
     * Creates the ARCore session on first resume (driving the install and camera
     * permission flows) and then resumes session, GL surface and rotation helper
     * in that order.
     */
    @Override
    protected void onResume() {
        super.onResume();
        if (session == null) {
            Exception exception = null;
            String message = null;
            try {
                switch (ArCoreApk.getInstance().requestInstall(this, !installRequested)) {
                    case INSTALL_REQUESTED:
                        // ARCore is being installed; this activity resumes again afterwards.
                        installRequested = true;
                        return;
                    case INSTALLED:
                        break;
                }
                // ARCore requires camera permissions to operate. If we did not yet obtain runtime
                // permission on Android M and above, now is a good time to ask the user for it.
                if (!CameraPermissionHelper.hasCameraPermission(this)) {
                    CameraPermissionHelper.requestCameraPermission(this);
                    return;
                }
                session = new Session(/* context= */ this);
            } catch (UnavailableArcoreNotInstalledException
                    | UnavailableUserDeclinedInstallationException e) {
                message = "Please install ARCore";
                exception = e;
            } catch (UnavailableApkTooOldException e) {
                message = "Please update ARCore";
                exception = e;
            } catch (UnavailableSdkTooOldException e) {
                message = "Please update this app";
                exception = e;
            } catch (Exception e) {
                message = "This device does not support AR";
                exception = e;
            }
            if (message != null) {
                showMsg(message);
                Log.e(TAG, "Exception creating session", exception);
                return;
            }
            // Create default config and check if supported.
            // NOTE(review): execution falls through to configure() even when
            // isSupported() returned false -- confirm this is intended.
            Config config = new Config(session);
            if (!session.isSupported(config)) {
                showMsg("This device does not support AR");
            }
            session.configure(config);
        }
        showLoadingMessage();
        // Note that order matters - see the note in onPause(), the reverse applies here.
        session.resume();
        binding.surfaceView.onResume();
        displayRotationHelper.onResume();
    }
    /** Pauses rotation helper, GL surface and ARCore session -- in that exact order. */
    @Override
    public void onPause() {
        super.onPause();
        if (session != null) {
            // Note that the order matters - GLSurfaceView is paused first so that it does not try
            // to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may
            // still call session.update() and get a SessionPausedException.
            displayRotationHelper.onPause();
            binding.surfaceView.onPause();
            session.pause();
        }
    }
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] results) {
switch (requestCode) {
case CameraPermissionHelper.CAMERA_PERMISSION_CODE:
if (!CameraPermissionHelper.hasCameraPermission(this)) {
showMsg("Camera permission is needed to run this application");
if (!CameraPermissionHelper.shouldShowRequestPermissionRationale(this)) {
// Permission denied with checking "Do not ask again".
CameraPermissionHelper.launchPermissionSettings(this);
}
finish();
}
break;
case ExternalPermissionHelper.EXT_STORAGE_PERMISSION_CODE:
if (!ExternalPermissionHelper.hasExtStoragePermission(this)) {
showMsg("External storage permission is needed to capture the photo");
if (!ExternalPermissionHelper.shouldShowRequestPermissionRationale(this)) {
// Permission denied with checking "Do not ask again".
ExternalPermissionHelper.launchPermissionSettings(this);
}
finish();
}
break;
}
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
if (hasFocus) {
// Standard Android full-screen functionality.
getWindow()
.getDecorView()
.setSystemUiVisibility(
View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
}
    /** Queues a placement tap for the GL thread. The tap is lost if the 16-slot queue is full. */
    private void onSingleTap(MotionEvent e) {
        DebugHelper.log("onSingleTap()");
        queuedSingleTaps.offer(e);
    }
private void loadCatObject() {
try {
myScaleGestureDetector.setScaleFactor(0.25f);
myScaleGestureDetector.setMinValue(3.5f);
myScaleGestureDetector.setMaxValue(0.25f);
virtualObject = new ObjectRenderer("cat/cat.obj");
//virtualObject.createOnGlThread(this, "cat/cat.obj", "cat/cat.png");
virtualObject.createOnGlThread(this);
virtualObject.setMaterialProperties(0.0f, 1.0f, 1.0f, 6.0f);
} catch (IOException ex) {
ex.printStackTrace();
}
}
private void loadBloodwingObject() {
try {
myScaleGestureDetector.setScaleFactor(0.0009f);
myScaleGestureDetector.setMinValue(2.75f);
myScaleGestureDetector.setMaxValue(0.25f);
virtualObject = new ObjectRenderer("bloodwing/bloodwing.obj");
virtualObject.createOnGlThread(this);
//virtualObject.createOnGlThread(this, "bloodwing/bloodwing.obj", "bloodwing/bloodwing.jpg");
virtualObject.setMaterialProperties(0.0f, 1.0f, 1.0f, 6.0f);
} catch (IOException ex) {
ex.printStackTrace();
}
}
    /**
     * Loads the "muro" man model with its model-specific scale limits.
     * NOTE(review): duplicates loadObject(0.0085f, 5f, 1f, "man/muro.obj") except that
     * this variant finishes the activity on any failure -- consider delegating.
     * NOTE(review): createOnGlThread() issues GL calls and is only valid when this
     * method runs on the GL thread -- confirm the caller.
     */
    private void loadMan() {
        try {
            myScaleGestureDetector.setScaleFactor(0.0085f);
            myScaleGestureDetector.setMinValue(5f);
            myScaleGestureDetector.setMaxValue(1f);
            virtualObject = new ObjectRenderer("man/muro.obj");
            virtualObject.createOnGlThread(this);
            virtualObject.setMaterialProperties(0.0f, 1.0f, 1.0f, 6.0f);
        } catch (Exception ex) {
            ex.printStackTrace();
            finish();  // abort the AR screen when the model cannot be loaded
        }
    }
private void loadObject(float scaleFactor, float minValue, float maxValue, String objectPath) {
try {
myScaleGestureDetector.setScaleFactor(scaleFactor);
myScaleGestureDetector.setMinValue(minValue);
myScaleGestureDetector.setMaxValue(maxValue);
if (virtualObject == null) {
virtualObject = new ObjectRenderer(objectPath);
virtualObject.createOnGlThread(this);
virtualObject.setMaterialProperties(0.0f, 1.0f, 1.0f, 6.0f);
} else {
isUpdate = true;
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
virtualObject.updateObjectPath(objectPath);
virtualObject.createOnGlThread(this);
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
    /**
     * GL-context creation callback: sets the clear colour and builds all renderers.
     * Runs on the GL thread, so GL resource creation is safe here.
     */
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
        // Create the texture and pass it to ARCore session to be filled during update().
        backgroundRenderer.createOnGlThread(/*context=*/ this);
        // Prepare the other rendering objects; the cat model is the default.
        // NOTE(review): scale factor 0.40f differs from the 0.28f used when the cat
        // is picked from the character list -- confirm which value is intended.
        loadObject(0.40f, 3.5f, 0.28f, "cat/cat.obj");
        try {
            planeRenderer.createOnGlThread(/*context=*/ this, "trigrid.png");
        } catch (IOException e) {
            Log.e(TAG, "Failed to read plane texture");
        }
        pointCloud.createOnGlThread(/*context=*/ this);
    }
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
displayRotationHelper.onSurfaceChanged(width, height);
GLES20.glViewport(0, 0, width, height);
mWidth = width;
mHeight = height;
}
@Override
public void onDrawFrame(GL10 gl) {
// Clear screen to notify driver it should not load any pixels from previous frame.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
if (session == null) {
return;
}
// Notify ARCore session that the view size changed so that the perspective matrix and
// the video background can be properly adjusted.
displayRotationHelper.updateSessionIfNeeded(session);
try {
session.setCameraTextureName(backgroundRenderer.getTextureId());
// Obtain the current frame from ARSession. When the configuration is set to
// UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
// camera framerate.
Frame frame = session.update();
Camera camera = frame.getCamera();
// Handle taps. Handling only one tap per frame, as taps are usually low frequency
// compared to frame rate.
MotionEvent tap = queuedSingleTaps.poll();
if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
for (HitResult hit : frame.hitTest(tap)) {
// Check if any plane was hit, and if it was hit inside the plane polygon
Trackable trackable = hit.getTrackable();
// Creates an anchor if a plane or an oriented point was hit.
if ((trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose()))
|| (trackable instanceof Point
&& ((Point) trackable).getOrientationMode()
== Point.OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
// Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
// Cap the number of objects created. This avoids overloading both the
// rendering system and ARCore.
//if (!isUpdate) {
DebugHelper.log("Anchor size = " + anchors.size());
if (anchors.size() >= 1) {
anchors.get(0).detach();
anchors.remove(0);
}
// Adding an Anchor tells ARCore that it should track this position in
// space. This anchor is created on the Plane to place the 3D model
// in the correct position relative both to the world and to the plane.
if (isUpdate) {
if (anchors.size() > 0) {
DebugHelper.log("anchor list has data");
for (Anchor anchor : anchors) {
anchor.detach();
anchors.remove(anchor);
}
}
}
Anchor anchor = hit.createAnchor();
if (anchor != null)
anchors.add(anchor);
else
DebugHelper.log("anchor is null");
//}
break;
}
}
}
// Draw background.
backgroundRenderer.draw(frame);
// If not tracking, don't draw 3d objects.
if (camera.getTrackingState() == TrackingState.PAUSED) {
return;
}
// Get projection matrix.
camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);
// Get camera matrix and draw.
camera.getViewMatrix(viewmtx, 0);
// Compute lighting from average intensity of the image.
final float lightIntensity = frame.getLightEstimate().getPixelIntensity();
// Visualize tracked points.
PointCloud pointCloud = frame.acquirePointCloud();
this.pointCloud.update(pointCloud);
if (!capturePicture)
this.pointCloud.draw(viewmtx, projmtx);
// Application is responsible for releasing the point cloud resources after
// using it.
pointCloud.release();
// Check if we detected at least one plane. If so, hide the loading message.
if (messageSnackbar != null) {
{
for (Plane plane : session.getAllTrackables(Plane.class)) {
if (plane.getType() == Plane.Type.HORIZONTAL_UPWARD_FACING
&& plane.getTrackingState() == TrackingState.TRACKING) {
hideLoadingMessage();
break;
}
//xgfgdfgfgd
//binding.setCharClick(true);
}
}
for (Plane plane : session.getAllTrackables(Plane.class)) {
if (plane.getType() == Plane.Type.HORIZONTAL_UPWARD_FACING && plane.getTrackingState() == TrackingState.TRACKING) {
hideLoadingMessage();
break;
}
//dfgdfgdfgdf
//binding.setCharClick(true);
}
}
// Visualize planes.
if (!capturePicture)
planeRenderer.drawPlanes(session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);
// Visualize anchors created by touch.
for (Anchor anchor : anchors) {
if (anchor.getTrackingState() != TrackingState.TRACKING) {
continue;
}
// Get the current pose of an Anchor in world space. The Anchor pose is updated
// during calls to session.update() as ARCore refines its estimate of the world.
anchor.getPose().toMatrix(anchorMatrix, 0);
// Update and draw the model and its shadow.
if (virtualObject != null) {
virtualObject.updateModelMatrix(anchorMatrix, myScaleGestureDetector.getScaleFactor());
if (viewmtx != null && projmtx != null) {
virtualObject.draw(viewmtx, projmtx, lightIntensity);
}
}
if (virtualObjectShadow != null) {
virtualObjectShadow.updateModelMatrix(anchorMatrix, myScaleGestureDetector.getScaleFactor());
if (viewmtx != null && projmtx != null)
virtualObjectShadow.draw(viewmtx, projmtx, lightIntensity);
}
}
if (capturePicture) {
capturePicture = false;
onSavePicture();
}
} catch (Throwable t) {
// Avoid crashing the application due to unhandled exceptions.
Log.e(TAG, "Exception on the OpenGL thread", t);
}
}
    /** Toggles the data-bound loading overlay and its message text. */
    private void setLoading(boolean isLoading, String message) {
        binding.setLoading(isLoading);
        binding.setLoadingMessage(message);
    }
    /**
     * Shows an indefinite snackbar with the given message on a translucent dark
     * background. When finishOnDismiss is true, a "Dismiss" action is added and the
     * activity finishes once the snackbar disappears.
     */
    private void showSnackbarMessage(String message, boolean finishOnDismiss) {
        messageSnackbar =
                Snackbar.make(
                        MyARActivity.this.findViewById(android.R.id.content),
                        message,
                        Snackbar.LENGTH_INDEFINITE);
        messageSnackbar.getView().setBackgroundColor(0xbf323232);  // ~75% opaque dark grey
        if (finishOnDismiss) {
            messageSnackbar.setAction(
                    "Dismiss",
                    new View.OnClickListener() {
                        @Override
                        public void onClick(View v) {
                            messageSnackbar.dismiss();
                        }
                    });
            messageSnackbar.addCallback(
                    new BaseTransientBottomBar.BaseCallback<Snackbar>() {
                        @Override
                        public void onDismissed(Snackbar transientBottomBar, int event) {
                            super.onDismissed(transientBottomBar, event);
                            finish();
                        }
                    });
        }
        messageSnackbar.show();
    }
    /** Shows the "Searching for surfaces..." snackbar; safe to call from any thread. */
    private void showLoadingMessage() {
        runOnUiThread(
                new Runnable() {
                    @Override
                    public void run() {
                        showSnackbarMessage("Searching for surfaces...", false);
                    }
                });
    }
private void hideLoadingMessage() {
runOnUiThread(
new Runnable() {
@Override
public void run() {
if (messageSnackbar != null) {
messageSnackbar.dismiss();
}
messageSnackbar = null;
}
});
}
}
我在ObjectRenderer和BackgroundRenderer中重置了我的代码。
这是我的重置代码:
BackgroundRenderer.java
/**
 * Releases the GL program objects owned by this renderer and marks the texture
 * handle invalid so the next createOnGlThread() starts from a clean state.
 * Must run on the GL thread (GLES20 calls are context-bound).
 */
public void resetData() {
    GLES20.glDeleteShader(vertexShader);
    GLES20.glDeleteShader(fragmentShader);
    GLES20.glDeleteProgram(quadProgram);
    // NOTE(review): only the handle is invalidated here; the GL texture itself is
    // not deleted -- confirm glDeleteTextures is handled elsewhere.
    textureId = -1;
}
ObjectRenderer.java
Public void updateObjectPath(String objectPath) {
try {
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(fragmentShader);
GLES20.glDeleteProgram(mProgram);
reset();
this.OBJ_PATH = objectPath;
}
catch (Exception e) {
e.printStackTrace();
}
}
/**
 * Drops all model-specific state so the renderer can be reinitialised with a new
 * .obj file through createOnGlThread(): object/texture references are cleared, every
 * GL handle and shader location is invalidated, material properties return to their
 * defaults, and fresh matrices are allocated.
 */
private void reset() {
    // Clear object / texture references.
    this.OBJ_PATH = null;
    this.mObj = null;
    this.mTextures = null;
    this.vectorArrayObjectIds = null;
    // Invalidate every GL handle, uniform and attribute location.
    this.mMaterialParametersUniform = -1;
    this.mModelViewProjectionUniform = -1;
    this.mVerticesBaseAddress = -1;
    this.mTexCoordsBaseAddress = -1;
    this.mNormalAttribute = -1;
    this.mNormalsBaseAddress = -1;
    this.mIndexBufferId = -1;
    this.mVertexBufferId = -1;
    this.mBlendMode = null;
    this.mProgram = -1;
    this.mLightingParametersUniform = -1;
    this.mIndexCount = -1;
    this.mModelViewUniform = -1;
    this.mPositionAttribute = -1;
    this.mTexCoordAttribute = -1;
    this.vertexShader = -1;
    this.fragmentShader = -1;
    // Restore default material properties.
    this.mAmbient = 0.3f;
    this.mDiffuse = 1.0f;
    this.mSpecular = 1.0f;
    this.mSpecularPower = 6.0f;
    // Fresh matrices. FIX: the original first set these four fields to null and then
    // immediately reallocated them -- the dead null-stores are removed.
    this.mModelMatrix = new float[16];
    this.mModelViewMatrix = new float[16];
    this.mModelViewProjectionMatrix = new float[16];
    this.mViewLightDirection = new float[4];
}
有人可以指导我吗?
提前致谢。
答案 0 :(得分:3)
您需要将新对象名称及其纹理传递给onDrawFrame方法,因为它仅适用于GL线程。 另请注意,您使用的是GL版本2。
glSurfaceView.setEGLContextClientVersion(2);
// Answer code: the model swap happens at the top of onDrawFrame(), guarded by an
// isObjChanged flag set from the UI, because GL resources can only be created on
// the thread that owns the GL context (the GL render thread).
@Override
public void onDrawFrame(GL10 gl) {
    if (isObjChanged) {
        isObjChanged = false;
        try {
            // Recreate the renderer for the newly selected model and texture.
            virtualObject.createOnGlThread(getContext(), objName, textureName);
            virtualObject.setMaterialProperties(0.0f, 2.0f, 0.5f, 6.0f);
        } catch (IOException e) {
            e.printStackTrace();
        }
        // Skip drawing this frame; normal rendering resumes on the next pass.
        return;
    }
    // Clear screen to notify driver it should not load any pixels from previous frame.
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    if (session == null) {
        return;
    }
    // Notify ARCore session that the view size changed so that the perspective matrix and
    // the video background can be properly adjusted.
    displayRotationHelper.updateSessionIfNeeded(session);
    try {
        session.setCameraTextureName(backgroundRenderer.getTextureId());
        // Obtain the current frame from ARSession. When the configuration is set to
        // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
        // camera framerate.
        Frame frame = session.update();
        Camera camera = frame.getCamera();
        // Handle taps. Handling only one tap per frame, as taps are usually low frequency
        // compared to frame rate.
        MotionEvent tap = tapHelper.poll();
        if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
            for (HitResult hit : frame.hitTest(tap)) {
                // Check if any plane was hit, and if it was hit inside the plane polygon
                Trackable trackable = hit.getTrackable();
                // Creates an anchor if a plane or an oriented point was hit.
                if ((trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose()))
                        || (trackable instanceof Point
                        && ((Point) trackable).getOrientationMode()
                        == Point.OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
                    // Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
                    // Cap the number of objects created. This avoids overloading both the
                    // rendering system and ARCore.
                    if (anchors.size() >= 1) {
                        anchors.get(0).detach();
                        anchors.remove(0);
                    }
                    // Adding an Anchor tells ARCore that it should track this position in
                    // space. This anchor is created on the Plane to place the 3D model
                    // in the correct position relative both to the world and to the plane.
                    anchors.add(hit.createAnchor());
                    break;
                }
            }
        }
        // Draw background.
        backgroundRenderer.draw(frame);
        // If not tracking, don't draw 3d objects.
        if (camera.getTrackingState() == TrackingState.PAUSED) {
            return;
        }
        // Get projection matrix.
        float[] projmtx = new float[16];
        camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);
        // Get camera matrix and draw.
        float[] viewmtx = new float[16];
        camera.getViewMatrix(viewmtx, 0);
        // Compute lighting from average intensity of the image.
        // The first three components are color scaling factors.
        // The last one is the average pixel intensity in gamma space.
        final float[] colorCorrectionRgba = new float[4];
        frame.getLightEstimate().getColorCorrection(colorCorrectionRgba, 0);
        // Visualize tracked points.
        PointCloud pointCloud = frame.acquirePointCloud();
        pointCloudRenderer.update(pointCloud);
        pointCloudRenderer.draw(viewmtx, projmtx);
        // Application is responsible for releasing the point cloud resources after
        // using it.
        pointCloud.release();
        // Check if we detected at least one plane. If so, hide the loading message.
        if (messageSnackbarHelper.isShowing()) {
            for (Plane plane : session.getAllTrackables(Plane.class)) {
                if (plane.getType() == com.google.ar.core.Plane.Type.HORIZONTAL_UPWARD_FACING
                        && plane.getTrackingState() == TrackingState.TRACKING) {
                    messageSnackbarHelper.hide(getActivity());
                    break;
                }
            }
        }
        // Visualize planes.
        planeRenderer.drawPlanes(
                session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);
        // Visualize anchors created by touch.
        for (Anchor anchor : anchors) {
            if (anchor.getTrackingState() != TrackingState.TRACKING) {
                continue;
            }
            // Get the current pose of an Anchor in world space. The Anchor pose is updated
            // during calls to session.update() as ARCore refines its estimate of the world.
            anchor.getPose().toMatrix(anchorMatrix, 0);
            // Update and draw the model and its shadow.
            virtualObject.updateModelMatrix(anchorMatrix, scaleFactor);
            virtualObject.draw(viewmtx, projmtx, colorCorrectionRgba);
        }
    } catch (Throwable t) {
        // Avoid crashing the application due to unhandled exceptions.
        Log.e(TAG, "Exception on the OpenGL thread", t);
    }
}
答案 1 :(得分:0)
OpenGL ES 定义了以下6个错误代码:
1280 GL_INVALID_ENUM
1281 GL_INVALID_VALUE
1282 GL_INVALID_OPERATION
1283 GL_STACK_OVERFLOW
1284 GL_STACK_UNDERFLOW
1285 GL_OUT_OF_MEMORY
您收到 GL_INVALID_ENUM 错误,这意味着您将不受支持的枚举值传递给GL函数。