3D对象旋转y轴时缩放

时间:2018-04-20 05:41:50

标签: android opengl-es-2.0 gesture

我是OpenGL和ARCore的新手,我使用Google ARCore Sample作为创建应用程序的基础。我使用的是OpenGL-ES-2.0版本。我可以使用android.view.ScaleGestureDetector.SimpleOnScaleGestureListener进行双指缩放(2指)。通过使用 Rotation Gesture 这个手势库类,我可以获得旋转角度,并且它可以很好地作用于我的3D对象。

在旋转我的3D对象时,我的对象也会缩放。我想在用户进行旋转时停止缩放。我怎样才能做到这一点?或者我如何以不同的方法分别传递我的缩放和旋转来更新它们各自的矩阵?我不想为此使用任何第三方库。

请帮我解决这个问题。下面是我的代码,并建议我在哪里做错了什么。

ScaleGesture

/**
 * Accumulates pinch gesture deltas into the activity-level {@code scaleFactor},
 * which {@code onDrawFrame} folds into the model matrix every frame.
 */
private class CustomScaleGesture extends ScaleGestureDetector.SimpleOnScaleGestureListener {
    @Override
    public boolean onScale(ScaleGestureDetector detector) {
        final float delta = detector.getScaleFactor();
        DebugHelper.log("detector.getScaleFactor(): " + delta + " scaleFactor = " + scaleFactor);
        // Multiplicative accumulation: each event carries the span ratio since
        // the previous event, so the running product is the total scale.
        scaleFactor = scaleFactor * delta;
        DebugHelper.log("final scaleFactor: " + scaleFactor);
        // Consume the event so subsequent deltas stay incremental.
        return true;
    }
}

RotationGesture

/**
 * Accumulates two-finger rotation deltas into the activity-level
 * {@code mRotationDegrees}, consumed by {@code onDrawFrame}.
 */
private class RotateListener extends RotateGestureDetector.SimpleOnRotateGestureListener {
    @Override
    public boolean onRotate(RotateGestureDetector detector) {
        DebugHelper.log("RotateListener called..");
        // Subtract the per-event delta so on-screen finger motion and model
        // rotation agree in direction.
        final float delta = detector.getRotationDegreesDelta();
        mRotationDegrees = mRotationDegrees - delta;
        DebugHelper.log("RotateListener: " + mRotationDegrees);
        return true;
    }
}

MainActivity

public class MyARActivity extends BaseActivity<MyActivityArBinding> implements GLSurfaceView.Renderer {


    // --- AR session & rendering state ---
    private int mWidth;
    private int mHeight;
    // When true, the next rendered frame is saved via onSavePicture() and the
    // point cloud / plane overlays are skipped for that frame.
    private boolean capturePicture = false;
    private boolean installRequested;
    // Set on ACTION_DOWN, cleared on ACTION_UP; not read in this chunk — TODO confirm it is used elsewhere.
    private boolean moving;
    float[] projmtx = new float[16];
    float[] viewmtx = new float[16];
    private Session session;

    private Snackbar messageSnackbar;
    private DisplayRotationHelper displayRotationHelper;
    private final BackgroundRenderer backgroundRenderer = new BackgroundRenderer();
    // Renderer for the currently selected model; created lazily in loadObject().
    private ObjectRenderer virtualObject;
    private ObjectRenderer virtualObjectShadow;
    private final PlaneRenderer planeRenderer = new PlaneRenderer();
    private PointCloudRenderer pointCloud = new PointCloudRenderer();

    // Temporary matrix allocated here to reduce number of allocations for each frame.
    private float[] anchorMatrix = new float[16];
    // Tap handling and UI: taps are queued on the UI thread and drained one per
    // frame by onDrawFrame on the GL thread.
    private ArrayBlockingQueue<MotionEvent> queuedSingleTaps = new ArrayBlockingQueue<>(16);
    // onDrawFrame keeps at most one live anchor in this list.
    private ArrayList<Anchor> anchors = new ArrayList<>();

    // --- model loading & manipulation ---
    private SQLiteHelper sqlHelper;
    // Signals onDrawFrame to (re)create GL resources for a newly chosen model.
    private boolean isObjectChanged = false;
    private String objectPath;
    private List<CharacterModel> characterModelList = new ArrayList<>();
    private boolean isFirstTimeLoad = true;
    // --- gesture state, written by the listeners and read each frame ---
    private float mRotationDegrees = 0.f;
    private RotateGestureDetector mRotateDetector;
    private float scaleFactor = 1.0f;
    private ScaleGestureDetector scaleDetector;
    private GestureDetector gestureDetector;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // Activity entry point: the base class presumably inflates the layout and
        // initializes `binding` — TODO confirm in BaseActivity. Setup must run after it.
        super.onCreate(savedInstanceState);
        setHeaderVisible(false);
        doDefaults();
    }

    // One-time setup: wire the view binding, open the character database,
    // populate the selectable models, then configure AR rendering and gestures.
    // Order matters: initAR() touches binding.surfaceView.
    private void doDefaults() {
        binding.setPresenter(this);
        sqlHelper = SQLiteHelper.getInstance(this);
        load3DCharacters();
        initAR();
    }

    @SuppressLint("ClickableViewAccessibility")
    private void initAR() {
        displayRotationHelper = new DisplayRotationHelper(this);
        // Pinch-to-scale and two-finger rotation; the listeners mutate
        // scaleFactor / mRotationDegrees, which onDrawFrame reads every frame.
        scaleDetector = new ScaleGestureDetector(this, new CustomScaleGesture());
        mRotateDetector = new RotateGestureDetector(getApplicationContext(), new RotateListener());

        // Set up tap listener: a single tap places the model, but only while no
        // anchor exists yet; once placed, moves are routed through onSecondTouch.
        gestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener() {
            @Override
            public boolean onSingleTapUp(MotionEvent e) {
                if (anchors.size() <= 0) {
                    onSingleTap(e);
                }
                return true;
            }

            @Override
            public boolean onDown(MotionEvent e) {
                // Returning true here is required for the rest of the gesture
                // callbacks to fire.
                return true;
            }
        });

        binding.surfaceView.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                DebugHelper.log("binding.surfaceView.setOnTouchListener called..");
                // Forward every event to both gesture detectors before the
                // action-specific handling below.
                mRotateDetector.onTouchEvent(event);
                scaleDetector.onTouchEvent(event);
                switch (event.getAction()) {
                    case MotionEvent.ACTION_DOWN:
                        moving = true;
                        DebugHelper.log("ACTION_DOWN");
                        break;

                    case MotionEvent.ACTION_UP:
                        DebugHelper.log("ACTION_UP");
                        moving = false;
                        break;
                    case MotionEvent.ACTION_MOVE:
                        DebugHelper.log("ACTION_MOVE");
                        if (anchors.size() > 0) {
                            onSecondTouch(event);
                        }
                        break;
                }
                // The tap detector decides whether the event is consumed.
                return gestureDetector.onTouchEvent(event);

            }
        });

        // Set up renderer. Context version / config must be set before setRenderer().
        binding.surfaceView.setPreserveEGLContextOnPause(true);
        binding.surfaceView.setEGLContextClientVersion(2);
        binding.surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
        binding.surfaceView.setRenderer(this);
        binding.surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
        installRequested = false;
    }
    /**
     * Handles touches arriving after a model has been anchored.
     *
     * <p>Bug fix: the previous version re-dispatched multi-pointer events to
     * {@code scaleDetector} even though {@code onTouch} had already forwarded the
     * very same event to it. Every two-finger frame therefore hit the scale
     * listener twice, so any two-finger gesture — including a pure rotation —
     * visibly scaled the model. The detector must see each event exactly once
     * (it already does, in {@code onTouch}), so no re-dispatch happens here.
     *
     * @param e the touch event received while at least one anchor exists
     */
    private void onSecondTouch(MotionEvent e) {
        Log.e("Second Touch", "Executed");
        if (e.getPointerCount() <= 1) {
            // Single-pointer drag: queue it so onDrawFrame can re-anchor the model.
            queuedSingleTaps.offer(e);
        }
        // Multi-pointer events need no handling here: scale and rotation were
        // already processed by the detectors in onTouch.
    }
/**
 * Queues a tap for hit-testing on the GL thread.
 * The tap is silently dropped when the queue is already full.
 */
private void onSingleTap(MotionEvent e) {
    DebugHelper.log("onSingleTap()");
    // offer() never blocks; a full queue simply loses this tap.
    queuedSingleTaps.offer(e);
}

    /**
     * Populates {@code characterModelList} with the selectable 3D characters.
     * The repeated setter boilerplate of the old version is factored into
     * {@link #buildCharacter}; entries and their values are unchanged.
     */
    private void load3DCharacters() {
        characterModelList.add(buildCharacter("Cat", "cat/cat.obj", 0.25f, R.drawable.cat));
        characterModelList.add(buildCharacter("Old Man", "man/muro.obj", 0.0085f, R.drawable.old_man));
        characterModelList.add(buildCharacter("Bloodwing", "bloodwing/bloodwing.obj", 0.0009f, R.drawable.bat));
    }

    /**
     * Creates one catalog entry for a selectable 3D character.
     *
     * @param name        display name shown in the picker
     * @param objectPath  asset-relative path to the .obj file
     * @param scale       per-model base scale factor applied at load time
     * @param resourceId  drawable used as the picker thumbnail
     * @return the fully populated model entry
     */
    private CharacterModel buildCharacter(String name, String objectPath, float scale, int resourceId) {
        CharacterModel model = new CharacterModel();
        model.setName(name);
        model.setObjectPath(objectPath);
        model.setScaleFactor(scale);
        model.setResourceId(resourceId);
        return model;
    }

    /**
     * Switches the rendered model to {@code model}.
     *
     * <p>On the first call the renderer is built immediately; on later calls GL
     * resource creation is deferred to {@code onDrawFrame} via the
     * {@code isObjectChanged} flag, so it runs on the GL thread.
     *
     * @param model the character whose .obj asset and base scale to load
     */
    private void loadObject(CharacterModel model) {
        try {
            this.objectPath = model.getObjectPath();
            this.scaleFactor = model.getScaleFactor();
            if (virtualObject == null) {
                // First selection: build the renderer now.
                // NOTE(review): createOnGlThread must run on the GL thread —
                // confirm this branch is only reached from a GL callback.
                virtualObject = new ObjectRenderer(objectPath);
                virtualObject.createOnGlThread(this);
                virtualObject.setMaterialProperties(0.0f, 1.0f, 1.0f, 6.0f);
            } else {
                // Clear screen to notify driver it should not load any pixels from previous frame.
                GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
                // Hand off actual GL work to the next onDrawFrame.
                isObjectChanged = true;
                virtualObject.updateObjectPath(model.getObjectPath());
            }
        } catch (Exception ex) {
            // Best-effort: a failed load keeps the previously loaded model.
            ex.printStackTrace();
        }
    }


@Override
    public void onDrawFrame(GL10 gl) {
        // A new model was selected from the UI: (re)create its GL resources on
        // the GL thread before drawing anything else, then wait for next frame.
        if (isObjectChanged) {
            isObjectChanged = false;
            try {
                virtualObject.createOnGlThread(this);
                virtualObject.setMaterialProperties(0.0f, 2.0f, 0.5f, 6.0f);
            } catch (IOException e) {
                e.printStackTrace();
            }
            return;
        }

        // Clear screen to notify driver it should not load any pixels from previous frame.
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

        if (session == null) {
            return;
        }
        // Notify ARCore session that the view size changed so that the
        // perspective matrix and the video background can be properly adjusted.
        displayRotationHelper.updateSessionIfNeeded(session);

        try {
            session.setCameraTextureName(backgroundRenderer.getTextureId());

            // Obtain the current frame from ARSession. With UpdateMode.BLOCKING
            // (the default) this throttles rendering to the camera framerate.
            Frame frame = session.update();
            Camera camera = frame.getCamera();

            // Handle at most one queued tap per frame; taps are low frequency
            // compared to the frame rate.
            MotionEvent tap = queuedSingleTaps.poll();
            if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
                for (HitResult hit : frame.hitTest(tap)) {
                    Trackable trackable = hit.getTrackable();
                    // Place an anchor only when a plane polygon or an oriented
                    // point was hit; hits are sorted by depth, so take the first.
                    boolean hitUsableSurface =
                            (trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose()))
                                    || (trackable instanceof Point
                                            && ((Point) trackable).getOrientationMode()
                                                    == Point.OrientationMode.ESTIMATED_SURFACE_NORMAL);
                    if (hitUsableSurface) {
                        DebugHelper.log("Anchor size = " + anchors.size());
                        // Keep at most one anchored model. Bug fix: the old code
                        // removed elements from `anchors` while iterating it with
                        // a for-each loop, which throws
                        // ConcurrentModificationException as soon as the list
                        // holds more than one anchor. Detach everything first,
                        // then clear in one step.
                        for (Anchor oldAnchor : anchors) {
                            oldAnchor.detach();
                        }
                        anchors.clear();
                        // Adding an Anchor tells ARCore to track this position in
                        // space, relative both to the world and to the plane.
                        Anchor anchor = hit.createAnchor();
                        if (anchor != null) {
                            anchors.add(anchor);
                        } else {
                            DebugHelper.log("anchor is null");
                        }
                        break;
                    }
                }
            }

            // Draw background.
            backgroundRenderer.draw(frame);

            // If not tracking, don't draw 3d objects.
            if (camera.getTrackingState() == TrackingState.PAUSED) {
                return;
            }

            // Get projection matrix.
            camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

            // Get camera matrix and draw.
            camera.getViewMatrix(viewmtx, 0);

            // Compute lighting from average intensity of the image.
            final float lightIntensity = frame.getLightEstimate().getPixelIntensity();

            // Visualize tracked points (skipped while capturing a screenshot).
            PointCloud pointCloud = frame.acquirePointCloud();
            this.pointCloud.update(pointCloud);
            if (!capturePicture) {
                this.pointCloud.draw(viewmtx, projmtx);
            }
            // Application is responsible for releasing the point cloud resources
            // after using it.
            pointCloud.release();

            // Hide the loading message once a horizontal plane is tracked.
            // (The old code ran this identical scan twice back to back; once is
            // enough.)
            if (messageSnackbar != null) {
                for (Plane plane : session.getAllTrackables(Plane.class)) {
                    if (plane.getType() == Plane.Type.HORIZONTAL_UPWARD_FACING
                            && plane.getTrackingState() == TrackingState.TRACKING) {
                        hideLoadingMessage();
                        break;
                    }
                }
            }

            // Visualize planes (skipped while capturing a screenshot).
            if (!capturePicture) {
                planeRenderer.drawPlanes(session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);
            }

            // Draw the model at each tracked anchor (at most one by construction).
            for (Anchor anchor : anchors) {
                if (anchor.getTrackingState() != TrackingState.TRACKING) {
                    continue;
                }
                // The Anchor pose is refined during session.update() as ARCore
                // improves its estimate of the world.
                anchor.getPose().toMatrix(anchorMatrix, 0);

                if (virtualObject != null) {
                    // Fold the current pinch scale and two-finger rotation into
                    // the model matrix.
                    virtualObject.updateModelMatrix(anchorMatrix, scaleFactor, mRotationDegrees);
                    if (viewmtx != null && projmtx != null) {
                        virtualObject.draw(viewmtx, projmtx, lightIntensity);
                    }
                }
            }

            if (capturePicture) {
                capturePicture = false;
                onSavePicture();
            }

        } catch (Throwable t) {
            // Never crash the app on a GL-thread error; log and carry on.
            Log.e(TAG, "Exception on the OpenGL thread", t);
        }
    }

ObjectRenderer

 /**
  * Draws the loaded .obj, one material group at a time.
  *
  * @param cameraView        the 4x4 camera view matrix (column-major)
  * @param cameraPerspective the 4x4 projection matrix (column-major)
  * @param lightIntensity    scalar light estimate passed to the shader
  */
 public void draw(float[] cameraView, float[] cameraPerspective, float lightIntensity) {
     try {
         ShaderUtil.checkGLError(TAG, "Before draw");

         // modelView = V * M; modelViewProjection = P * V * M.
         Matrix.multiplyMM(mModelViewMatrix, 0, cameraView, 0, mModelMatrix, 0);
         Matrix.multiplyMM(mModelViewProjectionMatrix, 0, cameraPerspective, 0, mModelViewMatrix, 0);
         // Post-multiplying by R gives P*V*M*R: the user's rotation is applied
         // in model space, about the model's own Y axis.
         Matrix.setRotateM(mRotationMatrix, 0, MyARActivity.rotationDegrees, 0.0f, 1.0f, 0.0f);
         Matrix.multiplyMM(mFinalModelViewProjectionMatrix, 0, mModelViewProjectionMatrix, 0, mRotationMatrix, 0);

         GLES20.glUseProgram(mProgram);

         // Set the lighting environment properties (light direction in view space).
         Matrix.multiplyMV(mViewLightDirection, 0, mModelViewMatrix, 0, LIGHT_DIRECTION, 0);
         normalizeVec3(mViewLightDirection);
         GLES20.glUniform4f(mLightingParametersUniform, mViewLightDirection[0], mViewLightDirection[1], mViewLightDirection[2], lightIntensity);

         // Set the object material properties.
         GLES20.glUniform4f(mMaterialParametersUniform, mAmbient, mDiffuse, mSpecular, mSpecularPower);

         // Upload the matrices. Bug fix: the old code uploaded
         // mModelViewProjectionMatrix to mModelViewProjectionUniform and then
         // immediately overwrote the same uniform with
         // mFinalModelViewProjectionMatrix — the first upload was dead work and
         // has been removed.
         GLES20.glUniformMatrix4fv(mModelViewUniform, 1, false, mModelViewMatrix, 0);
         GLES20.glUniformMatrix4fv(mModelViewProjectionUniform, 1, false, mFinalModelViewProjectionMatrix, 0);

         if (mBlendMode != null) {
             GLES20.glDepthMask(false);
             GLES20.glEnable(GLES20.GL_BLEND);
             switch (mBlendMode) {
                 case Shadow:
                     // Multiplicative blending function for Shadow.
                     GLES20.glBlendFunc(GLES20.GL_ZERO, GLES20.GL_ONE_MINUS_SRC_ALPHA);
                     break;
                 case Grid:
                     // Grid, additive blending function.
                     GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
                     break;
             }
         }

         if (mObj != null && mObj.getNumMaterialGroups() > 0) {
             // Draw each material group from its own VAO.
             for (int i = 0; i < mObj.getNumMaterialGroups(); i++) {
                 // Attach the object texture for this group.
                 GLES20.glUniform1i(mTextureUniform, 0);
                 GLES20.glBindTexture(GL_TEXTURE_2D, mTextures[i]);

                 GLES30.glBindVertexArray(vectorArrayObjectIds[i]);
                 GLES20.glDrawElements(GLES20.GL_TRIANGLES, mObj.getMaterialGroup(i).getNumFaces() * 3,
                         GLES20.GL_UNSIGNED_SHORT, 0);
                 GLES30.glBindVertexArray(0);

                 // Unbind texture.
                 GLES20.glBindTexture(GL_TEXTURE_2D, 0);
             }
         }

         if (mBlendMode != null) {
             // Restore default blend / depth-mask state.
             GLES20.glDisable(GLES20.GL_BLEND);
             GLES20.glDepthMask(true);
         }

         ShaderUtil.checkGLError(TAG, "After draw");
     } catch (Exception ex) {
         // Swallow to keep rendering alive; errors are already surfaced via
         // checkGLError logging.
         ex.printStackTrace();
     }
 }

0 个答案:

没有答案