Rotation vector sensor inverts the "horizontal" rotation

Time: 2014-05-15 15:51:52

Tags: android model rotation android-sensors

I have written a small test application that simulates moving around an object. It uses the JPCT-AE library and the rotation vector sensor of the mobile device.

My problem is that the current rotation does not correctly simulate moving around the object: the rotation is inverted.

Here is a picture that shows the problem more clearly:

[image: the user's movement from point A to point B compared with how the application rotates the object]

In the picture, the upper half shows the user moving from point A to point B. The lower part shows how the application simulates moving around the object; the "How it is" screen shows how the object is currently being rotated.

The code looks like this:

import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.app.Activity;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.opengl.GLSurfaceView;
import android.os.Bundle;

import com.threed.jpct.Camera;
import com.threed.jpct.FrameBuffer;
import com.threed.jpct.Light;
import com.threed.jpct.Loader;
import com.threed.jpct.Matrix;
import com.threed.jpct.Object3D;
import com.threed.jpct.RGBColor;
import com.threed.jpct.World;

public class HelloWorld extends Activity {

private GLSurfaceView mGLSurfaceView;
private SensorManager mSensorManager;
private MyRenderer mRenderer;
Object3D object = null;
private World world = null;
private Light sun = null;
Context context = this;
private FrameBuffer fb = null;
private RGBColor back = new RGBColor(175, 175, 175);

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    // Get an instance of the SensorManager
    mSensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);

    mRenderer = new MyRenderer(context);
    mGLSurfaceView = new GLSurfaceView(this);
    mGLSurfaceView.setRenderer(mRenderer);
    setContentView(mGLSurfaceView);
}

@Override
protected void onResume() {
    super.onResume();
    mRenderer.start();
    mGLSurfaceView.onResume();
}

@Override
protected void onPause() {
    super.onPause();
    mRenderer.stop();
    mGLSurfaceView.onPause();
}

class MyRenderer implements GLSurfaceView.Renderer, SensorEventListener {
    private Sensor mRotationVectorSensor;
    private final float[] mRotationMatrix = new float[16];
    Context context;

    public MyRenderer(Context context) {
        // find the rotation-vector sensor
        this.context = context;
        mRotationVectorSensor = mSensorManager
                .getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR);

        // initialize the rotation matrix to identity
        // (the diagonal of a 4x4 matrix stored in a 16-element array)
        mRotationMatrix[0] = 1;
        mRotationMatrix[5] = 1;
        mRotationMatrix[10] = 1;
        mRotationMatrix[15] = 1;
    }

    public void start() {
        mSensorManager.registerListener(this, mRotationVectorSensor, 10000);
    }

    public void stop() {
        // make sure to turn our sensor off when the activity is paused
        mSensorManager.unregisterListener(this);
    }

    public void onSensorChanged(SensorEvent event) {
        if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
            SensorManager.getRotationMatrixFromVector(mRotationMatrix,
                    event.values);
            // remap so that the rotation is expressed in the coordinate system
            // the scene expects (this is the fix described in the answer below)
            SensorManager.remapCoordinateSystem(mRotationMatrix,
                    SensorManager.AXIS_X, SensorManager.AXIS_MINUS_Y,
                    mRotationMatrix);
        }
    }

    public void onDrawFrame(GL10 gl) {

        // copy the 16-float rotation matrix from the sensor into a jPCT matrix
        Matrix m = new Matrix();
        m.setDump(mRotationMatrix);

        object.setRotationMatrix(m);

        fb.clear(back);
        world.renderScene(fb);
        world.draw(fb);
        fb.display();
    }

    public void onSurfaceChanged(GL10 gl, int width, int height) {
        if (fb != null) {
            fb.dispose();
        }
        fb = new FrameBuffer(gl, width, height);
        world = new World();
        world.setAmbientLight(250, 250, 250);

        // set view-port
        gl.glViewport(0, 0, width, height);
        // set projection matrix
        float ratio = (float) width / height;
        gl.glMatrixMode(GL10.GL_PROJECTION);
        gl.glLoadIdentity();
        gl.glFrustumf(-ratio, ratio, -1, 1, 1, 10);
        try {
            object = loadModel("untitled.obj", "untitled.mtl", 0.1F);
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }

        object.build();
        world.addObject(object);

        Camera cam = world.getCamera();
        cam.moveCamera(Camera.CAMERA_MOVEOUT, 50);
        cam.lookAt(object.getTransformedCenter());
    }

    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // dither is enabled by default, we don't need it
        gl.glDisable(GL10.GL_DITHER);
        // clear screen in white
        gl.glClearColor(1, 1, 1, 1);
    }

    public void onAccuracyChanged(Sensor sensor, int accuracy) {
    }

    private Object3D loadModel(String filename, String mtlFileName,
            float scale) throws UnsupportedEncodingException {

        InputStream stream = null;
        InputStream mtlStream = null;
        try {

            stream = context.getAssets().open(filename);
            mtlStream = context.getAssets().open(mtlFileName);
        } catch (IOException e) {
            e.printStackTrace();
        }


        Object3D[] model = Loader.loadOBJ(stream, mtlStream, scale);

        return Object3D.mergeAll(model);

    }
} // end of inner class MyRenderer
} // end of HelloWorld activity

Does anyone here have experience with this? Before I start working through the full math of it, I would like to know whether there is a simple solution.

Thanks for your time! I really appreciate it!

1 Answer:

Answer 0 (score: 0):

OK, I found a solution myself. I had to remap the rotation matrix the sensor provides so that it is expressed in a coordinate system that fits my problem.

I have edited the code in my question; the version above now works for me.

Basically, I added this line of code in the onSensorChanged method:

SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_X,  SensorManager.AXIS_MINUS_Y, mRotationMatrix);

I know it is not the best solution performance-wise, because I pass mRotationMatrix as both the first and the last argument of that method. But for now the performance does not bother me.
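
If the in-place remapping ever becomes a concern, a minimal sketch of the same call with a separate output array could look like the following. The second array, mRemappedMatrix, is just a name assumed here for illustration and does not appear in the code above:

    // hypothetical second 16-float buffer, declared next to mRotationMatrix
    private final float[] mRemappedMatrix = new float[16];

    public void onSensorChanged(SensorEvent event) {
        if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
            // fill mRotationMatrix from the rotation vector as before
            SensorManager.getRotationMatrixFromVector(mRotationMatrix, event.values);
            // write the remapped rotation into the separate array instead of in place
            SensorManager.remapCoordinateSystem(mRotationMatrix,
                    SensorManager.AXIS_X, SensorManager.AXIS_MINUS_Y,
                    mRemappedMatrix);
        }
    }

onDrawFrame would then have to call m.setDump(mRemappedMatrix) instead of m.setDump(mRotationMatrix); I have not measured whether this makes any noticeable difference.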

I hope this helps someone in the future.