Vuforia and Jpct-ae in Android do not attach the 3D object to the marker

Time: 2018-11-21 08:31:23

Tags: android augmented-reality vuforia jpct

I am trying to place a 3D object on a marker using Jpct and Vuforia. The object renders successfully, but it is not fixed to the marker. I followed this tutorial: Integrating Vuforia and Jpct-ae in Android.

I have the code below for rendering the 3D model, but it does not work as I expect: the 3D model moves whenever the phone's camera moves. I do set cam.setFOV(fov) and cam.setYFOV(fovy), as you can see in the code, but that does not solve the problem. For more clarity, see the screenshot.
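(For reference, fov and fovy are derived from Vuforia's camera calibration in updateConfiguration() below, as fov = 2 * atan(0.5 * imageWidth / focalLengthX) and fovy = 2 * atan(0.5 * imageHeight / focalLengthY), with the two values swapped in portrait orientation.)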

Below is a picture of the code at work:

[screenshot]

public class ImageTargetRenderJpctExample implements GLSurfaceView.Renderer, SampleAppRendererControl {

private static final String LOGTAG = "ImageTargetRenderer";

private SampleApplicationSession vuforiaAppSession;
private ImageTargets mActivity;
private SampleAppRenderer mSampleAppRenderer;

private boolean mIsActive = false;

private World world = null;
private Light sun = null;
public Object3D obj;
public Texture texture;
private FrameBuffer fb = null;
private GL10 lastGl = null;
private Camera cam;
private float[] modelViewMat;
private float fov;
private float fovy;

public ImageTargetRenderJpctExample(ImageTargets activity, SampleApplicationSession session)
{
    mActivity = activity;
    vuforiaAppSession = session;

    mSampleAppRenderer = new SampleAppRenderer(
            this, mActivity, Device.MODE.MODE_AR, false, -1.0f, 5);


    world = new World();
    world.setAmbientLight(200, 200, 200);

    sun = new Light(world);
    sun.setIntensity(250, 250, 250);

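    // Load the 3DS model from assets at 10% of its original scale, then
    // merge all of its sub-objects into a single Object3D.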
    Object3D[] object3Darray = new Object3D[0];
    try {
        object3Darray = Loader.load3DS(mActivity.getAssets().open("Audi_S3.3DS"),0.10f);
    } catch (IOException e) {
        e.printStackTrace();
    }

    obj = Object3D.mergeAll(object3Darray);
    obj.strip();
    obj.build();

    world.addObject(obj);

    cam = world.getCamera();
    obj.translate(0, 0, 1.5f);

    SimpleVector sv = new SimpleVector();
    sv.set(obj.getTransformedCenter());
    sv.y -= 100;
    sv.z -= 100;
    sun.setPosition(sv);
    MemoryHelper.compact();
}

@Override
public void onDrawFrame(GL10 gl)
{
    if (!mIsActive)
        return;

    //obj.rotateZ(0.025f);

    mSampleAppRenderer.render();

    updateCamera();
    world.renderScene(fb);
    world.draw(fb);
    fb.display();
}


public void setActive(boolean active)
{
    mIsActive = active;

    if(mIsActive)
        mSampleAppRenderer.configureVideoBackground();
}


@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config)
{
    vuforiaAppSession.onSurfaceCreated();
    mSampleAppRenderer.onSurfaceCreated();
}

@Override
public void onSurfaceChanged(GL10 gl, int w, int h) {
    vuforiaAppSession.onSurfaceChanged(w, h);

    mSampleAppRenderer.onConfigurationChanged(mIsActive);
    initRendering();

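    // Recreate the jPCT FrameBuffer only when the GL context has actually
    // changed; otherwise just resize the existing buffer.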
    if (lastGl != gl) {
        if (fb != null) {
            fb.dispose();
        }
        fb = new FrameBuffer(w, h);
        Config.viewportOffsetAffectsRenderTarget = true;

        fb.setVirtualDimensions(fb.getWidth(), fb.getHeight());
        lastGl = gl;
    } else {
        fb.resize(w, h);
        fb.setVirtualDimensions(w, h);
    }


}

private void initRendering()
{
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, Vuforia.requiresAlpha() ? 0.0f : 1.0f);

    mActivity.loadingDialogHandler
            .sendEmptyMessage(LoadingDialogHandler.HIDE_LOADING_DIALOG);
}

public void updateConfiguration()
{
    mSampleAppRenderer.onConfigurationChanged(mIsActive);
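    // Derive the horizontal and vertical FOV from Vuforia's camera
    // calibration: fov = 2 * atan(0.5 * imageSize / focalLength) per axis.
    // In portrait orientation the two values are swapped.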
    CameraCalibration camCalibration = com.vuforia.CameraDevice.getInstance().getCameraCalibration();
    Vec2F size = camCalibration.getSize();
    Vec2F focalLength = camCalibration.getFocalLength();
    float fovyRadians = (float) (2 * Math.atan(0.5f * size.getData()[1] / focalLength.getData()[1]));
    float fovRadians = (float) (2 * Math.atan(0.5f * size.getData()[0] / focalLength.getData()[0]));

    if (mSampleAppRenderer.mIsPortrait) {
        setFovy(fovRadians);
        setFov(fovyRadians);
    } else {
        setFov(fovRadians);
        setFovy(fovyRadians);
    }

    Timber.d("Updated Config called");
}

public void renderFrame(State state, float[] projectionMatrix)
{
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    mSampleAppRenderer.renderVideoBackground(state);

    float[] modelviewArray = new float[16];

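    // For each tracked marker, convert the Vuforia pose to an OpenGL-style
    // matrix, then invert and transpose it so it can drive the jPCT camera.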
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++) {
        TrackableResult result = state.getTrackableResult(tIdx);
        Trackable trackable = result.getTrackable();
        printUserData(trackable);

        Matrix44F modelViewMatrix = Tool.convertPose2GLMatrix(result.getPose());
        Matrix44F inverseMV = SampleMath.Matrix44FInverse(modelViewMatrix);
        Matrix44F invTranspMV = SampleMath.Matrix44FTranspose(inverseMV);

        modelviewArray = invTranspMV.getData();
        updateModelviewMatrix(modelviewArray);

    }

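    // No marker in view: substitute a matrix that parks the camera 10000
    // units away, effectively hiding the model.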
    if (state.getNumTrackableResults() == 0) {
        float[] m = {
                1, 0, 0, 0,
                0, 1, 0, 0,
                0, 0, 1, 0,
                0, 0, -10000, 1
        };
        modelviewArray = m;
        updateModelviewMatrix(modelviewArray);
    }

    Renderer.getInstance().end();
}

private void updateModelviewMatrix(float mat[]) {
    modelViewMat = mat;
}

private void printUserData(Trackable trackable)
{
    String userData = (String) trackable.getUserData();
}


private void updateCamera() {
    if (modelViewMat != null) {
        float[] m = modelViewMat;

        /*Matrix mat = new Matrix();
        mat.setDump(modelViewMat);
        cam.setBack(mat);*/

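        // Rebuild the camera from the stored matrix: one negated row gives
        // the up vector (which row depends on orientation), row 2 the
        // viewing direction and row 3 the position.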
        final SimpleVector camUp;
        if (mSampleAppRenderer.mIsPortrait) {
            camUp = new SimpleVector(-m[0], -m[1], -m[2]);
        } else {
            camUp = new SimpleVector(-m[4], -m[5], -m[6]);
        }

        final SimpleVector camDirection = new SimpleVector(m[8], m[9], m[10]);
        final SimpleVector camPosition = new SimpleVector(m[12], m[13], m[14]);

        cam.setOrientation(camDirection, camUp);
        cam.setPosition(camPosition);

        cam.setFOV(fov);
        cam.setYFOV(fovy);
    }
}

private void setFov(float fov) {
    this.fov = fov;
}

private void setFovy(float fovy) {
    this.fovy = fovy;
}
}
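
For comparison, the commented-out block in updateCamera() points at a more direct alternative: hand the whole modelview matrix to jPCT via cam.setBack() instead of decomposing it into orientation and position. A minimal sketch of that variant, assuming the same modelViewMat filled in by renderFrame() (the helper name updateCameraWithSetBack is just for illustration):

    private void updateCameraWithSetBack() {
        if (modelViewMat == null) {
            return;
        }
        // Wrap the stored 16-float dump in a jPCT Matrix and apply it as the
        // camera's back matrix in one call (this mirrors the commented-out
        // code in updateCamera()).
        com.threed.jpct.Matrix mat = new com.threed.jpct.Matrix();
        mat.setDump(modelViewMat.clone());
        cam.setBack(mat);

        // The FOV still has to match the device camera's calibration.
        cam.setFOV(fov);
        cam.setYFOV(fovy);
    }

Note that setDump() expects the matrix in row-major order, which is presumably why renderFrame() transposes the inverted pose before storing it.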

0 Answers:

No answers