How to set an onTouch listener for a drawn texture in Android OpenGL-ES

Date: 2015-09-29 01:42:31

Tags: android opengl-es

I have a texture in my application that I can drag around with my finger. Right now, when I touch anywhere on the phone screen, the texture jumps to the position I touched. How can I set up touch handling for the texture so that it only moves while my finger is actually on the texture?

Any guidance would be appreciated~

Here is my main class:

import android.app.Activity;
import android.os.Bundle;
import android.view.WindowManager;

public class MainActivity extends Activity {

private Stage stage;

@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    //screen setting
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);

    setContentView(R.layout.main_layout);
    stage = (Stage)findViewById(R.id.my_stage);
}

@Override
protected void onPause() {
    super.onPause();
    stage.onPause();
}

@Override
protected void onResume() {
    super.onResume();
    stage.onResume();
}
}

Here is the Stage subclass:

import android.content.Context;
import android.opengl.GLES10;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class Stage extends GLSurfaceView{

//Stage width and height
private float w, h;
//Texture position
private float xPos, yPos;
//Scale ratio
private float r, ratio, dist1, dist2;
//Screen width and height
private int screenWidth, screenHeight;
//Our native vertex buffer
private FloatBuffer vertexBuffer;
private Texture tex;
MyRenderer mRenderer;

@Override
public boolean onTouchEvent(MotionEvent event) {
    final int action = event.getAction() & MotionEvent.ACTION_MASK;
    float x, y, x1, x2, y1, y2;
    int pointerIndex;

    if(event.getPointerCount()==2){
        if (action == MotionEvent.ACTION_POINTER_UP) {
            x1 = event.getX(0);
            y1 = event.getY(0);
        } else {
            x1 = event.getX(0);
            y1 = event.getY(0);
        }
        if (action == MotionEvent.ACTION_POINTER_DOWN) {
            x2 = event.getX(1);
            y2 = event.getY(1);
            dist1 = (float)Math.sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2));
        } else {
            x2 = event.getX(1);
            y2 = event.getY(1);
            dist2 = (float)Math.sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2));
        }
        ratio = dist2/dist1;
        mRenderer.setRatio(ratio);
        requestRender();
    }
    if(event.getPointerCount()==1){
        if (action == MotionEvent.ACTION_POINTER_DOWN) {
                x = event.getX();
                y = event.getY();
        } else {
            pointerIndex = event.getActionIndex();
            x = event.getX(pointerIndex);
            y = event.getY(pointerIndex);
        }
        mRenderer.setXY(x, y);
        requestRender();
    }
    return true;
}

public Stage(Context context, AttributeSet attrs) {
    super(context, attrs);
    setEGLConfigChooser(8, 8, 8, 8, 0, 0);
    mRenderer = new MyRenderer();
    setRenderer(mRenderer);
    setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    float vertices[] = {
            -0.5f, -0.5f,  0.0f,  // 0. left-bottom
            0.5f, -0.5f,  0.0f,  // 1. right-bottom
            -0.5f,  0.5f,  0.0f,  // 2. left-top
            0.5f,  0.5f,  0.0f   // 3. right-top
    };

    ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
    vbb.order(ByteOrder.nativeOrder());
    vertexBuffer = vbb.asFloatBuffer();
    vertexBuffer.put(vertices);
    vertexBuffer.position(0);

    tex = new Texture(R.drawable.kdk);
}

private class MyRenderer implements GLSurfaceView.Renderer {

    private Object lock = new Object();
    public void setXY(float x, float y) {
        synchronized (lock) {
            xPos = x * w / screenWidth;
            yPos = y * h / screenHeight;
        }
    }

    public void setRatio(float scale){
        r = scale;
    }

    public final void onDrawFrame(GL10 gl) {
        gl.glClear(GLES10.GL_COLOR_BUFFER_BIT);
        tex.prepare(gl, GL10.GL_CLAMP_TO_EDGE);
        gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
        synchronized (lock) {
            tex.draw(gl, xPos, yPos, tex.getWidth()*r, tex.getHeight()*r, 0);
        }
    }

    public final void onSurfaceChanged(GL10 gl, int width, int height) {
        gl.glClearColor(0, 0, 0, 0);

        if(width > height) {
            h = 600;
            w = width * h / height;
        } else {
            w = 600;
            h = height * w / width;
        }
        screenWidth = width;
        screenHeight = height;

        xPos = w/2;
        yPos = h/2;
        r=1;

        gl.glViewport(0, 0, screenWidth, screenHeight);
        gl.glMatrixMode(GL10.GL_PROJECTION);
        gl.glLoadIdentity();
        gl.glOrthof(0, w, h, 0, -1, 1);
        gl.glMatrixMode(GL10.GL_MODELVIEW);
        gl.glLoadIdentity();
    }

    public final void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // Set up alpha blending
        gl.glEnable(GL10.GL_ALPHA_TEST);
        gl.glEnable(GL10.GL_BLEND);
        gl.glBlendFunc(GL10.GL_ONE, GL10.GL_ONE_MINUS_SRC_ALPHA);

        // We are in 2D, so the depth test is not needed.
        gl.glDisable(GL10.GL_DEPTH_TEST);

        // Enable vertex arrays (we'll use them to draw primitives).
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);

        // Enable texture coordinate arrays.
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

        tex.load(getContext());
    }

}

}

Here is the Texture subclass:

import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES10;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.opengles.GL10;

public class Texture {

/**
 * The OpenGL ES texture name associated with this texture.
 */
protected int textureId;

/**
 * The horizontal and vertical dimensions of the image.
 */
protected int width, height;

/**
 * The resource identifier for the image we want to load.
 */
int resourceId;

/**
 * Whether or not we should generate mip maps.
 */
boolean mipmaps;

/**
 * The buffer containing texture mappings.
 */
private FloatBuffer tempTextureBuffer = null;

Texture(int resourceId, boolean mipmaps) {
    this.resourceId = resourceId;
    this.textureId = -1;
    this.mipmaps = mipmaps;
}

Texture(int resourceId) {
    this(resourceId, false);
}

/**
 * Generates a new OpenGL ES texture name (identifier).
 * @return The newly generated texture name.
 */
private static final int newTextureID() {
    int[] temp = new int[1];
    GLES10.glGenTextures(1, temp, 0);
    return temp[0];
}

public final int getWidth() {
    return width;
}

public final int getHeight() {
    return height;
}

public final void load(Context context) {
    // Load the bitmap from resources.
    BitmapFactory.Options opts = new BitmapFactory.Options();
    opts.inScaled = false;
    Bitmap bmp = BitmapFactory.decodeResource(context.getResources(), resourceId, opts);

    // Update this texture instance's width and height.
    width = bmp.getWidth();
    height = bmp.getHeight();

    // Create and bind a new texture name.
    textureId = newTextureID();
    GLES10.glBindTexture(GL10.GL_TEXTURE_2D, textureId);

    // Load the texture into our texture name.
    GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bmp, 0);

    // Set magnification filter to bilinear interpolation.
    GLES10.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);

    if(mipmaps) {
        // If mipmaps are requested, generate mipmaps and set minification filter to trilinear filtering.
        GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
        GLES10.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR_MIPMAP_LINEAR);
    }
    else GLES10.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);

    // Recycle the bitmap.
    bmp.recycle();

    // If texture mapping buffer has not been initialized yet, do it now.
    if(tempTextureBuffer == null)
        buildTextureMapping();
}

/**
 * Builds the texture mapping buffer.
 */
private void buildTextureMapping() {
    // The array of texture mapping coordinates.
    final float texture[] = {
            0, 0, // The first vertex
            1, 0, // The second vertex
            0, 1, // The third vertex
            1, 1, // The fourth vertex
    };

    // Create a native buffer out of the above array.
    final ByteBuffer ibb = ByteBuffer.allocateDirect(texture.length * 4);
    ibb.order(ByteOrder.nativeOrder());
    tempTextureBuffer = ibb.asFloatBuffer();
    tempTextureBuffer.put(texture);
    tempTextureBuffer.position(0);
}

/**
 * Deletes the texture name and marks this instance as unloaded.
 */
public final void destroy() {
    GLES10.glDeleteTextures(1, new int[] {textureId}, 0);

    // Setting this value to -1 indicates that it is unloaded.
    textureId = -1;
}

public final boolean isLoaded() {
    return textureId >= 0;
}

public final void prepare(GL10 gl, int wrap) {
    // Enable 2D texture
    gl.glEnable(GL10.GL_TEXTURE_2D);

    // Bind our texture name
    gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);

    // Set texture wrap methods
    gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, wrap);
    gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, wrap);

    // Enable texture coordinate arrays and load (activate) ours
    gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
    gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, tempTextureBuffer);
}

public final void draw(GL10 gl, float x, float y, float w, float h, float rot) {
    gl.glPushMatrix();
    gl.glTranslatef(x, y, 0);
    gl.glRotatef(rot, 0, 0, 1);
    gl.glScalef(w, h, 0); // Scaling will be performed first.
    gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
    gl.glPopMatrix();
}

}

1 Answer:

Answer 0 (score: 0):

You already have all the data needed for the calculation. You appear to be using a coordinate system with (0,0) at the top-left and (w,h) at the bottom-right. The touch coordinates must be converted into the same system, e.g. touchX*(w/screenWidth), and similarly for the vertical coordinate.
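For example, a minimal sketch of that conversion, reusing the w/h and screenWidth/screenHeight fields that Stage already keeps (names as in the code above):

// Convert a touch point from screen pixels into the stage coordinate
// system used by the renderer: (0,0) at the top-left, (w,h) at the
// bottom-right, matching glOrthof(0, w, h, 0, -1, 1).
float stageX = event.getX() * (w / screenWidth);
float stageY = event.getY() * (h / screenHeight);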

The texture's position is also defined by a center, static coordinates and a scale, which should be enough to find the actual positions of the texture's vertices.
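A minimal sketch of recovering those edges, assuming (as in onDrawFrame above) that xPos/yPos is the quad's center and tex.getWidth()*r / tex.getHeight()*r is its current drawn size:

// Current drawn size of the textured quad (the unit quad from the
// vertex buffer is scaled by texture size * zoom ratio in draw()).
float texW = tex.getWidth() * r;
float texH = tex.getHeight() * r;

// Edges of the quad in stage coordinates; note that y grows downwards
// here, so "top" is the smaller y value.
float left   = xPos - texW / 2;
float right  = xPos + texW / 2;
float top    = yPos - texH / 2;
float bottom = yPos + texH / 2;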

Now consider your touch point touch and the texture's border values left, right, bottom and top:

boolean didHit = touch.x >= left && touch.x <= right && touch.y >= top && touch.y <= bottom;
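One way to wire the hit test into Stage.onTouchEvent is sketched below; it is only an illustration under the assumptions above, not a drop-in fix. hitTexture would be a new boolean field on Stage, and left/right/top/bottom come from the previous sketch. Because the stage's y axis grows downwards, the vertical comparison treats top as the smaller value:

if (action == MotionEvent.ACTION_DOWN) {
    // Convert the touch to stage coordinates and test it against the quad.
    float sx = event.getX() * (w / screenWidth);
    float sy = event.getY() * (h / screenHeight);
    hitTexture = sx >= left && sx <= right && sy >= top && sy <= bottom;
} else if (action == MotionEvent.ACTION_MOVE && hitTexture) {
    // Only follow the finger while the drag started on the texture.
    mRenderer.setXY(event.getX(), event.getY());
    requestRender();
} else if (action == MotionEvent.ACTION_UP) {
    hitTexture = false;
}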