
How to set onTouch Listener for drawn texture in Android OpenGL-ES

I have a texture in my application that I can drag around with my finger, but right now the texture jumps to wherever I touch on the screen. How can I set an onTouch Listener for the drawn texture so that it only moves when my finger actually touches the texture?

Any guidance would be greatly appreciated~

Here is my main class:

import android.app.Activity;
import android.os.Bundle;
import android.view.WindowManager;

public class MainActivity extends Activity {

private Stage stage;

@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    //screen setting
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);

    setContentView(R.layout.main_layout);
    stage = (Stage)findViewById(R.id.my_stage);
}

@Override
protected void onPause() {
    super.onPause();
    stage.onPause();
}

@Override
protected void onResume() {
    super.onResume();
    stage.onResume();
}
}

Here is the Stage subclass:

import android.content.Context;
import android.opengl.GLES10;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class Stage extends GLSurfaceView{

//Stage width and height
private float w, h;
//Texture position
private float xPos, yPos;
//Scale ratio
private float r, ratio, dist1, dist2;
//Screen width and height
private int screenWidth, screenHeight;
//Our native vertex buffer
private FloatBuffer vertexBuffer;
private Texture tex;
MyRenderer mRenderer;

@Override
public boolean onTouchEvent(MotionEvent event) {
    final int action = event.getAction() & MotionEvent.ACTION_MASK;
    float x, y, x1, x2, y1, y2;
    int pointerIndex;

    if(event.getPointerCount()==2){
        if (action == MotionEvent.ACTION_POINTER_UP) {
            x1 = event.getX(0);
            y1 = event.getY(0);
        } else {
            x1 = event.getX(0);
            y1 = event.getY(0);
        }
        if (action == MotionEvent.ACTION_POINTER_DOWN) {
            x2 = event.getX(1);
            y2 = event.getY(1);
            dist1 = (float)Math.sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2));
        } else {
            x2 = event.getX(1);
            y2 = event.getY(1);
            dist2 = (float)Math.sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2));
        }
        ratio = dist2/dist1;
        mRenderer.setRatio(ratio);
        requestRender();
    }
    if(event.getPointerCount()==1){
        if (action == MotionEvent.ACTION_POINTER_DOWN) {
                x = event.getX();
                y = event.getY();
        } else {
            pointerIndex = event.getActionIndex();
            x = event.getX(pointerIndex);
            y = event.getY(pointerIndex);
        }
        mRenderer.setXY(x, y);
        requestRender();
    }
    return true;
}

public Stage(Context context, AttributeSet attrs) {
    super(context, attrs);
    setEGLConfigChooser(8, 8, 8, 8, 0, 0);
    mRenderer = new MyRenderer();
    setRenderer(mRenderer);
    setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    float vertices[] = {
            -0.5f, -0.5f,  0.0f,  // 0. left-bottom
            0.5f, -0.5f,  0.0f,  // 1. right-bottom
            -0.5f,  0.5f,  0.0f,  // 2. left-top
            0.5f,  0.5f,  0.0f   // 3. right-top
    };

    ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
    vbb.order(ByteOrder.nativeOrder());
    vertexBuffer = vbb.asFloatBuffer();
    vertexBuffer.put(vertices);
    vertexBuffer.position(0);

    tex = new Texture(R.drawable.kdk);
}

private class MyRenderer implements GLSurfaceView.Renderer {

    private Object lock = new Object();
    public void setXY(float x, float y) {
        synchronized (lock) {
            xPos = x * w / screenWidth;
            yPos = y * h / screenHeight;
        }
    }

    public void setRatio(float scale){
        r = scale;
    }

    public final void onDrawFrame(GL10 gl) {
        gl.glClear(GLES10.GL_COLOR_BUFFER_BIT);
        tex.prepare(gl, GL10.GL_CLAMP_TO_EDGE);
        gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
        synchronized (lock) {
            tex.draw(gl, xPos, yPos, tex.getWidth()*r, tex.getHeight()*r, 0);
        }
    }

    public final void onSurfaceChanged(GL10 gl, int width, int height) {
        gl.glClearColor(0, 0, 0, 0);

        if(width > height) {
            h = 600;
            w = width * h / height;
        } else {
            w = 600;
            h = height * w / width;
        }
        screenWidth = width;
        screenHeight = height;

        xPos = w/2;
        yPos = h/2;
        r=1;

        gl.glViewport(0, 0, screenWidth, screenHeight);
        gl.glMatrixMode(GL10.GL_PROJECTION);
        gl.glLoadIdentity();
        gl.glOrthof(0, w, h, 0, -1, 1);
        gl.glMatrixMode(GL10.GL_MODELVIEW);
        gl.glLoadIdentity();
    }

    public final void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // Set up alpha blending
        gl.glEnable(GL10.GL_ALPHA_TEST);
        gl.glEnable(GL10.GL_BLEND);
        gl.glBlendFunc(GL10.GL_ONE, GL10.GL_ONE_MINUS_SRC_ALPHA);

        // We are in 2D. Why needs depth?
        gl.glDisable(GL10.GL_DEPTH_TEST);

        // Enable vertex arrays (we'll use them to draw primitives).
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);

        // Enable texture coordination arrays.
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

        tex.load(getContext());
    }

}

}

Here is the Texture subclass:

import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES10;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.opengles.GL10;

public class Texture {

/**
 * The OpenGL ES texture name associated with this texture.
 */
protected int textureId;

/**
 * The horizontal and vertical dimensions of the image.
 */
protected int width, height;

/**
 * The resource identifier for the image we want to load.
 */
int resourceId;

/**
 * Whether or not we should generate mip maps.
 */
boolean mipmaps;

/**
 * The buffer containing texture mappings.
 */
private FloatBuffer tempTextureBuffer = null;

Texture(int resourceId, boolean mipmaps) {
    this.resourceId = resourceId;
    this.textureId = -1;
    this.mipmaps = mipmaps;
}

Texture(int resourceId) {
    this(resourceId, false);
}

/**
 * Generates a new OpenGL ES texture name (identifier).
 * @return The newly generated texture name.
 */
private static final int newTextureID() {
    int[] temp = new int[1];
    GLES10.glGenTextures(1, temp, 0);
    return temp[0];
}

public final int getWidth() {
    return width;
}

public final int getHeight() {
    return height;
}

public final void load(Context context) {
    // Load the bitmap from resources.
    BitmapFactory.Options opts = new BitmapFactory.Options();
    opts.inScaled = false;
    Bitmap bmp = BitmapFactory.decodeResource(context.getResources(), resourceId, opts);

    // Update this texture instance's width and height.
    width = bmp.getWidth();
    height = bmp.getHeight();

    // Create and bind a new texture name.
    textureId = newTextureID();
    GLES10.glBindTexture(GL10.GL_TEXTURE_2D, textureId);

    // Load the texture into our texture name.
    GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bmp, 0);

    // Set magnification filter to bilinear interpolation.
    GLES10.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);

    if(mipmaps) {
        // If mipmaps are requested, generate mipmaps and set minification filter to trilinear filtering.
        GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
        GLES10.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR_MIPMAP_LINEAR);
    }
    else GLES10.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);

    // Recycle the bitmap.
    bmp.recycle();

    // If texture mapping buffer has not been initialized yet, do it now.
    if(tempTextureBuffer == null)
        buildTextureMapping();
}

/**
 * Builds the texture mapping buffer.
 */
private void buildTextureMapping() {
    // The array of texture mapping coordinates.
    final float texture[] = {
            0, 0, // The first vertex
            1, 0, // The second vertex
            0, 1, // The third vertex
            1, 1, // The fourth vertex
    };

    // Create a native buffer out of the above array.
    final ByteBuffer ibb = ByteBuffer.allocateDirect(texture.length * 4);
    ibb.order(ByteOrder.nativeOrder());
    tempTextureBuffer = ibb.asFloatBuffer();
    tempTextureBuffer.put(texture);
    tempTextureBuffer.position(0);
}

/**
 * Deletes the texture name and marks this instance as unloaded.
 */
public final void destroy() {
    GLES10.glDeleteTextures(1, new int[] {textureId}, 0);

    // Setting this value to -1 indicates that it is unloaded.
    textureId = -1;
}

public final boolean isLoaded() {
    return textureId >= 0;
}

public final void prepare(GL10 gl, int wrap) {
    // Enable 2D texture
    gl.glEnable(GL10.GL_TEXTURE_2D);

    // Bind our texture name
    gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);

    // Set texture wrap methods
    gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, wrap);
    gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, wrap);

    // Enable texture coordinate arrays and load (activate) ours
    gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
    gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, tempTextureBuffer);
}

public final void draw(GL10 gl, float x, float y, float w, float h, float rot) {
    gl.glPushMatrix();
    gl.glTranslatef(x, y, 0);
    gl.glRotatef(rot, 0, 0, 1);
    gl.glScalef(w, h, 0); // Scaling will be performed first.
    gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
    gl.glPopMatrix();
}

}

You have all the data you need for the calculation. It looks like you are using a coordinate system with (0, 0) at the top-left corner and (w, h) at the bottom-right. The touch coordinates have to be converted into that same system, e.g. touchX * (w / screenWidth), and similarly for the vertical coordinate.
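
For example, mapping both axes of a touch event into stage coordinates could look like the sketch below (using the w, h, screenWidth and screenHeight fields from your Stage class; it is essentially what your setXY() already does):

// Map a raw touch position (screen pixels) into stage coordinates.
float stageX = event.getX() * (w / screenWidth);
float stageY = event.getY() * (h / screenHeight);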

The texture's position is also defined by a center, static coordinates and a scale, which should be enough to find the actual positions of the texture's vertices.
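
A minimal sketch of that step, assuming the quad is the one drawn above (the unit quad from -0.5 to 0.5, centered on (xPos, yPos) and scaled by tex.getWidth()*r and tex.getHeight()*r), and keeping in mind that with glOrthof(0, w, h, 0, -1, 1) the y axis grows downward:

// Half-extents of the drawn quad in stage coordinates.
float halfW = tex.getWidth() * r / 2f;
float halfH = tex.getHeight() * r / 2f;

// Edges of the quad; minY is the visually-top edge because y grows downward here.
float minX = xPos - halfW, maxX = xPos + halfW;
float minY = yPos - halfH, maxY = yPos + halfH;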

Now suppose you have a touch point touch and the texture border values left, right, bottom and top:

boolean didHit = touch.x >= left && touch.x <= right && touch.y >= bottom && touch.y <= top;
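
Putting it together for your original question (moving the quad only when the finger actually lands on it), a simplified single-pointer sketch of Stage.onTouchEvent() could do the hit test on ACTION_DOWN and only forward moves while a (hypothetical) dragging flag is set; the pinch-zoom branch from your code is left out here:

// Hypothetical field added to Stage: true while a drag that started on the texture is active.
private boolean dragging = false;

@Override
public boolean onTouchEvent(MotionEvent event) {
    // Map the touch position from screen pixels into stage coordinates.
    float x = event.getX() * w / screenWidth;
    float y = event.getY() * h / screenHeight;

    switch (event.getActionMasked()) {
        case MotionEvent.ACTION_DOWN:
            // Start dragging only if the finger landed inside the drawn quad.
            float halfW = tex.getWidth() * r / 2f;
            float halfH = tex.getHeight() * r / 2f;
            dragging = x >= xPos - halfW && x <= xPos + halfW
                    && y >= yPos - halfH && y <= yPos + halfH;
            break;
        case MotionEvent.ACTION_MOVE:
            if (dragging) {
                mRenderer.setXY(event.getX(), event.getY()); // setXY() converts to stage coordinates itself
                requestRender();
            }
            break;
        case MotionEvent.ACTION_UP:
        case MotionEvent.ACTION_CANCEL:
            dragging = false;
            break;
    }
    return true;
}

If pinch-zoom should keep working, the two-pointer branch from your code can stay alongside this, guarded by event.getPointerCount() as before.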

