I'm trying to implement object picking based on touch coordinates via an intersecting ray test. I haven't been able to find information on how to convert the touch coordinates into the coordinate system used by the world in order to construct this ray.
My understanding so far is that the matrix applied to each vertex in the scene is:
projectionMatrix * viewMatrix * modelMatrix
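(For context, this is a minimal sketch of how I understand that composition maps to android.opengl.Matrix calls; the array names just mirror the formula above and aren't my actual fields:)

    // Sketch: compose the combined matrix with android.opengl.Matrix,
    // which stores matrices as column-major float[16] arrays.
    float[] mvMatrix = new float[16];
    float[] mvpMatrix = new float[16];
    // modelview = viewMatrix * modelMatrix
    Matrix.multiplyMM(mvMatrix, 0, viewMatrix, 0, modelMatrix, 0);
    // mvp = projectionMatrix * (viewMatrix * modelMatrix)
    Matrix.multiplyMM(mvpMatrix, 0, projectionMatrix, 0, mvMatrix, 0);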
Here is my process for reversing that transformation to try to find the end point of the ray in the scene, along with my drawing loop in case I'm simply applying the wrong matrix somewhere:
public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] modelView, float[] projection)
{
    float[] rayDirection = new float[4];

    // Convert touch coordinates to normalized device coordinates ([-1, 1], y flipped)
    float normalizedX = 2 * touchX / windowWidth - 1;
    float normalizedY = 1 - 2 * touchY / windowHeight;

    // Combined projection * modelview matrix and its inverse
    float[] unviewMatrix = new float[16];
    float[] viewMatrix = new float[16];
    Matrix.multiplyMM(viewMatrix, 0, projection, 0, modelView, 0);
    Matrix.invertM(unviewMatrix, 0, viewMatrix, 0);

    // "nearPoint": the normalized point (z = 0) multiplied by the projection matrix
    float[] nearPoint = multiplyMat4ByVec4(projection, new float[]{normalizedX, normalizedY, 0, 1});

    // Camera position taken from the last column of the inverted modelview matrix
    float[] modelviewInverse = new float[16];
    Matrix.invertM(modelviewInverse, 0, modelView, 0);
    float[] cameraPos = new float[4];
    cameraPos[0] = modelviewInverse[12];
    cameraPos[1] = modelviewInverse[13];
    cameraPos[2] = modelviewInverse[14];
    cameraPos[3] = modelviewInverse[15];

    // Ray direction from the camera position toward the transformed point
    rayDirection[0] = nearPoint[0] - cameraPos[0];
    rayDirection[1] = nearPoint[1] - cameraPos[1];
    rayDirection[2] = nearPoint[2] - cameraPos[2];
    rayDirection[3] = nearPoint[3] - cameraPos[3];

    return rayDirection;
}
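For comparison, this is my understanding of what a textbook unproject step would look like written against android.opengl.Matrix (just a sketch; unprojectPoint is a name I made up and I'm not certain this is the right approach):

    // Sketch: map a normalized device coordinate (x, y, z each in [-1, 1]) back to
    // world space by multiplying with inverse(projection * modelview) and dividing by w.
    private float[] unprojectPoint(float ndcX, float ndcY, float ndcZ, float[] modelView, float[] projection)
    {
        float[] pv = new float[16];
        float[] pvInverse = new float[16];
        Matrix.multiplyMM(pv, 0, projection, 0, modelView, 0);
        Matrix.invertM(pvInverse, 0, pv, 0);

        float[] point = new float[4];
        Matrix.multiplyMV(point, 0, pvInverse, 0, new float[]{ndcX, ndcY, ndcZ, 1f}, 0);
        if (point[3] != 0f) {
            point[0] /= point[3];
            point[1] /= point[3];
            point[2] /= point[3];
        }
        return point;
    }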
@Override
public void onDrawFrame(GL10 gl10) {
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

    long time = SystemClock.uptimeMillis() % 10000L;
    float angleInDegrees = (360.0f / 10000.0f) * ((int) time);

    // Bottom-left viewport: scene from the first camera
    GLES20.glViewport(0, 0, (int)(width / 2), (int)(height / 2));
    Matrix.setIdentityM(mModelMatrix, 0);
    Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, 1.5f, 0f, 0f, -5f, 0f, 1f, 0f);
    //Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
    drawTriangle(triangleVertices);
    //Matrix.translateM(mModelMatrix, 0, 1.5f, 0, -1f);
    //Matrix.frustumM(mProjectionMatrix, 0, left, right, -1.0f, 1.0f, 1.0f, 10.0f);

    // Top-right viewport: same scene from a second camera, plus the intersection line
    Matrix.setIdentityM(mModelMatrix, 0);
    Matrix.setLookAtM(viewMatrix, 0, 1.5f, 0.8f, 0.5f, 0f, 0f, 0f, 0f, 1f, 0f);
    GLES20.glViewport((int)(width / 2), (int)(height / 2), (int)(width / 2), (int)(height / 2));
    drawTriangle(triangleVertices);
    drawIntersectionLine();

    /*
    Matrix.setLookAtM(viewMatrix, 0, 0, 1.5f, 0.5f, 0, 0, 0, 0, 0, -1f);
    GLES20.glViewport((int)(width / 2), (int)height, (int)(width / 2), (int)(height / 2));
    drawTriangle(triangleVertices);
    drawIntersectionLine();
    */
}
private void drawTriangle(final FloatBuffer triangleBuffer)
{
    // Position attribute
    triangleBuffer.position(positionOffset);
    GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
    GLES20.glEnableVertexAttribArray(mPositionHandle);

    // Color attribute
    triangleBuffer.position(colorOffset);
    GLES20.glVertexAttribPointer(mColorHandle, colorDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
    GLES20.glEnableVertexAttribArray(mColorHandle);

    // Build the modelview matrix, keep a reference to it, then build the full MVP
    Matrix.multiplyMM(mMVPMatrix, 0, viewMatrix, 0, mModelMatrix, 0);
    mMVMatrix = mMVPMatrix;
    Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);

    GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
    GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);

    //Log.d("OpenGLES2Test", "The intersection ray is: " + floatArrayAsString(getCameraPos(mMVMatrix)) + " + " + floatArrayAsString(getMouseRayProjection((int)(width / 2), (int)(height / 2), 1.0f, (int)width, (int)height, mMVMatrix, mProjectionMatrix)));
}
private void drawIntersectionLine()
{
    lineVertices.position(0);
    GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, lineStrideBytes, lineVertices);
    GLES20.glEnableVertexAttribArray(mPositionHandle);
    GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
}
private void moveIntersectionLineEndPoint(float[] lineEndPoint)
{
    this.lineEndPoint = lineEndPoint;

    // Rebuild the two-vertex line buffer from lineStartPoint to the new end point
    float[] lineVerticesData = {
            lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
            lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
    };
    lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
    lineVertices.put(lineVerticesData).position(0);
}
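For completeness, this is roughly how I intended to hook the ray up to that line (a sketch of the intent only; touchX, touchY, and rayLength are stand-ins, and lineStartPoint is just hard-coded to the first camera position):

    // Intended usage (sketch): start the line at lineStartPoint and extend it
    // an arbitrary distance along the ray returned by getMouseRayProjection.
    float rayLength = 5f; // arbitrary
    float[] rayDirection = getMouseRayProjection(touchX, touchY, width, height, mMVMatrix, mProjectionMatrix);
    moveIntersectionLineEndPoint(new float[]{
            lineStartPoint[0] + rayDirection[0] * rayLength,
            lineStartPoint[1] + rayDirection[1] * rayLength,
            lineStartPoint[2] + rayDirection[2] * rayLength
    });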
While I'm fairly sure my 4x4-matrix-by-4D-vector multiplication method is correct, here is that method as well, just in case:
public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
{
    float[] returnMatrix = new float[4];
    returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
    returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
    returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
    returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);
    return returnMatrix;
}
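(And in case it helps, a quick sanity check I could run against android.opengl.Matrix.multiplyMV, which also multiplies a float[16] matrix by a float[4] vector using the column-major layout that multiplyMM/invertM expect; checkAgainstMultiplyMV is just a throwaway name:)

    // Throwaway comparison against Matrix.multiplyMV (column-major convention).
    // Uses android.util.Log and java.util.Arrays.
    private void checkAgainstMultiplyMV(float[] matrix4, float[] vector4)
    {
        float[] expected = new float[4];
        Matrix.multiplyMV(expected, 0, matrix4, 0, vector4, 0);
        Log.d("OpenGLES2Test", "multiplyMV: " + Arrays.toString(expected)
                + ", mine: " + Arrays.toString(multiplyMat4ByVec4(matrix4, vector4)));
    }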
The goal of this test application is to show the scene from a few different angles so that I can see what the intersection line looks like based on my code. I wanted to draw a line starting at the camera's origin and ending at the intersection point, but it's behaving oddly. The end point seems to be pushed further along the positive x-axis than it should be, and in certain spots it seems to sort of... skip, as if there were a hole at that location. Although I still remember some linear algebra from calculus, I don't remember enough to know exactly what I'm doing here, and I've gone through many resources online with no luck. I'm hoping whoever reads this has more experience with this than I do and will be kind enough to help me, or give me some tips if there's anything else I'm doing wrong.
Variable reference: all matrices are float arrays of length 16
mProjectionMatrix = projection matrix
mModelMatrix = model matrix
mMVPMatrix = projection * modelview matrix
mMVMatrix = modelview matrix
private final FloatBuffer triangleVertices;
private FloatBuffer lineVertices;
private final int bytesPerFloat = 4;
private float[] viewMatrix = new float[16];
private static Context context;
private int mMVPMatrixHandle;
private int mPositionHandle;
private int mColorHandle;
private float[] mProjectionMatrix = new float[16];
private float[] mModelMatrix = new float[16];
private float[] mMVPMatrix = new float[16];
private float[] mMVMatrix = new float[16];
private final int strideBytes = 7 * bytesPerFloat;
private final int lineStrideBytes = 3 * bytesPerFloat;
private final int positionOffset = 0;
private final int positionDataSize = 3;
private final int colorOffset = 3;
private final int colorDataSize = 4;
private float width, height;
private float[] lineStartPoint = new float[]{0, 0, 1.5f};
private float[] lineEndPoint = new float[]{0, 0, 0};