我创建了一个基于 Vuforia 平台的增强现实应用程序。我正在修改它,以便如果目标丢失,系统将使用目标的最后一个已知位置,以及来自 CoreMotion 的设备方向数据,以将对象保持在正确的位置。
最后一点我需要帮助 - 集成 CoreMotion 数据。我认为最好的方法是根据陀螺仪输入旋转虚拟相机,但我不是 OpenGL ES 专家。有人可以阐明执行此操作的最佳方法吗?我知道如何获取设备方向数据;我需要指导的是 OpenGL 和矩阵代数这部分。
我的 renderFrame 方法如下。
// Renders one AR frame: draws the camera video background, then draws the 3D
// model using either the live trackable pose or — if tracking has been lost —
// the last pose that was captured while the target was still visible
// (trackablePoseMatrix). Called once per frame by the Vuforia render loop.
- (void)renderFrameQCAR {
    [self setFramebuffer];

    // Clear colour and depth buffers
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Render video background and retrieve tracking state
    QCAR::State state = QCAR::Renderer::getInstance().begin();
    QCAR::Renderer::getInstance().drawVideoBackground();

    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);

    // Check if any trackables are visible.
    int numberOfTrackables = state.getNumActiveTrackables();
    QCAR::Matrix44F modelViewMatrix;

    // Skip rendering entirely if there is nothing to render: no live target
    // AND no previously captured pose to fall back on.
    if (numberOfTrackables > 0 || hasPickedUpTrackablePreviously) {
        if (numberOfTrackables == 0 && hasPickedUpTrackablePreviously) {
            // Target lost: reuse the last known pose. The per-frame gesture
            // transforms below are applied to a fresh copy each frame, so the
            // stored pose itself is never mutated.
            // NOTE(review): a CoreMotion attitude correction (the question
            // being asked here) would be pre-multiplied into this matrix at
            // this point — not yet implemented; confirm rotation order
            // against CMDeviceMotion's reference frame.
            modelViewMatrix = trackablePoseMatrix;
        }
        else {
            // Target visible: get the first active trackable's pose.
            const QCAR::Trackable* trackable = state.getActiveTrackable(0);
            modelViewMatrix = QCAR::Tool::convertPose2GLMatrix(trackable->getPose());
            // Store the raw (pre-transform) pose so it can be reused if the
            // target is lost on a later frame.
            trackablePoseMatrix = modelViewMatrix;
            hasPickedUpTrackablePreviously = YES;
        }

        // Fetch the 3D object to render. Fall back to the dummy model while
        // textures are being swapped to avoid rendering a half-updated object.
        Object3D *obj3D;
        if (currentlyChangingTextures || useDummyModel) {
            obj3D = dummyObject;
        }
        else {
            obj3D = [objects3D objectAtIndex:0];
        }

        // Render using OpenGL ES 2.
        QCAR::Matrix44F modelViewProjection;

        // Gesture-driven transforms. Order matters: translate, then scale,
        // then rotate about the target's Z axis.
        ShaderUtils::translatePoseMatrix(sideToSideFloat, forwardBackFloat, 0.0f, &modelViewMatrix.data[0]);
        ShaderUtils::scalePoseMatrix(kObjectScale * sizeFloat, kObjectScale * sizeFloat, kObjectScale * sizeFloat, &modelViewMatrix.data[0]);
        ShaderUtils::rotatePoseMatrix(0.0f + rotationAngleFloat, 0.0f, 0.0f, 1.0f, &modelViewMatrix.data[0]);

        // Convert the drag gesture (expressed in the camera's frame, from
        // buttonOverlayViewController) into the target's frame using the
        // inverse model-view matrix from the PREVIOUS frame, then accumulate
        // it into theTranslation. Movement is constrained to the target plane
        // (Z forced to 0).
        QCAR::Vec3F translationFromWorldPerspective = SampleMath::Vec3FTransformNormal(translationVectorFromCamerasPerspective, inverseModelViewMatrix);
        translationFromWorldPerspective = SampleMath::Vec3FNormalize(translationFromWorldPerspective);
        theTranslation.data[0] = theTranslation.data[0] + speed*translationFromWorldPerspective.data[0];
        theTranslation.data[1] = theTranslation.data[1] + speed*translationFromWorldPerspective.data[1];
        theTranslation.data[2] = 0.0f;
        ShaderUtils::translatePoseMatrix(theTranslation.data[0], theTranslation.data[1], theTranslation.data[2], &modelViewMatrix.data[0]);

        // Update inverseModelViewMatrix for next frame's gesture conversion.
        inverseModelViewMatrix = SampleMath::Matrix44FInverse(modelViewMatrix);

        // Combine with the projection matrix: MVP = projection * modelView.
        ShaderUtils::multiplyMatrix(&qUtils.projectionMatrix.data[0], &modelViewMatrix.data[0], &modelViewProjection.data[0]);

        // Bind the shader, feed vertex attributes and texture, and draw.
        glUseProgram(shaderProgramID);
        glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)obj3D.vertices);
        glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)obj3D.normals);
        glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)obj3D.texCoords);
        glEnableVertexAttribArray(vertexHandle);
        glEnableVertexAttribArray(normalHandle);
        glEnableVertexAttribArray(textureCoordHandle);
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, [obj3D.texture textureID]);
        glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE, (const GLfloat*)&modelViewProjection.data[0]);
        glDrawArrays(GL_TRIANGLES, 0, obj3D.numVertices);
        ShaderUtils::checkGlError("EAGLView renderFrameQCAR");
    }

    // Restore GL state for the next frame.
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);
    glDisableVertexAttribArray(vertexHandle);
    glDisableVertexAttribArray(normalHandle);
    glDisableVertexAttribArray(textureCoordHandle);

    QCAR::Renderer::getInstance().end();
    [self presentFramebuffer];
}
谢谢!!