JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_Dominoes_DominoesRenderer_renderFrame(JNIEnv*, jobject)
{
    // Clear the color and depth buffers
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();

    // Names of the trackables found this frame, so we know which buttons to hide
    std::vector<const char*> found;

    // Did we find any trackables this frame?
    if (state.getNumTrackableResults() > 0)
    {
        for (int tIdx = 0; tIdx < state.getNumTrackableResults(); ++tIdx)
        {
            // Get the trackable
            const QCAR::TrackableResult* trackableResult = state.getTrackableResult(tIdx);
            const QCAR::Trackable& trackable = trackableResult->getTrackable();
            found.push_back(trackable.getName());

            // The image target specific result:
            assert(trackableResult->getType() == QCAR::TrackableResult::IMAGE_TARGET_RESULT);
            const QCAR::ImageTargetResult* imageTargetResult =
                static_cast<const QCAR::ImageTargetResult*>(trackableResult);

            // If this is our first time seeing the target, display a tip
            if (!displayedMessage)
            {
                displayMessage("Find marker man!");
                displayedMessage = true;
            }

            //const QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
            //const QCAR::Tracker* tracker = trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER);

            // Project the target's origin into the camera image, then convert it
            // to screen coordinates to position the button overlay
            const QCAR::CameraCalibration& cameraCalibration =
                QCAR::CameraDevice::getInstance().getCameraCalibration();
            QCAR::Vec2F cameraPoint = QCAR::Tool::projectPoint(cameraCalibration,
                trackableResult->getPose(), QCAR::Vec3F(0, 0, 0));
            QCAR::Vec2F xyPoint = cameraPointToScreenPoint(cameraPoint);

            showTrackerButton(xyPoint.data[0], xyPoint.data[1], trackable.getName());
        }
    }
    else
    {
        hideTrackerButton(found);
    }

    QCAR::Renderer::getInstance().end();
}
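// ----------------------------------------------------------------------------
// Note: cameraPointToScreenPoint() is used above but not defined in this
// snippet. The following is a minimal sketch of what it might look like,
// assuming the same video-background letterboxing math used by getTrackInfo()
// further down; screenWidth, screenHeight and isActivityInPortraitMode are
// assumed to be globals maintained elsewhere, and MODE_DEFAULT is an assumed
// choice of video mode.
// ----------------------------------------------------------------------------
QCAR::Vec2F cameraPointToScreenPoint(QCAR::Vec2F cameraPoint)
{
    QCAR::VideoMode videoMode = QCAR::CameraDevice::getInstance().getVideoMode(
        QCAR::CameraDevice::MODE_DEFAULT);
    QCAR::VideoBackgroundConfig config =
        QCAR::Renderer::getInstance().getVideoBackgroundConfig();

    // The video background is centered, and possibly letterboxed, on screen
    int xOffset = ((int) screenWidth - config.mSize.data[0]) / 2.0f + config.mPosition.data[0];
    int yOffset = ((int) screenHeight - config.mSize.data[1]) / 2.0f - config.mPosition.data[1];

    if (isActivityInPortraitMode)
    {
        // The camera image is rotated 90 degrees in portrait mode
        int rotatedX = videoMode.mHeight - cameraPoint.data[1];
        int rotatedY = cameraPoint.data[0];
        return QCAR::Vec2F(
            rotatedX * config.mSize.data[0] / (float) videoMode.mHeight + xOffset,
            rotatedY * config.mSize.data[1] / (float) videoMode.mWidth + yOffset);
    }
    return QCAR::Vec2F(
        cameraPoint.data[0] * config.mSize.data[0] / (float) videoMode.mWidth + xOffset,
        cameraPoint.data[1] * config.mSize.data[1] / (float) videoMode.mHeight + yOffset);
}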
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_renderFrame(JNIEnv*, jobject)
{
    //LOG("Java_com_qualcomm_QCARSamples_ImageTargets_GLRenderer_renderFrame");

    // Clear color and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();

#ifdef USE_OPENGL_ES_1_1
    // Set GL11 flags:
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_NORMAL_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glEnable(GL_TEXTURE_2D);
    glDisable(GL_LIGHTING);
#endif

    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);

    // Did we find any trackables this frame?
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable:
        const QCAR::TrackableResult* result = state.getTrackableResult(tIdx);
        const QCAR::Trackable& trackable = result->getTrackable();
        QCAR::Matrix44F modelViewMatrix =
            QCAR::Tool::convertPose2GLMatrix(result->getPose());

        // Project the target's origin to screen coordinates and skip targets
        // outside a horizontal band around the screen center
        const QCAR::CameraCalibration& cameraCalibration =
            QCAR::CameraDevice::getInstance().getCameraCalibration();
        QCAR::Vec2F cameraPoint = QCAR::Tool::projectPoint(cameraCalibration,
            result->getPose(), QCAR::Vec3F(0, 0, 0));
        QCAR::Vec2F xyPoint = cameraPointToScreenPoint(cameraPoint);
        // LOG("xyPoint %f, %f ", xyPoint.data[0], xyPoint.data[1]);
        if (xyPoint.data[1] > (screenHeight / 2) + tolerancy ||
            xyPoint.data[1] < (screenHeight / 2) - tolerancy)
        {
            continue;
        }

        // Choose the texture based on the target name: look for a texture
        // named "<target>.jpg" or "<target>.png"
        int textureIndex = 0;
        // LOG("texture = %s", trackable->getName());
        // LOG("textureCount %d", textureCount);

        // +5: four characters for the extension plus the terminating NUL
        // (the original "+ 4" overflowed the buffer by one byte)
        char trackJpg[strlen(trackable.getName()) + 5];
        strcpy(trackJpg, trackable.getName());
        strcat(trackJpg, ".jpg");
        // LOG("trackJpg %s", trackJpg);
        char trackPng[strlen(trackable.getName()) + 5];
        strcpy(trackPng, trackable.getName());
        strcat(trackPng, ".png");
        // LOG("trackPng %s", trackPng);

        for (int i = 0; i < textureCount; i++)
        {
            // LOG("textures[i]->mName %s", textures[i]->mName);
            if (strcmp(textures[i]->mName, trackPng) == 0 ||
                strcmp(textures[i]->mName, trackJpg) == 0)
            {
                textureIndex = i;
            }
        }

        const Texture* const thisTexture = textures[textureIndex];
        // LOG("thisTexture->mName %s", textures[textureIndex]->mName);

#ifdef USE_OPENGL_ES_1_1
        // Load projection matrix:
        glMatrixMode(GL_PROJECTION);
        glLoadMatrixf(projectionMatrix.data);

        // Load model view matrix:
        glMatrixMode(GL_MODELVIEW);
        glLoadMatrixf(modelViewMatrix.data);
        glTranslatef(0.f, 0.f, kObjectScale);
        glScalef(kObjectScale, kObjectScale, kObjectScale);

        // Draw object:
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*) &teapotTexCoords[0]);
        glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*) &teapotVertices[0]);
        glNormalPointer(GL_FLOAT, 0, (const GLvoid*) &teapotNormals[0]);
        glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
                       (const GLvoid*) &teapotIndices[0]);
#else
        /*
        QCAR::Matrix44F modelViewProjection;
        SampleUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale,
                                         &modelViewMatrix.data[0]);
        SampleUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale,
                                     &modelViewMatrix.data[0]);
        SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                    &modelViewMatrix.data[0],
                                    &modelViewProjection.data[0]);
        glUseProgram(shaderProgramID);
        glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &teapotVertices[0]);
        glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &teapotNormals[0]);
        glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &teapotTexCoords[0]);
        glEnableVertexAttribArray(vertexHandle);
        glEnableVertexAttribArray(normalHandle);
        glEnableVertexAttribArray(textureCoordHandle);
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
                           (GLfloat*) &modelViewProjection.data[0]);
        glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
                       (const GLvoid*) &teapotIndices[0]);
        */

        //QCAR::Vec2F targetSize = ((QCAR::ImageTarget *) trackable)->getSize();
        //QCAR::Vec2F targetSize = thisTexture->getSize();
        const QCAR::ImageTarget& imageTarget =
            static_cast<const QCAR::ImageTarget&>(result->getTrackable());
        QCAR::Vec2F targetSize = imageTarget.getSize();

        QCAR::Matrix44F modelViewProjection;
        SampleUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale,
                                         &modelViewMatrix.data[0]);
        //SampleUtils::scalePoseMatrix(targetSize.data[0], targetSize.data[1], 1.0f,
        //                             &modelViewMatrix.data[0]);
        SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                    &modelViewMatrix.data[0],
                                    &modelViewProjection.data[0]);
        // Note: this second scale + multiply overwrites the product computed
        // just above; see the note after this function
        SampleUtils::scalePoseMatrix(256, 256, kObjectScale,
                                     &modelViewMatrix.data[0]);
        SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                    &modelViewMatrix.data[0],
                                    &modelViewProjection.data[0]);

        glUseProgram(shaderProgramID);

        glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &planeVertices[0]);
        glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &planeNormals[0]);
        glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &planeTexCoords[0]);
        glEnableVertexAttribArray(vertexHandle);
        glEnableVertexAttribArray(normalHandle);
        glEnableVertexAttribArray(textureCoordHandle);

        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
                           (GLfloat*) &modelViewProjection.data[0]);
        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT,
                       (const GLvoid*) &planeIndices[0]);

        SampleUtils::checkGlError("ImageTargets renderFrame");
#endif
    }

    glDisable(GL_DEPTH_TEST);

#ifdef USE_OPENGL_ES_1_1
    glDisable(GL_TEXTURE_2D);
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_NORMAL_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
#else
    glDisableVertexAttribArray(vertexHandle);
    glDisableVertexAttribArray(normalHandle);
    glDisableVertexAttribArray(textureCoordHandle);
#endif

    QCAR::Renderer::getInstance().end();
}
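// ----------------------------------------------------------------------------
// Note on the ES 2.0 path above: modelViewProjection is computed twice, and
// the second SampleUtils::multiplyMatrix() overwrites the first product, so
// the first multiply is dead work. A single translate -> scale -> multiply
// sequence, as used by the other renderers in this section, yields the same
// final matrix. The helper below is a sketch (the name buildPlaneMVP is
// hypothetical, not from the original):
// ----------------------------------------------------------------------------
static QCAR::Matrix44F buildPlaneMVP(QCAR::Matrix44F modelViewMatrix,
                                     QCAR::Matrix44F projection)
{
    // Translate, then scale, then multiply once: equivalent to the effective
    // result of the double multiply above, without the discarded first product
    QCAR::Matrix44F mvp;
    SampleUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale, &modelViewMatrix.data[0]);
    SampleUtils::scalePoseMatrix(256, 256, kObjectScale, &modelViewMatrix.data[0]);
    SampleUtils::multiplyMatrix(&projection.data[0], &modelViewMatrix.data[0], &mvp.data[0]);
    return mvp;
}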
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_VideoPlayback_VideoPlaybackRenderer_renderFrame(JNIEnv*, jobject)
{
    //LOG("Java_com_qualcomm_QCARSamples_VideoPlayback_GLRenderer_renderFrame");

    // Clear color and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();

    glEnable(GL_DEPTH_TEST);

    // We must detect if background reflection is active and adjust the culling direction.
    // If the reflection is active, this means the pose matrix has been reflected as well,
    // therefore standard counter-clockwise face culling will result in "inside out" models.
    glEnable(GL_CULL_FACE);
    glCullFace(GL_BACK);
    if (QCAR::Renderer::getInstance().getVideoBackgroundConfig().mReflection ==
        QCAR::VIDEO_BACKGROUND_REFLECTION_ON)
        glFrontFace(GL_CW);  // Front camera
    else
        glFrontFace(GL_CCW); // Back camera

    for (int i = 0; i < NUM_TARGETS; i++)
    {
        isTracking[i] = false;
        targetPositiveDimensions[i].data[0] = 0.0;
        targetPositiveDimensions[i].data[1] = 0.0;
    }

    // Did we find any trackables this frame?
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable:
        const QCAR::TrackableResult* trackableResult = state.getTrackableResult(tIdx);
        const QCAR::ImageTarget& imageTarget =
            static_cast<const QCAR::ImageTarget&>(trackableResult->getTrackable());

        int currentTarget;

        // We store the modelview matrix to be used later by the tap calculation
        if (strcmp(imageTarget.getName(), "stones") == 0)
            currentTarget = STONES;
        else
            currentTarget = CHIPS;

        modelViewMatrix[currentTarget] =
            QCAR::Tool::convertPose2GLMatrix(trackableResult->getPose());

        isTracking[currentTarget] = true;

        targetPositiveDimensions[currentTarget] = imageTarget.getSize();
        // The pose delivers the center of the target, thus the dimensions
        // go from -width/2 to width/2, same for height
        targetPositiveDimensions[currentTarget].data[0] /= 2.0f;
        targetPositiveDimensions[currentTarget].data[1] /= 2.0f;

        // If the movie is ready to start playing or it has reached the end
        // of playback we render the keyframe
        if ((currentStatus[currentTarget] == READY) ||
            (currentStatus[currentTarget] == REACHED_END) ||
            (currentStatus[currentTarget] == NOT_READY) ||
            (currentStatus[currentTarget] == ERROR))
        {
            QCAR::Matrix44F modelViewMatrixKeyframe =
                QCAR::Tool::convertPose2GLMatrix(trackableResult->getPose());
            QCAR::Matrix44F modelViewProjectionKeyframe;
            SampleUtils::translatePoseMatrix(0.0f, 0.0f,
                targetPositiveDimensions[currentTarget].data[0],
                &modelViewMatrixKeyframe.data[0]);

            // Here we use the aspect ratio of the keyframe since it
            // is likely that it is not a perfect square
            float ratio = 1.0;
            if (textures[currentTarget]->mSuccess)
                ratio = keyframeQuadAspectRatio[currentTarget];
            else
                ratio = targetPositiveDimensions[currentTarget].data[1] /
                        targetPositiveDimensions[currentTarget].data[0];

            SampleUtils::scalePoseMatrix(
                targetPositiveDimensions[currentTarget].data[0],
                targetPositiveDimensions[currentTarget].data[0] * ratio,
                targetPositiveDimensions[currentTarget].data[0],
                &modelViewMatrixKeyframe.data[0]);
            SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                        &modelViewMatrixKeyframe.data[0],
                                        &modelViewProjectionKeyframe.data[0]);

            glUseProgram(keyframeShaderID);

            // Prepare for rendering the keyframe
            glVertexAttribPointer(keyframeVertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                                  (const GLvoid*) &quadVertices[0]);
            glVertexAttribPointer(keyframeNormalHandle, 3, GL_FLOAT, GL_FALSE, 0,
                                  (const GLvoid*) &quadNormals[0]);
            glVertexAttribPointer(keyframeTexCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                                  (const GLvoid*) &quadTexCoords[0]);
            glEnableVertexAttribArray(keyframeVertexHandle);
            glEnableVertexAttribArray(keyframeNormalHandle);
            glEnableVertexAttribArray(keyframeTexCoordHandle);

            glActiveTexture(GL_TEXTURE0);
            // The first loaded texture from the assets folder is the keyframe
            glBindTexture(GL_TEXTURE_2D, textures[currentTarget]->mTextureID);
            glUniformMatrix4fv(keyframeMVPMatrixHandle, 1, GL_FALSE,
                               (GLfloat*) &modelViewProjectionKeyframe.data[0]);
            glUniform1i(keyframeTexSampler2DHandle, 0 /*GL_TEXTURE0*/);

            // Render
            glDrawElements(GL_TRIANGLES, NUM_QUAD_INDEX, GL_UNSIGNED_SHORT,
                           (const GLvoid*) &quadIndices[0]);

            glDisableVertexAttribArray(keyframeVertexHandle);
            glDisableVertexAttribArray(keyframeNormalHandle);
            glDisableVertexAttribArray(keyframeTexCoordHandle);

            glUseProgram(0);
        }
        else // In any other case, such as playing or paused, we render the actual contents
        {
            QCAR::Matrix44F modelViewMatrixVideo =
                QCAR::Tool::convertPose2GLMatrix(trackableResult->getPose());
            QCAR::Matrix44F modelViewProjectionVideo;
            SampleUtils::translatePoseMatrix(0.0f, 0.0f,
                targetPositiveDimensions[currentTarget].data[0],
                &modelViewMatrixVideo.data[0]);

            // Here we use the aspect ratio of the video frame
            SampleUtils::scalePoseMatrix(
                targetPositiveDimensions[currentTarget].data[0],
                targetPositiveDimensions[currentTarget].data[0] *
                    videoQuadAspectRatio[currentTarget],
                targetPositiveDimensions[currentTarget].data[0],
                &modelViewMatrixVideo.data[0]);
            SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                        &modelViewMatrixVideo.data[0],
                                        &modelViewProjectionVideo.data[0]);

            glUseProgram(videoPlaybackShaderID);

            // Prepare for rendering the video frame
            glVertexAttribPointer(videoPlaybackVertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                                  (const GLvoid*) &quadVertices[0]);
            glVertexAttribPointer(videoPlaybackNormalHandle, 3, GL_FLOAT, GL_FALSE, 0,
                                  (const GLvoid*) &quadNormals[0]);

            if (strcmp(imageTarget.getName(), "stones") == 0)
                glVertexAttribPointer(videoPlaybackTexCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                    (const GLvoid*) &videoQuadTextureCoordsTransformedStones[0]);
            else
                glVertexAttribPointer(videoPlaybackTexCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                    (const GLvoid*) &videoQuadTextureCoordsTransformedChips[0]);

            glEnableVertexAttribArray(videoPlaybackVertexHandle);
            glEnableVertexAttribArray(videoPlaybackNormalHandle);
            glEnableVertexAttribArray(videoPlaybackTexCoordHandle);

            glActiveTexture(GL_TEXTURE0);
            // IMPORTANT:
            // Notice here that the texture that we are binding is not the
            // typical GL_TEXTURE_2D but instead the GL_TEXTURE_EXTERNAL_OES
            glBindTexture(GL_TEXTURE_EXTERNAL_OES, videoPlaybackTextureID[currentTarget]);
            glUniformMatrix4fv(videoPlaybackMVPMatrixHandle, 1, GL_FALSE,
                               (GLfloat*) &modelViewProjectionVideo.data[0]);
            glUniform1i(videoPlaybackTexSamplerOESHandle, 0 /*GL_TEXTURE0*/);

            // Render
            glDrawElements(GL_TRIANGLES, NUM_QUAD_INDEX, GL_UNSIGNED_SHORT,
                           (const GLvoid*) &quadIndices[0]);

            glDisableVertexAttribArray(videoPlaybackVertexHandle);
            glDisableVertexAttribArray(videoPlaybackNormalHandle);
            glDisableVertexAttribArray(videoPlaybackTexCoordHandle);

            glUseProgram(0);
        }

        // The following section renders the icons. The actual textures used
        // are loaded from the assets folder
        if ((currentStatus[currentTarget] == READY) ||
            (currentStatus[currentTarget] == REACHED_END) ||
            (currentStatus[currentTarget] == PAUSED) ||
            (currentStatus[currentTarget] == NOT_READY) ||
            (currentStatus[currentTarget] == ERROR))
        {
            // If the movie is ready to be played, is paused, has reached the
            // end or is not ready, then we display one of the icons
            QCAR::Matrix44F modelViewMatrixButton =
                QCAR::Tool::convertPose2GLMatrix(trackableResult->getPose());
            QCAR::Matrix44F modelViewProjectionButton;

            glDepthFunc(GL_LEQUAL);
            glEnable(GL_BLEND);
            glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

            // The inaccuracy of the rendering process on some devices means
            // that even with the "less or equal" version of the depth function
            // we are likely to get ugly artifacts; that is why the translation
            // in the Z direction is made slightly different (1.98 instead of
            // 2.0). Another possibility would be a depth func of "ALWAYS", but
            // that is typically not a good idea.
            SampleUtils::translatePoseMatrix(0.0f, 0.0f,
                targetPositiveDimensions[currentTarget].data[1] / 1.98f,
                &modelViewMatrixButton.data[0]);
            SampleUtils::scalePoseMatrix(
                (targetPositiveDimensions[currentTarget].data[1] / 2.0f),
                (targetPositiveDimensions[currentTarget].data[1] / 2.0f),
                (targetPositiveDimensions[currentTarget].data[1] / 2.0f),
                &modelViewMatrixButton.data[0]);
            SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                        &modelViewMatrixButton.data[0],
                                        &modelViewProjectionButton.data[0]);

            glUseProgram(keyframeShaderID);

            glVertexAttribPointer(keyframeVertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                                  (const GLvoid*) &quadVertices[0]);
            glVertexAttribPointer(keyframeNormalHandle, 3, GL_FLOAT, GL_FALSE, 0,
                                  (const GLvoid*) &quadNormals[0]);
            glVertexAttribPointer(keyframeTexCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                                  (const GLvoid*) &quadTexCoords[0]);
            glEnableVertexAttribArray(keyframeVertexHandle);
            glEnableVertexAttribArray(keyframeNormalHandle);
            glEnableVertexAttribArray(keyframeTexCoordHandle);

            glActiveTexture(GL_TEXTURE0);

            // Depending on the current status we choose the appropriate
            // texture to display. Notice that unlike the video these are
            // regular GL_TEXTURE_2D textures
            switch (currentStatus[currentTarget])
            {
                case READY:
                    glBindTexture(GL_TEXTURE_2D, textures[2]->mTextureID);
                    break;
                case REACHED_END:
                    glBindTexture(GL_TEXTURE_2D, textures[2]->mTextureID);
                    break;
                case PAUSED:
                    glBindTexture(GL_TEXTURE_2D, textures[2]->mTextureID);
                    break;
                case NOT_READY:
                    glBindTexture(GL_TEXTURE_2D, textures[3]->mTextureID);
                    break;
                case ERROR:
                    glBindTexture(GL_TEXTURE_2D, textures[4]->mTextureID);
                    break;
                default:
                    glBindTexture(GL_TEXTURE_2D, textures[3]->mTextureID);
                    break;
            }

            glUniformMatrix4fv(keyframeMVPMatrixHandle, 1, GL_FALSE,
                               (GLfloat*) &modelViewProjectionButton.data[0]);
            glUniform1i(keyframeTexSampler2DHandle, 0 /*GL_TEXTURE0*/);

            // Render
            glDrawElements(GL_TRIANGLES, NUM_QUAD_INDEX, GL_UNSIGNED_SHORT,
                           (const GLvoid*) &quadIndices[0]);

            glDisableVertexAttribArray(keyframeVertexHandle);
            glDisableVertexAttribArray(keyframeNormalHandle);
            glDisableVertexAttribArray(keyframeTexCoordHandle);

            glUseProgram(0);

            // Finally we return the depth func to its original state
            glDepthFunc(GL_LESS);
            glDisable(GL_BLEND);
        }

        SampleUtils::checkGlError("VideoPlayback renderFrame");
    }

    glDisable(GL_DEPTH_TEST);

    QCAR::Renderer::getInstance().end();
}
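// ----------------------------------------------------------------------------
// Note: binding GL_TEXTURE_EXTERNAL_OES above only works when the video
// playback shader samples through a samplerExternalOES uniform. The sample's
// actual shader source is not part of this snippet; the following fragment
// shader is a plausible sketch (names such as videoPlaybackFragmentShader and
// texCoord are illustrative, not from the original).
// ----------------------------------------------------------------------------
static const char* videoPlaybackFragmentShader =
    "#extension GL_OES_EGL_image_external : require \n"
    "precision mediump float; \n"
    "varying vec2 texCoord; \n"
    "uniform samplerExternalOES texSamplerOES; \n"
    "void main() \n"
    "{ \n"
    "    gl_FragColor = texture2D(texSamplerOES, texCoord); \n"
    "} \n";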
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_renderFrame(JNIEnv* env, jobject obj)
{
    //LOG("Java_com_qualcomm_QCARSamples_ImageTargets_GLRenderer_renderFrame");

    // Clear color and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();

#ifdef USE_OPENGL_ES_1_1
    // Set GL11 flags:
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_NORMAL_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glEnable(GL_TEXTURE_2D);
    glDisable(GL_LIGHTING);
#endif

    glEnable(GL_DEPTH_TEST);

    // We must detect if background reflection is active and adjust the culling direction.
    // If the reflection is active, this means the pose matrix has been reflected as well,
    // therefore standard counter-clockwise face culling will result in "inside out" models.
    glEnable(GL_CULL_FACE);
    glCullFace(GL_BACK);
    if (QCAR::Renderer::getInstance().getVideoBackgroundConfig().mReflection ==
        QCAR::VIDEO_BACKGROUND_REFLECTION_ON)
        glFrontFace(GL_CW);  // Front camera
    else
        glFrontFace(GL_CCW); // Back camera

    // Did we find any trackables this frame?
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable:
        const QCAR::TrackableResult* result = state.getTrackableResult(tIdx);
        const QCAR::Trackable& trackable = result->getTrackable();
        QCAR::Matrix44F modelViewMatrix =
            QCAR::Tool::convertPose2GLMatrix(result->getPose());

        // Choose the texture based on the target name:
        int textureIndex;
        if (strcmp(trackable.getName(), "chips") == 0)
        {
            textureIndex = 0;
        }
        else if (strcmp(trackable.getName(), "stones") == 0)
        {
            textureIndex = 1;
        }
        else
        {
            textureIndex = 2;
        }

        const Texture* const thisTexture = textures[textureIndex];

#ifdef USE_OPENGL_ES_1_1
        // Load projection matrix:
        glMatrixMode(GL_PROJECTION);
        glLoadMatrixf(projectionMatrix.data);

        // Load model view matrix:
        glMatrixMode(GL_MODELVIEW);
        glLoadMatrixf(modelViewMatrix.data);
        glTranslatef(0.f, 0.f, kObjectScale);
        glScalef(kObjectScale, kObjectScale, kObjectScale);

        // Draw object (note: this path still indexes with the teapot's index
        // list, which does not match the cube vertex arrays bound below):
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        //glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*) &teapotTexCoords[0]);
        glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*) &cubeVerts[0]);
        glNormalPointer(GL_FLOAT, 0, (const GLvoid*) &cubeNormals[0]);
        glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
                       (const GLvoid*) &teapotIndices[0]);
#else
        QCAR::Matrix44F modelViewProjection;
        SampleUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale,
                                         &modelViewMatrix.data[0]);
        SampleUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale,
                                     &modelViewMatrix.data[0]);
        SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                    &modelViewMatrix.data[0],
                                    &modelViewProjection.data[0]);

        glUseProgram(shaderProgramID);

        glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0, bananaVerts);
        glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0, bananaNormals);
        glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0, bananaTexCoords);
        // glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
        //                       (const GLvoid*) &teapotVertices[0]);
        // glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
        //                       (const GLvoid*) &teapotNormals[0]);
        // glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
        //                       (const GLvoid*) &teapotTexCoords[0]);

        glEnableVertexAttribArray(vertexHandle);
        glEnableVertexAttribArray(normalHandle);
        glEnableVertexAttribArray(textureCoordHandle);

        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glUniform1i(texSampler2DHandle, 0 /*GL_TEXTURE0*/);
        glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
                           (GLfloat*) &modelViewProjection.data[0]);
        // All draw calls below are currently commented out, so this path sets
        // up state but draws nothing:
        // glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
        //                (const GLvoid*) &teapotIndices[0]);
        // glDrawArrays(GL_TRIANGLES, 0, bananaNumVerts);
        //glDrawArrays(GL_TRIANGLES, 0, cubeNumVerts);

        glDisableVertexAttribArray(vertexHandle);
        glDisableVertexAttribArray(normalHandle);
        glDisableVertexAttribArray(textureCoordHandle);

        SampleUtils::checkGlError("ImageTargets renderFrame");
#endif

        //jq mark: pass the modelview matrix up to Java
        jclass javaClass = env->GetObjectClass(obj);
        jfloatArray modelviewArray = env->NewFloatArray(16);
        env->SetFloatArrayRegion(modelviewArray, 0, 16, modelViewMatrix.data);
        jmethodID method = env->GetMethodID(javaClass, "updateModelviewMatrix", "([F)V");
        env->CallVoidMethod(obj, method, modelviewArray);
        env->DeleteLocalRef(modelviewArray);

        //jq mark: pass the projection matrix up to Java
        jclass javaClass2 = env->GetObjectClass(obj);
        jfloatArray modelviewArray2 = env->NewFloatArray(16);
        env->SetFloatArrayRegion(modelviewArray2, 0, 16, projectionMatrix.data);
        jmethodID method2 = env->GetMethodID(javaClass2, "projectionMatrix", "([F)V");
        env->CallVoidMethod(obj, method2, modelviewArray2);
        env->DeleteLocalRef(modelviewArray2);
    }

    glDisable(GL_DEPTH_TEST);

#ifdef USE_OPENGL_ES_1_1
    glDisable(GL_TEXTURE_2D);
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_NORMAL_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
#endif

    QCAR::Renderer::getInstance().end();
}
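// ----------------------------------------------------------------------------
// Note: the function above resolves GetObjectClass/GetMethodID on every frame
// for every tracked target. Both lookups are comparatively expensive; a common
// JNI pattern is to cache the method IDs once (a sketch, not part of the
// original code; jmethodID values remain valid as long as the class is not
// unloaded).
// ----------------------------------------------------------------------------
static jmethodID gUpdateModelviewMatrix = 0;
static jmethodID gProjectionMatrix = 0;

static void cacheMethodIDs(JNIEnv* env, jobject obj)
{
    // Resolve the callback method IDs once, on first use
    if (gUpdateModelviewMatrix == 0)
    {
        jclass javaClass = env->GetObjectClass(obj);
        gUpdateModelviewMatrix = env->GetMethodID(javaClass,
                "updateModelviewMatrix", "([F)V");
        gProjectionMatrix = env->GetMethodID(javaClass,
                "projectionMatrix", "([F)V");
        env->DeleteLocalRef(javaClass);
    }
}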
// ----------------------------------------------------------------------------
// renderFrame Method - Takes care of drawing in the different render states
// ----------------------------------------------------------------------------
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_CloudRecognition_CloudRecoRenderer_renderFrame(JNIEnv*, jobject)
{
    // Clear color and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();

    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);

    // Delete the product texture if necessary
    if (deleteCurrentProductTexture)
    {
        if (productTexture != 0)
        {
            glDeleteTextures(1, &(productTexture->mTextureID));
            delete productTexture;
            productTexture = 0;
        }
        deleteCurrentProductTexture = false;
    }

    // If the render state indicates that the texture data has been generated,
    // create the OpenGL texture so we can start drawing the plane with the
    // book data
    if (renderState == RS_TEXTURE_GENERATED)
    {
        generateProductTextureInOpenGL();
    }

    // Did we find any trackables this frame?
    if (state.getNumTrackableResults() > 0)
    {
        trackingStarted = true;

        // If we are already tracking something we don't need
        // to wait any frame before starting the 2D transition
        // when the target gets lost
        pthread_mutex_lock(&framesToSkipMutex);
        framesToSkipBeforeRenderingTransition = 0;
        pthread_mutex_unlock(&framesToSkipMutex);

        // Gets current trackable result
        const QCAR::TrackableResult* trackableResult = state.getTrackableResult(0);
        if (trackableResult == NULL)
        {
            // Note: returning here leaves the rendering section opened by
            // begin() unbalanced; see the note after this function
            return;
        }

        modelViewMatrix = QCAR::Tool::convertPose2GLMatrix(trackableResult->getPose());

        // Get the size of the ImageTarget
        const QCAR::ImageTargetResult* imageResult =
            static_cast<const QCAR::ImageTargetResult*>(trackableResult);
        targetSize = imageResult->getTrackable().getSize();

        // Renders the Augmentation View with the 3D Book data Panel
        renderAugmentation(trackableResult);
    }
    else
    {
        // Manages the 3D-to-2D transition initialization
        if (!scanningMode && showAnimation3Dto2D && renderState == RS_NORMAL &&
            framesToSkipBeforeRenderingTransition == 0)
        {
            startTransitionTo2D();
        }

        // Reduces the number of frames to wait before triggering
        // the transition by 1
        if (framesToSkipBeforeRenderingTransition > 0 && renderState == RS_NORMAL)
        {
            pthread_mutex_lock(&framesToSkipMutex);
            framesToSkipBeforeRenderingTransition -= 1;
            pthread_mutex_unlock(&framesToSkipMutex);
        }
    }

    // Logic for rendering the transition to 2D
    if (renderState == RS_TRANSITION_TO_2D && showAnimation3Dto2D)
    {
        renderTransitionTo2D();
    }

    // Logic for rendering the transition to 3D
    if (renderState == RS_TRANSITION_TO_3D)
    {
        renderTransitionTo3D();
    }

    // Get the tracker manager:
    QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();

    // Get the image tracker:
    QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>(
        trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));

    // Get the target finder:
    QCAR::TargetFinder* finder = imageTracker->getTargetFinder();

    // Render the current state - user progress feedback
    if (finder->isRequesting())
    {
        // Requesting state - show "Requesting" text in the status bar
        setStatusBarText("Requesting");
        showStatusBar();
    }
    else
    {
        // Hide the status bar
        hideStatusBar();
    }

    glDisable(GL_DEPTH_TEST);

    QCAR::Renderer::getInstance().end();
}
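// ----------------------------------------------------------------------------
// Note: the early return above (when getTrackableResult(0) yields NULL) exits
// without calling QCAR::Renderer::getInstance().end(), so the rendering
// section opened by begin() stays unbalanced for that frame. A small guard
// that keeps the pair balanced could look like this (a sketch; the helper
// name is hypothetical):
// ----------------------------------------------------------------------------
static bool closeRenderSectionIfNoResult(const QCAR::TrackableResult* trackableResult)
{
    // Returns true when the caller should bail out of renderFrame early;
    // closes the rendering section so begin()/end() stay paired.
    if (trackableResult == NULL)
    {
        glDisable(GL_DEPTH_TEST);
        QCAR::Renderer::getInstance().end();
        return true;
    }
    return false;
}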
JNIEXPORT void JNICALL
Java_rajawali_vuforia_RajawaliVuforiaRenderer_renderFrame(JNIEnv* env, jobject object,
        jint frameBufferId, int frameBufferTextureId)
{
    //LOG("Java_com_qualcomm_QCARSamples_FrameMarkers_GLRenderer_renderFrame");
    jclass ownerClass = env->GetObjectClass(object);

    // Clear color and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Render into the framebuffer object supplied by the Java side
    glBindFramebuffer(GL_FRAMEBUFFER, frameBufferId);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D,
                           frameBufferTextureId, 0);

    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();

    jfloatArray modelViewMatrixOut = env->NewFloatArray(16);

    // Did we find any trackables this frame?
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable:
        const QCAR::TrackableResult* trackableResult = state.getTrackableResult(tIdx);
        const QCAR::Trackable& trackable = trackableResult->getTrackable();
        QCAR::Matrix44F modelViewMatrix =
            QCAR::Tool::convertPose2GLMatrix(trackableResult->getPose());

        if (isActivityInPortraitMode)
            Utils::rotatePoseMatrix(90.0f, 0, 1.0f, 0, &modelViewMatrix.data[0]);
        Utils::rotatePoseMatrix(-90.0f, 1.0f, 0, 0, &modelViewMatrix.data[0]);

        if (trackable.isOfType(QCAR::Marker::getClassType()))
        {
            jmethodID foundFrameMarkerMethod = env->GetMethodID(ownerClass,
                    "foundFrameMarker", "(I[F)V");
            env->SetFloatArrayRegion(modelViewMatrixOut, 0, 16, modelViewMatrix.data);
            env->CallVoidMethod(object, foundFrameMarkerMethod,
                    (jint) trackable.getId(), modelViewMatrixOut);
        }
        else if (trackable.isOfType(QCAR::CylinderTarget::getClassType())
                || trackable.isOfType(QCAR::ImageTarget::getClassType())
                || trackable.isOfType(QCAR::MultiTarget::getClassType()))
        {
            jmethodID foundImageMarkerMethod = env->GetMethodID(ownerClass,
                    "foundImageMarker", "(Ljava/lang/String;[F)V");
            env->SetFloatArrayRegion(modelViewMatrixOut, 0, 16, modelViewMatrix.data);
            const char* trackableName = trackable.getName();
            jstring trackableNameJava = env->NewStringUTF(trackableName);
            env->CallVoidMethod(object, foundImageMarkerMethod, trackableNameJava,
                    modelViewMatrixOut);
            // Release the per-iteration string reference (the original code
            // leaked one local reference per tracked target)
            env->DeleteLocalRef(trackableNameJava);
        }
    }

    env->DeleteLocalRef(modelViewMatrixOut);

    if (state.getNumTrackableResults() == 0)
    {
        jmethodID noFrameMarkersFoundMethod = env->GetMethodID(ownerClass,
                "noFrameMarkersFound", "()V");
        env->CallVoidMethod(object, noFrameMarkersFoundMethod);
    }

    glBindFramebuffer(GL_FRAMEBUFFER, 0);

    QCAR::Renderer::getInstance().end();
}
JNIEXPORT void JNICALL
Java_com_codered_ared_TextRecoRenderer_renderFrame(JNIEnv* env, jobject obj)
{
    //LOG("Java_com_codered_ared_TextRecoRenderer_renderFrame");

    // Clear color and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();

    glEnable(GL_DEPTH_TEST);

    // We need front face CW for the back camera and front face CCW for the
    // front camera... or, more accurately, we need CW for 0 and 2 reflections
    // and CCW for 1 reflection
    glEnable(GL_CULL_FACE);
    glCullFace(GL_FRONT);
    if (QCAR::Renderer::getInstance().getVideoBackgroundConfig().mReflection ==
        QCAR::VIDEO_BACKGROUND_REFLECTION_ON)
    {
        glFrontFace(GL_CCW); // Front camera
    }
    else
    {
        glFrontFace(GL_CW);  // Back camera
    }

    // Enable blending to support transparency
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    jclass rendererJavaClass = env->GetObjectClass(obj);
    env->CallVoidMethod(obj,
        env->GetMethodID(rendererJavaClass, "wordsStartLoop", "()V"));

    NbWordsFound = 0;

    // Did we find any trackables this frame?
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable:
        const QCAR::TrackableResult* result = state.getTrackableResult(tIdx);
        const QCAR::Trackable& trackable = result->getTrackable();

        QCAR::Matrix44F modelViewMatrix =
            QCAR::Tool::convertPose2GLMatrix(result->getPose());

        QCAR::Vec2F wordBoxSize(0, 0);

        if (result->getType() == QCAR::TrackableResult::WORD_RESULT)
        {
            const QCAR::WordResult* wordResult = (const QCAR::WordResult*) result;

            // Get the word
            const QCAR::Word& word = wordResult->getTrackable();
            const QCAR::Obb2D& obb = wordResult->getObb();
            wordBoxSize = word.getSize();

            if (word.getStringU())
            {
                // In portrait, the obb coordinates are based on a 0,0 position
                // being in the upper right corner, with:
                // X growing from top to bottom and
                // Y growing from right to left
                //
                // We convert those coordinates to be more natural for our
                // application:
                // - 0,0 is the upper left corner
                // - X grows from left to right
                // - Y grows from top to bottom
                float wordx = -obb.getCenter().data[1];
                float wordy = obb.getCenter().data[0];

                // For debugging purposes convert the string to 7-bit ASCII
                // (if possible) and log it.
                char* stringA = 0;
                if (unicodeToAscii(word, stringA))
                {
                    // We store the word
                    if (NbWordsFound < MAX_NB_WORDS)
                    {
                        // (renamed from "word" to avoid shadowing the
                        // QCAR::Word reference above)
                        struct WordDesc* wordDesc = &WordsFound[NbWordsFound];
                        NbWordsFound++;
                        strncpy(wordDesc->text, stringA, MAX_WORD_LENGTH - 1);
                        wordDesc->text[MAX_WORD_LENGTH - 1] = '\0';
                        wordDesc->Ax = wordx - (int)(wordBoxSize.data[0] / 2);
                        wordDesc->Ay = wordy - (int)(wordBoxSize.data[1] / 2);
                        wordDesc->Bx = wordx + (int)(wordBoxSize.data[0] / 2);
                        wordDesc->By = wordy + (int)(wordBoxSize.data[1] / 2);
                    }
                    delete[] stringA;
                }
            }
        }
        else
        {
            LOG("Unexpected detection:%d", result->getType());
            continue;
        }

        // Draw a rectangle around the detected word
        QCAR::Matrix44F modelViewProjection;
        SampleUtils::translatePoseMatrix(0.0f, 0.0f, 0.0f, &modelViewMatrix.data[0]);
        SampleUtils::scalePoseMatrix(wordBoxSize.data[0] + TEXTBOX_PADDING,
                                     wordBoxSize.data[1] + TEXTBOX_PADDING, 1.0f,
                                     &modelViewMatrix.data[0]);
        SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                    &modelViewMatrix.data[0],
                                    &modelViewProjection.data[0]);

        glUseProgram(lineShaderProgramID);
        glLineWidth(3.0f);

        glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &quadVertices[0]);
        glEnableVertexAttribArray(vertexHandle);

        glUniform1f(lineOpacityHandle, 1.0f);
        // FF7200
        glUniform3f(lineColorHandle, 1.0f, 0.447f, 0.0f);
        glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
                           (GLfloat*) &modelViewProjection.data[0]);

        glDrawElements(GL_LINES, NUM_QUAD_OBJECT_INDICES, GL_UNSIGNED_SHORT,
                       (const GLvoid*) &quadIndices[0]);

        // Disable the vertex array handle
        glDisableVertexAttribArray(vertexHandle);

        // Restore default line width
        glLineWidth(1.0f);

        // Unbind shader program
        glUseProgram(0);
    }

    if (NbWordsFound > 0)
    {
        jmethodID method = env->GetMethodID(rendererJavaClass, "addWord",
                                            "(Ljava/lang/String;)V");
        // We order the words per line and left to right
        qsort(&WordsFound[0], NbWordsFound, sizeof(struct WordDesc), wordDescCompare);
        for (int i = 0; i < NbWordsFound; i++)
        {
            struct WordDesc* word = &WordsFound[i];
            jstring js = env->NewStringUTF(word->text);
            env->CallVoidMethod(obj, method, js);
        }
    }

    env->CallVoidMethod(obj,
        env->GetMethodID(rendererJavaClass, "wordsEndLoop", "()V"));

    SampleUtils::checkGlError("TextReco renderFrame - words post-render");

    glDisable(GL_DEPTH_TEST);

    drawRegionOfInterest(ROICenterX, ROICenterY, ROIWidth, ROIHeight);

    // Disable blending (restore default state)
    glDisable(GL_BLEND);

    SampleUtils::checkGlError("TextReco renderFrame - post-drawROI");

    QCAR::Renderer::getInstance().end();
}
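// ----------------------------------------------------------------------------
// Note: wordDescCompare is referenced by the qsort call above but not defined
// in this snippet. A minimal comparator consistent with the "per line, left
// to right" ordering described there might look like this (a sketch; it
// assumes WordDesc carries the box corners (Ax, Ay) = upper-left and
// (Bx, By) = lower-right, as filled in above, and treats two words as being
// on the same line when their vertical extents overlap).
// ----------------------------------------------------------------------------
static int wordDescCompare(const void* p1, const void* p2)
{
    const struct WordDesc* w1 = (const struct WordDesc*) p1;
    const struct WordDesc* w2 = (const struct WordDesc*) p2;

    // Different lines: the word whose box ends above the other's top comes first
    if (w1->By < w2->Ay) return -1;
    if (w2->By < w1->Ay) return 1;

    // Same line: order left to right
    if (w1->Ax < w2->Ax) return -1;
    if (w1->Ax > w2->Ax) return 1;
    return 0;
}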
JNIEXPORT void JNICALL
Java_com_tvc_supastriker_SupaStrikerRenderer_renderFrame(JNIEnv* env, jobject obj)
{
    LOG("Java_com_tvc_supastriker_SupaStrikerRenderer_renderFrame");

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    QCAR::State state = QCAR::Renderer::getInstance().begin();
    QCAR::Renderer::getInstance().drawVideoBackground();

#ifdef USE_OPENGL_ES_1_1
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_NORMAL_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glEnable(GL_TEXTURE_2D);
    //glDisable(GL_LIGHTING);
    glEnable(GL_LIGHTING);
#endif

    glEnable(GL_DEPTH_TEST);
    //glEnable(GL_CULL_FACE);
    glDisable(GL_CULL_FACE);
    glCullFace(GL_BACK);
    // Note: both branches currently select counter-clockwise front faces
    // (and culling is disabled above, so this has no visible effect anyway)
    if (QCAR::Renderer::getInstance().getVideoBackgroundConfig().mReflection ==
        QCAR::VIDEO_BACKGROUND_REFLECTION_ON)
        glFrontFace(GL_CCW);
    else
        glFrontFace(GL_CCW);

    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        const QCAR::TrackableResult* result = state.getTrackableResult(tIdx);
        const QCAR::Trackable& trackable = result->getTrackable();
        QCAR::Matrix44F modelViewMatrix =
            QCAR::Tool::convertPose2GLMatrix(result->getPose());

        // Default to 0 so textureIndex is never read uninitialized when the
        // target name does not match
        int textureIndex = 0;
        if (strcmp(trackable.getName(), "SupaStrika") == 0)
        {
            textureIndex = 0;
        }
        const Texture* const thisTexture = textures[textureIndex];

#ifdef USE_OPENGL_ES_1_1
        // Load projection matrix
        glMatrixMode(GL_PROJECTION);
        glLoadMatrixf(projectionMatrix.data);

        // Load model view matrix
        glMatrixMode(GL_MODELVIEW);
        glLoadMatrixf(modelViewMatrix.data);
        glTranslatef(0.f, 0.f, kObjectScale);
        glScalef(kObjectScale, kObjectScale, kObjectScale);

        // Draw object
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*) &teapotTexCoords[0]);
        glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*) &teapotVertices[0]);
        glNormalPointer(GL_FLOAT, 0, (const GLvoid*) &teapotNormals[0]);
        //glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
        //               (const GLvoid*) &teapotIndices[0]);
        // Note: glDrawArrays on indexed teapot data draws the vertices in
        // array order, not in the index order glDrawElements would use
        glDrawArrays(GL_TRIANGLES, 0, NUM_TEAPOT_OBJECT_VERTEX);
#else
        QCAR::Matrix44F modelViewProjection;
        SampleUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale,
                                         &modelViewMatrix.data[0]);
        SampleUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale,
                                     &modelViewMatrix.data[0]);
        SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                    &modelViewMatrix.data[0],
                                    &modelViewProjection.data[0]);

        glUseProgram(shaderProgramID);

        glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &teapotVertices[0]);
        glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &teapotNormals[0]);
        glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &teapotTexCoords[0]);
        glEnableVertexAttribArray(vertexHandle);
        glEnableVertexAttribArray(normalHandle);
        glEnableVertexAttribArray(textureCoordHandle);

        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glUniform1i(texSampler2DHandle, 0);
        glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
                           (GLfloat*) &modelViewProjection.data[0]);
        glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
                       (const GLvoid*) &teapotIndices[0]);

        SampleUtils::checkGlError("SupaStriker renderFrame");
#endif
    }

    glDisable(GL_DEPTH_TEST);

#ifdef USE_OPENGL_ES_1_1
    glDisable(GL_TEXTURE_2D);
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_NORMAL_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
#else
    glDisableVertexAttribArray(vertexHandle);
    glDisableVertexAttribArray(normalHandle);
    glDisableVertexAttribArray(textureCoordHandle);
#endif

    QCAR::Renderer::getInstance().end();
}
JNIEXPORT void JNICALL
Java_com_wheelphone_targetNavigation_WheelphoneTargetNavigation_getTrackInfo(JNIEnv* env, jobject obj)
{
    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    //QCAR::Vec2F markerSize;

    jint jx[4] = {0};
    jint jy[4] = {0};
    jfloat distance[4] = {0};
    jfloat cam_x[4] = {0};
    jfloat cam_y[4] = {0};
    jfloat cam_z[4] = {0};
    jfloat target_pose_x[4] = {0}; // x, y, z coordinates of the targets with respect to the camera frame
    jfloat target_pose_y[4] = {0};
    jfloat target_pose_z[4] = {0};
    jboolean detected[4] = {false};

    // obj is the Java object on which this native method was called
    // (a FrameMarkersRenderer instance)
    jclass javaClass = env->GetObjectClass(obj);
    //jclass javaClass = env->FindClass("Lcom/wheelphone/targetNavigation/WheelphoneTargetNavigation;"); // doesn't work!
    jmethodID method = env->GetMethodID(javaClass, "updateMarkersInfo", "(IZIIFFFF)V");

    // Did we find any trackables this frame?
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable:
        const QCAR::TrackableResult* trackableResult = state.getTrackableResult(tIdx);
        if (trackableResult == NULL)
        {
            continue;
        }

        QCAR::Matrix44F modelViewMatrix =
            QCAR::Tool::convertPose2GLMatrix(trackableResult->getPose());

        // Choose the texture based on the target name:
        int textureIndex = 0;

        // Check the type of the trackable:
        assert(trackableResult->getType() == QCAR::TrackableResult::MARKER_RESULT);
        const QCAR::MarkerResult* markerResult =
            static_cast<const QCAR::MarkerResult*>(trackableResult);
        if (markerResult == NULL)
        {
            continue;
        }
        const QCAR::Marker& marker = markerResult->getTrackable();

        textureIndex = marker.getMarkerId();
        //markerSize = marker.getSize(); // this is the size specified during marker creation, not the current size!
        assert(textureIndex < textureCount);

        // Select which model to draw:
        const GLvoid* vertices = 0;
        const GLvoid* normals = 0;
        const GLvoid* indices = 0;
        const GLvoid* texCoords = 0;
        int numIndices = 0;

        // Project the marker's origin into the camera image and convert it
        // to screen coordinates
        QCAR::Vec2F result(0, 0);
        const QCAR::CameraCalibration& cameraCalibration =
            QCAR::CameraDevice::getInstance().getCameraCalibration();
        QCAR::Vec2F cameraPoint = QCAR::Tool::projectPoint(cameraCalibration,
            trackableResult->getPose(), QCAR::Vec3F(0, 0, 0));
        QCAR::VideoMode videoMode = QCAR::CameraDevice::getInstance().getVideoMode(
            QCAR::CameraDevice::MODE_OPTIMIZE_QUALITY); //MODE_DEFAULT);
        QCAR::VideoBackgroundConfig config =
            QCAR::Renderer::getInstance().getVideoBackgroundConfig();

        int xOffset = ((int) screenWidth - config.mSize.data[0]) / 2.0f + config.mPosition.data[0];
        int yOffset = ((int) screenHeight - config.mSize.data[1]) / 2.0f - config.mPosition.data[1];

        if (isActivityInPortraitMode)
        {
            // Camera image is rotated 90 degrees
            int rotatedX = videoMode.mHeight - cameraPoint.data[1];
            int rotatedY = cameraPoint.data[0];
            result = QCAR::Vec2F(
                rotatedX * config.mSize.data[0] / (float) videoMode.mHeight + xOffset,
                rotatedY * config.mSize.data[1] / (float) videoMode.mWidth + yOffset);
        }
        else
        {
            result = QCAR::Vec2F(
                cameraPoint.data[0] * config.mSize.data[0] / (float) videoMode.mWidth + xOffset,
                cameraPoint.data[1] * config.mSize.data[1] / (float) videoMode.mHeight + yOffset);
        }

        jx[textureIndex] = (int) result.data[0];
        jy[textureIndex] = (int) result.data[1];

        // Get position and orientation of the target with respect to the
        // camera reference frame
        QCAR::Matrix34F pose = trackableResult->getPose();
        target_pose_x[textureIndex] = pose.data[3];
        target_pose_y[textureIndex] = pose.data[7];
        target_pose_z[textureIndex] = pose.data[11];
        QCAR::Vec3F position(pose.data[3], pose.data[7], pose.data[11]);

        // dist = magnitude of the translation vector = sqrt(x*x + y*y + z*z)
        distance[textureIndex] = sqrt(position.data[0] * position.data[0] +
                                      position.data[1] * position.data[1] +
                                      position.data[2] * position.data[2]);

        QCAR::Matrix44F inverseMV = SampleMath::Matrix44FInverse(modelViewMatrix);
        QCAR::Matrix44F invTranspMV = SampleMath::Matrix44FTranspose(inverseMV);

        // Position of the camera and orientation axes with coordinates
        // represented in the reference frame of the trackable
        //cam_x[textureIndex] = invTranspMV.data[4];
        //cam_y[textureIndex] = invTranspMV.data[5];
        cam_z[textureIndex] = invTranspMV.data[6];

        detected[textureIndex] = true;

        //jstring js = env->NewStringUTF(marker.getName());
        //jclass javaClass = env->GetObjectClass(obj);
        //jmethodID method = env->GetMethodID(javaClass, "displayMessage", "(Ljava/lang/String;)V");
        //env->CallVoidMethod(obj, method, js);
    }

    // Placed outside the previous loop because we want to notify the Java
    // object of the detection state in all cases (both when detected and
    // when not detected)
    for (int i = 0; i < 4; i++)
    {
        // (obj, method, marker id, detected, x screen coord, y screen coord,
        //  distance, robot orientation component, robot-to-target angle
        //  components y and z)
        env->CallVoidMethod(obj, method, i, detected[i], jx[i], jy[i],
                            distance[i], cam_z[i], target_pose_y[i], target_pose_z[i]);
    }

    QCAR::Renderer::getInstance().end();
}
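// ----------------------------------------------------------------------------
// Note on the two matrix layouts used above: QCAR's Matrix34F pose is
// row-major, so the target's translation relative to the camera sits at
// indices 3, 7 and 11 (as read into target_pose_x/y/z). The GL-style
// Matrix44F produced by convertPose2GLMatrix is column-major, so the same
// translation occupies indices 12-14 (the ray-picking function below reads
// the camera position from those indices of the inverted matrix). A sketch
// making both layouts explicit (the helper name is hypothetical):
// ----------------------------------------------------------------------------
static void extractTranslations(const QCAR::TrackableResult* trackableResult)
{
    // Row-major 3x4 pose: the rightmost column holds the translation
    QCAR::Matrix34F pose = trackableResult->getPose();
    QCAR::Vec3F tPose(pose.data[3], pose.data[7], pose.data[11]);

    // Column-major 4x4 GL matrix: the translation occupies indices 12-14
    QCAR::Matrix44F mv = QCAR::Tool::convertPose2GLMatrix(trackableResult->getPose());
    QCAR::Vec3F tGL(mv.data[12], mv.data[13], mv.data[14]);

    // Both vectors describe the same target offset in camera space
    (void) tPose;
    (void) tGL;
}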
JNIEXPORT void JNICALL
Java_com_ar4android_rayPickingJME_RayPickingJME_updateTracking(JNIEnv* env, jobject obj)
{
    //LOG("Java_com_ar4android_rayPickingJME_RayPickingJMEActivity_GLRenderer_renderFrame");

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Explicitly render the Video Background
    // QCAR::Renderer::getInstance().drawVideoBackground();
    // if(QCAR::Renderer::getInstance().getVideoBackgroundConfig().mReflection == QCAR::VIDEO_BACKGROUND_REFLECTION_ON)

    // Did we find any trackables this frame?
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable:
        const QCAR::TrackableResult* result = state.getTrackableResult(tIdx);
        //const QCAR::Trackable& trackable = result->getTrackable();
        QCAR::Matrix44F modelViewMatrix =
            QCAR::Tool::convertPose2GLMatrix(result->getPose());

        // Get the camera transformation
        QCAR::Matrix44F inverseMV = MathUtil::Matrix44FInverse(modelViewMatrix);
        //QCAR::Matrix44F invTranspMV = modelViewMatrix;
        QCAR::Matrix44F invTranspMV = MathUtil::Matrix44FTranspose(inverseMV);

        // Get camera position
        float cam_x = invTranspMV.data[12];
        float cam_y = invTranspMV.data[13];
        float cam_z = invTranspMV.data[14];

        // Get camera rotation (right, up and direction axes)
        float cam_right_x = invTranspMV.data[0];
        float cam_right_y = invTranspMV.data[1];
        float cam_right_z = invTranspMV.data[2];
        float cam_up_x = invTranspMV.data[4];
        float cam_up_y = invTranspMV.data[5];
        float cam_up_z = invTranspMV.data[6];
        float cam_dir_x = invTranspMV.data[8];
        float cam_dir_y = invTranspMV.data[9];
        float cam_dir_z = invTranspMV.data[10];

        // Get the perspective transformation
        float nearPlane = 1.0f;
        float farPlane = 1000.0f;
        const QCAR::CameraCalibration& cameraCalibration =
            QCAR::CameraDevice::getInstance().getCameraCalibration();
        QCAR::VideoBackgroundConfig config =
            QCAR::Renderer::getInstance().getVideoBackgroundConfig();
        float viewportWidth = config.mSize.data[0];
        float viewportHeight = config.mSize.data[1];
        QCAR::Vec2F size = cameraCalibration.getSize();
        QCAR::Vec2F focalLength = cameraCalibration.getFocalLength();
        float fovRadians = 2 * atan(0.5f * size.data[1] / focalLength.data[1]);
        float fovDegrees = fovRadians * 180.0f / M_PI;
        float aspectRatio = size.data[0] / size.data[1];

        // Adjust for the distortion between the screen and camera sizes
        float viewportDistort = 1.0;
        if (viewportWidth != screenWidth)
        {
            viewportDistort = viewportWidth / (float) screenWidth;
            fovDegrees = fovDegrees * viewportDistort;
            aspectRatio = aspectRatio / viewportDistort;
        }
        if (viewportHeight != screenHeight)
        {
            viewportDistort = viewportHeight / (float) screenHeight;
            fovDegrees = fovDegrees / viewportDistort;
            aspectRatio = aspectRatio * viewportDistort;
        }

        //JNIEnv *env;
        //jvm->AttachCurrentThread((void **)&env, NULL);
        jclass activityClass = env->GetObjectClass(obj);
        jmethodID setCameraPerspectiveMethod = env->GetMethodID(activityClass,
                "setCameraPerspectiveNative", "(FF)V");
        env->CallVoidMethod(obj, setCameraPerspectiveMethod, fovDegrees, aspectRatio);

        // activityClass from above is reused for the remaining callbacks
        // (the original re-fetched it with a stray, leaked GetObjectClass call)
        jmethodID setCameraViewportMethod = env->GetMethodID(activityClass,
                "setCameraViewportNative", "(FFFF)V");
        env->CallVoidMethod(obj, setCameraViewportMethod, viewportWidth, viewportHeight,
                cameraCalibration.getSize().data[0], cameraCalibration.getSize().data[1]);

        jmethodID setCameraPoseMethod = env->GetMethodID(activityClass,
                "setCameraPoseNative", "(FFF)V");
        env->CallVoidMethod(obj, setCameraPoseMethod, cam_x, cam_y, cam_z);

        jmethodID setCameraOrientationMethod = env->GetMethodID(activityClass,
                "setCameraOrientationNative", "(FFFFFFFFF)V");
        env->CallVoidMethod(obj, setCameraOrientationMethod,
                cam_right_x, cam_right_y, cam_right_z,
                cam_up_x, cam_up_y, cam_up_z,
                cam_dir_x, cam_dir_y, cam_dir_z);

        // jvm->DetachCurrentThread();
    }

    QCAR::Renderer::getInstance().end();
}
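// ----------------------------------------------------------------------------
// Note: the field-of-view computation above is the standard pinhole-camera
// relation: half the image height over the focal length (both in pixels, so
// the units cancel) gives the tangent of the half-angle. Factored out for
// clarity (a sketch; the helper name is hypothetical):
// ----------------------------------------------------------------------------
static float verticalFovDegrees(const QCAR::CameraCalibration& calibration)
{
    QCAR::Vec2F size = calibration.getSize();
    QCAR::Vec2F focalLength = calibration.getFocalLength();
    // tan(fov/2) = (imageHeight/2) / focalLengthY
    float halfFovRadians = atan(0.5f * size.data[1] / focalLength.data[1]);
    return 2.0f * halfFovRadians * 180.0f / M_PI;
}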
JNIEXPORT void JNICALL
//Java_com_miosys_finder_ui_ImageTargetsRenderer_renderFrame(JNIEnv *, jobject)
Java_com_miosys_finder_ui_ImageTargetsRenderer_renderFrame(JNIEnv* env, jobject obj)
{
    LOG("Java_com_miosys_finder_ui_PfinderTargets_renderFrame");

    // Clear color and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();

    glEnable(GL_DEPTH_TEST);

    // We must detect if background reflection is active and adjust the culling direction.
    // If the reflection is active, this means the pose matrix has been reflected as well,
    // therefore standard counter-clockwise face culling will result in "inside out" models.
    glEnable(GL_CULL_FACE);
    glCullFace(GL_BACK);
    if (QCAR::Renderer::getInstance().getVideoBackgroundConfig().mReflection ==
        QCAR::VIDEO_BACKGROUND_REFLECTION_ON)
        glFrontFace(GL_CW);  // Front camera
    else
        glFrontFace(GL_CCW); // Back camera

    // Did we find any trackables this frame?
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable:
        const QCAR::TrackableResult* result = state.getTrackableResult(tIdx);
        const QCAR::Trackable& trackable = result->getTrackable();
        QCAR::Matrix44F modelViewMatrix =
            QCAR::Tool::convertPose2GLMatrix(result->getPose());

        if (!isExtendedTrackingActivated)
        {
            // Java method: switch to the list view activity after an object
            // has been detected
            jstring jstrTargetName = env->NewStringUTF(trackable.getName());
            jclass renderClass = env->GetObjectClass(obj);
            jmethodID switchToListViewID = env->GetMethodID(renderClass,
                    "switchToListView", "(Ljava/lang/String;)V");
            env->CallVoidMethod(obj, switchToListViewID, jstrTargetName);

            /*
            // Choose the texture based on the target name:
            int textureIndex;
            if (strcmp(trackable.getName(), "chips") == 0)
            {
                textureIndex = 0;
            }
            else if (strcmp(trackable.getName(), "stones") == 0)
            {
                textureIndex = 1;
            }
            else
            {
                textureIndex = 2;
            }
            // if(strcmp(trackable.getName(), "P1_01") == 0){
            //     textureIndex = 0;
            // }
            // else if(strcmp(trackable.getName(),"P1_02") == 1){
            //     textureIndex = 1;
            // }
            // else {
            //     textureIndex = 2;
            // }

            const Texture* const thisTexture = textures[textureIndex];

            QCAR::Matrix44F modelViewProjection;
            SampleUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale,
                                             &modelViewMatrix.data[0]);
            SampleUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale,
                                         &modelViewMatrix.data[0]);
            SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                        &modelViewMatrix.data[0],
                                        &modelViewProjection.data[0]);

            glUseProgram(shaderProgramID);
            glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                                  (const GLvoid*) &teapotVertices[0]);
            glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
                                  (const GLvoid*) &teapotNormals[0]);
            glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                                  (const GLvoid*) &teapotTexCoords[0]);
            glEnableVertexAttribArray(vertexHandle);
            glEnableVertexAttribArray(normalHandle);
            glEnableVertexAttribArray(textureCoordHandle);
            glActiveTexture(GL_TEXTURE0);
            glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
            glUniform1i(texSampler2DHandle, 0);  // GL_TEXTURE0
            glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
                               (GLfloat*) &modelViewProjection.data[0]);
            glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
                           (const GLvoid*) &teapotIndices[0]);
            glDisableVertexAttribArray(vertexHandle);
            glDisableVertexAttribArray(normalHandle);
            glDisableVertexAttribArray(textureCoordHandle);
            SampleUtils::checkGlError("ImageTargets renderFrame");
            */
        }
        else
        {
            /*
            // Extended tracking branch (also disabled in the original):
            const Texture* const thisTexture = textures[3];

            QCAR::Matrix44F modelViewProjection;
            SampleUtils::translatePoseMatrix(0.0f, 0.0f, kBuildingsObjectScale,
                                             &modelViewMatrix.data[0]);
            SampleUtils::rotatePoseMatrix(90.0f, 1.0f, 0.0f, 0.0f,
                                          &modelViewMatrix.data[0]);
            SampleUtils::scalePoseMatrix(kBuildingsObjectScale, kBuildingsObjectScale,
                                         kBuildingsObjectScale,
                                         &modelViewMatrix.data[0]);
            SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                        &modelViewMatrix.data[0],
                                        &modelViewProjection.data[0]);

            glUseProgram(shaderProgramID);
            glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                                  (const GLvoid*) &buildingsVerts[0]);
            glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
                                  (const GLvoid*) &buildingsNormals[0]);
            glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                                  (const GLvoid*) &buildingsTexCoords[0]);
            glEnableVertexAttribArray(vertexHandle);
            glEnableVertexAttribArray(normalHandle);
            glEnableVertexAttribArray(textureCoordHandle);
            glActiveTexture(GL_TEXTURE0);
            glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
            glUniform1i(texSampler2DHandle, 0);  // GL_TEXTURE0
            glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
                               (GLfloat*) &modelViewProjection.data[0]);
            glDrawArrays(GL_TRIANGLES, 0, buildingsNumVerts);
            glDisableVertexAttribArray(vertexHandle);
            glDisableVertexAttribArray(normalHandle);
            glDisableVertexAttribArray(textureCoordHandle);
            SampleUtils::checkGlError("ImageTargets renderFrame");
            */
        }
    }

    glDisable(GL_DEPTH_TEST);

    QCAR::Renderer::getInstance().end();
}
void MSRenderer::renderFrame()
{
    // Clear color and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();

    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    // Did we find any trackables this frame?
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable:
        const QCAR::TrackableResult* trackableResult = state.getTrackableResult(tIdx);
        QCAR::Matrix44F modelViewMatrix =
            QCAR::Tool::convertPose2GLMatrix(trackableResult->getPose());
        MSRenderer::scalePoseMatrix(MSController::getFrameRatio(), 1, 1,
                                    &modelViewMatrix.data[0]);

        // Get the target info
        void* userData = trackableResult->getTrackable().getUserData();
        MSTargetInfo* info = static_cast<MSTargetInfo*>(userData);
        MSTexture* tex = info->getTexture();
        MSModel* model = info->getModel();

        // Bind texture to OpenGL if not done yet
        if (!tex->mHasID)
        {
            glGenTextures(1, &(tex->mTextureID));
            glBindTexture(GL_TEXTURE_2D, tex->mTextureID);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, tex->mWidth, tex->mHeight,
                         0, GL_RGBA, GL_UNSIGNED_BYTE, (GLvoid*) tex->mData);
            tex->mHasID = true;
        }

        MSRenderer::multiplyMatrix(&modelViewMatrix.data[0], info->getPose(),
                                   &modelViewMatrix.data[0]);

        QCAR::Matrix44F modelViewProjection;
        MSRenderer::multiplyMatrix(&projectionMatrix.data[0],
                                   &modelViewMatrix.data[0],
                                   &modelViewProjection.data[0]);

        glUseProgram(shaderProgramID);

        glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0, model->vertices);
        glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0, model->normals);
        glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0, model->texCoords);
        glEnableVertexAttribArray(vertexHandle);
        glEnableVertexAttribArray(normalHandle);
        glEnableVertexAttribArray(textureCoordHandle);

        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, tex->mTextureID);
        glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
                           (GLfloat*) modelViewProjection.data);
        glUniform1i(texSampler2DHandle, 0);
        glDrawElements(GL_TRIANGLES, 3 * model->nFaces, GL_UNSIGNED_SHORT,
                       model->faces);
    }

    glDisable(GL_DEPTH_TEST);
    glDisableVertexAttribArray(vertexHandle);
    glDisableVertexAttribArray(normalHandle);
    glDisableVertexAttribArray(textureCoordHandle);
    glDisable(GL_BLEND);

    QCAR::Renderer::getInstance().end();
}
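// ----------------------------------------------------------------------------
// Note: the lazy texture upload above sets only the min/mag filters. In
// OpenGL ES 2.0, non-power-of-two textures are only "complete" with
// GL_CLAMP_TO_EDGE wrapping and no mipmapping, so if the target textures can
// be NPOT it is safer to set the wrap modes as well. A sketch extending the
// binding code above (the helper name is hypothetical):
// ----------------------------------------------------------------------------
static void setNpotSafeTexParams(GLuint textureID)
{
    glBindTexture(GL_TEXTURE_2D, textureID);
    // Linear filtering without mipmaps, as in the code above
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    // Clamp-to-edge wrapping, required for NPOT completeness in ES 2.0
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}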
JNIEXPORT void JNICALL
Java_com_siu_android_arapp_vuforia_ImageTargetsRenderer_renderFrame(JNIEnv* env, jobject object)
{
    //LOG("Java_com_siu_android_arapp_vuforia_GLRenderer_renderFrame");

    // Clear color and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();

#ifdef USE_OPENGL_ES_1_1
    // Set GL11 flags:
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_NORMAL_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glEnable(GL_TEXTURE_2D);
    glDisable(GL_LIGHTING);
#endif

    // This renderer draws no 3D content; it only reports detections back to
    // Java. The commented-out depth-test/culling setup and teapot drawing code
    // that used to fill this function duplicated the ImageTargets sample
    // renderFrame verbatim and has been removed.

    // Did we find any trackables this frame?
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable:
        const QCAR::TrackableResult* result = state.getTrackableResult(tIdx);
        const QCAR::Trackable& trackable = result->getTrackable();

        // The translation sits in the last column of the 3x4 pose matrix; its
        // length is the camera-to-target distance.
        QCAR::Matrix34F pose = result->getPose();
        QCAR::Vec3F position(pose.data[3], pose.data[7], pose.data[11]);
        float distance = sqrt(position.data[0] * position.data[0] +
                              position.data[1] * position.data[1] +
                              position.data[2] * position.data[2]);
        //LOG("DISTANCE: %f", distance);

        // Report the detection to Java. The error paths must still close the
        // rendering section before returning; the original returned without
        // ever calling end().
        jclass clazz = env->FindClass("com/siu/android/arapp/vuforia/ImageTargetsRenderer");
        if (clazz == 0)
        {
            LOG("FindClass error");
            QCAR::Renderer::getInstance().end();
            return;
        }

        jmethodID jmethod = env->GetMethodID(clazz, "objectDetected",
                                             "(Ljava/lang/String;F)V");
        if (jmethod == 0)
        {
            LOG("GetMethodID error");
            QCAR::Renderer::getInstance().end();
            return;
        }

        jstring s = env->NewStringUTF(trackable.getName());
        env->CallVoidMethod(object, jmethod, s, distance);
        env->DeleteLocalRef(s);  // avoid leaking one local reference per frame
    }

    QCAR::Renderer::getInstance().end();
}
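The callback above resolves the Java class and method on every tracked frame. A common alternative is to cache both once at library load; below is a minimal sketch, assuming a JNI_OnLoad hook and reusing the class and method names from the lookup above (the gRendererClass/gObjectDetected names are hypothetical).

// Hypothetical one-time caching of the JNI callback, so renderFrame() does
// not pay for FindClass/GetMethodID on every frame.
static jclass    gRendererClass  = 0;  // global ref, survives across frames
static jmethodID gObjectDetected = 0;  // method IDs stay valid with the class

JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*)
{
    JNIEnv* env = 0;
    if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK)
        return -1;

    jclass local = env->FindClass("com/siu/android/arapp/vuforia/ImageTargetsRenderer");
    if (local == 0)
        return -1;

    // Promote to a global reference; a bare jclass local ref would be invalid
    // once this function returns.
    gRendererClass = static_cast<jclass>(env->NewGlobalRef(local));
    env->DeleteLocalRef(local);

    gObjectDetected = env->GetMethodID(gRendererClass, "objectDetected",
                                       "(Ljava/lang/String;F)V");
    return (gObjectDetected != 0) ? JNI_VERSION_1_6 : -1;
}

With this in place, the loop body reduces to building the jstring and calling env->CallVoidMethod(object, gObjectDetected, s, distance) directly.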
// Helper introduced here to consolidate the draw sequence that the original
// repeated verbatim for every target (only the vertex arrays differed). The
// name drawModel and this factoring are editorial, not from the sample.
static void drawModel(const QCAR::TrackableResult* result,
                      const Texture* thisTexture,
                      const float* verts, const float* normals,
                      const float* texCoords, int numVerts)
{
    QCAR::Matrix44F modelViewMatrix =
        QCAR::Tool::convertPose2GLMatrix(result->getPose());
    animateteapot(modelViewMatrix);

    SampleUtils::translatePoseMatrix(0.0f, -0.50f * 120.0f, 1.35f * 120.0f,
                                     &modelViewMatrix.data[0]);
    // SampleUtils::rotatePoseMatrix(angle, ax, ay, az, m): this rotates by
    // objectx degrees around the (objecty, 0, 0) axis, as in the original.
    SampleUtils::rotatePoseMatrix(objectx, objecty, 0.0f, 0,
                                  &modelViewMatrix.data[0]);
    SampleUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale,
                                 &modelViewMatrix.data[0]);

    QCAR::Matrix44F modelViewProjection;
    SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                &modelViewMatrix.data[0],
                                &modelViewProjection.data[0]);

    glUseProgram(shaderProgramID);

    glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                          (const GLvoid*) verts);
    glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
                          (const GLvoid*) normals);
    glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                          (const GLvoid*) texCoords);

    glEnableVertexAttribArray(vertexHandle);
    glEnableVertexAttribArray(normalHandle);
    glEnableVertexAttribArray(textureCoordHandle);

    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
    glUniform1i(texSampler2DHandle, 0);
    glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
                       (GLfloat*) &modelViewProjection.data[0]);
    glDrawArrays(GL_TRIANGLES, 0, numVerts);

    glDisableVertexAttribArray(vertexHandle);
    glDisableVertexAttribArray(normalHandle);
    glDisableVertexAttribArray(textureCoordHandle);

    SampleUtils::checkGlError("ImageTargets renderFrame");
}

JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_renderFrame(JNIEnv*, jobject)
{
    //LOG("Java_com_qualcomm_QCARSamples_ImageTargets_GLRenderer_renderFrame");

    // Clear color and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();

    glEnable(GL_DEPTH_TEST);

    // We must detect if background reflection is active and adjust the culling
    // direction. If the reflection is active, the pose matrix has been
    // reflected as well, so standard counter-clockwise face culling would turn
    // the models "inside out".
    glEnable(GL_CULL_FACE);
    glCullFace(GL_BACK);
    if (QCAR::Renderer::getInstance().getVideoBackgroundConfig().mReflection ==
        QCAR::VIDEO_BACKGROUND_REFLECTION_ON)
        glFrontFace(GL_CW);   // Front camera
    else
        glFrontFace(GL_CCW);  // Back camera

    // Did we find any trackables this frame?
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable:
        const QCAR::TrackableResult* result = state.getTrackableResult(tIdx);
        const QCAR::Trackable& trackable = result->getTrackable();
        const char* name = trackable.getName();

        // Every drawing branch in the original used texture 0; only the model
        // data and the modeltype flag vary with the target name.
        const Texture* const thisTexture = textures[0];
        modeltype = 11;

        if (strcmp(name, "huitailang") == 0 || strcmp(name, "stones") == 0 ||
            strcmp(name, "chips") == 0)
        {
            modeltype = 2;
            drawModel(result, thisTexture, teapotVerts, teapotNormals,
                      teapotTexCoords, teapotNumVerts);
        }
        else if (strcmp(name, "heroin") == 0)
        {
            modeltype = 1;
            drawModel(result, thisTexture, HeroinVerts, HeroinNormals,
                      HeroinTexCoords, HeroinNumVerts);
        }
        else if (strcmp(name, "chhhh") == 0)
        {
            modeltype = 3;
            drawModel(result, thisTexture, chhhhVerts, chhhhNormals,
                      chhhhTexCoords, chhhhNumVerts);
        }
        else if (strcmp(name, "hh") == 0)
        {
            modeltype = 4;
            drawModel(result, thisTexture, hhVerts, hhNormals,
                      hhTexCoords, hhNumVerts);
        }
        else if (strcmp(name, "coo") == 0)
        {
            modeltype = 5;
            drawModel(result, thisTexture, cooVerts, cooNormals,
                      cooTexCoords, cooNumVerts);
        }
        // The original also had separate "stones" and "chips" branches, but
        // both were unreachable: those names are already matched by the first
        // condition above. A final else set textureIndex = 6, which was never
        // read; both have been dropped.
    }

    // The original called glDisable(GL_DEPTH_TEST) and Renderer::end() inside
    // each drawing branch, so end() ran once per drawn target and never when
    // no target was visible. Both belong here, exactly once per frame.
    glDisable(GL_DEPTH_TEST);
    QCAR::Renderer::getInstance().end();
}
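All of the renderers above must pair QCAR::Renderer::begin() with end(), and two of the originals had paths that skipped end(). A small RAII guard makes the pairing automatic; this wrapper is an editorial sketch, not part of the QCAR API.

// Hypothetical RAII wrapper around the QCAR rendering section: end() runs on
// every exit path, including the early returns in the JNI-callback renderer.
class ScopedRenderSection
{
public:
    ScopedRenderSection() : mState(QCAR::Renderer::getInstance().begin()) {}
    ~ScopedRenderSection() { QCAR::Renderer::getInstance().end(); }

    const QCAR::State& state() const { return mState; }

private:
    QCAR::State mState;

    // Non-copyable: exactly one section per frame.
    ScopedRenderSection(const ScopedRenderSection&);
    ScopedRenderSection& operator=(const ScopedRenderSection&);
};

// Usage inside any renderFrame():
//     ScopedRenderSection section;
//     QCAR::Renderer::getInstance().drawVideoBackground();
//     for (int i = 0; i < section.state().getNumTrackableResults(); ++i) { ... }
//     // no explicit end() needed; the destructor closes the section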