Example #1
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_Dominoes_DominoesRenderer_renderFrame(JNIEnv* , jobject)
{
    // Clear the color and depth buffers
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    
    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();
    
    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();
    
    // Names of the trackables found this frame (used to decide which tracker buttons to hide)
    std::vector<const char*> found;

    // Did we find any trackables this frame?
    if (state.getNumTrackableResults() > 0) {

    	for(int tIdx = 0; tIdx < state.getNumTrackableResults(); ++tIdx) {

			// Get this trackable result and its trackable
			const QCAR::TrackableResult* trackableResult = state.getTrackableResult(tIdx);
			const QCAR::Trackable& trackable = trackableResult->getTrackable();

			found.push_back(trackable.getName());

			// The image target specific result:
			assert(trackableResult->getType() == QCAR::TrackableResult::IMAGE_TARGET_RESULT);
			const QCAR::ImageTargetResult* imageTargetResult =
				static_cast<const QCAR::ImageTargetResult*>(trackableResult);

			// If this is our first time seeing the target, display a tip
			if (!displayedMessage) {
				displayMessage("Find marker man!");
				displayedMessage = true;
			}


			//const QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
			//const QCAR::Tracker* tracker = trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER);
			const QCAR::CameraCalibration& cameraCalibration = QCAR::CameraDevice::getInstance().getCameraCalibration();

			QCAR::Vec2F cameraPoint = QCAR::Tool::projectPoint(cameraCalibration, trackableResult->getPose(), QCAR::Vec3F(0,0,0));
			QCAR::Vec2F xyPoint = cameraPointToScreenPoint(cameraPoint);

			showTrackerButton(xyPoint.data[0], xyPoint.data[1], trackable.getName());

		}

    } else {
    	hideTrackerButton(found);
    }
    
    QCAR::Renderer::getInstance().end();
}
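
This example (and the two that follow) relies on a cameraPointToScreenPoint() helper that is not shown in these snippets. A minimal sketch of such a helper, modeled on the one in the QCAR/Vuforia Dominoes sample and assuming the usual globals screenWidth, screenHeight and isActivityInPortraitMode, could look like this:

QCAR::Vec2F
cameraPointToScreenPoint(QCAR::Vec2F cameraPoint)
{
    // Query how the camera image is mapped onto the screen
    QCAR::VideoMode videoMode =
        QCAR::CameraDevice::getInstance().getVideoMode(QCAR::CameraDevice::MODE_DEFAULT);
    QCAR::VideoBackgroundConfig config =
        QCAR::Renderer::getInstance().getVideoBackgroundConfig();

    int xOffset = ((int) screenWidth  - config.mSize.data[0]) / 2.0f + config.mPosition.data[0];
    int yOffset = ((int) screenHeight - config.mSize.data[1]) / 2.0f - config.mPosition.data[1];

    if (isActivityInPortraitMode)
    {
        // In portrait mode the camera image is rotated 90 degrees
        int rotatedX = videoMode.mHeight - cameraPoint.data[1];
        int rotatedY = cameraPoint.data[0];

        return QCAR::Vec2F(rotatedX * config.mSize.data[0] / (float) videoMode.mHeight + xOffset,
                           rotatedY * config.mSize.data[1] / (float) videoMode.mWidth  + yOffset);
    }

    return QCAR::Vec2F(cameraPoint.data[0] * config.mSize.data[0] / (float) videoMode.mWidth  + xOffset,
                       cameraPoint.data[1] * config.mSize.data[1] / (float) videoMode.mHeight + yOffset);
}

Note that Example #3 calls a two-argument variant, cameraPointToScreenPoint(cameraPoint, true); that extra flag is specific to that project and not covered by this sketch.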
Example #2
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_renderFrame(JNIEnv *, jobject)
{
    //LOG("Java_com_qualcomm_QCARSamples_ImageTargets_GLRenderer_renderFrame");

    // Clear color and depth buffer 
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();
    
    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();
       
#ifdef USE_OPENGL_ES_1_1
    // Set GL11 flags:
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_NORMAL_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);

    glEnable(GL_TEXTURE_2D);
    glDisable(GL_LIGHTING);
        
#endif

    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);

    // Did we find any trackables this frame?
    for(int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable:
    	const QCAR::TrackableResult* result = state.getTrackableResult(tIdx);
    	const QCAR::Trackable& trackable = result->getTrackable();
    	QCAR::Matrix44F modelViewMatrix =
            QCAR::Tool::convertPose2GLMatrix(result->getPose());

        const QCAR::CameraCalibration& cameraCalibration = QCAR::CameraDevice::getInstance().getCameraCalibration();
        QCAR::Vec2F cameraPoint = QCAR::Tool::projectPoint(cameraCalibration, result->getPose(), QCAR::Vec3F(0,0,0));
        QCAR::Vec2F xyPoint = cameraPointToScreenPoint(cameraPoint);
        // LOG("xyPoint %f, %f ", xyPoint.data[0], xyPoint.data[1] );
        if (xyPoint.data[1] > (screenHeight / 2) + tolerancy || xyPoint.data[1] < (screenHeight / 2) - tolerancy) {
        	continue;
        }
        // Choose the texture based on the target name:
        int textureIndex = 0;
        // LOG("texture = %s", trackable->getName());
        // LOG("textureCount %d", textureCount);

        // +5 leaves room for the 4-character extension plus the null terminator
        char trackJpg[strlen(trackable.getName()) + 5];
        strcpy(trackJpg, trackable.getName());
        strcat(trackJpg, ".jpg");
        // LOG("trackJpg %s", trackJpg);

        char trackPng[strlen(trackable.getName()) + 5];
        strcpy(trackPng, trackable.getName());
        strcat(trackPng, ".png");
        // LOG("trackPng %s", trackPng);

        for(int i = 0; i < textureCount; i++) {
            // LOG("textures[i]->mName %s", textures[i]->mName);
        	if (strcmp(textures[i]->mName, trackPng) == 0 ||
        			strcmp(textures[i]->mName, trackJpg) == 0) {
        		textureIndex = i;
        	}
        }
        const Texture* const thisTexture = textures[textureIndex];
        // LOG("thisTexture->mName %s", textures[textureIndex]->mName);

#ifdef USE_OPENGL_ES_1_1
        // Load projection matrix:
        glMatrixMode(GL_PROJECTION);
        glLoadMatrixf(projectionMatrix.data);

        // Load model view matrix:
        glMatrixMode(GL_MODELVIEW);
        glLoadMatrixf(modelViewMatrix.data);
        glTranslatef(0.f, 0.f, kObjectScale);
        glScalef(kObjectScale, kObjectScale, kObjectScale);

        // Draw object:
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*) &teapotTexCoords[0]);
        glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*) &teapotVertices[0]);
        glNormalPointer(GL_FLOAT, 0,  (const GLvoid*) &teapotNormals[0]);
        glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
                       (const GLvoid*) &teapotIndices[0]);
#else
        /*
        QCAR::Matrix44F modelViewProjection;

        SampleUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale,
                                         &modelViewMatrix.data[0]);
        SampleUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale,
                                     &modelViewMatrix.data[0]);
        SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                    &modelViewMatrix.data[0] ,
                                    &modelViewProjection.data[0]);

        glUseProgram(shaderProgramID);
         
        glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &teapotVertices[0]);
        glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &teapotNormals[0]);
        glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &teapotTexCoords[0]);
        
        glEnableVertexAttribArray(vertexHandle);
        glEnableVertexAttribArray(normalHandle);
        glEnableVertexAttribArray(textureCoordHandle);
        
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
                           (GLfloat*)&modelViewProjection.data[0] );
        glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
                       (const GLvoid*) &teapotIndices[0]);
		*/
        //QCAR::Vec2F targetSize = ((QCAR::ImageTarget *) trackable)->getSize();
        //QCAR::Vec2F targetSize = thisTexture->getSize();

        const QCAR::ImageTarget& imageTarget =
            static_cast<const QCAR::ImageTarget&>(result->getTrackable());

        QCAR::Vec2F targetSize = imageTarget.getSize();

        QCAR::Matrix44F modelViewProjection;

        SampleUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale, &modelViewMatrix.data[0]);

        // Note: the target-size-based scaling is commented out; the plane is scaled
        // with a hard-coded 256x256 instead, so targetSize above is currently unused.
        //SampleUtils::scalePoseMatrix(targetSize.data[0], targetSize.data[1], 1.0f,
        //                             &modelViewMatrix.data[0]);
        SampleUtils::scalePoseMatrix(256, 256, kObjectScale,
                                     &modelViewMatrix.data[0]);
        SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                    &modelViewMatrix.data[0],
                                    &modelViewProjection.data[0]);

        glUseProgram(shaderProgramID);

        glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &planeVertices[0]);
        glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &planeNormals[0]);
        glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &planeTexCoords[0]);

        glEnableVertexAttribArray(vertexHandle);
        glEnableVertexAttribArray(normalHandle);
        glEnableVertexAttribArray(textureCoordHandle);

        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
                           (GLfloat*)&modelViewProjection.data[0]);
        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT,
                       (const GLvoid*) &planeIndices[0]);

        SampleUtils::checkGlError("ImageTargets renderFrame");
#endif

    }

    glDisable(GL_DEPTH_TEST);

#ifdef USE_OPENGL_ES_1_1        
    glDisable(GL_TEXTURE_2D);
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_NORMAL_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
#else
    glDisableVertexAttribArray(vertexHandle);
    glDisableVertexAttribArray(normalHandle);
    glDisableVertexAttribArray(textureCoordHandle);
#endif

    QCAR::Renderer::getInstance().end();
}
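
The texture lookup in this example can also be written with std::string, which sidesteps the manual buffer sizing for the strcpy/strcat calls. A minimal sketch, reusing the textures/textureCount globals from the sample (the helper name is illustrative, not part of the original code):

#include <string>

// Returns the index of the texture whose mName matches "<name>.jpg" or "<name>.png",
// falling back to 0 when nothing matches (same behaviour as the loop above).
static int findTextureIndexForTrackable(const char* trackableName)
{
    const std::string jpgName = std::string(trackableName) + ".jpg";
    const std::string pngName = std::string(trackableName) + ".png";

    for (int i = 0; i < textureCount; ++i)
    {
        if (jpgName == textures[i]->mName || pngName == textures[i]->mName)
            return i;
    }
    return 0;
}

Inside renderFrame this would be used as: int textureIndex = findTextureIndexForTrackable(trackable.getName());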
Example #3
JNIEXPORT jboolean JNICALL
Java_com_snda_sdar_ImageTargetsRenderer_renderFrame(JNIEnv *env, jobject obj)
{
	//LOG("Java_com_snda_sdar_ImageTargets_GLRenderer_renderFrame");

	// Clear color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	jboolean isDetected = false;

	// Render video background:
	QCAR::State state = QCAR::Renderer::getInstance().begin();

	// Explicitly render the Video Background
	QCAR::Renderer::getInstance().drawVideoBackground();

#ifdef USE_OPENGL_ES_1_1
	// Set GL11 flags:
	glEnableClientState(GL_VERTEX_ARRAY);
	glEnableClientState(GL_NORMAL_ARRAY);
	glEnableClientState(GL_TEXTURE_COORD_ARRAY);

	glEnable(GL_TEXTURE_2D);
	glDisable(GL_LIGHTING);

#endif

	glEnable(GL_DEPTH_TEST);
	glEnable(GL_CULL_FACE);

	// Did we find any trackables this frame?
	for(int tIdx = 0; tIdx < state.getNumActiveTrackables(); tIdx++)
	{
		isDetected = true;

		// Get the trackable:
		const QCAR::Trackable* trackable = state.getActiveTrackable(tIdx);
		QCAR::Matrix44F modelViewMatrix =
				QCAR::Tool::convertPose2GLMatrix(trackable->getPose());

		// Choose the texture based on the target name:
		//int textureIndex = (!strcmp(trackable->getName(), "stones")) ? 0 : 1;
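		// NOTE: the per-name selection above is commented out, so textureIndex is
		// assumed to be declared and assigned elsewhere (e.g. a global set from Java).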

		const Texture* const thisTexture = textures[textureIndex];
		const Texture* const tagTexture = textures[textureCount - 1];

#ifdef USE_OPENGL_ES_1_1
		// Load projection matrix:
		glMatrixMode(GL_PROJECTION);
		glLoadMatrixf(projectionMatrix.data);

		// Load model view matrix:
		glMatrixMode(GL_MODELVIEW);
		glLoadMatrixf(modelViewMatrix.data);
		glTranslatef(0.f, 0.f, kObjectScale);
		glScalef(kObjectScale, kObjectScale, kObjectScale);

		// Draw object:
		glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
		glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*) &planeTexCoords[0]);
		glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*) &planeVertices[0]);
		glNormalPointer(GL_FLOAT, 0, (const GLvoid*) &planeNormals[0]);
		glDrawElements(GL_TRIANGLES, NUM_PLANE_OBJECT_INDEX, GL_UNSIGNED_SHORT,
				(const GLvoid*) &planeIndices[0]);

		//		// Load model view matrix:
		//		glMatrixMode(GL_MODELVIEW);
		//		glLoadMatrixf(modelViewMatrix.data);
		//		glTranslatef(50.f, 50.f, kObjectScale);
		//		glScalef(1, 1, 1);
		//
		//		// Draw object:
		//		glBindTexture(GL_TEXTURE_2D, tagTexture->mTextureID);
		//		glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*) &planeTexCoords[0]);
		//		glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*) &planeVertices[0]);
		//		glNormalPointer(GL_FLOAT, 0, (const GLvoid*) &planeNormals[0]);
		//		glDrawElements(GL_TRIANGLES, NUM_PLANE_OBJECT_INDEX, GL_UNSIGNED_SHORT,
		//				(const GLvoid*) &planeIndices[0]);
#else

		QCAR::Matrix44F modelViewMatrix2;

		for (int i = 0; i < 16; i++)
		{
			modelViewMatrix2.data[i] = modelViewMatrix.data[i];
		}

		QCAR::Matrix44F modelViewProjection;
		QCAR::Matrix44F modelViewProjection2;

		SampleUtils::translatePoseMatrix(translateX, translateY, kObjectScale,
				&modelViewMatrix.data[0]);
		SampleUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale,
				&modelViewMatrix.data[0]);

		SampleUtils::rotatePoseMatrix(angleX, 0, 1, 0,
				&modelViewMatrix.data[0]);
		SampleUtils::rotatePoseMatrix(angleY, 1, 0, 0,
				&modelViewMatrix.data[0]);

		SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
				&modelViewMatrix.data[0] ,
				&modelViewProjection.data[0]);

		glUseProgram(shaderProgramID);

		glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
				(const GLvoid*) &planeVertices[0]);
		glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
				(const GLvoid*) &planeNormals[0]);
		glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
				(const GLvoid*) &planeTexCoords[0]);

		glEnableVertexAttribArray(vertexHandle);
		glEnableVertexAttribArray(normalHandle);
		glEnableVertexAttribArray(textureCoordHandle);

		glActiveTexture(GL_TEXTURE0);
		glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
		glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
				(GLfloat*)&modelViewProjection.data[0] );
		glDrawElements(GL_TRIANGLES, NUM_PLANE_OBJECT_INDEX, GL_UNSIGNED_SHORT,
				(const GLvoid*) &planeIndices[0]);

		/////////////////////////////////////////////////////////////////////////////

		SampleUtils::translatePoseMatrix(50, 50, kObjectScale,
				&modelViewMatrix2.data[0]);
		SampleUtils::scalePoseMatrix(1, 1, 1,
				&modelViewMatrix2.data[0]);

		SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
				&modelViewMatrix2.data[0] ,
				&modelViewProjection2.data[0]);

		glActiveTexture(GL_TEXTURE0);
		glBindTexture(GL_TEXTURE_2D, tagTexture->mTextureID);
		glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
				(GLfloat*)&modelViewProjection2.data[0] );
		glDrawElements(GL_TRIANGLES, NUM_PLANE_OBJECT_INDEX, GL_UNSIGNED_SHORT,
				(const GLvoid*) &planeIndices[0]);

		jclass renderClass = env->GetObjectClass(obj);

		jmethodID screenPointMethodId = env->GetMethodID(renderClass , "screenPoint", "(FF)V");
		if (!screenPointMethodId)
		{
			LOG("Function screenPoint(float, float) not found.");
			// End the rendering section before bailing out so QCAR is left in a consistent state
			QCAR::Renderer::getInstance().end();
			return JNI_FALSE;
		}

		const QCAR::CameraCalibration& cameraCalibration =
				QCAR::CameraDevice::getInstance().getCameraCalibration();
		QCAR::Vec2F cameraPoint = QCAR::Tool::projectPoint(cameraCalibration, trackable->getPose(), QCAR::Vec3F(0, 0, 0));

		QCAR::Vec2F screenPoint = cameraPointToScreenPoint(cameraPoint, true);

		// screenPoint(float, float) has a void return type, so call it with CallVoidMethod
		env->CallVoidMethod(obj, screenPointMethodId, screenPoint.data[0], screenPoint.data[1]);

		SampleUtils::checkGlError("ImageTargets renderFrame");
#endif

	}

	glDisable(GL_DEPTH_TEST);

#ifdef USE_OPENGL_ES_1_1        
	glDisable(GL_TEXTURE_2D);
	glDisableClientState(GL_VERTEX_ARRAY);
	glDisableClientState(GL_NORMAL_ARRAY);
	glDisableClientState(GL_TEXTURE_COORD_ARRAY);
#else
	glDisableVertexAttribArray(vertexHandle);
	glDisableVertexAttribArray(normalHandle);
	glDisableVertexAttribArray(textureCoordHandle);
#endif

	QCAR::Renderer::getInstance().end();

	return isDetected;
}
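
Example #3 resolves the screenPoint(FF)V callback with GetMethodID on every rendered frame. A common JNI pattern is to cache the jmethodID once and reuse it; the sketch below assumes a hypothetical one-time initRendering native method on the same renderer class and an illustrative global name:

// Cached JNI handle (illustrative global name)
static jmethodID gScreenPointMethodId = 0;

JNIEXPORT void JNICALL
Java_com_snda_sdar_ImageTargetsRenderer_initRendering(JNIEnv* env, jobject obj)
{
	// Resolve the Java callback once instead of on every frame
	jclass renderClass = env->GetObjectClass(obj);
	gScreenPointMethodId = env->GetMethodID(renderClass, "screenPoint", "(FF)V");
	if (!gScreenPointMethodId)
		LOG("Function screenPoint(float, float) not found.");
}

// renderFrame can then simply do:
//     if (gScreenPointMethodId)
//         env->CallVoidMethod(obj, gScreenPointMethodId,
//                             screenPoint.data[0], screenPoint.data[1]);

The cached jmethodID stays valid for as long as the class is loaded, so this avoids the per-frame GetObjectClass/GetMethodID lookups.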