Example #1
	void 
	MultiTarget::update(ARMarkerInfo* targetInfo, int targetCount) 
	{

		if (_active == false) 
		{
			// If the target isn't active, then it can't be valid, and should not be updated either.
			_valid = false;
			return;
		}

		// Sanity check
		if (!targetInfo) 
		{
			_valid = false;
			return;
		}

		_valid = (arMultiGetTransMat(targetInfo, targetCount, m_multi) >= 0);

		if (_valid) {
			double modelView[16];
			arglCameraViewRH(m_multi->trans, modelView, 1.0f);
			updateTransform(osg::Matrix(modelView));
		}
	}
Example #2
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
	ARdouble m[16];
	
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
	
    arglPixelBufferDataUpload(gArglSettings, gARTImage);
	arglDispImage(gArglSettings);
	gARTImage = NULL; // Invalidate image data.
				
	// Projection transformation.
	arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
	glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
	glLoadMatrixf(p);
#else
	glLoadMatrixd(p);
#endif
	glMatrixMode(GL_MODELVIEW);
		
	// Viewing transformation.
	glLoadIdentity();
	// Lighting and geometry that moves with the camera should go here.
	// (I.e. must be specified before viewing transformations.)
	//none
	
	if (gPatt_found) {
	
		// Calculate the camera position relative to the marker.
		// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
		arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(m);
#else
        glLoadMatrixd(m);
#endif

        // Before drawing any geometry, mask out the area occupied by the cube marker itself.
        // This makes for a nicer visual presentation, but obviously only applies for this particular
        // shape of marker.
        DrawCubeMarkerMask();
        
		// All lighting and geometry to be drawn relative to the marker goes here.
		DrawCube();
	
	} // gPatt_found
	
	// Any 2D overlays go here.
	//none
	
	glutSwapBuffers();
}
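
All of the Display() callbacks in this listing follow the same skeleton: arglCameraFrustumRH() turns the calibrated camera parameters into an OpenGL projection matrix, and arglCameraViewRH() turns the 3x4 marker pose into a column-major modelview matrix, with VIEW_SCALEFACTOR mapping ARToolKit units (usually millimetres) to drawing units. Below is a condensed sketch of that skeleton, assuming the gCparamLT, gPatt_found and gPatt_trans globals and the VIEW_* constants from the example above (drawMarkerScene is a hypothetical name):

static void drawMarkerScene(void)
{
    ARdouble p[16], m[16];

    // Projection from the calibrated camera parameters.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
    glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
    glLoadMatrixf(p);
#else
    glLoadMatrixd(p);
#endif

    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    if (gPatt_found) {
        // Marker pose -> column-major OpenGL modelview matrix.
        arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(m);
#else
        glLoadMatrixd(m);
#endif
        // Geometry drawn here is positioned relative to the marker.
    }
}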
Example #3
void ARTApp::display(ARUint8 *arImage, ARParamLT *arParam, ARGL_CONTEXT_SETTINGS_REF arSettings, GLMmodel *objModel, const ARdouble pattTrans[3][4])
{
	ARdouble p[16];
	ARdouble m[16];

	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

	arglDispImage(arImage, &(arParam->param), 1.0, arSettings);	// zoom = 1.0.

	// Projection transformation.
	arglCameraFrustumRH(&(arParam->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
	glMatrixMode(GL_PROJECTION);
	
	glLoadMatrixd(p);

	glMatrixMode(GL_MODELVIEW);

	// Viewing transformation.
	glLoadIdentity();
	// Lighting and geometry that moves with the camera should go here.
	// (I.e. must be specified before viewing transformations.)
	//none
	glEnable(GL_LIGHTING);
	glEnable(GL_LIGHT0);
	glEnable(GL_DEPTH_TEST);

	if (objModel) 
	{
		// Calculate the camera position relative to the marker.
		// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
		arglCameraViewRH(pattTrans, m, VIEW_SCALEFACTOR);
		glLoadMatrixd(m);

		// All lighting and geometry to be drawn relative to the marker goes here.
		glPushMatrix(); // Save world coordinate system.
		glTranslatef(0.0f, 0.0f, pattWidth / 2.0); // Place base of object on marker surface.

		glmDraw(objModel, GLM_SMOOTH | GLM_MATERIAL);
		glPopMatrix();    // Restore world coordinate system.
	} 

	// Any 2D overlays go here.
	glMatrixMode(GL_PROJECTION);
	glLoadIdentity();
	glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
	glDisable(GL_LIGHTING);
	glDisable(GL_DEPTH_TEST);

	glutSwapBuffers();
}
Example #4
int ActuatorARTKSM::showActuatorItens(){
	GLdouble m[16];

	glPushMatrix();
		glLoadIdentity();
		arglCameraViewRH(this->markerTrans,m,VIEW_SCALEFACTOR_1);
		glLoadMatrixd(m);
		
		// DRAW MARKER COVER
		if (cover != 0) {
			switch (cover->modelType) {
			case 1: { // VRML
				iVrml* model = static_cast<iVrml*>(cover);
				model->draw();
				break;
			}
			case 2: // Assimp
				break;
			default:
				break;
			}
		}

		// DRAW SYMBOLIC OBJECT
		if (symbol != 0) {
			switch (symbol->modelType) {
			case 1: { // VRML
				iVrml* model = static_cast<iVrml*>(symbol);
				model->draw();
				break;
			}
			case 2: // Assimp
				break;
			default:
				break;
			}
		}

		// DRAW INTERACTION POINT OBJECT
		glTranslated(this->ipTra[0], this->ipTra[1], this->ipTra[2]);
		if (this->interactionPoint != 0) {
			switch (interactionPoint->modelType) {
			case 1: { // VRML
				iVrml* model = static_cast<iVrml*>(interactionPoint);
				model->draw();
				break;
			}
			case 2: // Assimp
				break;
			default:
				break;
			}
		}

	glPopMatrix();

	return 1;

}
Example #5
//========================
// Get the model-view matrix
//========================
void cARTK::getModelViewMat( double dMat[16] )
{
	double	dmatTemp[16];

	arglCameraViewRH( m_dPattTransMat, dmatTemp, m_dViewScaleFactor );

	for( int i = 0 ; i < 4 ; i++ )
	{
		int		idx = i << 2;
		dMat[idx + 0] =	m_dmatRotX[idx] * dmatTemp[0] + m_dmatRotX[idx + 1] * dmatTemp[4] + m_dmatRotX[idx + 2] * dmatTemp[ 8] + m_dmatRotX[idx + 3] * dmatTemp[12];
		dMat[idx + 1] =	m_dmatRotX[idx] * dmatTemp[1] + m_dmatRotX[idx + 1] * dmatTemp[5] + m_dmatRotX[idx + 2] * dmatTemp[ 9] + m_dmatRotX[idx + 3] * dmatTemp[13];
		dMat[idx + 2] =	m_dmatRotX[idx] * dmatTemp[2] + m_dmatRotX[idx + 1] * dmatTemp[6] + m_dmatRotX[idx + 2] * dmatTemp[10] + m_dmatRotX[idx + 3] * dmatTemp[14];
		dMat[idx + 3] =	m_dmatRotX[idx] * dmatTemp[3] + m_dmatRotX[idx + 1] * dmatTemp[7] + m_dmatRotX[idx + 2] * dmatTemp[11] + m_dmatRotX[idx + 3] * dmatTemp[15];
	}
}
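
The loop above multiplies the two matrices by hand; in column-major terms the result is dmatTemp × m_dmatRotX, so the extra X-axis rotation is applied in marker (model) space after the camera transform. Under the assumption that a GL context is current, the same composition can be left to OpenGL's matrix stack; a sketch using the member names from the class above:

// Sketch only: same result as the hand-written multiply, computed on the GL matrix stack.
void cARTK::getModelViewMat( double dMat[16] )
{
	double dmatTemp[16];

	arglCameraViewRH( m_dPattTransMat, dmatTemp, m_dViewScaleFactor );

	glMatrixMode( GL_MODELVIEW );
	glPushMatrix();
	glLoadMatrixd( dmatTemp );      // camera-to-marker transform
	glMultMatrixd( m_dmatRotX );    // post-multiply: rotation acts in marker space
	glGetDoublev( GL_MODELVIEW_MATRIX, dMat );
	glPopMatrix();
}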
Example #6
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
	int i;
    GLdouble p[16];
	GLdouble m[16];
	
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
	
	arglDispImage(gARTImage, &gARTCparam, 1.0, gArglSettings);	// zoom = 1.0.
	arVideoCapNext();
	gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().
				
	if (gPatt_found) {
		// Projection transformation.
		arglCameraFrustumRH(&gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
		glMatrixMode(GL_PROJECTION);
		glLoadMatrixd(p);
		glMatrixMode(GL_MODELVIEW);
		
		// Viewing transformation.
		glLoadIdentity();
		// Lighting and geometry that moves with the camera should go here.
		// (I.e. must be specified before viewing transformations.)
		//none
		
		// All other lighting and geometry goes here.
		// Calculate the camera position for each object and draw it.
		for (i = 0; i < gObjectDataCount; i++) {
			if ((gObjectData[i].visible != 0) && (gObjectData[i].vrml_id >= 0)) {
				//fprintf(stderr, "About to draw object %i\n", i);
				arglCameraViewRH(gObjectData[i].trans, m, VIEW_SCALEFACTOR_4);
				glLoadMatrixd(m);

				arVrmlDraw(gObjectData[i].vrml_id);
			}			
		}
	} // gPatt_found
	
	// Any 2D overlays go here.
	//none
	
	glutSwapBuffers();
}
Example #7
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    GLdouble p[16];
	GLdouble m[16];

	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

	arglDispImage(gARTImage, &gARTCparam, 1.0, gArglSettings);	// zoom = 1.0.
	arVideoCapNext();
	gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().

	for(std::list<Piece*>::iterator it = pieces.begin(); it != pieces.end(); it++)
	{
		if ((*it)->patt_found) // gPatt_found
		{
			// Projection transformation.
			arglCameraFrustumRH(&gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
			glMatrixMode(GL_PROJECTION);
			glLoadMatrixd(p);
			glMatrixMode(GL_MODELVIEW);

			// Viewing transformation.
			glLoadIdentity();
			// Lighting and geometry that moves with the camera should go here.
			// (I.e. must be specified before viewing transformations.)
			//none

			// ARToolKit supplied distance in millimetres, but I want OpenGL to work in my units.
			arglCameraViewRH((*it)->patt_trans, m, VIEW_SCALEFACTOR);
			glLoadMatrixd(m);

			// All other lighting and geometry goes here.

			(*it)->Draw();

		} 
		
		// Any 2D overlays go here.
		//none
	}

	glutSwapBuffers();
}
Example #8
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    GLdouble p[16];
    GLdouble m[16];

    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    //	arglDispImage(gARTImage, &gARTCparam, 1.0, gArglSettings);	// zoom = 1.0.
    arVideoCapNext();
    gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().

    // Projection transformation.
    arglCameraFrustumRH(&gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
    glMatrixMode(GL_PROJECTION);
    glLoadMatrixd(p);
    glMatrixMode(GL_MODELVIEW);

    // Viewing transformation.
    glLoadIdentity();
    // Lighting and geometry that moves with the camera should go here.
    // (I.e. must be specified before viewing transformations.)
    //none

    if (gPatt_found) {

        // Calculate the camera position relative to the marker.
        // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
        arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
        glLoadMatrixd(m);

        // All lighting and geometry to be drawn relative to the marker goes here.
        DrawCube();

    } // gPatt_found

    // Any 2D overlays go here.
    //none

    glutSwapBuffers();
}
Example #9
static int renderScene(AppState* state)
{

    if (gPatt_found) {


        GLdouble projectionMatrix[16];
        GLdouble modelViewMatrix[16];

        // Projection transformation.
        arglCameraFrustumRH(&gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, projectionMatrix);

        glMatrixMode(GL_PROJECTION);
        glLoadMatrixd(projectionMatrix);

        glMatrixMode(GL_MODELVIEW);


        // Calculate the camera position relative to the marker.
        // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
        arglCameraViewRH(gPatt_trans, modelViewMatrix, VIEW_SCALEFACTOR);
        glLoadMatrixd(modelViewMatrix);

        // All lighting and geometry to be drawn relative to the marker goes here.
//        DrawCube();


    } // gPatt_found


    {
        GLenum gle = glGetError();
        if (gle != GL_NO_ERROR) {
            fprintf(stderr, "GL error 0x%x\n", gle);
        }
    }

    return 0;
}
Example #10
	void SingleTarget::update(ARMarkerInfo* targetInfo, bool useHistory)
	{

		if (_active == false) 
		{
			// If the target isn't active, then it can't be valid, and should not be updated either.
			_valid = false;
			return;
		}

		if (targetInfo == 0L) 
		{
			// Invalid target info cannot be used for update
			_valid = false;
			return;
		} 

		// Valid target info means the tracker detected and tracked the target
		_valid = true;

		// Use history-based arGetTransMatCont if flag is set and we have initial data from a call to arGetTransMat
		if (useHistory && mInitialData) 
		{
			arGetTransMatCont(targetInfo, patt_trans, patt_center, patt_width, patt_trans);
		} 
		else 
		{
			arGetTransMat(targetInfo, patt_center, patt_width, patt_trans);
			mInitialData = true; // Need to get initial data before arGetTransMatCont can be used
		}

		_confidence = targetInfo->cf;

		double modelView[16];
		arglCameraViewRH(patt_trans, modelView, 1.0f);
		updateTransform(osg::Matrix(modelView));

	}
Example #11
static void mainLoop(void)
{
	static int ms_prev;
	int ms;
	float s_elapsed;
	ARUint8 *image;


    int             i, j, k;
	
	// Calculate time delta.
	ms = glutGet(GLUT_ELAPSED_TIME);
	s_elapsed = (float)(ms - ms_prev) * 0.001f;
	ms_prev = ms;
	
	// Grab a video frame.
	if ((image = arVideoGetImage()) != NULL) {
		gARTImage = image;	// Save the fetched image.
		
        // Calculate FPS every 30 frames.
        if (gCallCountMarkerDetect % 30 == 0) {
            gFPS = 30.0/arUtilTimer();
            arUtilTimerReset();
            gCallCountMarkerDetect = 0;
        }
		gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.
		

        // Run marker detection on frame
        if (threadHandle) {
            // Perform NFT tracking.
            float            err;
            int              ret;
            int              pageNo;
            
            if( detectedPage == -2 ) {
                trackingInitStart( threadHandle, gARTImage );
                detectedPage = -1;
            }
            if( detectedPage == -1 ) {
                ret = trackingInitGetResult( threadHandle, trackingTrans, &pageNo);
                if( ret == 1 ) {
                    if (pageNo >= 0 && pageNo < surfaceSetCount) {
                        ARLOGd("Detected page %d.\n", pageNo);
                        detectedPage = pageNo;
                        ar2SetInitTrans(surfaceSet[detectedPage], trackingTrans);
                    } else {
                        ARLOGe("Detected bad page %d.\n", pageNo);
                        detectedPage = -2;
                    }
                } else if( ret < 0 ) {
                    ARLOGd("No page detected.\n");
                    detectedPage = -2;
                }
            }
            if( detectedPage >= 0 && detectedPage < surfaceSetCount) {
                if( ar2Tracking(ar2Handle, surfaceSet[detectedPage], gARTImage, trackingTrans, &err) < 0 ) {
                    ARLOGd("Tracking lost.\n");
                    detectedPage = -2;
                } else {
                    ARLOGd("Tracked page %d (max %d).\n", detectedPage, surfaceSetCount - 1);
                }
            }
        } else {
            ARLOGe("Error: threadHandle\n");
            detectedPage = -2;
        }
        
        // Update markers.
        for (i = 0; i < markersNFTCount; i++) {
            markersNFT[i].validPrev = markersNFT[i].valid;
            if (markersNFT[i].pageNo >= 0 && markersNFT[i].pageNo == detectedPage) {
                markersNFT[i].valid = TRUE;
                for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) markersNFT[i].trans[j][k] = trackingTrans[j][k];
            }
            else markersNFT[i].valid = FALSE;
            if (markersNFT[i].valid) {
                
                // Filter the pose estimate.
                if (markersNFT[i].ftmi) {
                    if (arFilterTransMat(markersNFT[i].ftmi, markersNFT[i].trans, !markersNFT[i].validPrev) < 0) {
                        ARLOGe("arFilterTransMat error with marker %d.\n", i);
                    }
                }
                
                if (!markersNFT[i].validPrev) {
                    // Marker has become visible, tell any dependent objects.
                    VirtualEnvironmentHandleARMarkerAppeared(i);
                }
                
                // We have a new pose, so set that.
                arglCameraViewRH(markersNFT[i].trans, markersNFT[i].pose.T, VIEW_SCALEFACTOR);
                // Tell any dependent objects about the update.
                VirtualEnvironmentHandleARMarkerWasUpdated(i, markersNFT[i].pose);
                
            } else {
                
                if (markersNFT[i].validPrev) {
                    // Marker has ceased to be visible, tell any dependent objects.
                    VirtualEnvironmentHandleARMarkerDisappeared(i);
                }
            }                    
        }

		// Tell GLUT the display has changed.
		glutPostRedisplay();
	} else {
		arUtilSleep(2);
	}
    
}
Example #12
static void mainLoop(void)
{
	static int ms_prev;
	int ms;
	float s_elapsed;
	ARUint8 *image;

    // NFT results.
    static int detectedPage = -2; // -2 Tracking not inited, -1 tracking inited OK, >= 0 tracking online on page.
    static float trackingTrans[3][4];
    

    int             i, j, k;
	
	// Find out how long since mainLoop() last ran.
	ms = glutGet(GLUT_ELAPSED_TIME);
	s_elapsed = (float)(ms - ms_prev) * 0.001f;
	if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
	ms_prev = ms;
	
	// Update drawing.
	DrawCubeUpdate(s_elapsed);
	
	// Grab a video frame.
	if ((image = arVideoGetImage()) != NULL) {
		gARTImage = image;	// Save the fetched image.
		
		gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.
		

        // Run marker detection on frame
        if (threadHandle) {
            // Perform NFT tracking.
            float            err;
            int              ret;
            int              pageNo;
            
            if( detectedPage == -2 ) {
                trackingInitStart( threadHandle, gARTImage );
                detectedPage = -1;
            }
            if( detectedPage == -1 ) {
                ret = trackingInitGetResult( threadHandle, trackingTrans, &pageNo);
                if( ret == 1 ) {
                    if (pageNo >= 0 && pageNo < surfaceSetCount) {
                        ARLOGd("Detected page %d.\n", pageNo);
                        detectedPage = pageNo;
                        ar2SetInitTrans(surfaceSet[detectedPage], trackingTrans);
                    } else {
                        ARLOGe("Detected bad page %d.\n", pageNo);
                        detectedPage = -2;
                    }
                } else if( ret < 0 ) {
                    ARLOGd("No page detected.\n");
                    detectedPage = -2;
                }
            }
            if( detectedPage >= 0 && detectedPage < surfaceSetCount) {
                if( ar2Tracking(ar2Handle, surfaceSet[detectedPage], gARTImage, trackingTrans, &err) < 0 ) {
                    ARLOGd("Tracking lost.\n");
                    detectedPage = -2;
                } else {
                    ARLOGd("Tracked page %d (max %d).\n", detectedPage, surfaceSetCount - 1);
                }
            }
        } else {
            ARLOGe("Error: threadHandle\n");
            detectedPage = -2;
        }
        
        // Update markers.
        for (i = 0; i < markersNFTCount; i++) {
            markersNFT[i].validPrev = markersNFT[i].valid;
            if (markersNFT[i].pageNo >= 0 && markersNFT[i].pageNo == detectedPage) {
                markersNFT[i].valid = TRUE;
                for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) markersNFT[i].trans[j][k] = trackingTrans[j][k];
            }
            else markersNFT[i].valid = FALSE;
            if (markersNFT[i].valid) {
                
                // Filter the pose estimate.
                if (markersNFT[i].ftmi) {
                    if (arFilterTransMat(markersNFT[i].ftmi, markersNFT[i].trans, !markersNFT[i].validPrev) < 0) {
                        ARLOGe("arFilterTransMat error with marker %d.\n", i);
                    }
                }
                
                if (!markersNFT[i].validPrev) {
                    // Marker has become visible, tell any dependent objects.
                    // --->
                }
                
                // We have a new pose, so set that.
                arglCameraViewRH(markersNFT[i].trans, markersNFT[i].pose.T, VIEW_SCALEFACTOR);
                // Tell any dependent objects about the update.
                // --->
                
            } else {
                
                if (markersNFT[i].validPrev) {
                    // Marker has ceased to be visible, tell any dependent objects.
                    // --->
                }
            }                    
        }

		// Tell GLUT the display has changed.
		glutPostRedisplay();
	}
}
Example #13
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
	ARdouble m[16];
    double zoom;
	
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
	
    arglPixelBufferDataUpload(gArglSettings, gARTImage);
    arglDispImage(gArglSettings);
	gARTImage = NULL; // Invalidate image data.
				
	// Projection transformation.
	arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
	glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
    glLoadMatrixf(p);
#else
    glLoadMatrixd(p);
#endif
	glMatrixMode(GL_MODELVIEW);
		
	glEnable(GL_DEPTH_TEST);

	// Viewing transformation.
	glLoadIdentity();
	// Lighting and geometry that moves with the camera should go here.
	// (I.e. must be specified before viewing transformations.)
	//none
	
	if (gPatt_found) {
	
		// Calculate the camera position relative to the marker.
		// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
		arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(m);
#else
        glLoadMatrixd(m);
#endif

		// All lighting and geometry to be drawn relative to the marker goes here.
        
        // Draw the movie frame.
        if (gMovieImage) {
            glPushMatrix();
            glRotatef(90.0f, 1.0f, 0.0f, 0.0f); // Place movie in x-z plane instead of x-y plane.
            glTranslated(-gPatt_width*0.5, 0.0, 0.0); // Movie origin is at lower-left of movie frame. Place this at the edge of the marker.
            zoom = 1.0/gMovieCparam.xsize * gPatt_width; // Scale the movie frame so that it is the same width as the marker.
            arglPixelBufferDataUpload(gMovieArglSettings, gMovieImage);
            arglDispImageStateful(gMovieArglSettings); // Show the movie frame.
            glPopMatrix();
        }
	
	} // gPatt_found
	
	// Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }
	
	glutSwapBuffers();
}
Example #14
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    int i;
    GLdouble p[16];
    GLdouble m[16];

    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    arglDispImage(gARTImage, &(gCparamLT->param), 1.0, gArglSettings);	// zoom = 1.0.
    gARTImage = NULL; // Invalidate image data.

    // Projection transformation.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
    glMatrixMode(GL_PROJECTION);
    glLoadMatrixd(p);
    glMatrixMode(GL_MODELVIEW);

    glEnable(GL_DEPTH_TEST);

    // Viewing transformation.
    glLoadIdentity();
    // Lighting and geometry that moves with the camera should go here.
    // (I.e. must be specified before viewing transformations.)
    //none

    for (i = 0; i < gObjectDataCount; i++) {

        if ((gObjectData[i].visible != 0) && (gObjectData[i].vrml_id >= 0)) {

            // Calculate the camera position for the object and draw it.
            // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
            arglCameraViewRH(gObjectData[i].trans, m, VIEW_SCALEFACTOR);
            glLoadMatrixd(m);

            // All lighting and geometry to be drawn relative to the marker goes here.
            //ARLOGe("About to draw object %i\n", i);
            arVrmlDraw(gObjectData[i].vrml_id);
        }
    }

    // Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }

    glutSwapBuffers();
}
Example #15
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    GLdouble p[16];
	GLdouble m[16];
	
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
	
	arglDispImage(gARTImage, &gARTCparam, 1.0, gArglSettings);	// zoom = 1.0.
	arVideoCapNext();
	gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().
				
	// Projection transformation.
	arglCameraFrustumRH(&gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
	glMatrixMode(GL_PROJECTION);
	glLoadMatrixd(p);
	glMatrixMode(GL_MODELVIEW);
		
	// Viewing transformation.
	glLoadIdentity();
	// Lighting and geometry that moves with the camera should go here.
	// (I.e. must be specified before viewing transformations.)
	//none
	
	//Read leap dump
	int koo = read_leap_dump();

	float dx = px1 - px0;
	float dy = py1 - py0;
	float dz = pz1 - pz0;

	printf("dx %f\n", dx);

	px0 = px1;
	py0 = py1;
	pz0 = pz1;

	float stepX = 100.0f / glutGet(GLUT_WINDOW_X);
	float stepY = 100.0f/ glutGet(GLUT_WINDOW_Y);

	PX = stepX*dx + PX;
	PY = stepY*dy + PY;
	PZ = stepX*dz + PZ;

	if (snapToMarker && gPatt_found) {
	
		// Calculate the camera position relative to the marker.
		// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
		arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
		glLoadMatrixd(m);

		// All lighting and geometry to be drawn relative to the marker goes here.
		DrawItem();
	
	} // gPatt_found
	else {
		arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
		glLoadMatrixd(m);

		// All lighting and geometry to be drawn relative to the marker goes here.
		DrawItem();

	}
	
	// Any 2D overlays go here.
	//none
	
	glutSwapBuffers();
}
Example #16
void CWorld::SortByDepth(CDesktop* desktop)
{
	int i, degree = objectList.GetDegree();

	if(!degree) return;

	CComponent3D* object;
	double m[16];
	double* depthList;
	CVirtualWebCam* vWebCam;

	depthList = new double[degree];
	int* idList = new int[degree];

	glMatrixMode(GL_MODELVIEW);
	glPushMatrix();
		if(CARDeskTop::mode == CARDeskTop::kAR) {
			arglCameraViewRH(desktop->trans, m, VIEW_SCALEFACTOR);
			glLoadMatrixd(m);
			glMultMatrixd(matrix);
		} else if(CARDeskTop::mode == CARDeskTop::kVR) {
			glLoadMatrixd(matrix);
			vWebCam = (CVirtualWebCam*) CARDeskTop::arDeskTop->window->webCamList[0];
		}

		objectList.GoToFirst();
		for(i = 0; i < degree; i++) {
			object = (CComponent3D*) objectList.GetKey();
			idList[i] = object->GetID();

			glPushMatrix();
				glMultMatrixd(object->matrix);
				glGetDoublev(GL_MODELVIEW_MATRIX, m);
				if(CARDeskTop::mode == CARDeskTop::kAR) {
					depthList[i] = m[12] * m[12] + m[13] * m[13] + m[14] * m[14];
				} else if(CARDeskTop::mode == CARDeskTop::kVR) {
					depthList[i] = -(vWebCam->newPos[0] * m[12] + vWebCam->newPos[1] * m[13] + vWebCam->newPos[2] * m[14]);
				//	depthList[i] = -m[14];
				}
			glPopMatrix();

			objectList.GoToNext();
		}
	glPopMatrix();


	BubbleSortd(depthList, idList, degree);

	CList list;
	int j, degree2;

	objectList.MoveAllLinkTo(&list);

	for(i = 0; i < degree; i++) {
		list.GoToFirst();
		degree2 = list.GetDegree();
		for(j = 0; j < degree2; j++) {
			object = (CComponent3D*) list.GetKey();
			if(idList[i] == object->GetID()) {
				objectList.Push(list.Remove(object));
				break;
			}
			list.GoToNext();
		}
	}

	delete [] depthList; // allocated with new[]
	delete [] idList;
}
Example #17
static void DrawCube(void)
{
	if (guss == 99){

	}
	else if (gCharacter[0].patt_found){
	

		glPushMatrix();
		// Apply the coordinate transformation matrix
		arglCameraViewRH(gCharacter[0].trans, m, gViewScaleFactor);
		glLoadMatrixd(m);

		glPushMatrix();
		glScalef(7.8, 4.8, 4.8);

		//glTranslated(3, 0, -30);
		//glRotated(90, 1, 0, 0);
		//sky();
		//glTranslated(-3, 0, 20);
		//glScalef(xx, yy, z);
		//drawElephant2();
		if (wire) {
			OnDraw();
		} else {
			OnDraw1();
		}
		glPopMatrix();

		glPopMatrix();
		if (skyflag){
			
			
		}
		

	//FSOUND_Stream_Play(0, g_mp3_stream1);
		//PlaySound(data1, NULL, SND_ASYNC | SND_NOSTOP );*/
	}
	else if (gCharacter[4].patt_found){


		glPushMatrix();
		// Apply the coordinate transformation matrix
		arglCameraViewRH(gCharacter[4].trans, m, gViewScaleFactor);
		glLoadMatrixd(m);

		glPushMatrix();
		
		//sky();
		glTranslated(-3, 0, 20);
	    glScalef(xx, yy, z);
		//drawElephant2();
	

		FSOUND_Stream_Play(0, g_mp3_stream1);
		//PlaySound(data1, NULL, SND_ASYNC | SND_NOSTOP );*/
	}
	
	else if(gCharacter[1].patt_found){

		glPushMatrix();
		
		arglCameraViewRH(gCharacter[1].trans, m, gViewScaleFactor);
		glLoadMatrixd(m);

		glPushMatrix();
		glLightModelfv(GL_LIGHT_MODEL_AMBIENT, LIGHT_MODEL_AMBIENT);
		glEnable(GL_LIGHTING);
		//gluPerspective(60.0, ASPECT_RATIO, 0.2, 100.0);
		gluLookAt(LOOK_AT_POSITION[0] + ViewerDistance * sin(viewerZenith) * sin(viewerAzimuth),
			LOOK_AT_POSITION[1] + ViewerDistance * cos(viewerZenith),
			LOOK_AT_POSITION[2] + ViewerDistance * sin(viewerZenith) * cos(viewerAzimuth),
			LOOK_AT_POSITION[0], LOOK_AT_POSITION[1], LOOK_AT_POSITION[2],
			0.0, 1.0, 0.20);
		glScalef(xx, yy, z);
		glShadeModel(GL_SMOOTH);
		glEnable(GL_DEPTH_TEST);
		glDepthFunc(GL_LEQUAL);
		glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);	
		glEnable(GL_NORMALIZE);
		glCullFace(GL_BACK);
		glEnable(GL_CULL_FACE);

		glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

		FSOUND_Stream_Play(0, g_mp3_stream2);
		
	
		glRotated(90, 1, 0, 0);
		glScaled(2, 2, 2);
		UpdateLight();
		drawEarthAndMoon();
	
		
		
		drawAllPlanets();
		drawSaturnRing();
		drawSun();
		


		if (blendFlag){ 
			glEnable(GL_BLEND);
		glDepthMask(GL_FALSE);
			glBlendFunc(GL_SRC_COLOR, GL_ONE);
		}

		if (particleFlag)
		drawAllParticles();
		
		



		glDepthMask(GL_TRUE);
		glDisable(GL_BLEND);
		glDisable(GL_LIGHTING);
		
		//drawElephant();
	
		
	
	
		//glTranslated(0, 0, -2);


		glPopMatrix();
		
		glPopMatrix();
		//OnDraw2();
	
	}
	else if (gCharacter[3].patt_found)
	{
		glPushMatrix();
	
		arglCameraViewRH(gCharacter[3].trans, m, gViewScaleFactor);
		glLoadMatrixd(m);

		glPushMatrix();

		glLightModelfv(GL_LIGHT_MODEL_AMBIENT, LIGHT_MODEL_AMBIENT);
		glEnable(GL_LIGHTING);
		//gluPerspective(60.0, ASPECT_RATIO, 0.2, 100.0);
		gluLookAt(LOOK_AT_POSITION[0] + ViewerDistance * sin(viewerZenith) * sin(viewerAzimuth),
			LOOK_AT_POSITION[1] + ViewerDistance * cos(viewerZenith),
			LOOK_AT_POSITION[2] + ViewerDistance * sin(viewerZenith) * cos(viewerAzimuth),
			LOOK_AT_POSITION[0], LOOK_AT_POSITION[1], LOOK_AT_POSITION[2],
			0.0, 1.0, 0.020);
		glShadeModel(GL_SMOOTH);
		glEnable(GL_DEPTH_TEST);
		glDepthFunc(GL_LEQUAL);
		glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
		glEnable(GL_NORMALIZE);
		glCullFace(GL_BACK);
	glEnable(GL_CULL_FACE);

		glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

		glTranslated(0, 0, 0);
		glScalef(xx, yy, z);
		UpdateLight();
		glRotated(90, 1, 0, 0);
		drawOne();







		glDepthMask(GL_TRUE);
		glDisable(GL_BLEND);
		glDisable(GL_LIGHTING);
		guss1 = 1;
		glPopMatrix();
		glPopMatrix();
	}
	/* if (gCharacter[4].patt_found){

		glPushMatrix();
	
		arglCameraViewRH(gCharacter[4].trans, m, gViewScaleFactor);
		glLoadMatrixd(m);

		glPushMatrix();


		glScaled(xx, yy, z);
		glColor3d(1, 1, 1);
		glBegin(GL_QUADS);

		glVertex2f(-3, 3);


		glVertex2f(-3, -3);


		glVertex2f(3, -3);


		glVertex2f(3, 3);
		glEnd();
		glPopMatrix();
		glPushMatrix();
		glTranslated(0, 0, 2);
		glScaled(25, 25, 25);
		glBegin(GL_TRIANGLES);

		glColor3f(0.0f, 0.0f, 0.0f);
		glVertex3f(0.0f, 1.0f, 0.0f);

		glVertex3f(-1.0f, -1.0f, 0.0f);

		glVertex3f(1.0f, -1.0f, 0.0f);
		glEnd();
		glPopMatrix();
		glPopMatrix();
		//printf("found");
	}
	 if (gCharacter[5].patt_found){

		glPushMatrix();
	
		arglCameraViewRH(gCharacter[5].trans, m, gViewScaleFactor);
		glLoadMatrixd(m);

		glPushMatrix();


		glScaled(xx, yy, z);
		glColor3d(1, 1, 1);
		glBegin(GL_QUADS);

		glVertex2f(-3, 3);


		glVertex2f(-3, -3);


		glVertex2f(3, -3);


		glVertex2f(3, 3);
		glEnd();
		glPopMatrix();
		glPushMatrix();
		glTranslated(0, 0, 2);
		glScaled(25, 25, 25);
		glBegin(GL_TRIANGLES);

		glColor3f(0.0f, 0.0f, 0.0f);
		glVertex3f(0.0f, 1.0f, 0.0f);

		glVertex3f(-1.0f, -1.0f, 0.0f);

		glVertex3f(1.0f, -1.0f, 0.0f);
		glEnd();
		glPopMatrix();
		glPopMatrix();
		//printf("found");
	
	}*/else
	 if (gCharacter[5].patt_found){
		 glPushMatrix();

		 arglCameraViewRH(gCharacter[6].trans, m, gViewScaleFactor);
		 glLoadMatrixd(m);

		 glPushMatrix();
		 glScalef(13, 13, 13);
		 glColor3f(1, 1, 1);
		 glTranslated(-10, 0, 0);
		 cube();
		 FSOUND_Stream_Play(0, g_mp3_stream16);
		 glPopMatrix();

		 glPopMatrix();
	 }
		/* if (gCharacter[7].patt_found){
			 glPushMatrix();

			 arglCameraViewRH(gCharacter[7].trans, m, gViewScaleFactor);
			 glLoadMatrixd(m);

			 glPushMatrix();


			 glScaled(xx, yy, z);
			 glColor3d(1, 1, 1);
			 glBegin(GL_QUADS);

			 glVertex2f(-3, 3);


			 glVertex2f(-3, -3);


			 glVertex2f(3, -3);


			 glVertex2f(3, 3);
			 glEnd();
			 glPopMatrix();
			 if (pause){
				 glPushMatrix();
				 glTranslated(0, 0, 2);
				 glScaled(25, 25, 25);
				 glBegin(GL_TRIANGLES);

				 glColor3f(0.0f, 0.0f, 0.0f);
				 glVertex3f(0.0f, 1.0f, 0.0f);

				 glVertex3f(-1.0f, -1.0f, 0.0f);

				 glVertex3f(1.0f, -1.0f, 0.0f);
				 glEnd();
				 glPopMatrix();
			 }
			 else
			 if (!pause){
				 glPushMatrix();
				 glScaled(8, 8, 8);
				 glTranslated(0, -1
					 , 3);
				 glRotated(90, 0, 0, 1);
				 glColor3d(0, 1, 1);
				 glBegin(GL_QUADS);

				 glVertex2f(-0.6, 3);


				 glVertex2f(-0.6, -3);


				 glVertex2f(0.6, -3);


				 glVertex2f(0.6, 3);
				 glEnd();
				 glPopMatrix();
				 glPushMatrix();
				 glScaled(8, 8, 8);
				 glTranslated(0, 1
					 , 3);
				 glRotated(90, 0, 0, 1);
				 glColor3d(0, 1, 1);
				 glBegin(GL_QUADS);

				 glVertex2f(-0.6, 3);


				 glVertex2f(-0.6, -3);


				 glVertex2f(0.6, -3);


				 glVertex2f(0.6, 3);
				 glEnd();
				 glPopMatrix();

			 }

			 glPopMatrix();
		 }*/
	 





}
Example #18
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
    ARdouble m[16];

#ifdef ARDOUBLE_IS_FLOAT
    GLdouble p0[16];
    GLdouble m0[16];
#endif
    int                 i, j, k;
    GLfloat             w, bw, bh, vertices[6][2];
    GLubyte             pixels[300];
    char                text[256];
    GLdouble            winX, winY, winZ;
    int                 showMErr[CHECK_ID_MULTIMARKERS_MAX];
    GLdouble            MX[CHECK_ID_MULTIMARKERS_MAX];
    GLdouble            MY[CHECK_ID_MULTIMARKERS_MAX];
    int                 pattDetectMode;
    AR_MATRIX_CODE_TYPE matrixCodeType;


    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);     // Clear the buffers for new frame.

    arglPixelBufferDataUpload(gArglSettings, gARTImage);
    arglDispImage(gArglSettings);

    if (gMultiConfigCount)
    {
        arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
        glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(p);
#else
        glLoadMatrixd(p);
#endif
        glMatrixMode(GL_MODELVIEW);
        glEnable(GL_DEPTH_TEST);

        // If we have multi-configs, show their origin onscreen.
        for (k = 0; k < gMultiConfigCount; k++)
        {
            showMErr[k] = FALSE;
            if (gMultiConfigs[k]->prevF != 0)
            {
                arglCameraViewRH((const ARdouble (*)[4])gMultiConfigs[k]->trans, m, 1.0);
#ifdef ARDOUBLE_IS_FLOAT
                glLoadMatrixf(m);
#else
                glLoadMatrixd(m);
#endif
                drawAxes();
#ifdef ARDOUBLE_IS_FLOAT
                for (i = 0; i < 16; i++)
                    m0[i] = (GLdouble)m[i];

                for (i = 0; i < 16; i++)
                    p0[i] = (GLdouble)p[i];

                if (gluProject(0, 0, 0, m0, p0, gViewport, &winX, &winY, &winZ) == GL_TRUE)
#else
                if (gluProject(0, 0, 0, m, p, gViewport, &winX, &winY, &winZ) == GL_TRUE)
#endif
                {
                    showMErr[k] = TRUE;
                    MX[k]       = winX; MY[k] = winY;
                }
            }
        } // for k
    }

    // Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    arGetPatternDetectionMode(gARHandle, &pattDetectMode);
    arGetMatrixCodeType(gARHandle, &matrixCodeType);

    // For all markers, draw onscreen position.
    // Colour based on cutoffPhase.
    glLoadIdentity();
    glVertexPointer(2, GL_FLOAT, 0, vertices);
    glEnableClientState(GL_VERTEX_ARRAY);
    glLineWidth(2.0f);

    for (j = 0; j < gARHandle->marker_num; j++)
    {
        glColor3ubv(cutoffPhaseColours[gARHandle->markerInfo[j].cutoffPhase].colour);

        for (i = 0; i < 5; i++)
        {
            int dir = gARHandle->markerInfo[j].dir;
            vertices[i][0] = (float)gARHandle->markerInfo[j].vertex[(i + 4 - dir) % 4][0] * (float)windowWidth / (float)gARHandle->xsize;
            vertices[i][1] = ((float)gARHandle->ysize - (float)gARHandle->markerInfo[j].vertex[(i + 4 - dir) % 4][1]) * (float)windowHeight / (float)gARHandle->ysize;
        }

        vertices[i][0] = (float)gARHandle->markerInfo[j].pos[0] * (float)windowWidth / (float)gARHandle->xsize;
        vertices[i][1] = ((float)gARHandle->ysize - (float)gARHandle->markerInfo[j].pos[1]) * (float)windowHeight / (float)gARHandle->ysize;
        glDrawArrays(GL_LINE_STRIP, 0, 6);
        // For markers that have been identified, draw the ID number.
        if (gARHandle->markerInfo[j].id >= 0)
        {
            glColor3ub(255, 0, 0);
            if (matrixCodeType == AR_MATRIX_CODE_GLOBAL_ID && (pattDetectMode == AR_MATRIX_CODE_DETECTION || pattDetectMode == AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX || pattDetectMode == AR_TEMPLATE_MATCHING_MONO_AND_MATRIX))
                snprintf(text, sizeof(text), "%llu (err=%d)", gARHandle->markerInfo[j].globalID, gARHandle->markerInfo[j].errorCorrected);
            else
                snprintf(text, sizeof(text), "%d", gARHandle->markerInfo[j].id);

            print(text, (float)gARHandle->markerInfo[j].pos[0] * (float)windowWidth / (float)gARHandle->xsize, ((float)gARHandle->ysize - (float)gARHandle->markerInfo[j].pos[1]) * (float)windowHeight / (float)gARHandle->ysize, 0, 0);
        }
    }

    glDisableClientState(GL_VERTEX_ARRAY);

    // For matrix mode, draw the pattern image of the largest marker.
    if (pattDetectMode == AR_MATRIX_CODE_DETECTION || pattDetectMode == AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX || pattDetectMode == AR_TEMPLATE_MATCHING_MONO_AND_MATRIX)
    {
        int area = 0, biggestMarker = -1;

        for (j = 0; j < gARHandle->marker_num; j++)
            if (gARHandle->markerInfo[j].area > area)
            {
                area          = gARHandle->markerInfo[j].area;
                biggestMarker = j;
            }

        if (area >= AR_AREA_MIN)
        {
            int      imageProcMode;
            ARdouble pattRatio;
            ARUint8  ext_patt[AR_PATT_SIZE2_MAX * AR_PATT_SIZE2_MAX * 3]; // Holds unwarped pattern extracted from image.
            int      size;
            int      zoom = 4;
            ARdouble vertexUpright[4][2];

            // Reorder vertices based on dir.
            for (i = 0; i < 4; i++)
            {
                int dir = gARHandle->markerInfo[biggestMarker].dir;
                vertexUpright[i][0] = gARHandle->markerInfo[biggestMarker].vertex[(i + 4 - dir) % 4][0];
                vertexUpright[i][1] = gARHandle->markerInfo[biggestMarker].vertex[(i + 4 - dir) % 4][1];
            }

            arGetImageProcMode(gARHandle, &imageProcMode);
            arGetPattRatio(gARHandle, &pattRatio);
            if (matrixCodeType == AR_MATRIX_CODE_GLOBAL_ID)
            {
                size = 14;
                arPattGetImage2(imageProcMode, AR_MATRIX_CODE_DETECTION, size, size * AR_PATT_SAMPLE_FACTOR2,
                                gARTImage, gARHandle->xsize, gARHandle->ysize, gARHandle->arPixelFormat, &gCparamLT->paramLTf, vertexUpright, (ARdouble)14 / (ARdouble)(14 + 2), ext_patt);
            }
            else
            {
                size = matrixCodeType & AR_MATRIX_CODE_TYPE_SIZE_MASK;
                arPattGetImage2(imageProcMode, AR_MATRIX_CODE_DETECTION, size, size * AR_PATT_SAMPLE_FACTOR2,
                                gARTImage, gARHandle->xsize, gARHandle->ysize, gARHandle->arPixelFormat, &gCparamLT->paramLTf, vertexUpright, pattRatio, ext_patt);
            }

            glRasterPos2f((float)(windowWidth - size * zoom) - 4.0f, (float)(size * zoom) + 4.0f);
            glPixelZoom((float)zoom, (float)-zoom);
            glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
            glDrawPixels(size, size, GL_LUMINANCE, GL_UNSIGNED_BYTE, ext_patt);
            glPixelZoom(1.0f, 1.0f);
        }
    }


    // Draw error value for multimarker pose.
    for (k = 0; k < gMultiConfigCount; k++)
    {
        if (showMErr[k])
        {
            snprintf(text, sizeof(text), "err=%0.3f", gMultiErrs[k]);
            print(text, MX[k], MY[k], 0, 0);
        }
    }

    //
    // Draw help text and mode.
    //
    glLoadIdentity();
    if (gShowMode)
    {
        printMode();
    }

    if (gShowHelp)
    {
        if (gShowHelp == 1)
        {
            printHelpKeys();
        }
        else if (gShowHelp == 2)
        {
            bw = 0.0f;

            for (i = 0; i < AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT; i++)
            {
                w = (float)glutBitmapLength(GLUT_BITMAP_HELVETICA_10, (unsigned char*)arMarkerInfoCutoffPhaseDescriptions[cutoffPhaseColours[i].cutoffPhase]);
                if (w > bw)
                    bw = w;
            }

            bw += 12.0f; // Space for color block.
            bh  = AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT * 10.0f /* character height */ + (AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT - 1) * 2.0f /* line spacing */;
            drawBackground(bw, bh, 2.0f, 2.0f);

            // Draw the colour block and text, line by line.
            for (i = 0; i < AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT; i++)
            {
                for (j = 0; j < 300; j += 3)
                {
                    pixels[j]     = cutoffPhaseColours[i].colour[0];
                    pixels[j + 1] = cutoffPhaseColours[i].colour[1];
                    pixels[j + 2] = cutoffPhaseColours[i].colour[2];
                }

                glRasterPos2f(2.0f, (AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT - 1 - i) * 12.0f + 2.0f);
                glPixelZoom(1.0f, 1.0f);
                glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
                glDrawPixels(10, 10, GL_RGB, GL_UNSIGNED_BYTE, pixels);
                print(arMarkerInfoCutoffPhaseDescriptions[cutoffPhaseColours[i].cutoffPhase], 14.0f, (AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT - 1 - i) * 12.0f + 2.0f, 0, 0);
            }
        }
    }

    glutSwapBuffers();
}
Example #19
static void mainLoop(void)
{
	static int ms_prev;
	int ms;
	float s_elapsed;
	ARUint8 *image;
    ARMarkerInfo* markerInfo;
    int markerNum;
	ARdouble err;
    int             i, j, k;
	
	// Calculate time delta.
	ms = glutGet(GLUT_ELAPSED_TIME);
	s_elapsed = (float)(ms - ms_prev) * 0.001f;
	ms_prev = ms;
	
	// Grab a video frame.
	if ((image = arVideoGetImage()) != NULL) {
		gARTImage = image;	// Save the fetched image.
		
		gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.
		
		// Detect the markers in the video frame.
		if (arDetectMarker(gARHandle, gARTImage) < 0) {
			exit(-1);
		}
		
		// Get detected markers
		markerInfo = arGetMarker(gARHandle);
		markerNum = arGetMarkerNum(gARHandle);
	
		// Update markers.
		for (i = 0; i < markersSquareCount; i++) {
			markersSquare[i].validPrev = markersSquare[i].valid;
            
            
			// Check through the marker_info array for highest confidence
			// visible marker matching our preferred pattern.
			k = -1;
			if (markersSquare[i].patt_type == AR_PATTERN_TYPE_TEMPLATE) {
				for (j = 0; j < markerNum; j++) {
					if (markersSquare[i].patt_id == markerInfo[j].idPatt) {
						if (k == -1) {
							if (markerInfo[j].cfPatt >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
						} else if (markerInfo[j].cfPatt > markerInfo[k].cfPatt) k = j; // Higher confidence marker detected.
					}
				}
				if (k != -1) {
					markerInfo[k].id = markerInfo[k].idPatt;
					markerInfo[k].cf = markerInfo[k].cfPatt;
					markerInfo[k].dir = markerInfo[k].dirPatt;
				}
			} else {
				for (j = 0; j < markerNum; j++) {
					if (markersSquare[i].patt_id == markerInfo[j].idMatrix) {
						if (k == -1) {
							if (markerInfo[j].cfMatrix >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
						} else if (markerInfo[j].cfMatrix > markerInfo[k].cfMatrix) k = j; // Higher confidence marker detected.
					}
				}
				if (k != -1) {
					markerInfo[k].id = markerInfo[k].idMatrix;
					markerInfo[k].cf = markerInfo[k].cfMatrix;
					markerInfo[k].dir = markerInfo[k].dirMatrix;
				}
			}

			if (k != -1) {
				markersSquare[i].valid = TRUE;
				ARLOGd("Marker %d matched pattern %d.\n", i, markerInfo[k].id);
				// Get the transformation between the marker and the real camera into trans.
				if (markersSquare[i].validPrev && useContPoseEstimation) {
					err = arGetTransMatSquareCont(gAR3DHandle, &(markerInfo[k]), markersSquare[i].trans, markersSquare[i].marker_width, markersSquare[i].trans);
				} else {
					err = arGetTransMatSquare(gAR3DHandle, &(markerInfo[k]), markersSquare[i].marker_width, markersSquare[i].trans);
				}
			} else {
				markersSquare[i].valid = FALSE;
			}
	   
			if (markersSquare[i].valid) {
			
				// Filter the pose estimate.
				if (markersSquare[i].ftmi) {
					if (arFilterTransMat(markersSquare[i].ftmi, markersSquare[i].trans, !markersSquare[i].validPrev) < 0) {
						ARLOGe("arFilterTransMat error with marker %d.\n", i);
					}
				}
			
				if (!markersSquare[i].validPrev) {
					// Marker has become visible, tell any dependent objects.
                    VirtualEnvironmentHandleARMarkerAppeared(i);
				}
	
				// We have a new pose, so set that.
				arglCameraViewRH((const ARdouble (*)[4])markersSquare[i].trans, markersSquare[i].pose.T, 1.0f /*VIEW_SCALEFACTOR*/);
				// Tell any dependent objects about the update.
				VirtualEnvironmentHandleARMarkerWasUpdated(i, markersSquare[i].pose);
			
			} else {
			
				if (markersSquare[i].validPrev) {
					// Marker has ceased to be visible, tell any dependent objects.
					VirtualEnvironmentHandleARMarkerDisappeared(i);
				}
			}                    
		}
		
		// Tell GLUT the display has changed.
		glutPostRedisplay();
	} else {
		arUtilSleep(2);
	}
    
}
Example #20
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
	ARdouble m[16];
	
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
	
	arglDispImage(gARTImage, &(gCparamLT->param), 1.0, gArglSettings);	// zoom = 1.0.
	gARTImage = NULL; // Invalidate image data.
				
	// Projection transformation.
	arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
	glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
    glLoadMatrixf(p);
#else
    glLoadMatrixd(p);
#endif
	glMatrixMode(GL_MODELVIEW);
		
	glEnable(GL_DEPTH_TEST);

	// Viewing transformation.
	glLoadIdentity();
	// Lighting and geometry that moves with the camera should go here.
	// (I.e. must be specified before viewing transformations.)
	//none
	
	if (gPatt_found) {
	
		// Calculate the camera position relative to the marker.
		// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
		arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(m);
#else
        glLoadMatrixd(m);
#endif

		// All lighting and geometry to be drawn relative to the marker goes here.
		DrawCube();
	
	} // gPatt_found
	
	// Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }
	
	glutSwapBuffers();
}