Code example #1
File: mk_patt.c  Project: SeanXiao1988/artoolkit
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT); // Clear the buffers for new frame.
	
	arglDispImage(gARTImage, &gARTCparam, 1.0, gArglSettings);	// zoom = 1.0.
	arVideoCapNext();
	gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().

	if (gTarget != NULL) {
		glDisable(GL_DEPTH_TEST);
		glDisable(GL_LIGHTING);
		glDisable(GL_TEXTURE_2D);
		beginOrtho2D(gARTCparam.xsize, gARTCparam.ysize);
        glLineWidth(2.0f);
        glColor3d(0.0, 1.0, 0.0);
        lineSeg(gTarget->vertex[0][0], gTarget->vertex[0][1],
				gTarget->vertex[1][0], gTarget->vertex[1][1], gArglSettings, gARTCparam, 1.0);
        lineSeg(gTarget->vertex[3][0], gTarget->vertex[3][1],
				gTarget->vertex[0][0], gTarget->vertex[0][1], gArglSettings, gARTCparam, 1.0);
        glColor3d(1.0, 0.0, 0.0);
        lineSeg(gTarget->vertex[1][0], gTarget->vertex[1][1],
				gTarget->vertex[2][0], gTarget->vertex[2][1], gArglSettings, gARTCparam, 1.0);
        lineSeg(gTarget->vertex[2][0], gTarget->vertex[2][1],
				gTarget->vertex[3][0], gTarget->vertex[3][1], gArglSettings, gARTCparam, 1.0);
		endOrtho2D();
    }

	glutSwapBuffers();
}
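For orientation: in these GLUT-based samples, a Display() callback like the one above is installed via glutDisplayFunc(), and an idle handler drives per-frame capture and marker detection. The sketch below shows the typical wiring under that assumption; Display() and mainLoop() here are stand-in stubs, not code from mk_patt.c.

#include <GL/glut.h>

// Stand-in for the Display() shown above, so this sketch compiles on its own.
static void Display(void)
{
    glClear(GL_COLOR_BUFFER_BIT);
    glutSwapBuffers();
}

// Hypothetical idle handler: in the real samples this grabs the next video
// frame, runs marker detection, and then requests a redraw.
static void mainLoop(void)
{
    glutPostRedisplay();
}

int main(int argc, char *argv[])
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
    glutInitWindowSize(640, 480);
    glutCreateWindow("AR view");

    glutDisplayFunc(Display);  // Called whenever the window needs redrawing.
    glutIdleFunc(mainLoop);    // Drives per-frame video capture and detection.
    glutMainLoop();
    return 0;
}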
Code example #2
File: nftSimple.c  Project: GitHubGenLi/artoolkit5
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    int i;
    
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
    
    arglPixelBufferDataUpload(gArglSettings, gARTImage);
	arglDispImage(gArglSettings);
	gARTImage = NULL; // Invalidate image data.
				
    // Set up 3D mode.
	glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
	glLoadMatrixf(cameraLens);
#else
	glLoadMatrixd(cameraLens);
#endif
	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
    glEnable(GL_DEPTH_TEST);

    // Set any initial per-frame GL state you require here.
    // --->
    
    // Lighting and geometry that moves with the camera should be added here.
    // (I.e. should be specified before marker pose transform.)
    // --->
    
    for (i = 0; i < markersNFTCount; i++) {
        
        if (markersNFT[i].valid) {
        
#ifdef ARDOUBLE_IS_FLOAT
            glLoadMatrixf(markersNFT[i].pose.T);
#else
            glLoadMatrixd(markersNFT[i].pose.T);
#endif
            // All lighting and geometry to be drawn relative to the marker goes here.
            // --->
            DrawCube();
        }
    }
    
    // Set up 2D mode.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)gWindowW, 0, (GLdouble)gWindowH, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    // Add your own 2D overlays here.
    // --->
    
	glutSwapBuffers();
}
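Several of these examples wrap glLoadMatrixf()/glLoadMatrixd() in an ARDOUBLE_IS_FLOAT conditional. A small helper along the following lines (a sketch, not part of any of the samples) keeps that choice in one place:

#include <AR/ar.h>  // ARdouble, ARDOUBLE_IS_FLOAT
#include <GL/gl.h>

// Load an ARdouble[16] matrix onto the current OpenGL matrix stack,
// whether ARdouble is typedef'd to float or to double.
static void loadARMatrix(const ARdouble M[16])
{
#ifdef ARDOUBLE_IS_FLOAT
    glLoadMatrixf(M);
#else
    glLoadMatrixd(M);
#endif
}

With it, the repeated conditional blocks reduce to a single call such as loadARMatrix(cameraLens), loadARMatrix(p) or loadARMatrix(m).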
Code example #3
File: ARTK.cpp  Project: lazypanda/ARTK-Projects
//==============
// Drawing-time processing
//==============
void cARTK::display( void )
{
	// Draw the camera image
	arglDispImage( m_pARTImage, &m_sCameraParam, 1.0, m_pArglSettings );	// zoom = 1.0.

	// Start capturing the next camera image
	arVideoCapNext();

	m_pARTImage = NULL; // The image data pointer is no longer valid after calling arVideoCapNext().
}
Code example #4
File: multiCube.c  Project: GitHubGenLi/artoolkit5
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
	ARdouble m[16];
	
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
	
    arglPixelBufferDataUpload(gArglSettings, gARTImage);
	arglDispImage(gArglSettings);
	gARTImage = NULL; // Invalidate image data.
				
	// Projection transformation.
	arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
	glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
	glLoadMatrixf(p);
#else
	glLoadMatrixd(p);
#endif
	glMatrixMode(GL_MODELVIEW);
		
	// Viewing transformation.
	glLoadIdentity();
	// Lighting and geometry that moves with the camera should go here.
	// (I.e. must be specified before viewing transformations.)
	//none
	
	if (gPatt_found) {
	
		// Calculate the camera position relative to the marker.
		// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
		arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(m);
#else
        glLoadMatrixd(m);
#endif

        // Before drawing any geometry, mask out the area occupied by the cube marker itself.
        // This makes for a nicer visual presentation, but obviously only applies for this particular
        // shape of marker.
        DrawCubeMarkerMask();
        
		// All lighting and geometry to be drawn relative to the marker goes here.
		DrawCube();
	
	} // gPatt_found
	
	// Any 2D overlays go here.
	//none
	
	glutSwapBuffers();
}
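The VIEW_SCALEFACTOR, VIEW_DISTANCE_MIN and VIEW_DISTANCE_MAX macros used above (and in several of the examples that follow) are defined per sample. Typical definitions look like the following; the values shown here are illustrative and differ between projects:

#define VIEW_SCALEFACTOR    1.0       // Drawing units per ARToolKit unit (usually millimetres).
#define VIEW_DISTANCE_MIN   40.0      // Near clipping plane, in ARToolKit units.
#define VIEW_DISTANCE_MAX   10000.0   // Far clipping plane, in ARToolKit units.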
Code example #5
File: DIY.cpp  Project: imbinwang/simpleARDIY
void ARTApp::display(ARUint8 *arImage, ARParamLT *arParam, ARGL_CONTEXT_SETTINGS_REF arSettings, GLMmodel *objModel, const ARdouble pattTrans[3][4])
{
	ARdouble p[16];
	ARdouble m[16];

	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

	arglDispImage(arImage, &(arParam->param), 1.0, arSettings);	// zoom = 1.0.

	// Projection transformation.
	arglCameraFrustumRH(&(arParam->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
	glMatrixMode(GL_PROJECTION);
	
	glLoadMatrixd(p);

	glMatrixMode(GL_MODELVIEW);

	// Viewing transformation.
	glLoadIdentity();
	// Lighting and geometry that moves with the camera should go here.
	// (I.e. must be specified before viewing transformations.)
	//none
	glEnable(GL_LIGHTING);
	glEnable(GL_LIGHT0);
	glEnable(GL_DEPTH_TEST);

	if (objModel) 
	{
		// Calculate the camera position relative to the marker.
		// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
		arglCameraViewRH(pattTrans, m, VIEW_SCALEFACTOR);
		glLoadMatrixd(m);

		// All lighting and geometry to be drawn relative to the marker goes here.
		glPushMatrix(); // Save world coordinate system.
		glTranslatef(0.0f, 0.0f, pattWidth / 2.0); // Place base of object on marker surface.

		glmDraw(objModel, GLM_SMOOTH | GLM_MATERIAL);
		glPopMatrix();    // Restore world coordinate system.
	} 

	// Any 2D overlays go here.
	glMatrixMode(GL_PROJECTION);
	glLoadIdentity();
	glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
	glDisable(GL_LIGHTING);
	glDisable(GL_DEPTH_TEST);

	glutSwapBuffers();
}
Code example #6
File: CWebCam.cpp  Project: zphilip/VirtualTS
void CWebCam::Display(ARGL_CONTEXT_SETTINGS_REF arglSettings)
{
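	// Nothing to draw until a camera frame is available.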
	if(!ARTImage) return;

	// If enabled, draw the current camera image as the video background (zoom = 1.0).
	if(dispImage)
		arglDispImage(ARTImage, &ARTCparam, 1.0, arglSettings);

	// A marker was found: set up the camera projection and draw the virtual content.
	if(pattFound) {
		glMatrixMode(GL_PROJECTION);
		glLoadMatrixd(projectionMat);

		this->Draw();
	}

	// Start capture of the next video frame.
	ar2VideoCapNext(ARTVideo);

}
Code example #7
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
	int i;
    GLdouble p[16];
	GLdouble m[16];
	
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
	
	arglDispImage(gARTImage, &gARTCparam, 1.0, gArglSettings);	// zoom = 1.0.
	arVideoCapNext();
	gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().
				
	if (gPatt_found) {
		// Projection transformation.
		arglCameraFrustumRH(&gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
		glMatrixMode(GL_PROJECTION);
		glLoadMatrixd(p);
		glMatrixMode(GL_MODELVIEW);
		
		// Viewing transformation.
		glLoadIdentity();
		// Lighting and geometry that moves with the camera should go here.
		// (I.e. must be specified before viewing transformations.)
		//none
		
		// All other lighting and geometry goes here.
		// Calculate the camera position for each object and draw it.
		for (i = 0; i < gObjectDataCount; i++) {
			if ((gObjectData[i].visible != 0) && (gObjectData[i].vrml_id >= 0)) {
				//fprintf(stderr, "About to draw object %i\n", i);
				arglCameraViewRH(gObjectData[i].trans, m, VIEW_SCALEFACTOR_4);
				glLoadMatrixd(m);

				arVrmlDraw(gObjectData[i].vrml_id);
			}			
		}
	} // gPatt_found
	
	// Any 2D overlays go here.
	//none
	
	glutSwapBuffers();
}
Code example #8
File: Main.cpp  Project: marceloprates/ARShogi
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    GLdouble p[16];
	GLdouble m[16];

	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

	arglDispImage(gARTImage, &gARTCparam, 1.0, gArglSettings);	// zoom = 1.0.
	arVideoCapNext();
	gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().

	for(std::list<Piece*>::iterator it = pieces.begin(); it != pieces.end(); it++)
	{
		if ((*it)->patt_found) // gPatt_found
		{
			// Projection transformation.
			arglCameraFrustumRH(&gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
			glMatrixMode(GL_PROJECTION);
			glLoadMatrixd(p);
			glMatrixMode(GL_MODELVIEW);

			// Viewing transformation.
			glLoadIdentity();
			// Lighting and geometry that moves with the camera should go here.
			// (I.e. must be specified before viewing transformations.)
			//none

			// ARToolKit supplied distance in millimetres, but I want OpenGL to work in my units.
			arglCameraViewRH((*it)->patt_trans, m, VIEW_SCALEFACTOR);
			glLoadMatrixd(m);

			// All other lighting and geometry goes here.

			(*it)->Draw();

		} 
		
		// Any 2D overlays go here.
		//none
	}

	glutSwapBuffers();
}
Code example #9
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    GLdouble p[16];
	GLdouble m[16];
	
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
	
	arglDispImage(gARTImage, &gARTCparam, 1.0, gArglSettings);	// zoom = 1.0.
	arVideoCapNext();
	gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().
				
	// Projection transformation.
	arglCameraFrustumRH(&gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
	glMatrixMode(GL_PROJECTION);
	glLoadMatrixd(p);
	glMatrixMode(GL_MODELVIEW);
		
	// Viewing transformation.
	glLoadIdentity();
	// Lighting and geometry that moves with the camera should go here.
	// (I.e. must be specified before viewing transformations.)
	//none
	
	if (gPatt_found) {
	
		// Calculate the camera position relative to the marker.
		// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
		arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
		glLoadMatrixd(m);

		// All lighting and geometry to be drawn relative to the marker goes here.
		DrawCube();
	
	} // gPatt_found
	
	// Any 2D overlays go here.
	//none
	
	glutSwapBuffers();
}
Code example #10
File: nftBook.cpp  Project: AdJion/artoolkit5
JNIEXPORT void JNICALL JNIFUNCTION_NATIVE(nativeDrawFrame(JNIEnv* env, jobject obj))
{
	float width, height;
    
    if (!videoInited) {
#ifdef DEBUG
        LOGI("nativeDrawFrame !VIDEO\n");
#endif        
        return; // No point in trying to draw until video is inited.
    }
#ifdef DEBUG
    LOGI("nativeDrawFrame\n");
#endif        
    if (!gARViewInited) {
        if (!initARView()) return;
    }
    if (gARViewLayoutRequired) layoutARView();
    
    // Upload new video frame if required.
    if (videoFrameNeedsPixelBufferDataUpload) {
        arglPixelBufferDataUploadBiPlanar(gArglSettings, gVideoFrame, gVideoFrame + videoWidth*videoHeight);
        videoFrameNeedsPixelBufferDataUpload = false;
    }
    
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
    
    // Display the current frame
    arglDispImage(gArglSettings);
    
    // Set up 3D mode.
	glMatrixMode(GL_PROJECTION);
	glLoadMatrixf(cameraLens);
	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
    glStateCacheEnableDepthTest();

    // Set any initial per-frame GL state you require here.
    // --->
    
    // Lighting and geometry that moves with the camera should be added here.
    // (I.e. should be specified before camera pose transform.)
    // --->
    
    VirtualEnvironmentHandleARViewDrawPreCamera();
    
    if (cameraPoseValid) {
        
        glMultMatrixf(cameraPose);
        
        // All lighting and geometry to be drawn in world coordinates goes here.
        // --->
        VirtualEnvironmentHandleARViewDrawPostCamera();
    }
        
    // If you added external OpenGL code above, and that code doesn't use the glStateCache routines,
    // then uncomment the line below.
    //glStateCacheFlush();
    
    // Set up 2D mode.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
	width = (float)viewPort[viewPortIndexWidth];
	height = (float)viewPort[viewPortIndexHeight];
	glOrthof(0.0f, width, 0.0f, height, -1.0f, 1.0f);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glStateCacheDisableDepthTest();

    // Add your own 2D overlays here.
    // --->
    
    VirtualEnvironmentHandleARViewDrawOverlay();
    
    // If you added external OpenGL code above, and that code doesn't use the glStateCache routines,
    // then uncomment the line below.
    //glStateCacheFlush();

#ifdef DEBUG
    // Example of 2D drawing (debug builds only): it just draws a white border line around the view.
    const GLfloat square_vertices [4][2] = { {0.5f, 0.5f}, {0.5f, height - 0.5f}, {width - 0.5f, height - 0.5f}, {width - 0.5f, 0.5f} };
    glStateCacheDisableLighting();
    glStateCacheDisableTex2D();
    glVertexPointer(2, GL_FLOAT, 0, square_vertices);
    glStateCacheEnableClientStateVertexArray();
    glColor4ub(255, 255, 255, 255);
    glDrawArrays(GL_LINE_LOOP, 0, 4);
#endif
}
Code example #11
File: simpleLite.c  Project: OpenSorceress/artoolkit5
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
	ARdouble m[16];
	
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
	
	arglDispImage(gARTImage, &(gCparamLT->param), 1.0, gArglSettings);	// zoom = 1.0.
	gARTImage = NULL; // Invalidate image data.
				
	// Projection transformation.
	arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
	glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
    glLoadMatrixf(p);
#else
    glLoadMatrixd(p);
#endif
	glMatrixMode(GL_MODELVIEW);
		
	glEnable(GL_DEPTH_TEST);

	// Viewing transformation.
	glLoadIdentity();
	// Lighting and geometry that moves with the camera should go here.
	// (I.e. must be specified before viewing transformations.)
	//none
	
	if (gPatt_found) {
	
		// Calculate the camera position relative to the marker.
		// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
		arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(m);
#else
        glLoadMatrixd(m);
#endif

		// All lighting and geometry to be drawn relative to the marker goes here.
		DrawCube();
	
	} // gPatt_found
	
	// Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }
	
	glutSwapBuffers();
}
Code example #12
File: ARNative.cpp  Project: AadityaDev/artoolkit5
JNIEXPORT void JNICALL JNIFUNCTION_NATIVE(nativeDrawFrame(JNIEnv* env, jobject obj))
{
	float width, height;
    float viewProjection[16];

    if (!videoInited) {
#ifdef DEBUG
        LOGI("nativeDrawFrame !VIDEO\n");
#endif        
        return; // No point in trying to draw until video is inited.
    }
#ifdef DEBUG
    LOGI("nativeDrawFrame\n");
#endif        
    if (!gARViewInited) {
        if (!initARView()) return;
    }
    if (gARViewLayoutRequired) layoutARView();
    
    // Upload new video frame if required.
    if (videoFrameNeedsPixelBufferDataUpload) {
        arglPixelBufferDataUploadBiPlanar(gArglSettings, gVideoFrame, gVideoFrame + videoWidth*videoHeight);
        videoFrameNeedsPixelBufferDataUpload = false;
    }
    
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
    
    // Display the current frame
    arglDispImage(gArglSettings);
    
    if (!program) {
        GLuint vertShader = 0, fragShader = 0;
        // A simple shader pair which accepts just a vertex position and colour, no lighting.
        const char vertShaderString[] =
            "attribute vec4 position;\n"
            "attribute vec4 colour;\n"
            "uniform mat4 modelViewProjectionMatrix;\n"
            "varying vec4 colourVarying;\n"
            "void main()\n"
            "{\n"
                "gl_Position = modelViewProjectionMatrix * position;\n"
                "colourVarying = colour;\n"
            "}\n";
        const char fragShaderString[] =
            "#ifdef GL_ES\n"
            "precision mediump float;\n"
            "#endif\n"
            "varying vec4 colourVarying;\n"
            "void main()\n"
            "{\n"
                "gl_FragColor = colourVarying;\n"
            "}\n";

        if (program) arglGLDestroyShaders(0, 0, program);
        program = glCreateProgram();
        if (!program) {
            ARLOGe("drawCube: Error creating shader program.\n");
            arglGLDestroyShaders(vertShader, fragShader, program);
            return;
        }

        if (!arglGLCompileShaderFromString(&vertShader, GL_VERTEX_SHADER, vertShaderString)) {
            ARLOGe("drawCube: Error compiling vertex shader.\n");
            arglGLDestroyShaders(vertShader, fragShader, program);
            return;
        }
        if (!arglGLCompileShaderFromString(&fragShader, GL_FRAGMENT_SHADER, fragShaderString)) {
            ARLOGe("drawCube: Error compiling fragment shader.\n");
            arglGLDestroyShaders(vertShader, fragShader, program);
            return;
        }
        glAttachShader(program, vertShader);
        glAttachShader(program, fragShader);

        glBindAttribLocation(program, ATTRIBUTE_VERTEX, "position");
        glBindAttribLocation(program, ATTRIBUTE_COLOUR, "colour");
        if (!arglGLLinkProgram(program)) {
            ARLOGe("drawCube: Error linking shader program.\n");
            arglGLDestroyShaders(vertShader, fragShader, program);
            return;
        }
        arglGLDestroyShaders(vertShader, fragShader, 0); // After linking, shader objects can be deleted.

        // Retrieve linked uniform locations.
        uniforms[UNIFORM_MODELVIEW_PROJECTION_MATRIX] = glGetUniformLocation(program, "modelViewProjectionMatrix");
    }
    glUseProgram(program);

    // Set up 3D mode.
    mtxLoadIdentityf(viewProjection);
    mtxMultMatrixf(viewProjection, cameraLens);
    glStateCacheEnableDepthTest();

    // Set any initial per-frame GL state you require here.
    // --->

    // Lighting and geometry that moves with the camera should be added here.
    // (I.e. should be specified before camera pose transform.)
    // --->

    // Draw an object on all valid markers.
    for (int i = 0; i < markersSquareCount; i++) {
        if (markersSquare[i].valid) {
        	float viewProjection2[16];
        	mtxLoadMatrixf(viewProjection2, viewProjection);
            mtxMultMatrixf(viewProjection2, markersSquare[i].pose.T);
            drawCube(viewProjection2, 40.0f, 0.0f, 0.0f, 20.0f);
        }
    }

    if (cameraPoseValid) {

        mtxMultMatrixf(viewProjection, cameraPose);

        // All lighting and geometry to be drawn in world coordinates goes here.
        // --->
    }

    // If you added external OpenGL code above, and that code doesn't use the glStateCache routines,
    // then uncomment the line below.
    //glStateCacheFlush();
    
    // Set up 2D mode.
    mtxLoadIdentityf(viewProjection);
	width = (float)viewPort[viewPortIndexWidth];
	height = (float)viewPort[viewPortIndexHeight];
	mtxOrthof(viewProjection, 0.0f, width, 0.0f, height, -1.0f, 1.0f);
    glStateCacheDisableDepthTest();

    // Add your own 2D overlays here.
    // --->

    // If you added external OpenGL code above, and that code doesn't use the glStateCache routines,
    // then uncomment the line below.
    //glStateCacheFlush();

#ifdef DEBUG
    // Example of 2D drawing (debug builds only): it just draws a white border line around the view.
    const GLfloat square_vertices [4][3] = { {0.5f, 0.5f, 0.0f}, {0.5f, height - 0.5f, 0.0f}, {width - 0.5f, height - 0.5f, 0.0f}, {width - 0.5f, 0.5f, 0.0f} };
    const GLubyte square_vertex_colors_white [4][4] = {
        {255, 255, 255, 255}, {255, 255, 255, 255}, {255, 255, 255, 255}, {255, 255, 255, 255}};

    glUniformMatrix4fv(uniforms[UNIFORM_MODELVIEW_PROJECTION_MATRIX], 1, GL_FALSE, viewProjection);

 	glVertexAttribPointer(ATTRIBUTE_VERTEX, 3, GL_FLOAT, GL_FALSE, 0, square_vertices);
	glEnableVertexAttribArray(ATTRIBUTE_VERTEX);
	glVertexAttribPointer(ATTRIBUTE_COLOUR, 4, GL_UNSIGNED_BYTE, GL_TRUE, 0, square_vertex_colors_white);
    glEnableVertexAttribArray(ATTRIBUTE_COLOUR);

    if (!arglGLValidateProgram(program)) {
        ARLOGe("Error: shader program %d validation failed.\n", program);
        return;
    }

    glDrawArrays(GL_LINE_LOOP, 0, 4);
#endif

#ifdef DEBUG
    CHECK_GL_ERROR();
#endif
}
Code example #13
File: main.c  Project: phoxelua/ar_sculpting
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    GLdouble p[16];
	GLdouble m[16];
	
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
	
	arglDispImage(gARTImage, &gARTCparam, 1.0, gArglSettings);	// zoom = 1.0.
	arVideoCapNext();
	gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().
				
	// Projection transformation.
	arglCameraFrustumRH(&gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
	glMatrixMode(GL_PROJECTION);
	glLoadMatrixd(p);
	glMatrixMode(GL_MODELVIEW);
		
	// Viewing transformation.
	glLoadIdentity();
	// Lighting and geometry that moves with the camera should go here.
	// (I.e. must be specified before viewing transformations.)
	//none
	
	// Read the latest Leap Motion dump.
	int koo = read_leap_dump();

	// Delta of the tracked position (px1, py1, pz1) since the previous frame (px0, py0, pz0).
	float dx = px1 - px0;
	float dy = py1 - py0;
	float dz = pz1 - pz0;

	printf("dx %f\n", dx);

	px0 = px1;
	py0 = py1;
	pz0 = pz1;

	float stepX = 100.0f / glutGet(GLUT_WINDOW_X);
	float stepY = 100.0f / glutGet(GLUT_WINDOW_Y);

	// Integrate the scaled delta into the current object position.
	PX = stepX*dx + PX;
	PY = stepY*dy + PY;
	PZ = stepX*dz + PZ;

	if (snapToMarker && gPatt_found) {
	
		// Calculate the camera position relative to the marker.
		// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
		arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
		glLoadMatrixd(m);

		// All lighting and geometry to be drawn relative to the marker goes here.
		DrawItem();
	
	} // gPatt_found
	else {
		// Marker not found (or not snapping to the marker): reuse the last known marker transform.
		arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
		glLoadMatrixd(m);

		// All lighting and geometry to be drawn relative to the marker goes here.
		DrawItem();

	}
	
	// Any 2D overlays go here.
	//none
	
	glutSwapBuffers();
}
Code example #14
File: ARMovie.cpp  Project: AdJion/artoolkit5
JNIEXPORT void JNICALL JNIFUNCTION_NATIVE(nativeDrawFrame(JNIEnv* env, jobject obj, jint movieWidth, jint movieHeight, jint movieTextureID, jfloatArray movieTextureMtx))
{
	float width, height;
	
	// Get the array contents.
	//jsize movieTextureMtxLen = env->GetArrayLength(movieTextureMtx);
	float movieTextureMtxUnpacked[16];
    env->GetFloatArrayRegion(movieTextureMtx, 0, /*movieTextureMtxLen*/ 16, movieTextureMtxUnpacked);
        
    if (!videoInited) {
#ifdef DEBUG
        LOGI("nativeDrawFrame !VIDEO\n");
#endif        
        return; // No point in trying to draw until video is inited.
    }
    if (!nftDataLoaded && nftDataLoadingThreadHandle) {
        // Check if NFT data loading has completed.
        if (threadGetStatus(nftDataLoadingThreadHandle) > 0) {
            nftDataLoaded = true;
            threadWaitQuit(nftDataLoadingThreadHandle);
            threadFree(&nftDataLoadingThreadHandle); // Clean up.
        } else {
#ifdef DEBUG
            LOGI("nativeDrawFrame !NFTDATA\n");
#endif        
            return; // No point in trying to draw until NFT data is loaded.
        }
    }
#ifdef DEBUG
    LOGI("nativeDrawFrame\n");
#endif        
    if (!gARViewInited) {
        if (!initARView()) return;
    }
    if (gARViewLayoutRequired) layoutARView();
    
    // Upload new video frame if required.
    if (videoFrameNeedsPixelBufferDataUpload) {
        pthread_mutex_lock(&gVideoFrameLock);
        arglPixelBufferDataUploadBiPlanar(gArglSettings, gVideoFrame, gVideoFrame + videoWidth*videoHeight);
        videoFrameNeedsPixelBufferDataUpload = false;
        pthread_mutex_unlock(&gVideoFrameLock);
    }
    
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
    
    // Display the current frame
    arglDispImage(gArglSettings);
    
    // Set up 3D mode.
	glMatrixMode(GL_PROJECTION);
	glLoadMatrixf(cameraLens);
	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
    glStateCacheEnableDepthTest();

    // Set any initial per-frame GL state you require here.
    // --->
    
    // Lighting and geometry that moves with the camera should be added here.
    // (I.e. should be specified before camera pose transform.)
    // --->
        
    // Draw an object on all valid markers.
    for (int i = 0; i < markersNFTCount; i++) {
        if (markersNFT[i].valid) {
            glLoadMatrixf(markersNFT[i].pose.T);
            
            //
            // Draw a rectangular surface textured with the movie texture.
            //
            float w = 80.0f;
            float h = w * (float)movieHeight/(float)movieWidth;
            GLfloat vertices[4][2] = { {0.0f, 0.0f}, {w, 0.0f}, {w, h}, {0.0f, h} };
            GLfloat normals[4][3] = { {0.0f, 0.0f, 1.0f}, {0.0f, 0.0f, 1.0f}, {0.0f, 0.0f, 1.0f}, {0.0f, 0.0f, 1.0f} };
            GLfloat texcoords[4][2] = { {0.0f, 0.0f},  {1.0f, 0.0f},  {1.0f, 1.0f},  {0.0f, 1.0f} };

            glStateCacheActiveTexture(GL_TEXTURE0);

            glMatrixMode(GL_TEXTURE);
            glPushMatrix();
            glLoadMatrixf(movieTextureMtxUnpacked);
            glMatrixMode(GL_MODELVIEW);
            
            glVertexPointer(2, GL_FLOAT, 0, vertices);
            glNormalPointer(GL_FLOAT, 0, normals);
            glStateCacheClientActiveTexture(GL_TEXTURE0);
            glTexCoordPointer(2, GL_FLOAT, 0, texcoords);
            glStateCacheEnableClientStateVertexArray();
            glStateCacheEnableClientStateNormalArray();
            glStateCacheEnableClientStateTexCoordArray();
            glStateCacheBindTexture2D(0);
            glStateCacheDisableTex2D();
            glStateCacheDisableLighting();

            glEnable(GL_TEXTURE_EXTERNAL_OES);
            glBindTexture(GL_TEXTURE_EXTERNAL_OES, movieTextureID);

            glDrawArrays(GL_TRIANGLE_FAN, 0, 4);

            glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
            glDisable(GL_TEXTURE_EXTERNAL_OES);

            glMatrixMode(GL_TEXTURE);
            glPopMatrix();
            glMatrixMode(GL_MODELVIEW);
            //
            // End.
            //
        }
    }
    
    if (cameraPoseValid) {
        
        glMultMatrixf(cameraPose);
        
        // All lighting and geometry to be drawn in world coordinates goes here.
        // --->
    }
        
    // If you added external OpenGL code above, and that code doesn't use the glStateCache routines,
    // then uncomment the line below.
    //glStateCacheFlush();
    
    // Set up 2D mode.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
	width = (float)viewPort[viewPortIndexWidth];
	height = (float)viewPort[viewPortIndexHeight];
	glOrthof(0.0f, width, 0.0f, height, -1.0f, 1.0f);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glStateCacheDisableDepthTest();

    // Add your own 2D overlays here.
    // --->
    
    // If you added external OpenGL code above, and that code doesn't use the glStateCache routines,
    // then uncomment the line below.
    //glStateCacheFlush();

#ifdef DEBUG
    // Example of 2D drawing (debug builds only): it just draws a white border line around the view.
    const GLfloat square_vertices [4][2] = { {0.5f, 0.5f}, {0.5f, height - 0.5f}, {width - 0.5f, height - 0.5f}, {width - 0.5f, 0.5f} };
    glStateCacheDisableLighting();
    glStateCacheDisableTex2D();
    glVertexPointer(2, GL_FLOAT, 0, square_vertices);
    glStateCacheEnableClientStateVertexArray();
    glColor4ub(255, 255, 255, 255);
    glDrawArrays(GL_LINE_LOOP, 0, 4);

    CHECK_GL_ERROR();
#endif
}
Code example #15
File: ARTK.cpp  Project: binzhang/ARTK_MMD
//==============
// Drawing-time processing
//==============
void cARTK::display( void )
{
	// Draw the camera image
	arglDispImage( m_pARTImage, &m_sCameraParam, 1.0, m_pArglSettings );	// zoom = 1.0.
}
Code example #16
File: simpleMovie.c  Project: afauch/artoolkit5
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
	ARdouble m[16];
    double zoom;
	
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
	
    arglPixelBufferDataUpload(gArglSettings, gARTImage);
    arglDispImage(gArglSettings);
	gARTImage = NULL; // Invalidate image data.
				
	// Projection transformation.
	arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
	glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
    glLoadMatrixf(p);
#else
    glLoadMatrixd(p);
#endif
	glMatrixMode(GL_MODELVIEW);
		
	glEnable(GL_DEPTH_TEST);

	// Viewing transformation.
	glLoadIdentity();
	// Lighting and geometry that moves with the camera should go here.
	// (I.e. must be specified before viewing transformations.)
	//none
	
	if (gPatt_found) {
	
		// Calculate the camera position relative to the marker.
		// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
		arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(m);
#else
        glLoadMatrixd(m);
#endif

		// All lighting and geometry to be drawn relative to the marker goes here.
        
        // Draw the movie frame.
        if (gMovieImage) {
            glPushMatrix();
            glRotatef(90.0f, 1.0f, 0.0f, 0.0f); // Place movie in x-z plane instead of x-y plane.
            glTranslated(-gPatt_width*0.5, 0.0f, 0.0f); // Movie origin is at lower-left of movie frame. Place this at the edge of the marker .
            zoom = 1.0/gMovieCparam.xsize * gPatt_width; // Scale the movie frame so that it is the same width as the marker.
            glScaled(zoom, zoom, 1.0); // Apply the scale; without it, 'zoom' would be computed but unused.
            arglPixelBufferDataUpload(gMovieArglSettings, gMovieImage);
            arglDispImageStateful(gMovieArglSettings); // Show the movie frame.
            glPopMatrix();
        }
	
	} // gPatt_found
	
	// Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }
	
	glutSwapBuffers();
}
Code example #17
File: simpleVRML.c  Project: kolzar/artoolkit5
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    int i;
    GLdouble p[16];
    GLdouble m[16];

    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    arglDispImage(gARTImage, &(gCparamLT->param), 1.0, gArglSettings);	// zoom = 1.0.
    gARTImage = NULL; // Invalidate image data.

    // Projection transformation.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
    glMatrixMode(GL_PROJECTION);
    glLoadMatrixd(p);
    glMatrixMode(GL_MODELVIEW);

    glEnable(GL_DEPTH_TEST);

    // Viewing transformation.
    glLoadIdentity();
    // Lighting and geometry that moves with the camera should go here.
    // (I.e. must be specified before viewing transformations.)
    //none

    for (i = 0; i < gObjectDataCount; i++) {

        if ((gObjectData[i].visible != 0) && (gObjectData[i].vrml_id >= 0)) {

            // Calculate the camera position for the object and draw it.
            // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
            arglCameraViewRH(gObjectData[i].trans, m, VIEW_SCALEFACTOR);
            glLoadMatrixd(m);

            // All lighting and geometry to be drawn relative to the marker goes here.
            //ARLOGe("About to draw object %i\n", i);
            arVrmlDraw(gObjectData[i].vrml_id);
        }
    }

    // Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }

    glutSwapBuffers();
}
Code example #18
File: simpleOSG.c  Project: Belial2010/artoolkit5
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
    
    arglPixelBufferDataUpload(gArglSettings, gARTImage);
    arglDispImage(gArglSettings);
	gARTImage = NULL; // Invalidate image data.
				
    // Set up 3D mode.
	glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
	glLoadMatrixf(cameraLens);
#else
	glLoadMatrixd(cameraLens);
#endif
	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
    glEnable(GL_DEPTH_TEST);

    // Set any initial per-frame GL state you require here.
    // --->
    
    // Lighting and geometry that moves with the camera should be added here.
    // (I.e. should be specified before camera pose transform.)
    // --->
    
    VirtualEnvironmentHandleARViewDrawPreCamera();
    
    if (cameraPoseValid) {
        
#ifdef ARDOUBLE_IS_FLOAT
        glMultMatrixf(cameraPose);
#else
        glMultMatrixd(cameraPose);
#endif
        
        // All lighting and geometry to be drawn in world coordinates goes here.
        // --->
        VirtualEnvironmentHandleARViewDrawPostCamera();
    }
    
    // Set up 2D mode.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)gWindowW, 0, (GLdouble)gWindowH, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    // Add your own 2D overlays here.
    // --->
    
    VirtualEnvironmentHandleARViewDrawOverlay();

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }
	
	glutSwapBuffers();
}
Code example #19
File: check_id.c  Project: hyyh619/ARToolKit_5.3.1
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
    ARdouble m[16];

#ifdef ARDOUBLE_IS_FLOAT
    GLdouble p0[16];
    GLdouble m0[16];
#endif
    int                 i, j, k;
    GLfloat             w, bw, bh, vertices[6][2];
    GLubyte             pixels[300];
    char                text[256];
    GLdouble            winX, winY, winZ;
    int                 showMErr[CHECK_ID_MULTIMARKERS_MAX];
    GLdouble            MX[CHECK_ID_MULTIMARKERS_MAX];
    GLdouble            MY[CHECK_ID_MULTIMARKERS_MAX];
    int                 pattDetectMode;
    AR_MATRIX_CODE_TYPE matrixCodeType;


    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);     // Clear the buffers for new frame.

    arglPixelBufferDataUpload(gArglSettings, gARTImage);
    arglDispImage(gArglSettings);

    if (gMultiConfigCount)
    {
        arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
        glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(p);
#else
        glLoadMatrixd(p);
#endif
        glMatrixMode(GL_MODELVIEW);
        glEnable(GL_DEPTH_TEST);

        // If we have multi-configs, show their origin onscreen.
        for (k = 0; k < gMultiConfigCount; k++)
        {
            showMErr[k] = FALSE;
            if (gMultiConfigs[k]->prevF != 0)
            {
                arglCameraViewRH((const ARdouble (*)[4])gMultiConfigs[k]->trans, m, 1.0);
#ifdef ARDOUBLE_IS_FLOAT
                glLoadMatrixf(m);
#else
                glLoadMatrixd(m);
#endif
                drawAxes();
#ifdef ARDOUBLE_IS_FLOAT
                for (i = 0; i < 16; i++)
                    m0[i] = (GLdouble)m[i];

                for (i = 0; i < 16; i++)
                    p0[i] = (GLdouble)p[i];

                if (gluProject(0, 0, 0, m0, p0, gViewport, &winX, &winY, &winZ) == GL_TRUE)
#else
                if (gluProject(0, 0, 0, m, p, gViewport, &winX, &winY, &winZ) == GL_TRUE)
#endif
                {
                    showMErr[k] = TRUE;
                    MX[k]       = winX; MY[k] = winY;
                }
            }
        } // for k
    }

    // Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    arGetPatternDetectionMode(gARHandle, &pattDetectMode);
    arGetMatrixCodeType(gARHandle, &matrixCodeType);

    // For all markers, draw onscreen position.
    // Colour based on cutoffPhase.
    glLoadIdentity();
    glVertexPointer(2, GL_FLOAT, 0, vertices);
    glEnableClientState(GL_VERTEX_ARRAY);
    glLineWidth(2.0f);

    for (j = 0; j < gARHandle->marker_num; j++)
    {
        glColor3ubv(cutoffPhaseColours[gARHandle->markerInfo[j].cutoffPhase].colour);

        for (i = 0; i < 5; i++)
        {
            int dir = gARHandle->markerInfo[j].dir;
            vertices[i][0] = (float)gARHandle->markerInfo[j].vertex[(i + 4 - dir) % 4][0] * (float)windowWidth / (float)gARHandle->xsize;
            vertices[i][1] = ((float)gARHandle->ysize - (float)gARHandle->markerInfo[j].vertex[(i + 4 - dir) % 4][1]) * (float)windowHeight / (float)gARHandle->ysize;
        }

        vertices[i][0] = (float)gARHandle->markerInfo[j].pos[0] * (float)windowWidth / (float)gARHandle->xsize;
        vertices[i][1] = ((float)gARHandle->ysize - (float)gARHandle->markerInfo[j].pos[1]) * (float)windowHeight / (float)gARHandle->ysize;
        glDrawArrays(GL_LINE_STRIP, 0, 6);
        // For markers that have been identified, draw the ID number.
        if (gARHandle->markerInfo[j].id >= 0)
        {
            glColor3ub(255, 0, 0);
            if (matrixCodeType == AR_MATRIX_CODE_GLOBAL_ID && (pattDetectMode == AR_MATRIX_CODE_DETECTION || pattDetectMode == AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX || pattDetectMode == AR_TEMPLATE_MATCHING_MONO_AND_MATRIX))
                snprintf(text, sizeof(text), "%llu (err=%d)", gARHandle->markerInfo[j].globalID, gARHandle->markerInfo[j].errorCorrected);
            else
                snprintf(text, sizeof(text), "%d", gARHandle->markerInfo[j].id);

            print(text, (float)gARHandle->markerInfo[j].pos[0] * (float)windowWidth / (float)gARHandle->xsize, ((float)gARHandle->ysize - (float)gARHandle->markerInfo[j].pos[1]) * (float)windowHeight / (float)gARHandle->ysize, 0, 0);
        }
    }

    glDisableClientState(GL_VERTEX_ARRAY);

    // For matrix mode, draw the pattern image of the largest marker.
    if (pattDetectMode == AR_MATRIX_CODE_DETECTION || pattDetectMode == AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX || pattDetectMode == AR_TEMPLATE_MATCHING_MONO_AND_MATRIX)
    {
        int area = 0, biggestMarker = -1;

        for (j = 0; j < gARHandle->marker_num; j++)
            if (gARHandle->markerInfo[j].area > area)
            {
                area          = gARHandle->markerInfo[j].area;
                biggestMarker = j;
            }

        if (area >= AR_AREA_MIN)
        {
            int      imageProcMode;
            ARdouble pattRatio;
            ARUint8  ext_patt[AR_PATT_SIZE2_MAX * AR_PATT_SIZE2_MAX * 3]; // Holds unwarped pattern extracted from image.
            int      size;
            int      zoom = 4;
            ARdouble vertexUpright[4][2];

            // Reorder vertices based on dir.
            for (i = 0; i < 4; i++)
            {
                int dir = gARHandle->markerInfo[biggestMarker].dir;
                vertexUpright[i][0] = gARHandle->markerInfo[biggestMarker].vertex[(i + 4 - dir) % 4][0];
                vertexUpright[i][1] = gARHandle->markerInfo[biggestMarker].vertex[(i + 4 - dir) % 4][1];
            }

            arGetImageProcMode(gARHandle, &imageProcMode);
            arGetPattRatio(gARHandle, &pattRatio);
            if (matrixCodeType == AR_MATRIX_CODE_GLOBAL_ID)
            {
                size = 14;
                arPattGetImage2(imageProcMode, AR_MATRIX_CODE_DETECTION, size, size * AR_PATT_SAMPLE_FACTOR2,
                                gARTImage, gARHandle->xsize, gARHandle->ysize, gARHandle->arPixelFormat, &gCparamLT->paramLTf, vertexUpright, (ARdouble)14 / (ARdouble)(14 + 2), ext_patt);
            }
            else
            {
                size = matrixCodeType & AR_MATRIX_CODE_TYPE_SIZE_MASK;
                arPattGetImage2(imageProcMode, AR_MATRIX_CODE_DETECTION, size, size * AR_PATT_SAMPLE_FACTOR2,
                                gARTImage, gARHandle->xsize, gARHandle->ysize, gARHandle->arPixelFormat, &gCparamLT->paramLTf, vertexUpright, pattRatio, ext_patt);
            }

            glRasterPos2f((float)(windowWidth - size * zoom) - 4.0f, (float)(size * zoom) + 4.0f);
            glPixelZoom((float)zoom, (float)-zoom);
            glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
            glDrawPixels(size, size, GL_LUMINANCE, GL_UNSIGNED_BYTE, ext_patt);
            glPixelZoom(1.0f, 1.0f);
        }
    }


    // Draw error value for multimarker pose.
    for (k = 0; k < gMultiConfigCount; k++)
    {
        if (showMErr[k])
        {
            snprintf(text, sizeof(text), "err=%0.3f", gMultiErrs[k]);
            print(text, MX[k], MY[k], 0, 0);
        }
    }

    //
    // Draw help text and mode.
    //
    glLoadIdentity();
    if (gShowMode)
    {
        printMode();
    }

    if (gShowHelp)
    {
        if (gShowHelp == 1)
        {
            printHelpKeys();
        }
        else if (gShowHelp == 2)
        {
            bw = 0.0f;

            for (i = 0; i < AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT; i++)
            {
                w = (float)glutBitmapLength(GLUT_BITMAP_HELVETICA_10, (unsigned char*)arMarkerInfoCutoffPhaseDescriptions[cutoffPhaseColours[i].cutoffPhase]);
                if (w > bw)
                    bw = w;
            }

            bw += 12.0f; // Space for color block.
            bh  = AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT * 10.0f /* character height */ + (AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT - 1) * 2.0f /* line spacing */;
            drawBackground(bw, bh, 2.0f, 2.0f);

            // Draw the colour block and text, line by line.
            for (i = 0; i < AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT; i++)
            {
                for (j = 0; j < 300; j += 3)
                {
                    pixels[j]     = cutoffPhaseColours[i].colour[0];
                    pixels[j + 1] = cutoffPhaseColours[i].colour[1];
                    pixels[j + 2] = cutoffPhaseColours[i].colour[2];
                }

                glRasterPos2f(2.0f, (AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT - 1 - i) * 12.0f + 2.0f);
                glPixelZoom(1.0f, 1.0f);
                glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
                glDrawPixels(10, 10, GL_RGB, GL_UNSIGNED_BYTE, pixels);
                print(arMarkerInfoCutoffPhaseDescriptions[cutoffPhaseColours[i].cutoffPhase], 14.0f, (AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT - 1 - i) * 12.0f + 2.0f, 0, 0);
            }
        }
    }

    glutSwapBuffers();
}