static void dispFunc( void )
{
    int     x, y;
    int     i, j;
    char    str[32];

    glClear( GL_COLOR_BUFFER_BIT );

#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    argViewportSetPixFormat( vp[page/AR2_BLUR_IMAGE_MAX], AR_PIXEL_FORMAT_MONO );
    argDrawMode2D( vp[page/AR2_BLUR_IMAGE_MAX] );
    argDrawImage( imageSet->scale[page/AR2_BLUR_IMAGE_MAX]->imgBWBlur[page%AR2_BLUR_IMAGE_MAX] );
#else
    argViewportSetPixFormat( vp[page], AR_PIXEL_FORMAT_MONO );
    argDrawMode2D( vp[page] );
    argDrawImage( imageSet->scale[page]->imgBW );
#endif

    if (display_fset) {
        for( i = 0; i < featureSet->list[page].num; i++ ) {
            x = featureSet->list[page].coord[i].x;
            y = featureSet->list[page].coord[i].y;
            drawFeatureRect( x, y, AR2_DEFAULT_TS1, AR2_DEFAULT_TS2 );
            sprintf(str, "%d", i);
            glColor3f( 0.0f, 0.0f, 1.0f );
            argDrawStringsByObservedPos(str, x, y);
        }
        ARLOG("fset: Num of feature points: %d\n", featureSet->list[page].num);
    }

    if (display_fset2) {
        for( i = j = 0; i < refDataSet->num; i++ ) {
            if( refDataSet->refPoint[i].refImageNo != page ) continue;
            x = refDataSet->refPoint[i].coord2D.x;
            y = refDataSet->refPoint[i].coord2D.y;
            glColor3f( 0.0f, 1.0f, 0.0f );
            argDrawLineByObservedPos(x-5, y-5, x+5, y+5);
            argDrawLineByObservedPos(x+5, y-5, x-5, y+5);
            j++;
        }
        ARLOG("fset2: Num of feature points: %d\n", j);
#if 0
        for (i = 0; i < refDataSet->pageNum; i++) {
            for (j = 0; j < refDataSet->pageInfo[i].imageNum; j++) {
                if (refDataSet->pageInfo[i].imageInfo[j].imageNo == page) {
                    ARLOG("fset2: Image size: %dx%d\n", refDataSet->pageInfo[i].imageInfo[j].width, refDataSet->pageInfo[i].imageInfo[j].height);
                }
            }
        }
#endif
    }

    argSwapBuffers();
}
static void dispFunc(void)
{
    glClear(GL_COLOR_BUFFER_BIT);

#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    argViewportSetPixFormat(vp[page / AR2_BLUR_IMAGE_MAX], AR_PIXEL_FORMAT_MONO);
    argDrawMode2D(vp[page / AR2_BLUR_IMAGE_MAX]);
    argDrawImage(imageSet->scale[page / AR2_BLUR_IMAGE_MAX]->imgBWBlur[page % AR2_BLUR_IMAGE_MAX]);
#else
    argViewportSetPixFormat(vp[page], AR_PIXEL_FORMAT_MONO);
    argDrawMode2D(vp[page]);
    argDrawImage(imageSet->scale[page]->imgBW);
#endif

    argSwapBuffers();
}
static void dispImage(void)
{
    AR2VideoBufferT *buff;
    double           x, y;
    int              ssx, eex, ssy, eey;
    int              i;

    if( status == 0 ) {
        while (!(buff = arVideoGetImage()) || !buff->fillFlag) arUtilSleep(2);
        argDrawMode2D( vp );
        argDrawImage(buff->buff);
    }
    else if( status == 1 ) {
        argDrawMode2D( vp );
        argDrawImage( patt.savedImage[patt.loop_num-1] );

        for( i = 0; i < point_num; i++ ) {
            x = patt.point[patt.loop_num-1][i].x_coord;
            y = patt.point[patt.loop_num-1][i].y_coord;
            glColor3f( 1.0f, 0.0f, 0.0f );
            argDrawLineByObservedPos( x-10, y, x+10, y );
            argDrawLineByObservedPos( x, y-10, x, y+10 );
        }

        if( sx != -1 && sy != -1 ) {
            if( sx < ex ) { ssx = sx; eex = ex; }
            else          { ssx = ex; eex = sx; }
            if( sy < ey ) { ssy = sy; eey = ey; }
            else          { ssy = ey; eey = sy; }
            dispClipImage( ssx, ssy, eex-ssx+1, eey-ssy+1, clipImage );
        }
    }
    else if( status == 2 ) {
        argDrawMode2D( vp );
        argDrawImage( patt.savedImage[check_num] );

        for( i = 0; i < patt.h_num*patt.v_num; i++ ) {
            x = patt.point[check_num][i].x_coord;
            y = patt.point[check_num][i].y_coord;
            glColor3f( 1.0f, 0.0f, 0.0f );
            argDrawLineByObservedPos( x-10, y, x+10, y );
            argDrawLineByObservedPos( x, y-10, x, y+10 );
        }
        draw_line();
    }

    argSwapBuffers();
}
static void init( void )
{
    ARParam  wparam;
    char     name1[256], name2[256];

    printf("Enter camera parameter filename");
    printf("(Data/camera_para.dat): ");
    if( fgets(name1, 256, stdin) == NULL ) exit(0);
    if( sscanf(name1, "%s", name2) != 1 ) {
        strcpy( name2, "Data/camera_para.dat");
    }
    if( arParamLoad(name2, 1, &wparam) < 0 ) {
        printf("Parameter load error !!\n");
        exit(0);
    }

    if( arVideoOpen(vconf) < 0 ) exit(0);
    if( arVideoInqSize(&xsize, &ysize) < 0 ) exit(0);
    arMalloc( image, ARUint8, xsize*ysize*AR_PIX_SIZE );
    printf("Image size (x,y) = (%d,%d)\n", xsize, ysize);

    arParamChangeSize( &wparam, xsize, ysize, &param );
    arParamDisp( &param );
    arInitCparam( &param );

    argInit( &param, 1.0, 0, 0, 0, 0 );
    argDrawMode2D();
}
/* main loop */
static void mainLoop(void)
{
    int i, j, k;

    if( count == 100 ) {
        printf("*** %f (frame/sec)\n", (double)count/arUtilTimer());
        count = 0;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0, 0 );

    /* if the debug mode is on draw squares
       around the detected squares in the video image */
    if( arDebug ) {
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 2, 1 );
        else
            argDispImage( arImage, 2, 1);

        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 3.0 );
        for( i = 0; i < marker_num; i++ ) {
            if( marker_info[i].id < 0 ) continue;
            argDrawSquare( marker_info[i].vertex, 2, 1 );
        }
        glLineWidth( 1.0 );
    }

    argSwapBuffers();
}
/* main loop */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             j, k;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0,0 );

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    // additions from sheyne, trying to understand how it is working.
    // from documentation on http://artoolkit.sourceforge.net/apidoc/structARMarkerInfo.html
    // also look at: http://www.hitl.washington.edu/artoolkit/documentation/devframework.htm
    printf("pos: %f,%f\n", marker_info->pos[0], marker_info->pos[1]);
    for (j=0; j<4; j++) {
        printf("\t vertex: %f, %f\n", marker_info->vertex[j][0], marker_info->vertex[j][1]);
    }

    arVideoCapNext();

    /* check for object visibility */
    k = -1;
    for( j = 0; j < marker_num; j++ ) {
        if( patt_id == marker_info[j].id ) {
            if( k == -1 ) k = j;
            else if( marker_info[k].cf < marker_info[j].cf ) k = j;
        }
    }
    if( k == -1 ) {
        argSwapBuffers();
        return;
    }

    /* get the transformation between the marker and the real camera */
    arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);

    draw();

    argSwapBuffers();
}
/* main loop */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             j, k;

    if ((dataPtr = GetNextImage()) != NULL) {
        if( count == 0 ) arUtilTimerReset();
        count++;

        argDrawMode2D();
        argDispImage( dataPtr, 0,0 );

        /* detect the markers in the video frame */
        if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
            printf("couldn't find any marker");
            cleanup();
            exit(0);
        }
        //else printf("i found the marker");

        /* check for object visibility */
        k = -1;
        for( j = 0; j < marker_num; j++ ) {
            if( patt_id == marker_info[j].id ) {
                if( k == -1 ) k = j;
                else if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
        if( k == -1 ) {
            argSwapBuffers();
            return;
        }

        /* get the transformation between the marker and the real camera */
        arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);

        if (geometry == 0) {
            draw();
        }
        else {
            geometryout();
        }

        argSwapBuffers();
    }
    else {
        printf("Done - press any key \n");
        getchar();
        exit(0);
    }
}
/* main loop */
static void mainLoop(void)
{
    static int      contF = 0;
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             j, k;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0,0 );

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    arVideoCapNext();

    /* check for object visibility */
    k = -1;
    for( j = 0; j < marker_num; j++ ) {
        if( patt_id == marker_info[j].id ) {
            if( k == -1 ) k = j;
            else if( marker_info[k].cf < marker_info[j].cf ) k = j;
        }
    }
    if( k == -1 ) {
        contF = 0;
        argSwapBuffers();
        return;
    }

    /* get the transformation between the marker and the real camera */
    if( mode == 0 || contF == 0 ) {
        arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);
    }
    else {
        arGetTransMatCont(&marker_info[k], patt_trans, patt_center, patt_width, patt_trans);
    }
    contF = 1;

    draw( patt_trans );

    argSwapBuffers();
}
/* draw the user object */
static int draw_object(int obj_id, double gl_para[16])
{
    GLfloat mat_ambient[]         = { 0.0, 0.0, 1.0, 1.0 };
    GLfloat mat_ambient_collide[] = { 1.0, 0.0, 0.0, 1.0 };
    GLfloat mat_flash[]           = { 0.0, 0.0, 1.0, 1.0 };
    GLfloat mat_flash_collide[]   = { 1.0, 0.0, 0.0, 1.0 };
    GLfloat mat_flash_shiny[]     = { 50.0 };
    GLfloat light_position[]      = { 100.0, -200.0, 200.0, 0.0 };
    GLfloat ambi[]                = { 0.1, 0.1, 0.1, 0.1 };
    GLfloat lightZeroColor[]      = { 0.9, 0.9, 0.9, 0.1 };

    argDrawMode3D();
    argDraw3dCamera(0, 0);
    glMatrixMode(GL_MODELVIEW);
    glLoadMatrixd(gl_para);

    /* set the material */
    glEnable(GL_LIGHTING);
    glEnable(GL_LIGHT0);
    glLightfv(GL_LIGHT0, GL_POSITION, light_position);
    glLightfv(GL_LIGHT0, GL_AMBIENT, ambi);
    glLightfv(GL_LIGHT0, GL_DIFFUSE, lightZeroColor);
    glMaterialfv(GL_FRONT, GL_SHININESS, mat_flash_shiny);

    switch (obj_id) {
        case 0:
            glMaterialfv(GL_FRONT, GL_SPECULAR, mat_flash);
            glMaterialfv(GL_FRONT, GL_AMBIENT, mat_ambient);
            glColor3f(1.0, 2.0, 0.0);
            //glTranslated(10.0, 20.0, -100.0);
            draw_scene();
            //glTranslated(-10.0, -20.0, 100.0);
            break;
        case 1:
            glMaterialfv(GL_FRONT, GL_SPECULAR, mat_flash);
            glMaterialfv(GL_FRONT, GL_AMBIENT, mat_ambient);
            glColor3f(1.0, 0.0, 0.0);
            //draw piano left
            break;
        case 2:
            glMaterialfv(GL_FRONT, GL_SPECULAR, mat_flash);
            glMaterialfv(GL_FRONT, GL_AMBIENT, mat_ambient);
            glColor3f(1.0, 2.0, 0.0);
            draw_controller(distX/4, distY/4);
            break;
    }

    argDrawMode2D();

    return 0;
}
static void mainLoop()
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             j, k;

    // Get video frame
    if ((dataPtr = (ARUint8 *)arVideoGetImage()) == NULL) {
        arUtilSleep(2);
        return;
    }

    if (count == 0) {
        arUtilTimerReset();
    }
    count++;

    // Display video stream
    argDrawMode2D();
    argDispImage(dataPtr, 0, 0);

    /* detect the markers in the video frame */
    if (arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0) {
        cleanup();
        exit(0);
    }

    // Get next video frame
    arVideoCapNext();

    /* check for object visibility */
    k = -1;
    for (j = 0; j < marker_num; j++) {
        if (patt_id == marker_info[j].id) {
            if (k == -1) k = j;
            else if (marker_info[k].cf < marker_info[j].cf) k = j;
        }
    }

    // Don't bother drawing or calculating orientations if no patterns are found
    if (k == -1) {
        argSwapBuffers();
        return;
    }

    /* get the transformation between the marker and the real camera */
    arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);

    draw();

    argSwapBuffers();
}
/* draw the targets */
static void draw( targetInfo myTarget, double BaseTrans[3][4])
{
    double  gl_para[16];
    GLfloat light_position[]  = {100.0, -200.0, 200.0, 0.0};
    GLfloat ambi[]            = {0.1, 0.1, 0.1, 0.1};
    GLfloat lightZeroColor[]  = {0.9, 0.9, 0.9, 0.1};
    GLfloat mat_ambient2[]    = {0.0, 0.0, 1.0, 1.0};
    GLfloat mat_ambient[]     = {1.0, 0.0, 0.0, 1.0};
    GLfloat mat_flash2[]      = {0.0, 1.0, 1.0, 1.0};
    GLfloat mat_flash_shiny2[]= {50.0};

    argDrawMode3D();
    argDraw3dCamera( 0, 0 );
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LEQUAL);

    /* load the camera transformation matrix */
    glMatrixMode(GL_MODELVIEW);
    argConvGlpara(BaseTrans, gl_para);
    glLoadMatrixd( gl_para );

    /* set the lighting and the materials */
    glEnable(GL_LIGHTING);
    glEnable(GL_LIGHT0);
    glLightfv(GL_LIGHT0, GL_POSITION, light_position);
    glLightfv(GL_LIGHT0, GL_AMBIENT, ambi);
    glLightfv(GL_LIGHT0, GL_DIFFUSE, lightZeroColor);
    glMaterialfv(GL_FRONT, GL_SPECULAR, mat_flash2);
    glMaterialfv(GL_FRONT, GL_SHININESS, mat_flash_shiny2);

    if(myTarget.state == TOUCHED)
        glMaterialfv(GL_FRONT, GL_AMBIENT, mat_ambient);
    else
        glMaterialfv(GL_FRONT, GL_AMBIENT, mat_ambient2);

    glMatrixMode(GL_MODELVIEW);
    glTranslatef( myTarget.pos[0], myTarget.pos[1], myTarget.pos[2] );
    glutSolidCube(40.0);

    if(myTarget.state == TOUCHED){
        glColor3f(1.0,1.0,1.0);
        glLineWidth(6.0);
        glutWireCube(60.0);
        glLineWidth(1.0);
    }

    glDisable( GL_LIGHTING );
    glDisable( GL_DEPTH_TEST );

    argDrawMode2D();
}
static void mainLoop(void)
{
    //maincount++;
    //if (maincount > 2) exit(0);

    argDrawMode2D();
    argDispImage( dataPtr, 0,0 );

    draw();

    argSwapBuffers();

    sleep(1);
    return; // (0);
}
/* draw the items on the ground */
void drawItems(double trans[3][4], ItemList* itlist)
{
    int     i;
    double  gl_para[16];
    GLfloat light_position[]  = {100.0, -200.0, 200.0, 0.0};
    GLfloat ambi[]            = {0.1, 0.1, 0.1, 0.1};
    GLfloat lightZeroColor[]  = {0.9, 0.9, 0.9, 0.1};
    GLfloat mat_ambient[]     = {0.0, 1.0, 0.0, 1.0};
    GLfloat mat_flash2[]      = {0.0, 1.0, 1.0, 1.0};
    GLfloat mat_flash_shiny2[]= {50.0};

    argDrawMode3D();
    argDraw3dCamera( 0, 0 );
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LEQUAL);

    glEnable(GL_LIGHTING);
    glEnable(GL_LIGHT0);
    glLightfv(GL_LIGHT0, GL_POSITION, light_position);
    glLightfv(GL_LIGHT0, GL_AMBIENT, ambi);
    glLightfv(GL_LIGHT0, GL_DIFFUSE, lightZeroColor);
    glMaterialfv(GL_FRONT, GL_SPECULAR, mat_flash2);
    glMaterialfv(GL_FRONT, GL_SHININESS, mat_flash_shiny2);
    glMaterialfv(GL_FRONT, GL_AMBIENT, mat_ambient);

    /* load the camera transformation matrix */
    glMatrixMode(GL_MODELVIEW);
    argConvGlpara(trans, gl_para);
    glLoadMatrixd( gl_para );

    for(i = 0; i < itlist->itemnum; i ++ ) {
        if (!itlist->item[i].onpaddle) {
            glPushMatrix();
            glTranslatef(itlist->item[i].pos[0], itlist->item[i].pos[1], 10.0 );
            glColor3f(0.0,1.0,0.0);
            glutSolidSphere(10,10,10);
            glPopMatrix();
        }
    }

    glDisable( GL_LIGHTING );
    glDisable( GL_DEPTH_TEST );

    argDrawMode2D();
}
static void mainLoop(void)
{
    ARUint8 *dataPtr;
    int      cornerCount;
    char     buf[256];
    int      i;

    if ((dataPtr = arVideoGetImage()) == NULL) {
        arUtilSleep(2);
        return;
    }

    glClear(GL_COLOR_BUFFER_BIT);
    argDrawMode2D(vp);
    argDrawImage(dataPtr);

    // Convert to grayscale, results will go to arIPI->image, which also provides the backing for calibImage.
    arImageProcLuma(arIPI, dataPtr);

    cornerFlag = cvFindChessboardCorners(calibImage, cvSize(chessboardCornerNumY, chessboardCornerNumX), corners, &cornerCount, CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_FILTER_QUADS);

    if (cornerFlag) glColor4ub(255, 0, 0, 255);
    else            glColor4ub(0, 255, 0, 255);
    glLineWidth(2.0f);

    // ARLOG("Detected corners = %d\n", cornerCount);
    for (i = 0; i < cornerCount; i++) {
        argDrawLineByObservedPos(corners[i].x - 5, corners[i].y - 5, corners[i].x + 5, corners[i].y + 5);
        argDrawLineByObservedPos(corners[i].x - 5, corners[i].y + 5, corners[i].x + 5, corners[i].y - 5);
        // ARLOG("  %f, %f\n", corners[i].x, corners[i].y);
        sprintf(buf, "%d\n", i);
        argDrawStringsByObservedPos(buf, corners[i].x, corners[i].y + 20);
    }

    sprintf(buf, "Captured Image: %2d/%2d\n", capturedImageNum, calibImageNum);
    argDrawStringsByObservedPos(buf, 10, 30);

    argSwapBuffers();
}
/* draw the user object */
static int draw_object( int obj_id, double gl_para[16], int collide_flag )
{
    GLfloat mat_ambient[]         = {0.0, 0.0, 1.0, 1.0};
    GLfloat mat_ambient_collide[] = {1.0, 0.0, 0.0, 1.0};
    GLfloat mat_flash[]           = {0.0, 0.0, 1.0, 1.0};
    GLfloat mat_flash_collide[]   = {1.0, 0.0, 0.0, 1.0};
    GLfloat mat_flash_shiny[]     = {50.0};
    GLfloat light_position[]      = {100.0, -200.0, 200.0, 0.0};
    GLfloat ambi[]                = {0.1, 0.1, 0.1, 0.1};
    GLfloat lightZeroColor[]      = {0.9, 0.9, 0.9, 0.1};

    argDrawMode3D();
    argDraw3dCamera( 0, 0 );
    glMatrixMode(GL_MODELVIEW);
    glLoadMatrixd( gl_para );

    /* set the material */
    glEnable(GL_LIGHTING);
    glEnable(GL_LIGHT0);
    glLightfv(GL_LIGHT0, GL_POSITION, light_position);
    glLightfv(GL_LIGHT0, GL_AMBIENT, ambi);
    glLightfv(GL_LIGHT0, GL_DIFFUSE, lightZeroColor);
    glMaterialfv(GL_FRONT, GL_SHININESS, mat_flash_shiny);

    if(collide_flag) {
        glMaterialfv(GL_FRONT, GL_SPECULAR, mat_flash_collide);
        glMaterialfv(GL_FRONT, GL_AMBIENT, mat_ambient_collide);
        /* draw a sphere */
        glTranslatef( 0.0, 0.0, 30.0 );
        glutSolidSphere(30,12,6);
    }
    else {
        glMaterialfv(GL_FRONT, GL_SPECULAR, mat_flash);
        glMaterialfv(GL_FRONT, GL_AMBIENT, mat_ambient);
        /* draw a cube */
        glTranslatef( 0.0, 0.0, 30.0 );
        glutSolidCube(60);
    }

    argDrawMode2D();

    return 0;
}
void MainLoop(void)
{
    ARUint8      *image;
    ARMarkerInfo *marker_info;
    int           marker_num;
    int           j, k;

    if ((image = (ARUint8 *)arVideoGetImage()) == NULL) {
        arUtilSleep(2);
        return;
    }

    argDrawMode2D();
    argDispImage(image, 0, 0);

    if (arDetectMarker(image, thresh, &marker_info, &marker_num) < 0) {
        Cleanup();
        exit(0);
    }

    arVideoCapNext();

    k = -1;
    for (j = 0; j < marker_num; j++) {
        if (patt_id == marker_info[j].id) {
            if (k == -1) k = j;
            else if (marker_info[k].cf < marker_info[j].cf) k = j;
        }
    }

    if (k != -1) {
        arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);
        DrawObject();
    }

    argSwapBuffers();
}
/*
// Draw Module
*/
static int draw_scene()
{
    draw_grid(distX, distY);

    /*
    glBegin(GL_QUADS);
        glVertex2d(distX, 0.0);
        glVertex2d(distX, -distY);
        glVertex2d(0.0, -distY);
        glVertex2d(0.0, 0.0);
    glEnd();
    */

    glTranslated((distX/2)-250, -distY/2, 0.0);
    draw_signal();
    glTranslated(-(distX / 2)-250, +distY/2, 0.0);
    //glEnd();

    argDrawMode2D();

    return 0;
}
/* main loop */
static void mainLoop(void)
{
    static int      contF = 0;
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             j, k;

    // update new data
    g_MyKinect.Update();

#ifdef USE_USERDETECTOR
    if(g_MyKinect.userStatus.isPlayerVisible()) {
        XV3 tmp = g_MyKinect.userDetector->getSkeletonJointPosition(XN_SKEL_RIGHT_HAND);
        printf("Right hand position: %.2f %.2f %.2f\n", tmp.X, tmp.Y, tmp.Z);
    }
#endif

    if(drawFromKinect) {
        // get image data to detect marker
        if( (dataPtr = (ARUint8 *)g_MyKinect.GetBGRA32Image()) == NULL ) {
            arUtilSleep(2);
            return;
        }
    }
    else {
        /* grab a video frame */
        if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
            arUtilSleep(2);
            return;
        }
    }

    if( count == 0 ) arUtilTimerReset();
    count++;

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    if(drawFromKinect) {
        // Optional: choose among several display modes - color image, depth drawn by color, or depth mixed image.
        if(displayMode == 2) dataPtr = (ARUint8 *)g_MyKinect.GetDepthDrewByColor();
        else if(displayMode == 3) dataPtr = (ARUint8 *)g_MyKinect.GetDepthMixedImage();
    }

    argDrawMode2D();
    argDispImage( dataPtr, 0,0 );

    arVideoCapNext();

    /* check for object visibility */
    k = -1;
    for( j = 0; j < marker_num; j++ ) {
        if( patt_id == marker_info[j].id ) {
            if( k == -1 ) k = j;
            else if( marker_info[k].cf < marker_info[j].cf ) k = j;
        }
    }
    if( k == -1 ) {
        contF = 0;
        argSwapBuffers();
        return;
    }

    /* get the transformation between the marker and the real camera */
    if( mode == 0 || contF == 0 ) {
        arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);
    }
    else {
        arGetTransMatCont(&marker_info[k], patt_trans, patt_center, patt_width, patt_trans);
    }
    contF = 1;

    draw( patt_trans );

    argSwapBuffers();
}
static void mainLoop(void)
{
    ARUint8      *dataPtr;
    ARMarkerInfo *markerInfo;
    int           markerNum;
    ARdouble      patt_trans[3][4];
    ARdouble      err;
    int           imageProcMode;
    int           debugMode;
    int           j, k;

    /* grab a video frame */
    if ((dataPtr = (ARUint8*)arVideoGetImage()) == NULL) {
        arUtilSleep(2);
        return;
    }

    /* detect the markers in the video frame */
    if (arDetectMarker(arHandle, dataPtr) < 0) {
        cleanup();
        exit(0);
    }

    argSetWindow(w1);
    argDrawMode2D(vp1);
    arGetDebugMode(arHandle, &debugMode);
    if (debugMode == 0) {
        argDrawImage(dataPtr);
    } else {
        arGetImageProcMode(arHandle, &imageProcMode);
        if (imageProcMode == AR_IMAGE_PROC_FRAME_IMAGE) {
            argDrawImage(arHandle->labelInfo.bwImage);
        } else {
            argDrawImageHalf(arHandle->labelInfo.bwImage);
        }
    }

    argSetWindow(w2);
    argDrawMode2D(vp2);
    argDrawImage(dataPtr);
    argSetWindow(w1);

    if (count % 10 == 0) {
        sprintf(fps, "%f[fps]", 10.0 / arUtilTimer());
        arUtilTimerReset();
    }
    count++;
    glColor3f(0.0f, 1.0f, 0.0f);
    argDrawStringsByIdealPos(fps, 10, ysize - 30);

    markerNum = arGetMarkerNum(arHandle);
    if (markerNum == 0) {
        argSetWindow(w1);
        argSwapBuffers();
        argSetWindow(w2);
        argSwapBuffers();
        return;
    }

    /* check for object visibility */
    markerInfo = arGetMarker(arHandle);
    k = -1;
    for (j = 0; j < markerNum; j++) {
        // ARLOG("ID=%d, CF = %f\n", markerInfo[j].id, markerInfo[j].cf);
        if (patt_id == markerInfo[j].id) {
            if (k == -1) {
                if (markerInfo[j].cf > 0.7) k = j;
            } else if (markerInfo[j].cf > markerInfo[k].cf) k = j;
        }
    }
    if (k == -1) {
        argSetWindow(w1);
        argSwapBuffers();
        argSetWindow(w2);
        argSwapBuffers();
        return;
    }

    err = arGetTransMatSquare(ar3DHandle, &(markerInfo[k]), patt_width, patt_trans);
    sprintf(errValue, "err = %f", err);
    glColor3f(0.0f, 1.0f, 0.0f);
    argDrawStringsByIdealPos(fps, 10, ysize - 30);
    argDrawStringsByIdealPos(errValue, 10, ysize - 60);
    // ARLOG("err = %f\n", err);

    draw(patt_trans);

    argSetWindow(w1);
    argSwapBuffers();
    argSetWindow(w2);
    argSwapBuffers();
}
/* main loop */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    float           curPaddlePos[3];
    int             i;
    double          err;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    /* detect the markers in the video frame */
    if( arDetectMarkerLite(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    argDrawMode2D();
    if( !arDebug ) {
        argDispImage( dataPtr, 0,0 );
    }
    else {
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 0, 0 );
        else
            argDispImage( arImage, 0, 0);

        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 1.0 );
        for( i = 0; i < marker_num; i++ ) {
            argDrawSquare( marker_info[i].vertex, 0, 0 );
        }
        glLineWidth( 1.0 );
    }

    arVideoCapNext();

    for( i = 0; i < marker_num; i++ ) marker_flag[i] = 0;

    /* get the paddle position */
    paddleGetTrans(paddleInfo, marker_info, marker_flag, marker_num, &cparam);

    /* draw the 3D models */
    glClearDepth( 1.0 );
    glClear(GL_DEPTH_BUFFER_BIT);

    /* draw the paddle, base and menu */
    if( paddleInfo->active ){
        draw_paddle( paddleInfo);
    }

    /* get the translation from the multimarker pattern */
    if( (err=arMultiGetTransMat(marker_info, marker_num, config)) < 0 ) {
        argSwapBuffers();
        return;
    }
    //printf("err = %f\n", err);
    if(err > 100.0 ) {
        argSwapBuffers();
        return;
    }

    //draw a red ground grid
    drawGroundGrid( config->trans, 20, 150.0f, 105.0f, 0.0f);

    /* find the paddle position relative to the base */
    findPaddlePosition(curPaddlePos, paddleInfo->trans, config->trans);

    /* check for collisions with targets */
    for(i=0;i<TARGET_NUM;i++){
        myTarget[i].state = NOT_TOUCHED;
        if(checkCollision(curPaddlePos, myTarget[i].pos, 20.0f)) {
            myTarget[i].state = TOUCHED;
            fprintf(stderr,"touched !!\n");
        }
    }

    /* draw the targets */
    for(i=0;i<TARGET_NUM;i++){
        draw(myTarget[i],config->trans);
    }

    argSwapBuffers();
}
/* main loop */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             j, k;
    int             i;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0,0 );

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    arVideoCapNext();

    /* check for object visibility */
    for( i = 0; i < PTT_NUM; i++){
        k = -1;
        for( j = 0; j < marker_num; j++ ) {
            if( object[i].patt_id == marker_info[j].id ) {
                if( k == -1 ) k = j;
                else if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
        if( k == -1 ) {
            /* marker not found */
            object[i].visible = 0;
            isFirst[i] = 1;
        }
        else{
            /* get the transformation between the marker and the real camera */
            if( isFirst[i]){
                arGetTransMat(&marker_info[k], object[i].patt_center, object[i].patt_width, object[i].patt_trans);
            }else{
                arGetTransMatCont(&marker_info[k], object[i].patt_trans, object[i].patt_center, object[i].patt_width, object[i].patt_trans);
            }
            object[i].visible = 1;
            isFirst[i] = 0;

            /* addition */
            if(i == PTT2_MARK_ID){
                arUtilMatInv( object[PTT2_MARK_ID].patt_trans, itrans2); // compute the inverse matrix
            }
        }
    }

    //Initialize(); // fix me

    draw();

    argSwapBuffers();
}
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             j, k;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }

    glClearColor( 0.0, 0.0, 0.0, 0.0 );
    glClearDepth( 1.0 );
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    argDrawMode2D();
    if( disp_mode ) {
        argDispImage( dataPtr, 0, 0 );
    }
    else {
        argDispImage( dataPtr, 1, 1 );
    }

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    arVideoCapNext();

    /* if the debug mode is on draw squares
       around the detected squares in the video image */
    if( arDebug ) {
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 2, 1 );
        else
            argDispImage( arImage, 2, 1);
    }

    /* check for object visibility */
    k = -1;
    for( j = 0; j < marker_num; j++ ) {
        if( marker_info[j].id == target_id ) {
            if( k == -1 ) k = j;
            else {
                if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
    }

    if( k != -1 ) {
        glDisable(GL_DEPTH_TEST);
        switch( outputMode ) {
            case 0:
                getResultRaw( &marker_info[k] );
                break;
            case 1:
                getResultQuat( &marker_info[k] );
                break;
        }
    }

    argSwapBuffers();
}
/* draw the paddle */
int draw_paddle( ARPaddleInfo *paddleInfo )
{
    double  gl_para[16];
    int     i;

    argDrawMode3D();
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LEQUAL);
    argDraw3dCamera( 0, 0 );
    argConvGlpara(paddleInfo->trans, gl_para);
    glMatrixMode(GL_MODELVIEW);
    glLoadMatrixd( gl_para );

    glColor3f( 1.0, 0.0, 0.0 );
    glLineWidth(4.0);
    glBegin(GL_LINE_LOOP);
        glVertex2f( -25.0, -25.0 );
        glVertex2f(  25.0, -25.0 );
        glVertex2f(  25.0,  25.0 );
        glVertex2f( -25.0,  25.0 );
    glEnd();

    glColor3f( 0.0, 0.0, 1.0);
    glBegin(GL_LINE_LOOP);
    for( i = 0; i < 16; i++ ) {
        double x, y;
        x = PADDLE_RADIUS * cos(i*3.141592*2/16);
        y = PADDLE_RADIUS * sin(i*3.141592*2/16);
        glVertex2d( x, y );
    }
    glEnd();

    glBegin(GL_LINE_LOOP);
        glVertex2f( -7.5,    0.0 );
        glVertex2f(  7.5,    0.0 );
        glVertex2f(  7.5, -105.0 );
        glVertex2f( -7.5, -105.0 );
    glEnd();

    glEnable(GL_BLEND);
    glBlendFunc(GL_ZERO,GL_ONE);
    glColor4f(1,1,1,0);
    glBegin(GL_POLYGON);
    for( i = 0; i < 16; i++ ) {
        double x, y;
        x = 40.0 * cos(i*3.141592*2/16);
        y = 40.0 * sin(i*3.141592*2/16);
        glVertex2d( x, y );
    }
    glEnd();

    glBegin(GL_POLYGON);
        glVertex2f( -7.5,    0.0 );
        glVertex2f(  7.5,    0.0 );
        glVertex2f(  7.5, -105.0 );
        glVertex2f( -7.5, -105.0 );
    glEnd();
    glDisable(GL_BLEND);

    glDisable(GL_DEPTH_TEST);

    argDrawMode2D();

    return 0;
}
static void mainLoop(void)
{
    AR2VideoBufferT *buff;
    ARMarkerInfo    *markerInfo;
    int              markerNum;
    ARdouble         patt_trans[3][4];
    ARdouble         err;
    int              debugMode;
    int              j, k;

    /* grab a video frame */
    buff = arVideoGetImage();
    if (!buff || !buff->fillFlag) {
        arUtilSleep(2);
        return;
    }

    /* detect the markers in the video frame */
    if( arDetectMarker(arHandle, buff) < 0 ) {
        cleanup();
        exit(0);
    }

    argSetWindow(w1);
    arGetDebugMode(arHandle, &debugMode);
    if (debugMode == AR_DEBUG_ENABLE) {
        int imageProcMode;
        argViewportSetPixFormat(vp1, AR_PIXEL_FORMAT_MONO); // Drawing the debug image.
        argDrawMode2D(vp1);
        arGetImageProcMode(arHandle, &imageProcMode);
        if (imageProcMode == AR_IMAGE_PROC_FRAME_IMAGE) argDrawImage(arHandle->labelInfo.bwImage);
        else argDrawImageHalf(arHandle->labelInfo.bwImage);
    } else {
        AR_PIXEL_FORMAT pixFormat;
        arGetPixelFormat(arHandle, &pixFormat);
        argViewportSetPixFormat(vp1, pixFormat); // Drawing the input image.
        argDrawMode2D(vp1);
        argDrawImage(buff->buff);
    }

    argSetWindow(w2);
    argDrawMode2D(vp2);
    argDrawImage(buff->buff);
    argSetWindow(w1);

    if( count % 10 == 0 ) {
        sprintf(fps, "%f[fps]", 10.0/arUtilTimer());
        arUtilTimerReset();
    }
    count++;
    glColor3f(0.0f, 1.0f, 0.0f);
    argDrawStringsByIdealPos(fps, 10, ysize-30);

    markerNum = arGetMarkerNum( arHandle );
    if( markerNum == 0 ) {
        argSetWindow(w1);
        argSwapBuffers();
        argSetWindow(w2);
        argSwapBuffers();
        return;
    }

    /* check for object visibility */
    markerInfo = arGetMarker( arHandle );
    k = -1;
    for( j = 0; j < markerNum; j++ ) {
        //ARLOG("ID=%d, CF = %f\n", markerInfo[j].id, markerInfo[j].cf);
        if( patt_id == markerInfo[j].id ) {
            if( k == -1 ) {
                if (markerInfo[j].cf > 0.7) k = j;
            } else if (markerInfo[j].cf > markerInfo[k].cf) k = j;
        }
    }
    if( k == -1 ) {
        argSetWindow(w1);
        argSwapBuffers();
        argSetWindow(w2);
        argSwapBuffers();
        return;
    }

    err = arGetTransMatSquare(ar3DHandle, &(markerInfo[k]), patt_width, patt_trans);
    sprintf(errValue, "err = %f", err);
    glColor3f(0.0f, 1.0f, 0.0f);
    argDrawStringsByIdealPos(fps, 10, ysize-30);
    argDrawStringsByIdealPos(errValue, 10, ysize-60);
    //ARLOG("err = %f\n", err);

    draw(patt_trans);

    argSetWindow(w1);
    argSwapBuffers();
    argSetWindow(w2);
    argSwapBuffers();
}
/* main loop */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             i, j, k;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0,0 );

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    arVideoCapNext();

    argDrawMode3D();
    argDraw3dCamera( 0, 0 );
    glClearDepth( 1.0 );
    glClear(GL_DEPTH_BUFFER_BIT);

    /* check for object visibility */
    for( i = 0; i < 2; i++ ) {
        k = -1;
        for( j = 0; j < marker_num; j++ ) {
            if( object[i].patt_id == marker_info[j].id ) {
                if( k == -1 ) k = j;
                else if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
        object[i].visible = k;
        if( k >= 0 ) {
            arGetTransMat(&marker_info[k], object[i].center, object[i].width, object[i].trans);
            draw( object[i].model_id, object[i].trans );
        }
    }
    argSwapBuffers();

    if( object[0].visible >= 0 && object[1].visible >= 0 ) {
        double  wmat1[3][4], wmat2[3][4];
        arUtilMatInv(object[0].trans, wmat1);
        arUtilMatMul(wmat1, object[1].trans, wmat2);
        for( j = 0; j < 3; j++ ) {
            for( i = 0; i < 4; i++ ) printf("%8.4f ", wmat2[j][i]);
            printf("\n");
        }
        printf("\n\n");
    }
}
/*************************************************************************************
**
** drawGroundGrid - draws a ground plane
**
***************************************************************************************/
int drawGroundGrid( double trans[3][4], int divisions, float x, float y, float height)
{
    double  gl_para[16];
    int     i;
    float   x0, x1, y0, y1;
    float   deltaX, deltaY;

    argDrawMode3D();
    argDraw3dCamera( 0, 0 );
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LEQUAL);

    /* load the camera transformation matrix */
    glMatrixMode(GL_MODELVIEW);
    argConvGlpara(trans, gl_para);
    glLoadMatrixd( gl_para );

    glTranslatef(x/2., -y/2., 0.);

    //draw the grid
    glColor3f(1,0,0);
    glLineWidth(6.0);
    glBegin(GL_LINE_LOOP);
        glVertex3f( -x,  y, height );
        glVertex3f(  x,  y, height );
        glVertex3f(  x, -y, height );
        glVertex3f( -x, -y, height );
    glEnd();

    glLineWidth(3.0);

    //draw a grid of lines
    //X direction
    x0 = -x; x1 = -x;
    y0 = -y; y1 =  y;
    deltaX = (2*x)/divisions;
    for(i=0;i<divisions;i++){
        x0 = x0 + deltaX;
        glBegin(GL_LINES);
            glVertex3f(x0,y0,height);
            glVertex3f(x0,y1,height);
        glEnd();
    }

    x0 = -x; x1 = x;
    deltaY = (2*y)/divisions;
    for(i=0;i<divisions;i++){
        y0 = y0 + deltaY;
        glBegin(GL_LINES);
            glVertex3f(x0,y0,height);
            glVertex3f(x1,y0,height);
        glEnd();
    }

    glLineWidth(1.0);

    glEnable(GL_LIGHTING);
    glEnable(GL_LIGHT0);
    glLightfv(GL_LIGHT0, GL_POSITION, light_position);
    glLightfv(GL_LIGHT0, GL_AMBIENT, ambi);
    glLightfv(GL_LIGHT0, GL_DIFFUSE, lightZeroColor);

    glDisable( GL_LIGHTING );
    glDisable( GL_DEPTH_TEST );

    argDrawMode2D();

    return 0;
}
static void argCalibMainFunc(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             i, j;
    double          cfmax;
    double          err;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    target_visible = 0;

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        (*gCalibPostFunc)( NULL, NULL );
        arFittingMode = arFittingModeBak;
#ifndef ANDROID
        glutKeyboardFunc( gKeyFunc );
        glutMouseFunc( gMouseFunc );
        glutIdleFunc( gMainFunc );
        glutDisplayFunc( gMainFunc );
#endif
        return;
    }

    arVideoCapNext();

    glClearColor( 0.0, 0.0, 0.0, 0.0 );
#ifndef ANDROID
    glClear(GL_COLOR_BUFFER_BIT);
#endif

    /* if the debug mode is on draw squares
       around the detected squares in the video image */
    if( arDebug && gMiniXnum >= 2 && gMiniYnum >= 1 ) {
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 2, 1 );
        else
            argDispImage( arImage, 2, 1);

        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 3.0 );
        for( i = 0; i < marker_num; i++ ) {
            if( marker_info[i].id < 0 ) continue;
            argDrawSquare( marker_info[i].vertex, 2, 1 );
        }
        glLineWidth( 1.0 );
    }

    if( left_right == 0 ) argDraw2dLeft();
    else                  argDraw2dRight();
    glLineWidth( 3.0 );
    glColor3f( 1.0, 1.0, 1.0 );
    argLineSegHMD( 0, calib_pos[co1][1], AR_HMD_XSIZE, calib_pos[co1][1] );
    argLineSegHMD( calib_pos[co1][0], 0, calib_pos[co1][0], AR_HMD_YSIZE );
    glLineWidth( 1.0 );
    argDrawMode2D();

    cfmax = 0.0;
    j = -1;
    for( i = 0; i < marker_num; i++ ) {
        if( marker_info[i].id != target_id ) continue;
        if( marker_info[i].cf > cfmax ) {
            cfmax = marker_info[i].cf;
            j = i;
        }
    }
    if( j < 0 ) {
        argSwapBuffers();
        return;
    }

    err = arGetTransMat(&marker_info[j], target_center, target_width, target_trans);
    if( err >= 0.0 ) {
        target_visible = 1;

        if( left_right == 0 ) argDraw2dLeft();
        else                  argDraw2dRight();
        argDrawAttention( calib_pos[co1], co2 );
        argDrawMode2D();

        if( arDebug && gMiniXnum >= 2 && gMiniYnum >= 1 ) {
            glColor3f( 0.0, 1.0, 0.0 );
            glLineWidth( 3.0 );
            argDrawSquare( marker_info[j].vertex, 1, 1 );
            glLineWidth( 1.0 );
        }
    }

    argSwapBuffers();
}
/* main loop */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             i, j, k;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    /* draw the video */
    argDrawMode2D();
    argDispImage( dataPtr, 0,0 );

    /* capture the next video frame */
    arVideoCapNext();

    glColor3f( 1.0, 0.0, 0.0 );
    glLineWidth(6.0);

    /* detect the markers in the video frame */
    if(arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }
    for( i = 0; i < marker_num; i++ ) {
        argDrawSquare(marker_info[i].vertex,0,0);
    }

    /* check for known patterns */
    for( i = 0; i < objectnum; i++ ) {
        k = -1;
        for( j = 0; j < marker_num; j++ ) {
            if( object[i].id == marker_info[j].id) {
                /* you've found a pattern */
                //printf("Found pattern: %d ",patt_id);
                glColor3f( 0.0, 1.0, 0.0 );
                argDrawSquare(marker_info[j].vertex,0,0);

                if( k == -1 ) k = j;
                else /* make sure you have the best pattern (highest confidence factor) */
                    if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
        if( k == -1 ) {
            object[i].visible = 0;
            continue;
        }

        /* calculate the transform for each marker */
        if( object[i].visible == 0 ) {
            arGetTransMat(&marker_info[k], object[i].marker_center, object[i].marker_width, object[i].trans);
        }
        else {
            arGetTransMatCont(&marker_info[k], object[i].trans, object[i].marker_center, object[i].marker_width, object[i].trans);
        }
        object[i].visible = 1;
    }

    /* check for object collisions between marker 0 and 1 */
    if(object[0].visible && object[1].visible) {
        if(checkCollisions(object[0],object[1],COLLIDE_DIST)) {
            object[0].collide = 1;
            object[1].collide = 1;
        }
        else {
            object[0].collide = 0;
            object[1].collide = 0;
        }
    }

    /* draw the AR graphics */
    draw( object, objectnum );

    /* swap the graphics buffers */
    argSwapBuffers();
}
static void mainLoop( void )
{
    AR2VideoBufferT *videoBuffL;
    AR2VideoBufferT *videoBuffR;
    ARUint8         *dataPtrL;
    ARUint8         *dataPtrR;
    int              cornerFlagL;
    int              cornerFlagR;
    int              cornerCountL;
    int              cornerCountR;
    char             buf[256];
    int              i;

    if ((videoBuffL = ar2VideoGetImage(vidL))) {
        gVideoBuffL = videoBuffL;
    }
    if ((videoBuffR = ar2VideoGetImage(vidR))) {
        gVideoBuffR = videoBuffR;
    }

    if (gVideoBuffL && gVideoBuffR) {

        // Warn about significant time differences.
        i = ((int)gVideoBuffR->time_sec  - (int)gVideoBuffL->time_sec)  * 1000
          + ((int)gVideoBuffR->time_usec - (int)gVideoBuffL->time_usec) / 1000;
        if( i > 20 ) {
            ARLOG("Time diff = %d[msec]\n", i);
        } else if( i < -20 ) {
            ARLOG("Time diff = %d[msec]\n", i);
        }

        dataPtrL = gVideoBuffL->buff;
        dataPtrR = gVideoBuffR->buff;

        glClear(GL_COLOR_BUFFER_BIT);

        argDrawMode2D( vpL );
        argDrawImage( dataPtrL );
        argDrawMode2D( vpR );
        argDrawImage( dataPtrR );

        copyImage( dataPtrL, (ARUint8 *)calibImageL->imageData, xsizeL*ysizeL, pixFormatL );
        cornerFlagL = cvFindChessboardCorners(calibImageL, cvSize(chessboardCornerNumY,chessboardCornerNumX), cornersL, &cornerCountL, CV_CALIB_CB_ADAPTIVE_THRESH|CV_CALIB_CB_FILTER_QUADS );
        copyImage( dataPtrR, (ARUint8 *)calibImageR->imageData, xsizeR*ysizeR, pixFormatR );
        cornerFlagR = cvFindChessboardCorners(calibImageR, cvSize(chessboardCornerNumY,chessboardCornerNumX), cornersR, &cornerCountR, CV_CALIB_CB_ADAPTIVE_THRESH|CV_CALIB_CB_FILTER_QUADS );

        argDrawMode2D( vpL );
        if(cornerFlagL) glColor3f(1.0f, 0.0f, 0.0f);
        else            glColor3f(0.0f, 1.0f, 0.0f);
        glLineWidth(2.0f);
        //ARLOG("Detected corners = %d\n", cornerCount);
        for( i = 0; i < cornerCountL; i++ ) {
            argDrawLineByObservedPos(cornersL[i].x-5, cornersL[i].y-5, cornersL[i].x+5, cornersL[i].y+5);
            argDrawLineByObservedPos(cornersL[i].x-5, cornersL[i].y+5, cornersL[i].x+5, cornersL[i].y-5);
            //ARLOG("  %f, %f\n", cornersL[i].x, cornersL[i].y);
            sprintf(buf, "%d\n", i);
            argDrawStringsByObservedPos(buf, cornersL[i].x, cornersL[i].y+20);
        }

        argDrawMode2D( vpR );
        if(cornerFlagR) glColor3f(1.0f, 0.0f, 0.0f);
        else            glColor3f(0.0f, 1.0f, 0.0f);
        glLineWidth(2.0f);
        //ARLOG("Detected corners = %d\n", cornerCount);
        for( i = 0; i < cornerCountR; i++ ) {
            argDrawLineByObservedPos(cornersR[i].x-5, cornersR[i].y-5, cornersR[i].x+5, cornersR[i].y+5);
            argDrawLineByObservedPos(cornersR[i].x-5, cornersR[i].y+5, cornersR[i].x+5, cornersR[i].y-5);
            //ARLOG("  %f, %f\n", cornersR[i].x, cornersR[i].y);
            sprintf(buf, "%d\n", i);
            argDrawStringsByObservedPos(buf, cornersR[i].x, cornersR[i].y+20);
        }

        if( cornerFlagL && cornerFlagR ) {
            cornerFlag = 1;
            glColor3f(1.0f, 0.0f, 0.0f);
        }
        else {
            cornerFlag = 0;
            glColor3f(0.0f, 1.0f, 0.0f);
        }
        argDrawMode2D( vpL );
        sprintf(buf, "Captured Image: %2d/%2d\n", capturedImageNum, calibImageNum);
        argDrawStringsByIdealPos(buf, 10, 30);

        argSwapBuffers();

        gVideoBuffL = gVideoBuffR = NULL;

    } else arUtilSleep(2);
}
static void mainLoop(void)
{
    static AR2VideoBufferT buff = {0};
    static int      oldImageMode = -1;
    static int      oldDispMode = -1;
    static int      oldDistMode = -1;
    ARdouble        patt_trans[3][4];
    int             i, j;

    if (!buff.buff) {
        arMalloc(buff.buff, ARUint8, xsize*ysize*PIXEL_SIZE);
    }

    if( oldImageMode != 0 && imageMode == 0 ) {
        for( i = 0; i < xsize*ysize; i++ ) {
            buff.buff[i*PIXEL_SIZE+0] = 200;
            buff.buff[i*PIXEL_SIZE+1] = 200;
            buff.buff[i*PIXEL_SIZE+2] = 200;
        }
        for( j = 190; j < 291; j++ ) {
            for( i = 280; i < 381; i++ ) {
                buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 20;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 20;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 20;
            }
        }
        i = 0;
        for( j = 0; j < ysize; j++ ) {
            buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0;
        }
        i = 639;
        for( j = 0; j < ysize; j++ ) {
            buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0;
        }
        j = 0;
        for( i = 0; i < xsize; i++ ) {
            buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0;
        }
        j = 479;
        for( i = 0; i < xsize; i++ ) {
            buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0;
        }
        oldImageMode = 0;
    }
    if( oldImageMode != 1 && imageMode == 1 ) {
        for( j = 0; j < 480; j += 2 ) {
            for( i = 0; i < 640; i++ ) {
                buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0;
            }
        }
        for( j = 1; j < 480; j += 2 ) {
            for( i = 0; i < 640; i++ ) {
                buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 0;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 255;
            }
        }
        oldImageMode = 1;
    }
    if( oldImageMode != 2 && imageMode == 2 ) {
        for( i = 0; i < 640; i += 2 ) {
            for( j = 0; j < 480; j++ ) {
                buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0;
            }
        }
        for( i = 1; i < 640; i += 2 ) {
            for( j = 0; j < 480; j++ ) {
                buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 0;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 255;
            }
        }
        oldImageMode = 2;
    }
    if( oldImageMode != 3 && imageMode == 3 ) {
        for( i = 0; i < xsize*ysize; i++ ) {
            buff.buff[i*PIXEL_SIZE+0] = 200;
            buff.buff[i*PIXEL_SIZE+1] = 200;
            buff.buff[i*PIXEL_SIZE+2] = 200;
        }
        for( j = 190; j < 291; j++ ) {
            for( i = 280; i < 381; i++ ) {
                buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 20;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 20;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 20;
            }
        }
        oldImageMode = 3;
    }

    /* detect the markers in the video frame */
    if (arDetectMarker(arHandle, &buff) < 0) {
        cleanup();
        exit(0);
    }

    glClearColor( 0.0f, 0.0f, 0.0f, 0.0f );
    glClearDepth( 1.0f );
    glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);

    if( oldDispMode != 0 && dispMode == 0 ) {
        argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_GL_DRAW_PIXELS );
        oldDispMode = 0;
        debugReportMode(vp);
    }
    else if( oldDispMode != 1 && dispMode == 1 ) {
        argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME );
        oldDispMode = 1;
        debugReportMode(vp);
    }
    else if( oldDispMode != 2 && dispMode == 2 ) {
        argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FIELD );
        oldDispMode = 2;
        debugReportMode(vp);
    }
    if( oldDistMode != 0 && distMode == 0 ) {
        argViewportSetDistortionMode( vp, AR_GL_DISTORTION_COMPENSATE_DISABLE );
        oldDistMode = 0;
    }
    if( oldDistMode != 1 && distMode == 1 ) {
        argViewportSetDistortionMode( vp, AR_GL_DISTORTION_COMPENSATE_ENABLE );
        oldDistMode = 1;
    }

    argDrawMode2D(vp);
    argDrawImage(buff.buff);

    if( imageMode == 3 ) {
        glLineWidth( 1.0f );
        glColor3f( 0.0f, 1.0f, 0.0f );
        argDrawSquareByIdealPos( arHandle->markerInfo[0].vertex );

        glColor3f( 1.0f, 0.0f, 0.0f );
        argDrawLineByIdealPos(   0.0,   0.0, 640.0,   0.0 );
        argDrawLineByIdealPos(   0.0, 479.0, 640.0, 479.0 );
        argDrawLineByIdealPos(   0.0,  -1.0,   0.0, 479.0 );
        argDrawLineByIdealPos( 639.0,  -1.0, 639.0, 479.0 );
        argDrawLineByIdealPos(   0.0, 188.0, 639.0, 188.0 );
        argDrawLineByIdealPos(   0.0, 292.0, 639.0, 292.0 );
        argDrawLineByIdealPos( 278.0,   0.0, 278.0, 479.0 );
        argDrawLineByIdealPos( 382.0,   0.0, 382.0, 479.0 );
    }

    if( arHandle->marker_num == 0 ) {
        argSwapBuffers();
        return;
    }

    arGetTransMatSquare(ar3DHandle, &(arHandle->markerInfo[0]), SQUARE_WIDTH, patt_trans);
    draw(patt_trans);

    argSwapBuffers();
}