/* main loop */
static void mainLoop(void)
{
    int i, j, k;

    if( count == 100 ) {
        printf("*** %f (frame/sec)\n", (double)count/arUtilTimer());
        count = 0;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0, 0 );

    /* if the debug mode is on draw squares
       around the detected squares in the video image */
    if( arDebug ) {
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 2, 1 );
        else
            argDispImage( arImage, 2, 1 );

        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 3.0 );
        for( i = 0; i < marker_num; i++ ) {
            if( marker_info[i].id < 0 ) continue;
            argDrawSquare( marker_info[i].vertex, 2, 1 );
        }
        glLineWidth( 1.0 );
    }

    argSwapBuffers();
}
/* main loop */
static void mainLoop(void)
{
    ARUint8       *dataPtr;
    ARMarkerInfo  *marker_info;
    int            marker_num;
    int            j, k;

    if( (dataPtr = GetNextImage()) != NULL ) {
        if( count == 0 ) arUtilTimerReset();
        count++;

        argDrawMode2D();
        argDispImage( dataPtr, 0, 0 );

        /* detect the markers in the video frame */
        if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
            printf("couldn't find any marker\n");
            cleanup();
            exit(0);
        }
        //else printf("i found the marker");

        /* check for object visibility */
        k = -1;
        for( j = 0; j < marker_num; j++ ) {
            if( patt_id == marker_info[j].id ) {
                if( k == -1 ) k = j;
                else if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
        if( k == -1 ) {
            argSwapBuffers();
            return;
        }

        /* get the transformation between the marker and the real camera */
        arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);

        if( geometry == 0 ) {
            draw();
        } else {
            geometryout();
        }

        argSwapBuffers();
    }
    else {
        printf("Done - press any key\n");
        getchar();
        exit(0);
    }
}
/* main loop */
static void mainLoop(void)
{
    ARUint8       *dataPtr;
    ARMarkerInfo  *marker_info;
    int            marker_num;
    int            j, k;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0, 0 );

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    // additions from sheyne, trying to understand how it is working.
    // from documentation on http://artoolkit.sourceforge.net/apidoc/structARMarkerInfo.html
    // also look at: http://www.hitl.washington.edu/artoolkit/documentation/devframework.htm
    if( marker_num > 0 ) {    /* only print when a marker was actually detected */
        printf("pos: %f,%f\n", marker_info->pos[0], marker_info->pos[1]);
        for( j = 0; j < 4; j++ ) {
            printf("\t vertex: %f, %f\n", marker_info->vertex[j][0], marker_info->vertex[j][1]);
        }
    }

    arVideoCapNext();

    /* check for object visibility */
    k = -1;
    for( j = 0; j < marker_num; j++ ) {
        if( patt_id == marker_info[j].id ) {
            if( k == -1 ) k = j;
            else if( marker_info[k].cf < marker_info[j].cf ) k = j;
        }
    }
    if( k == -1 ) {
        argSwapBuffers();
        return;
    }

    /* get the transformation between the marker and the real camera */
    arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);

    draw();

    argSwapBuffers();
}
/* main loop */
static void mainLoop(void)
{
    static int     contF = 0;
    ARUint8       *dataPtr;
    ARMarkerInfo  *marker_info;
    int            marker_num;
    int            j, k;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0, 0 );

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }
    arVideoCapNext();

    /* check for object visibility */
    k = -1;
    for( j = 0; j < marker_num; j++ ) {
        if( patt_id == marker_info[j].id ) {
            if( k == -1 ) k = j;
            else if( marker_info[k].cf < marker_info[j].cf ) k = j;
        }
    }
    if( k == -1 ) {
        contF = 0;
        argSwapBuffers();
        return;
    }

    /* get the transformation between the marker and the real camera */
    if( mode == 0 || contF == 0 ) {
        arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);
    }
    else {
        arGetTransMatCont(&marker_info[k], patt_trans, patt_center, patt_width, patt_trans);
    }
    contF = 1;

    draw( patt_trans );

    argSwapBuffers();
}
static void mainLoop()
{
    ARUint8      *dataPtr;
    ARMarkerInfo *marker_info;
    int           marker_num;
    int           j, k;

    //Get video frame
    if ((dataPtr = (ARUint8 *)arVideoGetImage()) == NULL) {
        arUtilSleep(2);
        return;
    }
    if (count == 0) {
        arUtilTimerReset();
    }
    count++;

    //Display video stream
    argDrawMode2D();
    argDispImage(dataPtr, 0, 0);

    /* detect the markers in the video frame */
    if (arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0) {
        cleanup();
        exit(0);
    }

    //Get next video frame
    arVideoCapNext();

    /* check for object visibility */
    k = -1;
    for (j = 0; j < marker_num; j++) {
        if (patt_id == marker_info[j].id) {
            if (k == -1) k = j;
            else if (marker_info[k].cf < marker_info[j].cf) k = j;
        }
    }

    //Don't bother drawing or calculating orientations if no patterns are found
    if (k == -1) {
        argSwapBuffers();
        return;
    }

    /* get the transformation between the marker and the real camera */
    arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);

    draw();

    argSwapBuffers();
}
static void mainLoop(void)
{
    ARUint8       *dataPtr;
    ARMarkerInfo  *marker_info;
    int            marker_num;
    int            areamax;
    int            i;

    if( (dataPtr = (unsigned char *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    img_copy( dataPtr, image, xsize*ysize*AR_PIX_SIZE );
    arVideoCapNext();

    if( arDetectMarker(image, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    areamax = 0;
    target  = NULL;
    for( i = 0; i < marker_num; i++ ) {
        if( marker_info[i].area > areamax ) {
            areamax = marker_info[i].area;
            target  = &(marker_info[i]);
        }
    }

    argDispImage( image, 0, 0 );
    if( target != NULL ) {
        glLineWidth( 2.0 );
        glColor3d( 0.0, 1.0, 0.0 );
        argLineSeg( target->vertex[0][0], target->vertex[0][1],
                    target->vertex[1][0], target->vertex[1][1], 0, 0 );
        argLineSeg( target->vertex[3][0], target->vertex[3][1],
                    target->vertex[0][0], target->vertex[0][1], 0, 0 );
        glColor3d( 1.0, 0.0, 0.0 );
        argLineSeg( target->vertex[1][0], target->vertex[1][1],
                    target->vertex[2][0], target->vertex[2][1], 0, 0 );
        argLineSeg( target->vertex[2][0], target->vertex[2][1],
                    target->vertex[3][0], target->vertex[3][1], 0, 0 );
    }
    argSwapBuffers();

    return;
}
static void mainLoop(void)
{
    //maincount++;
    //if (maincount > 2) exit(0);

    argDrawMode2D();
    argDispImage( dataPtr, 0, 0 );

    draw();

    argSwapBuffers();
    sleep(1);

    return;    // (0);
}
void MainLoop(void)
{
    ARUint8      *image;
    ARMarkerInfo *marker_info;
    int           marker_num;
    int           j, k;

    if ((image = (ARUint8 *)arVideoGetImage()) == NULL) {
        arUtilSleep(2);
        return;
    }

    argDrawMode2D();
    argDispImage(image, 0, 0);

    if (arDetectMarker(image, thresh, &marker_info, &marker_num) < 0) {
        Cleanup();
        exit(0);
    }
    arVideoCapNext();

    k = -1;
    for (j = 0; j < marker_num; j++) {
        if (patt_id == marker_info[j].id) {
            if (k == -1) k = j;
            else if (marker_info[k].cf < marker_info[j].cf) k = j;
        }
    }

    if (k != -1) {
        arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);
        DrawObject();
    }

    argSwapBuffers();
}
/* main loop */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int              marker_num;
    float            curPaddlePos[3];
    int              i;
    double           err;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    /* detect the markers in the video frame */
    if( arDetectMarkerLite(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    argDrawMode2D();
    if( !arDebug ) {
        argDispImage( dataPtr, 0, 0 );
    }
    else {
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 0, 0 );
        else
            argDispImage( arImage, 0, 0 );

        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 1.0 );
        for( i = 0; i < marker_num; i++ ) {
            argDrawSquare( marker_info[i].vertex, 0, 0 );
        }
        glLineWidth( 1.0 );
    }
    arVideoCapNext();

    for( i = 0; i < marker_num; i++ ) marker_flag[i] = 0;

    /* get the paddle position */
    paddleGetTrans(paddleInfo, marker_info, marker_flag, marker_num, &cparam);

    /* draw the 3D models */
    glClearDepth( 1.0 );
    glClear(GL_DEPTH_BUFFER_BIT);

    /* draw the paddle, base and menu */
    if( paddleInfo->active ){
        draw_paddle( paddleInfo );
    }

    /* get the translation from the multimarker pattern */
    if( (err=arMultiGetTransMat(marker_info, marker_num, config)) < 0 ) {
        argSwapBuffers();
        return;
    }
    //printf("err = %f\n", err);
    if( err > 100.0 ) {
        argSwapBuffers();
        return;
    }

    //draw a red ground grid
    drawGroundGrid( config->trans, 20, 150.0f, 105.0f, 0.0f );

    /* find the paddle position relative to the base */
    findPaddlePosition(curPaddlePos, paddleInfo->trans, config->trans);

    /* check for collisions with targets */
    for( i = 0; i < TARGET_NUM; i++ ) {
        myTarget[i].state = NOT_TOUCHED;
        if( checkCollision(curPaddlePos, myTarget[i].pos, 20.0f) ) {
            myTarget[i].state = TOUCHED;
            fprintf(stderr, "touched !!\n");
        }
    }

    /* draw the targets */
    for( i = 0; i < TARGET_NUM; i++ ) {
        draw(myTarget[i], config->trans);
    }

    argSwapBuffers();
}
/* main loop */
static void mainLoop(void)
{
    ARUint8       *dataPtr;
    ARMarkerInfo  *marker_info;
    int            marker_num;
    int            j, k;
    int            i;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0, 0 );

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }
    arVideoCapNext();

    /* check for object visibility */
    for( i = 0; i < PTT_NUM; i++ ) {
        k = -1;
        for( j = 0; j < marker_num; j++ ) {
            if( object[i].patt_id == marker_info[j].id ) {
                if( k == -1 ) k = j;
                else if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
        if( k == -1 ) {
            /* marker not found */
            object[i].visible = 0;
            isFirst[i] = 1;
        }
        else {
            /* get the transformation between the marker and the real camera */
            if( isFirst[i] ) {
                arGetTransMat(&marker_info[k], object[i].patt_center, object[i].patt_width, object[i].patt_trans);
            } else {
                arGetTransMatCont(&marker_info[k], object[i].patt_trans, object[i].patt_center, object[i].patt_width, object[i].patt_trans);
            }
            object[i].visible = 1;
            isFirst[i] = 0;

            /* added */
            if( i == PTT2_MARK_ID ) {
                arUtilMatInv( object[PTT2_MARK_ID].patt_trans, itrans2 );    // compute the inverse matrix
            }
        }
    }

    //Initialize();   // fix me
    draw();

    argSwapBuffers();
}
void mainLoop()
{
    ARMarkerInfo *marker_info;
    ARUint8      *dataPtr;
    int           marker_num;
    int           k;

    if(!calib)    //special paycay florian
        cvReleaseImage(&image);
    if(!calib)
        detectColision();

    // Grab the video stream
    if ( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }

    // Switch to 2D mode to analyze the captured image
    argDrawMode2D();

    // Grab the OpenCV image, then convert it to an ARToolKit image
    //IplImage* imgTest;
    image = cvCreateImage(cvSize(xsize, ysize), IPL_DEPTH_8U, 4);
    image->imageData = (char *)dataPtr;
    // otherwise the image is flipped
    cvFlip(image, image, 1);

    // check whether the colors have already been calibrated:
    // if so, test for collisions; otherwise run the calibration
    interactionBoutton();
    if(calib)
        calibrage();
    else {
        updateColor();
        interactions();
    }

    // display the image on screen
    argDispImage( (unsigned char *)image->imageData, 0, 0 );

    // grab another frame, since we are done with the previous one
    arVideoCapNext();

    if (arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0) {
        printf("could not detect the marker\n");
        cleanup();
    }

    if(visible == false && !calib)    // UI element: shows whether we are still searching, and resets the motion of previously displayed objects
    {
        glEnable(GL_LIGHT0);
        objet1_x = 0; objet1_y = 0; objet2_x = 0; objet2_y = 0;
        if(scan.isVisible(0)==true)  scan.setVisible(false,0);
        if(scan.isVisible(1)==false) scan.setVisible(true,1);

        glColor3ub(255,0,0);
        texte(GLUT_BITMAP_HELVETICA_18,(char*)"Searching",cparam.xsize-100,cparam.ysize-30);
        if(alterne1==0 && alterne2 > 20)
        {
            glColor3ub(255,0,0);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Searching .",cparam.xsize-100,cparam.ysize-30);
            if(alterne2 > 30){ alterne2=0; alterne1=(alterne1+1)%3; }
        }
        if(alterne1==1 && alterne2 > 20)
        {
            glColor3ub(255,0,0);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Searching ..",cparam.xsize-100,cparam.ysize-30);
            if(alterne2 > 30){ alterne2=0; alterne1=(alterne1+1)%3; }
        }
        if(alterne1==2 && alterne2 > 20)
        {
            glColor3ub(255,0,0);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Searching ...",cparam.xsize-100,cparam.ysize-30);
            if(alterne2 > 30){ alterne2=0; alterne1=(alterne1+1)%3; }
        }
        alterne2 += 1;
        glDisable(GL_LIGHT0);
    }
    else if(calib)
    {
        if(couleur == 0)
        {
            glColor3ub(0,0,255);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Choose thumb's color",cparam.xsize-220,cparam.ysize-30);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"then press enter",cparam.xsize-220,cparam.ysize-(30+18));
        }
        else if(couleur == 1)
        {
            glColor3ub(0,255,0);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Choose forefinger's color",cparam.xsize-220,cparam.ysize-30);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"then press enter",cparam.xsize-220,cparam.ysize-(30+18));
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Press return for thumb",cparam.xsize-220,cparam.ysize-(30+18*2));
        }
        else
        {
            glColor3ub(255,0,0);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Choose middle's color",cparam.xsize-220,cparam.ysize-(30));
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"then press enter",cparam.xsize-220,cparam.ysize-(30+18));
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Press return for forefinger",cparam.xsize-220,cparam.ysize-(30+18*2));
        }
    }
    else    // switch to 3D mode and initialize the depth buffer
    {
        argDrawMode3D();
        argDraw3dCamera(0, 0);
        glClearDepth(1.0);
        glClear(GL_DEPTH_BUFFER_BIT);
    }

    /// Object visibility
    if(visible == false)    // if no pattern has been seen yet, or a pattern recapture was requested
    {
        // look for a visible object
        for (int i=0; i<2; i++)    // for each initialized pattern
        {
            k = -1;    // k tracks whether the marker is visible, and its index
            for (int j=0; j<marker_num; j++)    // for each marker found by arDetectMarker
            {
                if (object[i].patt_id == marker_info[j].id)
                {
                    if (k == -1) {
                        k = j;
                    }
                    else if (marker_info[k].cf < marker_info[j].cf) {
                        k = j;
                    }
                }
            }
            object[i].visible = k;
            if (k >= 0)
            {
                visible = true;
                arGetTransMat(&marker_info[k], object[i].center, object[i].width, object[i].trans);
                printf("object[%d] center[%f, %f]\n", i, marker_info[k].pos[0], marker_info[k].pos[1]);
                printf("object[%d] hg[%f, %f]\n", i, marker_info[k].vertex[0][0], marker_info[k].vertex[0][1]);
                printf("object[%d] hd[%f, %f]\n", i, marker_info[k].vertex[1][0], marker_info[k].vertex[1][1]);
                printf("object[%d] bg[%f, %f]\n", i, marker_info[k].vertex[2][0], marker_info[k].vertex[2][1]);
                printf("object[%d] bd[%f, %f]\n", i, marker_info[k].vertex[3][0], marker_info[k].vertex[3][1]);

                // update button state
                if(scan.isVisible(0)==false) scan.setVisible(true,0);
                if(scan.isVisible(1)==true)  scan.setVisible(false,1);

                // once a pattern has been seen, create a new instance of the object associated
                // with that pattern and store it among the on-screen objects
                onscreen_object.push_back(Object3D(mesh.at(object[i].model_id), object[i].center, object[i].trans, object[i].width));
            }
        }
    }

    // since we no longer key off a pattern's "visible" flag, we always call display():
    // either the vector is empty, or it holds one or more objects to draw
    display(true);

    if(menuShow==true) menu.show();
    if(!calib) scan.show();
    help.show();
    quit.show();

    argSwapBuffers();    /// display the image in the GUI
}
static void mainLoop(void)
{
    ARUint8       *dataPtr;
    ARMarkerInfo  *marker_info;
    int            marker_num;
    int            j, k;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }

    glClearColor( 0.0, 0.0, 0.0, 0.0 );
    glClearDepth( 1.0 );
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    argDrawMode2D();
    if( disp_mode ) {
        argDispImage( dataPtr, 0, 0 );
    }
    else {
        argDispImage( dataPtr, 1, 1 );
    }

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }
    arVideoCapNext();

    /* if the debug mode is on draw squares
       around the detected squares in the video image */
    if( arDebug ) {
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 2, 1 );
        else
            argDispImage( arImage, 2, 1 );
    }

    /* check for object visibility */
    k = -1;
    for( j = 0; j < marker_num; j++ ) {
        if( marker_info[j].id == target_id ) {
            if( k == -1 ) k = j;
            else {
                if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
    }

    if( k != -1 ) {
        glDisable(GL_DEPTH_TEST);
        switch( outputMode ) {
            case 0:
                getResultRaw( &marker_info[k] );
                break;
            case 1:
                getResultQuat( &marker_info[k] );
                break;
        }
    }

    argSwapBuffers();
}
/* main loop */
static void mainLoop(void)
{
    ARUint8       *dataPtr;
    ARMarkerInfo  *marker_info;
    int            marker_num;
    int            i, j, k;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0, 0 );

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }
    arVideoCapNext();

    argDrawMode3D();
    argDraw3dCamera( 0, 0 );
    glClearDepth( 1.0 );
    glClear(GL_DEPTH_BUFFER_BIT);

    /* check for object visibility */
    for( i = 0; i < 2; i++ ) {
        k = -1;
        for( j = 0; j < marker_num; j++ ) {
            if( object[i].patt_id == marker_info[j].id ) {
                if( k == -1 ) k = j;
                else if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
        object[i].visible = k;
        if( k >= 0 ) {
            arGetTransMat(&marker_info[k], object[i].center, object[i].width, object[i].trans);
            draw( object[i].model_id, object[i].trans );
        }
    }
    argSwapBuffers();

    if( object[0].visible >= 0 && object[1].visible >= 0 ) {
        double wmat1[3][4], wmat2[3][4];

        arUtilMatInv(object[0].trans, wmat1);
        arUtilMatMul(wmat1, object[1].trans, wmat2);

        for( j = 0; j < 3; j++ ) {
            for( i = 0; i < 4; i++ ) printf("%8.4f ", wmat2[j][i]);
            printf("\n");
        }
        printf("\n\n");
    }
}
static void argCalibMainFunc(void)
{
    ARUint8       *dataPtr;
    ARMarkerInfo  *marker_info;
    int            marker_num;
    int            i, j;
    double         cfmax;
    double         err;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    target_visible = 0;

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        (*gCalibPostFunc)( NULL, NULL );
        arFittingMode = arFittingModeBak;
#ifndef ANDROID
        glutKeyboardFunc( gKeyFunc );
        glutMouseFunc( gMouseFunc );
        glutIdleFunc( gMainFunc );
        glutDisplayFunc( gMainFunc );
#endif
        return;
    }
    arVideoCapNext();

    glClearColor( 0.0, 0.0, 0.0, 0.0 );
#ifndef ANDROID
    glClear(GL_COLOR_BUFFER_BIT);
#endif

    /* if the debug mode is on draw squares
       around the detected squares in the video image */
    if( arDebug && gMiniXnum >= 2 && gMiniYnum >= 1 ) {
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 2, 1 );
        else
            argDispImage( arImage, 2, 1 );

        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 3.0 );
        for( i = 0; i < marker_num; i++ ) {
            if( marker_info[i].id < 0 ) continue;
            argDrawSquare( marker_info[i].vertex, 2, 1 );
        }
        glLineWidth( 1.0 );
    }

    if( left_right == 0 ) argDraw2dLeft();
    else                  argDraw2dRight();
    glLineWidth( 3.0 );
    glColor3f( 1.0, 1.0, 1.0 );
    argLineSegHMD( 0, calib_pos[co1][1], AR_HMD_XSIZE, calib_pos[co1][1] );
    argLineSegHMD( calib_pos[co1][0], 0, calib_pos[co1][0], AR_HMD_YSIZE );
    glLineWidth( 1.0 );
    argDrawMode2D();

    cfmax = 0.0;
    j = -1;
    for( i = 0; i < marker_num; i++ ) {
        if( marker_info[i].id != target_id ) continue;
        if( marker_info[i].cf > cfmax ) {
            cfmax = marker_info[i].cf;
            j = i;
        }
    }
    if( j < 0 ) {
        argSwapBuffers();
        return;
    }

    err = arGetTransMat(&marker_info[j], target_center, target_width, target_trans);
    if( err >= 0.0 ) {
        target_visible = 1;

        if( left_right == 0 ) argDraw2dLeft();
        else                  argDraw2dRight();
        argDrawAttention( calib_pos[co1], co2 );
        argDrawMode2D();

        if( arDebug && gMiniXnum >= 2 && gMiniYnum >= 1 ) {
            glColor3f( 0.0, 1.0, 0.0 );
            glLineWidth( 3.0 );
            argDrawSquare( marker_info[j].vertex, 1, 1 );
            glLineWidth( 1.0 );
        }
    }

    argSwapBuffers();
}
/* main loop */
static void mainLoop(void)
{
    static int     contF = 0;
    ARUint8       *dataPtr;
    ARMarkerInfo  *marker_info;
    int            marker_num;
    int            j, k;

    // update to the newest data
    g_MyKinect.Update();

#ifdef USE_USERDETECTOR
    if( g_MyKinect.userStatus.isPlayerVisible() ) {
        XV3 tmp = g_MyKinect.userDetector->getSkeletonJointPosition(XN_SKEL_RIGHT_HAND);
        printf("Right hand position: %.2f %.2f %.2f\n", tmp.X, tmp.Y, tmp.Z);
    }
#endif

    if( drawFromKinect ) {
        // get image data to detect the marker
        if( (dataPtr = (ARUint8 *)g_MyKinect.GetBGRA32Image()) == NULL ) {
            arUtilSleep(2);
            return;
        }
    }
    else {
        /* grab a video frame */
        if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
            arUtilSleep(2);
            return;
        }
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    if( drawFromKinect ) {
        // optional: several display modes are available (image, depth by color, depth mixed image)
        if( displayMode == 2 )
            dataPtr = (ARUint8 *)g_MyKinect.GetDepthDrewByColor();
        else if( displayMode == 3 )
            dataPtr = (ARUint8 *)g_MyKinect.GetDepthMixedImage();
    }

    argDrawMode2D();
    argDispImage( dataPtr, 0, 0 );
    arVideoCapNext();

    /* check for object visibility */
    k = -1;
    for( j = 0; j < marker_num; j++ ) {
        if( patt_id == marker_info[j].id ) {
            if( k == -1 ) k = j;
            else if( marker_info[k].cf < marker_info[j].cf ) k = j;
        }
    }
    if( k == -1 ) {
        contF = 0;
        argSwapBuffers();
        return;
    }

    /* get the transformation between the marker and the real camera */
    if( mode == 0 || contF == 0 ) {
        arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);
    }
    else {
        arGetTransMatCont(&marker_info[k], patt_trans, patt_center, patt_width, patt_trans);
    }
    contF = 1;

    draw( patt_trans );

    argSwapBuffers();
}
/* main loop */
static void mainLoop(void)
{
    ARUint8       *dataPtr;
    ARMarkerInfo  *marker_info;
    int            marker_num;
    double         err;
    int            i;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    /* detect the markers in the video frame */
    if( arDetectMarkerLite(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    argDrawMode2D();
    if( !arDebug ) {
        argDispImage( dataPtr, 0, 0 );
    }
    else {
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 0, 0 );
        else
            argDispImage( arImage, 0, 0 );

        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 1.0 );
        for( i = 0; i < marker_num; i++ ) {
            argDrawSquare( marker_info[i].vertex, 0, 0 );
        }
        glLineWidth( 1.0 );
    }
    arVideoCapNext();

    if( (err=arMultiGetTransMat(marker_info, marker_num, config)) < 0 ) {
        argSwapBuffers();
        return;
    }
    printf("err = %f\n", err);
    if( err > 100.0 ) {
        argSwapBuffers();
        return;
    }
/*
    for(i=0;i<3;i++) {
        for(j=0;j<4;j++) printf("%10.5f ", config->trans[i][j]);
        printf("\n");
    }
    printf("\n");
*/

    argDrawMode3D();
    argDraw3dCamera( 0, 0 );
    glClearDepth( 1.0 );
    glScalef(1.0, 1.0, 5.0);
    glClear(GL_DEPTH_BUFFER_BIT);

    // draw the ghost buildings (with and without markers)
    if( mostraFantasmas == 1 ) {
        desenhaFantasmasSemTag();

        // draw ghost buildings whose markers were identified
        glColorMask(GL_FALSE, GL_FALSE, GL_FALSE, GL_FALSE);
        for( i = (config->marker_num) - 3; i < config->marker_num; i++ ) {
            if( config->marker[i].visible >= 0 ) {
                glScalef(1.0, 1.0, 2.0);
                draw( config->trans, config->marker[i].trans, 0 );
                glScalef(1.0, 1.0, 0.5);
            } else {
                glScalef(1.0, 1.0, 2.0);
                draw( config->trans, config->marker[i].trans, 1 );
                glScalef(1.0, 1.0, 0.5);
            }
        }
        glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
    }

    desenhaCarros();

    for( i = 0; i < (config->marker_num) - 3; i++ ) {
        if( config->marker[i].visible >= 0 )
            draw( config->trans, config->marker[i].trans, 0 );
        else
            draw( config->trans, config->marker[i].trans, 1 );
    }

    argSwapBuffers();
}
void findMarkers(void)
{
    ARMarkerInfo *marker_info;
    int           marker_num;
    int           j, k;

    /* grab a video frame */
    //if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
    //if( count == 0 ) arUtilTimerReset();
    //count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0, 0 );

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    /* check for object visibility */
    k = -1;
    for( j = 0; j < marker_num; j++ ) {
        if( patt_id == marker_info[j].id ) {
            if( k == -1 ) k = j;
            else if( marker_info[k].cf < marker_info[j].cf ) k = j;
        }
    }

    if( k == -1 ) {
        argSwapBuffers();
        //fprintf(stderr,"no visible objects\n");
#if 0
        int i;
        for (i = 0; i < 4; i++) {
            for (j = 0; j < 3; j++) {
                fprintf(stdout, "0,\t");
            }
            //fprintf("\n");
        }
        fprintf(stdout, "\n");
#endif
        //return;
        int i;
        for (i = 0; i < 12; i++) {
            fprintf(stdout, "0,\t");
        }
        fprintf(stdout, "\n");
        //
    } else {
        //fprintf("patt_trans\n");
        /* get the transformation between the marker and the real camera */
        arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);
        /// what is patt_center? it seems to be zeros
        //fprintf("%f,\t%f,\t", patt_center[0], patt_center[1]);
        fprintf(stdout, "%g,\t%g,\n", marker_info[k].pos[0], marker_info[k].pos[1]);

        int i;
        for (j = 0; j < 3; j++) {
            for (i = 0; i < 4; i++) {
                fprintf(stdout, "%f,\t", patt_trans[j][i]);
            }
            printf("\t");
        }
        fprintf(stdout, "\n");
        //draw();
    }
}
/* main loop */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int              marker_num;
    float            curPaddlePos[3];
    int              i;
    double           err;
    double           angle;

    err = 0.;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    /* detect the markers in the video frame */
    if( arDetectMarkerLite(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    argDrawMode2D();
    if( !arDebug ) {
        argDispImage( dataPtr, 0, 0 );
    }
    else {
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 0, 0 );
        else
            argDispImage( arImage, 0, 0 );

        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 1.0 );
        for( i = 0; i < marker_num; i++ ) {
            argDrawSquare( marker_info[i].vertex, 0, 0 );
        }
        glLineWidth( 1.0 );
    }
    arVideoCapNext();

    for( i = 0; i < marker_num; i++ ) marker_flag[i] = 0;

    /* get the paddle position */
    paddleGetTrans(paddleInfo, marker_info, marker_flag, marker_num, &cparam);

    /* draw the 3D models */
    glClearDepth( 1.0 );
    glClear(GL_DEPTH_BUFFER_BIT);

    /* get the translation from the multimarker pattern */
    if( (err=arMultiGetTransMat(marker_info, marker_num, config)) < 0 ) {
        argSwapBuffers();
        return;
    }
    // printf("err = %f\n", err);
    if( err > 100.0 ) {
        argSwapBuffers();
        return;
    }

    //draw a red ground grid
    drawGroundGrid( config->trans, 15, 150.0, 110.0, 0.0 );

    /* find the paddle position relative to the base */
    if( paddleInfo->active )
        findPaddlePosition(curPaddlePos, paddleInfo->trans, config->trans);

    /* checking for paddle gesture */
    if( paddleInfo->active ) {
        int findItem = -1;
        if( myPaddleItem.item != -1 ) {
            if( check_incline(paddleInfo->trans, config->trans, &angle) ) {
                myPaddleItem.x += 2.0 * cos(angle);
                myPaddleItem.y += 2.0 * sin(angle);
                if( myPaddleItem.x*myPaddleItem.x + myPaddleItem.y*myPaddleItem.y > 900.0 ) {
                    myPaddleItem.x -= 2.0 * cos(angle);
                    myPaddleItem.y -= 2.0 * sin(angle);
                    myListItem.item[myPaddleItem.item].onpaddle = 0;
                    myListItem.item[myPaddleItem.item].pos[0] = curPaddlePos[0];
                    myListItem.item[myPaddleItem.item].pos[1] = curPaddlePos[1];
                    myPaddleItem.item = -1;
                }
            }
        }
        else {
            if( (findItem = check_pickup(paddleInfo->trans, config->trans, &myListItem, &angle)) != -1 ) {
                myPaddleItem.item  = findItem;
                myPaddleItem.x     = 0.0;
                myPaddleItem.y     = 0.0;
                myPaddleItem.angle = 0.0;
                myListItem.item[myPaddleItem.item].onpaddle = 1;
            }
        }
    }

    /* draw the item */
    drawItems(config->trans, &myListItem);

    /* draw the paddle */
    if( paddleInfo->active ){
        draw_paddle(paddleInfo, &myPaddleItem);
    }

    argSwapBuffers();
}
void MainLoop()
{
    //QueryPerformanceFrequency(&nFreq);
    //QueryPerformanceCounter(&nBefore);
#ifdef _WIN32
    DWORD  StartTime, EndTime, PassTime;
#else
    double l_StartTime, l_EndTime, l_PassTime;
#endif

#ifdef _WIN32
    StartTime = timeGetTime();
#else
    l_StartTime = gettimeofday_sec();
#endif

    ARUint8      *image;
    ARMarkerInfo *marker_info;
    int           marker_num;
    int           j, k;

    if( (image = (ARUint8 *)arVideoGetImage()) == NULL ){
        arUtilSleep(2);
        return;
    }

    argDrawMode2D();
    argDispImage(image, 0, 0);

    if( arDetectMarker(image, thresh, &marker_info, &marker_num) < 0 ){
        CleanUp();
        exit(0);
    }
    arVideoCapNext();

    k = -1;
    for( j = 0; j < marker_num; j++ ){
        if( patt_id == marker_info[j].id ){
            k = (k == -1) ? j : k;
            k = (marker_info[k].cf < marker_info[j].cf) ? j : k;
        }
    }

    if( k != -1 ) {
        if( isFirst == true )
            nyar_NyARTransMat_O2_transMat(nyobj, &marker_info[k], patt_center, patt_width, patt_trans);
        else
            nyar_NyARTransMat_O2_transMatCont(nyobj, &marker_info[k], patt_trans, patt_center, patt_width, patt_trans);
        isFirst = false;

        if( GameOver == false ){
            if( arUtilTimer() > 1.0 ){
                MovePiece(3, f, p);
                score += f.ShiftPiece(f.deletePiece());
                arUtilTimerReset();
                GameOver = GameOverCheck(f, p);
            }
        }
        else{
            if( arUtilTimer() > 15.0 ) InitGame();
        }
        DrawObject();
    }

    argSwapBuffers();

#ifdef _WIN32
    EndTime  = timeGetTime();
    PassTime = EndTime - StartTime;
    (1000/FPS > PassTime) ? Wait(1000/FPS - PassTime) : Wait(0);
    FPSCount(&fps);
    printf("FPS=%d\n", fps);
#else
    l_EndTime  = gettimeofday_sec();
    l_PassTime = l_EndTime - l_StartTime;
    ((double)(1000/FPS) > l_PassTime) ? Wait((double)1000/FPS - l_PassTime) : Wait(0);
    FPSCount(&fps);
    printf("FPS=%d\n", fps);
#endif
}
/* main loop */
static void mainLoop(void)
{
    ARUint8       *dataPtr;
    ARMarkerInfo  *marker_info;
    int            marker_num;
    double         err;
    int            i;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    /* detect the markers in the video frame */
    if( arDetectMarkerLite(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    argDrawMode2D();
    if( !arDebug ) {
        argDispImage( dataPtr, 0, 0 );
    }
    else {
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 0, 0 );
        else
            argDispImage( arImage, 0, 0 );

        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 1.0 );
        for( i = 0; i < marker_num; i++ ) {
            argDrawSquare( marker_info[i].vertex, 0, 0 );
        }
        glLineWidth( 1.0 );
    }
    arVideoCapNext();

    if( (err=arMultiGetTransMat(marker_info, marker_num, config)) < 0 ) {
        argSwapBuffers();
        return;
    }
    printf("err = %f\n", err);
    if( err > 100.0 ) {
        argSwapBuffers();
        return;
    }
/*
    for(i=0;i<3;i++) {
        for(j=0;j<4;j++) printf("%10.5f ", config->trans[i][j]);
        printf("\n");
    }
    printf("\n");
*/

    argDrawMode3D();
    argDraw3dCamera( 0, 0 );
    glClearDepth( 1.0 );
    glClear(GL_DEPTH_BUFFER_BIT);

    for( i = 0; i < config->marker_num; i++ ) {
        if( config->marker[i].visible >= 0 )
            draw( config->trans, config->marker[i].trans, 0 );
        else
            draw( config->trans, config->marker[i].trans, 1 );
    }

    argSwapBuffers();
}
/* main loop */
static void mainLoop(void)
{
    ARUint8       *dataPtr;
    ARMarkerInfo  *marker_info;
    int            marker_num;
    int            i, j, k;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    /* draw the video */
    argDrawMode2D();
    argDispImage( dataPtr, 0, 0 );

    /* capture the next video frame */
    arVideoCapNext();

    glColor3f( 1.0, 0.0, 0.0 );
    glLineWidth(6.0);

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }
    for( i = 0; i < marker_num; i++ ) {
        argDrawSquare(marker_info[i].vertex, 0, 0);
    }

    /* check for known patterns */
    for( i = 0; i < objectnum; i++ ) {
        k = -1;
        for( j = 0; j < marker_num; j++ ) {
            if( object[i].id == marker_info[j].id ) {
                /* you've found a pattern */
                //printf("Found pattern: %d ",patt_id);
                glColor3f( 0.0, 1.0, 0.0 );
                argDrawSquare(marker_info[j].vertex, 0, 0);

                if( k == -1 ) k = j;
                else /* make sure you have the best pattern (highest confidence factor) */
                    if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
        if( k == -1 ) {
            object[i].visible = 0;
            continue;
        }

        /* calculate the transform for each marker */
        if( object[i].visible == 0 ) {
            arGetTransMat(&marker_info[k], object[i].marker_center, object[i].marker_width, object[i].trans);
        }
        else {
            arGetTransMatCont(&marker_info[k], object[i].trans, object[i].marker_center, object[i].marker_width, object[i].trans);
        }
        object[i].visible = 1;
    }

    /* check for object collisions between marker 0 and 1 */
    if( object[0].visible && object[1].visible ) {
        if( checkCollisions(object[0], object[1], COLLIDE_DIST) ) {
            object[0].collide = 1;
            object[1].collide = 1;
        } else {
            object[0].collide = 0;
            object[1].collide = 0;
        }
    }

    /* draw the AR graphics */
    draw( object, objectnum );

    /* swap the graphics buffers */
    argSwapBuffers();
}
//=======================================================
// Main loop function
//=======================================================
void MainLoop(void)
{
    ARUint8      *image;          // captured camera image
    ARMarkerInfo *marker_info;    // marker information
    int           marker_num;     // number of detected markers
    int           i, j, k;

    // grab a camera image
    if( (image = (ARUint8 *)arVideoGetImage()) == NULL ){
        arUtilSleep( 2 );
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    // draw the camera image
    argDrawMode2D();
    argDispImage( image, 0, 0 );

    // detect and identify the markers
    if( arDetectMarker( image, thresh, &marker_info, &marker_num ) < 0 ){
        Cleanup();
        exit(0);
    }

    // request capture of the next image
    arVideoCapNext();

    // prepare for drawing 3D objects
    argDrawMode3D();
    argDraw3dCamera( 0, 0 );
    glClearDepth(1.0);                  // depth buffer clear value
    glClear( GL_DEPTH_BUFFER_BIT );     // clear the depth buffer

    if( movex[0] != 0 && movex[3] != 0 && movex[7] != 0 ){
        rmove++;
        if( rmove != 0 ){
            Drawnashi( marker[3].mark_id, marker[3].patt_trans );
        }
        if( rmove > 40.0 ){
            rmove = 0.0;
            for( int i = 0; i < MARK_NUM; i++ ){
                movex[i] = 0;
            }
        }
    }else{
        // compare marker confidence values
        for( i = 0; i < MARK_NUM; i++ ){
            k = -1;
            for( j = 0; j < marker_num; j++ ){
                if( marker[i].patt_id == marker_info[j].id ){
                    if( k == -1 ) k = j;
                    else if( marker_info[k].cf < marker_info[j].cf ) k = j;
                }
            }

            // when the marker was not found
            if( k == -1 ){
                if( marker[i].visible != 0 ){
                    midi_out(i+1);
                    midi_stop(i+1);
                    movex[i] = 1;
                    marker[i].visible = 0;
                }else if( movex[i] != 0 ){
                    DrawObject( marker[i].mark_id, marker[i].patt_trans, i );
                }
            }else{
                // obtain the transformation matrix
                if( marker[i].visible == 0 ) {
                    // compute the marker position/orientation (transformation matrix) from this frame alone
                    arGetTransMat( &marker_info[k], marker[i].patt_center, marker[i].patt_width, marker[i].patt_trans );
                    // reportedly more stable to use arGetTransMat on the first detection and arGetTransMatCont from the second frame on
                    marker[i].visible = 1;
                } else {
                    // compute the marker position/orientation (transformation matrix) using the previous frame as well
                    arGetTransMatCont( &marker_info[k], marker[i].patt_trans, marker[i].patt_center, marker[i].patt_width, marker[i].patt_trans );
                }

                // draw the 3D object
                if( movex[i] != 0 ){
                    DrawObject( marker[i].mark_id, marker[i].patt_trans, i );
                }
            }

            if( movex[i] >= 40.0 ) movex[i] = 0;
            if( movex[i] != 0 ) movex[i]++;
        }
    }

    // show the buffer contents on screen
    argSwapBuffers();
}