/* main loop */ static void mainLoop(void) { ARUint8 *dataPtr; ARMarkerInfo *marker_info; int marker_num; int j, k; /* grab a vide frame */ if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) { arUtilSleep(2); return; } if( count == 0 ) arUtilTimerReset(); count++; argDrawMode2D(); argDispImage( dataPtr, 0,0 ); /* detect the markers in the video frame */ if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) { cleanup(); exit(0); } //additions from sheyne, trying to understand how it is working. //from documentation on http://artoolkit.sourceforge.net/apidoc/structARMarkerInfo.html //also look at: http://www.hitl.washington.edu/artoolkit/documentation/devframework.htm printf("pos: %f,%f\n", marker_info->pos[0], marker_info->pos[1]); for (j=0; j<4; j++) { printf("\t vertex: %f, %f\n", marker_info->vertex[j][0],marker_info->vertex[j][1]); } arVideoCapNext(); /* check for object visibility */ k = -1; for( j = 0; j < marker_num; j++ ) { if( patt_id == marker_info[j].id ) { if( k == -1 ) k = j; else if( marker_info[k].cf < marker_info[j].cf ) k = j; } } if( k == -1 ) { argSwapBuffers(); return; } /* get the transformation between the marker and the real camera */ arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans); draw(); argSwapBuffers(); }
/* main loop */
/* Grabs a frame, detects markers, and renders on the best match of patt_id.
   Once a pose has been computed, uses continuous estimation (previous pose
   as a hint) unless mode == 0 forces one-shot estimation every frame. */
static void mainLoop(void)
{
    static int      contF = 0;      /* non-zero once a pose exists; enables arGetTransMatCont */
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             j, k;

    /* grab a vide frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0,0 );

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    arVideoCapNext();

    /* check for object visibility: keep the highest-confidence detection of patt_id */
    k = -1;
    for( j = 0; j < marker_num; j++ ) {
        if( patt_id == marker_info[j].id ) {
            if( k == -1 ) k = j;
            else if( marker_info[k].cf < marker_info[j].cf ) k = j;
        }
    }
    if( k == -1 ) {
        contF = 0;      /* marker lost: force one-shot estimation when it reappears */
        argSwapBuffers();
        return;
    }

    /* get the transformation between the marker and the real camera;
       the continuous variant reuses patt_trans from the previous frame */
    if( mode == 0 || contF == 0 ) {
        arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);
    }
    else {
        arGetTransMatCont(&marker_info[k], patt_trans, patt_center, patt_width, patt_trans);
    }
    contF = 1;

    draw( patt_trans );

    argSwapBuffers();
}
//================== // マーカー検出処理 //================== bool cARTK::update( void ) { ARUint8 *pImage; ARMarkerInfo *pMarkerInfo; int iNumDetectedMarker; // カメラ画像の取得 if( (pImage = arVideoGetImage()) != NULL ) { m_pARTImage = pImage; m_bMarkersFound = false; // カメラ画像からマーカーを検出 if( arDetectMarker( m_pARTImage, m_iThreshold, &pMarkerInfo, &iNumDetectedMarker ) < 0 ) { exit( -1 ); } // 検出されたマーカー情報の中から一番信頼性の高いものを探す for( int i = 0 ; i < ARMK_MAXNUM ; i++ ) { int k = -1; for( int j = 0 ; j < iNumDetectedMarker ; j++ ) { if( pMarkerInfo[j].id == m_sMarkerInfo[i].iPattID ) { if( k == -1 || pMarkerInfo[j].cf > pMarkerInfo[k].cf ) k = j; } } if( k != -1 ) { // カメラのトランスフォーム行列を取得 if( !m_sMarkerInfo[i].bVisible ) nyar_NyARTransMat_O2_transMat( m_pNyARInst, &(pMarkerInfo[k]), m_sMarkerInfo[i].dCenterPos, m_sMarkerInfo[i].dWidth, m_sMarkerInfo[i].dTransMat ); else nyar_NyARTransMat_O2_transMatCont( m_pNyARInst, &(pMarkerInfo[k]), m_sMarkerInfo[i].dTransMat, m_sMarkerInfo[i].dCenterPos, m_sMarkerInfo[i].dWidth, m_sMarkerInfo[i].dTransMat ); m_sMarkerInfo[i].bVisible = true; m_bMarkersFound = true; } else { m_sMarkerInfo[i].bVisible = false; } } return true; } arUtilSleep( 2 ); return false; }
static void mainLoop() { ARUint8 *dataPtr; ARMarkerInfo *marker_info; int marker_num; int j, k; //Get video frame if ((dataPtr = (ARUint8 *)arVideoGetImage()) == NULL) { arUtilSleep(2); return; } if (count == 0) { arUtilTimerReset(); } count++; //Display video stream argDrawMode2D(); argDispImage(dataPtr, 0, 0); /* detect the markers in the video frame */ if (arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0) { cleanup(); exit(0); } //Get next video frame arVideoCapNext(); /* check for object visibility */ k = -1; for (j = 0; j < marker_num; j++) { if (patt_id == marker_info[j].id) { if (k == -1) k = j; else if (marker_info[k].cf < marker_info[j].cf) k = j; } } //Don't bother drawing or calculating orientations if no patterns are found if (k == -1) { argSwapBuffers(); return; } /* get the transformation between the marker and the real camera */ arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans); draw(); argSwapBuffers(); }
/* Redraw callback for the calibration tool.  Behaviour follows the global
   state machine:
     status == 0 : live video preview
     status == 1 : point marking on the most recently grabbed image
     status == 2 : review of stored images with their measured points */
static void dispImage(void)
{
    AR2VideoBufferT *buff;
    double x, y;
    int ssx, eex, ssy, eey;
    int i;

    if( status == 0 ) {
        /* Wait (with short sleeps) until a filled frame is available. */
        while (!(buff = arVideoGetImage()) || !buff->fillFlag) arUtilSleep(2);
        argDrawMode2D( vp );
        argDrawImage(buff->buff);
    }
    else if( status == 1 ) {
        argDrawMode2D( vp );
        argDrawImage( patt.savedImage[patt.loop_num-1] );
        /* Draw a red cross over every point measured so far. */
        for( i = 0; i < point_num; i++ ) {
            x = patt.point[patt.loop_num-1][i].x_coord;
            y = patt.point[patt.loop_num-1][i].y_coord;
            glColor3f( 1.0f, 0.0f, 0.0f );
            argDrawLineByObservedPos( x-10, y, x+10, y );
            argDrawLineByObservedPos( x, y-10, x, y+10 );
        }
        /* If a rubber-band selection is active, show its thresholded clip. */
        if( sx != -1 && sy != -1 ) {
            /* Normalise the selection rectangle corners. */
            if( sx < ex ) { ssx = sx; eex = ex; }
            else          { ssx = ex; eex = sx; }
            if( sy < ey ) { ssy = sy; eey = ey; }
            else          { ssy = ey; eey = sy; }
            dispClipImage( ssx, ssy, eex-ssx+1, eey-ssy+1, clipImage );
        }
    }
    else if( status == 2 ) {
        argDrawMode2D( vp );
        argDrawImage( patt.savedImage[check_num] );
        /* Cross out every grid point of the image under review. */
        for( i = 0; i < patt.h_num*patt.v_num; i++ ) {
            x = patt.point[check_num][i].x_coord;
            y = patt.point[check_num][i].y_coord;
            glColor3f( 1.0f, 0.0f, 0.0f );
            argDrawLineByObservedPos( x-10, y, x+10, y );
            argDrawLineByObservedPos( x, y-10, x, y+10 );
        }
        draw_line();
    }

    argSwapBuffers();
}
//================== // マーカー検出処理 //================== bool cARTK::update( void ) { ARUint8 *pImage; ARMarkerInfo *pMarkerInfo; int iNumDetectedMarker; // カメラ画像の取得 if( (pImage = (ARUint8 *)arVideoGetImage()) == NULL ) { arUtilSleep(2); return false; } memcpy( m_pARTImage, pImage, m_uiARTImageSize ); m_bMarkerFound = false; // カメラ画像からマーカーを検出 if( arDetectMarker( m_pARTImage, 130, &pMarkerInfo, &iNumDetectedMarker ) < 0 ) { exit( -1 ); } // 検出されたマーカー情報の中から一番信頼性の高いものを探す int k = -1; for( int j = 0 ; j < iNumDetectedMarker ; j++ ) { if( pMarkerInfo[j].id == m_iPattID ) { if( k == -1 || pMarkerInfo[j].cf > pMarkerInfo[k].cf ) k = j; } } if( k != -1 ) { // カメラのトランスフォーム行列を取得 if( m_bFirstTime ) arGetTransMat( &(pMarkerInfo[k]), m_dPattCenter, m_dPattWidth, m_dPattTransMat ); else arGetTransMatCont( &(pMarkerInfo[k]), m_dPattTransMat, m_dPattCenter, m_dPattWidth, m_dPattTransMat ); m_bFirstTime = false; m_bMarkerFound = true; } // 次のカメラ画像のキャプチャを開始 arVideoCapNext(); return true; }
static void mainLoop(void) { ARUint8 *dataPtr; ARMarkerInfo *marker_info; int marker_num; int areamax; int i; if( (dataPtr = (unsigned char *)arVideoGetImage()) == NULL ) { arUtilSleep(2); return; } img_copy( dataPtr, image, xsize*ysize*AR_PIX_SIZE ); arVideoCapNext(); if( arDetectMarker(image, thresh, &marker_info, &marker_num) < 0 ) { cleanup(); exit(0); } areamax = 0; target = NULL; for( i = 0; i < marker_num; i++ ) { if( marker_info[i].area > areamax ) { areamax = marker_info[i].area; target = &(marker_info[i]); } } argDispImage( image, 0, 0 ); if( target != NULL ) { glLineWidth( 2.0 ); glColor3d( 0.0, 1.0, 0.0 ); argLineSeg( target->vertex[0][0], target->vertex[0][1], target->vertex[1][0], target->vertex[1][1], 0, 0 ); argLineSeg( target->vertex[3][0], target->vertex[3][1], target->vertex[0][0], target->vertex[0][1], 0, 0 ); glColor3d( 1.0, 0.0, 0.0 ); argLineSeg( target->vertex[1][0], target->vertex[1][1], target->vertex[2][0], target->vertex[2][1], 0, 0 ); argLineSeg( target->vertex[2][0], target->vertex[2][1], target->vertex[3][0], target->vertex[3][1], 0, 0 ); } argSwapBuffers(); return; }
/* Grab a frame, run OpenCV chessboard-corner detection on its grayscale
   version, and overlay the detected corners plus capture progress text. */
static void mainLoop(void)
{
    ARUint8 *dataPtr;
    int cornerCount;
    char buf[256];
    int i;

    if ((dataPtr = arVideoGetImage()) == NULL) {
        arUtilSleep(2);
        return;
    }

    glClear(GL_COLOR_BUFFER_BIT);
    argDrawMode2D(vp);
    argDrawImage(dataPtr);

    // Convert to grayscale, results will go to arIPI->image, which also provides the backing for calibImage.
    arImageProcLuma(arIPI, dataPtr);

    cornerFlag = cvFindChessboardCorners(calibImage, cvSize(chessboardCornerNumY, chessboardCornerNumX), corners, &cornerCount, CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_FILTER_QUADS);

    /* Red when the full board was found, green otherwise. */
    if (cornerFlag) glColor4ub(255, 0, 0, 255);
    else glColor4ub(0, 255, 0, 255);
    glLineWidth(2.0f);

    // ARLOG("Detected corners = %d\n", cornerCount);
    /* Draw an X and an index label at every detected corner. */
    for (i = 0; i < cornerCount; i++) {
        argDrawLineByObservedPos(corners[i].x - 5, corners[i].y - 5, corners[i].x + 5, corners[i].y + 5);
        argDrawLineByObservedPos(corners[i].x - 5, corners[i].y + 5, corners[i].x + 5, corners[i].y - 5);
        // ARLOG("  %f, %f\n", corners[i].x, corners[i].y);
        sprintf(buf, "%d\n", i);
        argDrawStringsByObservedPos(buf, corners[i].x, corners[i].y + 20);
    }
    sprintf(buf, "Captured Image: %2d/%2d\n", capturedImageNum, calibImageNum);
    argDrawStringsByObservedPos(buf, 10, 30);

    argSwapBuffers();
}
void MainLoop(void) { ARUint8 *image; ARMarkerInfo *marker_info; int marker_num; int j, k; if ((image = (ARUint8 *)arVideoGetImage()) == NULL) { arUtilSleep(2); return; } argDrawMode2D(); argDispImage(image, 0, 0); if (arDetectMarker(image, thresh, &marker_info, &marker_num) < 0) { Cleanup(); exit(0); } arVideoCapNext(); k = -1; for (j = 0; j < marker_num; j++) { if (patt_id == marker_info[j].id) { if (k == -1) k = j; else if (marker_info[k].cf < marker_info[j].cf) k = j; } } if (k != -1) { arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans); DrawObject(); } argSwapBuffers(); }
/* Main per-frame callback for the NFT (natural feature tracking) example:
   grabs a frame, drives the threaded page-initialisation / tracking state
   machine, copies the resulting pose into each dependent marker, and
   notifies the virtual environment about appear/update/disappear events. */
static void mainLoop(void)
{
    static int ms_prev;     /* timestamp of the previous call, for the time delta */
    int ms;
    float s_elapsed;        /* NOTE(review): computed but unused in this function */
    ARUint8 *image;
    int i, j, k;

    // Calculate time delta.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    ms_prev = ms;

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image; // Save the fetched image.

        // Calculate FPS every 30 frames.
        if (gCallCountMarkerDetect % 30 == 0) {
            gFPS = 30.0/arUtilTimer();
            arUtilTimerReset();
            gCallCountMarkerDetect = 0;
        }
        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

        // Run marker detection on frame
        if (threadHandle) {
            // Perform NFT tracking.
            float err;
            int ret;
            int pageNo;

            /* detectedPage state machine:
                 -2 : no page; kick off a new initialisation
                 -1 : initialisation in flight; poll for its result
                >=0 : that page is tracked frame-to-frame */
            if( detectedPage == -2 ) {
                trackingInitStart( threadHandle, gARTImage );
                detectedPage = -1;
            }
            if( detectedPage == -1 ) {
                ret = trackingInitGetResult( threadHandle, trackingTrans, &pageNo);
                if( ret == 1 ) {
                    if (pageNo >= 0 && pageNo < surfaceSetCount) {
                        ARLOGd("Detected page %d.\n", pageNo);
                        detectedPage = pageNo;
                        ar2SetInitTrans(surfaceSet[detectedPage], trackingTrans);
                    } else {
                        ARLOGe("Detected bad page %d.\n", pageNo);
                        detectedPage = -2;
                    }
                } else if( ret < 0 ) {
                    ARLOGd("No page detected.\n");
                    detectedPage = -2;
                }
            }
            if( detectedPage >= 0 && detectedPage < surfaceSetCount) {
                if( ar2Tracking(ar2Handle, surfaceSet[detectedPage], gARTImage, trackingTrans, &err) < 0 ) {
                    ARLOGd("Tracking lost.\n");
                    detectedPage = -2;
                } else {
                    ARLOGd("Tracked page %d (max %d).\n", detectedPage, surfaceSetCount - 1);
                }
            }
        } else {
            ARLOGe("Error: threadHandle\n");
            detectedPage = -2;
        }

        // Update markers.
        for (i = 0; i < markersNFTCount; i++) {
            markersNFT[i].validPrev = markersNFT[i].valid;
            if (markersNFT[i].pageNo >= 0 && markersNFT[i].pageNo == detectedPage) {
                markersNFT[i].valid = TRUE;
                /* Copy the freshly tracked 3x4 pose into the marker. */
                for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) markersNFT[i].trans[j][k] = trackingTrans[j][k];
            }
            else markersNFT[i].valid = FALSE;
            if (markersNFT[i].valid) {
                // Filter the pose estimate.
                if (markersNFT[i].ftmi) {
                    if (arFilterTransMat(markersNFT[i].ftmi, markersNFT[i].trans, !markersNFT[i].validPrev) < 0) {
                        ARLOGe("arFilterTransMat error with marker %d.\n", i);
                    }
                }
                if (!markersNFT[i].validPrev) {
                    // Marker has become visible, tell any dependent objects.
                    VirtualEnvironmentHandleARMarkerAppeared(i);
                }
                // We have a new pose, so set that.
                arglCameraViewRH(markersNFT[i].trans, markersNFT[i].pose.T, VIEW_SCALEFACTOR);
                // Tell any dependent objects about the update.
                VirtualEnvironmentHandleARMarkerWasUpdated(i, markersNFT[i].pose);
            } else {
                if (markersNFT[i].validPrev) {
                    // Marker has ceased to be visible, tell any dependent objects.
                    VirtualEnvironmentHandleARMarkerDisappeared(i);
                }
            }
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    } else {
        arUtilSleep(2);
    }
}
/* main loop */
/* Multi-object variant: tracks PTT_NUM registered objects per frame,
   switching to continuous pose estimation once an object has been seen,
   and caches the inverse transform of the PTT2_MARK_ID marker in itrans2. */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             j, k;
    int             i;

    /* grab a vide frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0,0 );

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    arVideoCapNext();

    /* check for object visibility */
    for( i = 0; i < PTT_NUM; i++){
        /* pick the highest-confidence detection of this object's pattern */
        k = -1;
        for( j = 0; j < marker_num; j++ ) {
            if( object[i].patt_id == marker_info[j].id ) {
                if( k == -1 ) k = j;
                else if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
        if( k == -1 ) {
            /* not find marker */
            object[i].visible = 0;
            isFirst[i] = 1;     /* force one-shot estimation when it reappears */
        }
        else{
            /* get the transformation between the marker and the real camera */
            if( isFirst[i]){
                arGetTransMat(&marker_info[k], object[i].patt_center, object[i].patt_width, object[i].patt_trans);
            }else{
                arGetTransMatCont(&marker_info[k], object[i].patt_trans, object[i].patt_center, object[i].patt_width, object[i].patt_trans);
            }
            object[i].visible = 1;
            isFirst[i] = 0;
            /* addition (original comment: 追加) */
            if(i == PTT2_MARK_ID){
                arUtilMatInv( object[PTT2_MARK_ID].patt_trans, itrans2); /* compute the inverse matrix (original: 逆行列の計算) */
            }
        }
    }

    //Initialize(); // fix me
    draw();

    argSwapBuffers();
}
/* main loop */
/* Multi-marker city scene: estimates one transform from all detected
   markers, optionally renders "ghost" buildings into the depth buffer only
   (for occlusion), then draws cars and per-marker buildings. */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    double          err;
    int             i;

    /* grab a vide frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    /* detect the markers in the video frame */
    if( arDetectMarkerLite(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    argDrawMode2D();
    if( !arDebug ) {
        argDispImage( dataPtr, 0,0 );
    }
    else {
        /* debug view: raw frame, working image, and detection squares */
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 0, 0 );
        else
            argDispImage( arImage, 0, 0);
        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 1.0 );
        for( i = 0; i < marker_num; i++ ) {
            argDrawSquare( marker_info[i].vertex, 0, 0 );
        }
        glLineWidth( 1.0 );
    }

    arVideoCapNext();

    if( (err=arMultiGetTransMat(marker_info, marker_num, config)) < 0 ) {
        argSwapBuffers();
        return;
    }
    printf("err = %f\n", err);
    if(err > 100.0 ) {      /* reject unreliable multi-marker fits */
        argSwapBuffers();
        return;
    }
    /*
    for(i=0;i<3;i++) {
        for(j=0;j<4;j++) printf("%10.5f ", config->trans[i][j]);
        printf("\n");
    }
    printf("\n");
    */

    argDrawMode3D();
    argDraw3dCamera( 0, 0 );
    glClearDepth( 1.0 );
    glScalef(1.0,1.0,5.0);
    glClear(GL_DEPTH_BUFFER_BIT);

    // Draw the ghost buildings, with and without markers
    // (original comment: PRINT DOS PREDIOS FANTASMAS (COM E SEM MARCADORES))
    if (mostraFantasmas == 1) {
        desenhaFantasmasSemTag();
        // Draw ghost buildings with identified markers; colour writes are
        // disabled so they only populate the depth buffer (occlusion).
        // (original comment: Desenha predios fantasmas com marcadores identificados)
        glColorMask(GL_FALSE, GL_FALSE, GL_FALSE, GL_FALSE);
        for(i = (config->marker_num) - 3; i < config->marker_num; i++ ) {
            if( config->marker[i].visible >= 0 ) {
                glScalef(1.0,1.0,2.0);
                draw( config->trans, config->marker[i].trans, 0 );
                glScalef(1.0,1.0,0.5);
            }else{
                glScalef(1.0,1.0,2.0);
                draw( config->trans, config->marker[i].trans, 1 );
                glScalef(1.0,1.0,0.5);
            }
        }
        glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
    }

    desenhaCarros();

    /* draw the remaining (non-ghost) markers' objects */
    for (i = 0; i < (config->marker_num) - 3; i++ ) {
        if( config->marker[i].visible >= 0 )
            draw( config->trans, config->marker[i].trans, 0 );
        else
            draw( config->trans, config->marker[i].trans, 1 );
    }

    argSwapBuffers();
}
/* main loop */
/* Multi-marker example: estimates a single transform from all detected
   markers and draws each configured marker's object (flag argument 1 marks
   markers that were not themselves visible this frame). */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    double          err;
    int             i;

    /* grab a vide frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    /* detect the markers in the video frame */
    if( arDetectMarkerLite(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    argDrawMode2D();
    if( !arDebug ) {
        argDispImage( dataPtr, 0,0 );
    }
    else {
        /* debug view: raw frame, working image, and detection squares */
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 0, 0 );
        else
            argDispImage( arImage, 0, 0);
        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 1.0 );
        for( i = 0; i < marker_num; i++ ) {
            argDrawSquare( marker_info[i].vertex, 0, 0 );
        }
        glLineWidth( 1.0 );
    }

    arVideoCapNext();

    if( (err=arMultiGetTransMat(marker_info, marker_num, config)) < 0 ) {
        argSwapBuffers();
        return;
    }
    printf("err = %f\n", err);
    if(err > 100.0 ) {      /* reject unreliable multi-marker fits */
        argSwapBuffers();
        return;
    }
    /*
    for(i=0;i<3;i++) {
        for(j=0;j<4;j++) printf("%10.5f ", config->trans[i][j]);
        printf("\n");
    }
    printf("\n");
    */

    argDrawMode3D();
    argDraw3dCamera( 0, 0 );
    glClearDepth( 1.0 );
    glClear(GL_DEPTH_BUFFER_BIT);
    for( i = 0; i < config->marker_num; i++ ) {
        if( config->marker[i].visible >= 0 )
            draw( config->trans, config->marker[i].trans, 0 );
        else
            draw( config->trans, config->marker[i].trans, 1 );
    }
    argSwapBuffers();
}
/* Mouse handler for the camera-calibration tool.
   Right button up:
     status 0 -> stop video; if images were grabbed, run distortion
                 calibration and enter review mode (status 2), else exit.
     status 1 -> discard the current image and its points, back to grabbing.
   Left button down (status 1): anchor a rubber-band selection and seed
     clipImage with the thresholded pixel under the cursor (per pixel format).
   Left button up:
     status 0 -> grab and store the current frame, start marking (status 1).
     status 1, all points done -> log them and return to grabbing.
     status 1 -> accept the selection as one point via an intensity-weighted
                 centroid of the thresholded clip.
     status 2 -> step through stored images; after the last one, run the
                 intrinsic calibration (calc_inp), save, optionally iterate
                 on the aspect ratio, then exit. */
static void mouseEvent(int button, int state, int x, int y)
{
    AR2VideoBufferT *buff;
    unsigned char *p, *p1;
    int ssx, ssy, eex, eey;
    int i, j, k;
    char line[256];

    /* Clamp the click to the window, then convert to image coordinates. */
    if( x < 0 ) x = 0;
    if( x >= xsize ) x = xsize-1;
    if( y < 0 ) y = 0;
    if( y >= ysize ) y = ysize-1;
    x *= SCALE;
    y *= SCALE;

    if( button == GLUT_RIGHT_BUTTON && state == GLUT_UP ) {
        if( status == 0 ) {
            /* Done grabbing: stop video and, if any images were captured,
               compute the lens distortion and report its factors. */
            arVideoCapStop();
            arVideoClose();
            if( patt.loop_num > 0 ) {
                calc_distortion( &patt, xsize, ysize, aspect_ratio, dist_factor, dist_function_version );
                ARLOG("--------------\n");
                if (dist_function_version == 3) {
                    ARLOG("Center X: %f\n", dist_factor[0]);
                    ARLOG(" Y: %f\n", dist_factor[1]);
                    ARLOG("Size Adjust: %f\n", dist_factor[2]);
                    ARLOG("Aspect Ratio: %f\n", dist_factor[3]);
                    ARLOG("Dist Factor1: %f\n", dist_factor[4]);
                    ARLOG("Dist Factor2: %f\n", dist_factor[5]);
                } else if (dist_function_version == 2) {
                    ARLOG("Center X: %f\n", dist_factor[0]);
                    ARLOG(" Y: %f\n", dist_factor[1]);
                    ARLOG("Size Adjust: %f\n", dist_factor[2]);
                    ARLOG("Dist Factor1: %f\n", dist_factor[3]);
                    ARLOG("Dist Factor2: %f\n", dist_factor[4]);
                } else if (dist_function_version == 1) {
                    ARLOG("Center X: %f\n", dist_factor[0]);
                    ARLOG(" Y: %f\n", dist_factor[1]);
                    ARLOG("Size Adjust: %f\n", dist_factor[2]);
                    ARLOG("Dist Factor: %f\n", dist_factor[3]);
                }
                ARLOG("--------------\n");
                status = 2;
                check_num = 0;
                print_comment(5);
            }
            else {
                exit(0);
            }
        }
        else if( status == 1 ) {
            /* Abort marking: drop the last grabbed image and resume video. */
            if( patt.loop_num == 0 ) {ARLOGe("error!!\n"); exit(0);}
            patt.loop_num--;
            free( patt.point[patt.loop_num] );
            free( patt.savedImage[patt.loop_num] );
            status = 0;
            point_num = 0;
            arVideoCapStart();
            if( patt.loop_num == 0 ) print_comment(0);
            else print_comment(4);
        }
    }

    if( button == GLUT_LEFT_BUTTON && state == GLUT_DOWN ) {
        if( status == 1 && point_num < patt.h_num*patt.v_num ) {
            /* Anchor the rubber band and threshold the pixel under the
               cursor into clipImage; channel layout depends on the
               capture pixel format (inverted: dark dots become 255). */
            sx = ex = x;
            sy = ey = y;
            p = &(patt.savedImage[patt.loop_num-1][(y*xsize+x)*pixelSize]);
            p1 = &(clipImage[0]);
            if (pixelFormat == AR_PIXEL_FORMAT_BGRA || pixelFormat == AR_PIXEL_FORMAT_RGBA) {
                k = (255*3 - (*(p+0) + *(p+1) + *(p+2))) / 3;
                if( k < thresh ) k = 0;
                else k = 255;
                *(p1+0) = *(p1+1) = *(p1+2) = k;
            } else if (pixelFormat == AR_PIXEL_FORMAT_ARGB || pixelFormat == AR_PIXEL_FORMAT_ABGR) {
                k = (255*3 - (*(p+1) + *(p+2) + *(p+3))) / 3;
                if( k < thresh ) k = 0;
                else k = 255;
                *(p1+1) = *(p1+2) = *(p1+3) = k;
            } else if (pixelFormat == AR_PIXEL_FORMAT_BGR || pixelFormat == AR_PIXEL_FORMAT_RGB) {
                k = (255*3 - (*(p+0) + *(p+1) + *(p+2))) / 3;
                if( k < thresh ) k = 0;
                else k = 255;
                *(p1+0) = *(p1+1) = *(p1+2) = k;
            } else if (pixelFormat == AR_PIXEL_FORMAT_MONO || pixelFormat == AR_PIXEL_FORMAT_420v || pixelFormat == AR_PIXEL_FORMAT_420f) {
                k = 255 - *p;
                if( k < thresh ) k = 0;
                else k = 255;
                *p1 = k;
            } else if (pixelFormat == AR_PIXEL_FORMAT_2vuy) {
                k = 255 - *(p+1);
                if( k < thresh ) k = 0;
                else k = 255;
                *(p1+1) = k;
            } else if (pixelFormat == AR_PIXEL_FORMAT_yuvs) {
                k = 255 - *p;
                if( k < thresh ) k = 0;
                else k = 255;
                *p1 = k;
            }
        }
    }

    if( button == GLUT_LEFT_BUTTON && state == GLUT_UP ) {
        if( status == 0 && patt.loop_num < LOOP_MAX ) {
            /* Grab and store a calibration image, then switch to marking. */
            while (!(buff = arVideoGetImage()) || !buff->fillFlag) arUtilSleep(2);
            p = buff->buff;
            patt.savedImage[patt.loop_num] = (unsigned char *)malloc( xsize*ysize*pixelSize );
            if( patt.savedImage[patt.loop_num] == NULL ) exit(0);
            p1 = patt.savedImage[patt.loop_num];
            for(i=0;i<xsize*ysize*pixelSize;i++) *(p1++) = *(p++);
            arVideoCapStop();
            patt.point[patt.loop_num] = (CALIB_COORD_T *)malloc( sizeof(CALIB_COORD_T)*patt.h_num*patt.v_num );
            if( patt.point[patt.loop_num] == NULL ) exit(0);
            patt.loop_num++;
            status = 1;
            sx = sy = ex= ey = -1;
            print_comment(1);
        }
        else if( status == 1 && point_num == patt.h_num*patt.v_num ) {
            /* All grid points marked: log them and resume grabbing. */
            status = 0;
            point_num = 0;
            arVideoCapStart();
            ARLOG("### No.%d ###\n", patt.loop_num);
            for( j = 0; j < patt.v_num; j++ ) {
                for( i = 0; i < patt.h_num; i++ ) {
                    ARLOG("%2d, %2d: %6.2f, %6.2f\n", i+1, j+1, patt.point[patt.loop_num-1][j*patt.h_num+i].x_coord, patt.point[patt.loop_num-1][j*patt.h_num+i].y_coord);
                }
            }
            ARLOG("\n\n");
            if( patt.loop_num < LOOP_MAX ) print_comment(4);
            else print_comment(6);
        }
        else if( status == 1 ) {
            /* Accept the rubber-band selection as one calibration point:
               intensity-weighted centroid of the thresholded clip. */
            if( sx < ex ) { ssx = sx; eex = ex; }
            else          { ssx = ex; eex = sx; }
            if( sy < ey ) { ssy = sy; eey = ey; }
            else          { ssy = ey; eey = sy; }
            patt.point[patt.loop_num-1][point_num].x_coord = 0.0;
            patt.point[patt.loop_num-1][point_num].y_coord = 0.0;
            p = clipImage;
            k = 0;      /* total intensity, for normalisation */
            for( j = 0; j < (eey-ssy+1); j++ ) {
                for( i = 0; i < (eex-ssx+1); i++ ) {
                    if( pixelSize == 1 ) {
                        patt.point[patt.loop_num-1][point_num].x_coord += i * *p;
                        patt.point[patt.loop_num-1][point_num].y_coord += j * *p;
                        k += *p;
                    } else {
                        patt.point[patt.loop_num-1][point_num].x_coord += i * *(p+1);
                        patt.point[patt.loop_num-1][point_num].y_coord += j * *(p+1);
                        k += *(p+1);
                    }
                    p += pixelSize;
                }
            }
            if( k != 0 ) {
                /* Normalise and translate back into image coordinates. */
                patt.point[patt.loop_num-1][point_num].x_coord /= k;
                patt.point[patt.loop_num-1][point_num].y_coord /= k;
                patt.point[patt.loop_num-1][point_num].x_coord += ssx;
                patt.point[patt.loop_num-1][point_num].y_coord += ssy;
                point_num++;
            }
            sx = sy = ex= ey = -1;
            ARLOG(" # %d/%d\n", point_num, patt.h_num*patt.v_num);
            if( point_num == patt.h_num*patt.v_num ) print_comment(2);
        }
        else if( status == 2 ) {
            /* Review mode: advance; past the last image, run calibration. */
            check_num++;
            if( check_num == patt.loop_num ) {
                if(patt.loop_num >= 2) {
                    if( calc_inp(&patt, dist_factor, xsize, ysize, mat, dist_function_version) < 0 ) {
                        ARLOGe("Calibration failed.\n");
                        exit(0);
                    }
                    save_param();
                    if (dist_function_version == 3) {
                        /* Optionally refine the aspect ratio and redo the
                           distortion estimate. */
                        printf("Do you want to repeat again?");
                        scanf("%s", line);
                        if( line[0] == 'y' ) {
                            aspect_ratio *= mat[0][0] / mat[1][1];
                            ARLOG("New aspect ratio = %f\n", aspect_ratio);
                            calc_distortion( &patt, xsize, ysize, aspect_ratio, dist_factor, dist_function_version );
                            ARLOG("--------------\n");
                            ARLOG("Center X: %f\n", dist_factor[0]);
                            ARLOG(" Y: %f\n", dist_factor[1]);
                            ARLOG("Size Adjust: %f\n", dist_factor[2]);
                            ARLOG("Aspect Ratio: %f\n", dist_factor[3]);
                            ARLOG("Dist Factor1: %f\n", dist_factor[4]);
                            ARLOG("Dist Factor2: %f\n", dist_factor[5]);
                            ARLOG("--------------\n");
                            status = 2;
                            check_num = 0;
                            print_comment(5);
                            return;
                        }
                    }
                }
                exit(0);
            }
            if( check_num+1 == patt.loop_num ) {
                ARLOG("\nLeft Mouse Button: Next Step.\n");
            }
            else {
                ARLOG(" %d/%d.\n", check_num+1, patt.loop_num);
            }
        }
    }

    return;
}
/* Grab a frame, find the marker matching target_id, and hand the best
   detection to the selected output routine (raw matrix or quaternion,
   chosen by outputMode). */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             j, k;

    /* grab a vide frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }

    glClearColor( 0.0, 0.0, 0.0, 0.0 );
    glClearDepth( 1.0 );
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    argDrawMode2D();
    if( disp_mode ) {
        argDispImage( dataPtr, 0, 0 );
    }
    else {
        argDispImage( dataPtr, 1, 1 );
    }

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    arVideoCapNext();

    /* if the debug mode is on draw squares around the detected squares in the video image */
    if( arDebug ) {
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 2, 1 );
        else
            argDispImage( arImage, 2, 1);
    }

    /* check for object visibility: best-confidence match of target_id */
    k = -1;
    for( j = 0; j < marker_num; j++ ) {
        if( marker_info[j].id == target_id ) {
            if( k == -1 ) k = j;
            else {
                if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
    }

    if( k != -1 ) {
        glDisable(GL_DEPTH_TEST);
        switch( outputMode ) {
            case 0:
                getResultRaw( &marker_info[k] );
                break;
            case 1:
                getResultQuat( &marker_info[k] );
                break;
        }
    }

    argSwapBuffers();
}
/* main loop */
/* Paddle interaction demo: tracks a multi-marker base and a paddle marker,
   lets the user pick up items with the paddle and slide them off by tilting
   it, then renders the ground grid, the items, and the paddle. */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    float           curPaddlePos[3];
    int             i;
    double          err;
    double          angle;

    err=0.;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    /* detect the markers in the video frame */
    if( arDetectMarkerLite(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    argDrawMode2D();
    if( !arDebug ) {
        argDispImage( dataPtr, 0,0 );
    }
    else {
        /* debug view: raw frame, working image, and detection squares */
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 0, 0 );
        else
            argDispImage( arImage, 0, 0);
        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 1.0 );
        for( i = 0; i < marker_num; i++ ) {
            argDrawSquare( marker_info[i].vertex, 0, 0 );
        }
        glLineWidth( 1.0 );
    }

    arVideoCapNext();

    for( i = 0; i < marker_num; i++ ) marker_flag[i] = 0;

    /* get the paddle position */
    paddleGetTrans(paddleInfo, marker_info, marker_flag, marker_num, &cparam);

    /* draw the 3D models */
    glClearDepth( 1.0 );
    glClear(GL_DEPTH_BUFFER_BIT);

    /* get the translation from the multimarker pattern */
    if( (err=arMultiGetTransMat(marker_info, marker_num, config)) < 0 ) {
        argSwapBuffers();
        return;
    }
    // printf("err = %f\n", err);
    if(err > 100.0 ) {      /* reject unreliable multi-marker fits */
        argSwapBuffers();
        return;
    }

    //draw a red ground grid
    drawGroundGrid( config->trans, 15, 150.0, 110.0, 0.0);

    /* find the paddle position relative to the base */
    if (paddleInfo->active)
        findPaddlePosition(curPaddlePos,paddleInfo->trans,config->trans);

    /* checking for paddle gesture */
    if( paddleInfo->active) {
        int findItem=-1;
        if (myPaddleItem.item!=-1) {
            /* An item is on the paddle: tilting slides it outward along the
               incline; once it passes the rim (radius^2 > 900) it is dropped
               at the current paddle position. */
            if( check_incline(paddleInfo->trans, config->trans, &angle) ) {
                myPaddleItem.x += 2.0 * cos(angle);
                myPaddleItem.y += 2.0 * sin(angle);
                if( myPaddleItem.x*myPaddleItem.x + myPaddleItem.y*myPaddleItem.y > 900.0 ) {
                    myPaddleItem.x -= 2.0 * cos(angle);
                    myPaddleItem.y -= 2.0 * sin(angle);
                    myListItem.item[myPaddleItem.item].onpaddle=0;
                    myListItem.item[myPaddleItem.item].pos[0]=curPaddlePos[0];
                    myListItem.item[myPaddleItem.item].pos[1]=curPaddlePos[1];
                    myPaddleItem.item = -1;
                }
            }
        }
        else {
            /* Paddle is empty: check whether it scooped up an item. */
            if ((findItem=check_pickup(paddleInfo->trans, config->trans,&myListItem, &angle))!=-1) {
                myPaddleItem.item=findItem;
                myPaddleItem.x =0.0;
                myPaddleItem.y =0.0;
                myPaddleItem.angle = 0.0;
                myListItem.item[myPaddleItem.item].onpaddle=1;
            }
        }
    }

    /* draw the item */
    drawItems(config->trans,&myListItem);

    /* draw the paddle */
    if( paddleInfo->active ){
        draw_paddle(paddleInfo,&myPaddleItem);
    }

    argSwapBuffers();
}
/* Redraw callback for the calibration tool (immediate-mode GL variant):
   draws point crosses with glBegin/glEnd, flipping y because the GL origin
   is bottom-left.
     status == 0 : live video preview
     status == 1 : point marking on the latest grabbed image
     status == 2 : review of stored images */
static void dispImage(void)
{
    unsigned char *dataPtr;
    double x, y;
    int ssx, eex, ssy, eey;
    int i;

    if( status == 0 ) {
        if( (dataPtr = (unsigned char *)arVideoGetImage()) == NULL ) {
            arUtilSleep(2);
            return;
        }
        dispImage2( dataPtr );
        arVideoCapNext();
    }
    else if( status == 1 ) {
        dispImage2( patt.savedImage[patt.loop_num-1] );
        /* Red crosses over the points measured so far. */
        for( i = 0; i < point_num; i++ ) {
            x = patt.point[patt.loop_num-1][i].x_coord;
            y = patt.point[patt.loop_num-1][i].y_coord;
            glColor3f( 1.0, 0.0, 0.0 );
            glBegin(GL_LINES);
            glVertex2f( x-10, (ysize-1)-y );
            glVertex2f( x+10, (ysize-1)-y );
            glVertex2f( x, (ysize-1)-(y-10) );
            glVertex2f( x, (ysize-1)-(y+10) );
            glEnd();
        }
        /* Active rubber-band selection: show the thresholded clip. */
        if( sx != -1 && sy != -1 ) {
            /* Normalise the selection rectangle corners. */
            if( sx < ex ) { ssx = sx; eex = ex; }
            else          { ssx = ex; eex = sx; }
            if( sy < ey ) { ssy = sy; eey = ey; }
            else          { ssy = ey; eey = sy; }
            dispClipImage( ssx, ysize-1-ssy, eex-ssx+1, eey-ssy+1, clipImage );
#if 0
            glColor3f( 0.0, 0.0, 1.0 );
            glBegin(GL_LINE_LOOP);
            glVertex2f( sx, (ysize-1)-sy );
            glVertex2f( ex, (ysize-1)-sy );
            glVertex2f( ex, (ysize-1)-ey );
            glVertex2f( sx, (ysize-1)-ey );
            glEnd();
#endif
        }
    }
    else if( status == 2 ) {
        dispImage2( patt.savedImage[check_num] );
        /* Cross out every grid point of the image under review. */
        for( i = 0; i < patt.h_num*patt.v_num; i++ ) {
            x = patt.point[check_num][i].x_coord;
            y = patt.point[check_num][i].y_coord;
            glColor3f( 1.0, 0.0, 0.0 );
            glBegin(GL_LINES);
            glVertex2f( x-10, (ysize-1)-y );
            glVertex2f( x+10, (ysize-1)-y );
            glVertex2f( x, (ysize-1)-(y-10) );
            glVertex2f( x, (ysize-1)-(y+10) );
            glEnd();
        }
        draw_line();
    }

    glutSwapBuffers();
}
/* Stereo-calibration loop: latches a frame from each camera, runs OpenCV
   chessboard-corner detection on both, overlays the corners, and sets the
   global cornerFlag only when BOTH views found the full board. */
static void mainLoop( void )
{
    AR2VideoBufferT *videoBuffL;
    AR2VideoBufferT *videoBuffR;
    ARUint8         *dataPtrL;
    ARUint8         *dataPtrR;
    int             cornerFlagL;
    int             cornerFlagR;
    int             cornerCountL;
    int             cornerCountR;
    char            buf[256];
    int             i;

    /* Latch whichever camera produced a new frame; proceed only with both. */
    if ((videoBuffL = ar2VideoGetImage(vidL))) {
        gVideoBuffL = videoBuffL;
    }
    if ((videoBuffR = ar2VideoGetImage(vidR))) {
        gVideoBuffR = videoBuffR;
    }
    if (gVideoBuffL && gVideoBuffR) {

        // Warn about significant time differences.
        i = ((int)gVideoBuffR->time_sec - (int)gVideoBuffL->time_sec) * 1000
          + ((int)gVideoBuffR->time_usec - (int)gVideoBuffL->time_usec) / 1000;
        if( i > 20 ) {
            ARLOG("Time diff = %d[msec]\n", i);
        } else if( i < -20 ) {
            ARLOG("Time diff = %d[msec]\n", i);
        }

        dataPtrL = gVideoBuffL->buff;
        dataPtrR = gVideoBuffR->buff;

        glClear(GL_COLOR_BUFFER_BIT);
        argDrawMode2D( vpL );
        argDrawImage( dataPtrL );
        argDrawMode2D( vpR );
        argDrawImage( dataPtrR );

        /* Convert each view and look for the chessboard corners. */
        copyImage( dataPtrL, (ARUint8 *)calibImageL->imageData, xsizeL*ysizeL, pixFormatL );
        cornerFlagL = cvFindChessboardCorners(calibImageL, cvSize(chessboardCornerNumY,chessboardCornerNumX), cornersL, &cornerCountL, CV_CALIB_CB_ADAPTIVE_THRESH|CV_CALIB_CB_FILTER_QUADS );
        copyImage( dataPtrR, (ARUint8 *)calibImageR->imageData, xsizeR*ysizeR, pixFormatR );
        cornerFlagR = cvFindChessboardCorners(calibImageR, cvSize(chessboardCornerNumY,chessboardCornerNumX), cornersR, &cornerCountR, CV_CALIB_CB_ADAPTIVE_THRESH|CV_CALIB_CB_FILTER_QUADS );

        /* Left view: red when the full board was found, green otherwise;
           draw an X and an index label at every detected corner. */
        argDrawMode2D( vpL );
        if(cornerFlagL) glColor3f(1.0f, 0.0f, 0.0f);
        else glColor3f(0.0f, 1.0f, 0.0f);
        glLineWidth(2.0f);
        //ARLOG("Detected corners = %d\n", cornerCount);
        for( i = 0; i < cornerCountL; i++ ) {
            argDrawLineByObservedPos(cornersL[i].x-5, cornersL[i].y-5, cornersL[i].x+5, cornersL[i].y+5);
            argDrawLineByObservedPos(cornersL[i].x-5, cornersL[i].y+5, cornersL[i].x+5, cornersL[i].y-5);
            //ARLOG("  %f, %f\n", cornersL[i].x, cornersL[i].y);
            sprintf(buf, "%d\n", i);
            argDrawStringsByObservedPos(buf, cornersL[i].x, cornersL[i].y+20);
        }

        /* Right view: same overlay. */
        argDrawMode2D( vpR );
        if(cornerFlagR) glColor3f(1.0f, 0.0f, 0.0f);
        else glColor3f(0.0f, 1.0f, 0.0f);
        glLineWidth(2.0f);
        //ARLOG("Detected corners = %d\n", cornerCount);
        for( i = 0; i < cornerCountR; i++ ) {
            argDrawLineByObservedPos(cornersR[i].x-5, cornersR[i].y-5, cornersR[i].x+5, cornersR[i].y+5);
            argDrawLineByObservedPos(cornersR[i].x-5, cornersR[i].y+5, cornersR[i].x+5, cornersR[i].y-5);
            //ARLOG("  %f, %f\n", cornersR[i].x, cornersR[i].y);
            sprintf(buf, "%d\n", i);
            argDrawStringsByObservedPos(buf, cornersR[i].x, cornersR[i].y+20);
        }

        /* A capture is usable only when both boards were fully found. */
        if( cornerFlagL && cornerFlagR ) {
            cornerFlag = 1;
            glColor3f(1.0f, 0.0f, 0.0f);
        }
        else {
            cornerFlag = 0;
            glColor3f(0.0f, 1.0f, 0.0f);
        }
        argDrawMode2D( vpL );
        sprintf(buf, "Captured Image: %2d/%2d\n", capturedImageNum, calibImageNum);
        argDrawStringsByIdealPos(buf, 10, 30);

        argSwapBuffers();

        /* Consume both latched frames. */
        gVideoBuffL = gVideoBuffR = NULL;
    }
    else arUtilSleep(2);
}
/* Dual-window render loop: detects a single square pattern (patt_id) in the
 * video frame, draws the camera (or debug) image in window w1 and the raw
 * image in window w2, overlays FPS and pose-estimation error, and draws the
 * 3D content when the marker is found with confidence > 0.7.
 * Uses file-scope state: arHandle, ar3DHandle, w1/w2, vp1/vp2, count, fps,
 * errValue, ysize, patt_id, patt_width. */
static void mainLoop(void)
{
    ARUint8      *dataPtr;
    ARMarkerInfo *markerInfo;
    int           markerNum;
    ARdouble      patt_trans[3][4];
    ARdouble      err;
    int           imageProcMode;
    int           debugMode;
    int           j, k;

    /* grab a video frame */
    if ((dataPtr = (ARUint8*)arVideoGetImage()) == NULL) {
        arUtilSleep(2);
        return;
    }
    /* detect the markers in the video frame */
    if (arDetectMarker(arHandle, dataPtr) < 0) {
        cleanup();
        exit(0);
    }

    /* Window 1: camera image, or the thresholded (binarised) debug image. */
    argSetWindow(w1);
    argDrawMode2D(vp1);
    arGetDebugMode(arHandle, &debugMode);
    if (debugMode == 0) {
        argDrawImage(dataPtr);
    } else {
        arGetImageProcMode(arHandle, &imageProcMode);
        if (imageProcMode == AR_IMAGE_PROC_FRAME_IMAGE) {
            argDrawImage(arHandle->labelInfo.bwImage);
        } else {
            /* Half-resolution image processing: draw at half size. */
            argDrawImageHalf(arHandle->labelInfo.bwImage);
        }
    }
    /* Window 2: always the raw camera image. */
    argSetWindow(w2);
    argDrawMode2D(vp2);
    argDrawImage(dataPtr);
    argSetWindow(w1);

    /* Refresh the FPS string every 10 frames. */
    if (count % 10 == 0) {
        sprintf(fps, "%f[fps]", 10.0 / arUtilTimer());
        arUtilTimerReset();
    }
    count++;
    glColor3f(0.0f, 1.0f, 0.0f);
    argDrawStringsByIdealPos(fps, 10, ysize - 30);

    markerNum = arGetMarkerNum(arHandle);
    if (markerNum == 0) {
        /* Nothing detected: present both windows and bail out. */
        argSetWindow(w1);
        argSwapBuffers();
        argSetWindow(w2);
        argSwapBuffers();
        return;
    }

    /* check for object visibility: pick the highest-confidence marker
       matching patt_id; the first candidate must clear a 0.7 threshold. */
    markerInfo = arGetMarker(arHandle);
    k = -1;
    for (j = 0; j < markerNum; j++) {
        // ARLOG("ID=%d, CF = %f\n", markerInfo[j].id, markerInfo[j].cf);
        if (patt_id == markerInfo[j].id) {
            if (k == -1) {
                if (markerInfo[j].cf > 0.7) k = j;
            } else if (markerInfo[j].cf > markerInfo[k].cf) k = j;
        }
    }
    if (k == -1) {
        argSetWindow(w1);
        argSwapBuffers();
        argSetWindow(w2);
        argSwapBuffers();
        return;
    }

    /* Estimate the marker->camera transform and report the fit error. */
    err = arGetTransMatSquare(ar3DHandle, &(markerInfo[k]), patt_width, patt_trans);
    sprintf(errValue, "err = %f", err);
    glColor3f(0.0f, 1.0f, 0.0f);
    argDrawStringsByIdealPos(fps, 10, ysize - 30);
    argDrawStringsByIdealPos(errValue, 10, ysize - 60);
    // ARLOG("err = %f\n", err);
    draw(patt_trans);

    argSetWindow(w1);
    argSwapBuffers();
    argSetWindow(w2);
    argSwapBuffers();
}
/* Main loop (Kinect variant): sources the frame either from the Kinect
 * (drawFromKinect) or from the regular ARToolKit video stream, detects the
 * single pattern patt_id, and draws the model with continuous pose
 * estimation (arGetTransMatCont) once tracking is established.
 * contF remembers whether the marker was visible on the previous frame. */
static void mainLoop(void)
{
    static int    contF = 0;   /* 1 = marker seen last frame (enables Cont pose) */
    ARUint8      *dataPtr;
    ARMarkerInfo *marker_info;
    int           marker_num;
    int           j, k;

    //update new data
    g_MyKinect.Update();

#ifdef USE_USERDETECTOR
    /* Optional skeleton tracking: log the right-hand joint position. */
    if(g_MyKinect.userStatus.isPlayerVisible()) {
        XV3 tmp = g_MyKinect.userDetector->getSkeletonJointPosition(XN_SKEL_RIGHT_HAND);
        printf("Right hand position: %.2f %.2f %.2f\n", tmp.X, tmp.Y, tmp.Z);
    }
#endif

    if(drawFromKinect) {
        //get image data to detect marker
        if( (dataPtr = (ARUint8 *)g_MyKinect.GetBGRA32Image()) == NULL ) {
            arUtilSleep(2);
            return;
        }
    } else {
        /* grab a video frame */
        if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
            arUtilSleep(2);
            return;
        }
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    if(drawFromKinect) {
        /* Optional display modes: 2 = depth coloured, 3 = depth mixed with
           the image (detection above still ran on the BGRA image). */
        if(displayMode == 2)
            dataPtr = (ARUint8 *)g_MyKinect.GetDepthDrewByColor();
        else if(displayMode == 3)
            dataPtr = (ARUint8 *)g_MyKinect.GetDepthMixedImage();
    }

    argDrawMode2D();
    argDispImage( dataPtr, 0,0 );
    arVideoCapNext();

    /* check for object visibility: best-confidence match for patt_id */
    k = -1;
    for( j = 0; j < marker_num; j++ ) {
        if( patt_id == marker_info[j].id ) {
            if( k == -1 ) k = j;
            else if( marker_info[k].cf < marker_info[j].cf ) k = j;
        }
    }
    if( k == -1 ) {
        contF = 0;               /* lost tracking: next pose starts fresh */
        argSwapBuffers();
        return;
    }

    /* get the transformation between the marker and the real camera.
       mode==0 forces one-shot estimation; otherwise reuse the previous
       pose as the starting point for a smoother continuous estimate. */
    if( mode == 0 || contF == 0 ) {
        arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);
    } else {
        arGetTransMatCont(&marker_info[k], patt_trans, patt_center, patt_width, patt_trans);
    }
    contF = 1;

    draw( patt_trans );

    argSwapBuffers();
}
/* Main loop (paddle game): tracks a paddle marker and a multi-marker base,
 * detects paddle/target collisions, and draws the scene. Uses file-scope
 * state: count, thresh, marker_flag, paddleInfo, cparam, config, myTarget,
 * TARGET_NUM, COLLIDE_DIST. */
static void mainLoop(void)
{
    ARUint8      *dataPtr;
    ARMarkerInfo *marker_info;
    int           marker_num;
    float         curPaddlePos[3];
    int           i;
    double        err;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    /* detect the markers in the video frame */
    if( arDetectMarkerLite(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    argDrawMode2D();
    if( !arDebug ) {
        argDispImage( dataPtr, 0,0 );
    } else {
        /* Debug mode: raw image in one quadrant, binarised image in the
           other, with red outlines around every detected square. */
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 0, 0 );
        else
            argDispImage( arImage, 0, 0);

        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 1.0 );
        for( i = 0; i < marker_num; i++ ) {
            argDrawSquare( marker_info[i].vertex, 0, 0 );
        }
        glLineWidth( 1.0 );
    }
    arVideoCapNext();

    /* Reset per-marker "consumed" flags before paddle tracking. */
    for( i = 0; i < marker_num; i++ ) marker_flag[i] = 0;

    /* get the paddle position */
    paddleGetTrans(paddleInfo, marker_info, marker_flag, marker_num, &cparam);

    /* draw the 3D models */
    glClearDepth( 1.0 );
    glClear(GL_DEPTH_BUFFER_BIT);

    /* draw the paddle, base and menu */
    if( paddleInfo->active ){
        draw_paddle( paddleInfo);
    }

    /* get the translation from the multimarker pattern; bail out when the
       base is not found or the fit error is implausibly large. */
    if( (err=arMultiGetTransMat(marker_info, marker_num, config)) < 0 ) {
        argSwapBuffers();
        return;
    }
    //printf("err = %f\n", err);
    if(err > 100.0 ) {
        argSwapBuffers();
        return;
    }

    //draw a red ground grid
    drawGroundGrid( config->trans, 20, 150.0f, 105.0f, 0.0f);

    /* find the paddle position relative to the base */
    findPaddlePosition(curPaddlePos, paddleInfo->trans, config->trans);

    /* check for collisions with targets (20 mm radius, presumably in
       base-marker units -- TODO confirm units) */
    for(i=0;i<TARGET_NUM;i++){
        myTarget[i].state = NOT_TOUCHED;
        if(checkCollision(curPaddlePos, myTarget[i].pos, 20.0f)) {
            myTarget[i].state = TOUCHED;
            fprintf(stderr,"touched !!\n");
        }
    }

    /* draw the targets */
    for(i=0;i<TARGET_NUM;i++){
        draw(myTarget[i],config->trans);
    }

    argSwapBuffers();
}
/* Main loop (colour-calibration / finger-interaction demo): grabs a frame,
 * wraps it in an OpenCV image, runs either colour calibration or colour
 * tracking + interactions, then searches for the two known patterns and
 * spawns their 3D objects when first seen. Comments translated from French. */
void mainLoop()
{
    ARMarkerInfo *marker_info;
    ARUint8 *dataPtr;
    int marker_num;

    /* Release the previous frame's OpenCV wrapper before creating a new one. */
    if(!calib)//special paycay florian
        cvReleaseImage(&image);
    if(!calib)
        detectColision();

    // Grab the video stream
    if ( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }

    // Switch to 2D mode to analyse the captured image
    argDrawMode2D();

    // Wrap the captured buffer in an OpenCV image (shares dataPtr's memory)
    //IplImage* imgTest;
    image = cvCreateImage(cvSize(xsize, ysize), IPL_DEPTH_8U, 4);
    image->imageData = (char *)dataPtr;

    // Mirror the image (flag 1 = flip about the vertical axis); otherwise it appears reversed
    cvFlip(image, image, 1);

    // If the colours have already been calibrated, test for collisions;
    // otherwise run the calibration step.
    interactionBoutton();
    if(calib)
        calibrage();
    else {
        updateColor();
        interactions();
    }

    // Display the image on screen
    argDispImage( (unsigned char *)image->imageData, 0,0 );

    // Request the next frame; we are done with the current one
    arVideoCapNext();

    if (arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0) {
        /* NOTE(review): on failure this cleans up but falls through and keeps
           using marker_info/marker_num below -- confirm intended. */
        printf("impossible de detecter le marqueur\n");
        cleanup();
    }

    if(visible == false && !calib) // UI element: show "Searching" feedback and reset object motion state
    {
        glEnable(GL_LIGHT0);
        objet1_x =0;objet1_y =0;objet2_x =0;objet2_y =0;
        if(scan.isVisible(0)==true)
            scan.setVisible(false,0);
        if(scan.isVisible(1)==false)
            scan.setVisible(true,1);
        glColor3ub(255,0,0);
        texte(GLUT_BITMAP_HELVETICA_18,(char*)"Searching",cparam.xsize-100,cparam.ysize-30);
        /* Animated "Searching ." / ".." / "..." dots driven by the
           alterne1 (phase) and alterne2 (frame counter) globals. */
        if(alterne1==0 && alterne2 > 20)
        {
            glColor3ub(255,0,0);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Searching .",cparam.xsize-100,cparam.ysize-30);
            if(alterne2 > 30){alterne2=0;alterne1=(alterne1+1)%3;}
        }
        if(alterne1==1 && alterne2 > 20 )
        {
            glColor3ub(255,0,0);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Searching ..",cparam.xsize-100,cparam.ysize-30);
            if(alterne2 > 30){alterne2=0;alterne1=(alterne1+1)%3;}
        }
        if(alterne1==2 && alterne2 > 20)
        {
            glColor3ub(255,0,0);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Searching ...",cparam.xsize-100,cparam.ysize-30);
            if(alterne2 > 30){alterne2=0;alterne1=(alterne1+1)%3;}
        }
        alterne2+=1;
        glDisable(GL_LIGHT0);
    }
    else if(calib)
    {
        /* Calibration prompts: couleur selects which finger colour is being
           picked (0 = thumb, 1 = forefinger, otherwise middle finger). */
        if(couleur == 0)
        {
            glColor3ub(0,0,255);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Choose thumb's color",cparam.xsize-220,cparam.ysize-30);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"then press enter",cparam.xsize-220,cparam.ysize-(30+18));
        }
        else if(couleur == 1)
        {
            glColor3ub(0,255,0);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Choose forefinger's color",cparam.xsize-220,cparam.ysize-30);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"then press enter",cparam.xsize-220,cparam.ysize-(30+18));
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Press return for thumb",cparam.xsize-220,cparam.ysize-(30+18*2));
        }
        else
        {
            glColor3ub(255,0,0);
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Choose middle's color",cparam.xsize-220,cparam.ysize-(30));
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"then press enter",cparam.xsize-220,cparam.ysize-(30+18));
            texte(GLUT_BITMAP_HELVETICA_18,(char*)"Press return for forefinger",cparam.xsize-220,cparam.ysize-(30+18*2));
        }
    }
    else // switch to 3D mode and initialise the depth buffer
    {
        argDrawMode3D();
        argDraw3dCamera(0, 0);
        glClearDepth(1.0);
        glClear(GL_DEPTH_BUFFER_BIT);
    }

    /// Object visibility
    if(visible == false ) // no pattern seen yet, or a re-capture was requested
    {
        // search for a visible pattern
        for (int i=0; i<2; i++) // for each initialised pattern
        {
            k = -1; // k = index of the best-confidence match, -1 = not visible
            for (int j=0; j<marker_num; j++) // for each marker found by arDetectMarker
            {
                if (object[i].patt_id == marker_info[j].id)
                {
                    if (k == -1)
                    {
                        k = j;
                    }
                    else if (marker_info[k].cf < marker_info[j].cf)
                    {
                        k = j;
                    }
                }
            }
            object[i].visible = k;
            if (k >= 0)
            {
                visible = true;
                arGetTransMat(&marker_info[k], object[i].center, object[i].width,object[i].trans);
                /* NOTE(review): these prints read marker_info-> (first marker),
                   not marker_info[k] -- verify intent. */
                printf("object[%d] center[%f, %f]\n", i, marker_info->pos[0], marker_info->pos[1]);
                printf("object[%d] hg[%f, %f]\n", i, marker_info->vertex[0][0], marker_info->vertex[0][1]);
                printf("object[%d] hd[%f, %f]\n", i, marker_info->vertex[1][0], marker_info->vertex[1][1]);
                printf("object[%d] bg[%f, %f]\n", i, marker_info->vertex[2][0], marker_info->vertex[2][1]);
                printf("object[%d] bd[%f, %f]\n", i, marker_info->vertex[3][0], marker_info->vertex[3][1]);
                // toggle the scan button state
                if(scan.isVisible(0)==false)
                    scan.setVisible(true,0);
                if(scan.isVisible(1)==true)
                    scan.setVisible(false,1);
                // A pattern was seen: create a new instance of its object and
                // store it among the on-screen objects.
                onscreen_object.push_back(Object3D(mesh.at(object[i].model_id), object[i].center, object[i].trans, object[i].width));
            }
        }
    }

    // Display is no longer driven by a pattern's "visible" flag: the vector is
    // either empty or holds the objects to draw, so always call display().
    display(true);

    if(menuShow==true)
        menu.show();
    if(!calib)
        scan.show();
    help.show();
    quit.show();

    argSwapBuffers(); /// present the rendered frame
}
/* HMD calibration loop: draws a crosshair at the current calibration
 * position, finds the best-confidence target marker, and when the target's
 * pose can be estimated marks it visible and draws the attention cursor.
 * On video failure it restores the saved GLUT callbacks and fitting mode.
 * Uses file-scope state: thresh, target_id/center/width/trans,
 * target_visible, calib_pos, co1, co2, left_right, gCalibPostFunc. */
static void argCalibMainFunc(void)
{
    ARUint8      *dataPtr;
    ARMarkerInfo *marker_info;
    int           marker_num;
    int           i, j;
    double        cfmax;
    double        err;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    target_visible = 0;

    /* detect the markers in the video frame; on failure notify the caller
       and restore the application's own callbacks and fitting mode. */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        (*gCalibPostFunc)( NULL, NULL );
        arFittingMode = arFittingModeBak;

#ifndef ANDROID
        glutKeyboardFunc( gKeyFunc );
        glutMouseFunc( gMouseFunc );
        glutIdleFunc( gMainFunc );
        glutDisplayFunc( gMainFunc );
#endif
        return;
    }
    arVideoCapNext();

    glClearColor( 0.0, 0.0, 0.0, 0.0 );
#ifndef ANDROID
    glClear(GL_COLOR_BUFFER_BIT);
#endif

    /* if the debug mode is on draw squares around the detected squares in the video image */
    if( arDebug && gMiniXnum >= 2 && gMiniYnum >= 1 ) {
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 2, 1 );
        else
            argDispImage( arImage, 2, 1);

        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 3.0 );
        for( i = 0; i < marker_num; i++ ) {
            if( marker_info[i].id < 0 ) continue;
            argDrawSquare( marker_info[i].vertex, 2, 1 );
        }
        glLineWidth( 1.0 );
    }

    /* Draw the white crosshair at the current calibration position for the
       eye being calibrated (left_right selects left/right display). */
    if( left_right == 0 ) argDraw2dLeft();
    else                  argDraw2dRight();
    glLineWidth( 3.0 );
    glColor3f( 1.0, 1.0, 1.0 );
    argLineSegHMD( 0, calib_pos[co1][1], AR_HMD_XSIZE, calib_pos[co1][1] );
    argLineSegHMD( calib_pos[co1][0], 0, calib_pos[co1][0], AR_HMD_YSIZE );
    glLineWidth( 1.0 );
    argDrawMode2D();

    /* Pick the highest-confidence marker matching the target id. */
    cfmax = 0.0;
    j = -1;
    for( i = 0; i < marker_num; i++ ) {
        if( marker_info[i].id != target_id ) continue;
        if( marker_info[i].cf > cfmax ) {
            cfmax = marker_info[i].cf;
            j = i;
        }
    }
    if( j < 0 ) {
        argSwapBuffers();
        return;
    }

    /* Estimate the target pose; a non-negative error means success. */
    err = arGetTransMat(&marker_info[j], target_center, target_width, target_trans);
    if( err >= 0.0 ) {
        target_visible = 1;

        if( left_right == 0 ) argDraw2dLeft();
        else                  argDraw2dRight();
        argDrawAttention( calib_pos[co1], co2 );
        argDrawMode2D();

        /* Highlight the tracked target square in green in debug view. */
        if( arDebug && gMiniXnum >= 2 && gMiniYnum >= 1 ) {
            glColor3f( 0.0, 1.0, 0.0 );
            glLineWidth( 3.0 );
            argDrawSquare( marker_info[j].vertex, 1, 1 );
            glLineWidth( 1.0 );
        }
    }
    argSwapBuffers();
}
/* Game main loop (NyARToolkit-based falling-piece game): tracks the marker,
 * advances game state on a 1 s tick, and throttles the frame rate to FPS.
 * Uses file-scope state: thresh, patt_id, patt_center, patt_width,
 * patt_trans, nyobj, isFirst, GameOver, score, f, p, fps. */
void MainLoop()
{
    //QueryPerformanceFrequency(&nFreq);
    //QueryPerformanceCounter(&nBefore);
    DWORD StartTime,EndTime,PassTime;
    double l_StartTime,l_EndTime,l_PassTime;
#ifdef _WIN32
    StartTime=timeGetTime();            /* milliseconds */
#else
    l_StartTime=gettimeofday_sec();     /* presumably seconds -- see note below */
#endif
    ARUint8 *image;
    ARMarkerInfo *marker_info;
    int marker_num;
    int j,k;

    /* Grab a video frame; retry next tick if none is ready. */
    if( (image = (ARUint8*)arVideoGetImage() )==NULL){
        arUtilSleep(2);
        return;
    }
    argDrawMode2D();
    argDispImage(image, 0, 0);

    /* Detect markers in the frame. */
    if(arDetectMarker(image, thresh, &marker_info, &marker_num) < 0){
        CleanUp();
        exit(0);
    }
    arVideoCapNext();

    /* Pick the highest-confidence marker matching patt_id. */
    k=-1;
    for(j=0;j<marker_num;j++){
        if(patt_id==marker_info[j].id){
            k = (k==-1) ? j : k;
            k = (marker_info[k].cf < marker_info[j].cf) ? j: k;
        }
    }

    if(k!=-1) {
        /* First sighting uses one-shot pose estimation; afterwards the
           previous pose seeds the continuous estimator. */
        if(isFirst==true)
            nyar_NyARTransMat_O2_transMat(nyobj,&marker_info[k],patt_center,patt_width,patt_trans);
        else
            nyar_NyARTransMat_O2_transMatCont(nyobj,&marker_info[k],patt_trans,patt_center,patt_width,patt_trans);
        isFirst=false;

        if(GameOver==false){
            /* Game tick: once per second, drop the piece, clear/shift
               completed rows into the score, and re-check for game over. */
            if(arUtilTimer()>1.0){
                MovePiece(3,f,p);
                score+=f.ShiftPiece(f.deletePiece());
                arUtilTimerReset();
                GameOver=GameOverCheck(f,p);
            }
        }
        else{
            /* After game over, restart automatically after 15 s. */
            if(arUtilTimer()>15.0)
                InitGame();
        }
        DrawObject();
    }
    argSwapBuffers();

    /* Frame-rate cap: sleep out the remainder of the 1000/FPS ms budget. */
#ifdef _WIN32
    EndTime=timeGetTime();
    PassTime=EndTime-StartTime;
    (1000/FPS>PassTime)?Wait(1000/FPS-PassTime):Wait(0);
    FPSCount(&fps);
    printf("FPS=%d\n",fps);
#else
    l_EndTime=gettimeofday_sec();
    l_PassTime=l_EndTime-l_StartTime;
    /* NOTE(review): gettimeofday_sec() appears to return seconds while the
       budget 1000/FPS is in milliseconds -- verify Wait()'s expected unit. */
    ((double)(1000/FPS)>l_PassTime)?Wait((double)1000/FPS-l_PassTime):Wait(0);
    FPSCount(&fps);
    printf("FPS=%d\n",fps);
#endif
}
/* Main loop (collision demo): detects all known patterns, tracks each with
 * one-shot or continuous pose estimation, flags a collision between
 * objects 0 and 1 when closer than COLLIDE_DIST, and draws the scene.
 * Uses file-scope state: count, thresh, object[], objectnum, COLLIDE_DIST. */
static void mainLoop(void)
{
    ARUint8      *dataPtr;
    ARMarkerInfo *marker_info;
    int           marker_num;
    int           i,j,k;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    /*draw the video*/
    argDrawMode2D();
    argDispImage( dataPtr, 0,0 );

    /* capture the next video frame */
    arVideoCapNext();

    glColor3f( 1.0, 0.0, 0.0 );
    glLineWidth(6.0);

    /* detect the markers in the video frame */
    if(arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    /* Outline every detected square in red. */
    for( i = 0; i < marker_num; i++ ) {
        argDrawSquare(marker_info[i].vertex,0,0);
    }

    /* check for known patterns */
    for( i = 0; i < objectnum; i++ ) {
        k = -1;
        for( j = 0; j < marker_num; j++ ) {
            if( object[i].id == marker_info[j].id) {
                /* you've found a pattern: re-outline it in green */
                //printf("Found pattern: %d ",patt_id);
                glColor3f( 0.0, 1.0, 0.0 );
                argDrawSquare(marker_info[j].vertex,0,0);

                if( k == -1 ) k = j;
                else /* make sure you have the best pattern (highest confidence factor) */
                    if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
        if( k == -1 ) {
            object[i].visible = 0;
            continue;
        }

        /* calculate the transform for each marker: one-shot on first
           sighting, continuous (seeded by the previous pose) afterwards */
        if( object[i].visible == 0 ) {
            arGetTransMat(&marker_info[k],
                          object[i].marker_center, object[i].marker_width,
                          object[i].trans);
        } else {
            arGetTransMatCont(&marker_info[k], object[i].trans,
                          object[i].marker_center, object[i].marker_width,
                          object[i].trans);
        }
        object[i].visible = 1;
    }

    /*check for object collisions between marker 0 and 1 */
    if(object[0].visible && object[1].visible) {
        if(checkCollisions(object[0],object[1],COLLIDE_DIST)) {
            object[0].collide = 1;
            object[1].collide = 1;
        } else {
            object[0].collide = 0;
            object[1].collide = 0;
        }
    }

    /* draw the AR graphics */
    draw( object, objectnum );

    /*swap the graphics buffers*/
    argSwapBuffers();
}
/* main loop */ static void mainLoop(void) { ARUint8 *dataPtr; ARMarkerInfo *marker_info; int marker_num; int i, j, k; /* grab a vide frame */ if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) { arUtilSleep(2); return; } if( count == 0 ) arUtilTimerReset(); count++; argDrawMode2D(); argDispImage( dataPtr, 0,0 ); /* detect the markers in the video frame */ if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) { cleanup(); exit(0); } arVideoCapNext(); argDrawMode3D(); argDraw3dCamera( 0, 0 ); glClearDepth( 1.0 ); glClear(GL_DEPTH_BUFFER_BIT); /* check for object visibility */ for( i = 0; i < 2; i++ ) { k = -1; for( j = 0; j < marker_num; j++ ) { if( object[i].patt_id == marker_info[j].id ) { if( k == -1 ) k = j; else if( marker_info[k].cf < marker_info[j].cf ) k = j; } } object[i].visible = k; if( k >= 0 ) { arGetTransMat(&marker_info[k], object[i].center, object[i].width, object[i].trans); draw( object[i].model_id, object[i].trans ); } } argSwapBuffers(); if( object[0].visible >= 0 && object[1].visible >= 0 ) { double wmat1[3][4], wmat2[3][4]; arUtilMatInv(object[0].trans, wmat1); arUtilMatMul(wmat1, object[1].trans, wmat2); for( j = 0; j < 3; j++ ) { for( i = 0; i < 4; i++ ) printf("%8.4f ", wmat2[j][i]); printf("\n"); } printf("\n\n"); } }
/* Mouse handler for the camera-distortion calibration tool.
 * State machine (file-scope `status`):
 *   0 = live capture; 1 = marking grid points on a frozen frame;
 *   2 = reviewing results after calc_distortion().
 * Right button: advance/abort a stage. Left button: freeze a frame,
 * mark a grid point (drag a box, centroid of thresholded pixels), or
 * step through the review screens. */
static void mouseEvent(int button, int state, int x, int y)
{
    unsigned char   *p, *p1;
    int             ssx, ssy, eex, eey;
    int             i, j, k;

    if( button == GLUT_RIGHT_BUTTON && state == GLUT_UP ) {
        if( status == 0 ) {
            /* Finish capturing: compute distortion from all recorded frames,
               or quit immediately if nothing was captured. */
            arVideoCapStop();
            arVideoClose();
            if( patt.loop_num > 0 ) {
                calc_distortion( &patt, xsize, ysize, dist_factor );
                printf("--------------\n");
                printf("Center X: %f\n", dist_factor[0]);
                printf(" Y: %f\n", dist_factor[1]);
                printf("Dist Factor: %f\n", dist_factor[2]);
                printf("Size Adjust: %f\n", dist_factor[3]);
                printf("--------------\n");
                status = 2;
                check_num = 0;
                print_comment(5);
            }
            else {
                glutDestroyWindow( win );
                exit(0);
            }
        }
        else if( status == 1 ) {
            /* Abort the current frozen frame: discard its storage and
               resume live capture. */
            if( patt.loop_num == 0 ) {printf("error!!\n"); exit(0);}
            patt.loop_num--;
            free( patt.point[patt.loop_num] );
            free( patt.savedImage[patt.loop_num] );
            status = 0;
            point_num = 0;
            arVideoCapStart();

            if( patt.loop_num == 0 ) print_comment(0);
            else                     print_comment(4);
        }
    }

    if( button == GLUT_LEFT_BUTTON && state == GLUT_DOWN ) {
        if( status == 1 && point_num < patt.h_num*patt.v_num ) {
            /* Start dragging a selection box; seed clipImage's first pixel
               with the inverted-and-thresholded brightness at the click.
               Channel offsets depend on the compiled-in pixel format. */
            sx = ex = x;
            sy = ey = y;

            p = &(patt.savedImage[patt.loop_num-1][(y*xsize+x)*AR_PIX_SIZE]);
            p1 = &(clipImage[0]);
#ifdef AR_PIX_FORMAT_BGRA
            k = (255*3 - (*(p+0) + *(p+1) + *(p+2))) / 3;
            if( k < thresh ) k = 0;
            else k = 255;
            *(p1+0) = *(p1+1) = *(p1+2) = k;
#endif
#ifdef AR_PIX_FORMAT_ABGR
            k = (255*3 - (*(p+1) + *(p+2) + *(p+3))) / 3;
            if( k < thresh ) k = 0;
            else k = 255;
            *(p1+1) = *(p1+2) = *(p1+3) = k;
#endif
#ifdef AR_PIX_FORMAT_BGR
            k = (255*3 - (*(p+0) + *(p+1) + *(p+2))) / 3;
            if( k < thresh ) k = 0;
            else k = 255;
            *(p1+0) = *(p1+1) = *(p1+2) = k;
#endif
#ifdef AR_PIX_FORMAT_RGBA
            k = (255*3 - (*(p+0) + *(p+1) + *(p+2))) / 3;
            if( k < thresh ) k = 0;
            else k = 255;
            *(p1+0) = *(p1+1) = *(p1+2) = k;
#endif
#ifdef AR_PIX_FORMAT_RGB
            k = (255*3 - (*(p+0) + *(p+1) + *(p+2))) / 3;
            if( k < thresh ) k = 0;
            else k = 255;
            *(p1+0) = *(p1+1) = *(p1+2) = k;
#endif
        }
    }

    if( button == GLUT_LEFT_BUTTON && state == GLUT_UP ) {
        if( status == 0 && patt.loop_num < LOOP_MAX ) {
            /* Freeze the next available frame: copy it into newly allocated
               storage and allocate the grid-point array for this frame. */
            while( (p = (unsigned char *)arVideoGetImage()) == NULL ) {
                arUtilSleep(2);
            }
#ifdef USE_TEXMAP
            patt.savedImage[patt.loop_num] = (unsigned char *)malloc( xsize*tex1Ysize*AR_PIX_SIZE );
#else
            patt.savedImage[patt.loop_num] = (unsigned char *)malloc( xsize*ysize*AR_PIX_SIZE );
#endif
            if( patt.savedImage[patt.loop_num] == NULL ) exit(0);

            p1 = patt.savedImage[patt.loop_num];
            for(i=0;i<xsize*ysize*AR_PIX_SIZE;i++) *(p1++) = *(p++);
            arVideoCapStop();

            patt.point[patt.loop_num] = (CALIB_COORD_T *)malloc( sizeof(CALIB_COORD_T)*patt.h_num*patt.v_num );
            if( patt.point[patt.loop_num] == NULL ) exit(0);

            patt.loop_num++;
            status = 1;
            sx = sy = ex= ey = -1;

            print_comment(1);
        }
        else if( status == 1 && point_num == patt.h_num*patt.v_num ) {
            /* All grid points for this frame marked: dump them and resume
               live capture for the next frame. */
            status = 0;
            point_num = 0;
            arVideoCapStart();

            printf("### No.%d ###\n", patt.loop_num);
            for( j = 0; j < patt.v_num; j++ ) {
                for( i = 0; i < patt.h_num; i++ ) {
                    printf("%2d, %2d: %6.2f, %6.2f\n", i+1, j+1,
                           patt.point[patt.loop_num-1][j*patt.h_num+i].x_coord,
                           patt.point[patt.loop_num-1][j*patt.h_num+i].y_coord);
                }
            }
            printf("\n\n");
            if( patt.loop_num < LOOP_MAX ) print_comment(4);
            else                           print_comment(6);
        }
        else if( status == 1 ) {
            /* Finish the drag: normalise the box corners, then locate the
               marked dot as the intensity-weighted centroid of the
               thresholded clipImage pixels inside the box (weights read
               from channel offset +1). */
            if( sx < ex ) { ssx = sx; eex = ex; }
            else          { ssx = ex; eex = sx; }
            if( sy < ey ) { ssy = sy; eey = ey; }
            else          { ssy = ey; eey = sy; }

            patt.point[patt.loop_num-1][point_num].x_coord = 0.0;
            patt.point[patt.loop_num-1][point_num].y_coord = 0.0;
            p = clipImage;
            k = 0;
            for( j = 0; j < (eey-ssy+1); j++ ) {
                for( i = 0; i < (eex-ssx+1); i++ ) {
                    patt.point[patt.loop_num-1][point_num].x_coord += i * *(p+1);
                    patt.point[patt.loop_num-1][point_num].y_coord += j * *(p+1);
                    k += *(p+1);
                    p += AR_PIX_SIZE;
                }
            }
            if( k != 0 ) {
                /* Convert box-relative centroid to image coordinates. */
                patt.point[patt.loop_num-1][point_num].x_coord /= k;
                patt.point[patt.loop_num-1][point_num].y_coord /= k;
                patt.point[patt.loop_num-1][point_num].x_coord += ssx;
                patt.point[patt.loop_num-1][point_num].y_coord += ssy;
                point_num++;
            }
            sx = sy = ex= ey = -1;

            printf(" # %d/%d\n", point_num, patt.h_num*patt.v_num);
            if( point_num == patt.h_num*patt.v_num ) print_comment(2);
        }
        else if( status == 2 ) {
            /* Review mode: step through captured frames; after the last one
               run the intrinsic-parameter fit (needs >= 2 frames), save,
               and exit. */
            check_num++;
            if( check_num == patt.loop_num ) {
                if(patt.loop_num >= 2) {
                    if( calc_inp(&patt, dist_factor, xsize, ysize, mat) < 0 ) {
                        printf("Calibration failed.\n");
                        exit(0);
                    }
                    save_param();
                }
                glutDestroyWindow( win );
                exit(0);
            }
            if( check_num+1 == patt.loop_num ) {
                printf("\nLeft Mouse Button: Next Step.\n");
            }
            else {
                printf(" %d/%d.\n", check_num+1, patt.loop_num);
            }
        }
    }
}
/* Dual-window render loop (buffer-based video API): detects the single
 * pattern patt_id, draws the camera or debug image in window w1 and the raw
 * image in w2, overlays FPS and pose error, and draws the 3D content when
 * the marker is matched with confidence > 0.7.
 * Uses file-scope state: arHandle, ar3DHandle, w1/w2, vp1/vp2, count, fps,
 * errValue, ysize, patt_id, patt_width. */
static void mainLoop(void)
{
    AR2VideoBufferT *buff;
    ARMarkerInfo    *markerInfo;
    int              markerNum;
    ARdouble         patt_trans[3][4];
    ARdouble         err;
    int              debugMode;
    int              j, k;

    /* grab a video frame */
    buff = arVideoGetImage();
    if (!buff || !buff->fillFlag) {
        arUtilSleep(2);
        return;
    }

    /* detect the markers in the video frame */
    if( arDetectMarker(arHandle, buff) < 0 ) {
        cleanup();
        exit(0);
    }

    argSetWindow(w1);
    arGetDebugMode(arHandle, &debugMode);
    if (debugMode == AR_DEBUG_ENABLE) {
        int imageProcMode;
        /* Debug view is the binarised (mono) label image, so the viewport
           pixel format must be switched to match. */
        argViewportSetPixFormat(vp1, AR_PIXEL_FORMAT_MONO); // Drawing the debug image.
        argDrawMode2D(vp1);
        arGetImageProcMode(arHandle, &imageProcMode);
        if (imageProcMode == AR_IMAGE_PROC_FRAME_IMAGE) argDrawImage(arHandle->labelInfo.bwImage);
        else argDrawImageHalf(arHandle->labelInfo.bwImage);
    } else {
        AR_PIXEL_FORMAT pixFormat;
        arGetPixelFormat(arHandle, &pixFormat);
        argViewportSetPixFormat(vp1, pixFormat); // Drawing the input image.
        argDrawMode2D(vp1);
        argDrawImage(buff->buff);
    }
    /* Window 2 always shows the raw camera image. */
    argSetWindow(w2);
    argDrawMode2D(vp2);
    argDrawImage(buff->buff);
    argSetWindow(w1);

    /* Refresh the FPS string every 10 frames. */
    if( count % 10 == 0 ) {
        sprintf(fps, "%f[fps]", 10.0/arUtilTimer());
        arUtilTimerReset();
    }
    count++;
    glColor3f(0.0f, 1.0f, 0.0f);
    argDrawStringsByIdealPos(fps, 10, ysize-30);

    markerNum = arGetMarkerNum( arHandle );
    if( markerNum == 0 ) {
        argSetWindow(w1);
        argSwapBuffers();
        argSetWindow(w2);
        argSwapBuffers();
        return;
    }

    /* check for object visibility: best-confidence match for patt_id,
       where the first candidate must clear a 0.7 threshold. */
    markerInfo = arGetMarker( arHandle );
    k = -1;
    for( j = 0; j < markerNum; j++ ) {
        //ARLOG("ID=%d, CF = %f\n", markerInfo[j].id, markerInfo[j].cf);
        if( patt_id == markerInfo[j].id ) {
            if( k == -1 ) {
                if (markerInfo[j].cf > 0.7) k = j;
            } else if (markerInfo[j].cf > markerInfo[k].cf) k = j;
        }
    }
    if( k == -1 ) {
        argSetWindow(w1);
        argSwapBuffers();
        argSetWindow(w2);
        argSwapBuffers();
        return;
    }

    /* Estimate the marker->camera transform and overlay the fit error. */
    err = arGetTransMatSquare(ar3DHandle, &(markerInfo[k]), patt_width, patt_trans);
    sprintf(errValue, "err = %f", err);
    glColor3f(0.0f, 1.0f, 0.0f);
    argDrawStringsByIdealPos(fps, 10, ysize-30);
    argDrawStringsByIdealPos(errValue, 10, ysize-60);
    //ARLOG("err = %f\n", err);
    draw(patt_trans);

    argSetWindow(w1);
    argSwapBuffers();
    argSetWindow(w2);
    argSwapBuffers();
}
/* Per-frame update for the multi-marker sample: for each configured square
 * marker, find its best-confidence detection (template or matrix type),
 * estimate/filter its pose, and notify the virtual environment of
 * appear/update/disappear transitions. Uses file-scope state: gARHandle,
 * gAR3DHandle, gARTImage, gCallCountMarkerDetect, markersSquare,
 * markersSquareCount, useContPoseEstimation. */
static void mainLoop(void)
{
    static int ms_prev;
    int ms;
    float s_elapsed;
    ARUint8 *image;
    ARMarkerInfo* markerInfo;
    int markerNum;
    ARdouble err;
    int i, j, k;

    // Calculate time delta. (s_elapsed is computed but not used below.)
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    ms_prev = ms;

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image;        // Save the fetched image.
        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

        // Detect the markers in the video frame.
        if (arDetectMarker(gARHandle, gARTImage) < 0) {
            exit(-1);
        }

        // Get detected markers
        markerInfo = arGetMarker(gARHandle);
        markerNum = arGetMarkerNum(gARHandle);

        // Update markers.
        for (i = 0; i < markersSquareCount; i++) {
            markersSquare[i].validPrev = markersSquare[i].valid;

            // Check through the marker_info array for highest confidence
            // visible marker matching our preferred pattern.
            k = -1;
            if (markersSquare[i].patt_type == AR_PATTERN_TYPE_TEMPLATE) {
                /* Template (pictorial) pattern: match on idPatt/cfPatt,
                   then promote the winner's Patt fields to the generic
                   id/cf/dir fields used below. */
                for (j = 0; j < markerNum; j++) {
                    if (markersSquare[i].patt_id == markerInfo[j].idPatt) {
                        if (k == -1) {
                            if (markerInfo[j].cfPatt >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
                        } else if (markerInfo[j].cfPatt > markerInfo[k].cfPatt) k = j; // Higher confidence marker detected.
                    }
                }
                if (k != -1) {
                    markerInfo[k].id = markerInfo[k].idPatt;
                    markerInfo[k].cf = markerInfo[k].cfPatt;
                    markerInfo[k].dir = markerInfo[k].dirPatt;
                }
            } else {
                /* Matrix (2D-barcode) pattern: same, using Matrix fields. */
                for (j = 0; j < markerNum; j++) {
                    if (markersSquare[i].patt_id == markerInfo[j].idMatrix) {
                        if (k == -1) {
                            if (markerInfo[j].cfMatrix >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
                        } else if (markerInfo[j].cfMatrix > markerInfo[k].cfMatrix) k = j; // Higher confidence marker detected.
                    }
                }
                if (k != -1) {
                    markerInfo[k].id = markerInfo[k].idMatrix;
                    markerInfo[k].cf = markerInfo[k].cfMatrix;
                    markerInfo[k].dir = markerInfo[k].dirMatrix;
                }
            }

            if (k != -1) {
                markersSquare[i].valid = TRUE;
                ARLOGd("Marker %d matched pattern %d.\n", i, markerInfo[k].id);
                // Get the transformation between the marker and the real camera into trans.
                if (markersSquare[i].validPrev && useContPoseEstimation) {
                    /* Marker was visible last frame: seed with previous pose. */
                    err = arGetTransMatSquareCont(gAR3DHandle, &(markerInfo[k]), markersSquare[i].trans, markersSquare[i].marker_width, markersSquare[i].trans);
                } else {
                    err = arGetTransMatSquare(gAR3DHandle, &(markerInfo[k]), markersSquare[i].marker_width, markersSquare[i].trans);
                }
            } else {
                markersSquare[i].valid = FALSE;
            }

            if (markersSquare[i].valid) {

                // Filter the pose estimate.
                if (markersSquare[i].ftmi) {
                    if (arFilterTransMat(markersSquare[i].ftmi, markersSquare[i].trans, !markersSquare[i].validPrev) < 0) {
                        ARLOGe("arFilterTransMat error with marker %d.\n", i);
                    }
                }

                if (!markersSquare[i].validPrev) {
                    // Marker has become visible, tell any dependent objects.
                    VirtualEnvironmentHandleARMarkerAppeared(i);
                }

                // We have a new pose, so set that.
                arglCameraViewRH((const ARdouble (*)[4])markersSquare[i].trans, markersSquare[i].pose.T, 1.0f /*VIEW_SCALEFACTOR*/);
                // Tell any dependent objects about the update.
                VirtualEnvironmentHandleARMarkerWasUpdated(i, markersSquare[i].pose);

            } else {

                if (markersSquare[i].validPrev) {
                    // Marker has ceased to be visible, tell any dependent objects.
                    VirtualEnvironmentHandleARMarkerDisappeared(i);
                }
            }
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    } else {
        arUtilSleep(2);
    }
}
/*
 * Class:     net_sourceforge_jartoolkit_core_JARToolKit
 * Method:    utilSleep
 * Signature: (I)V
 *
 * Thin JNI wrapper: suspends the calling thread for msec milliseconds via
 * ARToolKit's arUtilSleep(). (Header comment previously named the wrong
 * class, com_clab_artoolkit_port_JARToolkit; corrected to match the
 * exported symbol.)
 */
JNIEXPORT void JNICALL Java_net_sourceforge_jartoolkit_core_JARToolKit_utilSleep(JNIEnv *, jclass, jint msec)
{
    arUtilSleep( msec );
}