bool ARTApp::detectMarkerAndPose(ARUint8 *image, ARHandle *arHandle, AR3DHandle *ar3DHandle, int pattID, int pattWidth, ARdouble pattTrans[3][4])
{
    // Detect the markers in the video frame.
    if (arDetectMarker(arHandle, image) < 0) {
        return false;
    }

    // Check through the marker_info array for the highest-confidence
    // visible marker matching our preferred pattern.
    int k = -1;
    for (int j = 0; j < arHandle->marker_num; j++) {
        if (arHandle->markerInfo[j].id == pattID) {
            if (k == -1) k = j; // First marker detected.
            else if (arHandle->markerInfo[j].cf > arHandle->markerInfo[k].cf) k = j; // Higher confidence marker detected.
        }
    }

    if (k != -1) {
        // Get the transformation between the marker and the real camera into pattTrans.
        arGetTransMatSquare(ar3DHandle, &(arHandle->markerInfo[k]), pattWidth, pattTrans);
        return true;
    }
    return false;
}
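For context, here is a minimal per-frame usage sketch of detectMarkerAndPose() above. The driver function updateTracking() and the globals gApp, gARHandle, gAR3DHandle, gPattID, gPattWidth and gModelview are illustrative assumptions, not part of the original sample; the sketch also assumes the ARToolKit 5.x arVideoGetImage() variant that returns an ARUint8* frame, as in the GLUT examples that follow.

// Sketch only: drive the helper above once per captured frame.
// Assumed globals (set up at initialisation time, names hypothetical):
//   ARTApp *gApp; ARHandle *gARHandle; AR3DHandle *gAR3DHandle;
//   int gPattID; int gPattWidth; ARdouble gModelview[16];
static void updateTracking(void)
{
    ARUint8 *image;
    ARdouble pattTrans[3][4];

    if ((image = arVideoGetImage()) == NULL) return; // No new frame available yet.

    if (gApp->detectMarkerAndPose(image, gARHandle, gAR3DHandle, gPattID, gPattWidth, pattTrans)) {
        // Marker seen this frame: convert the 3x4 marker-to-camera transform
        // into an OpenGL-style modelview matrix for rendering.
        arglCameraViewRH((const ARdouble (*)[4])pattTrans, gModelview, 1.0 /*VIEW_SCALEFACTOR*/);
    }
}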
void ofxArtool5::calcMarkerTransformation(int & mId){
    if(mId!=-1){
        // Get the transformation between the marker and the real camera into gPatt_trans.
        ARdouble err = arGetTransMatSquare(gAR3DHandle, &(gARHandle->markerInfo[mId]), gPatt_width, gPatt_trans);
        bPattFound=true;
    }else{
        bPattFound=false;
    }
}
static void mainLoop(void)
{
    static int ms_prev;
    int ms;
    float s_elapsed;
    ARUint8 *image;
    ARMarkerInfo* markerInfo;
    int markerNum;
    ARdouble err;
    int i, j, k;

    // Calculate time delta.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    ms_prev = ms;

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image;  // Save the fetched image.
        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

        // Detect the markers in the video frame.
        if (arDetectMarker(gARHandle, gARTImage) < 0) {
            exit(-1);
        }

        // Get detected markers.
        markerInfo = arGetMarker(gARHandle);
        markerNum = arGetMarkerNum(gARHandle);

        // Update markers.
        for (i = 0; i < markersSquareCount; i++) {
            markersSquare[i].validPrev = markersSquare[i].valid;

            // Check through the marker_info array for highest confidence
            // visible marker matching our preferred pattern.
            k = -1;
            if (markersSquare[i].patt_type == AR_PATTERN_TYPE_TEMPLATE) {
                for (j = 0; j < markerNum; j++) {
                    if (markersSquare[i].patt_id == markerInfo[j].idPatt) {
                        if (k == -1) {
                            if (markerInfo[j].cfPatt >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
                        } else if (markerInfo[j].cfPatt > markerInfo[k].cfPatt) k = j; // Higher confidence marker detected.
                    }
                }
                if (k != -1) {
                    markerInfo[k].id = markerInfo[k].idPatt;
                    markerInfo[k].cf = markerInfo[k].cfPatt;
                    markerInfo[k].dir = markerInfo[k].dirPatt;
                }
            } else {
                for (j = 0; j < markerNum; j++) {
                    if (markersSquare[i].patt_id == markerInfo[j].idMatrix) {
                        if (k == -1) {
                            if (markerInfo[j].cfMatrix >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
                        } else if (markerInfo[j].cfMatrix > markerInfo[k].cfMatrix) k = j; // Higher confidence marker detected.
                    }
                }
                if (k != -1) {
                    markerInfo[k].id = markerInfo[k].idMatrix;
                    markerInfo[k].cf = markerInfo[k].cfMatrix;
                    markerInfo[k].dir = markerInfo[k].dirMatrix;
                }
            }
            if (k != -1) {
                markersSquare[i].valid = TRUE;
                ARLOGd("Marker %d matched pattern %d.\n", i, markerInfo[k].id);
                // Get the transformation between the marker and the real camera into trans.
                if (markersSquare[i].validPrev && useContPoseEstimation) {
                    err = arGetTransMatSquareCont(gAR3DHandle, &(markerInfo[k]), markersSquare[i].trans, markersSquare[i].marker_width, markersSquare[i].trans);
                } else {
                    err = arGetTransMatSquare(gAR3DHandle, &(markerInfo[k]), markersSquare[i].marker_width, markersSquare[i].trans);
                }
            } else {
                markersSquare[i].valid = FALSE;
            }

            if (markersSquare[i].valid) {
                // Filter the pose estimate.
                if (markersSquare[i].ftmi) {
                    if (arFilterTransMat(markersSquare[i].ftmi, markersSquare[i].trans, !markersSquare[i].validPrev) < 0) {
                        ARLOGe("arFilterTransMat error with marker %d.\n", i);
                    }
                }
                if (!markersSquare[i].validPrev) {
                    // Marker has become visible, tell any dependent objects.
                    VirtualEnvironmentHandleARMarkerAppeared(i);
                }
                // We have a new pose, so set that.
                arglCameraViewRH((const ARdouble (*)[4])markersSquare[i].trans, markersSquare[i].pose.T, 1.0f /*VIEW_SCALEFACTOR*/);
                // Tell any dependent objects about the update.
                VirtualEnvironmentHandleARMarkerWasUpdated(i, markersSquare[i].pose);
            } else {
                if (markersSquare[i].validPrev) {
                    // Marker has ceased to be visible, tell any dependent objects.
                    VirtualEnvironmentHandleARMarkerDisappeared(i);
                }
            }
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    } else {
        arUtilSleep(2);
    }
}
JNIEXPORT void JNICALL JNIFUNCTION_NATIVE(nativeVideoFrame(JNIEnv* env, jobject obj, jbyteArray pinArray))
{
    int i, j, k;
    jbyte* inArray;
    ARdouble err;

    if (!videoInited) {
#ifdef DEBUG
        LOGI("nativeVideoFrame !VIDEO\n");
#endif
        return; // No point in trying to track until video is inited.
    }
    if (!gARViewInited) {
#ifdef DEBUG
        LOGI("nativeVideoFrame !ARVIEW\n");
#endif
        return; // Also, we won't track until the ARView has been inited.
    }
#ifdef DEBUG
    LOGI("nativeVideoFrame\n");
#endif

    // Copy the incoming YUV420 image from pinArray.
    env->GetByteArrayRegion(pinArray, 0, gVideoFrameSize, (jbyte *)gVideoFrame);

    // As of ARToolKit v5.0, NV21 format video frames are handled natively,
    // and no longer require colour conversion to RGBA.
    // If you still require RGBA format information from the video,
    // here is where you'd do the conversion:
    // color_convert_common(gVideoFrame, gVideoFrame + videoWidth*videoHeight, videoWidth, videoHeight, myRGBABuffer);

    videoFrameNeedsPixelBufferDataUpload = true; // Note that buffer needs uploading. (Upload must be done on OpenGL context's thread.)

    // Run marker detection on frame.
    arDetectMarker(arHandle, gVideoBuffer);

    // Get detected markers.
    ARMarkerInfo* markerInfo = arGetMarker(arHandle);
    int markerNum = arGetMarkerNum(arHandle);

    // Update markers.
    for (i = 0; i < markersSquareCount; i++) {
        markersSquare[i].validPrev = markersSquare[i].valid;

        // Check through the marker_info array for highest confidence
        // visible marker matching our preferred pattern.
        k = -1;
        if (markersSquare[i].patt_type == AR_PATTERN_TYPE_TEMPLATE) {
            for (j = 0; j < markerNum; j++) {
                if (markersSquare[i].patt_id == markerInfo[j].idPatt) {
                    if (k == -1) {
                        if (markerInfo[j].cfPatt >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
                    } else if (markerInfo[j].cfPatt > markerInfo[k].cfPatt) k = j; // Higher confidence marker detected.
                }
            }
            if (k != -1) {
                markerInfo[k].id = markerInfo[k].idPatt;
                markerInfo[k].cf = markerInfo[k].cfPatt;
                markerInfo[k].dir = markerInfo[k].dirPatt;
            }
        } else {
            for (j = 0; j < markerNum; j++) {
                if (markersSquare[i].patt_id == markerInfo[j].idMatrix) {
                    if (k == -1) {
                        if (markerInfo[j].cfMatrix >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
                    } else if (markerInfo[j].cfMatrix > markerInfo[k].cfMatrix) k = j; // Higher confidence marker detected.
                }
            }
            if (k != -1) {
                markerInfo[k].id = markerInfo[k].idMatrix;
                markerInfo[k].cf = markerInfo[k].cfMatrix;
                markerInfo[k].dir = markerInfo[k].dirMatrix;
            }
        }
        if (k != -1) {
            markersSquare[i].valid = TRUE;
#ifdef DEBUG
            LOGI("Marker %d matched pattern %d.\n", k, markerInfo[k].id);
#endif
            // Get the transformation between the marker and the real camera into trans.
            if (markersSquare[i].validPrev) {
                err = arGetTransMatSquareCont(ar3DHandle, &(markerInfo[k]), markersSquare[i].trans, markersSquare[i].marker_width, markersSquare[i].trans);
            } else {
                err = arGetTransMatSquare(ar3DHandle, &(markerInfo[k]), markersSquare[i].marker_width, markersSquare[i].trans);
            }
        } else {
            markersSquare[i].valid = FALSE;
        }

        if (markersSquare[i].valid) {
            // Filter the pose estimate.
            if (markersSquare[i].ftmi) {
                if (arFilterTransMat(markersSquare[i].ftmi, markersSquare[i].trans, !markersSquare[i].validPrev) < 0) {
                    LOGE("arFilterTransMat error with marker %d.\n", i);
                }
            }
            if (!markersSquare[i].validPrev) {
                // Marker has become visible, tell any dependent objects.
                //ARMarkerAppearedNotification
            }
            // We have a new pose, so set that.
            arglCameraViewRHf(markersSquare[i].trans, markersSquare[i].pose.T, 1.0f /*VIEW_SCALEFACTOR*/);
            // Tell any dependent objects about the update.
            //ARMarkerUpdatedPoseNotification
        } else {
            if (markersSquare[i].validPrev) {
                // Marker has ceased to be visible, tell any dependent objects.
                //ARMarkerDisappearedNotification
            }
        }
    }
}
static void mainLoop(void)
{
    static int ms_prev;
    int ms;
    float s_elapsed;
    ARUint8 *image;
    AR2VideoBufferT *movieBuffer;
    ARdouble err;
    int j, k;

    // Find out how long since mainLoop() last ran.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
    ms_prev = ms;

    // Grab a movie frame (if available).
    if ((movieBuffer = ar2VideoGetImage(gMovieVideo)) != NULL) {
        if (movieBuffer->buff && movieBuffer->fillFlag) gMovieImage = movieBuffer->buff;
    }

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image;  // Save the fetched image.
        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

        // Detect the markers in the video frame.
        if (arDetectMarker(gARHandle, gARTImage) < 0) {
            exit(-1);
        }

        // Check through the marker_info array for highest confidence
        // visible marker matching our preferred pattern.
        k = -1;
        for (j = 0; j < gARHandle->marker_num; j++) {
            if (gARHandle->markerInfo[j].id == gPatt_id) {
                if (k == -1) k = j; // First marker detected.
                else if (gARHandle->markerInfo[j].cf > gARHandle->markerInfo[k].cf) k = j; // Higher confidence marker detected.
            }
        }

        if (k != -1) {
            // Get the transformation between the marker and the real camera into gPatt_trans.
            if (gPatt_found && useContPoseEstimation) {
                err = arGetTransMatSquareCont(gAR3DHandle, &(gARHandle->markerInfo[k]), gPatt_trans, gPatt_width, gPatt_trans);
            } else {
                err = arGetTransMatSquare(gAR3DHandle, &(gARHandle->markerInfo[k]), gPatt_width, gPatt_trans);
                // Marker has appeared, so un-pause movie.
                ar2VideoCapStart(gMovieVideo);
            }
            gPatt_found = TRUE;
        } else {
            if (gPatt_found) {
                // Marker has disappeared, so pause movie.
                ar2VideoCapStop(gMovieVideo);
            }
            gPatt_found = FALSE;
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    }
}
static void mainLoop(void)
{
    ARUint8 *dataPtr;
    ARMarkerInfo *markerInfo;
    int markerNum;
    ARdouble patt_trans[3][4];
    ARdouble err;
    int imageProcMode;
    int debugMode;
    int j, k;

    /* grab a video frame */
    if ((dataPtr = (ARUint8*)arVideoGetImage()) == NULL) {
        arUtilSleep(2);
        return;
    }

    /* detect the markers in the video frame */
    if (arDetectMarker(arHandle, dataPtr) < 0) {
        cleanup();
        exit(0);
    }

    argSetWindow(w1);
    argDrawMode2D(vp1);
    arGetDebugMode(arHandle, &debugMode);
    if (debugMode == 0) {
        argDrawImage(dataPtr);
    } else {
        arGetImageProcMode(arHandle, &imageProcMode);
        if (imageProcMode == AR_IMAGE_PROC_FRAME_IMAGE) {
            argDrawImage(arHandle->labelInfo.bwImage);
        } else {
            argDrawImageHalf(arHandle->labelInfo.bwImage);
        }
    }

    argSetWindow(w2);
    argDrawMode2D(vp2);
    argDrawImage(dataPtr);
    argSetWindow(w1);

    if (count % 10 == 0) {
        sprintf(fps, "%f[fps]", 10.0 / arUtilTimer());
        arUtilTimerReset();
    }
    count++;
    glColor3f(0.0f, 1.0f, 0.0f);
    argDrawStringsByIdealPos(fps, 10, ysize - 30);

    markerNum = arGetMarkerNum(arHandle);
    if (markerNum == 0) {
        argSetWindow(w1);
        argSwapBuffers();
        argSetWindow(w2);
        argSwapBuffers();
        return;
    }

    /* check for object visibility */
    markerInfo = arGetMarker(arHandle);
    k = -1;
    for (j = 0; j < markerNum; j++) {
        // ARLOG("ID=%d, CF = %f\n", markerInfo[j].id, markerInfo[j].cf);
        if (patt_id == markerInfo[j].id) {
            if (k == -1) {
                if (markerInfo[j].cf > 0.7) k = j;
            } else if (markerInfo[j].cf > markerInfo[k].cf) k = j;
        }
    }
    if (k == -1) {
        argSetWindow(w1);
        argSwapBuffers();
        argSetWindow(w2);
        argSwapBuffers();
        return;
    }

    err = arGetTransMatSquare(ar3DHandle, &(markerInfo[k]), patt_width, patt_trans);
    sprintf(errValue, "err = %f", err);
    glColor3f(0.0f, 1.0f, 0.0f);
    argDrawStringsByIdealPos(fps, 10, ysize - 30);
    argDrawStringsByIdealPos(errValue, 10, ysize - 60);
    // ARLOG("err = %f\n", err);
    draw(patt_trans);

    argSetWindow(w1);
    argSwapBuffers();
    argSetWindow(w2);
    argSwapBuffers();
}
static ARdouble arGetTransMatMultiSquare2(AR3DHandle *handle, ARMarkerInfo *marker_info, int marker_num,
                                          ARMultiMarkerInfoT *config, int robustFlag)
{
    ARdouble *pos2d, *pos3d;
    ARdouble trans1[3][4], trans2[3][4];
    ARdouble err, err2;
    int max, maxArea;
    int vnum;
    int dir;
    int i, j, k;
    //char mes[12];

    //ARLOG("-- Pass1--\n");
    for( i = 0; i < config->marker_num; i++ ) {
        k = -1;
        if( config->marker[i].patt_type == AR_MULTI_PATTERN_TYPE_TEMPLATE ) {
            for( j = 0; j < marker_num; j++ ) {
                if( marker_info[j].idPatt != config->marker[i].patt_id ) continue;
                if( marker_info[j].cfPatt < config->cfPattCutoff ) continue;
                if( k == -1 ) k = j;
                else if( marker_info[k].cfPatt < marker_info[j].cfPatt ) k = j;
            }
            config->marker[i].visible = k;
            if( k >= 0 ) marker_info[k].dir = marker_info[k].dirPatt;
        } else { // config->marker[i].patt_type == AR_MULTI_PATTERN_TYPE_MATRIX
            for( j = 0; j < marker_num; j++ ) {
                // Check if we need to examine the globalID rather than patt_id.
                if (marker_info[j].idMatrix == 0 && marker_info[j].globalID != 0ULL) {
                    if( marker_info[j].globalID != config->marker[i].globalID ) continue;
                } else {
                    if( marker_info[j].idMatrix != config->marker[i].patt_id ) continue;
                }
                if( marker_info[j].cfMatrix < config->cfMatrixCutoff ) continue;
                if( k == -1 ) k = j;
                else if( marker_info[k].cfMatrix < marker_info[j].cfMatrix ) k = j;
            }
            config->marker[i].visible = k;
            if( k >= 0 ) marker_info[k].dir = marker_info[k].dirMatrix;
        }
        //if(k>=0) ARLOG("  *%d\n",i);
    }

    //ARLOG("-- Pass2--\n");
    vnum = 0;
    for( i = 0; i < config->marker_num; i++ ) {
        if( (j=config->marker[i].visible) < 0 ) continue;

        //glColor3f( 1.0, 1.0, 0.0 );
        //sprintf(mes,"%d",i);
        //argDrawStringsByIdealPos( mes, marker_info[j].pos[0], marker_info[j].pos[1] );

        err = arGetTransMatSquare(handle, &marker_info[j], config->marker[i].width, trans2);
        //ARLOG(" [%d:dir=%d] err = %f (%f,%f,%f)\n", i, marker_info[j].dir, err, trans2[0][3], trans2[1][3], trans2[2][3]);
        if( err > AR_MULTI_POSE_ERROR_CUTOFF_EACH_DEFAULT ) {
            config->marker[i].visible = -1;
            if (marker_info[j].cutoffPhase == AR_MARKER_INFO_CUTOFF_PHASE_NONE) marker_info[j].cutoffPhase = AR_MARKER_INFO_CUTOFF_PHASE_POSE_ERROR;
            continue;
        }
        //ARLOG(" *%d\n",i);

        // Use the largest (in terms of 2D coordinates) marker's pose estimate as the
        // input for the initial estimate for the pose estimator.
        if( vnum == 0 || maxArea < marker_info[j].area ) {
            maxArea = marker_info[j].area;
            max = i;
            for( j = 0; j < 3; j++ ) {
                for( k = 0; k < 4; k++ ) trans1[j][k] = trans2[j][k];
            }
        }
        vnum++;
    }
    if( vnum == 0 || vnum < config->min_submarker) {
        config->prevF = 0;
        return -1;
    }
    arUtilMatMul( (const ARdouble (*)[4])trans1, (const ARdouble (*)[4])config->marker[max].itrans, trans2 );

    arMalloc(pos2d, ARdouble, vnum*4*2);
    arMalloc(pos3d, ARdouble, vnum*4*3);

    j = 0;
    for( i = 0; i < config->marker_num; i++ ) {
        if( (k=config->marker[i].visible) < 0 ) continue;

        dir = marker_info[k].dir;
        pos2d[j*8+0] = marker_info[k].vertex[(4-dir)%4][0];
        pos2d[j*8+1] = marker_info[k].vertex[(4-dir)%4][1];
        pos2d[j*8+2] = marker_info[k].vertex[(5-dir)%4][0];
        pos2d[j*8+3] = marker_info[k].vertex[(5-dir)%4][1];
        pos2d[j*8+4] = marker_info[k].vertex[(6-dir)%4][0];
        pos2d[j*8+5] = marker_info[k].vertex[(6-dir)%4][1];
        pos2d[j*8+6] = marker_info[k].vertex[(7-dir)%4][0];
        pos2d[j*8+7] = marker_info[k].vertex[(7-dir)%4][1];
        pos3d[j*12+0] = config->marker[i].pos3d[0][0];
        pos3d[j*12+1] = config->marker[i].pos3d[0][1];
        pos3d[j*12+2] = config->marker[i].pos3d[0][2];
        pos3d[j*12+3] = config->marker[i].pos3d[1][0];
        pos3d[j*12+4] = config->marker[i].pos3d[1][1];
        pos3d[j*12+5] = config->marker[i].pos3d[1][2];
        pos3d[j*12+6] = config->marker[i].pos3d[2][0];
        pos3d[j*12+7] = config->marker[i].pos3d[2][1];
        pos3d[j*12+8] = config->marker[i].pos3d[2][2];
        pos3d[j*12+9] = config->marker[i].pos3d[3][0];
        pos3d[j*12+10] = config->marker[i].pos3d[3][1];
        pos3d[j*12+11] = config->marker[i].pos3d[3][2];
        j++;
    }

    if( config->prevF == 0 ) {
        if( robustFlag ) {
            err = arGetTransMat( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans );
            if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) {
                icpSetInlierProbability( handle->icpHandle, 0.8 );
                err = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans );
                if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) {
                    icpSetInlierProbability( handle->icpHandle, 0.6 );
                    err = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans );
                    if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) {
                        icpSetInlierProbability( handle->icpHandle, 0.4 );
                        err = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans );
                        if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) {
                            icpSetInlierProbability( handle->icpHandle, 0.0 );
                            err = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans );
                        }
                    }
                }
            }
        } else {
            err = arGetTransMat( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans );
        }
        free(pos3d);
        free(pos2d);
    } else {
        if( robustFlag ) {
            err2 = arGetTransMat( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, trans1 );
            err  = arGetTransMat( handle, config->trans, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans );
            if( err2 < err ) {
                for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i];
                err = err2;
            }
            if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) {
                icpSetInlierProbability( handle->icpHandle, 0.8 );
                err2 = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, trans1 );
                err  = arGetTransMatRobust( handle, config->trans, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans );
                if( err2 < err ) {
                    for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i];
                    err = err2;
                }
                if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) {
                    icpSetInlierProbability( handle->icpHandle, 0.6 );
                    err2 = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, trans1 );
                    err  = arGetTransMatRobust( handle, config->trans, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans );
                    if( err2 < err ) {
                        for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i];
                        err = err2;
                    }
                    if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) {
                        icpSetInlierProbability( handle->icpHandle, 0.4 );
                        err2 = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, trans1 );
                        err  = arGetTransMatRobust( handle, config->trans, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans );
                        if( err2 < err ) {
                            for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i];
                            err = err2;
                        }
                        if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) {
                            icpSetInlierProbability( handle->icpHandle, 0.0 );
                            err2 = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, trans1 );
                            err  = arGetTransMatRobust( handle, config->trans, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans );
                            if( err2 < err ) {
                                for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i];
                                err = err2;
                            }
                        }
                    }
                }
            }
        } else {
            err2 = arGetTransMat( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, trans1 );
            err  = arGetTransMat( handle, config->trans, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans );
            if( err2 < err ) {
                for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i];
                err = err2;
            }
        }
        free(pos3d);
        free(pos2d);
    }

    if (err < AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT) config->prevF = 1;
    else {
        config->prevF = 0;
        for (i = 0; i < config->marker_num; i++) {
            if ((k = config->marker[i].visible) < 0) continue;
            if (marker_info[k].cutoffPhase == AR_MARKER_INFO_CUTOFF_PHASE_NONE) marker_info[k].cutoffPhase = AR_MARKER_INFO_CUTOFF_PHASE_POSE_ERROR_MULTI;
        }
    }
    return err;
}
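The function above is static; callers normally reach it through the public multi-marker wrappers rather than calling it directly. The following sketch shows one plausible per-frame driver, assuming a configuration previously loaded with arMultiReadConfigFile() and the ARUint8* frame interface used in the GLUT examples above; the globals gARHandle, gAR3DHandle, gMultiConfig and gModelview are illustrative assumptions, not code from the library.

// Sketch only: run detection, then estimate the combined pose of a marker set.
// Assumed globals (names hypothetical):
//   ARHandle *gARHandle; AR3DHandle *gAR3DHandle;
//   ARMultiMarkerInfoT *gMultiConfig; // e.g. from arMultiReadConfigFile("Data/multi/marker.dat", gARPattHandle)
//   ARdouble gModelview[16];
static void updateMultiMarkerPose(ARUint8 *image)
{
    ARdouble err;

    if (arDetectMarker(gARHandle, image) < 0) return;

    // The robust wrapper ends up in arGetTransMatMultiSquare2() above with robustFlag set.
    err = arGetTransMatMultiSquareRobust(gAR3DHandle, arGetMarker(gARHandle), arGetMarkerNum(gARHandle), gMultiConfig);

    if (err >= 0 && gMultiConfig->prevF) {
        // The combined pose for the whole marker set is now in gMultiConfig->trans.
        arglCameraViewRH((const ARdouble (*)[4])gMultiConfig->trans, gModelview, 1.0);
    }
}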
static void mainLoop(void)
{
    static int imageNumber = 0;
    static int ms_prev;
    int ms;
    float s_elapsed;
    ARUint8 *image;
    ARdouble err;
    int j, k;

    // Find out how long since mainLoop() last ran.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
    ms_prev = ms;

    // Update drawing.
    DrawCubeUpdate(s_elapsed);

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image;  // Save the fetched image.

        if (gARTImageSavePlease) {
            char imageNumberText[15];
            sprintf(imageNumberText, "image-%04d.jpg", imageNumber++);
            if (arVideoSaveImageJPEG(gARHandle->xsize, gARHandle->ysize, gARHandle->arPixelFormat, gARTImage, imageNumberText, 75, 0) < 0) {
                ARLOGe("Error saving video image.\n");
            }
            gARTImageSavePlease = FALSE;
        }

        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

        // Detect the markers in the video frame.
        if (arDetectMarker(gARHandle, gARTImage) < 0) {
            exit(-1);
        }

        // Check through the marker_info array for highest confidence
        // visible marker matching our preferred pattern.
        k = -1;
        for (j = 0; j < gARHandle->marker_num; j++) {
            if (gARHandle->markerInfo[j].id == gPatt_id) {
                if (k == -1) k = j; // First marker detected.
                else if (gARHandle->markerInfo[j].cf > gARHandle->markerInfo[k].cf) k = j; // Higher confidence marker detected.
            }
        }

        if (k != -1) {
            // Get the transformation between the marker and the real camera into gPatt_trans.
            err = arGetTransMatSquare(gAR3DHandle, &(gARHandle->markerInfo[k]), gPatt_width, gPatt_trans);
            gPatt_found = TRUE;
        } else {
            gPatt_found = FALSE;
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    }
}
static void mainLoop(void)
{
    static int ms_prev;
    int ms;
    float s_elapsed;
    ARUint8 *image;
    ARdouble err;
    int i, j, k;

    // Find out how long since mainLoop() last ran.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
    ms_prev = ms;

    // Update drawing.
    arVrmlTimerUpdate();

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image;  // Save the fetched image.
        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

        // Detect the markers in the video frame.
        if (arDetectMarker(gARHandle, gARTImage) < 0) {
            exit(-1);
        }

        // Check for object visibility.
        for (i = 0; i < gObjectDataCount; i++) {

            // Check through the marker_info array for highest confidence
            // visible marker matching our object's pattern.
            k = -1;
            for (j = 0; j < gARHandle->marker_num; j++) {
                if (gARHandle->markerInfo[j].id == gObjectData[i].id) {
                    if (k == -1) k = j; // First marker detected.
                    else if (gARHandle->markerInfo[j].cf > gARHandle->markerInfo[k].cf) k = j; // Higher confidence marker detected.
                }
            }

            if (k != -1) {
                // Get the transformation between the marker and the real camera.
                //ARLOGe("Saw object %d.\n", i);
                if (gObjectData[i].visible && useContPoseEstimation) {
                    err = arGetTransMatSquareCont(gAR3DHandle, &(gARHandle->markerInfo[k]), gObjectData[i].trans, gObjectData[i].marker_width, gObjectData[i].trans);
                } else {
                    err = arGetTransMatSquare(gAR3DHandle, &(gARHandle->markerInfo[k]), gObjectData[i].marker_width, gObjectData[i].trans);
                }
                gObjectData[i].visible = 1;
            } else {
                gObjectData[i].visible = 0;
            }
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    }
}
static void mainLoop(void)
{
    AR2VideoBufferT *buff;
    ARMarkerInfo *markerInfo;
    int markerNum;
    ARdouble patt_trans[3][4];
    ARdouble err;
    int debugMode;
    int j, k;

    /* grab a video frame */
    buff = arVideoGetImage();
    if (!buff || !buff->fillFlag) {
        arUtilSleep(2);
        return;
    }

    /* detect the markers in the video frame */
    if( arDetectMarker(arHandle, buff) < 0 ) {
        cleanup();
        exit(0);
    }

    argSetWindow(w1);
    arGetDebugMode(arHandle, &debugMode);
    if (debugMode == AR_DEBUG_ENABLE) {
        int imageProcMode;
        argViewportSetPixFormat(vp1, AR_PIXEL_FORMAT_MONO); // Drawing the debug image.
        argDrawMode2D(vp1);
        arGetImageProcMode(arHandle, &imageProcMode);
        if (imageProcMode == AR_IMAGE_PROC_FRAME_IMAGE) argDrawImage(arHandle->labelInfo.bwImage);
        else argDrawImageHalf(arHandle->labelInfo.bwImage);
    } else {
        AR_PIXEL_FORMAT pixFormat;
        arGetPixelFormat(arHandle, &pixFormat);
        argViewportSetPixFormat(vp1, pixFormat); // Drawing the input image.
        argDrawMode2D(vp1);
        argDrawImage(buff->buff);
    }

    argSetWindow(w2);
    argDrawMode2D(vp2);
    argDrawImage(buff->buff);
    argSetWindow(w1);

    if( count % 10 == 0 ) {
        sprintf(fps, "%f[fps]", 10.0/arUtilTimer());
        arUtilTimerReset();
    }
    count++;
    glColor3f(0.0f, 1.0f, 0.0f);
    argDrawStringsByIdealPos(fps, 10, ysize-30);

    markerNum = arGetMarkerNum( arHandle );
    if( markerNum == 0 ) {
        argSetWindow(w1);
        argSwapBuffers();
        argSetWindow(w2);
        argSwapBuffers();
        return;
    }

    /* check for object visibility */
    markerInfo = arGetMarker( arHandle );
    k = -1;
    for( j = 0; j < markerNum; j++ ) {
        //ARLOG("ID=%d, CF = %f\n", markerInfo[j].id, markerInfo[j].cf);
        if( patt_id == markerInfo[j].id ) {
            if( k == -1 ) {
                if (markerInfo[j].cf > 0.7) k = j;
            } else if (markerInfo[j].cf > markerInfo[k].cf) k = j;
        }
    }
    if( k == -1 ) {
        argSetWindow(w1);
        argSwapBuffers();
        argSetWindow(w2);
        argSwapBuffers();
        return;
    }

    err = arGetTransMatSquare(ar3DHandle, &(markerInfo[k]), patt_width, patt_trans);
    sprintf(errValue, "err = %f", err);
    glColor3f(0.0f, 1.0f, 0.0f);
    argDrawStringsByIdealPos(fps, 10, ysize-30);
    argDrawStringsByIdealPos(errValue, 10, ysize-60);
    //ARLOG("err = %f\n", err);
    draw(patt_trans);

    argSetWindow(w1);
    argSwapBuffers();
    argSetWindow(w2);
    argSwapBuffers();
}
static void mainLoop(void)
{
    static AR2VideoBufferT buff = {0};
    static int oldImageMode = -1;
    static int oldDispMode = -1;
    static int oldDistMode = -1;
    ARdouble patt_trans[3][4];
    int i, j;

    if (!buff.buff) {
        arMalloc(buff.buff, ARUint8, xsize*ysize*PIXEL_SIZE);
    }

    if( oldImageMode != 0 && imageMode == 0 ) {
        for( i = 0; i < xsize*ysize; i++ ) {
            buff.buff[i*PIXEL_SIZE+0] = 200;
            buff.buff[i*PIXEL_SIZE+1] = 200;
            buff.buff[i*PIXEL_SIZE+2] = 200;
        }
        for( j = 190; j < 291; j++ ) {
            for( i = 280; i < 381; i++ ) {
                buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 20;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 20;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 20;
            }
        }
        i = 0;
        for( j = 0; j < ysize; j++ ) {
            buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0;
        }
        i = 639;
        for( j = 0; j < ysize; j++ ) {
            buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0;
        }
        j = 0;
        for( i = 0; i < xsize; i++ ) {
            buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0;
        }
        j = 479;
        for( i = 0; i < xsize; i++ ) {
            buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
            buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0;
        }
        oldImageMode = 0;
    }
    if( oldImageMode != 1 && imageMode == 1 ) {
        for( j = 0; j < 480; j += 2 ) {
            for( i = 0; i < 640; i++ ) {
                buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0;
            }
        }
        for( j = 1; j < 480; j += 2 ) {
            for( i = 0; i < 640; i++ ) {
                buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 0;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 255;
            }
        }
        oldImageMode = 1;
    }
    if( oldImageMode != 2 && imageMode == 2 ) {
        for( i = 0; i < 640; i += 2 ) {
            for( j = 0; j < 480; j++ ) {
                buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0;
            }
        }
        for( i = 1; i < 640; i += 2 ) {
            for( j = 0; j < 480; j++ ) {
                buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 0;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 255;
            }
        }
        oldImageMode = 2;
    }
    if( oldImageMode != 3 && imageMode == 3 ) {
        for( i = 0; i < xsize*ysize; i++ ) {
            buff.buff[i*PIXEL_SIZE+0] = 200;
            buff.buff[i*PIXEL_SIZE+1] = 200;
            buff.buff[i*PIXEL_SIZE+2] = 200;
        }
        for( j = 190; j < 291; j++ ) {
            for( i = 280; i < 381; i++ ) {
                buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 20;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 20;
                buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 20;
            }
        }
        oldImageMode = 3;
    }

    /* detect the markers in the video frame */
    if (arDetectMarker(arHandle, &buff) < 0) {
        cleanup();
        exit(0);
    }

    glClearColor( 0.0f, 0.0f, 0.0f, 0.0f );
    glClearDepth( 1.0f );
    glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);

    if( oldDispMode != 0 && dispMode == 0 ) {
        argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_GL_DRAW_PIXELS );
        oldDispMode = 0;
        debugReportMode(vp);
    } else if( oldDispMode != 1 && dispMode == 1 ) {
        argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME );
        oldDispMode = 1;
        debugReportMode(vp);
    } else if( oldDispMode != 2 && dispMode == 2 ) {
        argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FIELD );
        oldDispMode = 2;
        debugReportMode(vp);
    }
    if( oldDistMode != 0 && distMode == 0 ) {
        argViewportSetDistortionMode( vp, AR_GL_DISTORTION_COMPENSATE_DISABLE );
        oldDistMode = 0;
    }
    if( oldDistMode != 1 && distMode == 1 ) {
        argViewportSetDistortionMode( vp, AR_GL_DISTORTION_COMPENSATE_ENABLE );
        oldDistMode = 1;
    }

    argDrawMode2D(vp);
    argDrawImage(buff.buff);

    if( imageMode == 3 ) {
        glLineWidth( 1.0f );
        glColor3f( 0.0f, 1.0f, 0.0f );
        argDrawSquareByIdealPos( arHandle->markerInfo[0].vertex );
        glColor3f( 1.0f, 0.0f, 0.0f );
        argDrawLineByIdealPos(   0.0,   0.0, 640.0,   0.0 );
        argDrawLineByIdealPos(   0.0, 479.0, 640.0, 479.0 );
        argDrawLineByIdealPos(   0.0,  -1.0,   0.0, 479.0 );
        argDrawLineByIdealPos( 639.0,  -1.0, 639.0, 479.0 );
        argDrawLineByIdealPos(   0.0, 188.0, 639.0, 188.0 );
        argDrawLineByIdealPos(   0.0, 292.0, 639.0, 292.0 );
        argDrawLineByIdealPos( 278.0,   0.0, 278.0, 479.0 );
        argDrawLineByIdealPos( 382.0,   0.0, 382.0, 479.0 );
    }

    if( arHandle->marker_num == 0 ) {
        argSwapBuffers();
        return;
    }

    arGetTransMatSquare(ar3DHandle, &(arHandle->markerInfo[0]), SQUARE_WIDTH, patt_trans);
    draw(patt_trans);

    argSwapBuffers();
}
bool ARMarkerSquare::updateWithDetectedMarkers(ARMarkerInfo* markerInfo, int markerNum, AR3DHandle *ar3DHandle)
{
    //ARController::logv("ARMarkerSquare::update()");

    if (patt_id < 0) return false;  // Can't update if no pattern loaded.

    visiblePrev = visible;

    if (markerInfo) {
        int k = -1;
        if (patt_type == AR_PATTERN_TYPE_TEMPLATE) {
            // Iterate over all detected markers.
            for (int j = 0; j < markerNum; j++ ) {
                if (patt_id == markerInfo[j].idPatt) {
                    // The pattern of detected trapezoid matches marker[k].
                    if (k == -1) {
                        if (markerInfo[j].cfPatt > m_cfMin) k = j; // Count as a match if match confidence exceeds cfMin.
                    } else if (markerInfo[j].cfPatt > markerInfo[k].cfPatt) k = j; // Or if it exceeds match confidence of a different already matched trapezoid (i.e. assume only one instance of each marker).
                }
            }
            if (k != -1) {
                markerInfo[k].id = markerInfo[k].idPatt;
                markerInfo[k].cf = markerInfo[k].cfPatt;
                markerInfo[k].dir = markerInfo[k].dirPatt;
            }
        } else {
            for (int j = 0; j < markerNum; j++) {
                if (patt_id == markerInfo[j].idMatrix) {
                    if (k == -1) {
                        if (markerInfo[j].cfMatrix >= m_cfMin) k = j; // Count as a match if match confidence exceeds cfMin.
                    } else if (markerInfo[j].cfMatrix > markerInfo[k].cfMatrix) k = j; // Or if it exceeds match confidence of a different already matched trapezoid (i.e. assume only one instance of each marker).
                }
            }
            if (k != -1) {
                markerInfo[k].id = markerInfo[k].idMatrix;
                markerInfo[k].cf = markerInfo[k].cfMatrix;
                markerInfo[k].dir = markerInfo[k].dirMatrix;
            }
        }

        // Consider marker visible if a match was found.
        if (k != -1) {
            visible = true;
            m_cf = markerInfo[k].cf;
            // If the model is visible, update its transformation matrix.
            if (visiblePrev && useContPoseEstimation) {
                // If the marker was visible last time, use the "cont" version of arGetTransMatSquare.
                arGetTransMatSquareCont(ar3DHandle, &(markerInfo[k]), trans, m_width, trans);
            } else {
                // If the marker wasn't visible last time, use the normal version of arGetTransMatSquare.
                arGetTransMatSquare(ar3DHandle, &(markerInfo[k]), m_width, trans);
            }
        } else {
            visible = false;
            m_cf = 0.0f;
        }
    } else {
        visible = false;
        m_cf = 0.0f;
    }

    return (ARMarker::update()); // Parent class will finish update.
}