/*
 * Worker-thread entry point for AR2 2D texture tracking.
 *
 * Fetches its parameter block (AR2Tracking2DParamT) from the thread handle,
 * then loops: block in threadStartWait() until work is signalled, run one
 * ar2Tracking2dSub() pass over the shared parameter block, store the return
 * code in params->ret, and signal completion via threadEndSignal().
 * Exits (returning NULL) when threadStartWait() reports shutdown (< 0).
 */
void *ar2Tracking2d( THREAD_HANDLE_T *threadHandle )
{
    AR2Tracking2DParamT *params   = (AR2Tracking2DParamT *)threadGetArg(threadHandle);
    const int            threadNo = threadGetID(threadHandle);

    ARLOGi("Start tracking_thread #%d.\n", threadNo);

    while (threadStartWait(threadHandle) >= 0) {
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
        params->ret = ar2Tracking2dSub(params->ar2Handle, params->surfaceSet, params->candidate,
                                       params->dataPtr, params->mfImage,
                                       &(params->templ), &(params->templ2), &(params->result));
#else
        params->ret = ar2Tracking2dSub(params->ar2Handle, params->surfaceSet, params->candidate,
                                       params->dataPtr, params->mfImage,
                                       &(params->templ), &(params->result));
#endif
        threadEndSignal(threadHandle);
    }

    ARLOGi("End tracking_thread #%d.\n", threadNo);
    return NULL;
}
/*
 * KPM tracking-initialisation (NFT page recognition) thread main function.
 *
 * Repeatedly waits to be signalled (threadStartWait), runs KPM matching on
 * the image buffer shared via the TrackingInitHandle, and records the best
 * (lowest-error) successful page pose back into the handle:
 *   flag  = 1 if any page matched this pass, 0 otherwise.
 *   page  = page number of the best match.
 *   trans = 3x4 camera pose of the best match.
 * Completion of each pass is signalled with threadEndSignal(). Returns NULL
 * when threadStartWait() reports shutdown (< 0).
 *
 * BUG FIX: the second parameter check re-tested threadHandle (which cannot
 * be NULL at that point) instead of the trackingInitHandle just fetched, so
 * a NULL thread argument was never caught.
 */
static void *trackingInitMain( THREAD_HANDLE_T *threadHandle )
{
    TrackingInitHandle *trackingInitHandle;
    KpmHandle          *kpmHandle;
    KpmResult          *kpmResult = NULL;
    int                 kpmResultNum;
    ARUint8            *imagePtr;
    float               err = 0.0f; // Error of best result so far; only read once flag != 0, but initialised to silence -Wmaybe-uninitialized.
    int                 i, j, k;

    if (!threadHandle) {
        ARLOGe("Error starting tracking thread: empty THREAD_HANDLE_T.\n");
        return (NULL);
    }
    trackingInitHandle = (TrackingInitHandle *)threadGetArg(threadHandle);
    if (!trackingInitHandle) { // BUG FIX: was "!threadHandle".
        ARLOGe("Error starting tracking thread: empty trackingInitHandle.\n");
        return (NULL);
    }
    kpmHandle = trackingInitHandle->kpmHandle;
    imagePtr  = trackingInitHandle->imagePtr;
    if (!kpmHandle || !imagePtr) {
        ARLOGe("Error starting tracking thread: empty kpmHandle/imagePtr.\n");
        return (NULL);
    }
    ARLOGi("Start tracking thread.\n");

    // kpmResult points into kpmHandle's internal result array; fetched once,
    // refreshed in place by each kpmMatching() call.
    kpmGetResult( kpmHandle, &kpmResult, &kpmResultNum );

    for (;;) {
        if (threadStartWait(threadHandle) < 0) break;

        kpmMatching(kpmHandle, imagePtr);
        trackingInitHandle->flag = 0;
        for (i = 0; i < kpmResultNum; i++) {
            if (kpmResult[i].camPoseF != 0) continue; // Skip results without a valid camera pose.
            ARLOGd("kpmGetPose OK.\n");
            if (trackingInitHandle->flag == 0 || err > kpmResult[i].error) { // Take the first or best result.
                trackingInitHandle->flag = 1;
                trackingInitHandle->page = kpmResult[i].pageNo;
                for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) trackingInitHandle->trans[j][k] = kpmResult[i].camPose[j][k];
                err = kpmResult[i].error;
            }
        }
        threadEndSignal(threadHandle);
    }

    ARLOGi("End tracking thread.\n");
    return (NULL);
}
// References globals: markersNFTCount // Modifies globals: threadHandle, surfaceSet[], surfaceSetCount, markersNFT[] static int loadNFTData(void) { int i; KpmRefDataSet *refDataSet; // If data was already loaded, stop KPM tracking thread and unload previously loaded data. if (threadHandle) { ARLOGi("Reloading NFT data.\n"); unloadNFTData(); } else { ARLOGi("Loading NFT data.\n"); } refDataSet = NULL; for (i = 0; i < markersNFTCount; i++) { // Load KPM data. KpmRefDataSet *refDataSet2; ARLOGi("Reading %s.fset3\n", markersNFT[i].datasetPathname); if (kpmLoadRefDataSet(markersNFT[i].datasetPathname, "fset3", &refDataSet2) < 0 ) { ARLOGe("Error reading KPM data from %s.fset3\n", markersNFT[i].datasetPathname); markersNFT[i].pageNo = -1; continue; } markersNFT[i].pageNo = surfaceSetCount; ARLOGi(" Assigned page no. %d.\n", surfaceSetCount); if (kpmChangePageNoOfRefDataSet(refDataSet2, KpmChangePageNoAllPages, surfaceSetCount) < 0) { ARLOGe("Error: kpmChangePageNoOfRefDataSet\n"); exit(-1); } if (kpmMergeRefDataSet(&refDataSet, &refDataSet2) < 0) { ARLOGe("Error: kpmMergeRefDataSet\n"); exit(-1); } ARLOGi(" Done.\n"); // Load AR2 data. ARLOGi("Reading %s.fset\n", markersNFT[i].datasetPathname); if ((surfaceSet[surfaceSetCount] = ar2ReadSurfaceSet(markersNFT[i].datasetPathname, "fset", NULL)) == NULL ) { ARLOGe("Error reading data from %s.fset\n", markersNFT[i].datasetPathname); } ARLOGi(" Done.\n"); surfaceSetCount++; if (surfaceSetCount == PAGES_MAX) break; } if (kpmSetRefDataSet(kpmHandle, refDataSet) < 0) { ARLOGe("Error: kpmSetRefDataSet\n"); exit(-1); } kpmDeleteRefDataSet(&refDataSet); // Start the KPM tracking thread. threadHandle = trackingInitInit(kpmHandle); if (!threadHandle) exit(-1); ARLOGi("Loading of NFT data complete.\n"); return (TRUE); }
// Modifies globals: threadHandle, surfaceSet[], surfaceSetCount static int unloadNFTData(void) { int i, j; if (threadHandle) { ARLOGi("Stopping NFT2 tracking thread.\n"); trackingInitQuit(&threadHandle); } j = 0; for (i = 0; i < surfaceSetCount; i++) { if (j == 0) ARLOGi("Unloading NFT tracking surfaces.\n"); ar2FreeSurfaceSet(&surfaceSet[i]); // Also sets surfaceSet[i] to NULL. j++; } if (j > 0) ARLOGi("Unloaded %d NFT tracking surfaces.\n", j); surfaceSetCount = 0; return 0; }
// Modifies globals: kpmHandle, ar2Handle. static int initNFT(ARParamLT *cparamLT, AR_PIXEL_FORMAT pixFormat) { ARLOGd("Initialising NFT.\n"); // // NFT init. // // KPM init. kpmHandle = kpmCreateHandle(cparamLT, pixFormat); if (!kpmHandle) { ARLOGe("Error: kpmCreateHandle.\n"); return (FALSE); } //kpmSetProcMode( kpmHandle, KpmProcHalfSize ); // AR2 init. if( (ar2Handle = ar2CreateHandle(cparamLT, pixFormat, AR2_TRACKING_DEFAULT_THREAD_NUM)) == NULL ) { ARLOGe("Error: ar2CreateHandle.\n"); kpmDeleteHandle(&kpmHandle); return (FALSE); } if (threadGetCPU() <= 1) { ARLOGi("Using NFT tracking settings for a single CPU.\n"); ar2SetTrackingThresh(ar2Handle, 5.0); ar2SetSimThresh(ar2Handle, 0.50); ar2SetSearchFeatureNum(ar2Handle, 16); ar2SetSearchSize(ar2Handle, 6); ar2SetTemplateSize1(ar2Handle, 6); ar2SetTemplateSize2(ar2Handle, 6); } else { ARLOGi("Using NFT tracking settings for more than one CPU.\n"); ar2SetTrackingThresh(ar2Handle, 5.0); ar2SetSimThresh(ar2Handle, 0.50); ar2SetSearchFeatureNum(ar2Handle, 16); ar2SetSearchSize(ar2Handle, 12); ar2SetTemplateSize1(ar2Handle, 6); ar2SetTemplateSize2(ar2Handle, 6); } // NFT dataset loading will happen later. return (TRUE); }
// Open the video source described by vconf, query its frame size and pixel
// format, load the camera calibration from cparam_name (resizing it to the
// actual frame size if needed), and return a lookup-table-optimised copy via
// *cparamLT_p. Returns TRUE on success; on any failure after the video path
// was opened, closes it again and returns FALSE.
static int setupCamera(const char *cparam_name, char *vconf, ARParamLT **cparamLT_p)
{
    ARParam         cparam;
    int             xsize, ysize;
    AR_PIXEL_FORMAT pixFormat;

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        ARLOGe("setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }

    // Find the size of the window.
    if (arVideoGetSize(&xsize, &ysize) < 0) {
        ARLOGe("setupCamera(): Unable to determine camera frame size.\n");
        goto bail;
    }
    ARLOGi("Camera image size (x,y) = (%d,%d)\n", xsize, ysize);

    // Get the format in which the camera is returning pixels.
    pixFormat = arVideoGetPixelFormat();
    if (pixFormat == AR_PIXEL_FORMAT_INVALID) {
        ARLOGe("setupCamera(): Camera is using unsupported pixel format.\n");
        goto bail;
    }

    // Load the camera parameters, resize for the window and init.
    if (arParamLoad(cparam_name, 1, &cparam) < 0) {
        ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
        goto bail;
    }
    if (cparam.xsize != xsize || cparam.ysize != ysize) {
        ARLOGw("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
        arParamChangeSize(&cparam, xsize, ysize, &cparam);
    }
#ifdef DEBUG
    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&cparam);
#endif
    *cparamLT_p = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET);
    if (*cparamLT_p == NULL) {
        ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
        goto bail;
    }

    return (TRUE);

bail:
    arVideoClose();
    return (FALSE);
}
// Application entry point for the NFT (natural feature tracking) example.
// Parses command-line options, opens the camera, initialises NFT (KPM + AR2),
// loads markers and their NFT datasets, creates the GLUT window/game-mode
// context, sets up the OpenGL projection and ARgsub_lite, loads OSG models,
// starts video capture, registers GLUT callbacks, and enters the GLUT main
// loop (which does not return).
int main(int argc, char** argv)
{
    char glutGamemode[32] = "";
    char *vconf = NULL;
    char cparaDefault[] = "Data2/camera_para.dat";
    char *cpara = NULL;
    int i;
    int gotTwoPartOption;
    const char markerConfigDataFilename[] = "Data2/markers.dat";
    const char objectDataFilename[] = "Data2/objects.dat";
#ifdef DEBUG
    arLogLevel = AR_LOG_LEVEL_DEBUG;
#endif

    //
    // Process command-line options.
    //

    glutInit(&argc, argv);

    i = 1; // argv[0] is name of app, so start at 1.
    while (i < argc) {
        gotTwoPartOption = FALSE;
        // Look for two-part options first.
        if ((i + 1) < argc) {
            if (strcmp(argv[i], "--vconf") == 0) {
                i++;
                vconf = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--cpara") == 0) {
                i++;
                cpara = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i],"--width") == 0) {
                i++;
                // Get width from second field.
                if (sscanf(argv[i], "%d", &prefWidth) != 1) {
                    ARLOGe("Error: --width option must be followed by desired width.\n");
                }
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i],"--height") == 0) {
                i++;
                // Get height from second field.
                if (sscanf(argv[i], "%d", &prefHeight) != 1) {
                    ARLOGe("Error: --height option must be followed by desired height.\n");
                }
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i],"--refresh") == 0) {
                i++;
                // Get refresh rate from second field.
                if (sscanf(argv[i], "%d", &prefRefresh) != 1) {
                    ARLOGe("Error: --refresh option must be followed by desired refresh rate.\n");
                }
                gotTwoPartOption = TRUE;
            }
        }
        if (!gotTwoPartOption) {
            // Look for single-part options.
            if (strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-help") == 0 || strcmp(argv[i], "-h") == 0) {
                usage(argv[0]);
            } else if (strncmp(argv[i], "-cpara=", 7) == 0) {
                cpara = &(argv[i][7]);
            } else if (strcmp(argv[i], "--version") == 0 || strcmp(argv[i], "-version") == 0 || strcmp(argv[i], "-v") == 0) {
                ARLOG("%s version %s\n", argv[0], AR_HEADER_VERSION_STRING);
                exit(0);
            } else if (strcmp(argv[i],"--windowed") == 0) {
                prefWindowed = TRUE;
            } else if (strcmp(argv[i],"--fullscreen") == 0) {
                prefWindowed = FALSE;
            } else {
                ARLOGe("Error: invalid command line argument '%s'.\n", argv[i]);
                usage(argv[0]);
            }
        }
        i++;
    }

    //
    // Video setup.
    //

    if (!setupCamera((cpara ? cpara : cparaDefault), vconf, &gCparamLT)) {
        ARLOGe("main(): Unable to set up AR camera.\n");
        exit(-1);
    }

    //
    // AR init.
    //

    if (!initNFT(gCparamLT, arVideoGetPixelFormat())) {
        ARLOGe("main(): Unable to init NFT.\n");
        exit(-1);
    }

    //
    // Markers setup.
    //

    // Load marker(s).
    newMarkers(markerConfigDataFilename, &markersNFT, &markersNFTCount);
    if (!markersNFTCount) {
        ARLOGe("Error loading markers from config. file '%s'.\n", markerConfigDataFilename);
        cleanup();
        exit(-1);
    }
    ARLOGi("Marker count = %d\n", markersNFTCount);

    // Marker data has been loaded, so now load NFT data.
    if (!loadNFTData()) {
        ARLOGe("Error loading NFT data.\n");
        cleanup();
        exit(-1);
    }

    //
    // Graphics setup.
    //

    // Set up GL context(s) for OpenGL to draw into.
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
    if (prefWindowed) {
        if (prefWidth > 0 && prefHeight > 0) glutInitWindowSize(prefWidth, prefHeight);
        else glutInitWindowSize(gCparamLT->param.xsize, gCparamLT->param.ysize);
        glutCreateWindow(argv[0]);
    } else {
        if (glutGameModeGet(GLUT_GAME_MODE_POSSIBLE)) {
            // Build a GLUT game-mode string "WxH[:depth][@refresh]" from the prefs.
            if (prefWidth && prefHeight) {
                if (prefDepth) {
                    if (prefRefresh) snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i@%i", prefWidth, prefHeight, prefDepth, prefRefresh);
                    else snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i", prefWidth, prefHeight, prefDepth);
                } else {
                    if (prefRefresh) snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i@%i", prefWidth, prefHeight, prefRefresh);
                    else snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i", prefWidth, prefHeight);
                }
            } else {
                prefWidth = glutGameModeGet(GLUT_GAME_MODE_WIDTH);
                prefHeight = glutGameModeGet(GLUT_GAME_MODE_HEIGHT);
                snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i", prefWidth, prefHeight);
            }
            glutGameModeString(glutGamemode);
            glutEnterGameMode();
        } else {
            // Game mode unavailable; fall back to a fullscreen window.
            if (prefWidth > 0 && prefHeight > 0) glutInitWindowSize(prefWidth, prefHeight);
            glutCreateWindow(argv[0]);
            glutFullScreen();
        }
    }

    // Create the OpenGL projection from the calibrated camera parameters.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, cameraLens);
    cameraPoseValid = FALSE;

    // Setup ARgsub_lite library for current OpenGL context.
    if ((gArglSettings = arglSetupForCurrentContext(&(gCparamLT->param), arVideoGetPixelFormat())) == NULL) {
        ARLOGe("main(): arglSetupForCurrentContext() returned error.\n");
        cleanup();
        exit(-1);
    }

    // Load objects (i.e. OSG models).
    VirtualEnvironmentInit(objectDataFilename);
    VirtualEnvironmentHandleARViewUpdatedCameraLens(cameraLens);

    //
    // Setup complete. Start tracking.
    //

    // Start the video.
    // NOTE(review): the message says "setupCamera()" but this is main(), and
    // returning FALSE (0) from main() signals success to the shell — verify
    // whether exit(-1) was intended here.
    if (arVideoCapStart() != 0) {
        ARLOGe("setupCamera(): Unable to begin camera data capture.\n");
        return (FALSE);
    }
    arUtilTimerReset();

    // Register GLUT event-handling callbacks.
    // NB: mainLoop() is registered by Visibility.
    glutDisplayFunc(Display);
    glutReshapeFunc(Reshape);
    glutVisibilityFunc(Visibility);
    glutKeyboardFunc(Keyboard);

    glutMainLoop();

    return (0);
}
/*
 * Read an AR2 surface set.
 *
 * filename   - base pathname of the dataset (no extension).
 * ext        - surface-set file extension. NULL, "" or "fset" selects direct
 *              mode (single surface, identity transform, no descriptor file);
 *              anything else opens "<filename>.<ext>" and reads the surface
 *              count, per-surface names and 3x4 transforms from it.
 * pattHandle - if non-NULL, a marker set (.mrk) is also loaded per surface.
 *
 * Returns a newly-allocated AR2SurfaceSetT (caller frees with
 * ar2FreeSurfaceSet()), or NULL on error. NOTE: some mid-read failure paths
 * retain the historical behaviour of calling exit(0).
 *
 * BUG FIXES:
 *  - unbounded sprintf() into namebuf replaced with snprintf();
 *  - the fopen() failure message now reports the path actually tried
 *    (namebuf), not the extensionless filename;
 *  - jpegName is now guaranteed NUL-terminated after strncpy().
 */
AR2SurfaceSetT *ar2ReadSurfaceSet( const char *filename, const char *ext, ARPattHandle *pattHandle )
{
    AR2SurfaceSetT *surfaceSet;
    FILE           *fp = NULL;
    int             readMode;
    char            buf[256], name[256];
    int             i, j, k;

    if( ext == NULL || *ext == '\0' || strcmp(ext,"fset") == 0 ) {
        strncpy(name, filename, sizeof(name) - 1);
        name[sizeof(name) - 1] = '\0';
        readMode = 0;
    }
    else {
        char namebuf[512];
        snprintf(namebuf, sizeof(namebuf), "%s.%s", filename, ext); // Bounded; was sprintf().
        if ((fp = fopen(namebuf,"r")) == NULL) {
            ARLOGe("Error opening file '%s': ", namebuf); // Report the path we actually tried.
            ARLOGperror(NULL);
            return (NULL);
        }
        readMode = 1;
    }
    arMalloc(surfaceSet, AR2SurfaceSetT, 1);
    if( readMode ) {
        // First record: number of surfaces (must be >= 1).
        if( get_buff(buf, 256, fp) == NULL ) { fclose(fp); free(surfaceSet); return (NULL); }
        if( sscanf(buf, "%d", &i) != 1 )     { fclose(fp); free(surfaceSet); return (NULL); }
        if( i < 1 )                          { fclose(fp); free(surfaceSet); return (NULL); }
        surfaceSet->num     = i;
        surfaceSet->contNum = 0;
    }
    else {
        surfaceSet->num     = 1;
        surfaceSet->contNum = 0;
    }
    arMalloc(surfaceSet->surface, AR2SurfaceT, surfaceSet->num);

    for( i = 0; i < surfaceSet->num; i++ ) {
        ARLOGi("\n### Surface No.%d ###\n", i+1);
        if( readMode ) {
            // Per-surface record: base pathname of this surface's data files.
            if( get_buff(buf, 256, fp) == NULL ) break;
            if( sscanf(buf, "%s", name) != 1 ) break;
            ar2UtilRemoveExt( name );
        }
        ARLOGi(" Read ImageSet.\n");
        surfaceSet->surface[i].imageSet = ar2ReadImageSet( name );
        if( surfaceSet->surface[i].imageSet == NULL ) {
            ARLOGe("Error opening file '%s.iset'.\n", name);
            free(surfaceSet->surface);
            free(surfaceSet);
            if (fp) fclose(fp); //COVHI10426
            return (NULL);
        }
        ARLOGi(" end.\n");
        ARLOGi(" Read FeatureSet.\n");
        surfaceSet->surface[i].featureSet = ar2ReadFeatureSet( name, "fset" );
        if( surfaceSet->surface[i].featureSet == NULL ) {
            ARLOGe("Error opening file '%s.fset'.\n", name);
            ar2FreeImageSet(&surfaceSet->surface[i].imageSet);
            free(surfaceSet->surface);
            free(surfaceSet);
            if (fp) fclose(fp); //COVHI10426
            return (NULL);
        }
        ARLOGi(" end.\n");
        if (pattHandle) {
            ARLOGi(" Read MarkerSet.\n");
            ar2UtilRemoveExt( name );
            surfaceSet->surface[i].markerSet = ar2ReadMarkerSet( name, "mrk", pattHandle );
            if( surfaceSet->surface[i].markerSet == NULL ) {
                ARLOGe("Error opening file '%s.mrk'.\n", name);
                ar2FreeFeatureSet(&surfaceSet->surface[i].featureSet);
                ar2FreeImageSet(&surfaceSet->surface[i].imageSet);
                free(surfaceSet->surface);
                free(surfaceSet);
                if (fp) fclose(fp); //COVHI10426
                return (NULL);
            }
            ARLOGi(" end.\n");
        } else {
            surfaceSet->surface[i].markerSet = NULL;
        }

        if (readMode) {
            // Three further records: the rows of the surface's 3x4 transform.
            if( get_buff(buf, 256, fp) == NULL ) break;
            if( sscanf(buf, "%f %f %f %f",
                       &(surfaceSet->surface[i].trans[0][0]), &(surfaceSet->surface[i].trans[0][1]),
                       &(surfaceSet->surface[i].trans[0][2]), &(surfaceSet->surface[i].trans[0][3])) != 4 ) {
                ARLOGe("Transformation matrix read error!!\n");
                fclose(fp);
                exit(0); // Historical behaviour preserved.
            }
            if( get_buff(buf, 256, fp) == NULL ) break;
            if( sscanf(buf, "%f %f %f %f",
                       &(surfaceSet->surface[i].trans[1][0]), &(surfaceSet->surface[i].trans[1][1]),
                       &(surfaceSet->surface[i].trans[1][2]), &(surfaceSet->surface[i].trans[1][3])) != 4 ) {
                ARLOGe("Transformation matrix read error!!\n");
                fclose(fp);
                exit(0);
            }
            if( get_buff(buf, 256, fp) == NULL ) break;
            if( sscanf(buf, "%f %f %f %f",
                       &(surfaceSet->surface[i].trans[2][0]), &(surfaceSet->surface[i].trans[2][1]),
                       &(surfaceSet->surface[i].trans[2][2]), &(surfaceSet->surface[i].trans[2][3])) != 4 ) {
                ARLOGe("Transformation matrix read error!!\n");
                fclose(fp);
                exit(0);
            }
        } else {
            // Direct mode: identity transform.
            for( j = 0; j < 3; j++ ) {
                for( k = 0; k < 4; k++ ) {
                    surfaceSet->surface[i].trans[j][k] = (j == k)? 1.0f: 0.0f;
                }
            }
        }
        arUtilMatInvf( (const float (*)[4])surfaceSet->surface[i].trans, surfaceSet->surface[i].itrans );

        // Remember the pathname of the source texture image (".jpg").
        ar2UtilReplaceExt( name, 256, "jpg");
        arMalloc( surfaceSet->surface[i].jpegName, char, 256);
        strncpy( surfaceSet->surface[i].jpegName, name, 256 );
        surfaceSet->surface[i].jpegName[255] = '\0'; // Guarantee NUL-termination.
    }

    if (fp) fclose(fp); //COVHI10459

    if (i < surfaceSet->num) exit(0); // Short read of descriptor file; historical behaviour preserved.

    return surfaceSet;
}
// Application entry point for the square-marker (template/matrix) example.
// Parses command-line options, opens the camera, creates the AR pattern/
// detection/3D-pose handles, loads the marker configuration, configures the
// detector, creates the GLUT window/game-mode context, sets up ARgsub_lite
// and the OSG virtual environment, starts video capture, registers GLUT
// callbacks, and enters the GLUT main loop (which does not return).
int main(int argc, char** argv)
{
    char glutGamemode[32] = "";
    char *vconf = NULL;
    char cparaDefault[] = "../share/artoolkit-examples/Data/camera_para.dat";
    char *cpara = NULL;
    int i;
    int gotTwoPartOption;
    const char markerConfigDataFilename[] = "../share/artoolkit-examples/Data/markers.dat";
    const char objectDataFilename[] = "../share/artoolkit-examples/Data/objects.dat";

    //
    // Process command-line options.
    //

    glutInit(&argc, argv);

    i = 1; // argv[0] is name of app, so start at 1.
    while (i < argc) {
        gotTwoPartOption = FALSE;
        // Look for two-part options first.
        if ((i + 1) < argc) {
            if (strcmp(argv[i], "--vconf") == 0) {
                i++;
                vconf = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--cpara") == 0) {
                i++;
                cpara = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i],"--width") == 0) {
                i++;
                // Get width from second field.
                if (sscanf(argv[i], "%d", &prefWidth) != 1) {
                    ARLOGe("Error: --width option must be followed by desired width.\n");
                }
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i],"--height") == 0) {
                i++;
                // Get height from second field.
                if (sscanf(argv[i], "%d", &prefHeight) != 1) {
                    ARLOGe("Error: --height option must be followed by desired height.\n");
                }
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i],"--refresh") == 0) {
                i++;
                // Get refresh rate from second field.
                if (sscanf(argv[i], "%d", &prefRefresh) != 1) {
                    ARLOGe("Error: --refresh option must be followed by desired refresh rate.\n");
                }
                gotTwoPartOption = TRUE;
            }
        }
        if (!gotTwoPartOption) {
            // Look for single-part options.
            if (strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-help") == 0 || strcmp(argv[i], "-h") == 0) {
                usage(argv[0]);
            } else if (strncmp(argv[i], "-cpara=", 7) == 0) {
                cpara = &(argv[i][7]);
            } else if (strcmp(argv[i], "--version") == 0 || strcmp(argv[i], "-version") == 0 || strcmp(argv[i], "-v") == 0) {
                ARLOG("%s version %s\n", argv[0], AR_HEADER_VERSION_STRING);
                exit(0);
            } else if (strcmp(argv[i],"--windowed") == 0) {
                prefWindowed = TRUE;
            } else if (strcmp(argv[i],"--fullscreen") == 0) {
                prefWindowed = FALSE;
            } else {
                ARLOGe("Error: invalid command line argument '%s'.\n", argv[i]);
                usage(argv[0]);
            }
        }
        i++;
    }

    //
    // Video setup.
    //

    if (!setupCamera((cpara ? cpara : cparaDefault), vconf, &gCparamLT)) {
        ARLOGe("main(): Unable to set up AR camera.\n");
        exit(-1);
    }

    //
    // AR init.
    //

    // Init AR.
    gARPattHandle = arPattCreateHandle();
    if (!gARPattHandle) {
        ARLOGe("Error creating pattern handle.\n");
        exit(-1);
    }
    gARHandle = arCreateHandle(gCparamLT);
    if (!gARHandle) {
        ARLOGe("Error creating AR handle.\n");
        exit(-1);
    }
    arPattAttach(gARHandle, gARPattHandle);
    if (arSetPixelFormat(gARHandle, arVideoGetPixelFormat()) < 0) {
        ARLOGe("Error setting pixel format.\n");
        exit(-1);
    }
    gAR3DHandle = ar3DCreateHandle(&gCparamLT->param);
    if (!gAR3DHandle) {
        ARLOGe("Error creating 3D handle.\n");
        exit(-1);
    }

    //
    // Markers setup.
    //

    // Load marker(s).
    // NOTE(review): unlike the NFT example's main(), a markersSquareCount of
    // zero is not treated as an error here — confirm this is intentional.
    newMarkers(markerConfigDataFilename, gARPattHandle, &markersSquare, &markersSquareCount, &gARPattDetectionMode);
    ARLOGi("Marker count = %d\n", markersSquareCount);

    //
    // Other ARToolKit setup.
    //

    arSetMarkerExtractionMode(gARHandle, AR_USE_TRACKING_HISTORY_V2);
    //arSetMarkerExtractionMode(gARHandle, AR_NOUSE_TRACKING_HISTORY);
    //arSetLabelingThreshMode(gARHandle, AR_LABELING_THRESH_MODE_MANUAL); // Uncomment to force manual thresholding.

    // Set the pattern detection mode (template (pictorial) vs. matrix (barcode) based on
    // the marker types as defined in the marker config. file.
    arSetPatternDetectionMode(gARHandle, gARPattDetectionMode); // Default = AR_TEMPLATE_MATCHING_COLOR

    // Other application-wide marker options. Once set, these apply to all markers in use in the application.
    // If you are using standard ARToolKit picture (template) markers, leave commented to use the defaults.
    // If you are usign a different marker design (see http://www.artoolworks.com/support/app/marker.php )
    // then uncomment and edit as instructed by the marker design application.
    //arSetLabelingMode(gARHandle, AR_LABELING_BLACK_REGION); // Default = AR_LABELING_BLACK_REGION
    //arSetBorderSize(gARHandle, 0.25f); // Default = 0.25f
    //arSetMatrixCodeType(gARHandle, AR_MATRIX_CODE_3x3); // Default = AR_MATRIX_CODE_3x3

    //
    // Graphics setup.
    //

    // Set up GL context(s) for OpenGL to draw into.
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
    if (prefWindowed) {
        if (prefWidth > 0 && prefHeight > 0) glutInitWindowSize(prefWidth, prefHeight);
        else glutInitWindowSize(gCparamLT->param.xsize, gCparamLT->param.ysize);
        glutCreateWindow(argv[0]);
    } else {
        if (glutGameModeGet(GLUT_GAME_MODE_POSSIBLE)) {
            // Build a GLUT game-mode string "WxH[:depth][@refresh]" from the prefs.
            if (prefWidth && prefHeight) {
                if (prefDepth) {
                    if (prefRefresh) snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i@%i", prefWidth, prefHeight, prefDepth, prefRefresh);
                    else snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i", prefWidth, prefHeight, prefDepth);
                } else {
                    if (prefRefresh) snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i@%i", prefWidth, prefHeight, prefRefresh);
                    else snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i", prefWidth, prefHeight);
                }
            } else {
                prefWidth = glutGameModeGet(GLUT_GAME_MODE_WIDTH);
                prefHeight = glutGameModeGet(GLUT_GAME_MODE_HEIGHT);
                snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i", prefWidth, prefHeight);
            }
            glutGameModeString(glutGamemode);
            glutEnterGameMode();
        } else {
            // Game mode unavailable; fall back to a fullscreen window.
            if (prefWidth > 0 && prefHeight > 0) glutInitWindowSize(prefWidth, prefHeight);
            glutCreateWindow(argv[0]);
            glutFullScreen();
        }
    }

    // Create the OpenGL projection from the calibrated camera parameters.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, cameraLens);
    cameraPoseValid = FALSE;

    // Setup ARgsub_lite library for current OpenGL context.
    if ((gArglSettings = arglSetupForCurrentContext(&(gCparamLT->param), arVideoGetPixelFormat())) == NULL) {
        ARLOGe("main(): arglSetupForCurrentContext() returned error.\n");
        cleanup();
        exit(-1);
    }
    arglSetupDebugMode(gArglSettings, gARHandle);

    // Load objects (i.e. OSG models).
    VirtualEnvironmentInit(objectDataFilename);
    VirtualEnvironmentHandleARViewUpdatedCameraLens(cameraLens);

    //
    // Setup complete. Start tracking.
    //

    // Start the video.
    // NOTE(review): the message says "setupCamera()" but this is main(), and
    // returning FALSE (0) from main() signals success to the shell — verify
    // whether exit(-1) was intended here.
    if (arVideoCapStart() != 0) {
        ARLOGe("setupCamera(): Unable to begin camera data capture.\n");
        return (FALSE);
    }
    arUtilTimerReset();

    // Register GLUT event-handling callbacks.
    // NB: mainLoop() is registered by Visibility.
    glutDisplayFunc(Display);
    glutReshapeFunc(Reshape);
    glutVisibilityFunc(Visibility);
    glutKeyboardFunc(Keyboard);

    glutMainLoop();

    return (0);
}
/*
 * Open the camera, load/resize its calibration, and create the AR detection
 * (*arhandle) and 3D pose-estimation (*ar3dhandle) handles, then start video
 * capture. Returns TRUE on success, FALSE on failure.
 *
 * BUG FIX: every failure path after arVideoOpen() succeeded previously
 * returned without closing the video connection, leaking the camera device;
 * each such path now calls arVideoClose().
 */
static int setupCamera(const char *cparam_name, char *vconf, ARParamLT **cparamLT_p, ARHandle **arhandle, AR3DHandle **ar3dhandle)
{
    ARParam         cparam;
    int             xsize, ysize;
    AR_PIXEL_FORMAT pixFormat;

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        ARLOGe("setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }

    // Find the size of the window.
    if (arVideoGetSize(&xsize, &ysize) < 0) {
        ARLOGe("setupCamera(): Unable to determine camera frame size.\n");
        arVideoClose();
        return (FALSE);
    }
    ARLOGi("Camera image size (x,y) = (%d,%d)\n", xsize, ysize);

    // Get the format in which the camera is returning pixels.
    pixFormat = arVideoGetPixelFormat();
    if (pixFormat == AR_PIXEL_FORMAT_INVALID) {
        ARLOGe("setupCamera(): Camera is using unsupported pixel format.\n");
        arVideoClose();
        return (FALSE);
    }

    // Load the camera parameters, resize for the window and init.
    if (arParamLoad(cparam_name, 1, &cparam) < 0) {
        ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
        arVideoClose();
        return (FALSE);
    }
    if (cparam.xsize != xsize || cparam.ysize != ysize) {
        ARLOGw("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
        arParamChangeSize(&cparam, xsize, ysize, &cparam);
    }
#ifdef DEBUG
    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&cparam);
#endif
    if ((*cparamLT_p = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
        ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
        arVideoClose();
        return (FALSE);
    }

    if ((*arhandle = arCreateHandle(*cparamLT_p)) == NULL) {
        ARLOGe("setupCamera(): Error: arCreateHandle.\n");
        arVideoClose(); // BUG FIX: previously leaked the open video connection.
        return (FALSE);
    }
    if (arSetPixelFormat(*arhandle, pixFormat) < 0) {
        ARLOGe("setupCamera(): Error: arSetPixelFormat.\n");
        arVideoClose(); // BUG FIX: as above.
        return (FALSE);
    }
    if (arSetDebugMode(*arhandle, AR_DEBUG_DISABLE) < 0) {
        ARLOGe("setupCamera(): Error: arSetDebugMode.\n");
        arVideoClose(); // BUG FIX: as above.
        return (FALSE);
    }
    if (arSetImageProcMode(*arhandle, AR_IMAGE_PROC_FRAME_IMAGE) < 0) { // Change to AR_IMAGE_PROC_FIELD_IMAGE if using a DVCam.
        ARLOGe("setupCamera(): Error: arSetImageProcMode.\n");
        arVideoClose(); // BUG FIX: as above.
        return (FALSE);
    }
    if ((*ar3dhandle = ar3DCreateHandle(&(*cparamLT_p)->param)) == NULL) {
        ARLOGe("setupCamera(): Error: ar3DCreateHandle.\n");
        arVideoClose(); // BUG FIX: as above.
        return (FALSE);
    }

    if (arVideoCapStart() != 0) {
        ARLOGe("setupCamera(): Unable to begin camera data capture.\n");
        arVideoClose(); // BUG FIX: as above.
        return (FALSE);
    }

    return (TRUE);
}
void LogFramesRate() { ARLOGi("Frames received: %d\n", m_framesReceived); }
// Read the source image for feature generation.
//
// filename - path of the input image; currently only JPEG (.jpeg/.jpg/.jpe)
//            is supported.
// image_p  - receives a pointer to the decoded pixel data (owned by the
//            module-level jpegImage; caller must not free it directly).
// xsize_p / ysize_p - receive image width/height in pixels.
// nc_p     - receives the channel count (1 = grayscale, 3 = RGB).
// dpi_p    - in/out: pass -1.0 to use the resolution embedded in the file
//            (prompting interactively on stdin if the file has none);
//            any other value is left untouched.
//
// Returns 0 on success or E_BAD_PARAMETER for NULL arguments; all other
// failures terminate via the EXIT() macro.
static int readImageFromFile(const char *filename, ARUint8 **image_p, int *xsize_p, int *ysize_p, int *nc_p, float *dpi_p)
{
    char *ext;
    char buf[256];
    char buf1[512], buf2[512];

    if (!filename || !image_p || !xsize_p || !ysize_p || !nc_p || !dpi_p) return (E_BAD_PARAMETER);

    // Second argument 1 — presumably requests a lowercased extension, since
    // only lowercase comparisons follow. TODO confirm against the utility's docs.
    ext = arUtilGetFileExtensionFromPath(filename, 1);
    if (!ext) {
        ARLOGe("Error: unable to determine extension of file '%s'. Exiting.\n", filename);
        EXIT(E_INPUT_DATA_ERROR);
    }
    if (strcmp(ext, "jpeg") == 0 || strcmp(ext, "jpg") == 0 || strcmp(ext, "jpe") == 0) {
        ARLOGi("Reading JPEG file...\n");
        ar2UtilDivideExt( filename, buf1, buf2 ); // Split into base path (buf1) and extension (buf2).
        jpegImage = ar2ReadJpegImage( buf1, buf2 ); // Stored in module-level jpegImage; freed elsewhere.
        if( jpegImage == NULL ) {
            ARLOGe("Error: unable to read JPEG image from file '%s'. Exiting.\n", filename);
            EXIT(E_INPUT_DATA_ERROR);
        }
        ARLOGi(" Done.\n");
        *image_p = jpegImage->image;
        if (jpegImage->nc != 1 && jpegImage->nc != 3) {
            ARLOGe("Error: Input JPEG image is in neither RGB nor grayscale format. %d bytes/pixel %sformat is unsupported. Exiting.\n", jpegImage->nc, (jpegImage->nc == 4 ? "(possibly CMYK) " : ""));
            EXIT(E_INPUT_DATA_ERROR);
        }
        *nc_p = jpegImage->nc;
        ARLOGi("JPEG image '%s' is %dx%d.\n", filename, jpegImage->xsize, jpegImage->ysize);
        // KPM needs a minimum image size to extract enough keypoints.
        if (jpegImage->xsize < KPM_MINIMUM_IMAGE_SIZE || jpegImage->ysize < KPM_MINIMUM_IMAGE_SIZE) {
            ARLOGe("Error: JPEG image width and height must be at least %d pixels. Exiting.\n", KPM_MINIMUM_IMAGE_SIZE);
            EXIT(E_INPUT_DATA_ERROR);
        }
        *xsize_p = jpegImage->xsize;
        *ysize_p = jpegImage->ysize;
        if (*dpi_p == -1.0) {
            // No DPI given on the command line; use the file's embedded value,
            // or prompt the user until they enter a parseable number.
            if( jpegImage->dpi == 0.0f ) {
                for (;;) {
                    printf("JPEG image '%s' does not contain embedded resolution data, and no resolution specified on command-line.\nEnter resolution to use (in decimal DPI): ", filename);
                    if( fgets( buf, 256, stdin ) == NULL ) {
                        EXIT(E_USER_INPUT_CANCELLED);
                    }
                    if( sscanf(buf, "%f", &(jpegImage->dpi)) == 1 ) break;
                }
            }
            *dpi_p = jpegImage->dpi;
        }
    //} else if (strcmp(ext, "png") == 0) {
    } else {
        ARLOGe("Error: file '%s' has extension '%s', which is not supported for reading. Exiting.\n", filename, ext);
        free(ext);
        EXIT(E_INPUT_DATA_ERROR);
    }
    free(ext); // ext was heap-allocated by arUtilGetFileExtensionFromPath().

    return 0;
}
// Reads dpiMinAllowable, xsize, ysize, dpi, background, dpiMin, dpiMax.
// Sets dpiMin, dpiMax, dpi_num, dpi_list.
// Determines the pyramid of image resolutions (in DPI) at which features will
// be generated: validates/derives dpiMin and dpiMax (prompting interactively
// on stdin unless running with -background), then fills dpi_list with dpi_num
// levels spaced by a factor of 2^(1/3) from dpiMin up to dpiMax, stored
// highest-first. Returns 0.
static int setDPI( void )
{
    float dpiWork, dpiMinAllowable;
    char buf1[256];
    int i;

    // Determine minimum allowable DPI, truncated to 3 decimal places.
    // (Smallest DPI at which the smaller image dimension still spans
    // KPM_MINIMUM_IMAGE_SIZE pixels.)
    dpiMinAllowable = truncf(((float)KPM_MINIMUM_IMAGE_SIZE / (float)(MIN(xsize, ysize))) * dpi * 1000.0) / 1000.0f;

    // In background (non-interactive) mode, fill in defaults instead of prompting.
    if (background) {
        if (dpiMin == -1.0f) dpiMin = dpiMinAllowable;
        if (dpiMax == -1.0f) dpiMax = dpi;
    }

    // Resolve dpiMin: prompt until valid, or clamp a too-small command-line value.
    if (dpiMin == -1.0f) {
        for (;;) {
            printf("Enter the minimum image resolution (DPI, in range [%.3f, %.3f]): ", dpiMinAllowable, (dpiMax == -1.0f ? dpi : dpiMax));
            if( fgets( buf1, 256, stdin ) == NULL ) EXIT(E_USER_INPUT_CANCELLED);
            if( sscanf(buf1, "%f", &dpiMin) == 0 ) continue;
            if (dpiMin >= dpiMinAllowable && dpiMin <= (dpiMax == -1.0f ? dpi : dpiMax)) break;
            else printf("Error: you entered %.3f, but value must be greater than or equal to %.3f and less than or equal to %.3f.\n", dpiMin, dpiMinAllowable, (dpiMax == -1.0f ? dpi : dpiMax));
        }
    } else if (dpiMin < dpiMinAllowable) {
        ARLOGe("Warning: -min_dpi=%.3f smaller than minimum allowable. Value will be adjusted to %.3f.\n", dpiMin, dpiMinAllowable);
        dpiMin = dpiMinAllowable;
    }
    // Resolve dpiMax: prompt until valid, or clamp a too-large command-line value.
    if (dpiMax == -1.0f) {
        for (;;) {
            printf("Enter the maximum image resolution (DPI, in range [%.3f, %.3f]): ", dpiMin, dpi);
            if( fgets( buf1, 256, stdin ) == NULL ) EXIT(E_USER_INPUT_CANCELLED);
            if( sscanf(buf1, "%f", &dpiMax) == 0 ) continue;
            if (dpiMax >= dpiMin && dpiMax <= dpi) break;
            else printf("Error: you entered %.3f, but value must be greater than or equal to minimum resolution (%.3f) and less than or equal to image resolution (%.3f).\n", dpiMax, dpiMin, dpi);
        }
    } else if (dpiMax > dpi) {
        ARLOGe("Warning: -max_dpi=%.3f larger than maximum allowable. Value will be adjusted to %.3f.\n", dpiMax, dpi);
        dpiMax = dpi;
    }

    // Decide how many levels we need.
    if (dpiMin == dpiMax) {
        dpi_num = 1;
    } else {
        dpiWork = dpiMin;
        for( i = 1;; i++ ) {
            dpiWork *= powf(2.0f, 1.0f/3.0f); // *= 1.25992104989487
            if( dpiWork >= dpiMax*0.95f ) {
                break;
            }
        }
        dpi_num = i + 1;
    }
    arMalloc(dpi_list, float, dpi_num);

    // Determine the DPI values of each level.
    dpiWork = dpiMin;
    for( i = 0; i < dpi_num; i++ ) {
        ARLOGi("Image DPI (%d): %f\n", i+1, dpiWork);
        dpi_list[dpi_num - i - 1] = dpiWork; // Lowest value goes at tail of array, highest at head.
        dpiWork *= powf(2.0f, 1.0f/3.0f);
        if( dpiWork >= dpiMax*0.95f ) dpiWork = dpiMax; // Snap the final level exactly to dpiMax.
    }

    return 0;
}
int main( int argc, char *argv[] ) { AR2JpegImageT *jpegImage = NULL; ARUint8 *image = NULL; AR2ImageSetT *imageSet = NULL; AR2FeatureMapT *featureMap = NULL; AR2FeatureSetT *featureSet = NULL; KpmRefDataSet *refDataSet = NULL; float scale1, scale2; int procMode; char buf[1024]; int num; int i, j; char *sep = NULL; time_t clock; int maxFeatureNum; int err; for( i = 1; i < argc; i++ ) { if( strncmp(argv[i], "-dpi=", 5) == 0 ) { if( sscanf(&argv[i][5], "%f", &dpi) != 1 ) usage(argv[0]); } else if( strncmp(argv[i], "-sd_thresh=", 11) == 0 ) { if( sscanf(&argv[i][11], "%f", &sd_thresh) != 1 ) usage(argv[0]); } else if( strncmp(argv[i], "-max_thresh=", 12) == 0 ) { if( sscanf(&argv[i][12], "%f", &max_thresh) != 1 ) usage(argv[0]); } else if( strncmp(argv[i], "-min_thresh=", 12) == 0 ) { if( sscanf(&argv[i][12], "%f", &min_thresh) != 1 ) usage(argv[0]); } else if( strncmp(argv[i], "-feature_density=", 13) == 0 ) { if( sscanf(&argv[i][13], "%d", &featureDensity) != 1 ) usage(argv[0]); } else if( strncmp(argv[i], "-level=", 7) == 0 ) { if( sscanf(&argv[i][7], "%d", &tracking_extraction_level) != 1 ) usage(argv[0]); } else if( strncmp(argv[i], "-leveli=", 8) == 0 ) { if( sscanf(&argv[i][8], "%d", &initialization_extraction_level) != 1 ) usage(argv[0]); } else if( strncmp(argv[i], "-max_dpi=", 9) == 0 ) { if( sscanf(&argv[i][9], "%f", &dpiMax) != 1 ) usage(argv[0]); } else if( strncmp(argv[i], "-min_dpi=", 9) == 0 ) { if( sscanf(&argv[i][9], "%f", &dpiMin) != 1 ) usage(argv[0]); } else if( strcmp(argv[i], "-background") == 0 ) { background = 1; } else if( strcmp(argv[i], "-nofset") == 0 ) { genfset = 0; } else if( strcmp(argv[i], "-fset") == 0 ) { genfset = 1; } else if( strcmp(argv[i], "-nofset2") == 0 ) { ARLOGe("Error: -nofset2 option no longer supported as of ARToolKit v5.3.\n"); exit(-1); } else if( strcmp(argv[i], "-fset2") == 0 ) { ARLOGe("Error: -fset2 option no longer supported as of ARToolKit v5.3.\n"); exit(-1); } else if( strcmp(argv[i], "-nofset3") == 0 ) { 
genfset3 = 0; } else if( strcmp(argv[i], "-fset3") == 0 ) { genfset3 = 1; } else if( strncmp(argv[i], "-log=", 5) == 0 ) { strncpy(logfile, &(argv[i][5]), sizeof(logfile) - 1); logfile[sizeof(logfile) - 1] = '\0'; // Ensure NULL termination. } else if( strncmp(argv[i], "-loglevel=", 10) == 0 ) { if (strcmp(&(argv[i][10]), "DEBUG") == 0) arLogLevel = AR_LOG_LEVEL_DEBUG; else if (strcmp(&(argv[i][10]), "INFO") == 0) arLogLevel = AR_LOG_LEVEL_INFO; else if (strcmp(&(argv[i][10]), "WARN") == 0) arLogLevel = AR_LOG_LEVEL_WARN; else if (strcmp(&(argv[i][10]), "ERROR") == 0) arLogLevel = AR_LOG_LEVEL_ERROR; else usage(argv[0]); } else if( strncmp(argv[i], "-exitcode=", 10) == 0 ) { strncpy(exitcodefile, &(argv[i][10]), sizeof(exitcodefile) - 1); exitcodefile[sizeof(exitcodefile) - 1] = '\0'; // Ensure NULL termination. } else if (strcmp(argv[i], "--version") == 0 || strcmp(argv[i], "-version") == 0 || strcmp(argv[i], "-v") == 0) { ARLOG("%s version %s\n", argv[0], AR_HEADER_VERSION_STRING); exit(0); } else if (strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-h") == 0 || strcmp(argv[i], "-?") == 0) { usage(argv[0]); } else if( filename[0] == '\0' ) { strncpy(filename, argv[i], sizeof(filename) - 1); filename[sizeof(filename) - 1] = '\0'; // Ensure NULL termination. } else { ARLOGe("Error: unrecognised option '%s'\n", argv[i]); usage(argv[0]); } } // Do some checks on the input. if (filename[0] == '\0') { ARLOGe("Error: no input file specified. Exiting.\n"); usage(argv[0]); } sep = strrchr(filename, '.'); if (!sep || (strcmp(sep, ".jpeg") && strcmp(sep, ".jpg") && strcmp(sep, ".jpe") && strcmp(sep, ".JPEG") && strcmp(sep, ".JPE") && strcmp(sep, ".JPG"))) { ARLOGe("Error: input file must be a JPEG image (with suffix .jpeg/.jpg/.jpe). 
Exiting.\n"); usage(argv[0]); } if (background) { #if HAVE_DAEMON_FUNC if (filename[0] != '/' || logfile[0] != '/' || exitcodefile[0] != '/') { ARLOGe("Error: -background flag requires full pathname of files (input, -log or -exitcode) to be specified. Exiting.\n"); EXIT(E_BAD_PARAMETER); } if (tracking_extraction_level == -1 && (sd_thresh == -1.0 || min_thresh == -1.0 || max_thresh == -1.0)) { ARLOGe("Error: -background flag requires -level or -sd_thresh, -min_thresh and -max_thresh -to be set. Exiting.\n"); EXIT(E_BAD_PARAMETER); } if (initialization_extraction_level == -1 && (featureDensity == -1)) { ARLOGe("Error: -background flag requires -leveli or -surf_thresh to be set. Exiting.\n"); EXIT(E_BAD_PARAMETER); } if (dpi == -1.0) { ARLOGe("Error: -background flag requires -dpi to be set. Exiting.\n"); EXIT(E_BAD_PARAMETER); } if (dpiMin != -1.0f && (dpiMin <= 0.0f || dpiMin > dpi)) { ARLOGe("Error: -min_dpi must be greater than 0 and less than or equal to -dpi. Exiting.n\n"); EXIT(E_BAD_PARAMETER); } if (dpiMax != -1.0f && (dpiMax < dpiMin || dpiMax > dpi)) { ARLOGe("Error: -max_dpi must be greater than or equal to -min_dpi and less than or equal to -dpi. Exiting.n\n"); EXIT(E_BAD_PARAMETER); } #else ARLOGe("Error: -background flag not supported on this operating system. Exiting.\n"); exit(E_BACKGROUND_OPERATION_UNSUPPORTED); #endif } if (background) { #if HAVE_DAEMON_FUNC // Daemonize. if (daemon(0, 0) == -1) { perror("Unable to detach from controlling terminal"); EXIT(E_UNABLE_TO_DETACH_FROM_CONTROLLING_TERMINAL); } // At this point, stdin, stdout and stderr point to /dev/null. #endif } if (logfile[0]) { if (!freopen(logfile, "a", stdout) || !freopen(logfile, "a", stderr)) ARLOGe("Unable to redirect stdout or stderr to logfile.\n"); } if (exitcodefile[0]) { atexit(write_exitcode); } // Print the start date and time. 
clock = time(NULL); if (clock != (time_t)-1) { struct tm *timeptr = localtime(&clock); if (timeptr) { char stime[26+8] = ""; if (strftime(stime, sizeof(stime), "%Y-%m-%d %H:%M:%S %z", timeptr)) // e.g. "1999-12-31 23:59:59 NZDT". ARLOGi("--\nGenerator started at %s\n", stime); } } if (genfset) { if (tracking_extraction_level == -1 && (sd_thresh == -1.0 || min_thresh == -1.0 || max_thresh == -1.0 || occ_size == -1)) { do { printf("Select extraction level for tracking features, 0(few) <--> 4(many), [default=%d]: ", TRACKING_EXTRACTION_LEVEL_DEFAULT); if( fgets(buf, sizeof(buf), stdin) == NULL ) EXIT(E_USER_INPUT_CANCELLED); if (buf[0] == '\n') tracking_extraction_level = TRACKING_EXTRACTION_LEVEL_DEFAULT; else sscanf(buf, "%d", &tracking_extraction_level); } while (tracking_extraction_level < 0 || tracking_extraction_level > 4); } switch (tracking_extraction_level) { case 0: if( sd_thresh == -1.0f ) sd_thresh = AR2_DEFAULT_SD_THRESH_L0; if( min_thresh == -1.0f ) min_thresh = AR2_DEFAULT_MIN_SIM_THRESH_L0; if( max_thresh == -1.0f ) max_thresh = AR2_DEFAULT_MAX_SIM_THRESH_L0; if( occ_size == -1 ) occ_size = AR2_DEFAULT_OCCUPANCY_SIZE; break; case 1: if( sd_thresh == -1.0f ) sd_thresh = AR2_DEFAULT_SD_THRESH_L1; if( min_thresh == -1.0f ) min_thresh = AR2_DEFAULT_MIN_SIM_THRESH_L1; if( max_thresh == -1.0f ) max_thresh = AR2_DEFAULT_MAX_SIM_THRESH_L1; if( occ_size == -1 ) occ_size = AR2_DEFAULT_OCCUPANCY_SIZE; break; case 2: if( sd_thresh == -1.0f ) sd_thresh = AR2_DEFAULT_SD_THRESH_L2; if( min_thresh == -1.0f ) min_thresh = AR2_DEFAULT_MIN_SIM_THRESH_L2; if( max_thresh == -1.0f ) max_thresh = AR2_DEFAULT_MAX_SIM_THRESH_L2; if( occ_size == -1 ) occ_size = AR2_DEFAULT_OCCUPANCY_SIZE*2/3; break; case 3: if( sd_thresh == -1.0f ) sd_thresh = AR2_DEFAULT_SD_THRESH_L3; if( min_thresh == -1.0f ) min_thresh = AR2_DEFAULT_MIN_SIM_THRESH_L3; if( max_thresh == -1.0f ) max_thresh = AR2_DEFAULT_MAX_SIM_THRESH_L3; if( occ_size == -1 ) occ_size = AR2_DEFAULT_OCCUPANCY_SIZE*2/3; break; 
case 4: // Same as 3, but with smaller AR2_DEFAULT_OCCUPANCY_SIZE. if( sd_thresh == -1.0f ) sd_thresh = AR2_DEFAULT_SD_THRESH_L3; if( min_thresh == -1.0f ) min_thresh = AR2_DEFAULT_MIN_SIM_THRESH_L3; if( max_thresh == -1.0f ) max_thresh = AR2_DEFAULT_MAX_SIM_THRESH_L3; if( occ_size == -1 ) occ_size = AR2_DEFAULT_OCCUPANCY_SIZE*1/2; break; default: // We only get to here if the parameters are already set. break; } ARLOGi("MAX_THRESH = %f\n", max_thresh); ARLOGi("MIN_THRESH = %f\n", min_thresh); ARLOGi("SD_THRESH = %f\n", sd_thresh); } if (genfset3) { if (initialization_extraction_level == -1 && featureDensity == -1) { do { printf("Select extraction level for initializing features, 0(few) <--> 3(many), [default=%d]: ", INITIALIZATION_EXTRACTION_LEVEL_DEFAULT); if( fgets(buf,1024,stdin) == NULL ) EXIT(E_USER_INPUT_CANCELLED); if (buf[0] == '\n') initialization_extraction_level = INITIALIZATION_EXTRACTION_LEVEL_DEFAULT; else sscanf(buf, "%d", &initialization_extraction_level); } while (initialization_extraction_level < 0 || initialization_extraction_level > 3); } switch(initialization_extraction_level) { case 0: if( featureDensity == -1 ) featureDensity = KPM_SURF_FEATURE_DENSITY_L0; break; default: case 1: if( featureDensity == -1 ) featureDensity = KPM_SURF_FEATURE_DENSITY_L1; break; case 2: if( featureDensity == -1 ) featureDensity = KPM_SURF_FEATURE_DENSITY_L2; break; case 3: if( featureDensity == -1 ) featureDensity = KPM_SURF_FEATURE_DENSITY_L3; break; } ARLOGi("SURF_FEATURE = %d\n", featureDensity); } if ((err = readImageFromFile(filename, &image, &xsize, &ysize, &nc, &dpi)) != 0) { ARLOGe("Error reading image from file '%s'.\n", filename); EXIT(err); } setDPI(); ARLOGi("Generating ImageSet...\n"); ARLOGi(" (Source image xsize=%d, ysize=%d, channels=%d, dpi=%.1f).\n", xsize, ysize, nc, dpi); imageSet = ar2GenImageSet( image, xsize, ysize, nc, dpi, dpi_list, dpi_num ); ar2FreeJpegImage(&jpegImage); if( imageSet == NULL ) { ARLOGe("ImageSet generation error!!\n"); 
EXIT(E_DATA_PROCESSING_ERROR); } ARLOGi(" Done.\n"); ar2UtilRemoveExt( filename ); ARLOGi("Saving to %s.iset...\n", filename); if( ar2WriteImageSet( filename, imageSet ) < 0 ) { ARLOGe("Save error: %s.iset\n", filename ); EXIT(E_DATA_PROCESSING_ERROR); } ARLOGi(" Done.\n"); if (genfset) { arMalloc( featureSet, AR2FeatureSetT, 1 ); // A featureSet with a single image, arMalloc( featureSet->list, AR2FeaturePointsT, imageSet->num ); // and with 'num' scale levels of this image. featureSet->num = imageSet->num; ARLOGi("Generating FeatureList...\n"); for( i = 0; i < imageSet->num; i++ ) { ARLOGi("Start for %f dpi image.\n", imageSet->scale[i]->dpi); featureMap = ar2GenFeatureMap( imageSet->scale[i], AR2_DEFAULT_TS1*AR2_TEMP_SCALE, AR2_DEFAULT_TS2*AR2_TEMP_SCALE, AR2_DEFAULT_GEN_FEATURE_MAP_SEARCH_SIZE1, AR2_DEFAULT_GEN_FEATURE_MAP_SEARCH_SIZE2, AR2_DEFAULT_MAX_SIM_THRESH2, AR2_DEFAULT_SD_THRESH2 ); if( featureMap == NULL ) { ARLOGe("Error!!\n"); EXIT(E_DATA_PROCESSING_ERROR); } ARLOGi(" Done.\n"); featureSet->list[i].coord = ar2SelectFeature2( imageSet->scale[i], featureMap, AR2_DEFAULT_TS1*AR2_TEMP_SCALE, AR2_DEFAULT_TS2*AR2_TEMP_SCALE, AR2_DEFAULT_GEN_FEATURE_MAP_SEARCH_SIZE2, occ_size, max_thresh, min_thresh, sd_thresh, &num ); if( featureSet->list[i].coord == NULL ) num = 0; featureSet->list[i].num = num; featureSet->list[i].scale = i; scale1 = 0.0f; for( j = 0; j < imageSet->num; j++ ) { if( imageSet->scale[j]->dpi < imageSet->scale[i]->dpi ) { if( imageSet->scale[j]->dpi > scale1 ) scale1 = imageSet->scale[j]->dpi; } } if( scale1 == 0.0f ) { featureSet->list[i].mindpi = imageSet->scale[i]->dpi * 0.5f; } else { /* scale2 = imageSet->scale[i]->dpi; scale = sqrtf( scale1 * scale2 ); featureSet->list[i].mindpi = scale2 / ((scale2/scale - 1.0f)*1.1f + 1.0f); */ featureSet->list[i].mindpi = scale1; } scale1 = 0.0f; for( j = 0; j < imageSet->num; j++ ) { if( imageSet->scale[j]->dpi > imageSet->scale[i]->dpi ) { if( scale1 == 0.0f || imageSet->scale[j]->dpi < scale1 ) 
scale1 = imageSet->scale[j]->dpi; } } if( scale1 == 0.0f ) { featureSet->list[i].maxdpi = imageSet->scale[i]->dpi * 2.0f; } else { //scale2 = imageSet->scale[i]->dpi * 1.2f; scale2 = imageSet->scale[i]->dpi; /* scale = sqrtf( scale1 * scale2 ); featureSet->list[i].maxdpi = scale2 * ((scale/scale2 - 1.0f)*1.1f + 1.0f); */ featureSet->list[i].maxdpi = scale2*0.8f + scale1*0.2f; } ar2FreeFeatureMap( featureMap ); } ARLOGi(" Done.\n"); ARLOGi("Saving FeatureSet...\n"); if( ar2SaveFeatureSet( filename, "fset", featureSet ) < 0 ) { ARLOGe("Save error: %s.fset\n", filename ); EXIT(E_DATA_PROCESSING_ERROR); } ARLOGi(" Done.\n"); ar2FreeFeatureSet( &featureSet ); } if (genfset3) { ARLOGi("Generating FeatureSet3...\n"); refDataSet = NULL; procMode = KpmProcFullSize; for( i = 0; i < imageSet->num; i++ ) { //if( imageSet->scale[i]->dpi > 100.0f ) continue; maxFeatureNum = featureDensity * imageSet->scale[i]->xsize * imageSet->scale[i]->ysize / (480*360); ARLOGi("(%d, %d) %f[dpi]\n", imageSet->scale[i]->xsize, imageSet->scale[i]->ysize, imageSet->scale[i]->dpi); if( kpmAddRefDataSet ( #if AR2_CAPABLE_ADAPTIVE_TEMPLATE imageSet->scale[i]->imgBWBlur[1], #else imageSet->scale[i]->imgBW, #endif AR_PIXEL_FORMAT_MONO, imageSet->scale[i]->xsize, imageSet->scale[i]->ysize, imageSet->scale[i]->dpi, procMode, KpmCompNull, maxFeatureNum, 1, i, &refDataSet) < 0 ) { // Page number set to 1 by default. ARLOGe("Error at kpmAddRefDataSet.\n"); EXIT(E_DATA_PROCESSING_ERROR); } } ARLOGi(" Done.\n"); ARLOGi("Saving FeatureSet3...\n"); if( kpmSaveRefDataSet(filename, "fset3", refDataSet) != 0 ) { ARLOGe("Save error: %s.fset2\n", filename ); EXIT(E_DATA_PROCESSING_ERROR); } ARLOGi(" Done.\n"); kpmDeleteRefDataSet( &refDataSet ); } ar2FreeImageSet( &imageSet ); // Print the start date and time. clock = time(NULL); if (clock != (time_t)-1) { struct tm *timeptr = localtime(&clock); if (timeptr) { char stime[26+8] = ""; if (strftime(stime, sizeof(stime), "%Y-%m-%d %H:%M:%S %z", timeptr)) // e.g. 
"1999-12-31 23:59:59 NZDT". ARLOGi("Generator finished at %s\n--\n", stime); } } exitcode = E_NO_ERROR; return (exitcode); }
static void init( void ) { ARParam cparam; ARParamLT *cparamLT; ARGViewport viewport; xsize = 640; ysize = 480; ARLOGi("Image size (x,y) = (%d,%d)\n", xsize, ysize); /* set the initial camera parameters */ arParamClear(&cparam, xsize, ysize, AR_DIST_FUNCTION_VERSION_DEFAULT); ARLOG("*** Camera Parameter ***\n"); arParamDisp( &cparam ); //COVHI10445 ignored as false positive, i.e. cparam->m[3][4] uninitialized. cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET); arHandle = arCreateHandle(cparamLT); if( arHandle == NULL ) { ARLOGe("Error: arCreateHandle.\n"); exit(0); } arSetPixelFormat( arHandle, PIXEL_FORMAT ); arSetLabelingMode( arHandle, AR_LABELING_BLACK_REGION ); arSetImageProcMode( arHandle, AR_IMAGE_PROC_FRAME_IMAGE ); ar3DHandle = ar3DCreateHandle( &cparam ); if( ar3DHandle == NULL ) { ARLOGe("Error: ar3DCreateHandle.\n"); exit(0); } /* open the graphics window */ viewport.sx = 0; viewport.sy = 0; viewport.xsize = xsize; viewport.ysize = ysize; vp = argCreateViewport( &viewport ); if( vp == NULL ) exit(0); argViewportSetCparam( vp, &cparam ); argViewportSetPixFormat( vp, PIXEL_FORMAT ); argViewportSetDispMode( vp, AR_GL_DISP_MODE_FIT_TO_VIEWPORT ); argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_GL_DRAW_PIXELS ); //argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME ); //argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FIELD ); argViewportSetDistortionMode( vp, AR_GL_DISTORTION_COMPENSATE_DISABLE ); //argViewportSetDistortionMode( vp, AR_GL_DISTORTION_COMPENSATE_ENABLE ); #if 0 if( argSetFullScreenConfig("1024x768") == 0 ) { ARLOGe("Full screen is not possible.\n"); exit(0); } //argGetWindowSizeFullScreen( &viewport.xsize, &viewport.ysize ); viewport.sx = 0; viewport.sy = 0; viewport.xsize = 1024; viewport.ysize = 768; argViewportSetViewportFullScreen( vpL, &viewport ); viewport.sx = 1024; argViewportSetViewportFullScreen( vpR, &viewport ); #endif }
AR2ImageSetT *ar2ReadImageSet( char *filename ) { FILE *fp; AR2JpegImageT *jpgImage; AR2ImageSetT *imageSet; float dpi; int i, k1; #if AR2_CAPABLE_ADAPTIVE_TEMPLATE int j, k2; ARUint *p1, *p2; #endif size_t len; const char ext[] = ".iset"; char *buf; len = strlen(filename) + strlen(ext) + 1; // +1 for nul terminator. arMalloc(buf, char, len); sprintf(buf, "%s%s", filename, ext); fp = fopen(buf, "rb"); free(buf); if (!fp) { ARLOGe("Error: unable to open file '%s%s' for reading.\n", filename, ext); return (NULL); } arMalloc( imageSet, AR2ImageSetT, 1 ); if( fread(&(imageSet->num), sizeof(imageSet->num), 1, fp) != 1 || imageSet->num <= 0) { ARLOGe("Error reading imageSet.\n"); goto bail; } ARLOGi("Imageset contains %d images.\n", imageSet->num); arMalloc( imageSet->scale, AR2ImageT*, imageSet->num ); arMalloc( imageSet->scale[0], AR2ImageT, 1 ); jpgImage = ar2ReadJpegImage2(fp); // Caller must free result. if( jpgImage == NULL || jpgImage->nc != 1 ) { ARLOGw("Falling back to reading '%s%s' in ARToolKit v4.x format.\n", filename, ext); free(imageSet->scale[0]); free(imageSet->scale); free(imageSet); if( jpgImage == NULL ) { rewind(fp); return ar2ReadImageSetOld(fp); } free(jpgImage); //COVHI10396 fclose(fp); return NULL; } imageSet->scale[0]->xsize = jpgImage->xsize; imageSet->scale[0]->ysize = jpgImage->ysize; imageSet->scale[0]->dpi = jpgImage->dpi; // The dpi value is not read correctly by jpeglib embedded in OpenCV 2.2.x. #if AR2_CAPABLE_ADAPTIVE_TEMPLATE imageSet->scale[0]->imgBWBlur[0] = jpgImage->image; // Create the blurred images. 
for( j = 1; j < AR2_BLUR_IMAGE_MAX; j++ ) { arMalloc( imageSet->scale[0]->imgBWBlur[j], ARUint8, imageSet->scale[0]->xsize * imageSet->scale[0]->ysize); p1 = dst->imgBWBlur[0]; p2 = dst->imgBWBlur[i]; for( k1 = 0; k1 < imageSet->scale[0]->xsize * imageSet->scale[0]->ysize; k1++ ) *(p2++) = *(p1++); defocus_image( imageSet->scale[0]->imgBWBlur[j], imageSet->scale[0]->xsize, imageSet->scale[0]->ysize, 3 ); } #else imageSet->scale[0]->imgBW = jpgImage->image; #endif free(jpgImage); // Minify for the other scales. // First, find the list of scales we wrote into the file. fseek(fp, (long)(-(int)sizeof(dpi)*(imageSet->num - 1)), SEEK_END); for( i = 1; i < imageSet->num; i++ ) { if( fread(&dpi, sizeof(dpi), 1, fp) != 1 ) { for( k1 = 0; k1 < i; k1++ ) { #if AR2_CAPABLE_ADAPTIVE_TEMPLATE for( k2 = 0; k2 < AR2_BLUR_IMAGE_MAX; k2++ ) free(imageSet->scale[k1]->imgBWBlur[k2]); #else free(imageSet->scale[k1]->imgBW); #endif free(imageSet->scale[k1]); } goto bail1; } imageSet->scale[i] = ar2GenImageLayer2( imageSet->scale[0], dpi ); if( imageSet->scale[i] == NULL ) { for( k1 = 0; k1 < i; k1++ ) { #if AR2_CAPABLE_ADAPTIVE_TEMPLATE for( k2 = 0; k2 < AR2_BLUR_IMAGE_MAX; k2++ ) free(imageSet->scale[k1]->imgBWBlur[k2]); #else free(imageSet->scale[k1]->imgBW); #endif free(imageSet->scale[k1]); } goto bail1; } } fclose(fp); return imageSet; bail1: free(imageSet->scale); bail: free(imageSet); fclose(fp); return NULL; }
// Open a video capture device via Windows.Media.Capture.
// config: whitespace-separated option string (may be NULL or empty; defaults used).
// Returns a newly allocated parameter block, or NULL on error.
AR2VideoParamWinMCT *ar2VideoOpenWinMC(const char *config)
{
    AR2VideoParamWinMCT *vid;
    int width = 320;
    int height = 240;
    int flipH = 0, flipV = 0; // NOTE(review): flipH and showDialog are parsed below but never applied -- confirm intended.
    int devNum = 0;
    int showDialog = -1;
    const char *a;
    char b[256];
    const char config_default[] = "";
    int err_i = 0;

    //ARLOG("Entering ar2VideoOpenWinMC\n");

    // Module-level init on first open; refcounted so the last close finalises.
    if (ar2VideoWinMCRefCount == 0) {
        if (!ar2VideoWinMCInit2()) return NULL;
    }
    ar2VideoWinMCRefCount++;

    arMallocClear(vid, AR2VideoParamWinMCT, 1);
    vid->format = AR_PIXEL_FORMAT_INVALID;
    vid->preferredDeviceLocation = Windows::Devices::Enumeration::Panel::Unknown;

    // Ensure the provided config is valid, otherwise use default config.
    if (!config) a = config_default;
    else if (!config[0]) a = config_default;
    else a = config;

    // Tokenise and process the config string.
    if (a != NULL) {
        for (;;) {
            while(*a == ' ' || *a == '\t') a++; // Skip inter-token whitespace.
            if (*a == '\0') break;

            if (sscanf(a, "%s", b) == 0) break; // Copy next token into b.

            if (strncmp(b, "-devNum=", 8) == 0) {
                if (sscanf(&b[8], "%d", &devNum) != 1) err_i = 1;
                else if (devNum < 0) {
                    ARLOGe("Error: device number must be integer beginning with 1, or 0 to use default device.\n");
                    err_i = 1;
                }
            } else if( strncmp( b, "-format=", 8 ) == 0 ) {
                // Requested output pixel format.
                if (strcmp(b+8, "0") == 0) {
                    vid->format = AR_PIXEL_FORMAT_INVALID;
                    ARLOGi("Requesting images in system default format.\n");
                } else if (strcmp(b+8, "BGRA") == 0) {
                    vid->format = AR_PIXEL_FORMAT_BGRA;
                    ARLOGi("Requesting images in BGRA format.\n");
                } else if (strcmp(b+8, "BGR") == 0) {
                    vid->format = AR_PIXEL_FORMAT_BGR;
                    ARLOGi("Requesting images in BGR format.\n");
                } else if (strcmp(b+8, "RGB_565") == 0) {
                    vid->format = AR_PIXEL_FORMAT_RGB_565;
                    ARLOGi("Requesting images in RGB_565 format.\n");
                } else if (strcmp(b+8, "RGBA_5551") == 0) {
                    vid->format = AR_PIXEL_FORMAT_RGBA_5551;
                    ARLOGi("Requesting images in RGB_5551 format.\n");
                } else if (strcmp(b+8, "2vuy") == 0 || strcmp(b+8, "UYVY") == 0) {
                    vid->format = AR_PIXEL_FORMAT_2vuy;
                    ARLOGi("Requesting images in 2vuy/UYVY format.\n");
                } else if (strcmp(b+8, "yuvs") == 0 || strcmp(b+8, "YUY2") == 0) {
                    vid->format = AR_PIXEL_FORMAT_yuvs;
                    ARLOGi("Requesting images in yuvs/YUY2 format.\n");
                } else if (strcmp(b+8, "NV21") == 0) {
                    vid->format = AR_PIXEL_FORMAT_NV21;
                    ARLOGi("Requesting images in NV21 format.\n");
                } else if (strcmp(b+8, "420f") == 0 || strcmp(b+8, "NV12") == 0) {
                    vid->format = AR_PIXEL_FORMAT_420f;
                    ARLOGi("Requesting images in 420f/NV12 format.\n");
                } else {
                    ARLOGe("Ignoring request for unsupported video format '%s'.\n", b+8);
                }
            } else if( strncmp( b, "-position=", 10 ) == 0 ) {
                // Preferred physical camera position (panel) on the device.
                if (strcmp(b+10, "rear") == 0 || strcmp(b+10, "back") == 0) {
                    vid->preferredDeviceLocation = Windows::Devices::Enumeration::Panel::Back;
                } else if (strcmp(b+10, "front") == 0) {
                    vid->preferredDeviceLocation = Windows::Devices::Enumeration::Panel::Front;
                } else if (strcmp(b+10, "left") == 0) {
                    vid->preferredDeviceLocation = Windows::Devices::Enumeration::Panel::Left;
                } else if (strcmp(b+10, "right") == 0) {
                    vid->preferredDeviceLocation = Windows::Devices::Enumeration::Panel::Right;
                } else if (strcmp(b+10, "top") == 0) {
                    vid->preferredDeviceLocation = Windows::Devices::Enumeration::Panel::Top;
                } else if (strcmp(b+10, "bottom") == 0) {
                    vid->preferredDeviceLocation = Windows::Devices::Enumeration::Panel::Bottom;
                } else if (strcmp(b+10, "default") == 0) {
                    vid->preferredDeviceLocation = Windows::Devices::Enumeration::Panel::Unknown;
                } else {
                    ARLOGe("Error: unsupported video device position requested. Using default.\n");
                }
            } else if( strncmp( b, "-width=", 7 ) == 0 ) {
                if( sscanf( &b[7], "%d", &width ) == 0 ) err_i = 1;
            } else if( strncmp( b, "-height=", 8 ) == 0 ) {
                if( sscanf( &b[8], "%d", &height ) == 0 ) err_i = 1;
            } else if (strcmp(b, "-showDialog") == 0) {
                showDialog = 1;
            } else if (strcmp(b, "-noShowDialog") == 0) {
                showDialog = 0;
            } else if (strcmp(b, "-flipH") == 0) {
                flipH = 1;
            } else if (strcmp(b, "-noFlipH") == 0) {
                flipH = 0;
            } else if (strcmp(b, "-flipV") == 0) {
                flipV = 1;
            } else if (strcmp(b, "-noFlipV") == 0) {
                flipV = 0;
            } else if (strcmp(b, "-device=WinMC") == 0) {
                //ARLOG("Device set to WinMC\n");
            } else {
                // NOTE(review): this early return leaks 'vid' and leaves
                // ar2VideoWinMCRefCount incremented, unlike the err_i path
                // below which cleans up via bail -- confirm.
                ARLOGe("Unrecognized config token: '%s'\n", b);
                ar2VideoDispOptionWinMC();
                return 0;
            }

            if (err_i) goto bail;

            while(*a != ' ' && *a != '\t' && *a != '\0') a++; // Advance past the token just processed.
        }
    }

    // Defaults.
    if (vid->format == AR_PIXEL_FORMAT_INVALID) vid->format = AR_PIXEL_FORMAT_BGR;

    // Alloc and init WindowsMediaCapture.
    vid->wmc = new WindowsMediaCapture;
    if (!vid->wmc) {
        ARLOGe("Error creating instance of Windows.Media.Capture.\n");
        goto bail;
    }
    if (flipV) vid->wmc->setFlipV(true);

    if (!startWMC(vid, width, height)) goto bail1;

    return vid;

bail1:
    delete vid->wmc;
    vid->wmc = NULL;
bail:
    free(vid);
    ar2VideoWinMCRefCount--;
    if (ar2VideoWinMCRefCount == 0) ar2VideoWinMCFinal2();
    return NULL;
}
int main(int argc, char** argv) { int i; char glutGamemode[32]; char cparam_name[] = "Data/camera_para.dat"; char vconf[] = ""; char objectDataFilename[] = "Data/object_data_vrml"; // // Library inits. // glutInit(&argc, argv); // // Video setup. // if (!setupCamera(cparam_name, vconf, &gCparamLT, &gARHandle, &gAR3DHandle)) { ARLOGe("main(): Unable to set up AR camera.\n"); exit(-1); } #ifdef _WIN32 CoInitialize(NULL); #endif // // Graphics setup. // // Set up GL context(s) for OpenGL to draw into. glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH); if (!windowed) { if (windowRefresh) sprintf(glutGamemode, "%ix%i:%i@%i", windowWidth, windowHeight, windowDepth, windowRefresh); else sprintf(glutGamemode, "%ix%i:%i", windowWidth, windowHeight, windowDepth); glutGameModeString(glutGamemode); glutEnterGameMode(); } else { glutInitWindowSize(windowWidth, windowHeight); glutCreateWindow(argv[0]); } // Setup ARgsub_lite library for current OpenGL context. if ((gArglSettings = arglSetupForCurrentContext(&(gCparamLT->param), arVideoGetPixelFormat())) == NULL) { ARLOGe("main(): arglSetupForCurrentContext() returned error.\n"); cleanup(); exit(-1); } arglSetupDebugMode(gArglSettings, gARHandle); arUtilTimerReset(); if (!setupMarkersObjects(objectDataFilename, &gObjectData, &gObjectDataCount, gARHandle)) { ARLOGe("main(): Unable to set up AR objects and markers.\n"); cleanup(); exit(-1); } // Test render all the VRML objects. ARLOGi("Pre-rendering the VRML objects...\n"); glEnable(GL_TEXTURE_2D); for (i = 0; i < gObjectDataCount; i++) { arVrmlDraw(gObjectData[i].vrml_id); } glDisable(GL_TEXTURE_2D); ARLOGi(" done\n"); // Register GLUT event-handling callbacks. // NB: mainLoop() is registered by Visibility. glutDisplayFunc(Display); glutReshapeFunc(Reshape); glutVisibilityFunc(Visibility); glutKeyboardFunc(Keyboard); glutMainLoop(); return (0); }
// Open an Android video input.
// config: whitespace-separated option string (may be NULL for all defaults).
// Returns a newly allocated parameter block, or NULL on error.
AR2VideoParamAndroidT *ar2VideoOpenAndroid( const char *config )
{
    char *cacheDir = NULL;
    AR2VideoParamAndroidT *vid;
    const char *a;
    char line[1024];
    int err_i = 0;
    int i;
    int width = 0, height = 0;

    arMallocClear( vid, AR2VideoParamAndroidT, 1 );

    // Tokenise and process the config string.
    a = config;
    if( a != NULL) {
        for(;;) {
            while( *a == ' ' || *a == '\t' ) a++; // Skip inter-token whitespace.
            if( *a == '\0' ) break;

            if (sscanf(a, "%s", line) == 0) break; // Copy next token into line.

            if( strcmp( line, "-device=Android" ) == 0 ) {
                // Accepted and ignored (device selector).
            } else if( strncmp( line, "-width=", 7 ) == 0 ) {
                if( sscanf( &line[7], "%d", &width ) == 0 ) {
                    ARLOGe("Error: Configuration option '-width=' must be followed by width in integer pixels.\n");
                    err_i = 1;
                }
            } else if( strncmp( line, "-height=", 8 ) == 0 ) {
                if( sscanf( &line[8], "%d", &height ) == 0 ) {
                    ARLOGe("Error: Configuration option '-height=' must be followed by height in integer pixels.\n");
                    err_i = 1;
                }
            } else if( strncmp( line, "-format=", 8 ) == 0 ) {
                // Requested output pixel format.
                if (strcmp(line+8, "0") == 0) {
                    vid->format = 0;
                    ARLOGi("Requesting images in system default format.\n");
                } else if (strcmp(line+8, "RGBA") == 0) {
                    vid->format = AR_PIXEL_FORMAT_RGBA;
                    ARLOGi("Requesting images in RGBA format.\n");
                } else if (strcmp(line+8, "NV21") == 0) {
                    vid->format = AR_PIXEL_FORMAT_NV21;
                    ARLOGi("Requesting images in NV21 format.\n");
                } else if (strcmp(line+8, "420f") == 0 || strcmp(line+8, "NV12") == 0) {
                    vid->format = AR_PIXEL_FORMAT_420f;
                    ARLOGi("Requesting images in 420f/NV12 format.\n");
                } else {
                    ARLOGe("Ignoring request for unsupported video format '%s'.\n", line+8);
                }
            } else if (strncmp(a, "-cachedir=", 10) == 0) {
                // Attempt to read in pathname, allowing for quoting of whitespace.
                a += 10; // Skip "-cachedir=" characters.
                if (*a == '"') {
                    a++;
                    // Read all characters up to next '"'.
                    i = 0;
                    while (i < (sizeof(line) - 1) && *a != '\0') {
                        line[i] = *a;
                        a++;
                        if (line[i] == '"') break; // Closing quote: overwritten by nul below.
                        i++;
                    }
                    line[i] = '\0';
                } else {
                    sscanf(a, "%s", line);
                }
                if (!strlen(line)) {
                    ARLOGe("Error: Configuration option '-cachedir=' must be followed by path (optionally in double quotes).\n");
                    err_i = 1;
                } else {
                    free(cacheDir); // free(NULL) is a no-op on first use.
                    cacheDir = strdup(line);
                }
            } else {
                err_i = 1;
            }

            if (err_i) {
                ARLOGe("Error: Unrecognised configuration option '%s'.\n", a);
                ar2VideoDispOptionAndroid();
                goto bail;
            }

            while( *a != ' ' && *a != '\t' && *a != '\0') a++; // Advance past the token just processed.
        }
    }

    // Initial state.
    if (!vid->format) vid->format = AR_INPUT_ANDROID_PIXEL_FORMAT;
    if (!vid->focal_length) vid->focal_length = AR_VIDEO_ANDROID_FOCAL_LENGTH_DEFAULT;

    // In lieu of identifying the actual camera, we use manufacturer/model/board to identify a device,
    // and assume that identical devices have identical cameras.
    // Handset ID, via <sys/system_properties.h>.
    int len;
    len = __system_property_get(ANDROID_OS_BUILD_MANUFACTURER, vid->device_id); // len = (int)strlen(device_id).
    vid->device_id[len] = '/';
    len++;
    len += __system_property_get(ANDROID_OS_BUILD_MODEL, vid->device_id + len);
    vid->device_id[len] = '/';
    len++;
    len += __system_property_get(ANDROID_OS_BUILD_BOARD, vid->device_id + len);
    // NOTE(review): assumes vid->device_id is large enough for three
    // property values plus separators -- confirm against the struct's size.

    // Set width and height if specified.
    if (width && height) {
        vid->width = width;
        vid->height = height;
    }

    // Initialise the camera-parameter search (cacheDir may be NULL).
    if (cparamSearchInit(cacheDir, false) < 0) {
        ARLOGe("Unable to initialise cparamSearch.\n");
        goto bail;
    };

    goto done;

bail:
    free(vid);
    vid = NULL;

done:
    free(cacheDir);
    return (vid);
}
int main(int argc, char** argv) { char glutGamemode[32]; const char *cparam_name = "Data2/camera_para.dat"; char vconf[] = ""; const char markerConfigDataFilename[] = "Data2/markers.dat"; #ifdef DEBUG arLogLevel = AR_LOG_LEVEL_DEBUG; #endif // // Library inits. // glutInit(&argc, argv); // // Video setup. // #ifdef _WIN32 CoInitialize(NULL); #endif if (!setupCamera(cparam_name, vconf, &gCparamLT)) { ARLOGe("main(): Unable to set up AR camera.\n"); exit(-1); } // // AR init. // // Create the OpenGL projection from the calibrated camera parameters. arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, cameraLens); if (!initNFT(gCparamLT, arVideoGetPixelFormat())) { ARLOGe("main(): Unable to init NFT.\n"); exit(-1); } // // Graphics setup. // // Set up GL context(s) for OpenGL to draw into. glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH); if (!prefWindowed) { if (prefRefresh) sprintf(glutGamemode, "%ix%i:%i@%i", prefWidth, prefHeight, prefDepth, prefRefresh); else sprintf(glutGamemode, "%ix%i:%i", prefWidth, prefHeight, prefDepth); glutGameModeString(glutGamemode); glutEnterGameMode(); } else { glutInitWindowSize(gCparamLT->param.xsize, gCparamLT->param.ysize); glutCreateWindow(argv[0]); } // Setup ARgsub_lite library for current OpenGL context. if ((gArglSettings = arglSetupForCurrentContext(&(gCparamLT->param), arVideoGetPixelFormat())) == NULL) { ARLOGe("main(): arglSetupForCurrentContext() returned error.\n"); cleanup(); exit(-1); } arUtilTimerReset(); // // Markers setup. // // Load marker(s). newMarkers(markerConfigDataFilename, &markersNFT, &markersNFTCount); if (!markersNFTCount) { ARLOGe("Error loading markers from config. file '%s'.\n", markerConfigDataFilename); cleanup(); exit(-1); } ARLOGi("Marker count = %d\n", markersNFTCount); // Marker data has been loaded, so now load NFT data. if (!loadNFTData()) { ARLOGe("Error loading NFT data.\n"); cleanup(); exit(-1); } // Start the video. 
if (arVideoCapStart() != 0) { ARLOGe("setupCamera(): Unable to begin camera data capture.\n"); return (FALSE); } // Register GLUT event-handling callbacks. // NB: mainLoop() is registered by Visibility. glutDisplayFunc(Display); glutReshapeFunc(Reshape); glutVisibilityFunc(Visibility); glutKeyboardFunc(Keyboard); glutMainLoop(); return (0); }