int main(int argc, char **argv) { //initialize applications memset(buf, '\0', BUFLEN); glutInit(&argc, argv); init(); arVideoCapStart(); glutTimerFunc(100, update, 0); //start the main event loop argMainLoop(NULL, keyEvent, mainLoop); return 0; }
int main(int argc, char **argv) { //initialize applications glutInit(&argc, argv); init(); arVideoCapStart(); //start the main event loop argMainLoop( NULL, keyEvent, mainLoop ); return 0; }
/* Entry point: opens the video device, loads and scales the camera
 * parameters, creates the NyARToolkit transformation-matrix engine, loads
 * the marker pattern and game data, then runs the AR main loop.
 * Returns -1 on any setup failure. */
int main(int argc,char**argv){
    ARParam cparam;   /* camera parameters scaled to the capture size */
    ARParam wparam;   /* raw parameters as loaded from file */
    int xsize,ysize;  /* capture image dimensions */

    glutInit(&argc,argv);
    /* Open the video device (message: "video device error"). */
    if(arVideoOpen(vconf_name)<0){ puts("ビデオデバイスエラー"); return -1; }
    if(arVideoInqSize(&xsize,&ysize) < 0)return -1;
    /* Load the camera parameter file (message: "parameter load failed"). */
    if(arParamLoad(cparam_name,1,&wparam)< 0){ puts("パラメータ読み込み失敗"); return -1; }
    arParamChangeSize(&wparam,xsize,ysize,&cparam);
    arInitCparam(&cparam);
    /* Create the NyARToolkit optimized transform-matrix object. */
    nyobj = nyar_NyARTransMat_O2_create(&cparam);
    /* Load the marker pattern (message: "pattern load error"). */
    if( (patt_id=arLoadPatt(pattern_name)) < 0){ puts("パターン読み込みエラー"); return -1; }
    argInit(&cparam, 1.0, 0, 0, 0, 0);
    mqoInit();                      /* initialize the MQO model loader */
    if(Data_Load()==-1) return -1;  /* load game assets */
    arVideoCapStart();
    InitGame();
    arUtilTimerReset();
#ifdef _WIN32
    /* Raise the Windows multimedia timer resolution for smoother timing. */
    TIMECAPS Caps;
    timeGetDevCaps(&Caps, sizeof(TIMECAPS)); /* query timer capabilities */
    timeBeginPeriod(Caps.wPeriodMin);
#endif
    argMainLoop(MouseEvent,KeyEvent,MainLoop); /* normally does not return */
#ifdef _WIN32
    timeEndPeriod(Caps.wPeriodMin);
#endif
    nyar_NyARTransMat_O2_free(nyobj);
    return 0;
}
/* Entry point for the calibration tool: initializes the application,
 * registers GLUT input/display callbacks, starts video capture and enters
 * the GLUT event loop.
 * Fix: declare the standard `int` return type (implicit int is invalid
 * since C99) and return a value. */
int main(int argc, char **argv)
{
    init( argc, argv );
    glutKeyboardFunc(keyEvent);
    glutMouseFunc(mouseEvent);
    glutMotionFunc(motionEvent);
    glutIdleFunc(dispImage);   /* redraw continuously while idle */
    glutDisplayFunc(dispImage);
    print_comment(0);          /* show the initial usage instructions */
    status = 0;                /* calibration state machine: live video */
    point_num = 0;             /* no calibration points captured yet */
    arVideoCapStart();
    glutMainLoop();            /* does not return */
    return 0;
}
int main(int argc, char **argv) { ARParam cparam; ARParam wparam; int xsize, ysize; printf("test\n"); glutInit(&argc, argv); if (arVideoOpen(vconf_name) < 0) { printf("ビデオデバイスのエラー"); return -1; } if (arVideoInqSize(&xsize, &ysize) < 0) return -1; if (arParamLoad(cparam_name, 1, &wparam) < 0) { printf("カメラパラメータの読み込みに失敗しました\n"); return -1; } arParamChangeSize(&wparam, xsize, ysize, &cparam); arInitCparam(&cparam); if ((patt_id = arLoadPatt(patt_name)) < 0) { printf("パターンファイルの読み込みに失敗しました\n"); return -1; } argInit(&cparam, 1.0, 0, 0, 0, 0); arVideoCapStart(); argMainLoop(MouseEvent, KeyEvent, MainLoop); return 0; }
int main(int argc, char **argv) { printf("Sample2\n"); glutInit(&argc, argv); init(); //init for Kinect g_MyKinect.Init(); g_HandDetectorOpenNI.Init(g_MyKinect.context); //add Gesture g_HandDetectorOpenNI.AddGesture("Click", NULL); g_HandDetectorOpenNI.ClickPointerFunc = &ClickPointerFunction; g_MyKinect.StartGeneratingAll(); arVideoCapStart(); argMainLoop( NULL, keyEvent, mainLoop ); return (0); }
//======================================================= // main関数 //======================================================= int main( int argc, char **argv ) { // GLUTの初期化 glutInit( &argc, argv ); // ARアプリケーションの初期化 Init(); midi_init(); // ビデオキャプチャの開始 arVideoCapStart(); // メインループの開始 argMainLoop( MouseEvent, KeyEvent, MainLoop ); return 0; }
/* Entry point for the calibration tool built on the ARToolKit gsub event
 * API: registers input/display handlers, resets capture state, and loops. */
int main(int argc, char **argv)
{
    glutInit(&argc, argv);
    init( argc, argv );

    /* Route all events through the ARToolKit gsub layer. */
    argSetKeyFunc(keyEvent);
    argSetMouseFunc(mouseEvent);
    argSetMotionFunc(motionEvent);
    argSetDispFunc(dispImage, 1);

    print_comment(0);  /* initial instructions */
    status = 0;        /* state machine: live video */
    point_num = 0;     /* no feature points marked yet */

    arVideoCapStart();
    argMainLoop();     /* does not return */
    return 0;
}
/* Interactively prompts for a camera parameter filename (defaulting to
 * Data/camera_para.dat), loads it, opens the video path, resizes the
 * parameters to the capture size and starts capture.
 * Returns TRUE on success, FALSE on failure; exits on EOF at the prompt.
 * Fix: bound the %s conversion (%255s) so a pathological input line can
 * never overflow name2 (CERT STR31-C), and size fgets with sizeof. */
static int setupCamera(ARParam *cparam)
{
    ARParam wparam;               /* parameters as loaded from file */
    char name1[256], name2[256];
    int xsize, ysize;             /* camera frame size */

    printf("Enter camera parameter filename");
    printf("(Data/camera_para.dat): ");
    if (fgets(name1, sizeof(name1), stdin) == NULL) exit(0);
    /* Blank line (no token) selects the default path. */
    if (sscanf(name1, "%255s", name2) != 1) {
        strcpy(name2, "Data/camera_para.dat");
    }

    // Load the camera parameters.
    if (arParamLoad(name2, 1, &wparam) < 0 ) {
        printf("Parameter load error !!\n");
        return (FALSE);
    }

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        fprintf(stderr, "setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }

    // Find the size of the window.
    if (arVideoInqSize(&xsize, &ysize) < 0) return (FALSE);
    fprintf(stdout, "Camera image size (x,y) = (%d,%d)\n", xsize, ysize);

    // Resize for the window and init.
    arParamChangeSize(&wparam, xsize, ysize, cparam);
    fprintf(stdout, "*** Camera Parameter ***\n");
    arParamDisp(cparam);
    arInitCparam(cparam);

    if (arVideoCapStart() != 0) {
        fprintf(stderr, "setupCamera(): Unable to begin camera data capture.\n");
        return (FALSE);
    }

    return (TRUE);
}
/* Entry point for the two-window sample: both windows share the same
 * display and key handlers; the FPS counter starts cleared. */
int main(int argc, char *argv[])
{
    glutInit(&argc, argv);
    init(argc, argv);

    /* Attach identical handlers to each window. */
    argSetWindow(w1);
    argSetDispFunc(mainLoop, 1);
    argSetKeyFunc(keyEvent);

    argSetWindow(w2);
    argSetDispFunc(mainLoop, 1);
    argSetKeyFunc(keyEvent);

    count = 0;      /* frames rendered since the last FPS update */
    fps[0] = '\0';  /* empty FPS display string */

    arVideoCapStart();
    arUtilTimerReset();
    argMainLoop();  /* does not return */
    return 0;
}
/* Opens the video path, queries the frame size, loads the camera parameter
 * file, resizes the parameters to the frame and starts capture.
 * Returns TRUE on success, FALSE on failure.
 * Fix: close the video path on the failure branches that follow a
 * successful arVideoOpen(), so a failed setup no longer leaks the device
 * (matches the behavior of the ARParamLT variant of setupCamera). */
static int setupCamera(const char *cparam_name, char *vconf, ARParam *cparam)
{
    ARParam wparam;    /* parameters as loaded from file */
    int xsize, ysize;  /* camera frame size */

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        fprintf(stderr, "setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }

    // Find the size of the window.
    if (arVideoInqSize(&xsize, &ysize) < 0) {
        arVideoClose();
        return (FALSE);
    }
    fprintf(stdout, "Camera image size (x,y) = (%d,%d)\n", xsize, ysize);

    // Load the camera parameters, resize for the window and init.
    if (arParamLoad(cparam_name, 1, &wparam) < 0) {
        fprintf(stderr, "setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
        arVideoClose();
        return (FALSE);
    }
    arParamChangeSize(&wparam, xsize, ysize, cparam);
    fprintf(stdout, "*** Camera Parameter ***\n");
    arParamDisp(cparam);
    arInitCparam(cparam);

    if (arVideoCapStart() != 0) {
        fprintf(stderr, "setupCamera(): Unable to begin camera data capture.\n");
        arVideoClose();
        return (FALSE);
    }

    return (TRUE);
}
/* Entry point for the square-marker OSG example.
 * Parses command-line options, sets up video, AR handles, markers and the
 * OpenGL context, then enters the GLUT main loop.
 * Exits with -1 on any setup failure. */
int main(int argc, char** argv)
{
    char glutGamemode[32] = "";
    char *vconf = NULL;    /* video configuration string; NULL = default */
    char cparaDefault[] = "../share/artoolkit-examples/Data/camera_para.dat";
    char *cpara = NULL;    /* camera parameter path override */
    int i;
    int gotTwoPartOption;
    const char markerConfigDataFilename[] = "../share/artoolkit-examples/Data/markers.dat";
    const char objectDataFilename[] = "../share/artoolkit-examples/Data/objects.dat";

    //
    // Process command-line options.
    //
    glutInit(&argc, argv);

    i = 1; // argv[0] is name of app, so start at 1.
    while (i < argc) {
        gotTwoPartOption = FALSE;
        // Look for two-part options first.
        if ((i + 1) < argc) {
            if (strcmp(argv[i], "--vconf") == 0) {
                i++;
                vconf = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--cpara") == 0) {
                i++;
                cpara = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i],"--width") == 0) {
                i++;
                // Get width from second field.
                if (sscanf(argv[i], "%d", &prefWidth) != 1) {
                    ARLOGe("Error: --width option must be followed by desired width.\n");
                }
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i],"--height") == 0) {
                i++;
                // Get height from second field.
                if (sscanf(argv[i], "%d", &prefHeight) != 1) {
                    ARLOGe("Error: --height option must be followed by desired height.\n");
                }
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i],"--refresh") == 0) {
                i++;
                // Get refresh rate from second field.
                if (sscanf(argv[i], "%d", &prefRefresh) != 1) {
                    ARLOGe("Error: --refresh option must be followed by desired refresh rate.\n");
                }
                gotTwoPartOption = TRUE;
            }
        }
        if (!gotTwoPartOption) {
            // Look for single-part options.
            if (strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-help") == 0 || strcmp(argv[i], "-h") == 0) {
                usage(argv[0]);
            } else if (strncmp(argv[i], "-cpara=", 7) == 0) {
                cpara = &(argv[i][7]);
            } else if (strcmp(argv[i], "--version") == 0 || strcmp(argv[i], "-version") == 0 || strcmp(argv[i], "-v") == 0) {
                ARLOG("%s version %s\n", argv[0], AR_HEADER_VERSION_STRING);
                exit(0);
            } else if (strcmp(argv[i],"--windowed") == 0) {
                prefWindowed = TRUE;
            } else if (strcmp(argv[i],"--fullscreen") == 0) {
                prefWindowed = FALSE;
            } else {
                ARLOGe("Error: invalid command line argument '%s'.\n", argv[i]);
                usage(argv[0]);
            }
        }
        i++;
    }

    //
    // Video setup.
    //
    if (!setupCamera((cpara ? cpara : cparaDefault), vconf, &gCparamLT)) {
        ARLOGe("main(): Unable to set up AR camera.\n");
        exit(-1);
    }

    //
    // AR init.
    //

    // Init AR.
    gARPattHandle = arPattCreateHandle();
    if (!gARPattHandle) {
        ARLOGe("Error creating pattern handle.\n");
        exit(-1);
    }
    gARHandle = arCreateHandle(gCparamLT);
    if (!gARHandle) {
        ARLOGe("Error creating AR handle.\n");
        exit(-1);
    }
    arPattAttach(gARHandle, gARPattHandle);
    if (arSetPixelFormat(gARHandle, arVideoGetPixelFormat()) < 0) {
        ARLOGe("Error setting pixel format.\n");
        exit(-1);
    }
    gAR3DHandle = ar3DCreateHandle(&gCparamLT->param);
    if (!gAR3DHandle) {
        ARLOGe("Error creating 3D handle.\n");
        exit(-1);
    }

    //
    // Markers setup.
    //

    // Load marker(s).
    newMarkers(markerConfigDataFilename, gARPattHandle, &markersSquare, &markersSquareCount, &gARPattDetectionMode);
    ARLOGi("Marker count = %d\n", markersSquareCount);

    //
    // Other ARToolKit setup.
    //
    arSetMarkerExtractionMode(gARHandle, AR_USE_TRACKING_HISTORY_V2);
    //arSetMarkerExtractionMode(gARHandle, AR_NOUSE_TRACKING_HISTORY);
    //arSetLabelingThreshMode(gARHandle, AR_LABELING_THRESH_MODE_MANUAL); // Uncomment to force manual thresholding.

    // Set the pattern detection mode (template (pictorial) vs. matrix (barcode) based on
    // the marker types as defined in the marker config. file.
    arSetPatternDetectionMode(gARHandle, gARPattDetectionMode); // Default = AR_TEMPLATE_MATCHING_COLOR

    // Other application-wide marker options. Once set, these apply to all markers in use in the application.
    // If you are using standard ARToolKit picture (template) markers, leave commented to use the defaults.
    // If you are usign a different marker design (see http://www.artoolworks.com/support/app/marker.php )
    // then uncomment and edit as instructed by the marker design application.
    //arSetLabelingMode(gARHandle, AR_LABELING_BLACK_REGION); // Default = AR_LABELING_BLACK_REGION
    //arSetBorderSize(gARHandle, 0.25f); // Default = 0.25f
    //arSetMatrixCodeType(gARHandle, AR_MATRIX_CODE_3x3); // Default = AR_MATRIX_CODE_3x3

    //
    // Graphics setup.
    //

    // Set up GL context(s) for OpenGL to draw into.
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
    if (prefWindowed) {
        if (prefWidth > 0 && prefHeight > 0) glutInitWindowSize(prefWidth, prefHeight);
        else glutInitWindowSize(gCparamLT->param.xsize, gCparamLT->param.ysize);
        glutCreateWindow(argv[0]);
    } else {
        if (glutGameModeGet(GLUT_GAME_MODE_POSSIBLE)) {
            // Build the game-mode string "WxH[:depth][@refresh]" from prefs.
            if (prefWidth && prefHeight) {
                if (prefDepth) {
                    if (prefRefresh) snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i@%i", prefWidth, prefHeight, prefDepth, prefRefresh);
                    else snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i", prefWidth, prefHeight, prefDepth);
                } else {
                    if (prefRefresh) snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i@%i", prefWidth, prefHeight, prefRefresh);
                    else snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i", prefWidth, prefHeight);
                }
            } else {
                prefWidth = glutGameModeGet(GLUT_GAME_MODE_WIDTH);
                prefHeight = glutGameModeGet(GLUT_GAME_MODE_HEIGHT);
                snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i", prefWidth, prefHeight);
            }
            glutGameModeString(glutGamemode);
            glutEnterGameMode();
        } else {
            // Game mode unavailable: fall back to a full-screen window.
            if (prefWidth > 0 && prefHeight > 0) glutInitWindowSize(prefWidth, prefHeight);
            glutCreateWindow(argv[0]);
            glutFullScreen();
        }
    }

    // Create the OpenGL projection from the calibrated camera parameters.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, cameraLens);
    cameraPoseValid = FALSE;

    // Setup ARgsub_lite library for current OpenGL context.
    if ((gArglSettings = arglSetupForCurrentContext(&(gCparamLT->param), arVideoGetPixelFormat())) == NULL) {
        ARLOGe("main(): arglSetupForCurrentContext() returned error.\n");
        cleanup();
        exit(-1);
    }
    arglSetupDebugMode(gArglSettings, gARHandle);

    // Load objects (i.e. OSG models).
    VirtualEnvironmentInit(objectDataFilename);
    VirtualEnvironmentHandleARViewUpdatedCameraLens(cameraLens);

    //
    // Setup complete. Start tracking.
    //

    // Start the video.
    if (arVideoCapStart() != 0) {
        ARLOGe("setupCamera(): Unable to begin camera data capture.\n");
        return (FALSE);
    }
    arUtilTimerReset();

    // Register GLUT event-handling callbacks.
    // NB: mainLoop() is registered by Visibility.
    glutDisplayFunc(Display);
    glutReshapeFunc(Reshape);
    glutVisibilityFunc(Visibility);
    glutKeyboardFunc(Keyboard);

    glutMainLoop();

    return (0);
}
/* Mouse handler for the camera calibration tool.
 *
 * State machine (global `status`):
 *   0 - live video; left-up grabs a frame, right-up finishes (or quits if
 *       nothing was captured).
 *   1 - marking feature points on the grabbed frame; a left drag selects a
 *       region whose thresholded intensity centroid becomes the next point;
 *       right-up discards the frame.
 *   2 - reviewing results; left-up steps through captured frames, and on
 *       the last frame runs the intrinsic calculation and saves.
 */
static void mouseEvent(int button, int state, int x, int y)
{
    unsigned char   *p, *p1;
    int             ssx, ssy, eex, eey;
    int             i, j, k;

    if( button == GLUT_RIGHT_BUTTON && state == GLUT_UP ) {
        if( status == 0 ) {
            /* Done grabbing: stop video and either run the lens-distortion
             * calculation (if any frames were captured) or quit. */
            arVideoCapStop();
            arVideoClose();
            if( patt.loop_num > 0 ) {
                calc_distortion( &patt, xsize, ysize, dist_factor );
                printf("--------------\n");
                printf("Center X: %f\n", dist_factor[0]);
                printf(" Y: %f\n", dist_factor[1]);
                printf("Dist Factor: %f\n", dist_factor[2]);
                printf("Size Adjust: %f\n", dist_factor[3]);
                printf("--------------\n");
                status = 2;
                check_num = 0;
                print_comment(5);
            }
            else {
                glutDestroyWindow( win );
                exit(0);
            }
        }
        else if( status == 1 ) {
            /* Discard the current frame and return to live video. */
            if( patt.loop_num == 0 ) {printf("error!!\n"); exit(0);}
            patt.loop_num--;
            free( patt.point[patt.loop_num] );
            free( patt.savedImage[patt.loop_num] );
            status = 0;
            point_num = 0;
            arVideoCapStart();
            if( patt.loop_num == 0 ) print_comment(0);
            else                     print_comment(4);
        }
    }

    if( button == GLUT_LEFT_BUTTON && state == GLUT_DOWN ) {
        if( status == 1 && point_num < patt.h_num*patt.v_num ) {
            /* Begin region selection; seed the thresholded clip image from
             * the clicked pixel (channel layout depends on pixel format). */
            sx = ex = x;
            sy = ey = y;
            p = &(patt.savedImage[patt.loop_num-1][(y*xsize+x)*AR_PIX_SIZE]);
            p1 = &(clipImage[0]);
#ifdef AR_PIX_FORMAT_BGRA
            k = (255*3 - (*(p+0) + *(p+1) + *(p+2))) / 3;
            if( k < thresh ) k = 0;
            else k = 255;
            *(p1+0) = *(p1+1) = *(p1+2) = k;
#endif
#ifdef AR_PIX_FORMAT_ABGR
            k = (255*3 - (*(p+1) + *(p+2) + *(p+3))) / 3;
            if( k < thresh ) k = 0;
            else k = 255;
            *(p1+1) = *(p1+2) = *(p1+3) = k;
#endif
#ifdef AR_PIX_FORMAT_BGR
            k = (255*3 - (*(p+0) + *(p+1) + *(p+2))) / 3;
            if( k < thresh ) k = 0;
            else k = 255;
            *(p1+0) = *(p1+1) = *(p1+2) = k;
#endif
#ifdef AR_PIX_FORMAT_RGBA
            k = (255*3 - (*(p+0) + *(p+1) + *(p+2))) / 3;
            if( k < thresh ) k = 0;
            else k = 255;
            *(p1+0) = *(p1+1) = *(p1+2) = k;
#endif
#ifdef AR_PIX_FORMAT_RGB
            k = (255*3 - (*(p+0) + *(p+1) + *(p+2))) / 3;
            if( k < thresh ) k = 0;
            else k = 255;
            *(p1+0) = *(p1+1) = *(p1+2) = k;
#endif
        }
    }

    if( button == GLUT_LEFT_BUTTON && state == GLUT_UP ) {
        if( status == 0 && patt.loop_num < LOOP_MAX ) {
            /* Grab the next video frame and save a copy for marking. */
            while( (p = (unsigned char *)arVideoGetImage()) == NULL ) {
                arUtilSleep(2);
            }
#ifdef USE_TEXMAP
            patt.savedImage[patt.loop_num] = (unsigned char *)malloc( xsize*tex1Ysize*AR_PIX_SIZE );
#else
            patt.savedImage[patt.loop_num] = (unsigned char *)malloc( xsize*ysize*AR_PIX_SIZE );
#endif
            if( patt.savedImage[patt.loop_num] == NULL ) exit(0);
            p1 = patt.savedImage[patt.loop_num];
            for(i=0;i<xsize*ysize*AR_PIX_SIZE;i++) *(p1++) = *(p++);
            arVideoCapStop();
            patt.point[patt.loop_num] = (CALIB_COORD_T *)malloc( sizeof(CALIB_COORD_T)*patt.h_num*patt.v_num );
            if( patt.point[patt.loop_num] == NULL ) exit(0);
            patt.loop_num++;
            status = 1;
            sx = sy = ex= ey = -1;
            print_comment(1);
        }
        else if( status == 1 && point_num == patt.h_num*patt.v_num ) {
            /* All points marked for this frame: print them, resume video. */
            status = 0;
            point_num = 0;
            arVideoCapStart();
            printf("### No.%d ###\n", patt.loop_num);
            for( j = 0; j < patt.v_num; j++ ) {
                for( i = 0; i < patt.h_num; i++ ) {
                    printf("%2d, %2d: %6.2f, %6.2f\n", i+1, j+1,
                           patt.point[patt.loop_num-1][j*patt.h_num+i].x_coord,
                           patt.point[patt.loop_num-1][j*patt.h_num+i].y_coord);
                }
            }
            printf("\n\n");
            if( patt.loop_num < LOOP_MAX ) print_comment(4);
            else                           print_comment(6);
        }
        else if( status == 1 ) {
            /* Finish region selection: compute the intensity-weighted
             * centroid of the thresholded clip image as the point position
             * (weights taken from the second byte of each pixel). */
            if( sx < ex ) { ssx = sx; eex = ex; }
            else          { ssx = ex; eex = sx; }
            if( sy < ey ) { ssy = sy; eey = ey; }
            else          { ssy = ey; eey = sy; }
            patt.point[patt.loop_num-1][point_num].x_coord = 0.0;
            patt.point[patt.loop_num-1][point_num].y_coord = 0.0;
            p = clipImage;
            k = 0;
            for( j = 0; j < (eey-ssy+1); j++ ) {
                for( i = 0; i < (eex-ssx+1); i++ ) {
                    patt.point[patt.loop_num-1][point_num].x_coord += i * *(p+1);
                    patt.point[patt.loop_num-1][point_num].y_coord += j * *(p+1);
                    k += *(p+1);
                    p += AR_PIX_SIZE;
                }
            }
            if( k != 0 ) {
                patt.point[patt.loop_num-1][point_num].x_coord /= k;
                patt.point[patt.loop_num-1][point_num].y_coord /= k;
                patt.point[patt.loop_num-1][point_num].x_coord += ssx;
                patt.point[patt.loop_num-1][point_num].y_coord += ssy;
                point_num++;
            }
            sx = sy = ex= ey = -1;
            printf(" # %d/%d\n", point_num, patt.h_num*patt.v_num);
            if( point_num == patt.h_num*patt.v_num ) print_comment(2);
        }
        else if( status == 2 ) {
            /* Review mode: step through frames; on the last one, run the
             * intrinsic-parameter calculation (needs >= 2 frames) and save. */
            check_num++;
            if( check_num == patt.loop_num ) {
                if(patt.loop_num >= 2) {
                    if( calc_inp(&patt, dist_factor, xsize, ysize, mat) < 0 ) {
                        printf("Calibration failed.\n");
                        exit(0);
                    }
                    save_param();
                }
                glutDestroyWindow( win );
                exit(0);
            }
            if( check_num+1 == patt.loop_num ) {
                printf("\nLeft Mouse Button: Next Step.\n");
            }
            else {
                printf(" %d/%d.\n", check_num+1, patt.loop_num);
            }
        }
    }
}
/* Entry point: loads the mesh models, initializes GLUT/OpenGL, ARToolKit
 * and the on-screen UI (help menu, quit/help/scan buttons), then enters
 * the ARToolKit main loop. */
int main(int argc, char *argv[])
{
    printf("Debut initialisation\n");

    /// Load the objects.
    // Meshes are no longer stored individually but in an array of loadable
    // meshes; the mesh id loaded for a marker is its index in this array.
    mesh.push_back(new MeshObj("Others\\legoTexture.obj",NULL));
    mesh.push_back(new MeshObj("Others\\brique_lego.obj", NULL));
    printf("Chargement des objets réussi\n");

    /// GLUT initialization.
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE | GLUT_DEPTH);
    glClearColor(0, 0, 0, 0);
    glEnable(GL_CULL_FACE);
    glCullFace(GL_BACK);
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LESS);
    glShadeModel(GL_SMOOTH);

    /// Initialize ARToolKit and the window, then call the infinite loop.
    arInit();
    arVideoCapStart();
    // Center the window on the screen.
    glutPositionWindow((glutGet(GLUT_SCREEN_WIDTH)-cparam.xsize)/2,
                       (glutGet(GLUT_SCREEN_HEIGHT)-cparam.ysize)/2);
    glutReshapeFunc(resize);
    glutMotionFunc(mouseMove);

    // Initialize the help menu (movement help).
    menu.addBoutton("img\\delete.png",true,0,cparam.ysize-75,75,cparam.ysize);
    menu.addBoutton("img\\move.png",true,75,cparam.ysize-75,150,cparam.ysize);
    menu.addBoutton("img\\resize.png",true,150,cparam.ysize-75,225,cparam.ysize);

    // Initialize the help/scan buttons.
    // Quit button (normal and pressed images).
    quit.addBoutton("img\\quit.png",true,cparam.xsize-120,30,cparam.xsize-16,54+30,true);
    quit.addBoutton("img\\quit1.png",true,cparam.xsize-120,30,cparam.xsize-16,54+30,false);
    difQuit=differ(2000);

    // Help button (normal / enabled / selected images).
    help.addBoutton("img\\aide1.png",true, cparam.xsize-120,54+35,cparam.xsize-16,54+54+35,true);
    help.addBoutton("img\\aide2.png",true, cparam.xsize-120,54+35,cparam.xsize-16,54+54+35,false); // enabled
    help.addBoutton("img\\aide3.png",true, cparam.xsize-120,54+35,cparam.xsize-16,54+54+35,false); // selected
    difAide=differ(2000);
    menuShow=false;

    // Scan button.
    scan.addBoutton("img\\scan5.png",true,cparam.xsize-120,54+40+54,cparam.xsize-16,54+54+54+40,true);
    scan.addBoutton("img\\scan6.png",true,cparam.xsize-120,54+40+54,cparam.xsize-16,54+54+54+40,false);
    scan.addBoutton("img\\scan7.png",true,cparam.xsize-120,54+40+54,cparam.xsize-16,54+54+54+40,false);
    difScan=differ(2000);

    /*FMOD_System_Create(&systemSon);
    FMOD_System_Init(systemSon, 2, FMOD_INIT_NORMAL, NULL);
    if(!FMOD_System_CreateSound(systemSon, "Data\\mouseclickDown.wav", FMOD_CREATESAMPLE, 0, &clickDown)) printf("chargement son: ok\n");
    else printf("chargement son: echec\n");
    if(!FMOD_System_CreateSound(systemSon, "Data\\mouseclickUp.wav", FMOD_CREATESAMPLE, 0, &clickUP)) printf("chargement son: ok\n");
    else printf("chargement son: echec\n");*/

    difIndex=differ(2000);
    difMajeur=differ(2000);

    printf("Fin initialisation\n");
    argMainLoop(mouseClick, key, mainLoop);  // does not return
    return EXIT_SUCCESS;
}
/* Opens the video path, queries frame size and pixel format, loads the
 * camera parameter file (resizing it if it does not match the frame),
 * builds the lookup-table camera parameters, the AR handle and the 3D
 * handle, then starts capture.
 * Returns TRUE on success, FALSE on failure; the video device is closed
 * on the early failure paths that precede handle creation. */
static int setupCamera(const char *cparam_name, char *vconf, ARParamLT **cparamLT_p, ARHandle **arhandle, AR3DHandle **ar3dhandle)
{
    ARParam cparam;            /* parameters as loaded (possibly resized) */
    int xsize, ysize;          /* camera frame size */
    AR_PIXEL_FORMAT pixFormat; /* pixel format delivered by the camera */

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        ARLOGe("setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }

    // Find the size of the window.
    if (arVideoGetSize(&xsize, &ysize) < 0) {
        ARLOGe("setupCamera(): Unable to determine camera frame size.\n");
        arVideoClose();
        return (FALSE);
    }
    ARLOGi("Camera image size (x,y) = (%d,%d)\n", xsize, ysize);

    // Get the format in which the camera is returning pixels.
    pixFormat = arVideoGetPixelFormat();
    if (pixFormat == AR_PIXEL_FORMAT_INVALID) {
        ARLOGe("setupCamera(): Camera is using unsupported pixel format.\n");
        arVideoClose();
        return (FALSE);
    }

    // Load the camera parameters, resize for the window and init.
    if (arParamLoad(cparam_name, 1, &cparam) < 0) {
        ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
        arVideoClose();
        return (FALSE);
    }
    if (cparam.xsize != xsize || cparam.ysize != ysize) {
        ARLOGw("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
        arParamChangeSize(&cparam, xsize, ysize, &cparam);
    }
#ifdef DEBUG
    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&cparam);
#endif
    if ((*cparamLT_p = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
        ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
        arVideoClose();
        return (FALSE);
    }

    if ((*arhandle = arCreateHandle(*cparamLT_p)) == NULL) {
        ARLOGe("setupCamera(): Error: arCreateHandle.\n");
        return (FALSE);
    }
    if (arSetPixelFormat(*arhandle, pixFormat) < 0) {
        ARLOGe("setupCamera(): Error: arSetPixelFormat.\n");
        return (FALSE);
    }
    if (arSetDebugMode(*arhandle, AR_DEBUG_DISABLE) < 0) {
        ARLOGe("setupCamera(): Error: arSetDebugMode.\n");
        return (FALSE);
    }
    if ((*ar3dhandle = ar3DCreateHandle(&cparam)) == NULL) {
        ARLOGe("setupCamera(): Error: ar3DCreateHandle.\n");
        return (FALSE);
    }

    if (arVideoCapStart() != 0) {
        ARLOGe("setupCamera(): Unable to begin camera data capture.\n");
        return (FALSE);
    }

    return (TRUE);
}
/* Entry point: initializes the AR application, starts video capture and
 * enters the ARToolKit main loop.
 * Fix: `main` must return `int` (`void main` is non-standard C); a return
 * value is added accordingly. */
int main(int argc, char *argv[])
{
    init();
    arVideoCapStart();
    argMainLoop(NULL, keyEvent, mainLoop); /* does not return */
    return 0;
}
//======== // 初期化 //======== bool cARTK::initialize( void ) { const char *szCameraParamFName = "../Data/camera_para.dat"; const char *szPattFName = "../Data/patt.00"; #ifdef _WIN32 char *szVConfFName = "../Data/WDM_camera_flipV.xml"; #else char *szVConfFName = ""; #endif ARParam sCamParamTemp; int iCamImgSizeX, iCamImgSizeY; // カメラデバイスのオープン if( arVideoOpen( szVConfFName ) < 0 ) { ErrorMessage( "Unable to open connection to camera.\n" ); return false; } // カメラ画像のサイズを取得 if( arVideoInqSize( &iCamImgSizeX, &iCamImgSizeY ) < 0 ) return false; // カメラパラメータファイルの読込み if( arParamLoad( szCameraParamFName, 1, &sCamParamTemp ) < 0 ) { ErrorMessage( "Error loading parameter file for camera.\n" ); return false; } // カメラパラメータのサイズ部分を変更 arParamChangeSize( &sCamParamTemp, iCamImgSizeX, iCamImgSizeY, &m_sCameraParam ); // ライブラリ内のカメラパラメータの初期化 arInitCparam( &m_sCameraParam ); // カメラ画像のキャプチャを開始 if( arVideoCapStart() != 0 ) { ErrorMessage( "Unable to begin camera data capture.\n" ); return false; } // マーカーパターンファイルの読込み if( (m_iPattID = arLoadPatt( szPattFName )) < 0 ) { ErrorMessage( "Pattern file load error !!\n" ); return false; } m_uiARTImageSize = sizeof(ARUint8) * iCamImgSizeX * iCamImgSizeY * 3; m_pARTImage = (ARUint8 *)malloc( m_uiARTImageSize ); memset( m_pARTImage, 0, m_uiARTImageSize ); m_dViewScaleFactor = 0.16; m_dPattWidth = 40.0; m_bFirstTime = true; return true; }
/* Entry point for the NFT (natural feature tracking) OSG example.
 * Parses command-line options, sets up video, NFT tracking, markers and
 * the OpenGL context, then enters the GLUT main loop.
 * Exits with -1 on any setup failure. */
int main(int argc, char** argv)
{
    char glutGamemode[32] = "";
    char *vconf = NULL;    /* video configuration string; NULL = default */
    char cparaDefault[] = "Data2/camera_para.dat";
    char *cpara = NULL;    /* camera parameter path override */
    int i;
    int gotTwoPartOption;
    const char markerConfigDataFilename[] = "Data2/markers.dat";
    const char objectDataFilename[] = "Data2/objects.dat";

#ifdef DEBUG
    arLogLevel = AR_LOG_LEVEL_DEBUG;
#endif

    //
    // Process command-line options.
    //
    glutInit(&argc, argv);

    i = 1; // argv[0] is name of app, so start at 1.
    while (i < argc) {
        gotTwoPartOption = FALSE;
        // Look for two-part options first.
        if ((i + 1) < argc) {
            if (strcmp(argv[i], "--vconf") == 0) {
                i++;
                vconf = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--cpara") == 0) {
                i++;
                cpara = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i],"--width") == 0) {
                i++;
                // Get width from second field.
                if (sscanf(argv[i], "%d", &prefWidth) != 1) {
                    ARLOGe("Error: --width option must be followed by desired width.\n");
                }
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i],"--height") == 0) {
                i++;
                // Get height from second field.
                if (sscanf(argv[i], "%d", &prefHeight) != 1) {
                    ARLOGe("Error: --height option must be followed by desired height.\n");
                }
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i],"--refresh") == 0) {
                i++;
                // Get refresh rate from second field.
                if (sscanf(argv[i], "%d", &prefRefresh) != 1) {
                    ARLOGe("Error: --refresh option must be followed by desired refresh rate.\n");
                }
                gotTwoPartOption = TRUE;
            }
        }
        if (!gotTwoPartOption) {
            // Look for single-part options.
            if (strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-help") == 0 || strcmp(argv[i], "-h") == 0) {
                usage(argv[0]);
            } else if (strncmp(argv[i], "-cpara=", 7) == 0) {
                cpara = &(argv[i][7]);
            } else if (strcmp(argv[i], "--version") == 0 || strcmp(argv[i], "-version") == 0 || strcmp(argv[i], "-v") == 0) {
                ARLOG("%s version %s\n", argv[0], AR_HEADER_VERSION_STRING);
                exit(0);
            } else if (strcmp(argv[i],"--windowed") == 0) {
                prefWindowed = TRUE;
            } else if (strcmp(argv[i],"--fullscreen") == 0) {
                prefWindowed = FALSE;
            } else {
                ARLOGe("Error: invalid command line argument '%s'.\n", argv[i]);
                usage(argv[0]);
            }
        }
        i++;
    }

    //
    // Video setup.
    //
    if (!setupCamera((cpara ? cpara : cparaDefault), vconf, &gCparamLT)) {
        ARLOGe("main(): Unable to set up AR camera.\n");
        exit(-1);
    }

    //
    // AR init.
    //
    if (!initNFT(gCparamLT, arVideoGetPixelFormat())) {
        ARLOGe("main(): Unable to init NFT.\n");
        exit(-1);
    }

    //
    // Markers setup.
    //

    // Load marker(s).
    newMarkers(markerConfigDataFilename, &markersNFT, &markersNFTCount);
    if (!markersNFTCount) {
        ARLOGe("Error loading markers from config. file '%s'.\n", markerConfigDataFilename);
        cleanup();
        exit(-1);
    }
    ARLOGi("Marker count = %d\n", markersNFTCount);

    // Marker data has been loaded, so now load NFT data.
    if (!loadNFTData()) {
        ARLOGe("Error loading NFT data.\n");
        cleanup();
        exit(-1);
    }

    //
    // Graphics setup.
    //

    // Set up GL context(s) for OpenGL to draw into.
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
    if (prefWindowed) {
        if (prefWidth > 0 && prefHeight > 0) glutInitWindowSize(prefWidth, prefHeight);
        else glutInitWindowSize(gCparamLT->param.xsize, gCparamLT->param.ysize);
        glutCreateWindow(argv[0]);
    } else {
        if (glutGameModeGet(GLUT_GAME_MODE_POSSIBLE)) {
            // Build the game-mode string "WxH[:depth][@refresh]" from prefs.
            if (prefWidth && prefHeight) {
                if (prefDepth) {
                    if (prefRefresh) snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i@%i", prefWidth, prefHeight, prefDepth, prefRefresh);
                    else snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i", prefWidth, prefHeight, prefDepth);
                } else {
                    if (prefRefresh) snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i@%i", prefWidth, prefHeight, prefRefresh);
                    else snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i", prefWidth, prefHeight);
                }
            } else {
                prefWidth = glutGameModeGet(GLUT_GAME_MODE_WIDTH);
                prefHeight = glutGameModeGet(GLUT_GAME_MODE_HEIGHT);
                snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i", prefWidth, prefHeight);
            }
            glutGameModeString(glutGamemode);
            glutEnterGameMode();
        } else {
            // Game mode unavailable: fall back to a full-screen window.
            if (prefWidth > 0 && prefHeight > 0) glutInitWindowSize(prefWidth, prefHeight);
            glutCreateWindow(argv[0]);
            glutFullScreen();
        }
    }

    // Create the OpenGL projection from the calibrated camera parameters.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, cameraLens);
    cameraPoseValid = FALSE;

    // Setup ARgsub_lite library for current OpenGL context.
    if ((gArglSettings = arglSetupForCurrentContext(&(gCparamLT->param), arVideoGetPixelFormat())) == NULL) {
        ARLOGe("main(): arglSetupForCurrentContext() returned error.\n");
        cleanup();
        exit(-1);
    }

    // Load objects (i.e. OSG models).
    VirtualEnvironmentInit(objectDataFilename);
    VirtualEnvironmentHandleARViewUpdatedCameraLens(cameraLens);

    //
    // Setup complete. Start tracking.
    //

    // Start the video.
    if (arVideoCapStart() != 0) {
        ARLOGe("setupCamera(): Unable to begin camera data capture.\n");
        return (FALSE);
    }
    arUtilTimerReset();

    // Register GLUT event-handling callbacks.
    // NB: mainLoop() is registered by Visibility.
    glutDisplayFunc(Display);
    glutReshapeFunc(Reshape);
    glutVisibilityFunc(Visibility);
    glutKeyboardFunc(Keyboard);

    glutMainLoop();

    return (0);
}
static void mouseEvent(int button, int state, int x, int y) { AR2VideoBufferT *buff; unsigned char *p, *p1; int ssx, ssy, eex, eey; int i, j, k; char line[256]; if( x < 0 ) x = 0; if( x >= xsize ) x = xsize-1; if( y < 0 ) y = 0; if( y >= ysize ) y = ysize-1; x *= SCALE; y *= SCALE; if( button == GLUT_RIGHT_BUTTON && state == GLUT_UP ) { if( status == 0 ) { arVideoCapStop(); arVideoClose(); if( patt.loop_num > 0 ) { calc_distortion( &patt, xsize, ysize, aspect_ratio, dist_factor, dist_function_version ); ARLOG("--------------\n"); if (dist_function_version == 3) { ARLOG("Center X: %f\n", dist_factor[0]); ARLOG(" Y: %f\n", dist_factor[1]); ARLOG("Size Adjust: %f\n", dist_factor[2]); ARLOG("Aspect Ratio: %f\n", dist_factor[3]); ARLOG("Dist Factor1: %f\n", dist_factor[4]); ARLOG("Dist Factor2: %f\n", dist_factor[5]); } else if (dist_function_version == 2) { ARLOG("Center X: %f\n", dist_factor[0]); ARLOG(" Y: %f\n", dist_factor[1]); ARLOG("Size Adjust: %f\n", dist_factor[2]); ARLOG("Dist Factor1: %f\n", dist_factor[3]); ARLOG("Dist Factor2: %f\n", dist_factor[4]); } else if (dist_function_version == 1) { ARLOG("Center X: %f\n", dist_factor[0]); ARLOG(" Y: %f\n", dist_factor[1]); ARLOG("Size Adjust: %f\n", dist_factor[2]); ARLOG("Dist Factor: %f\n", dist_factor[3]); } ARLOG("--------------\n"); status = 2; check_num = 0; print_comment(5); } else { exit(0); } } else if( status == 1 ) { if( patt.loop_num == 0 ) {ARLOGe("error!!\n"); exit(0);} patt.loop_num--; free( patt.point[patt.loop_num] ); free( patt.savedImage[patt.loop_num] ); status = 0; point_num = 0; arVideoCapStart(); if( patt.loop_num == 0 ) print_comment(0); else print_comment(4); } } if( button == GLUT_LEFT_BUTTON && state == GLUT_DOWN ) { if( status == 1 && point_num < patt.h_num*patt.v_num ) { sx = ex = x; sy = ey = y; p = &(patt.savedImage[patt.loop_num-1][(y*xsize+x)*pixelSize]); p1 = &(clipImage[0]); if (pixelFormat == AR_PIXEL_FORMAT_BGRA || pixelFormat == AR_PIXEL_FORMAT_RGBA) { k = (255*3 - (*(p+0) + 
*(p+1) + *(p+2))) / 3; if( k < thresh ) k = 0; else k = 255; *(p1+0) = *(p1+1) = *(p1+2) = k; } else if (pixelFormat == AR_PIXEL_FORMAT_ARGB || pixelFormat == AR_PIXEL_FORMAT_ABGR) { k = (255*3 - (*(p+1) + *(p+2) + *(p+3))) / 3; if( k < thresh ) k = 0; else k = 255; *(p1+1) = *(p1+2) = *(p1+3) = k; } else if (pixelFormat == AR_PIXEL_FORMAT_BGR || pixelFormat == AR_PIXEL_FORMAT_RGB) { k = (255*3 - (*(p+0) + *(p+1) + *(p+2))) / 3; if( k < thresh ) k = 0; else k = 255; *(p1+0) = *(p1+1) = *(p1+2) = k; } else if (pixelFormat == AR_PIXEL_FORMAT_MONO || pixelFormat == AR_PIXEL_FORMAT_420v || pixelFormat == AR_PIXEL_FORMAT_420f) { k = 255 - *p; if( k < thresh ) k = 0; else k = 255; *p1 = k; } else if (pixelFormat == AR_PIXEL_FORMAT_2vuy) { k = 255 - *(p+1); if( k < thresh ) k = 0; else k = 255; *(p1+1) = k; } else if (pixelFormat == AR_PIXEL_FORMAT_yuvs) { k = 255 - *p; if( k < thresh ) k = 0; else k = 255; *p1 = k; } } } if( button == GLUT_LEFT_BUTTON && state == GLUT_UP ) { if( status == 0 && patt.loop_num < LOOP_MAX ) { while (!(buff = arVideoGetImage()) || !buff->fillFlag) arUtilSleep(2); p = buff->buff; patt.savedImage[patt.loop_num] = (unsigned char *)malloc( xsize*ysize*pixelSize ); if( patt.savedImage[patt.loop_num] == NULL ) exit(0); p1 = patt.savedImage[patt.loop_num]; for(i=0;i<xsize*ysize*pixelSize;i++) *(p1++) = *(p++); arVideoCapStop(); patt.point[patt.loop_num] = (CALIB_COORD_T *)malloc( sizeof(CALIB_COORD_T)*patt.h_num*patt.v_num ); if( patt.point[patt.loop_num] == NULL ) exit(0); patt.loop_num++; status = 1; sx = sy = ex= ey = -1; print_comment(1); } else if( status == 1 && point_num == patt.h_num*patt.v_num ) { status = 0; point_num = 0; arVideoCapStart(); ARLOG("### No.%d ###\n", patt.loop_num); for( j = 0; j < patt.v_num; j++ ) { for( i = 0; i < patt.h_num; i++ ) { ARLOG("%2d, %2d: %6.2f, %6.2f\n", i+1, j+1, patt.point[patt.loop_num-1][j*patt.h_num+i].x_coord, patt.point[patt.loop_num-1][j*patt.h_num+i].y_coord); } } ARLOG("\n\n"); if( patt.loop_num < 
LOOP_MAX ) print_comment(4); else print_comment(6); } else if( status == 1 ) { if( sx < ex ) { ssx = sx; eex = ex; } else { ssx = ex; eex = sx; } if( sy < ey ) { ssy = sy; eey = ey; } else { ssy = ey; eey = sy; } patt.point[patt.loop_num-1][point_num].x_coord = 0.0; patt.point[patt.loop_num-1][point_num].y_coord = 0.0; p = clipImage; k = 0; for( j = 0; j < (eey-ssy+1); j++ ) { for( i = 0; i < (eex-ssx+1); i++ ) { if( pixelSize == 1 ) { patt.point[patt.loop_num-1][point_num].x_coord += i * *p; patt.point[patt.loop_num-1][point_num].y_coord += j * *p; k += *p; } else { patt.point[patt.loop_num-1][point_num].x_coord += i * *(p+1); patt.point[patt.loop_num-1][point_num].y_coord += j * *(p+1); k += *(p+1); } p += pixelSize; } } if( k != 0 ) { patt.point[patt.loop_num-1][point_num].x_coord /= k; patt.point[patt.loop_num-1][point_num].y_coord /= k; patt.point[patt.loop_num-1][point_num].x_coord += ssx; patt.point[patt.loop_num-1][point_num].y_coord += ssy; point_num++; } sx = sy = ex= ey = -1; ARLOG(" # %d/%d\n", point_num, patt.h_num*patt.v_num); if( point_num == patt.h_num*patt.v_num ) print_comment(2); } else if( status == 2 ) { check_num++; if( check_num == patt.loop_num ) { if(patt.loop_num >= 2) { if( calc_inp(&patt, dist_factor, xsize, ysize, mat, dist_function_version) < 0 ) { ARLOGe("Calibration failed.\n"); exit(0); } save_param(); if (dist_function_version == 3) { printf("Do you want to repeat again?"); scanf("%s", line); if( line[0] == 'y' ) { aspect_ratio *= mat[0][0] / mat[1][1]; ARLOG("New aspect ratio = %f\n", aspect_ratio); calc_distortion( &patt, xsize, ysize, aspect_ratio, dist_factor, dist_function_version ); ARLOG("--------------\n"); ARLOG("Center X: %f\n", dist_factor[0]); ARLOG(" Y: %f\n", dist_factor[1]); ARLOG("Size Adjust: %f\n", dist_factor[2]); ARLOG("Aspect Ratio: %f\n", dist_factor[3]); ARLOG("Dist Factor1: %f\n", dist_factor[4]); ARLOG("Dist Factor2: %f\n", dist_factor[5]); ARLOG("--------------\n"); status = 2; check_num = 0; 
print_comment(5); return; } } } exit(0); } if( check_num+1 == patt.loop_num ) { ARLOG("\nLeft Mouse Button: Next Step.\n"); } else { ARLOG(" %d/%d.\n", check_num+1, patt.loop_num); } } } return; }
//======== // 初期化 //======== bool cARTK::initialize( void ) { const char *szCameraParamFName = "Data/camera_para.dat"; #ifdef _WIN32 char *szVConfFName = "Data/WDM_camera_flipV.xml"; #else char *vconf = ""; #endif ARParam sCamParamTemp; int iCamImgSizeX, iCamImgSizeY; // カメラデバイスのオープン if( arVideoOpen( szVConfFName ) < 0 ) { ErrorMessage( "Unable to open connection to camera.\n" ); return false; } // カメラ画像のサイズを取得 if( arVideoInqSize( &iCamImgSizeX, &iCamImgSizeY ) < 0 ) return false; // カメラパラメータファイルの読込み if( arParamLoad( szCameraParamFName, 1, &sCamParamTemp ) < 0 ) { ErrorMessage( "Error loading parameter file for camera.\n" ); return false; } // カメラパラメータのサイズ部分を変更 arParamChangeSize( &sCamParamTemp, iCamImgSizeX, iCamImgSizeY, &m_sCameraParam ); // ライブラリ内のカメラパラメータの初期化 arInitCparam( &m_sCameraParam ); // NyARTransMatの初期化 m_pNyARInst = nyar_NyARTransMat_O2_create( &m_sCameraParam ); // カメラ画像のキャプチャを開始 if( arVideoCapStart() != 0 ) { ErrorMessage( "Unable to begin camera data capture.\n" ); return false; } // マーカーパターンファイルの読込み for( int i = 0 ; i < ARMK_MAXNUM ; i++ ) { if( (m_sMarkerInfo[i].iPattID = arLoadPatt( g_szPattFName[i] )) < 0 ) { ErrorMessage( "Pattern file load error !!\n" ); return false; } m_sMarkerInfo[i].dWidth = g_dPattWidth[i]; m_sMarkerInfo[i].dCenterPos[0] = m_sMarkerInfo[i].dCenterPos[1] = 0.0f; m_sMarkerInfo[i].bVisible = false; } // ちょっとずらす m_sMarkerInfo[1].dCenterPos[0] = 0.0f; m_sMarkerInfo[1].dCenterPos[1] = 30.0f; m_iThreshold = 120; m_dViewScaleFactor = 0.16; return true; }
int main(int argc, char** argv) { char glutGamemode[32]; const char *cparam_name = "Data2/camera_para.dat"; char vconf[] = ""; const char markerConfigDataFilename[] = "Data2/markers.dat"; #ifdef DEBUG arLogLevel = AR_LOG_LEVEL_DEBUG; #endif // // Library inits. // glutInit(&argc, argv); // // Video setup. // #ifdef _WIN32 CoInitialize(NULL); #endif if (!setupCamera(cparam_name, vconf, &gCparamLT)) { ARLOGe("main(): Unable to set up AR camera.\n"); exit(-1); } // // AR init. // // Create the OpenGL projection from the calibrated camera parameters. arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, cameraLens); if (!initNFT(gCparamLT, arVideoGetPixelFormat())) { ARLOGe("main(): Unable to init NFT.\n"); exit(-1); } // // Graphics setup. // // Set up GL context(s) for OpenGL to draw into. glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH); if (!prefWindowed) { if (prefRefresh) sprintf(glutGamemode, "%ix%i:%i@%i", prefWidth, prefHeight, prefDepth, prefRefresh); else sprintf(glutGamemode, "%ix%i:%i", prefWidth, prefHeight, prefDepth); glutGameModeString(glutGamemode); glutEnterGameMode(); } else { glutInitWindowSize(gCparamLT->param.xsize, gCparamLT->param.ysize); glutCreateWindow(argv[0]); } // Setup ARgsub_lite library for current OpenGL context. if ((gArglSettings = arglSetupForCurrentContext(&(gCparamLT->param), arVideoGetPixelFormat())) == NULL) { ARLOGe("main(): arglSetupForCurrentContext() returned error.\n"); cleanup(); exit(-1); } arUtilTimerReset(); // // Markers setup. // // Load marker(s). newMarkers(markerConfigDataFilename, &markersNFT, &markersNFTCount); if (!markersNFTCount) { ARLOGe("Error loading markers from config. file '%s'.\n", markerConfigDataFilename); cleanup(); exit(-1); } ARLOGi("Marker count = %d\n", markersNFTCount); // Marker data has been loaded, so now load NFT data. if (!loadNFTData()) { ARLOGe("Error loading NFT data.\n"); cleanup(); exit(-1); } // Start the video. 
if (arVideoCapStart() != 0) { ARLOGe("setupCamera(): Unable to begin camera data capture.\n"); return (FALSE); } // Register GLUT event-handling callbacks. // NB: mainLoop() is registered by Visibility. glutDisplayFunc(Display); glutReshapeFunc(Reshape); glutVisibilityFunc(Visibility); glutKeyboardFunc(Keyboard); glutMainLoop(); return (0); }