/*
 * Callback invoked once the video module has (or has failed to) determine the
 * camera's intrinsic parameters. Finishes AR initialisation: builds the
 * parameter lookup table, the GL projection, the NFT tracker, and kicks off
 * async NFT data loading on a worker thread.
 *
 * cparam_p  - parameters determined by the video module, or NULL if unknown.
 * userdata  - unused.
 *
 * Side effects: writes globals gCparamLT, videoInited, cameraLens,
 * cameraPoseValid, nftDataLoadingThreadHandle. On any failure the function
 * logs, frees what it created, and returns leaving videoInited unset.
 */
static void nativeVideoGetCparamCallback(const ARParam *cparam_p, void *userdata) {
    // Load the camera parameters, resize for the window and init.
    ARParam cparam;
    if (cparam_p) cparam = *cparam_p;
    else {
        // No auto-detected parameters: fall back to the file named by the
        // global cparaName.
        LOGE("Unable to automatically determine camera parameters. Using default.\n");
        if (arParamLoad(cparaName, 1, &cparam) < 0) {
            LOGE("Error: Unable to load parameter file %s for camera.\n", cparaName);
            return;
        }
    }
    // Rescale the calibration if it was produced at a different frame size.
    if (cparam.xsize != videoWidth || cparam.ysize != videoHeight) {
#ifdef DEBUG
        LOGI("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
#endif
        arParamChangeSize(&cparam, videoWidth, videoHeight, &cparam);
    }
#ifdef DEBUG
    LOGI("*** Camera Parameter ***\n");
    arParamDisp(&cparam);
#endif
    // Build the undistortion lookup table used by the tracker.
    if ((gCparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
        LOGE("Error: arParamLTCreate.\n");
        return;
    }
    videoInited = true;
    //
    // AR init.
    //
    // Create the OpenGL projection from the calibrated camera parameters.
    arglCameraFrustumRHf(&gCparamLT->param, NEAR_PLANE, FAR_PLANE, cameraLens);
    cameraPoseValid = FALSE;
    if (!initNFT(gCparamLT, gPixFormat)) {
        LOGE("Error initialising NFT.\n");
        arParamLTFree(&gCparamLT);  // also NULLs gCparamLT
        return;
    }
    // Marker data has already been loaded, so now load NFT data on a second thread.
    nftDataLoadingThreadHandle = threadInit(0, NULL, loadNFTDataAsync);
    if (!nftDataLoadingThreadHandle) {
        LOGE("Error starting NFT loading thread.\n");
        arParamLTFree(&gCparamLT);
        return;
    }
    threadStartSignal(nftDataLoadingThreadHandle);
}
/*
 * Open the camera named by vconf, read its frame size and pixel format,
 * load the calibration file cparam_name (rescaling it to the frame if
 * needed), and hand back a newly created lookup-table parameter struct
 * in *cparamLT_p.
 *
 * Returns TRUE on success. On failure returns FALSE with the video path
 * closed and *cparamLT_p untouched or NULL.
 */
static int setupCamera(const char *cparam_name, char *vconf, ARParamLT **cparamLT_p)
{
    ARParam         param;
    int             frameWidth, frameHeight;
    AR_PIXEL_FORMAT pf;

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        ARLOGe("setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }

    // Find the size of the window.
    if (arVideoGetSize(&frameWidth, &frameHeight) < 0) {
        ARLOGe("setupCamera(): Unable to determine camera frame size.\n");
        arVideoClose();
        return (FALSE);
    }
    ARLOGi("Camera image size (x,y) = (%d,%d)\n", frameWidth, frameHeight);

    // Get the format in which the camera is returning pixels.
    pf = arVideoGetPixelFormat();
    if (pf == AR_PIXEL_FORMAT_INVALID) {
        ARLOGe("setupCamera(): Camera is using unsupported pixel format.\n");
        arVideoClose();
        return (FALSE);
    }

    // Load the camera parameters and rescale them to the frame if necessary.
    if (arParamLoad(cparam_name, 1, &param) < 0) {
        ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
        arVideoClose();
        return (FALSE);
    }
    if (param.xsize != frameWidth || param.ysize != frameHeight) {
        ARLOGw("*** Camera Parameter resized from %d, %d. ***\n", param.xsize, param.ysize);
        arParamChangeSize(&param, frameWidth, frameHeight, &param);
    }
#ifdef DEBUG
    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&param);
#endif

    // Build the lookup-table form of the parameters for the caller.
    *cparamLT_p = arParamLTCreate(&param, AR_PARAM_LT_DEFAULT_OFFSET);
    if (*cparamLT_p == NULL) {
        ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
        arVideoClose();
        return (FALSE);
    }

    return (TRUE);
}
/*
 * Utility entry point: loads a camera calibration (from the global cpara,
 * defaulting to cparaDefault), then prints the imaging resolution in DPI
 * that a fronto-parallel surface would have at distances from 10 mm to
 * 9000 mm, using ar2GetResolution2().
 */
int main(int argc, char *argv[])
{
    ARParam cparam;
    //ARParamLT *cparamLT;
    float trans[3][4];   // camera-to-surface pose; identity rotation, z set per step
    float pos[2];        // image point at which resolution is evaluated (origin)
    float dpi[2];        // output: horizontal/vertical resolution in DPI
    //char name[1024], ext[1024];
    int i, j;
    float z;

    init(argc, argv);
    if (!cpara) cpara = cparaDefault;
    //ar2UtilDivideExt( cpara, name, ext );

    // Load the camera parameters, resize for the window and init.
    //if( arParamLoad(name, ext, 1, &cparam) < 0 )
    if (arParamLoad(cpara, 1, &cparam) < 0) {
        ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cpara);
        exit(-1);
    }
    // Only rescale when both target dimensions were supplied (xsize/ysize
    // are globals set by init(); -1 means "keep calibration size").
    if (xsize != -1 && ysize != -1 && (cparam.xsize != xsize || cparam.ysize != ysize)) {
        ARLOG("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
        arParamChangeSize(&cparam, xsize, ysize, &cparam);
    }
    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&cparam);
    //if ((cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
    //    ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
    //    exit(-1);
    //}

    pos[0] = 0.0;
    pos[1] = 0.0;
    // Identity rotation, zero translation; only trans[2][3] (distance) varies.
    for (j = 0; j < 3; j++) for (i = 0; i < 4; i++) trans[j][i] = ((i == j) ? 1.0 : 0.0);
    // Sweep distances 10,20,...,90, 100,...,900, 1000,...,9000 mm.
    for(i = 10; i <= 1000; i*=10 ) {
        for(j = 1; j < 10; j++) {
            z = j*i;
            trans[2][3] = z;
            ar2GetResolution2( &cparam, trans, pos, dpi );
            ARLOG("Distance: %f [mm] --> Resolution = %10.5f, %10.5f [DPI]\n", z, dpi[0], dpi[1]);
        }
    }
    return (0);
}
/*
 * Class:     edu_dhbw_andar_ARToolkit
 * Method:    artoolkit_init
 * Signature: (Ljava/lang/String;IIII)V
 *
 * Loads the camera calibration named by calibFile, scales it to the capture
 * image size, initialises ARToolKit and the GL layer, and copies the
 * resulting GL projection matrix (gl_cpara) into the static Java field
 * ARObject.glCameraMatrix. Throws AndARRuntimeException on load failure.
 */
JNIEXPORT void JNICALL Java_edu_dhbw_andar_ARToolkit_artoolkit_1init__Ljava_lang_String_2IIII
  (JNIEnv *env, jobject object, jstring calibFile, jint imageWidth, jint imageHeight, jint screenWidth, jint screenHeight) {
    ARParam wparam;
    const char *cparam_name = (*env)->GetStringUTFChars( env, calibFile, NULL );

    /* FIX: xsize was assigned imageHeight (copy-paste error), so the global
     * width was wrong for any non-square frame. */
    xsize = imageWidth;
    ysize = imageHeight;
    printf("Image size (x,y) = (%d,%d)\n", xsize, ysize);

    /* set the initial camera parameters */
    if( arParamLoad(cparam_name, 1, &wparam) < 0 ) {
        __android_log_write(ANDROID_LOG_ERROR,"AR native","Camera parameter load error !!");
        jclass exc = (*env)->FindClass( env, "edu/dhbw/andar/exceptions/AndARRuntimeException" );
        if ( exc != NULL )
            (*env)->ThrowNew( env, exc, "Camera parameter load error !!" );
        /* FIX: a pending Java exception does not halt native code; previously
         * execution fell through and used the uninitialized wparam. Release
         * the UTF string and return so the exception propagates cleanly. */
        (*env)->ReleaseStringUTFChars( env, calibFile, cparam_name );
        return;
    }
#ifdef DEBUG_LOGGING
    else {
        __android_log_write(ANDROID_LOG_INFO,"AR native","Camera parameter loaded successfully !!");
    }
#endif

    /* Rescale the calibration to the capture size and install it. */
    arParamChangeSize( &wparam, imageWidth, imageHeight, &cparam );
    arInitCparam( &cparam );
    printf("*** Camera Parameter ***\n");
    arParamDisp( &cparam );

    //initialize openGL stuff
    argInit( &cparam, 1.0, 0, screenWidth, screenHeight, 0 );

    /* Copy the 4x4 GL projection (gl_cpara) into ARObject.glCameraMatrix. */
    jclass arObjectClass = (*env)->FindClass(env, "edu/dhbw/andar/ARObject");
    if (arObjectClass != NULL) {
        jfieldID glCameraMatrixFieldID = (*env)->GetStaticFieldID(env, arObjectClass, "glCameraMatrix", "[F");
        if (glCameraMatrixFieldID != NULL) {
            jobject glCameraMatrixObj = (*env)->GetStaticObjectField(env, arObjectClass, glCameraMatrixFieldID);
            if (glCameraMatrixObj != NULL) {
                /* FIX: the isCopy argument is a jboolean* — pass NULL, not the
                 * jboolean constant JNI_FALSE. */
                float *glCamMatrix = (*env)->GetFloatArrayElements(env, glCameraMatrixObj, NULL);
                int i = 0;
                for (i = 0; i < 16; i++)
                    glCamMatrix[i] = gl_cpara[i];
                /* mode 0: copy back and free the elements buffer. */
                (*env)->ReleaseFloatArrayElements(env, glCameraMatrixObj, glCamMatrix, 0);
            }
        }
    }
    (*env)->ReleaseStringUTFChars( env, calibFile, cparam_name);
}
/*
 * One-shot application setup for the paddle/items demo: opens the video
 * path, installs the camera calibration, loads the paddle marker and the
 * multi-marker configuration, seeds the virtual item list and the paddle
 * contents, and opens the graphics window. Exits the process on any
 * initialisation failure.
 */
static void init( void )
{
    ARParam  wparam;

    /* open the video path */
    if( arVideoOpen( vconf ) < 0 ) exit(0);
    /* find the size of the window */
    if( arVideoInqSize(&xsize, &ysize) < 0 ) exit(0);
    printf("Image size (x,y) = (%d,%d)\n", xsize, ysize);

    /* set the initial camera parameters */
    if( arParamLoad(cparam_name, 1, &wparam) < 0 ) {
        printf("Camera parameter load error !!\n");
        exit(0);
    }
    /* Rescale the calibration to the capture size and make it current. */
    arParamChangeSize( &wparam, xsize, ysize, &cparam );
    arInitCparam( &cparam );
    printf("*** Camera Parameter ***\n");
    arParamDisp( &cparam );

    /* load the paddle marker file */
    if( (paddleInfo = paddleInit(paddle_name)) == NULL ) {
        printf("paddleInit error!!\n");
        exit(0);
    }
    printf("Loaded Paddle File\n");

    if( (config = arMultiReadConfigFile(config_name)) == NULL ) {
        printf("config data load error !!\n");
        exit(0);
    }
    printf("Loaded Multi Marker File\n");

    /* init items: three items placed on the ground plane, the fourth starts
     * on the paddle (onpaddle=1). Positions are in marker coordinates. */
    myListItem.itemnum=4;
    myListItem.item[0].pos[0]=0.;myListItem.item[0].pos[1]=0.;myListItem.item[0].onpaddle=0;
    myListItem.item[1].pos[0]=100.;myListItem.item[1].pos[1]=-100.;myListItem.item[1].onpaddle=0;
    myListItem.item[2].pos[0]=200.;myListItem.item[2].pos[1]=0.;myListItem.item[2].onpaddle=0;
    myListItem.item[3].pos[0]=0.;myListItem.item[3].pos[1]=0.;myListItem.item[3].onpaddle=1;

    /* set up the initial paddle contents: item index 3, centred, unrotated */
    myPaddleItem.item = 3;
    myPaddleItem.angle = 0.0;
    myPaddleItem.x = 0.0;
    myPaddleItem.y = 0.0;

    /* open the graphics window */
    argInit( &cparam, 1.0, 0, 0, 0, 0 );
}
/* * Setup artoolkit */ void ARPublisher::arInit () { arInitCparam (&cam_param_); ROS_INFO ("*** Camera Parameter ***"); arParamDisp (&cam_param_); // load in the object data - trained markers and associated bitmap files if ((object = ar_object::read_ObjData (pattern_filename_, data_directory_, &objectnum)) == NULL) ROS_BREAK (); ROS_DEBUG ("Objectfile num = %d", objectnum); sz_ = cvSize (cam_param_.xsize, cam_param_.ysize); capture_ = cvCreateImage (sz_, IPL_DEPTH_8U, 4); configured_ = true; }
//initialisation artoolkit void arInit() { printf("Debut initialisation AR\n"); cout << vconf << endl; ARParam wparam; ///ouverture fenetre de dialogue de parametrage if( arVideoOpen( vconf ) < 0 ) /*exit(0)*/ printf("Connexion a la webcam impossible\n"); if( arVideoInqSize(&xsize, &ysize) < 0 ) /*exit(0)*/printf("Impossible de recuperer la taille de la video\n"); printf("Image size (x,y) = (%d,%d)\n", xsize, ysize); ///parametrage de la camera via la fentre de dialogue precedente if( arParamLoad(cparam_name, 1, &wparam) < 0 ) { printf("Camera parameter load error !!\n"); //exit(0); } //changement des param de base par ceux choisi lors de l'affichage de la fenetre de configuration arParamChangeSize( &wparam, xsize, ysize, &cparam ); arInitCparam( &cparam ); printf("*** Camera Parameter ***\n"); arParamDisp( &cparam ); ///affichage parametres ///fin init camera ///debut init marker de detection for( int i = 0; i < 2; i++ ) { if( (object[i].patt_id=arLoadPatt(object[i].patt_name)) < 0 ) { printf("pattern load error: %s\n", object[i].patt_name); //exit(0); } } argInit( &cparam, 1.0, 0, 0, 0, 0 ); glMatrixMode(GL_PROJECTION); glLoadIdentity(); glMatrixMode(GL_MODELVIEW); glLoadIdentity(); printf("Fin initialisation AR\n"); }
static void init( void ) { ARParam wparam; int i; /* open the video path */ if( arVideoOpen( vconf ) < 0 ) exit(0); /* find the size of the window */ if( arVideoInqSize(&xsize, &ysize) < 0 ) exit(0); printf("Image size (x,y) = (%d,%d)\n", xsize, ysize); /* set the initial camera parameters */ if( arParamLoad(cparam_name, 1, &wparam) < 0 ) { printf("Camera parameter load error !!\n"); exit(0); } arParamChangeSize( &wparam, xsize, ysize, &cparam ); arInitCparam( &cparam ); printf("*** Camera Parameter ***\n"); arParamDisp( &cparam ); /* load the paddle marker file */ if( (paddleInfo = paddleInit(paddle_name)) == NULL ) { printf("paddleInit error!!\n"); exit(0); } printf("Loaded Paddle File\n"); if( (config = arMultiReadConfigFile(config_name)) == NULL ) { printf("config data load error !!\n"); exit(0); } printf("Loaded Multi Marker File\n"); /* initialize the targets */ for (i=0;i<TARGET_NUM;i++){ myTarget[i].pos[0] = 50.0*i; myTarget[i].pos[1] = -50.0*i; myTarget[i].pos[2] = 50.0*i; myTarget[i].state = NOT_TOUCHED; } /* open the graphics window */ argInit( &cparam, 1.0, 0, 0, 0, 0 ); }
static void init( void ) { ARParam wparam; /* set the initial camera parameters */ if( arParamLoad(cparam_name, 1, &wparam) < 0 ) { printf("Camera parameter load error !!\n"); exit(0); } arParamChangeSize( &wparam, xsize, ysize, &cparam ); arInitCparam( &cparam ); printf("*** Camera Parameter ***\n"); arParamDisp( &cparam ); /* open the graphics window */ argInit( &cparam, 1.0, 0, 0, 0, 0 ); }
/*
 * Interactively prompt for a camera parameter filename (default
 * "Data/camera_para.dat"), load it, open the video path, scale the
 * parameters to the capture size, install them, and start capture.
 *
 * Returns TRUE on success, FALSE on any failure. FIX: previously the
 * fgets failure path called exit(0), inconsistent with the function's
 * FALSE-returning error contract; it now returns FALSE so the caller
 * can clean up. The sscanf conversion is also width-bounded.
 */
static int setupCamera(ARParam *cparam)
{
    ARParam wparam;
    char    name1[256], name2[256];
    int     xsize, ysize;

    printf("Enter camera parameter filename");
    printf("(Data/camera_para.dat): ");
    if (fgets(name1, sizeof(name1), stdin) == NULL) return (FALSE);
    /* Width bound (%255s) prevents overflowing name2; fall back to the
     * default path when the user enters nothing. */
    if (sscanf(name1, "%255s", name2) != 1) {
        strcpy(name2, "Data/camera_para.dat");
    }

    // Load the camera parameters.
    if (arParamLoad(name2, 1, &wparam) < 0 ) {
        printf("Parameter load error !!\n");
        return (FALSE);
    }

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        fprintf(stderr, "setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }

    // Find the size of the window.
    if (arVideoInqSize(&xsize, &ysize) < 0) return (FALSE);
    fprintf(stdout, "Camera image size (x,y) = (%d,%d)\n", xsize, ysize);

    // Resize for the window and init.
    arParamChangeSize(&wparam, xsize, ysize, cparam);
    fprintf(stdout, "*** Camera Parameter ***\n");
    arParamDisp(cparam);
    arInitCparam(cparam);

    if (arVideoCapStart() != 0) {
        fprintf(stderr, "setupCamera(): Unable to begin camera data capture.\n");
        return (FALSE);
    }

    return (TRUE);
}
void ARSinglePublisher::arInit () { arInitCparam (&cam_param_); ROS_INFO ("*** Camera Parameter ***"); arParamDisp (&cam_param_); // load pattern file ROS_INFO ("Loading pattern"); patt_id_ = arLoadPatt (pattern_filename_); if (patt_id_ < 0) { ROS_ERROR ("Pattern file load error: %s", pattern_filename_); ROS_BREAK (); } sz_ = cvSize (cam_param_.xsize, cam_param_.ysize); capture_ = cvCreateImage (sz_, IPL_DEPTH_8U, 4); }
/*
 * Activate the calibrated camera parameters and load the trained marker
 * object data; allocate the capture buffer on pre-Hydro ROS only.
 */
void ARMultiPublisher::arInit ()
{
  arInitCparam (&cam_param_);
  ROS_INFO ("*** Camera Parameter ***");
  arParamDisp (&cam_param_);

  // load in the object data - trained markers and associated bitmap files
  if ((object = ar_object::read_ObjData (pattern_filename_, &objectnum)) == NULL)
    ROS_BREAK ();
  ROS_DEBUG ("Objectfile num = %d", objectnum);

  sz_ = cvSize (cam_param_.xsize, cam_param_.ysize);
#if ROS_VERSION_MINIMUM(1, 9, 0)
  // FIXME: Why is this not in the object
  // NOTE(review): this declares a LOCAL `capture_` that shadows the class
  // member and is destroyed immediately — on ROS >= 1.9 no capture buffer
  // is ever allocated here. Looks unintentional; confirm against the
  // member's declared type before fixing.
  cv_bridge::CvImagePtr capture_;
#else
  // DEPRECATED: Fuerte support ends when Hydro is released
  capture_ = cvCreateImage (sz_, IPL_DEPTH_8U, 4);
#endif
}
void ARMultiPublisher::arInit() { arInitCparam(&cam_param_); ROS_INFO("Camera parameters for ARMultiPublisher are:"); arParamDisp(&cam_param_); if ((multi_marker_config_ = arMultiReadConfigFile(pattern_filename_)) == NULL) { ROS_ASSERT_MSG(false, "Could not load configurations for ARMultiPublisher."); } // load in the object data - trained markers and associated bitmap files // if ((object = ar_object::read_ObjData(pattern_filename_, &objectnum)) == NULL) // ROS_BREAK (); num_total_markers_ = multi_marker_config_->marker_num; ROS_INFO("Read >%i< objects from file.", num_total_markers_); size_ = cvSize(cam_param_.xsize, cam_param_.ysize); capture_ = cvCreateImage(size_, IPL_DEPTH_8U, 4); }
/*
 * Open the camera described by vconf, read its frame size, load the
 * calibration file cparam_name scaled to that size into *cparam, make it
 * current, and start frame capture.
 *
 * Returns TRUE on success, FALSE on any failure.
 */
static int setupCamera(const char *cparam_name, char *vconf, ARParam *cparam)
{
    ARParam file_param;
    int     frame_w, frame_h;

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        fprintf(stderr, "setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }

    // Find the size of the window.
    if (arVideoInqSize(&frame_w, &frame_h) < 0) {
        return (FALSE);
    }
    fprintf(stdout, "Camera image size (x,y) = (%d,%d)\n", frame_w, frame_h);

    // Load the camera parameters, resize for the window and init.
    if (arParamLoad(cparam_name, 1, &file_param) < 0) {
        fprintf(stderr, "setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
        return (FALSE);
    }
    arParamChangeSize(&file_param, frame_w, frame_h, cparam);
    fprintf(stdout, "*** Camera Parameter ***\n");
    arParamDisp(cparam);
    arInitCparam(cparam);

    // Begin streaming frames.
    if (arVideoCapStart() != 0) {
        fprintf(stderr, "setupCamera(): Unable to begin camera data capture.\n");
        return (FALSE);
    }

    return (TRUE);
}
/*
 * Assemble an ARParam from the calibration globals (xsize, ysize,
 * dist_factor, mat), display it, prompt for a filename on stdin, and save
 * it with arParamSave().
 *
 * FIXES: prompt typo "Fielname" -> "Filename"; scanf conversion is now
 * width-bounded to the 256-byte buffer (CERT STR31-C) and its return value
 * is checked so EOF/failed input no longer saves to an uninitialized name.
 */
static void save_param(void)
{
    char    name[256];
    ARParam param;
    int     i, j;

    param.xsize = xsize;
    param.ysize = ysize;
    for( i = 0; i < 4; i++ ) param.dist_factor[i] = dist_factor[i];
    for( j = 0; j < 3; j++ ) {
        for( i = 0; i < 4; i++ ) {
            param.mat[j][i] = mat[j][i];
        }
    }
    arParamDisp( &param );

    printf("Filename: ");
    if( scanf( "%255s", name ) != 1 ) return;  /* no valid token: abort save */
    arParamSave( name, 1, &param );

    return;
}
int CWebCam::SetupWebCam(const char *cparam_names, char *vconfs) { int xsize, ysize; ARParam wparam; if((ARTVideo = ar2VideoOpen(vconfs)) == 0) return(0); if(ar2VideoInqSize(ARTVideo, &xsize, &ysize) < 0) return(0); if(arParamLoad(cparam_names, 1, &wparam) < 0) return(0); arParamChangeSize(&wparam, xsize, ysize, &ARTCparam); arInitCparam(&ARTCparam); arParamDisp(&ARTCparam); ARTThreshhold = 100; arglCameraFrustumRH(&ARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, projectionMat); if(ar2VideoCapStart(ARTVideo) != 0) return(0); ar2VideoCapNext(ARTVideo); return(1); }
/* set up the application parameters - read in from command line*/ static int init(void) { char cparaname[256]; char pattname[256]; ARParam wparam; strcpy( cparaname, "Data/camera_para.dat" ); strcpy( pattname, "Data/patt.hiro" ); /* open the video path */ if( arVideoOpen( vconf ) < 0 ) exit(0); /* find the size of the window */ if( arVideoInqSize(&xsize, &ysize) < 0 ) exit(0); printf("Image size (x,y) = (%d,%d)\n", xsize, ysize); /* set the initial camera parameters */ if( arParamLoad(cparaname, 1, &wparam) < 0 ) { printf("Camera parameter load error !!\n"); exit(0); } arParamChangeSize( &wparam, xsize, ysize, &cparam ); arInitCparam( &cparam ); printf("*** Camera Parameter ***\n"); arParamDisp( &cparam ); /* open the graphics window */ argInit( &cparam, 2.0, 0, 2, 1, 0 ); if( (target_id = arLoadPatt(pattname)) < 0 ) { printf("Target pattern load error!!\n"); exit(0); } arDebug = 0; return 0; }
/*
 * Assemble an ARParam from the calibration globals (xsize, ysize,
 * dist_factor, mat, dist_function_version), display it, and save it to
 * save_filename — prompting on stdin only when no filename was preset.
 */
static void save_param(void)
{
    ARParam param;
    int i, j;

    param.xsize = xsize;
    param.ysize = ysize;
    for( i = 0; i < AR_DIST_FACTOR_NUM_MAX; i++ ) param.dist_factor[i] = dist_factor[i];
    for( j = 0; j < 3; j++ ) {
        for( i = 0; i < 4; i++ ) {
            param.mat[j][i] = mat[j][i];
        }
    }
    param.dist_function_version = dist_function_version;
    arParamDisp( &param );

    /* Prompt only when the caller left save_filename empty. */
    if( save_filename[0] == '\0' ) {
        printf("Filename: ");
        /* NOTE(review): unbounded %s can overflow save_filename, and the
         * return value is unchecked — add a field width matching the
         * buffer's declared size (declared elsewhere) and an error check. */
        scanf( "%s", save_filename );
    }
    arParamSave( save_filename, 1, &param );

    return;
}
/*
 * Stereo-calibration setup: parse command-line options, open both cameras,
 * load each camera's intrinsic calibration, size and create the display
 * window and two side-by-side viewports, and allocate the OpenCV/ICP
 * buffers used to collect chessboard corner observations.
 *
 * Writes many globals: chessboardCornerNum{X,Y}, calibImageNum,
 * patternWidth, vidL/vidR, xsize/ysize/pixFormat per side, paramL/paramR,
 * vpL/vpR, calibImageL/R, cornersL/R, worldCoord, calibData.
 * Exits the process on any failure.
 */
static void init(int argc, char *argv[])
{
    char *vconfL = NULL;
    char *vconfR = NULL;
    char *cparaL = NULL;
    char *cparaR = NULL;
    char cparaLDefault[] = "Data/cparaL.dat";
    char cparaRDefault[] = "Data/cparaR.dat";
    ARParam wparam;
    ARGViewport viewport;
    int i, j;
    int gotTwoPartOption;
    int screenWidth, screenHeight, screenMargin;
    double wscalef, hscalef, scalef;

    /* Zero means "not set on the command line"; defaults filled in below. */
    chessboardCornerNumX = 0;
    chessboardCornerNumY = 0;
    calibImageNum = 0;
    patternWidth = 0.0f;

    i = 1; // argv[0] is name of app, so start at 1.
    while (i < argc) {
        gotTwoPartOption = FALSE;
        // Look for two-part options first.
        if ((i + 1) < argc) {
            if (strcmp(argv[i], "--vconfL") == 0) {
                i++;
                vconfL = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--vconfR") == 0) {
                i++;
                vconfR = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--cparaL") == 0) {
                i++;
                cparaL = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--cparaR") == 0) {
                i++;
                cparaR = argv[i];
                gotTwoPartOption = TRUE;
            }
        }
        if (!gotTwoPartOption) {
            // Look for single-part options.
            if (strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-help") == 0 || strcmp(argv[i], "-h") == 0) {
                usage(argv[0]);
            } else if (strcmp(argv[i], "--version") == 0 || strcmp(argv[i], "-version") == 0 || strcmp(argv[i], "-v") == 0) {
                ARLOG("%s version %s\n", argv[0], AR_HEADER_VERSION_STRING);
                exit(0);
            } else if( strncmp(argv[i], "-cornerx=", 9) == 0 ) {
                /* "=N" style options: parse the value and reject non-positives. */
                if( sscanf(&(argv[i][9]), "%d", &chessboardCornerNumX) != 1 ) usage(argv[0]);
                if( chessboardCornerNumX <= 0 ) usage(argv[0]);
            } else if( strncmp(argv[i], "-cornery=", 9) == 0 ) {
                if( sscanf(&(argv[i][9]), "%d", &chessboardCornerNumY) != 1 ) usage(argv[0]);
                if( chessboardCornerNumY <= 0 ) usage(argv[0]);
            } else if( strncmp(argv[i], "-imagenum=", 10) == 0 ) {
                if( sscanf(&(argv[i][10]), "%d", &calibImageNum) != 1 ) usage(argv[0]);
                if( calibImageNum <= 0 ) usage(argv[0]);
            } else if( strncmp(argv[i], "-pattwidth=", 11) == 0 ) {
                if( sscanf(&(argv[i][11]), "%f", &patternWidth) != 1 ) usage(argv[0]);
                if( patternWidth <= 0 ) usage(argv[0]);
            } else if( strncmp(argv[i], "-cparaL=", 8) == 0 ) {
                cparaL = &(argv[i][8]);
            } else if( strncmp(argv[i], "-cparaR=", 8) == 0 ) {
                cparaR = &(argv[i][8]);
            } else {
                ARLOGe("Error: invalid command line argument '%s'.\n", argv[i]);
                usage(argv[0]);
            }
        }
        i++;
    }

    /* Apply compile-time defaults for anything not given on the command line. */
    if( chessboardCornerNumX == 0 ) chessboardCornerNumX = CHESSBOARD_CORNER_NUM_X;
    if( chessboardCornerNumY == 0 ) chessboardCornerNumY = CHESSBOARD_CORNER_NUM_Y;
    if( calibImageNum == 0 )        calibImageNum = CALIB_IMAGE_NUM;
    if( patternWidth == 0.0f )      patternWidth = (float)CHESSBOARD_PATTERN_WIDTH;
    if (!cparaL) cparaL = cparaLDefault;
    if (!cparaR) cparaR = cparaRDefault;

    ARLOG("CHESSBOARD_CORNER_NUM_X = %d\n", chessboardCornerNumX);
    ARLOG("CHESSBOARD_CORNER_NUM_Y = %d\n", chessboardCornerNumY);
    ARLOG("CHESSBOARD_PATTERN_WIDTH = %f\n", patternWidth);
    ARLOG("CALIB_IMAGE_NUM = %d\n", calibImageNum);
    ARLOG("Video parameter Left : %s\n", vconfL);
    ARLOG("Video parameter Right: %s\n", vconfR);
    ARLOG("Camera parameter Left : %s\n", cparaL);
    ARLOG("Camera parameter Right: %s\n", cparaR);

    /* Open both cameras and query their frame sizes and pixel formats. */
    if( (vidL=ar2VideoOpen(vconfL)) == NULL ) {
        ARLOGe("Cannot found the first camera.\n");
        exit(0);
    }
    if( (vidR=ar2VideoOpen(vconfR)) == NULL ) {
        ARLOGe("Cannot found the second camera.\n");
        exit(0);
    }
    if( ar2VideoGetSize(vidL, &xsizeL, &ysizeL) < 0 ) exit(0);
    if( ar2VideoGetSize(vidR, &xsizeR, &ysizeR) < 0 ) exit(0);
    if( (pixFormatL=ar2VideoGetPixelFormat(vidL)) < 0 ) exit(0);
    if( (pixFormatR=ar2VideoGetPixelFormat(vidR)) < 0 ) exit(0);
    ARLOG("Image size for the left camera = (%d,%d)\n", xsizeL, ysizeL);
    ARLOG("Image size for the right camera = (%d,%d)\n", xsizeR, ysizeR);

    /* Load each camera's calibration, rescaled to its actual frame size. */
    if( arParamLoad(cparaL, 1, &wparam) < 0 ) {
        ARLOGe("Camera parameter load error !! %s\n", cparaL);
        exit(0);
    }
    arParamChangeSize( &wparam, xsizeL, ysizeL, &paramL );
    ARLOG("*** Camera Parameter for the left camera ***\n");
    arParamDisp( &paramL );
    if( arParamLoad(cparaR, 1, &wparam) < 0 ) {
        ARLOGe("Camera parameter load error !! %s\n", cparaR);
        exit(0);
    }
    arParamChangeSize( &wparam, xsizeR, ysizeR, &paramR );
    ARLOG("*** Camera Parameter for the right camera ***\n");
    arParamDisp( &paramR );

    /* Shrink the side-by-side window uniformly if it would not fit on the
     * screen (leaving SCREEN_SIZE_MARGIN free). */
    screenWidth = glutGet(GLUT_SCREEN_WIDTH);
    screenHeight = glutGet(GLUT_SCREEN_HEIGHT);
    if (screenWidth > 0 && screenHeight > 0) {
        screenMargin = (int)(MAX(screenWidth, screenHeight) * SCREEN_SIZE_MARGIN);
        if ((screenWidth - screenMargin) < (xsizeL + xsizeR) || (screenHeight - screenMargin) < MAX(ysizeL, ysizeR)) {
            wscalef = (double)(screenWidth - screenMargin) / (double)(xsizeL + xsizeR);
            hscalef = (double)(screenHeight - screenMargin) / (double)MAX(ysizeL, ysizeR);
            scalef = MIN(wscalef, hscalef);
            ARLOG("Scaling %dx%d window by %0.3f to fit onto %dx%d screen (with %2.0f%% margin).\n", xsizeL + xsizeR, MAX(ysizeL, ysizeR), scalef, screenWidth, screenHeight, SCREEN_SIZE_MARGIN*100.0);
        } else {
            scalef = 1.0;
        }
    } else {
        scalef = 1.0;
    }

    /* open the graphics window */
    if( argCreateWindow((int)((xsizeL + xsizeR)*scalef), (int)(MAX(ysizeL, ysizeR)*scalef)) < 0 ) {
        ARLOGe("Error: argCreateWindow.\n");
        exit(0);
    }

    /* Left viewport at x=0, right viewport immediately to its right. */
    viewport.sx = 0;
    viewport.sy = 0;
    viewport.xsize = (int)(xsizeL*scalef);
    viewport.ysize = (int)(ysizeL*scalef);
    if( (vpL=argCreateViewport(&viewport)) == NULL ) {
        ARLOGe("Error: argCreateViewport.\n");
        exit(0);
    }
    viewport.sx = (int)(xsizeL*scalef);
    viewport.sy = 0;
    viewport.xsize = (int)(xsizeR*scalef);
    viewport.ysize = (int)(ysizeR*scalef);
    if( (vpR=argCreateViewport(&viewport)) == NULL ) {
        ARLOGe("Error: argCreateViewport.\n");
        exit(0);
    }
    argViewportSetPixFormat( vpL, pixFormatL );
    argViewportSetPixFormat( vpR, pixFormatR );
    argViewportSetCparam( vpL, &paramL );
    argViewportSetCparam( vpR, &paramR );
    argViewportSetDispMethod( vpL, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME );
    argViewportSetDispMethod( vpR, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME );
    argViewportSetDispMode(vpL, AR_GL_DISP_MODE_FIT_TO_VIEWPORT_KEEP_ASPECT_RATIO);
    argViewportSetDispMode(vpR, AR_GL_DISP_MODE_FIT_TO_VIEWPORT_KEEP_ASPECT_RATIO);

    /* Grayscale working images for chessboard detection, one per camera. */
    calibImageL = cvCreateImage( cvSize(xsizeL, ysizeL), IPL_DEPTH_8U, 1);
    calibImageR = cvCreateImage( cvSize(xsizeR, ysizeR), IPL_DEPTH_8U, 1);
    arMalloc(cornersL, CvPoint2D32f, chessboardCornerNumX*chessboardCornerNumY);
    arMalloc(cornersR, CvPoint2D32f, chessboardCornerNumX*chessboardCornerNumY);

    /* World coordinates of every chessboard corner on the z=0 plane,
     * spaced patternWidth apart; shared by both cameras. */
    arMalloc(worldCoord, ICP3DCoordT, chessboardCornerNumX*chessboardCornerNumY);
    for( i = 0; i < chessboardCornerNumX; i++ ) {
        for( j = 0; j < chessboardCornerNumY; j++ ) {
            worldCoord[i*chessboardCornerNumY+j].x = patternWidth*i;
            worldCoord[i*chessboardCornerNumY+j].y = patternWidth*j;
            worldCoord[i*chessboardCornerNumY+j].z = 0.0;
        }
    }

    /* Per-capture observation buffers for the ICP calibration step. */
    arMalloc(calibData, ICPCalibDataT, calibImageNum);
    for( i = 0; i < calibImageNum; i++ ) {
        arMalloc(calibData[i].screenCoordL, ICP2DCoordT, chessboardCornerNumX*chessboardCornerNumY);
        arMalloc(calibData[i].screenCoordR, ICP2DCoordT, chessboardCornerNumX*chessboardCornerNumY);
        calibData[i].worldCoordL = worldCoord;
        calibData[i].worldCoordR = worldCoord;
        calibData[i].numL = chessboardCornerNumX*chessboardCornerNumY;
        calibData[i].numR = chessboardCornerNumX*chessboardCornerNumY;
    }

    return;
}
/*
 * Demo setup: builds the video configuration string from argv, opens the
 * camera, loads per-camera video settings (when the device reports an id)
 * and the camera calibration, creates the tracking handles and pattern,
 * then opens TWO windows showing the same feed — vp1 with lens-distortion
 * compensation enabled, vp2 with it disabled (for visual comparison).
 * Exits the process on any failure.
 */
static void init(int argc, char *argv[])
{
    ARParam cparam;
    ARGViewport viewport;
    ARPattHandle *arPattHandle;
    char vconf[512];
    AR_PIXEL_FORMAT pixFormat;
    ARUint32 id0, id1;
    int i;

    /* Join all remaining argv words into a single video-config string. */
    if (argc == 1) vconf[0] = '\0';
    else {
        strcpy(vconf, argv[1]);
        for (i = 2; i < argc; i++) {
            strcat(vconf, " ");
            strcat(vconf, argv[i]);
        }
    }

    /* open the video path */
    if (arVideoOpen(vconf) < 0) exit(0);
    if (arVideoGetSize(&xsize, &ysize) < 0) exit(0);
    ARLOG("Image size (x,y) = (%d,%d)\n", xsize, ysize);
    if ((pixFormat = arVideoGetPixelFormat()) < 0) exit(0);
    /* If the camera reports a unique id, try to load device-specific video
     * settings from a file named via the VPARA_NAME template (vconf is
     * reused as scratch here). Missing settings are not fatal. */
    if (arVideoGetId(&id0, &id1) == 0) {
        ARLOG("Camera ID = (%08x, %08x)\n", id1, id0);
        sprintf(vconf, VPARA_NAME, id1, id0);
        if (arVideoLoadParam(vconf) < 0) {
            ARLOGe("No camera setting data!!\n");
        }
    }

    /* set the initial camera parameters */
    if (arParamLoad(CPARA_NAME, 1, &cparam) < 0) {
        ARLOGe("Camera parameter load error !!\n");
        exit(0);
    }
    arParamChangeSize(&cparam, xsize, ysize, &cparam);
    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&cparam);

    /* Tracking pipeline: lookup table, AR handle, 3D pose handle, pattern. */
    if ((gCparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
        ARLOGe("Error: arParamLTCreate.\n");
        exit(-1);
    }
    if ((arHandle = arCreateHandle(gCparamLT)) == NULL) {
        ARLOGe("Error: arCreateHandle.\n");
        exit(0);
    }
    if (arSetPixelFormat(arHandle, pixFormat) < 0) {
        ARLOGe("Error: arSetPixelFormat.\n");
        exit(0);
    }
    if ((ar3DHandle = ar3DCreateHandle(&cparam)) == NULL) {
        ARLOGe("Error: ar3DCreateHandle.\n");
        exit(0);
    }
    if ((arPattHandle = arPattCreateHandle()) == NULL) {
        ARLOGe("Error: arPattCreateHandle.\n");
        exit(0);
    }
    if ((patt_id = arPattLoad(arPattHandle, PATT_NAME)) < 0) {
        ARLOGe("pattern load error !!\n");
        exit(0);
    }
    arPattAttach(arHandle, arPattHandle);

    /* open the graphics window: first window, distortion compensation ON */
    w1 = argCreateWindow(xsize, ysize);
    viewport.sx = 0;
    viewport.sy = 0;
    viewport.xsize = xsize;
    viewport.ysize = ysize;
    if ((vp1 = argCreateViewport(&viewport)) == NULL) exit(0);
    argViewportSetCparam(vp1, &cparam);
    argViewportSetPixFormat(vp1, pixFormat);
    argViewportSetDispMethod(vp1, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME);
    argViewportSetDispMode(vp1, AR_GL_DISP_MODE_FIT_TO_VIEWPORT);
    argViewportSetDistortionMode(vp1, AR_GL_DISTORTION_COMPENSATE_ENABLE);

    /* second window, identical except distortion compensation OFF */
    w2 = argCreateWindow(xsize, ysize);
    viewport.sx = 0;
    viewport.sy = 0;
    viewport.xsize = xsize;
    viewport.ysize = ysize;
    if ((vp2 = argCreateViewport(&viewport)) == NULL) exit(0);
    argViewportSetCparam(vp2, &cparam);
    argViewportSetPixFormat(vp2, pixFormat);
    argViewportSetDispMethod(vp2, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME);
    argViewportSetDispMode(vp2, AR_GL_DISP_MODE_FIT_TO_VIEWPORT);
    argViewportSetDistortionMode(vp2, AR_GL_DISTORTION_COMPENSATE_DISABLE);

    return;
}
bool ARToolKit4NFTTracker::init(int xsize,int ysize, const std::string& pattlist_name,const std::string& camera_name) { ARParam wparam; // Set the initial camera parameters. if(arParamLoad((char*)camera_name.c_str(), 1, &wparam) < 0) { std::cerr << "ERROR: Camera parameter load error." << std::endl; return false; } arParamChangeSize(&wparam, xsize, ysize, &cparam); std::cout << "*** Camera Parameter ***" << std::endl; arParamDisp( &cparam ); if( (arHandle = arCreateHandle(&cparam)) == NULL ) { std::cerr << "ERROR: arCreateHandle." << std::endl; return false; } int pixFormat = AR_PIXEL_FORMAT_BGRA; if( arSetPixelFormat(arHandle, pixFormat) < 0 ) { std::cerr << "Error: arSetPixelFormat." << std::endl; return false; } if( arSetDebugMode(arHandle, AR_DEBUG_DISABLE) < 0 ) { std::cerr << "Error: arSetDebugMode." << std::endl; return false; } if( arSetLabelingThresh(arHandle,threshold) < 0 ) { std::cerr << "Error: arSetLabelingThresh." << std::endl; return false; } arSetMarkerExtractionMode( arHandle, AR_NOUSE_TRACKING_HISTORY ); if( (ar3DHandle=ar3DCreateHandle(&cparam)) == NULL ) { std::cerr << "Error: ar3DCreateHandle." << std::endl; return false; } if( (arPattHandle=arPattCreateHandle()) == NULL ) { std::cerr << "Error: arPattCreateHandle." << std::endl; return false; } setProjection(10.0f, 10000.0f); //INIT NFT int matchingImageMode = AR2_MATCHING_FRAME_IMAGE; int matchingMethod = AR2_MATCHING_FINE; int debugMode = 0; ar2Handle = ar2CreateHandle( &cparam, pixFormat ); ar2ChangeMacthingImageMode( ar2Handle, matchingImageMode ); ar2ChangeMacthingMethod( ar2Handle, matchingMethod ); ar2ChangeDebugMode( ar2Handle, debugMode ); //arFittingMode = AR_FITTING_TO_IDEAL; //arImageProcMode = AR_IMAGE_PROC_IN_FULL; if (!setupMarkers(pattlist_name)) { std::cerr << "ERROR: Marker setup failed." << std::endl; return false; } return true; }
/*
 * Initialise the hybrid OSGART/BazAR tracker: load the ARToolKit camera
 * calibration scaled to (xsize, ysize), read BazAR's configuration and
 * camera calibration, train or load the keypoint classifier, allocate the
 * working image buffers, and overwrite the ARToolKit intrinsic matrix with
 * BazAR's calibration. Returns false on failure; note that the two BazAR
 * config-load failures call exit(0) rather than returning.
 */
bool BazARTracker::init(int xsize, int ysize, const std::string& bazar_config_name, const std::string& camera_name)
{
    /*OSGART*/
    ARParam wparam;
    // Set the initial camera parameters.
    cparamName = camera_name;
    if(arParamLoad((char*)cparamName.c_str(), 1, &wparam) < 0) {
        std::cerr << "ERROR: Camera parameter load error." << std::endl;
        return false;
    }
    arParamChangeSize(&wparam, xsize, ysize,&(m_cparam->cparam));
    arInitCparam(&(m_cparam->cparam));
    arParamDisp(&(m_cparam->cparam));

    /*BAZAR*/
    // load BazAR's configuration files and detector parameters
    if (!loadBazARConfig(bazar_config_name, &bazconf)) exit(0);
    // load BazAR's camera calibration file
    if (!loadBazARCamParams((char*)(bazconf.camCalFileName),&bazconf)) exit(0);

    // init bazar tracker
    matCameraRT4_4 = cvCreateMat(4, 4, CV_64F); //64 bit double precision float
    g_matIntrinsic = cvCreateMat(3, 3, CV_64F);

    // output windows (debug only)
    if (getDebugMode()){
        cvNamedWindow("Gray", CV_WINDOW_AUTOSIZE);
        cvNamedWindow("Result_BAZAR", CV_WINDOW_AUTOSIZE);
    }

    // fine tuning for accuracy - careful!!
    detector.ransac_dist_threshold = (float)bazconf.ransac_dist_threshold;
    detector.max_ransac_iterations = (float)bazconf.max_ransac_iterations;
    detector.non_linear_refine_threshold = (float)bazconf.non_linear_refine_threshold;
    detector.match_score_threshold = (float)bazconf.match_score_threshold;
    // A lower threshold will allow detection in harder conditions, but
    // might lead to false positives

    // Train or load classifier
    if(!detector.build_with_cache(
        // hse25: no hard coded stuff :) -- jaja
        (char*)(bazconf.modelFileName), // mode image file name
        400, // maximum number of keypoints on the model
        32,  // patch size in pixels
        3,   // yape radius. Use 3,5 or 7.
        16,  // number of trees for the classifier. Somewhere between 12-50
        3    // number of levels in the gaussian pyramid
    ))
    {
        cerr << "BazARTracker: Unable to load the model image " << (char*)(bazconf.modelFileName) <<" or its classifier.\n";
        return false;
    }

    // set camera parameters for BAZAR
    char *camCal = (char*)(bazconf.camCalFileName);
    char *camExt = (char*)(bazconf.camExtFileName);
    if(!augment.LoadOptimalStructureFromFile(camCal, camExt))
    {
        std::cerr << "BazARTracker: couldn't load camera parameters: " << camCal << " " << camExt << std::endl;
        return false;
    }

    // image buffers needed for detection, conversion..
    image = cvCreateImage(cvSize(xsize, ysize), IPL_DEPTH_8U, 4); // captured image
    gray = cvCreateImage(cvSize(xsize, ysize), IPL_DEPTH_8U, 1); // detector input
    display = cvCreateImage(cvSize(xsize, ysize), IPL_DEPTH_8U, 4); // debug

    // use bazar's camera calibration: copy the 3x3 intrinsic matrix over the
    // ARToolKit one, then mirror it into the OpenCV matrix g_matIntrinsic.
    m_cparam->cparam.mat[0][0] = bazconf.camCalMatrix[0][0];
    m_cparam->cparam.mat[0][1] = bazconf.camCalMatrix[0][1];
    m_cparam->cparam.mat[0][2] = bazconf.camCalMatrix[0][2];
    m_cparam->cparam.mat[1][0] = bazconf.camCalMatrix[1][0];
    m_cparam->cparam.mat[1][1] = bazconf.camCalMatrix[1][1];
    m_cparam->cparam.mat[1][2] = bazconf.camCalMatrix[1][2];
    m_cparam->cparam.mat[2][0] = bazconf.camCalMatrix[2][0];
    m_cparam->cparam.mat[2][1] = bazconf.camCalMatrix[2][1];
    m_cparam->cparam.mat[2][2] = bazconf.camCalMatrix[2][2];
    for(int i=0; i<3; i++) {
        for(int j=0; j<3; j++) {
            cvmSet(g_matIntrinsic, i, j, m_cparam->cparam.mat[i][j]);
        }
    }
    // end BAZAR

    setProjection(10.0f, 8000.0f);
    setDebugMode(m_debugmode);

    setupMarkers();

    // Success
    return true;
}
/*
 * Open the video source, load and scale the camera parameters, and create the
 * AR tracking handles.
 *
 * On success returns TRUE with *cparamLT_p, *arhandle and *ar3dhandle filled
 * in and video capture started.  On failure returns FALSE with all resources
 * acquired so far released (BUGFIX: the error paths after arParamLTCreate()
 * previously leaked the open video connection, the lookup table and the AR
 * handle).
 */
static int setupCamera(const char *cparam_name, char *vconf, ARParamLT **cparamLT_p, ARHandle **arhandle, AR3DHandle **ar3dhandle)
{
	ARParam         cparam;
	int             xsize, ysize;
	AR_PIXEL_FORMAT pixFormat;

	// Open the video path.
	if (arVideoOpen(vconf) < 0) {
		ARLOGe("setupCamera(): Unable to open connection to camera.\n");
		return (FALSE);
	}

	// Find the size of the window.
	if (arVideoGetSize(&xsize, &ysize) < 0) {
		ARLOGe("setupCamera(): Unable to determine camera frame size.\n");
		arVideoClose();
		return (FALSE);
	}
	ARLOGi("Camera image size (x,y) = (%d,%d)\n", xsize, ysize);

	// Get the format in which the camera is returning pixels.
	pixFormat = arVideoGetPixelFormat();
	if (pixFormat == AR_PIXEL_FORMAT_INVALID) {
		ARLOGe("setupCamera(): Camera is using unsupported pixel format.\n");
		arVideoClose();
		return (FALSE);
	}

	// Load the camera parameters, resize for the window and init.
	if (arParamLoad(cparam_name, 1, &cparam) < 0) {
		ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
		arVideoClose();
		return (FALSE);
	}
	if (cparam.xsize != xsize || cparam.ysize != ysize) {
		ARLOGw("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
		arParamChangeSize(&cparam, xsize, ysize, &cparam);
	}
#ifdef DEBUG
	ARLOG("*** Camera Parameter ***\n");
	arParamDisp(&cparam);
#endif
	if ((*cparamLT_p = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
		ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
		arVideoClose();
		return (FALSE);
	}

	if ((*arhandle = arCreateHandle(*cparamLT_p)) == NULL) {
		ARLOGe("setupCamera(): Error: arCreateHandle.\n");
		goto bail_paramLT;
	}
	if (arSetPixelFormat(*arhandle, pixFormat) < 0) {
		ARLOGe("setupCamera(): Error: arSetPixelFormat.\n");
		goto bail_arhandle;
	}
	if (arSetDebugMode(*arhandle, AR_DEBUG_DISABLE) < 0) {
		ARLOGe("setupCamera(): Error: arSetDebugMode.\n");
		goto bail_arhandle;
	}
	if (arSetImageProcMode(*arhandle, AR_IMAGE_PROC_FRAME_IMAGE) < 0) { // Change to AR_IMAGE_PROC_FIELD_IMAGE if using a DVCam.
		ARLOGe("setupCamera(): Error: arSetImageProcMode.\n");
		goto bail_arhandle;
	}
	if ((*ar3dhandle = ar3DCreateHandle(&(*cparamLT_p)->param)) == NULL) {
		ARLOGe("setupCamera(): Error: ar3DCreateHandle.\n");
		goto bail_arhandle;
	}
	if (arVideoCapStart() != 0) {
		ARLOGe("setupCamera(): Unable to begin camera data capture.\n");
		ar3DDeleteHandle(ar3dhandle);
		goto bail_arhandle;
	}

	return (TRUE);

	// Unwind partially-acquired resources so a FALSE return leaves no leaks
	// and no dangling output pointers.
bail_arhandle:
	arDeleteHandle(*arhandle);
	*arhandle = NULL;
bail_paramLT:
	arParamLTFree(cparamLT_p);
	arVideoClose();
	return (FALSE);
}
static void init( void ) { ARParam cparam; ARParamLT *cparamLT; ARGViewport viewport; xsize = 640; ysize = 480; ARLOGi("Image size (x,y) = (%d,%d)\n", xsize, ysize); /* set the initial camera parameters */ arParamClear(&cparam, xsize, ysize, AR_DIST_FUNCTION_VERSION_DEFAULT); ARLOG("*** Camera Parameter ***\n"); arParamDisp( &cparam ); //COVHI10445 ignored as false positive, i.e. cparam->m[3][4] uninitialized. cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET); arHandle = arCreateHandle(cparamLT); if( arHandle == NULL ) { ARLOGe("Error: arCreateHandle.\n"); exit(0); } arSetPixelFormat( arHandle, PIXEL_FORMAT ); arSetLabelingMode( arHandle, AR_LABELING_BLACK_REGION ); arSetImageProcMode( arHandle, AR_IMAGE_PROC_FRAME_IMAGE ); ar3DHandle = ar3DCreateHandle( &cparam ); if( ar3DHandle == NULL ) { ARLOGe("Error: ar3DCreateHandle.\n"); exit(0); } /* open the graphics window */ viewport.sx = 0; viewport.sy = 0; viewport.xsize = xsize; viewport.ysize = ysize; vp = argCreateViewport( &viewport ); if( vp == NULL ) exit(0); argViewportSetCparam( vp, &cparam ); argViewportSetPixFormat( vp, PIXEL_FORMAT ); argViewportSetDispMode( vp, AR_GL_DISP_MODE_FIT_TO_VIEWPORT ); argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_GL_DRAW_PIXELS ); //argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME ); //argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FIELD ); argViewportSetDistortionMode( vp, AR_GL_DISTORTION_COMPENSATE_DISABLE ); //argViewportSetDistortionMode( vp, AR_GL_DISTORTION_COMPENSATE_ENABLE ); #if 0 if( argSetFullScreenConfig("1024x768") == 0 ) { ARLOGe("Full screen is not possible.\n"); exit(0); } //argGetWindowSizeFullScreen( &viewport.xsize, &viewport.ysize ); viewport.sx = 0; viewport.sy = 0; viewport.xsize = 1024; viewport.ysize = 768; argViewportSetViewportFullScreen( vpL, &viewport ); viewport.sx = 1024; argViewportSetViewportFullScreen( vpR, &viewport ); #endif }
static void nativeVideoGetCparamCallback(const ARParam *cparam_p, void *userdata) { // Load the camera parameters, resize for the window and init. ARParam cparam; if (cparam_p) cparam = *cparam_p; else { LOGE("Unable to automatically determine camera parameters. Using default.\n"); if (arParamLoad(cparaName, 1, &cparam) < 0) { LOGE("Error: Unable to load parameter file %s for camera.\n", cparaName); return; } } if (cparam.xsize != videoWidth || cparam.ysize != videoHeight) { #ifdef DEBUG LOGI("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize); #endif arParamChangeSize(&cparam, videoWidth, videoHeight, &cparam); } #ifdef DEBUG LOGI("*** Camera Parameter ***\n"); arParamDisp(&cparam); #endif if ((gCparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) { LOGE("Error: arParamLTCreate.\n"); return; } videoInited = true; // // AR init. // // Create the OpenGL projection from the calibrated camera parameters. arglCameraFrustumRHf(&gCparamLT->param, NEAR_PLANE, FAR_PLANE, cameraLens); cameraPoseValid = FALSE; // Init AR. arHandle = arCreateHandle(gCparamLT); if (arHandle == NULL) { LOGE("Error creating AR handle"); return; } arPattAttach(arHandle, arPattHandle); if (arSetPixelFormat(arHandle, gPixFormat) < 0) { LOGE("Error setting pixel format"); return; } ar3DHandle = ar3DCreateHandle(&gCparamLT->param); if (ar3DHandle == NULL) { LOGE("Error creating 3D handle"); return; } // Other ARToolKit setup. arSetMarkerExtractionMode(arHandle, AR_USE_TRACKING_HISTORY_V2); //arSetMarkerExtractionMode(arHandle, AR_NOUSE_TRACKING_HISTORY); //arSetLabelingThreshMode(arHandle, AR_LABELING_THRESH_MODE_MANUAL); // Uncomment to use manual thresholding. // Set the pattern detection mode (template (pictorial) vs. matrix (barcode) based on // the marker types as defined in the marker config. file. arSetPatternDetectionMode(arHandle, arPattDetectionMode); // Default = AR_TEMPLATE_MATCHING_COLOR // Other application-wide marker options. 
Once set, these apply to all markers in use in the application. // If you are using standard ARToolKit picture (template) markers, leave commented to use the defaults. // If you are usign a different marker design (see http://www.artoolworks.com/support/app/marker.php ) // then uncomment and edit as instructed by the marker design application. //arSetLabelingMode(arHandle, AR_LABELING_BLACK_REGION); // Default = AR_LABELING_BLACK_REGION //arSetBorderSize(arHandle, 0.25f); // Default = 0.25f //arSetMatrixCodeType(arHandle, AR_MATRIX_CODE_3x3); // Default = AR_MATRIX_CODE_3x3 }
/* * Class: com_clab_artoolkit_port_JARToolkit * Method: JARParamDisplay * Signature: ()V */ JNIEXPORT void JNICALL Java_net_sourceforge_jartoolkit_core_JARToolKit_paramDisplay(JNIEnv *, jobject) { arParamDisp( cparam ); }
static void calib(void) { ARParam param; CvMat *objectPoints; CvMat *imagePoints; CvMat *pointCounts; CvMat *intrinsics; CvMat *distortionCoeff; CvMat *rotationVectors; CvMat *translationVectors; CvMat *rotationVector; CvMat *rotationMatrix; float intr[3][4]; float dist[4]; ARdouble trans[3][4]; ARdouble cx, cy, cz, hx, hy, h, sx, sy, ox, oy, err; int i, j, k, l; objectPoints = cvCreateMat(capturedImageNum * chessboardCornerNumX * chessboardCornerNumY, 3, CV_32FC1); imagePoints = cvCreateMat(capturedImageNum * chessboardCornerNumX * chessboardCornerNumY, 2, CV_32FC1); pointCounts = cvCreateMat(capturedImageNum, 1, CV_32SC1); intrinsics = cvCreateMat(3, 3, CV_32FC1); distortionCoeff = cvCreateMat(1, 4, CV_32FC1); rotationVectors = cvCreateMat(capturedImageNum, 3, CV_32FC1); translationVectors = cvCreateMat(capturedImageNum, 3, CV_32FC1); rotationVector = cvCreateMat(1, 3, CV_32FC1); rotationMatrix = cvCreateMat(3, 3, CV_32FC1); l = 0; for (k = 0; k < capturedImageNum; k++) { for (i = 0; i < chessboardCornerNumX; i++) { for (j = 0; j < chessboardCornerNumY; j++) { ((float*)(objectPoints->data.ptr + objectPoints->step * l))[0] = patternWidth * i; ((float*)(objectPoints->data.ptr + objectPoints->step * l))[1] = patternWidth * j; ((float*)(objectPoints->data.ptr + objectPoints->step * l))[2] = 0.0f; ((float*)(imagePoints->data.ptr + imagePoints->step * l))[0] = cornerSet[l].x; ((float*)(imagePoints->data.ptr + imagePoints->step * l))[1] = cornerSet[l].y; l++; } } ((int*)(pointCounts->data.ptr))[k] = chessboardCornerNumX * chessboardCornerNumY; } cvCalibrateCamera2(objectPoints, imagePoints, pointCounts, cvSize(xsize, ysize), intrinsics, distortionCoeff, rotationVectors, translationVectors, 0); for (j = 0; j < 3; j++) { for (i = 0; i < 3; i++) { intr[j][i] = ((float*)(intrinsics->data.ptr + intrinsics->step * j))[i]; } intr[j][3] = 0.0f; } for (i = 0; i < 4; i++) { dist[i] = ((float*)(distortionCoeff->data.ptr))[i]; } convParam(intr, dist, xsize, ysize, ¶m); // 
COVHI10434 ignored. arParamDisp(¶m); l = 0; for (k = 0; k < capturedImageNum; k++) { for (i = 0; i < 3; i++) { ((float*)(rotationVector->data.ptr))[i] = ((float*)(rotationVectors->data.ptr + rotationVectors->step * k))[i]; } cvRodrigues2(rotationVector, rotationMatrix); for (j = 0; j < 3; j++) { for (i = 0; i < 3; i++) { trans[j][i] = ((float*)(rotationMatrix->data.ptr + rotationMatrix->step * j))[i]; } trans[j][3] = ((float*)(translationVectors->data.ptr + translationVectors->step * k))[j]; } // arParamDispExt(trans); err = 0.0; for (i = 0; i < chessboardCornerNumX; i++) { for (j = 0; j < chessboardCornerNumY; j++) { cx = trans[0][0] * patternWidth * i + trans[0][1] * patternWidth * j + trans[0][3]; cy = trans[1][0] * patternWidth * i + trans[1][1] * patternWidth * j + trans[1][3]; cz = trans[2][0] * patternWidth * i + trans[2][1] * patternWidth * j + trans[2][3]; hx = param.mat[0][0] * cx + param.mat[0][1] * cy + param.mat[0][2] * cz + param.mat[0][3]; hy = param.mat[1][0] * cx + param.mat[1][1] * cy + param.mat[1][2] * cz + param.mat[1][3]; h = param.mat[2][0] * cx + param.mat[2][1] * cy + param.mat[2][2] * cz + param.mat[2][3]; if (h == 0.0) continue; sx = hx / h; sy = hy / h; arParamIdeal2Observ(param.dist_factor, sx, sy, &ox, &oy, param.dist_function_version); sx = ((float*)(imagePoints->data.ptr + imagePoints->step * l))[0]; sy = ((float*)(imagePoints->data.ptr + imagePoints->step * l))[1]; err += (ox - sx) * (ox - sx) + (oy - sy) * (oy - sy); l++; } } err = sqrt(err / (chessboardCornerNumX * chessboardCornerNumY)); ARLOG("Err[%2d]: %f[pixel]\n", k + 1, err); } saveParam(¶m); cvReleaseMat(&objectPoints); cvReleaseMat(&imagePoints); cvReleaseMat(&pointCounts); cvReleaseMat(&intrinsics); cvReleaseMat(&distortionCoeff); cvReleaseMat(&rotationVectors); cvReleaseMat(&translationVectors); cvReleaseMat(&rotationVector); cvReleaseMat(&rotationMatrix); }