int main(int argc, char *argv[]) { // ---------------------------------------------------------------------------- // Library inits. // glutInit(&argc, argv); // ---------------------------------------------------------------------------- // Hardware setup. // if (!setupCamera(&gARTCparam)) { fprintf(stderr, "main(): Unable to set up AR camera.\n"); exit(-1); } // ---------------------------------------------------------------------------- // Library setup. // // Set up GL context(s) for OpenGL to draw into. glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE); glutInitWindowSize(gARTCparam.xsize, gARTCparam.ysize); glutCreateWindow(argv[0]); // Setup argl library for current context. if ((gArglSettings = arglSetupForCurrentContext()) == NULL) { fprintf(stderr, "main(): arglSetupForCurrentContext() returned error.\n"); exit(-1); } arMalloc(gARTsaveImage, ARUint8, gARTCparam.xsize * gARTCparam.ysize * AR_PIX_SIZE_DEFAULT); // Register GLUT event-handling callbacks. // NB: mainLoop() is registered by Visibility. glutDisplayFunc(Display); glutReshapeFunc(Reshape); glutVisibilityFunc(Visibility); glutKeyboardFunc(Keyboard); glutMouseFunc(Mouse); glutMainLoop(); return (0); }
int arSetDebugMode( ARHandle *handle, int mode ) { if( handle == NULL ) return -1; if (handle->arDebug != mode) { handle->arDebug = mode; #if !AR_DISABLE_LABELING_DEBUG_MODE if (mode == AR_DEBUG_DISABLE) { free(handle->labelInfo.bwImage); handle->labelInfo.bwImage = NULL; } else { arMalloc(handle->labelInfo.bwImage, ARUint8, handle->xsize * handle->ysize); } #endif } return 0; }
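/*
 * Usage sketch (not from the library source): toggling labeling debug mode on an
 * existing ARHandle. Assumes arHandle was created elsewhere (e.g. with
 * arCreateHandle()) and that the usual <AR/ar.h> constants AR_DEBUG_ENABLE /
 * AR_DEBUG_DISABLE are available. While debug mode is on, arDetectMarker() fills
 * handle->labelInfo.bwImage with the xsize*ysize binarised image that labeling ran on.
 */
#include <stdio.h>
#include <AR/ar.h>

static void dumpLabelingDebugImage(ARHandle *arHandle, FILE *out)
{
    if (arSetDebugMode(arHandle, AR_DEBUG_ENABLE) < 0) return;      // NULL handle.

    // ... call arDetectMarker() on a frame here so bwImage gets filled ...

    if (arHandle->labelInfo.bwImage) {
        fwrite(arHandle->labelInfo.bwImage, 1, arHandle->xsize * arHandle->ysize, out);
    }
    arSetDebugMode(arHandle, AR_DEBUG_DISABLE);                     // Frees the debug image.
}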
static void defocus_image( ARUint8 *img, int xsize, int ysize, int n ) { ARUint8 *wimg; int isize; ARUint8 *p1, *p2; int i, j, k, w; isize = xsize * ysize; arMalloc( wimg, ARUint8, xsize*ysize ); for( k = 0; k < n; k++ ) { if( k%2 == 0 ) { p1 = img; p2 = wimg; } else { p1 = wimg; p2 = img; } for( j = 0; j < ysize; j++ ) { for( i = 0; i < xsize; i++ ) { if( i == 0 || j == 0 || i == xsize-1 || j == ysize-1 ) { *(p2++) = *(p1++); continue; } w = *(p1-xsize-1) + *(p1-xsize) + *(p1-xsize+1) + *(p1-1) + *(p1) + *(p1+1) + *(p1+xsize-1) + *(p1+xsize) + *(p1+xsize+1); *(p2++) = w / 9; p1++; } } } if( n%2 == 1 ) { p1 = wimg; p2 = img; for( i = 0; i < xsize*ysize; i++ ) *(p2++) = *(p1++); } free(wimg); return; }
AR2ImageSetT *ar2GenImageSet( ARUint8 *image, int xsize, int ysize, int nc, float dpi, float dpi_list[], int dpi_num ) { AR2ImageSetT *imageSet; int i; if( nc != 1 && nc != 3 ) return NULL; if( dpi_num <= 0 ) return NULL; if( dpi_list[0] > dpi ) return NULL; for( i = 1; i < dpi_num; i++ ) { if( dpi_list[i] > dpi_list[0] ) return NULL; } arMalloc( imageSet, AR2ImageSetT, 1 ); imageSet->num = dpi_num; arMalloc( imageSet->scale, AR2ImageT*, imageSet->num ); imageSet->scale[0] = ar2GenImageLayer1( image, xsize, ysize, nc, dpi, dpi_list[0] ); for( i = 1; i < dpi_num; i++ ) { imageSet->scale[i] = ar2GenImageLayer2( imageSet->scale[0], dpi_list[i] ); } return imageSet; }
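/*
 * Usage sketch (illustrative only): generating a multi-resolution image set from a
 * greyscale image and saving it. The source image, its dimensions and its dpi are
 * assumed to come from the caller. Note the constraints enforced above: dpi_list[0]
 * must be the largest entry and must not exceed the source dpi (so the source here
 * is assumed to be at least 80 dpi). ar2WriteImageSet()/ar2FreeImageSet() are the
 * matching AR2 calls used later in this tool; the <AR2/imageSet.h> header name is
 * an assumption.
 */
#include <AR2/imageSet.h>

static int buildAndSaveImageSet(ARUint8 *image, int xsize, int ysize, float dpi, char *basename)
{
    float         dpi_list[3] = { 80.0f, 40.0f, 20.0f };   // Highest resolution first.
    AR2ImageSetT *imageSet;

    imageSet = ar2GenImageSet(image, xsize, ysize, 1, dpi, dpi_list, 3);
    if (imageSet == NULL) return -1;

    if (ar2WriteImageSet(basename, imageSet) < 0) {         // Writes "<basename>.iset".
        ar2FreeImageSet(&imageSet);
        return -1;
    }
    ar2FreeImageSet(&imageSet);
    return 0;
}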
static GLuint argGenImageTexture(int tx, int ty, int full_half, GLenum target) { static ARUint8 *tmpImage = NULL; GLuint glid; glGenTextures(1, &glid); glBindTexture(target, glid); glTexParameterf(target, GL_TEXTURE_WRAP_S, GL_CLAMP); glTexParameterf(target, GL_TEXTURE_WRAP_T, GL_CLAMP); // glTexParameterf( target, GL_TEXTURE_MAG_FILTER, GL_NEAREST ); glTexParameterf(target, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL); glPixelStorei(GL_UNPACK_ALIGNMENT, 1); if (full_half == 0) { ty /= 2; } #ifdef __APPLE__ // glTexParameterf(target, GL_TEXTURE_PRIORITY, 0.0); // glTexParameteri(target, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_CACHED_APPLE); // glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, 1); #endif if (tmpImage != NULL) free(tmpImage); arMalloc(tmpImage, ARUint8, tx * ty); glTexImage2D(target, 0, 3, tx, ty, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, tmpImage); glBindTexture(target, 0); return glid; }
AR2VideoParamT *ar2VideoOpen( char *config ) { AR2VideoParamT *vid; char *a, line[256]; int i; arMalloc( vid, AR2VideoParamT, 1 ); vid->did = 0; vid->format = AR_VIDEO_INTERLEAVED; vid->packing = AR_VIDEO_RGB_8; if( DEFAULT_VIDEO_SIZE == VIDEO_FULL ) { vid->zoom = AR_VIDEO_1_P_1; } else { vid->zoom = AR_VIDEO_1_P_2; } vid->buf_size = -1; a = config; if( a != NULL) { for(;;) { while( *a == ' ' || *a == '\t' ) a++; if( *a == '\0' ) break; if( strncmp( a, "-size=", 6 ) == 0 ) { if( strncmp( &a[6], "FULL", 4 ) == 0 ) vid->zoom = AR_VIDEO_1_P_1; else if( strncmp( &a[6], "HALF", 4 ) == 0 ) vid->zoom = AR_VIDEO_1_P_2; else { arVideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-device=", 8 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[8], "%d", &vid->did ) == 0 ) { arVideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-bufsize=", 9 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[9], "%d", &vid->buf_size ) == 0 ) { arVideoDispOption(); free( vid ); return 0; } } else { arVideoDispOption(); free( vid ); return 0; } while( *a != ' ' && *a != '\t' && *a != '\0') a++; } } if( init == 0 ) { if( arVideoOpen2() < 0 ) { free( vid ); return 0; } init = 1; } if( arVideoSetupDevice2(vid->did, vid->format, vid->packing, vid->zoom) < 0 ) { free( vid ); return 0; } if( vid->buf_size > 0 ) { arVideoSetBufferSize2( vid->did, vid->buf_size ); } return vid; }
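/*
 * Example (illustrative): the configuration string accepted by this ar2VideoOpen()
 * variant is a whitespace-separated list of the options parsed above. Any
 * unrecognised token prints the option help and aborts the open.
 */
static AR2VideoParamT *openSecondCameraHalfSize(void)
{
    // "-device=1": capture device 1; "-size=HALF": half-resolution; "-bufsize=4": 4 frame buffers.
    return ar2VideoOpen("-device=1 -size=HALF -bufsize=4");
}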
AR_DLL_API ARMultiMarkerInfoT *arMultiReadConfigFile( const char *filename ) { FILE *fp=fopen(filename,"r"); if(fp == NULL){ return NULL; } char buf[256]; get_buff(buf, 256, fp); int num; if( sscanf(buf, "%d", &num) != 1 ) { fclose(fp); return NULL; } ARMultiEachMarkerInfoT *marker; ARMultiMarkerInfoT *marker_info; arMalloc(marker,ARMultiEachMarkerInfoT,num); std::string dir=getdir(filename); for(int i = 0; i < num; i++ ) { get_buff(buf, 256, fp); char buf1[256]; if( sscanf(buf, "%s", buf1) != 1 ) { fclose(fp); free(marker); return NULL; } if( (marker[i].patt_id = arLoadPatt((dir+"/"+buf1).c_str())) < 0 ) { fclose(fp); free(marker); return NULL; } get_buff(buf, 256, fp); if( sscanf(buf, "%lf", &marker[i].width) != 1 ) { fclose(fp); free(marker); return NULL; } get_buff(buf, 256, fp); if( sscanf(buf, "%lf %lf", &marker[i].center[0], &marker[i].center[1]) != 2 ) { fclose(fp); free(marker); return NULL; } for(int j = 0; j < 3; j++ ) { get_buff(buf, 256, fp); if( sscanf(buf, "%lf %lf %lf %lf", &marker[i].trans[j][0], &marker[i].trans[j][1], &marker[i].trans[j][2], &marker[i].trans[j][3]) != 4 ) { fclose(fp); free(marker); return NULL; } } arUtilMatInv( marker[i].trans, marker[i].itrans ); double wpos3d[4][2]; wpos3d[0][0] = marker[i].center[0] - marker[i].width/2.0; wpos3d[0][1] = marker[i].center[1] + marker[i].width/2.0; wpos3d[1][0] = marker[i].center[0] + marker[i].width/2.0; wpos3d[1][1] = marker[i].center[1] + marker[i].width/2.0; wpos3d[2][0] = marker[i].center[0] + marker[i].width/2.0; wpos3d[2][1] = marker[i].center[1] - marker[i].width/2.0; wpos3d[3][0] = marker[i].center[0] - marker[i].width/2.0; wpos3d[3][1] = marker[i].center[1] - marker[i].width/2.0; for(int j = 0; j < 4; j++ ) { marker[i].pos3d[j][0] = marker[i].trans[0][0] * wpos3d[j][0] + marker[i].trans[0][1] * wpos3d[j][1] + marker[i].trans[0][3]; marker[i].pos3d[j][1] = marker[i].trans[1][0] * wpos3d[j][0] + marker[i].trans[1][1] * wpos3d[j][1] + marker[i].trans[1][3]; marker[i].pos3d[j][2] = marker[i].trans[2][0] * wpos3d[j][0] + marker[i].trans[2][1] * wpos3d[j][1] + marker[i].trans[2][3]; } } fclose(fp); marker_info = (ARMultiMarkerInfoT *)malloc( sizeof(ARMultiMarkerInfoT) ); if( marker_info == NULL ) {free(marker); return NULL;} marker_info->marker = marker; marker_info->marker_num = num; marker_info->prevF = 0; return marker_info; }
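/*
 * Example input (illustrative) for arMultiReadConfigFile(). The format implied by
 * the parser above is: a marker count, then for each marker a pattern filename
 * (resolved relative to the configuration file's directory), the marker width, the
 * marker centre (x y), and a 3x4 marker-to-multimarker transform, one row per line.
 * A one-marker file might look like:
 *
 *     1
 *     patt.hiro
 *     80.0
 *     0.0 0.0
 *     1.0 0.0 0.0  0.0
 *     0.0 1.0 0.0  0.0
 *     0.0 0.0 1.0  0.0
 *
 * Blank-line and comment handling depends on get_buff().
 */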
static float ar2GetTransMat( ICPHandleT *icpHandle, float initConv[3][4], float pos2d[][2], float pos3d[][3], int num, float conv[3][4], int robustMode ) { ICPDataT data; float dx, dy, dz; ARdouble initMat[3][4], mat[3][4]; ARdouble err; int i, j; arMalloc( data.screenCoord, ICP2DCoordT, num ); arMalloc( data.worldCoord, ICP3DCoordT, num ); dx = dy = dz = 0.0; for( i = 0; i < num; i++ ) { dx += pos3d[i][0]; dy += pos3d[i][1]; dz += pos3d[i][2]; } dx /= num; dy /= num; dz /= num; for( i = 0; i < num; i++ ) { data.screenCoord[i].x = pos2d[i][0]; data.screenCoord[i].y = pos2d[i][1]; data.worldCoord[i].x = pos3d[i][0] - dx; data.worldCoord[i].y = pos3d[i][1] - dy; data.worldCoord[i].z = pos3d[i][2] - dz; } data.num = num; for( j = 0; j < 3; j++ ) { for( i = 0; i < 3; i++ ) initMat[j][i] = (ARdouble)(initConv[j][i]); } initMat[0][3] = (ARdouble)(initConv[0][0] * dx + initConv[0][1] * dy + initConv[0][2] * dz + initConv[0][3]); initMat[1][3] = (ARdouble)(initConv[1][0] * dx + initConv[1][1] * dy + initConv[1][2] * dz + initConv[1][3]); initMat[2][3] = (ARdouble)(initConv[2][0] * dx + initConv[2][1] * dy + initConv[2][2] * dz + initConv[2][3]); if( robustMode == 0 ) { if( icpPoint( icpHandle, &data, initMat, mat, &err ) < 0 ) { err = 100000000.0F; } } else { if( icpPointRobust( icpHandle, &data, initMat, mat, &err ) < 0 ) { err = 100000000.0F; } } free( data.screenCoord ); free( data.worldCoord ); for( j = 0; j < 3; j++ ) { for( i = 0; i < 3; i++ ) conv[j][i] = (float)mat[j][i]; } conv[0][3] = (float)(mat[0][3] - mat[0][0] * dx - mat[0][1] * dy - mat[0][2] * dz); conv[1][3] = (float)(mat[1][3] - mat[1][0] * dx - mat[1][1] * dy - mat[1][2] * dz); conv[2][3] = (float)(mat[2][3] - mat[2][0] * dx - mat[2][1] * dy - mat[2][2] * dz); return (float)err; }
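/*
 * Note on the centring performed above (editorial, not from the original source):
 * the world points are shifted by their centroid c = (dx, dy, dz) before running
 * ICP, which keeps the coordinates small and the normal equations well conditioned.
 * To compensate, the initial pose [R|t] has its translation remapped to
 * t' = R*c + t on the way in, and the refined pose [R'|t'] is mapped back with
 * t = t' - R'*c on the way out, so the returned conv[][] is expressed in the
 * original (uncentred) world frame.
 */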
static ARdouble arGetTransMatMultiSquareStereo2(AR3DStereoHandle *handle, ARMarkerInfo *marker_infoL, int marker_numL, ARMarkerInfo *marker_infoR, int marker_numR, ARMultiMarkerInfoT *config, int robustFlag) { ARdouble *pos2dL = NULL, *pos3dL = NULL; ARdouble *pos2dR = NULL, *pos3dR = NULL; ARdouble trans1[3][4], trans2[3][4]; ARdouble err, err2; int max, maxArea; int vnumL, vnumR; int dir; int i, j, k; for( i = 0; i < config->marker_num; i++ ) { k = -1; if( config->marker[i].patt_type == AR_MULTI_PATTERN_TYPE_TEMPLATE ) { for( j = 0; j < marker_numL; j++ ) { if( marker_infoL[j].idPatt != config->marker[i].patt_id ) continue; if( marker_infoL[j].cfPatt < 0.50 ) continue; if( k == -1 ) k = j; else if( marker_infoL[k].cfPatt < marker_infoL[j].cfPatt ) k = j; } config->marker[i].visible = k; if( k >= 0 ) { marker_infoL[k].dir = marker_infoL[k].dirPatt; } } else { for( j = 0; j < marker_numL; j++ ) { if( marker_infoL[j].idMatrix != config->marker[i].patt_id ) continue; if( marker_infoL[j].cfMatrix < 0.50 ) continue; if( k == -1 ) k = j; else if( marker_infoL[k].cfMatrix < marker_infoL[j].cfMatrix ) k = j; } config->marker[i].visible = k; if( k >= 0 ) { marker_infoL[k].dir = marker_infoL[k].dirMatrix; } } k = -1; if( config->marker[i].patt_type == AR_MULTI_PATTERN_TYPE_TEMPLATE ) { for( j = 0; j < marker_numR; j++ ) { if( marker_infoR[j].idPatt != config->marker[i].patt_id ) continue; if( marker_infoR[j].cfPatt < 0.50 ) continue; if( k == -1 ) k = j; else if( marker_infoR[k].cfPatt < marker_infoR[j].cfPatt ) k = j; } config->marker[i].visibleR = k; if( k >= 0 ) { marker_infoR[k].dir = marker_infoR[k].dirPatt; } } else { for( j = 0; j < marker_numR; j++ ) { if( marker_infoR[j].idMatrix != config->marker[i].patt_id ) continue; if( marker_infoR[j].cfMatrix < 0.50 ) continue; if( k == -1 ) k = j; else if( marker_infoR[k].cfMatrix < marker_infoR[j].cfMatrix ) k = j; } config->marker[i].visibleR = k; if( k >= 0 ) { marker_infoR[k].dir = marker_infoR[k].dirMatrix; } } } vnumL = 0; for( i = 0; i < config->marker_num; i++ ) { if( (j=config->marker[i].visible) == -1 ) continue; err = arGetTransMatSquareStereo( handle, &marker_infoL[j], NULL, config->marker[i].width, trans2 ); if( err > THRESH_1 ) { config->marker[i].visible = -1; //ARLOG("err = %f\n", err); continue; } if( vnumL == 0 || maxArea < marker_infoL[j].area ) { maxArea = marker_infoL[j].area; max = i; for( j = 0; j < 3; j++ ) { for( k = 0; k < 4; k++ ) trans1[j][k] = trans2[j][k]; } } vnumL++; } vnumR = 0; for( i = 0; i < config->marker_num; i++ ) { if( (j=config->marker[i].visibleR) == -1 ) continue; err = arGetTransMatSquareStereo( handle, NULL, &marker_infoR[j], config->marker[i].width, trans2 ); if( err > THRESH_1 ) { config->marker[i].visibleR = -1; //ARLOG("err = %f\n", err); continue; } if( (vnumL == 0 && vnumR == 0) || maxArea < marker_infoR[j].area ) { maxArea = marker_infoR[j].area; max = i; for( j = 0; j < 3; j++ ) { for( k = 0; k < 4; k++ ) trans1[j][k] = trans2[j][k]; } } vnumR++; } //ARLOG("vnumL=%d, vnumR=%d\n", vnumL, vnumR); if( (vnumL == 0 && vnumR == 0) || (vnumL < config->min_submarker && vnumR < config->min_submarker) ) { config->prevF = 0; //ARLOG("**** NG.\n"); return -1; } if(vnumL > 0) { arMalloc(pos2dL, ARdouble, vnumL*4*2); arMalloc(pos3dL, ARdouble, vnumL*4*3); } if(vnumR > 0) { arMalloc(pos2dR, ARdouble, vnumR*4*2); arMalloc(pos3dR, ARdouble, vnumR*4*3); } j = 0; for( i = 0; i < config->marker_num; i++ ) { if( (k=config->marker[i].visible) < 0 ) continue; dir = marker_infoL[k].dir; pos2dL[j*8+0] = 
marker_infoL[k].vertex[(4-dir)%4][0]; pos2dL[j*8+1] = marker_infoL[k].vertex[(4-dir)%4][1]; pos2dL[j*8+2] = marker_infoL[k].vertex[(5-dir)%4][0]; pos2dL[j*8+3] = marker_infoL[k].vertex[(5-dir)%4][1]; pos2dL[j*8+4] = marker_infoL[k].vertex[(6-dir)%4][0]; pos2dL[j*8+5] = marker_infoL[k].vertex[(6-dir)%4][1]; pos2dL[j*8+6] = marker_infoL[k].vertex[(7-dir)%4][0]; pos2dL[j*8+7] = marker_infoL[k].vertex[(7-dir)%4][1]; pos3dL[j*12+0] = config->marker[i].pos3d[0][0]; pos3dL[j*12+1] = config->marker[i].pos3d[0][1]; pos3dL[j*12+2] = config->marker[i].pos3d[0][2]; pos3dL[j*12+3] = config->marker[i].pos3d[1][0]; pos3dL[j*12+4] = config->marker[i].pos3d[1][1]; pos3dL[j*12+5] = config->marker[i].pos3d[1][2]; pos3dL[j*12+6] = config->marker[i].pos3d[2][0]; pos3dL[j*12+7] = config->marker[i].pos3d[2][1]; pos3dL[j*12+8] = config->marker[i].pos3d[2][2]; pos3dL[j*12+9] = config->marker[i].pos3d[3][0]; pos3dL[j*12+10] = config->marker[i].pos3d[3][1]; pos3dL[j*12+11] = config->marker[i].pos3d[3][2]; j++; } j = 0; for( i = 0; i < config->marker_num; i++ ) { if( (k=config->marker[i].visibleR) < 0 ) continue; dir = marker_infoR[k].dir; pos2dR[j*8+0] = marker_infoR[k].vertex[(4-dir)%4][0]; pos2dR[j*8+1] = marker_infoR[k].vertex[(4-dir)%4][1]; pos2dR[j*8+2] = marker_infoR[k].vertex[(5-dir)%4][0]; pos2dR[j*8+3] = marker_infoR[k].vertex[(5-dir)%4][1]; pos2dR[j*8+4] = marker_infoR[k].vertex[(6-dir)%4][0]; pos2dR[j*8+5] = marker_infoR[k].vertex[(6-dir)%4][1]; pos2dR[j*8+6] = marker_infoR[k].vertex[(7-dir)%4][0]; pos2dR[j*8+7] = marker_infoR[k].vertex[(7-dir)%4][1]; pos3dR[j*12+0] = config->marker[i].pos3d[0][0]; pos3dR[j*12+1] = config->marker[i].pos3d[0][1]; pos3dR[j*12+2] = config->marker[i].pos3d[0][2]; pos3dR[j*12+3] = config->marker[i].pos3d[1][0]; pos3dR[j*12+4] = config->marker[i].pos3d[1][1]; pos3dR[j*12+5] = config->marker[i].pos3d[1][2]; pos3dR[j*12+6] = config->marker[i].pos3d[2][0]; pos3dR[j*12+7] = config->marker[i].pos3d[2][1]; pos3dR[j*12+8] = config->marker[i].pos3d[2][2]; pos3dR[j*12+9] = config->marker[i].pos3d[3][0]; pos3dR[j*12+10] = config->marker[i].pos3d[3][1]; pos3dR[j*12+11] = config->marker[i].pos3d[3][2]; j++; } if( config->prevF == 0 ) { arUtilMatMul( (const ARdouble (*)[4])trans1, (const ARdouble (*)[4])config->marker[max].itrans, trans2 ); if( robustFlag ) { err = arGetTransMatStereo( handle, trans2, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, config->trans ); if( err >= THRESH_2 ) { icpStereoSetInlierProbability( handle->icpStereoHandle, 0.8 ); err = arGetTransMatStereoRobust( handle, trans2, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, config->trans ); if( err >= THRESH_2 ) { icpStereoSetInlierProbability( handle->icpStereoHandle, 0.6 ); err = arGetTransMatStereoRobust( handle, trans2, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, config->trans ); if( err >= THRESH_2 ) { icpStereoSetInlierProbability( handle->icpStereoHandle, 0.4 ); err = arGetTransMatStereoRobust( handle, trans2, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, config->trans ); if( err >= THRESH_2 ) { icpStereoSetInlierProbability( handle->icpStereoHandle, 0.0 ); err = arGetTransMatStereoRobust( handle, trans2, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, 
config->trans ); } } } } } else { err = arGetTransMatStereo( handle, trans2, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, config->trans ); } } else { arUtilMatMul( (const ARdouble (*)[4])trans1, (const ARdouble (*)[4])config->marker[max].itrans, trans2 ); if( robustFlag ) { err2 = arGetTransMatStereo( handle, trans2, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, trans1 ); err = arGetTransMatStereo( handle, config->trans, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, config->trans ); if( err2 < err ) { for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i]; err = err2; } if( err >= THRESH_2 ) { icpStereoSetInlierProbability( handle->icpStereoHandle, 0.8 ); err2 = arGetTransMatStereoRobust( handle, trans2, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, trans1 ); err = arGetTransMatStereoRobust( handle, config->trans, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, config->trans ); if( err2 < err ) { for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i]; err = err2; } if( err >= THRESH_2 ) { icpStereoSetInlierProbability( handle->icpStereoHandle, 0.6 ); err2 = arGetTransMatStereoRobust( handle, trans2, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, trans1 ); err = arGetTransMatStereoRobust( handle, config->trans, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, config->trans ); if( err2 < err ) { for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i]; err = err2; } if( err >= THRESH_2 ) { icpStereoSetInlierProbability( handle->icpStereoHandle, 0.4 ); err2 = arGetTransMatStereoRobust( handle, trans2, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, trans1 ); err = arGetTransMatStereoRobust( handle, config->trans, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, config->trans ); if( err2 < err ) { for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i]; err = err2; } if( err >= THRESH_2 ) { icpStereoSetInlierProbability( handle->icpStereoHandle, 0.0 ); err2 = arGetTransMatStereoRobust( handle, trans2, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, trans1 ); err = arGetTransMatStereoRobust( handle, config->trans, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, config->trans ); if( err2 < err ) { for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i]; err = err2; } } } } } } else { err2 = arGetTransMatStereo( handle, trans2, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, trans1 ); err = arGetTransMatStereo( handle, config->trans, (ARdouble (*)[2])pos2dL, (ARdouble (*)[3])pos3dL, vnumL*4, (ARdouble (*)[2])pos2dR, (ARdouble (*)[3])pos3dR, vnumR*4, config->trans ); if( err2 < err ) { for( j = 0; j < 3; j++ ) for( i = 0; 
i < 4; i++ ) config->trans[j][i] = trans1[j][i]; err = err2; } } } if( vnumL > 0 ) { free(pos3dL); free(pos2dL); } if( vnumR > 0 ) { free(pos3dR); free(pos2dR); } if( err < THRESH_2 ) { config->prevF = 1; } else { config->prevF = 0; } return err; }
static AR2ImageT *ar2GenImageLayer1( ARUint8 *image, int xsize, int ysize, int nc, float srcdpi, float dstdpi )
{
    AR2ImageT *dst;
    ARUint8   *p1, *p2;
    int        wx, wy;
    int        sx, sy, ex, ey;
    int        ii, jj, iii, jjj;
    int        co, value;

    wx = (int)lroundf(xsize * dstdpi / srcdpi);
    wy = (int)lroundf(ysize * dstdpi / srcdpi);

    arMalloc( dst, AR2ImageT, 1 );
    dst->xsize = wx;
    dst->ysize = wy;
    dst->dpi   = dstdpi;

#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    for( int i = 0; i < AR2_BLUR_IMAGE_MAX; i++ ) {
        arMalloc( dst->imgBWBlur[i], ARUint8, wx*wy );
    }
    p2 = dst->imgBWBlur[0];
#else
    arMalloc( dst->imgBW, ARUint8, wx*wy );
    p2 = dst->imgBW;
#endif

    // Scale down by averaging all source pixels that map to each destination pixel (box filter).
    for( jj = 0; jj < wy; jj++ ) {
        sy = (int)lroundf( jj    * srcdpi / dstdpi);
        ey = (int)lroundf((jj+1) * srcdpi / dstdpi) - 1;
        if( ey >= ysize ) ey = ysize - 1;
        for( ii = 0; ii < wx; ii++ ) {
            sx = (int)lroundf( ii    * srcdpi / dstdpi);
            ex = (int)lroundf((ii+1) * srcdpi / dstdpi) - 1;
            if( ex >= xsize ) ex = xsize - 1;
            co = value = 0;
            if( nc == 1 ) {
                for( jjj = sy; jjj <= ey; jjj++ ) {
                    p1 = &(image[(jjj*xsize+sx)*nc]);
                    for( iii = sx; iii <= ex; iii++ ) {
                        value += *(p1++);
                        co++;
                    }
                }
            } else {
                for( jjj = sy; jjj <= ey; jjj++ ) {
                    p1 = &(image[(jjj*xsize+sx)*nc]);
                    for( iii = sx; iii <= ex; iii++ ) {
                        value += *(p1++);
                        value += *(p1++);
                        value += *(p1++);
                        co += 3;
                    }
                }
            }
            *(p2++) = value / co;
        }
    }

#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    for( int i = 1; i < AR2_BLUR_IMAGE_MAX; i++ ) {
        p1 = dst->imgBWBlur[0];
        p2 = dst->imgBWBlur[i];
        for( int j = 0; j < wx*wy; j++ ) *(p2++) = *(p1++);
        defocus_image( dst->imgBWBlur[i], wx, wy, 2 );
    }
#else
    //defocus_image( dst->imgBW, wx, wy, 3 );
#endif

    return dst;
}
int main( int argc, char *argv[] )
{
    AR2JpegImageT   *jpegImage = NULL;
    ARUint8         *image = NULL;
    AR2ImageSetT    *imageSet = NULL;
    AR2FeatureMapT  *featureMap = NULL;
    AR2FeatureSetT  *featureSet = NULL;
    KpmRefDataSet   *refDataSet = NULL;
    float            scale1, scale2;
    int              procMode;
    char             buf[1024];
    int              num;
    int              i, j;
    char            *sep = NULL;
    time_t           clock;
    int              maxFeatureNum;
    int              err;

    for( i = 1; i < argc; i++ ) {
        if( strncmp(argv[i], "-dpi=", 5) == 0 ) {
            if( sscanf(&argv[i][5], "%f", &dpi) != 1 ) usage(argv[0]);
        } else if( strncmp(argv[i], "-sd_thresh=", 11) == 0 ) {
            if( sscanf(&argv[i][11], "%f", &sd_thresh) != 1 ) usage(argv[0]);
        } else if( strncmp(argv[i], "-max_thresh=", 12) == 0 ) {
            if( sscanf(&argv[i][12], "%f", &max_thresh) != 1 ) usage(argv[0]);
        } else if( strncmp(argv[i], "-min_thresh=", 12) == 0 ) {
            if( sscanf(&argv[i][12], "%f", &min_thresh) != 1 ) usage(argv[0]);
        } else if( strncmp(argv[i], "-feature_density=", 17) == 0 ) {
            if( sscanf(&argv[i][17], "%d", &featureDensity) != 1 ) usage(argv[0]);
        } else if( strncmp(argv[i], "-level=", 7) == 0 ) {
            if( sscanf(&argv[i][7], "%d", &tracking_extraction_level) != 1 ) usage(argv[0]);
        } else if( strncmp(argv[i], "-leveli=", 8) == 0 ) {
            if( sscanf(&argv[i][8], "%d", &initialization_extraction_level) != 1 ) usage(argv[0]);
        } else if( strncmp(argv[i], "-max_dpi=", 9) == 0 ) {
            if( sscanf(&argv[i][9], "%f", &dpiMax) != 1 ) usage(argv[0]);
        } else if( strncmp(argv[i], "-min_dpi=", 9) == 0 ) {
            if( sscanf(&argv[i][9], "%f", &dpiMin) != 1 ) usage(argv[0]);
        } else if( strcmp(argv[i], "-background") == 0 ) {
            background = 1;
        } else if( strcmp(argv[i], "-nofset") == 0 ) {
            genfset = 0;
        } else if( strcmp(argv[i], "-fset") == 0 ) {
            genfset = 1;
        } else if( strcmp(argv[i], "-nofset2") == 0 ) {
            ARLOGe("Error: -nofset2 option no longer supported as of ARToolKit v5.3.\n");
            exit(-1);
        } else if( strcmp(argv[i], "-fset2") == 0 ) {
            ARLOGe("Error: -fset2 option no longer supported as of ARToolKit v5.3.\n");
            exit(-1);
        } else if( strcmp(argv[i], "-nofset3") == 0 ) {
            genfset3 = 0;
        } else if( strcmp(argv[i], "-fset3") == 0 ) {
            genfset3 = 1;
        } else if( strncmp(argv[i], "-log=", 5) == 0 ) {
            strncpy(logfile, &(argv[i][5]), sizeof(logfile) - 1);
            logfile[sizeof(logfile) - 1] = '\0'; // Ensure NULL termination.
        } else if( strncmp(argv[i], "-loglevel=", 10) == 0 ) {
            if (strcmp(&(argv[i][10]), "DEBUG") == 0) arLogLevel = AR_LOG_LEVEL_DEBUG;
            else if (strcmp(&(argv[i][10]), "INFO") == 0) arLogLevel = AR_LOG_LEVEL_INFO;
            else if (strcmp(&(argv[i][10]), "WARN") == 0) arLogLevel = AR_LOG_LEVEL_WARN;
            else if (strcmp(&(argv[i][10]), "ERROR") == 0) arLogLevel = AR_LOG_LEVEL_ERROR;
            else usage(argv[0]);
        } else if( strncmp(argv[i], "-exitcode=", 10) == 0 ) {
            strncpy(exitcodefile, &(argv[i][10]), sizeof(exitcodefile) - 1);
            exitcodefile[sizeof(exitcodefile) - 1] = '\0'; // Ensure NULL termination.
        } else if (strcmp(argv[i], "--version") == 0 || strcmp(argv[i], "-version") == 0 || strcmp(argv[i], "-v") == 0) {
            ARLOG("%s version %s\n", argv[0], AR_HEADER_VERSION_STRING);
            exit(0);
        } else if (strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-h") == 0 || strcmp(argv[i], "-?") == 0) {
            usage(argv[0]);
        } else if( filename[0] == '\0' ) {
            strncpy(filename, argv[i], sizeof(filename) - 1);
            filename[sizeof(filename) - 1] = '\0'; // Ensure NULL termination.
        } else {
            ARLOGe("Error: unrecognised option '%s'\n", argv[i]);
            usage(argv[0]);
        }
    }

    // Do some checks on the input.
    if (filename[0] == '\0') {
        ARLOGe("Error: no input file specified. Exiting.\n");
        usage(argv[0]);
    }
    sep = strrchr(filename, '.');
    if (!sep || (strcmp(sep, ".jpeg") && strcmp(sep, ".jpg") && strcmp(sep, ".jpe") && strcmp(sep, ".JPEG") && strcmp(sep, ".JPE") && strcmp(sep, ".JPG"))) {
        ARLOGe("Error: input file must be a JPEG image (with suffix .jpeg/.jpg/.jpe). Exiting.\n");
        usage(argv[0]);
    }

    if (background) {
#if HAVE_DAEMON_FUNC
        if (filename[0] != '/' || logfile[0] != '/' || exitcodefile[0] != '/') {
            ARLOGe("Error: -background flag requires full pathname of files (input, -log or -exitcode) to be specified. Exiting.\n");
            EXIT(E_BAD_PARAMETER);
        }
        if (tracking_extraction_level == -1 && (sd_thresh == -1.0 || min_thresh == -1.0 || max_thresh == -1.0)) {
            ARLOGe("Error: -background flag requires -level or -sd_thresh, -min_thresh and -max_thresh to be set. Exiting.\n");
            EXIT(E_BAD_PARAMETER);
        }
        if (initialization_extraction_level == -1 && (featureDensity == -1)) {
            ARLOGe("Error: -background flag requires -leveli or -surf_thresh to be set. Exiting.\n");
            EXIT(E_BAD_PARAMETER);
        }
        if (dpi == -1.0) {
            ARLOGe("Error: -background flag requires -dpi to be set. Exiting.\n");
            EXIT(E_BAD_PARAMETER);
        }
        if (dpiMin != -1.0f && (dpiMin <= 0.0f || dpiMin > dpi)) {
            ARLOGe("Error: -min_dpi must be greater than 0 and less than or equal to -dpi. Exiting.\n");
            EXIT(E_BAD_PARAMETER);
        }
        if (dpiMax != -1.0f && (dpiMax < dpiMin || dpiMax > dpi)) {
            ARLOGe("Error: -max_dpi must be greater than or equal to -min_dpi and less than or equal to -dpi. Exiting.\n");
            EXIT(E_BAD_PARAMETER);
        }
#else
        ARLOGe("Error: -background flag not supported on this operating system. Exiting.\n");
        exit(E_BACKGROUND_OPERATION_UNSUPPORTED);
#endif
    }

    if (background) {
#if HAVE_DAEMON_FUNC
        // Daemonize.
        if (daemon(0, 0) == -1) {
            perror("Unable to detach from controlling terminal");
            EXIT(E_UNABLE_TO_DETACH_FROM_CONTROLLING_TERMINAL);
        }
        // At this point, stdin, stdout and stderr point to /dev/null.
#endif
    }

    if (logfile[0]) {
        if (!freopen(logfile, "a", stdout) || !freopen(logfile, "a", stderr)) ARLOGe("Unable to redirect stdout or stderr to logfile.\n");
    }
    if (exitcodefile[0]) {
        atexit(write_exitcode);
    }

    // Print the start date and time.
    clock = time(NULL);
    if (clock != (time_t)-1) {
        struct tm *timeptr = localtime(&clock);
        if (timeptr) {
            char stime[26+8] = "";
            if (strftime(stime, sizeof(stime), "%Y-%m-%d %H:%M:%S %z", timeptr)) // e.g. "1999-12-31 23:59:59 NZDT".
ARLOGi("--\nGenerator started at %s\n", stime); } } if (genfset) { if (tracking_extraction_level == -1 && (sd_thresh == -1.0 || min_thresh == -1.0 || max_thresh == -1.0 || occ_size == -1)) { do { printf("Select extraction level for tracking features, 0(few) <--> 4(many), [default=%d]: ", TRACKING_EXTRACTION_LEVEL_DEFAULT); if( fgets(buf, sizeof(buf), stdin) == NULL ) EXIT(E_USER_INPUT_CANCELLED); if (buf[0] == '\n') tracking_extraction_level = TRACKING_EXTRACTION_LEVEL_DEFAULT; else sscanf(buf, "%d", &tracking_extraction_level); } while (tracking_extraction_level < 0 || tracking_extraction_level > 4); } switch (tracking_extraction_level) { case 0: if( sd_thresh == -1.0f ) sd_thresh = AR2_DEFAULT_SD_THRESH_L0; if( min_thresh == -1.0f ) min_thresh = AR2_DEFAULT_MIN_SIM_THRESH_L0; if( max_thresh == -1.0f ) max_thresh = AR2_DEFAULT_MAX_SIM_THRESH_L0; if( occ_size == -1 ) occ_size = AR2_DEFAULT_OCCUPANCY_SIZE; break; case 1: if( sd_thresh == -1.0f ) sd_thresh = AR2_DEFAULT_SD_THRESH_L1; if( min_thresh == -1.0f ) min_thresh = AR2_DEFAULT_MIN_SIM_THRESH_L1; if( max_thresh == -1.0f ) max_thresh = AR2_DEFAULT_MAX_SIM_THRESH_L1; if( occ_size == -1 ) occ_size = AR2_DEFAULT_OCCUPANCY_SIZE; break; case 2: if( sd_thresh == -1.0f ) sd_thresh = AR2_DEFAULT_SD_THRESH_L2; if( min_thresh == -1.0f ) min_thresh = AR2_DEFAULT_MIN_SIM_THRESH_L2; if( max_thresh == -1.0f ) max_thresh = AR2_DEFAULT_MAX_SIM_THRESH_L2; if( occ_size == -1 ) occ_size = AR2_DEFAULT_OCCUPANCY_SIZE*2/3; break; case 3: if( sd_thresh == -1.0f ) sd_thresh = AR2_DEFAULT_SD_THRESH_L3; if( min_thresh == -1.0f ) min_thresh = AR2_DEFAULT_MIN_SIM_THRESH_L3; if( max_thresh == -1.0f ) max_thresh = AR2_DEFAULT_MAX_SIM_THRESH_L3; if( occ_size == -1 ) occ_size = AR2_DEFAULT_OCCUPANCY_SIZE*2/3; break; case 4: // Same as 3, but with smaller AR2_DEFAULT_OCCUPANCY_SIZE. if( sd_thresh == -1.0f ) sd_thresh = AR2_DEFAULT_SD_THRESH_L3; if( min_thresh == -1.0f ) min_thresh = AR2_DEFAULT_MIN_SIM_THRESH_L3; if( max_thresh == -1.0f ) max_thresh = AR2_DEFAULT_MAX_SIM_THRESH_L3; if( occ_size == -1 ) occ_size = AR2_DEFAULT_OCCUPANCY_SIZE*1/2; break; default: // We only get to here if the parameters are already set. 
break; } ARLOGi("MAX_THRESH = %f\n", max_thresh); ARLOGi("MIN_THRESH = %f\n", min_thresh); ARLOGi("SD_THRESH = %f\n", sd_thresh); } if (genfset3) { if (initialization_extraction_level == -1 && featureDensity == -1) { do { printf("Select extraction level for initializing features, 0(few) <--> 3(many), [default=%d]: ", INITIALIZATION_EXTRACTION_LEVEL_DEFAULT); if( fgets(buf,1024,stdin) == NULL ) EXIT(E_USER_INPUT_CANCELLED); if (buf[0] == '\n') initialization_extraction_level = INITIALIZATION_EXTRACTION_LEVEL_DEFAULT; else sscanf(buf, "%d", &initialization_extraction_level); } while (initialization_extraction_level < 0 || initialization_extraction_level > 3); } switch(initialization_extraction_level) { case 0: if( featureDensity == -1 ) featureDensity = KPM_SURF_FEATURE_DENSITY_L0; break; default: case 1: if( featureDensity == -1 ) featureDensity = KPM_SURF_FEATURE_DENSITY_L1; break; case 2: if( featureDensity == -1 ) featureDensity = KPM_SURF_FEATURE_DENSITY_L2; break; case 3: if( featureDensity == -1 ) featureDensity = KPM_SURF_FEATURE_DENSITY_L3; break; } ARLOGi("SURF_FEATURE = %d\n", featureDensity); } if ((err = readImageFromFile(filename, &image, &xsize, &ysize, &nc, &dpi)) != 0) { ARLOGe("Error reading image from file '%s'.\n", filename); EXIT(err); } setDPI(); ARLOGi("Generating ImageSet...\n"); ARLOGi(" (Source image xsize=%d, ysize=%d, channels=%d, dpi=%.1f).\n", xsize, ysize, nc, dpi); imageSet = ar2GenImageSet( image, xsize, ysize, nc, dpi, dpi_list, dpi_num ); ar2FreeJpegImage(&jpegImage); if( imageSet == NULL ) { ARLOGe("ImageSet generation error!!\n"); EXIT(E_DATA_PROCESSING_ERROR); } ARLOGi(" Done.\n"); ar2UtilRemoveExt( filename ); ARLOGi("Saving to %s.iset...\n", filename); if( ar2WriteImageSet( filename, imageSet ) < 0 ) { ARLOGe("Save error: %s.iset\n", filename ); EXIT(E_DATA_PROCESSING_ERROR); } ARLOGi(" Done.\n"); if (genfset) { arMalloc( featureSet, AR2FeatureSetT, 1 ); // A featureSet with a single image, arMalloc( featureSet->list, AR2FeaturePointsT, imageSet->num ); // and with 'num' scale levels of this image. 
featureSet->num = imageSet->num; ARLOGi("Generating FeatureList...\n"); for( i = 0; i < imageSet->num; i++ ) { ARLOGi("Start for %f dpi image.\n", imageSet->scale[i]->dpi); featureMap = ar2GenFeatureMap( imageSet->scale[i], AR2_DEFAULT_TS1*AR2_TEMP_SCALE, AR2_DEFAULT_TS2*AR2_TEMP_SCALE, AR2_DEFAULT_GEN_FEATURE_MAP_SEARCH_SIZE1, AR2_DEFAULT_GEN_FEATURE_MAP_SEARCH_SIZE2, AR2_DEFAULT_MAX_SIM_THRESH2, AR2_DEFAULT_SD_THRESH2 ); if( featureMap == NULL ) { ARLOGe("Error!!\n"); EXIT(E_DATA_PROCESSING_ERROR); } ARLOGi(" Done.\n"); featureSet->list[i].coord = ar2SelectFeature2( imageSet->scale[i], featureMap, AR2_DEFAULT_TS1*AR2_TEMP_SCALE, AR2_DEFAULT_TS2*AR2_TEMP_SCALE, AR2_DEFAULT_GEN_FEATURE_MAP_SEARCH_SIZE2, occ_size, max_thresh, min_thresh, sd_thresh, &num ); if( featureSet->list[i].coord == NULL ) num = 0; featureSet->list[i].num = num; featureSet->list[i].scale = i; scale1 = 0.0f; for( j = 0; j < imageSet->num; j++ ) { if( imageSet->scale[j]->dpi < imageSet->scale[i]->dpi ) { if( imageSet->scale[j]->dpi > scale1 ) scale1 = imageSet->scale[j]->dpi; } } if( scale1 == 0.0f ) { featureSet->list[i].mindpi = imageSet->scale[i]->dpi * 0.5f; } else { /* scale2 = imageSet->scale[i]->dpi; scale = sqrtf( scale1 * scale2 ); featureSet->list[i].mindpi = scale2 / ((scale2/scale - 1.0f)*1.1f + 1.0f); */ featureSet->list[i].mindpi = scale1; } scale1 = 0.0f; for( j = 0; j < imageSet->num; j++ ) { if( imageSet->scale[j]->dpi > imageSet->scale[i]->dpi ) { if( scale1 == 0.0f || imageSet->scale[j]->dpi < scale1 ) scale1 = imageSet->scale[j]->dpi; } } if( scale1 == 0.0f ) { featureSet->list[i].maxdpi = imageSet->scale[i]->dpi * 2.0f; } else { //scale2 = imageSet->scale[i]->dpi * 1.2f; scale2 = imageSet->scale[i]->dpi; /* scale = sqrtf( scale1 * scale2 ); featureSet->list[i].maxdpi = scale2 * ((scale/scale2 - 1.0f)*1.1f + 1.0f); */ featureSet->list[i].maxdpi = scale2*0.8f + scale1*0.2f; } ar2FreeFeatureMap( featureMap ); } ARLOGi(" Done.\n"); ARLOGi("Saving FeatureSet...\n"); if( ar2SaveFeatureSet( filename, "fset", featureSet ) < 0 ) { ARLOGe("Save error: %s.fset\n", filename ); EXIT(E_DATA_PROCESSING_ERROR); } ARLOGi(" Done.\n"); ar2FreeFeatureSet( &featureSet ); } if (genfset3) { ARLOGi("Generating FeatureSet3...\n"); refDataSet = NULL; procMode = KpmProcFullSize; for( i = 0; i < imageSet->num; i++ ) { //if( imageSet->scale[i]->dpi > 100.0f ) continue; maxFeatureNum = featureDensity * imageSet->scale[i]->xsize * imageSet->scale[i]->ysize / (480*360); ARLOGi("(%d, %d) %f[dpi]\n", imageSet->scale[i]->xsize, imageSet->scale[i]->ysize, imageSet->scale[i]->dpi); if( kpmAddRefDataSet ( #if AR2_CAPABLE_ADAPTIVE_TEMPLATE imageSet->scale[i]->imgBWBlur[1], #else imageSet->scale[i]->imgBW, #endif AR_PIXEL_FORMAT_MONO, imageSet->scale[i]->xsize, imageSet->scale[i]->ysize, imageSet->scale[i]->dpi, procMode, KpmCompNull, maxFeatureNum, 1, i, &refDataSet) < 0 ) { // Page number set to 1 by default. ARLOGe("Error at kpmAddRefDataSet.\n"); EXIT(E_DATA_PROCESSING_ERROR); } } ARLOGi(" Done.\n"); ARLOGi("Saving FeatureSet3...\n"); if( kpmSaveRefDataSet(filename, "fset3", refDataSet) != 0 ) { ARLOGe("Save error: %s.fset2\n", filename ); EXIT(E_DATA_PROCESSING_ERROR); } ARLOGi(" Done.\n"); kpmDeleteRefDataSet( &refDataSet ); } ar2FreeImageSet( &imageSet ); // Print the start date and time. clock = time(NULL); if (clock != (time_t)-1) { struct tm *timeptr = localtime(&clock); if (timeptr) { char stime[26+8] = ""; if (strftime(stime, sizeof(stime), "%Y-%m-%d %H:%M:%S %z", timeptr)) // e.g. "1999-12-31 23:59:59 NZDT". 
ARLOGi("Generator finished at %s\n--\n", stime); } } exitcode = E_NO_ERROR; return (exitcode); }
static ARdouble arGetTransMatMultiSquare2(AR3DHandle *handle, ARMarkerInfo *marker_info, int marker_num, ARMultiMarkerInfoT *config, int robustFlag) { ARdouble *pos2d, *pos3d; ARdouble trans1[3][4], trans2[3][4]; ARdouble err, err2; int max, maxArea; int vnum; int dir; int i, j, k; //char mes[12]; //ARLOG("-- Pass1--\n"); for( i = 0; i < config->marker_num; i++ ) { k = -1; if( config->marker[i].patt_type == AR_MULTI_PATTERN_TYPE_TEMPLATE ) { for( j = 0; j < marker_num; j++ ) { if( marker_info[j].idPatt != config->marker[i].patt_id ) continue; if( marker_info[j].cfPatt < config->cfPattCutoff ) continue; if( k == -1 ) k = j; else if( marker_info[k].cfPatt < marker_info[j].cfPatt ) k = j; } config->marker[i].visible = k; if( k >= 0 ) marker_info[k].dir = marker_info[k].dirPatt; } else { // config->marker[i].patt_type == AR_MULTI_PATTERN_TYPE_MATRIX for( j = 0; j < marker_num; j++ ) { // Check if we need to examine the globalID rather than patt_id. if (marker_info[j].idMatrix == 0 && marker_info[j].globalID != 0ULL) { if( marker_info[j].globalID != config->marker[i].globalID ) continue; } else { if( marker_info[j].idMatrix != config->marker[i].patt_id ) continue; } if( marker_info[j].cfMatrix < config->cfMatrixCutoff ) continue; if( k == -1 ) k = j; else if( marker_info[k].cfMatrix < marker_info[j].cfMatrix ) k = j; } config->marker[i].visible = k; if( k >= 0 ) marker_info[k].dir = marker_info[k].dirMatrix; } //if(k>=0) ARLOG(" *%d\n",i); } //ARLOG("-- Pass2--\n"); vnum = 0; for( i = 0; i < config->marker_num; i++ ) { if( (j=config->marker[i].visible) < 0 ) continue; //glColor3f( 1.0, 1.0, 0.0 ); //sprintf(mes,"%d",i); //argDrawStringsByIdealPos( mes, marker_info[j].pos[0], marker_info[j].pos[1] ); err = arGetTransMatSquare(handle, &marker_info[j], config->marker[i].width, trans2); //ARLOG(" [%d:dir=%d] err = %f (%f,%f,%f)\n", i, marker_info[j].dir, err, trans2[0][3], trans2[1][3], trans2[2][3]); if( err > AR_MULTI_POSE_ERROR_CUTOFF_EACH_DEFAULT ) { config->marker[i].visible = -1; if (marker_info[j].cutoffPhase == AR_MARKER_INFO_CUTOFF_PHASE_NONE) marker_info[j].cutoffPhase = AR_MARKER_INFO_CUTOFF_PHASE_POSE_ERROR; continue; } //ARLOG(" *%d\n",i); // Use the largest (in terms of 2D coordinates) marker's pose estimate as the // input for the initial estimate for the pose estimator. 
if( vnum == 0 || maxArea < marker_info[j].area ) { maxArea = marker_info[j].area; max = i; for( j = 0; j < 3; j++ ) { for( k = 0; k < 4; k++ ) trans1[j][k] = trans2[j][k]; } } vnum++; } if( vnum == 0 || vnum < config->min_submarker) { config->prevF = 0; return -1; } arUtilMatMul( (const ARdouble (*)[4])trans1, (const ARdouble (*)[4])config->marker[max].itrans, trans2 ); arMalloc(pos2d, ARdouble, vnum*4*2); arMalloc(pos3d, ARdouble, vnum*4*3); j = 0; for( i = 0; i < config->marker_num; i++ ) { if( (k=config->marker[i].visible) < 0 ) continue; dir = marker_info[k].dir; pos2d[j*8+0] = marker_info[k].vertex[(4-dir)%4][0]; pos2d[j*8+1] = marker_info[k].vertex[(4-dir)%4][1]; pos2d[j*8+2] = marker_info[k].vertex[(5-dir)%4][0]; pos2d[j*8+3] = marker_info[k].vertex[(5-dir)%4][1]; pos2d[j*8+4] = marker_info[k].vertex[(6-dir)%4][0]; pos2d[j*8+5] = marker_info[k].vertex[(6-dir)%4][1]; pos2d[j*8+6] = marker_info[k].vertex[(7-dir)%4][0]; pos2d[j*8+7] = marker_info[k].vertex[(7-dir)%4][1]; pos3d[j*12+0] = config->marker[i].pos3d[0][0]; pos3d[j*12+1] = config->marker[i].pos3d[0][1]; pos3d[j*12+2] = config->marker[i].pos3d[0][2]; pos3d[j*12+3] = config->marker[i].pos3d[1][0]; pos3d[j*12+4] = config->marker[i].pos3d[1][1]; pos3d[j*12+5] = config->marker[i].pos3d[1][2]; pos3d[j*12+6] = config->marker[i].pos3d[2][0]; pos3d[j*12+7] = config->marker[i].pos3d[2][1]; pos3d[j*12+8] = config->marker[i].pos3d[2][2]; pos3d[j*12+9] = config->marker[i].pos3d[3][0]; pos3d[j*12+10] = config->marker[i].pos3d[3][1]; pos3d[j*12+11] = config->marker[i].pos3d[3][2]; j++; } if( config->prevF == 0 ) { if( robustFlag ) { err = arGetTransMat( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans ); if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) { icpSetInlierProbability( handle->icpHandle, 0.8 ); err = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans ); if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) { icpSetInlierProbability( handle->icpHandle, 0.6 ); err = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans ); if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) { icpSetInlierProbability( handle->icpHandle, 0.4 ); err = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans ); if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) { icpSetInlierProbability( handle->icpHandle, 0.0 ); err = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans ); } } } } } else { err = arGetTransMat( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans ); } free(pos3d); free(pos2d); } else { if( robustFlag ) { err2 = arGetTransMat( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, trans1 ); err = arGetTransMat( handle, config->trans, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans ); if( err2 < err ) { for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i]; err = err2; } if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) { icpSetInlierProbability( handle->icpHandle, 0.8 ); err2 = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, trans1 ); err = arGetTransMatRobust( handle, config->trans, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans ); if( err2 < err ) { for( j = 0; j < 3; j++ ) for( i = 0; i < 4; 
i++ ) config->trans[j][i] = trans1[j][i]; err = err2; } if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) { icpSetInlierProbability( handle->icpHandle, 0.6 ); err2 = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, trans1 ); err = arGetTransMatRobust( handle, config->trans, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans ); if( err2 < err ) { for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i]; err = err2; } if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) { icpSetInlierProbability( handle->icpHandle, 0.4 ); err2 = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, trans1 ); err = arGetTransMatRobust( handle, config->trans, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans ); if( err2 < err ) { for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i]; err = err2; } if( err >= AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT ) { icpSetInlierProbability( handle->icpHandle, 0.0 ); err2 = arGetTransMatRobust( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, trans1 ); err = arGetTransMatRobust( handle, config->trans, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans ); if( err2 < err ) { for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i]; err = err2; } } } } } } else { err2 = arGetTransMat( handle, trans2, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, trans1 ); err = arGetTransMat( handle, config->trans, (ARdouble (*)[2])pos2d, (ARdouble (*)[3])pos3d, vnum*4, config->trans ); if( err2 < err ) { for( j = 0; j < 3; j++ ) for( i = 0; i < 4; i++ ) config->trans[j][i] = trans1[j][i]; err = err2; } } free(pos3d); free(pos2d); } if (err < AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT) config->prevF = 1; else { config->prevF = 0; for (i = 0; i < config->marker_num; i++) { if ((k = config->marker[i].visible) < 0) continue; if (marker_info[k].cutoffPhase == AR_MARKER_INFO_CUTOFF_PHASE_NONE) marker_info[k].cutoffPhase = AR_MARKER_INFO_CUTOFF_PHASE_POSE_ERROR_MULTI; } } return err; }
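/*
 * Sketch (not part of the library): the nested retries above implement a fixed
 * schedule of ICP inlier probabilities (0.8, 0.6, 0.4, 0.0), falling back to a more
 * tolerant robust fit whenever the pose error stays above the cutoff. The same
 * behaviour for the simple (prevF == 0) robust case can be written as a loop; the
 * handle, point arrays and cutoff constant are assumed to be those used above.
 */
static const ARdouble kInlierProb[] = { 0.8, 0.6, 0.4, 0.0 };

static ARdouble estimatePoseWithFallback(AR3DHandle *handle, ARdouble initConv[3][4],
                                         ARdouble (*pos2d)[2], ARdouble (*pos3d)[3],
                                         int num, ARdouble conv[3][4])
{
    ARdouble err = arGetTransMat(handle, initConv, pos2d, pos3d, num, conv);
    int      i;

    for (i = 0; i < (int)(sizeof(kInlierProb)/sizeof(kInlierProb[0])); i++) {
        if (err < AR_MULTI_POSE_ERROR_CUTOFF_COMBINED_DEFAULT) break;   // Good enough; stop retrying.
        icpSetInlierProbability(handle->icpHandle, kInlierProb[i]);
        err = arGetTransMatRobust(handle, initConv, pos2d, pos3d, num, conv);
    }
    return err;
}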
AR2SurfaceSetT *ar2ReadSurfaceSet( const char *filename, const char *ext, ARPattHandle *pattHandle ) { AR2SurfaceSetT *surfaceSet; FILE *fp = NULL; int readMode; char buf[256], name[256]; int i, j, k; if( ext == NULL || *ext == '\0' || strcmp(ext,"fset") == 0 ) { strncpy(name, filename, sizeof(name) - 1); name[sizeof(name) - 1] = '\0'; readMode = 0; } else { char namebuf[512]; sprintf(namebuf, "%s.%s", filename, ext); if ((fp = fopen(namebuf,"r")) == NULL) { ARLOGe("Error opening file '%s': ", filename); ARLOGperror(NULL); return (NULL); } readMode = 1; } arMalloc(surfaceSet, AR2SurfaceSetT, 1); if( readMode ) { if( get_buff(buf, 256, fp) == NULL ) { fclose(fp); free(surfaceSet); return (NULL); } if( sscanf(buf, "%d", &i) != 1 ) { fclose(fp); free(surfaceSet); return (NULL); } if( i < 1 ) { fclose(fp); free(surfaceSet); return (NULL); } surfaceSet->num = i; surfaceSet->contNum = 0; } else { surfaceSet->num = 1; surfaceSet->contNum = 0; } arMalloc(surfaceSet->surface, AR2SurfaceT, surfaceSet->num); for( i = 0; i < surfaceSet->num; i++ ) { ARLOGi("\n### Surface No.%d ###\n", i+1); if( readMode ) { if( get_buff(buf, 256, fp) == NULL ) break; if( sscanf(buf, "%s", name) != 1 ) break; ar2UtilRemoveExt( name ); } ARLOGi(" Read ImageSet.\n"); surfaceSet->surface[i].imageSet = ar2ReadImageSet( name ); if( surfaceSet->surface[i].imageSet == NULL ) { ARLOGe("Error opening file '%s.iset'.\n", name); free(surfaceSet->surface); free(surfaceSet); if (fp) fclose(fp); //COVHI10426 return (NULL); } ARLOGi(" end.\n"); ARLOGi(" Read FeatureSet.\n"); surfaceSet->surface[i].featureSet = ar2ReadFeatureSet( name, "fset" ); if( surfaceSet->surface[i].featureSet == NULL ) { ARLOGe("Error opening file '%s.fset'.\n", name); ar2FreeImageSet(&surfaceSet->surface[i].imageSet); free(surfaceSet->surface); free(surfaceSet); if (fp) fclose(fp); //COVHI10426 return (NULL); } ARLOGi(" end.\n"); if (pattHandle) { ARLOGi(" Read MarkerSet.\n"); ar2UtilRemoveExt( name ); surfaceSet->surface[i].markerSet = ar2ReadMarkerSet( name, "mrk", pattHandle ); if( surfaceSet->surface[i].markerSet == NULL ) { ARLOGe("Error opening file '%s.mrk'.\n", name); ar2FreeFeatureSet(&surfaceSet->surface[i].featureSet); ar2FreeImageSet(&surfaceSet->surface[i].imageSet); free(surfaceSet->surface); free(surfaceSet); if (fp) fclose(fp); //COVHI10426 return (NULL); } ARLOGi(" end.\n"); } else { surfaceSet->surface[i].markerSet = NULL; } if (readMode) { if( get_buff(buf, 256, fp) == NULL ) break; if( sscanf(buf, "%f %f %f %f", &(surfaceSet->surface[i].trans[0][0]), &(surfaceSet->surface[i].trans[0][1]), &(surfaceSet->surface[i].trans[0][2]), &(surfaceSet->surface[i].trans[0][3])) != 4 ) { ARLOGe("Transformation matrix read error!!\n"); fclose(fp); exit(0); } if( get_buff(buf, 256, fp) == NULL ) break; if( sscanf(buf, "%f %f %f %f", &(surfaceSet->surface[i].trans[1][0]), &(surfaceSet->surface[i].trans[1][1]), &(surfaceSet->surface[i].trans[1][2]), &(surfaceSet->surface[i].trans[1][3])) != 4 ) { ARLOGe("Transformation matrix read error!!\n"); fclose(fp); exit(0); } if( get_buff(buf, 256, fp) == NULL ) break; if( sscanf(buf, "%f %f %f %f", &(surfaceSet->surface[i].trans[2][0]), &(surfaceSet->surface[i].trans[2][1]), &(surfaceSet->surface[i].trans[2][2]), &(surfaceSet->surface[i].trans[2][3])) != 4 ) { ARLOGe("Transformation matrix read error!!\n"); fclose(fp); exit(0); } } else { for( j = 0; j < 3; j++ ) { for( k = 0; k < 4; k++ ) { surfaceSet->surface[i].trans[j][k] = (j == k)? 
1.0f: 0.0f; } } } arUtilMatInvf( (const float (*)[4])surfaceSet->surface[i].trans, surfaceSet->surface[i].itrans ); ar2UtilReplaceExt( name, 256, "jpg"); arMalloc( surfaceSet->surface[i].jpegName, char, 256); strncpy( surfaceSet->surface[i].jpegName, name, 256 ); } if (fp) fclose(fp); //COVHI10459 if (i < surfaceSet->num) exit(0); return surfaceSet; }
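/*
 * Usage sketch (illustrative): loading a single NFT surface by its basename.
 * Passing NULL (or "fset") as ext takes the single-surface path above, which reads
 * "<name>.iset" and "<name>.fset" directly; passing an ARPattHandle would also load
 * "<name>.mrk". ar2FreeSurfaceSet() is assumed to be the matching deallocator in
 * the AR2 tracking API.
 */
static AR2SurfaceSetT *loadSurface(const char *basename)
{
    AR2SurfaceSetT *surfaceSet = ar2ReadSurfaceSet(basename, NULL, NULL);
    if (surfaceSet == NULL) {
        ARLOGe("Could not load NFT data set '%s'.\n", basename);
    }
    return surfaceSet;   // Caller eventually frees it with ar2FreeSurfaceSet(&surfaceSet).
}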
AR2VideoParamT *ar2VideoOpen( char *config_in ) { AR2VideoParamT *vid; struct video_capability vd; struct video_channel vc[MAXCHANNEL]; struct video_picture vp; char *config, *a, line[256]; int i; int adjust = 1; /* If no config string is supplied, we should use the environment variable, otherwise set a sane default */ if (!config_in || !(config_in[0])) { /* None suppplied, lets see if the user supplied one from the shell */ char *envconf = getenv ("ARTOOLKIT_CONFIG"); if (envconf && envconf[0]) { config = envconf; printf ("Using config string from environment [%s].\n", envconf); } else { config = NULL; printf ("No video config string supplied, using defaults.\n"); } } else { config = config_in; printf ("Using supplied video config string [%s].\n", config_in); } arMalloc( vid, AR2VideoParamT, 1 ); strcpy( vid->dev, DEFAULT_VIDEO_DEVICE ); vid->channel = DEFAULT_VIDEO_CHANNEL; vid->width = DEFAULT_VIDEO_WIDTH; vid->height = DEFAULT_VIDEO_HEIGHT; #if (AR_DEFAULT_PIXEL_FORMAT == AR_PIXEL_FORMAT_BGRA) vid->palette = VIDEO_PALETTE_RGB32; /* palette format */ #elif (AR_DEFAULT_PIXEL_FORMAT == AR_PIXEL_FORMAT_BGR) || (AR_DEFAULT_PIXEL_FORMAT == AR_PIXEL_FORMAT_RGB) vid->palette = VIDEO_PALETTE_RGB24; /* palette format */ #endif vid->contrast = -1.; vid->brightness = -1.; vid->saturation = -1.; vid->hue = -1.; vid->whiteness = -1.; vid->mode = DEFAULT_VIDEO_MODE; vid->debug = 0; vid->videoBuffer=NULL; a = config; if( a != NULL) { for(;;) { while( *a == ' ' || *a == '\t' ) a++; if( *a == '\0' ) break; if( strncmp( a, "-dev=", 5 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[5], "%s", vid->dev ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-channel=", 9 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[9], "%d", &vid->channel ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-width=", 7 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[7], "%d", &vid->width ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-height=", 8 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[8], "%d", &vid->height ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-palette=", 9 ) == 0 ) { if( strncmp( &a[9], "RGB", 3) == 0 ) { #if (AR_DEFAULT_PIXEL_FORMAT == AR_PIXEL_FORMAT_BGRA) vid->palette = VIDEO_PALETTE_RGB32; /* palette format */ #elif (AR_DEFAULT_PIXEL_FORMAT == AR_PIXEL_FORMAT_BGR)|| (AR_DEFAULT_PIXEL_FORMAT == AR_PIXEL_FORMAT_RGB) vid->palette = VIDEO_PALETTE_RGB24; /* palette format */ #endif } else if( strncmp( &a[9], "YUV420P", 7 ) == 0 ) { vid->palette = VIDEO_PALETTE_YUV420P; } } else if( strncmp ( a, "-noadjust", 9 ) == 0 ) { adjust = 0; } else if( strncmp( a, "-contrast=", 10 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[10], "%lf", &vid->contrast ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-brightness=", 12 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[12], "%lf", &vid->brightness ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-saturation=", 12 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[12], "%lf", &vid->saturation ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-hue=", 5 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[5], "%lf", &vid->hue ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-whiteness=", 11 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[11], "%lf", &vid->whiteness ) == 
0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-color=", 7 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[7], "%lf", &vid->saturation ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-mode=", 6 ) == 0 ) { if( strncmp( &a[6], "PAL", 3 ) == 0 ) vid->mode = VIDEO_MODE_PAL; else if( strncmp( &a[6], "NTSC", 4 ) == 0 ) vid->mode = VIDEO_MODE_NTSC; else if( strncmp( &a[6], "SECAM", 5 ) == 0 ) vid->mode = VIDEO_MODE_SECAM; else { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-debug", 6 ) == 0 ) { vid->debug = 1; } else { ar2VideoDispOption(); free( vid ); return 0; } while( *a != ' ' && *a != '\t' && *a != '\0') a++; } } vid->fd = open(vid->dev, O_RDWR);// O_RDONLY ? if(vid->fd < 0){ printf("video device (%s) open failed\n",vid->dev); free( vid ); return 0; } if(ioctl(vid->fd,VIDIOCGCAP,&vd) < 0){ printf("ioctl failed\n"); free( vid ); return 0; } if( vid->debug ) { printf("=== debug info ===\n"); printf(" vd.name = %s\n",vd.name); printf(" vd.channels = %d\n",vd.channels); printf(" vd.maxwidth = %d\n",vd.maxwidth); printf(" vd.maxheight = %d\n",vd.maxheight); printf(" vd.minwidth = %d\n",vd.minwidth); printf(" vd.minheight = %d\n",vd.minheight); } /* adjust capture size if needed */ if (adjust) { if (vid->width >= vd.maxwidth) vid->width = vd.maxwidth; if (vid->height >= vd.maxheight) vid->height = vd.maxheight; if (vid->debug) printf ("arVideoOpen: width/height adjusted to (%d, %d)\n", vid->width, vid->height); } /* check capture size */ if(vd.maxwidth < vid->width || vid->width < vd.minwidth || vd.maxheight < vid->height || vid->height < vd.minheight ) { printf("arVideoOpen: width or height oversize \n"); free( vid ); return 0; } /* adjust channel if needed */ if (adjust) { if (vid->channel >= vd.channels) vid->channel = 0; if (vid->debug) printf ("arVideoOpen: channel adjusted to 0\n"); } /* check channel */ if(vid->channel < 0 || vid->channel >= vd.channels){ printf("arVideoOpen: channel# is not valid. 
\n"); free( vid ); return 0; } if( vid->debug ) { printf("==== capture device channel info ===\n"); } for(i = 0;i < vd.channels && i < MAXCHANNEL; i++){ vc[i].channel = i; if(ioctl(vid->fd,VIDIOCGCHAN,&vc[i]) < 0){ printf("error: acquireing channel(%d) info\n",i); free( vid ); return 0; } if( vid->debug ) { printf(" channel = %d\n", vc[i].channel); printf(" name = %s\n", vc[i].name); printf(" tuners = %d", vc[i].tuners); printf(" flag = 0x%08x",vc[i].flags); if(vc[i].flags & VIDEO_VC_TUNER) printf(" TUNER"); if(vc[i].flags & VIDEO_VC_AUDIO) printf(" AUDIO"); printf("\n"); printf(" vc[%d].type = 0x%08x", i, vc[i].type); if(vc[i].type & VIDEO_TYPE_TV) printf(" TV"); if(vc[i].type & VIDEO_TYPE_CAMERA) printf(" CAMERA"); printf("\n"); } } /* select channel */ vc[vid->channel].norm = vid->mode; /* 0: PAL 1: NTSC 2:SECAM 3:AUTO */ if(ioctl(vid->fd, VIDIOCSCHAN, &vc[vid->channel]) < 0){ printf("error: selecting channel %d\n", vid->channel); free( vid ); return 0; } if(ioctl(vid->fd, VIDIOCGPICT, &vp)) { printf("error: getting palette\n"); free( vid ); return 0; } if( vid->debug ) { printf("=== debug info ===\n"); printf(" vp.brightness= %d\n",vp.brightness); printf(" vp.hue = %d\n",vp.hue); printf(" vp.colour = %d\n",vp.colour); printf(" vp.contrast = %d\n",vp.contrast); printf(" vp.whiteness = %d\n",vp.whiteness); printf(" vp.depth = %d\n",vp.depth); printf(" vp.palette = %d\n",vp.palette); } /* set video picture */ if ((vid->brightness+1.)>0.001) vp.brightness = 32767 * 2.0 *vid->brightness; if ((vid->contrast+1.)>0.001) vp.contrast = 32767 * 2.0 *vid->contrast; if ((vid->hue+1.)>0.001) vp.hue = 32767 * 2.0 *vid->hue; if ((vid->whiteness+1.)>0.001) vp.whiteness = 32767 * 2.0 *vid->whiteness; if ((vid->saturation+1.)>0.001) vp.colour = 32767 * 2.0 *vid->saturation; vp.depth = 24; vp.palette = vid->palette; if(ioctl(vid->fd, VIDIOCSPICT, &vp)) { printf("error: setting configuration !! bad palette mode..\n TIPS:try other palette mode (or with new failure contact ARToolKit Developer)\n"); free( vid ); return 0; } if (vid->palette==VIDEO_PALETTE_YUV420P) arMalloc( vid->videoBuffer, ARUint8, vid->width*vid->height*3 ); if( vid->debug ) { if(ioctl(vid->fd, VIDIOCGPICT, &vp)) { printf("error: getting palette\n"); free( vid ); return 0; } printf("=== debug info ===\n"); printf(" vp.brightness= %d\n",vp.brightness); printf(" vp.hue = %d\n",vp.hue); printf(" vp.colour = %d\n",vp.colour); printf(" vp.contrast = %d\n",vp.contrast); printf(" vp.whiteness = %d\n",vp.whiteness); printf(" vp.depth = %d\n",vp.depth); printf(" vp.palette = %d\n",vp.palette); } /* get mmap info */ if(ioctl(vid->fd,VIDIOCGMBUF,&vid->vm) < 0){ printf("error: videocgmbuf\n"); free( vid ); return 0; } if( vid->debug ) { printf("===== Image Buffer Info =====\n"); printf(" size = %d[bytes]\n", vid->vm.size); printf(" frames = %d\n", vid->vm.frames); } if(vid->vm.frames < 2){ printf("this device can not be supported by libARvideo.\n"); printf("(vm.frames < 2)\n"); free( vid ); return 0; } /* get memory mapped io */ if((vid->map = (ARUint8 *)mmap(0, vid->vm.size, PROT_READ|PROT_WRITE, MAP_SHARED, vid->fd, 0)) < 0){ printf("error: mmap\n"); free( vid ); return 0; } /* setup for vmm */ vid->vmm.frame = 0; vid->vmm.width = vid->width; vid->vmm.height = vid->height; vid->vmm.format= vid->palette; vid->video_cont_num = -1; #ifdef USE_EYETOY JPEGToRGBInit(vid->width,vid->height); #endif return vid; }
static void init(int argc, char *argv[]) { ARGViewport viewport; char *vconf = NULL; int i; int gotTwoPartOption; int screenWidth, screenHeight, screenMargin; chessboardCornerNumX = 0; chessboardCornerNumY = 0; calibImageNum = 0; patternWidth = 0.0f; arMalloc(cwd, char, MAXPATHLEN); if (!getcwd(cwd, MAXPATHLEN)) ARLOGe("Unable to read current working directory.\n"); else ARLOG("Current working directory is '%s'\n", cwd); i = 1; // argv[0] is name of app, so start at 1. while (i < argc) { gotTwoPartOption = FALSE; // Look for two-part options first. if ((i + 1) < argc) { if (strcmp(argv[i], "--vconf") == 0) { i++; vconf = argv[i]; gotTwoPartOption = TRUE; } } if (!gotTwoPartOption) { // Look for single-part options. if (strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-help") == 0 || strcmp(argv[i], "-h") == 0) { usage(argv[0]); } else if (strcmp(argv[i], "--version") == 0 || strcmp(argv[i], "-version") == 0 || strcmp(argv[i], "-v") == 0) { ARLOG("%s version %s\n", argv[0], AR_HEADER_VERSION_STRING); exit(0); } else if (strncmp(argv[i], "-cornerx=", 9) == 0) { if (sscanf(&(argv[i][9]), "%d", &chessboardCornerNumX) != 1) usage(argv[0]); if (chessboardCornerNumX <= 0) usage(argv[0]); } else if (strncmp(argv[i], "-cornery=", 9) == 0) { if (sscanf(&(argv[i][9]), "%d", &chessboardCornerNumY) != 1) usage(argv[0]); if (chessboardCornerNumY <= 0) usage(argv[0]); } else if (strncmp(argv[i], "-imagenum=", 10) == 0) { if (sscanf(&(argv[i][10]), "%d", &calibImageNum) != 1) usage(argv[0]); if (calibImageNum <= 0) usage(argv[0]); } else if (strncmp(argv[i], "-pattwidth=", 11) == 0) { if (sscanf(&(argv[i][11]), "%f", &patternWidth) != 1) usage(argv[0]); if (patternWidth <= 0) usage(argv[0]); } else { ARLOGe("Error: invalid command line argument '%s'.\n", argv[i]); usage(argv[0]); } } i++; } if (chessboardCornerNumX == 0) chessboardCornerNumX = CHESSBOARD_CORNER_NUM_X; if (chessboardCornerNumY == 0) chessboardCornerNumY = CHESSBOARD_CORNER_NUM_Y; if (calibImageNum == 0) calibImageNum = CALIB_IMAGE_NUM; if (patternWidth == 0.0f) patternWidth = (float)CHESSBOARD_PATTERN_WIDTH; ARLOG("CHESSBOARD_CORNER_NUM_X = %d\n", chessboardCornerNumX); ARLOG("CHESSBOARD_CORNER_NUM_Y = %d\n", chessboardCornerNumY); ARLOG("CHESSBOARD_PATTERN_WIDTH = %f\n", patternWidth); ARLOG("CALIB_IMAGE_NUM = %d\n", calibImageNum); ARLOG("Video parameter: %s\n", vconf); if (arVideoOpen(vconf) < 0) exit(0); if (arVideoGetSize(&xsize, &ysize) < 0) exit(0); ARLOG("Image size (x,y) = (%d,%d)\n", xsize, ysize); if ((pixFormat = arVideoGetPixelFormat()) == AR_PIXEL_FORMAT_INVALID) exit(0); screenWidth = glutGet(GLUT_SCREEN_WIDTH); screenHeight = glutGet(GLUT_SCREEN_HEIGHT); if (screenWidth > 0 && screenHeight > 0) { screenMargin = (int)(MAX(screenWidth, screenHeight) * SCREEN_SIZE_MARGIN); if ((screenWidth - screenMargin) < xsize || (screenHeight - screenMargin) < ysize) { viewport.xsize = screenWidth - screenMargin; viewport.ysize = screenHeight - screenMargin; ARLOG("Scaling window to fit onto %dx%d screen (with %2.0f%% margin).\n", screenWidth, screenHeight, SCREEN_SIZE_MARGIN * 100.0); } else { viewport.xsize = xsize; viewport.ysize = ysize; } } else { viewport.xsize = xsize; viewport.ysize = ysize; } viewport.sx = 0; viewport.sy = 0; if ((vp = argCreateViewport(&viewport)) == NULL) exit(0); argViewportSetImageSize(vp, xsize, ysize); argViewportSetPixFormat(vp, pixFormat); argViewportSetDispMethod(vp, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME); argViewportSetDistortionMode(vp, AR_GL_DISTORTION_COMPENSATE_DISABLE); 
argViewportSetDispMode(vp, AR_GL_DISP_MODE_FIT_TO_VIEWPORT_KEEP_ASPECT_RATIO); // Set up the grayscale image. arIPI = arImageProcInit(xsize, ysize, pixFormat, 1); // 1 -> always copy, since we need OpenCV to be able to wrap the memory. if (!arIPI) { ARLOGe("Error initialising image processing.\n"); exit(-1); } calibImage = cvCreateImageHeader(cvSize(xsize, ysize), IPL_DEPTH_8U, 1); cvSetData(calibImage, arIPI->image, xsize); // Last parameter is rowBytes. // Allocate space for results. arMalloc(corners, CvPoint2D32f, chessboardCornerNumX * chessboardCornerNumY); arMalloc(cornerSet, CvPoint2D32f, chessboardCornerNumX * chessboardCornerNumY * calibImageNum); }
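/*
 * Hedged sketch (not from the original file): once init() has wrapped the grayscale buffer in calibImage and
 * allocated `corners`, each captured frame can be checked for the chessboard with the OpenCV 1.x C API already
 * used above.  The helper name findCornersInCurrentFrame() is hypothetical; only the globals set up in init()
 * and standard OpenCV calls are assumed.
 */
static int findCornersInCurrentFrame(void)
{
    int cornerCount = 0;
    int found = cvFindChessboardCorners(calibImage,
                                        cvSize(chessboardCornerNumX, chessboardCornerNumY),
                                        corners, &cornerCount,
                                        CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_FILTER_QUADS);
    if (found) {
        /* Refine the detected corners to sub-pixel accuracy before keeping them. */
        cvFindCornerSubPix(calibImage, corners, cornerCount, cvSize(5, 5), cvSize(-1, -1),
                           cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 100, 0.1));
    }
    return found;
}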
AR2ImageSetT *ar2ReadImageSet( char *filename ) { FILE *fp; AR2JpegImageT *jpgImage; AR2ImageSetT *imageSet; float dpi; int i, k1; #if AR2_CAPABLE_ADAPTIVE_TEMPLATE int j, k2; ARUint8 *p1, *p2; #endif size_t len; const char ext[] = ".iset"; char *buf; len = strlen(filename) + strlen(ext) + 1; // +1 for nul terminator. arMalloc(buf, char, len); sprintf(buf, "%s%s", filename, ext); fp = fopen(buf, "rb"); free(buf); if (!fp) { ARLOGe("Error: unable to open file '%s%s' for reading.\n", filename, ext); return (NULL); } arMalloc( imageSet, AR2ImageSetT, 1 ); if( fread(&(imageSet->num), sizeof(imageSet->num), 1, fp) != 1 || imageSet->num <= 0) { ARLOGe("Error reading imageSet.\n"); goto bail; } ARLOGi("Imageset contains %d images.\n", imageSet->num); arMalloc( imageSet->scale, AR2ImageT*, imageSet->num ); arMalloc( imageSet->scale[0], AR2ImageT, 1 ); jpgImage = ar2ReadJpegImage2(fp); // Caller must free result. if( jpgImage == NULL || jpgImage->nc != 1 ) { ARLOGw("Falling back to reading '%s%s' in ARToolKit v4.x format.\n", filename, ext); free(imageSet->scale[0]); free(imageSet->scale); free(imageSet); if( jpgImage == NULL ) { rewind(fp); return ar2ReadImageSetOld(fp); } free(jpgImage); //COVHI10396 fclose(fp); return NULL; } imageSet->scale[0]->xsize = jpgImage->xsize; imageSet->scale[0]->ysize = jpgImage->ysize; imageSet->scale[0]->dpi = jpgImage->dpi; // The dpi value is not read correctly by jpeglib embedded in OpenCV 2.2.x. #if AR2_CAPABLE_ADAPTIVE_TEMPLATE imageSet->scale[0]->imgBWBlur[0] = jpgImage->image; // Create the blurred images. for( j = 1; j < AR2_BLUR_IMAGE_MAX; j++ ) { arMalloc( imageSet->scale[0]->imgBWBlur[j], ARUint8, imageSet->scale[0]->xsize * imageSet->scale[0]->ysize); p1 = imageSet->scale[0]->imgBWBlur[0]; p2 = imageSet->scale[0]->imgBWBlur[j]; for( k1 = 0; k1 < imageSet->scale[0]->xsize * imageSet->scale[0]->ysize; k1++ ) *(p2++) = *(p1++); defocus_image( imageSet->scale[0]->imgBWBlur[j], imageSet->scale[0]->xsize, imageSet->scale[0]->ysize, 3 ); } #else imageSet->scale[0]->imgBW = jpgImage->image; #endif free(jpgImage); // Minify for the other scales. // First, find the list of scales we wrote into the file. fseek(fp, (long)(-(int)sizeof(dpi)*(imageSet->num - 1)), SEEK_END); for( i = 1; i < imageSet->num; i++ ) { if( fread(&dpi, sizeof(dpi), 1, fp) != 1 ) { for( k1 = 0; k1 < i; k1++ ) { #if AR2_CAPABLE_ADAPTIVE_TEMPLATE for( k2 = 0; k2 < AR2_BLUR_IMAGE_MAX; k2++ ) free(imageSet->scale[k1]->imgBWBlur[k2]); #else free(imageSet->scale[k1]->imgBW); #endif free(imageSet->scale[k1]); } goto bail1; } imageSet->scale[i] = ar2GenImageLayer2( imageSet->scale[0], dpi ); if( imageSet->scale[i] == NULL ) { for( k1 = 0; k1 < i; k1++ ) { #if AR2_CAPABLE_ADAPTIVE_TEMPLATE for( k2 = 0; k2 < AR2_BLUR_IMAGE_MAX; k2++ ) free(imageSet->scale[k1]->imgBWBlur[k2]); #else free(imageSet->scale[k1]->imgBW); #endif free(imageSet->scale[k1]); } goto bail1; } } fclose(fp); return imageSet; bail1: free(imageSet->scale); bail: free(imageSet); fclose(fp); return NULL; }
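/*
 * Hedged usage sketch (not part of the original source): load the image set written next to "Data/pinball.iset"
 * and report each scale level.  The file base name is an example, and ar2FreeImageSet() is assumed to be the
 * library's matching free routine.
 */
void dumpImageSetInfo( void )
{
    AR2ImageSetT *iset = ar2ReadImageSet( "Data/pinball" ); /* ".iset" is appended internally. */
    int           i;

    if( iset == NULL ) return;
    for( i = 0; i < iset->num; i++ ) {
        ARLOGi("Scale %d: %dx%d pixels at %.1f dpi.\n",
               i, iset->scale[i]->xsize, iset->scale[i]->ysize, iset->scale[i]->dpi);
    }
    ar2FreeImageSet( &iset ); /* Assumed counterpart to ar2ReadImageSet(). */
}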
int ar2GetBestMatching( ARUint8 *img, ARUint8 *mfImage, int xsize, int ysize, AR_PIXEL_FORMAT pixFormat, AR2TemplateT *mtemp, int rx, int ry, int search[3][2], int *bx, int *by, float *val) { int search_flag[] = {USE_SEARCH1, USE_SEARCH2, USE_SEARCH3}; int px, py, sx, sy, ex, ey; int yts1, yts2; int keep_num; int cx[KEEP_NUM], cy[KEEP_NUM]; int cval[KEEP_NUM]; int wval, wval2; int i, j, l; int ii; int ret; ARUint8 *pmf; #if 0 #else ARUint32 *subImage1, *p11, *p12, w1; ARUint32 *subImage2, *p21, *p22, w2; ARUint32 subImage11[AR2_TEMP_SCALE]; ARUint32 subImage21[AR2_TEMP_SCALE]; ARUint8 *p3, *p4; #endif // First pass: initialise. yts1 = mtemp->yts1; yts2 = mtemp->yts2; for( ii = 0; ii < 3; ii++ ) { if( search_flag[ii] == 0 ) continue; if( search[ii][0] < 0 ) break; // "Snap" position to centre of grid square. px = (search[ii][0]/(SKIP_INTERVAL + 1))*(SKIP_INTERVAL + 1) + (SKIP_INTERVAL + 1)/2; py = (search[ii][1]/(SKIP_INTERVAL + 1))*(SKIP_INTERVAL + 1) + (SKIP_INTERVAL + 1)/2; sx = px - rx; // Start position in x. if( sx < 0 ) sx = 0; ex = px + rx; // End position in x. if( ex >= xsize ) ex = xsize - 1; sy = py - ry; // Start position in y. if( sy < 0 ) sy = 0; ey = py + ry; // End position in y. if( ey >= ysize ) ey = ysize - 1; // Initialise mfImage by writing 0s into the potential search space. for( j = sy; j <= ey; j++ ) { pmf = &mfImage[j*xsize + sx]; for( i = sx; i <= ex; i++ ) { *(pmf++) = 0; } } } // Second pass: get candidates. keep_num = 0; ret = 1; for( ii = 0; ii < 3; ii++ ) { if( search_flag[ii] == 0 ) continue; if( search[ii][0] < 0 ) { if( ret ) return -1; // If we haven't got at least one starting point for a search, bail out. else break; } px = (search[ii][0]/(SKIP_INTERVAL + 1))*(SKIP_INTERVAL + 1) + (SKIP_INTERVAL + 1)/2; py = (search[ii][1]/(SKIP_INTERVAL + 1))*(SKIP_INTERVAL + 1) + (SKIP_INTERVAL + 1)/2; for( j = py - ry; j <= py + ry; j += SKIP_INTERVAL + 1 ) { if( j - yts1*AR2_TEMP_SCALE < 0 ) continue; if( j + yts2*AR2_TEMP_SCALE >= ysize ) break; for( i = px - rx; i <= px + rx; i += SKIP_INTERVAL + 1 ) { if( i - mtemp->xts1*AR2_TEMP_SCALE < 0 ) continue; if( i + mtemp->xts2*AR2_TEMP_SCALE >= xsize ) break; if( mfImage[j*xsize + i] ) continue; // Skip pixels already matched. mfImage[j*xsize + i] = 1; // Mark this pixel as matched. if( ar2GetBestMatchingSubFine(img, xsize, ysize, pixFormat, mtemp, i, j, &wval) < 0 ) { continue; } ret = 0; updateCandidate(i, j, wval, &keep_num, cx, cy, cval); } } } // Third pass. Determine best candidate. 
wval2 = 0; ret = -1; #if 0 for(l = 0; l < keep_num; l++) { for( j = cy[l] - SKIP_INTERVAL; j <= cy[l] + SKIP_INTERVAL; j++ ) { if( j - mtemp->yts1*AR2_TEMP_SCALE < 0 ) continue; if( j + mtemp->yts2*AR2_TEMP_SCALE >= ysize ) break; for( i = cx[l] - SKIP_INTERVAL; i <= cx[l] + SKIP_INTERVAL; i++ ) { if( i - mtemp->xts1*AR2_TEMP_SCALE < 0 ) continue; if( i + mtemp->xts2*AR2_TEMP_SCALE >= xsize ) break; if( ar2GetBestMatchingSubFine(img, xsize, ysize, pixFormat, mtemp, i, j, &wval) < 0 ) { continue; } if( wval > wval2 ) { *bx = i; *by = j; wval2 = wval; *val = (float)wval / 10000.0f; ret = 0; } } } } #else arMalloc( subImage1, ARUint32, ( (mtemp->xsize + 1)*AR2_TEMP_SCALE + (SKIP_INTERVAL*2)) * ((mtemp->ysize + 1)*AR2_TEMP_SCALE + (SKIP_INTERVAL*2) ) ); arMalloc( subImage2, ARUint32, ( (mtemp->xsize + 1)*AR2_TEMP_SCALE + (SKIP_INTERVAL*2)) * ((mtemp->ysize + 1)*AR2_TEMP_SCALE + (SKIP_INTERVAL*2) ) ); for(l = 0; l < keep_num; l++) { if( mtemp->validNum != mtemp->xsize*mtemp->ysize || (pixFormat != AR_PIXEL_FORMAT_MONO && pixFormat != AR_PIXEL_FORMAT_420v && pixFormat != AR_PIXEL_FORMAT_420f && pixFormat != AR_PIXEL_FORMAT_NV21) || cy[l] - SKIP_INTERVAL - mtemp->yts1*AR2_TEMP_SCALE < 0 || cy[l] + SKIP_INTERVAL + mtemp->yts2*AR2_TEMP_SCALE >= ysize || cx[l] - SKIP_INTERVAL - mtemp->xts1*AR2_TEMP_SCALE < 0 || cx[l] + SKIP_INTERVAL + mtemp->xts2*AR2_TEMP_SCALE >= xsize ) { for( j = cy[l] - SKIP_INTERVAL; j <= cy[l] + SKIP_INTERVAL; j++ ) { if( j - mtemp->yts1*AR2_TEMP_SCALE < 0 ) continue; if( j + mtemp->yts2*AR2_TEMP_SCALE >= ysize ) break; for( i = cx[l] - SKIP_INTERVAL; i <= cx[l] + SKIP_INTERVAL; i++ ) { if( i - mtemp->xts1*AR2_TEMP_SCALE < 0 ) continue; if( i + mtemp->xts2*AR2_TEMP_SCALE >= xsize ) break; if( ar2GetBestMatchingSubFine(img, xsize, ysize, pixFormat, mtemp, i, j, &wval) < 0 ) { continue; } if( wval > wval2 ) { *bx = i; *by = j; wval2 = wval; *val = (float)wval / 10000.0f; ret = 0; } } } } else { // Optimised case for mono incoming image. int px1 = (mtemp->xsize + 1)*AR2_TEMP_SCALE + (SKIP_INTERVAL*2); int py1 = mtemp->ysize*AR2_TEMP_SCALE + (SKIP_INTERVAL*2); int px2 = cx[l] - SKIP_INTERVAL - mtemp->xts1*AR2_TEMP_SCALE; int py2 = cy[l] - SKIP_INTERVAL - mtemp->yts1*AR2_TEMP_SCALE; int px3 = px1 - AR2_TEMP_SCALE; p11 = p12 = subImage1; p21 = p22 = subImage2; for( j = 0; j < AR2_TEMP_SCALE*px1; j++ ) { *(p11++) = 0; *(p21++) = 0; } p3 = p4 = &img[py2*xsize + px2]; for( j = 0; j < py1; j++ ) { for( i = 0; i < AR2_TEMP_SCALE; i++ ) { *(p11++) = 0; *(p21++) = 0; subImage11[i] = 0; subImage21[i] = 0; } p12 += AR2_TEMP_SCALE; p22 += AR2_TEMP_SCALE; for( i = 0; i < px3; i++) { w1 = subImage11[i%AR2_TEMP_SCALE] += (*p3); w2 = subImage21[i%AR2_TEMP_SCALE] += (*p3)*(*p3); p3++; *(p11++) = w1 + *(p12++); *(p21++) = w2 + *(p22++); } p3 = p4 += xsize; } for( j = 0; j < SKIP_INTERVAL*2 + 1; j++ ) { for( i = 0; i < SKIP_INTERVAL*2 + 1; i++) { if( ar2GetBestMatchingSubFineOpt(img, xsize, ysize, px2 + i, py2 + j, mtemp, subImage1, subImage2, i + AR2_TEMP_SCALE, j + AR2_TEMP_SCALE, &wval) < 0 ) { continue; } if( wval > wval2 ) { *bx = cx[l] - SKIP_INTERVAL + i; *by = cy[l] - SKIP_INTERVAL + j; wval2 = wval; *val = (float)wval / 10000; ret = 0; } } } } } free(subImage1); free(subImage2); #endif return ret; }
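/*
 * Worked example (illustrative, assuming SKIP_INTERVAL == 3) of the "snap to centre of grid square" arithmetic
 * used in the first two passes: candidates are scanned on a (SKIP_INTERVAL + 1)-pixel grid, so a search hint at
 * x == 13 is snapped to the centre of the 4-pixel cell that contains it:
 *
 *   px = (13 / (3 + 1)) * (3 + 1) + (3 + 1) / 2
 *      = 3 * 4 + 2
 *      = 14                  -- centre of the cell covering columns 12..15.
 *
 * The third pass then refines around each surviving candidate at single-pixel resolution within
 * +/- SKIP_INTERVAL of that snapped position.
 */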
static AR2ImageSetT *ar2ReadImageSetOld( FILE *fp ) { AR2ImageSetT *imageSet; int i, k; #if AR2_CAPABLE_ADAPTIVE_TEMPLATE int j, l; #endif arMalloc( imageSet, AR2ImageSetT, 1 ); if( fread(&(imageSet->num), sizeof(imageSet->num), 1, fp) != 1 || imageSet->num <= 0) { ARLOGe("Error reading imageSet.\n"); goto bail; } arMalloc( imageSet->scale, AR2ImageT*, imageSet->num ); for( i = 0; i < imageSet->num; i++ ) { arMalloc( imageSet->scale[i], AR2ImageT, 1 ); } for( i = 0; i < imageSet->num; i++ ) { if( fread(&(imageSet->scale[i]->xsize), sizeof(imageSet->scale[i]->xsize), 1, fp) != 1 ) { for( k = 0; k < i; k++ ) { #if AR2_CAPABLE_ADAPTIVE_TEMPLATE for( l = 0; l < AR2_BLUR_IMAGE_MAX; l++) free(imageSet->scale[k]->imgBWBlur[l]); #else free(imageSet->scale[k]->imgBW); #endif } for( k = 0; k < imageSet->num; k++ ) free(imageSet->scale[k]); goto bail1; } if( fread(&(imageSet->scale[i]->ysize), sizeof(imageSet->scale[i]->ysize), 1, fp) != 1 ) { for( k = 0; k < i; k++ ) { #if AR2_CAPABLE_ADAPTIVE_TEMPLATE for( l = 0; l < AR2_BLUR_IMAGE_MAX; l++) free(imageSet->scale[k]->imgBWBlur[l]); #else free(imageSet->scale[k]->imgBW); #endif } for( k = 0; k < imageSet->num; k++ ) free(imageSet->scale[k]); goto bail1; } if( fread(&(imageSet->scale[i]->dpi), sizeof(imageSet->scale[i]->dpi), 1, fp) != 1 ) { for( k = 0; k < i; k++ ) { #if AR2_CAPABLE_ADAPTIVE_TEMPLATE for( l = 0; l < AR2_BLUR_IMAGE_MAX; l++) free(imageSet->scale[k]->imgBWBlur[l]); #else free(imageSet->scale[k]->imgBW); #endif } for( k = 0; k < imageSet->num; k++ ) free(imageSet->scale[k]); goto bail1; } #if AR2_CAPABLE_ADAPTIVE_TEMPLATE for( j = 0; j < AR2_BLUR_IMAGE_MAX; j++ ) { arMalloc( imageSet->scale[i]->imgBWBlur[j], ARUint8, imageSet->scale[i]->xsize * imageSet->scale[i]->ysize); } #else arMalloc( imageSet->scale[i]->imgBW, ARUint8, imageSet->scale[i]->xsize * imageSet->scale[i]->ysize); #endif #if AR2_CAPABLE_ADAPTIVE_TEMPLATE for( j = 0; j < AR2_BLUR_IMAGE_MAX; j++ ) { if( fread(imageSet->scale[i]->imgBWBlur[j], sizeof(ARUint8), imageSet->scale[i]->xsize * imageSet->scale[i]->ysize, fp) != imageSet->scale[i]->xsize * imageSet->scale[i]->ysize ) { for( k = 0; k <= i; k++ ) { for( l = 0; l < AR2_BLUR_IMAGE_MAX; l++) free(imageSet->scale[k]->imgBWBlur[l]); } for( k = 0; k < imageSet->num; k++ ) free(imageSet->scale[k]); goto bail1; } } #else if( fread(imageSet->scale[i]->imgBW, sizeof(ARUint8), imageSet->scale[i]->xsize * imageSet->scale[i]->ysize, fp) != imageSet->scale[i]->xsize * imageSet->scale[i]->ysize ) { for( k = 0; k <= i; k++ ) { free(imageSet->scale[k]->imgBW); } for( k = 0; k < imageSet->num; k++ ) free(imageSet->scale[k]); goto bail1; } #endif } fclose(fp); return imageSet; bail1: free(imageSet->scale); bail: free(imageSet); fclose(fp); return NULL; }
ARFloat Tracker::rppMultiGetTransMat(ARMarkerInfo *marker_info, int marker_num, ARMultiMarkerInfoT *config) { rpp_float err = 1e+20; rpp_mat R, R_init; rpp_vec t; std::map<int, int> marker_id_freq; for (int i = 0; i < marker_num; i++) { const int m_patt_id = marker_info[i].id; if (m_patt_id >= 0) { std::map<int, int>::iterator iter = marker_id_freq.find(m_patt_id); if (iter == marker_id_freq.end()) { marker_id_freq.insert(std::make_pair(int(m_patt_id), 1)); } else { ((*iter).second)++; } } } std::deque<std::pair<int, int> > config_patt_id; for (int j = 0; j < config->marker_num; j++) config_patt_id.push_back(std::make_pair(int(j), int(config->marker[j].patt_id))); std::map<int, int> m2c_idx; for (int m = 0; m < marker_num; m++) { const int m_patt_id = marker_info[m].id; bool ignore_marker = (m_patt_id < 0); std::map<int, int>::iterator m_iter = marker_id_freq.find(m_patt_id); if (m_iter != marker_id_freq.end()) ignore_marker |= ((*m_iter).second > 1); if (!ignore_marker) { std::deque<std::pair<int, int> >::iterator c_iter = config_patt_id.begin(); if (c_iter != config_patt_id.end()) do { const int patt_id = (*c_iter).second; if (marker_info[m].id == patt_id) { m2c_idx.insert(std::make_pair(int(m), int((*c_iter).first))); config_patt_id.erase(c_iter); c_iter = config_patt_id.end(); continue; } else { c_iter++; } } while (c_iter != config_patt_id.end()); } } // ---------------------------------------------------------------------- const unsigned int n_markers = (unsigned int) m2c_idx.size(); const unsigned int n_pts = 4 * n_markers; if (n_markers == 0) return (-1); rpp_vec *ppos2d = NULL, *ppos3d = NULL; arMalloc( ppos2d, rpp_vec, n_pts); arMalloc( ppos3d, rpp_vec, n_pts); memset(ppos2d, 0, sizeof(rpp_vec) * n_pts); memset(ppos3d, 0, sizeof(rpp_vec) * n_pts); const rpp_float iprts_z = 1; int p = 0; for (std::map<int, int>::iterator iter = m2c_idx.begin(); iter != m2c_idx.end(); iter++) { const int m = (*iter).first; const int c = (*iter).second; const int dir = marker_info[m].dir; const int v_idx[4] = { (4 - dir) % 4, (5 - dir) % 4, (6 - dir) % 4, (7 - dir) % 4 }; for (int i = 0; i < 4; i++) for (int j = 0; j < 3; j++) { ppos2d[p + i][j] = (rpp_float) (j == 2 ? iprts_z : marker_info[m].vertex[v_idx[i]][j]); ppos3d[p + i][j] = (rpp_float) config->marker[c].pos3d[i][j]; } p += 4; } const rpp_float cc[2] = { arCamera->mat[0][2], arCamera->mat[1][2] }; const rpp_float fc[2] = { arCamera->mat[0][0], arCamera->mat[1][1] }; robustPlanarPose(err, R, t, cc, fc, ppos3d, ppos2d, n_pts, R_init, true, 0, 0, 0); for (int k = 0; k < 3; k++) { config->trans[k][3] = (ARFloat) t[k]; for (int j = 0; j < 3; j++) config->trans[k][j] = (ARFloat) R[k][j]; } if (ppos2d != NULL) free(ppos2d); if (ppos3d != NULL) free(ppos3d); if (err > 1e+10) return (-1); // an actual error has occurred in robustPlanarPose() return (ARFloat(err)); // NOTE: err is a real number from the interval [0,1e+10] }
AR2VideoParamT *ar2VideoOpen( char *config_in ) { // Warning, this function leaks badly when an error occurs. AR2VideoParamT *vid; struct v4l2_capability vd; struct v4l2_format fmt; struct v4l2_input ipt; struct v4l2_requestbuffers req; char *config, *a, line[256]; int value; /* If no config string is supplied, we should use the environment variable, otherwise set a sane default */ if (!config_in || !(config_in[0])) { /* None suppplied, lets see if the user supplied one from the shell */ char *envconf = getenv ("ARTOOLKIT_CONFIG"); if (envconf && envconf[0]) { config = envconf; printf ("Using config string from environment [%s].\n", envconf); } else { config = NULL; printf ("No video config string supplied, using defaults.\n"); } } else { config = config_in; printf ("Using supplied video config string [%s].\n", config_in); } arMalloc( vid, AR2VideoParamT, 1 ); strcpy( vid->dev, DEFAULT_VIDEO_DEVICE ); vid->width = DEFAULT_VIDEO_WIDTH; vid->height = DEFAULT_VIDEO_HEIGHT; vid->palette = V4L2_PIX_FMT_YUYV; /* palette format */ vid->contrast = -1; vid->brightness = -1; vid->saturation = -1; vid->hue = -1; vid->gamma = -1; vid->exposure = -1; vid->gain = -1; vid->mode = V4L2_STD_NTSC; //vid->debug = 0; vid->debug = 1; vid->channel = 0; vid->videoBuffer=NULL; a = config; if( a != NULL) { for(;;) { while( *a == ' ' || *a == '\t' ) a++; if( *a == '\0' ) break; if( strncmp( a, "-dev=", 5 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[5], "%s", vid->dev ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-channel=", 9 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[9], "%d", &vid->channel ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-width=", 7 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[7], "%d", &vid->width ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-height=", 8 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[8], "%d", &vid->height ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-palette=", 9 ) == 0 ) { if( strncmp( &a[9], "GREY", 4) == 0 ) { vid->palette = V4L2_PIX_FMT_GREY; } else if( strncmp( &a[9], "HI240", 3) == 0 ) { vid->palette = V4L2_PIX_FMT_HI240; } else if( strncmp( &a[9], "RGB565", 3) == 0 ) { vid->palette = V4L2_PIX_FMT_RGB565; } else if( strncmp( &a[9], "RGB555", 3) == 0 ) { vid->palette = V4L2_PIX_FMT_RGB555; } else if( strncmp( &a[9], "BGR24", 3) == 0 ) { vid->palette = V4L2_PIX_FMT_BGR24; } else if( strncmp( &a[9], "BGR32", 3) == 0 ) { vid->palette = V4L2_PIX_FMT_BGR32; } else if( strncmp( &a[9], "YUYV", 3) == 0 ) { vid->palette = V4L2_PIX_FMT_YUYV; } else if( strncmp( &a[9], "UYVY", 3) == 0 ) { vid->palette = V4L2_PIX_FMT_UYVY; } else if( strncmp( &a[9], "Y41P", 3) == 0 ) { vid->palette = V4L2_PIX_FMT_Y41P; } else if( strncmp( &a[9], "YUV422P", 3) == 0 ) { vid->palette = V4L2_PIX_FMT_YUV422P; } else if( strncmp( &a[9], "YUV411P", 3) == 0 ) { vid->palette = V4L2_PIX_FMT_YUV411P; } else if( strncmp( &a[9], "YVU420", 3) == 0 ) { vid->palette = V4L2_PIX_FMT_YVU420; } else if( strncmp( &a[9], "YVU410", 3) == 0 ) { vid->palette = V4L2_PIX_FMT_YVU410; } } else if( strncmp( a, "-contrast=", 10 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[10], "%d", &vid->contrast ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-brightness=", 12 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[12], "%d", &vid->brightness ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( 
strncmp( a, "-saturation=", 12 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[12], "%d", &vid->saturation ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-hue=", 5 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[5], "%d", &vid->hue ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-gamma=", 7 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[7], "%d", &vid->gamma ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-exposure=", 10 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[10], "%d", &vid->exposure ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-gain=", 6 ) == 0 ) { sscanf( a, "%s", line ); if( sscanf( &line[6], "%d", &vid->gain ) == 0 ) { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-mode=", 6 ) == 0 ) { if( strncmp( &a[6], "PAL", 3 ) == 0 ) vid->mode = V4L2_STD_PAL; else if( strncmp( &a[6], "NTSC", 4 ) == 0 ) vid->mode = V4L2_STD_NTSC; else if( strncmp( &a[6], "SECAM", 5 ) == 0 ) vid->mode = V4L2_STD_SECAM; else { ar2VideoDispOption(); free( vid ); return 0; } } else if( strncmp( a, "-debug", 6 ) == 0 ) { vid->debug = 1; } else { ar2VideoDispOption(); free( vid ); return 0; } while( *a != ' ' && *a != '\t' && *a != '\0') a++; } } vid->fd = open(vid->dev, O_RDWR);// O_RDONLY ? if(vid->fd < 0){ printf("video device (%s) open failed\n",vid->dev); free( vid ); return 0; } if(ioctl(vid->fd,VIDIOC_QUERYCAP,&vd) < 0){ printf("ioctl failed\n"); free( vid ); return 0; } if (!(vd.capabilities & V4L2_CAP_STREAMING)) { fprintf (stderr, "Device does not support streaming i/o\n"); } if(vid->debug ) { printf("=== debug info ===\n"); printf(" vd.driver = %s\n",vd.driver); printf(" vd.card = %s\n",vd.card); printf(" vd.bus_info = %s\n",vd.bus_info); printf(" vd.version = %d\n",vd.version); printf(" vd.capabilities = %d\n",vd.capabilities); } memset(&fmt, 0, sizeof(fmt)); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; #if 1 fmt.fmt.pix.width = vid->width; fmt.fmt.pix.height = vid->height; fmt.fmt.pix.pixelformat = vid->palette; fmt.fmt.pix.field = V4L2_FIELD_NONE; #else fmt.fmt.pix.width = 640; fmt.fmt.pix.height = 480; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; #endif if (ioctl (vid->fd, VIDIOC_S_FMT, &fmt) < 0) { close(vid->fd); free( vid ); printf("ar2VideoOpen: Error setting video format (%d)\n", errno); return 0; } // Get actual camera settings vid->palette = fmt.fmt.pix.pixelformat; vid->width = fmt.fmt.pix.width; vid->height = fmt.fmt.pix.height; if (vid->debug) { printf(" Width: %d\n", fmt.fmt.pix.width); printf(" Height: %d\n", fmt.fmt.pix.height); printPalette(fmt.fmt.pix.pixelformat); } memset(&ipt, 0, sizeof(ipt)); ipt.index = vid->channel; ipt.std = vid->mode; if(ioctl(vid->fd,VIDIOC_ENUMINPUT,&ipt) < 0) { printf("arVideoOpen: Error querying input device type\n"); close(vid->fd); free( vid ); return 0; } if (vid->debug) { if (ipt.type == V4L2_INPUT_TYPE_TUNER) { printf(" Type: Tuner\n"); } if (ipt.type == V4L2_INPUT_TYPE_CAMERA) { printf(" Type: Camera\n"); } } // Set channel if (ioctl(vid->fd, VIDIOC_S_INPUT, &ipt)) { printf("arVideoOpen: Error setting video input\n"); close(vid->fd); free( vid ); return 0; } // Attempt to set some camera controls setControl(vid->fd, V4L2_CID_BRIGHTNESS, vid->brightness); setControl(vid->fd, V4L2_CID_CONTRAST, vid->contrast); setControl(vid->fd, V4L2_CID_SATURATION, vid->saturation); setControl(vid->fd, V4L2_CID_HUE, vid->hue); 
setControl(vid->fd, V4L2_CID_GAMMA, vid->gamma); setControl(vid->fd, V4L2_CID_EXPOSURE, vid->exposure); setControl(vid->fd, V4L2_CID_GAIN, vid->gain); // Print out current control values if(vid->debug ) { if (!getControl(vid->fd, V4L2_CID_BRIGHTNESS, &value)) { printf("Brightness: %d\n", value); } if (!getControl(vid->fd, V4L2_CID_CONTRAST, &value)) { printf("Contrast: %d\n", value); } if (!getControl(vid->fd, V4L2_CID_SATURATION, &value)) { printf("Saturation: %d\n", value); } if (!getControl(vid->fd, V4L2_CID_HUE, &value)) { printf("Hue: %d\n", value); } if (!getControl(vid->fd, V4L2_CID_GAMMA, &value)) { printf("Gamma: %d\n", value); } if (!getControl(vid->fd, V4L2_CID_EXPOSURE, &value)) { printf("Exposure: %d\n", value); } if (!getControl(vid->fd, V4L2_CID_GAIN, &value)) { printf("Gain: %d\n", value); } } // if (vid->palette==V4L2_PIX_FMT_YUYV) arMalloc( vid->videoBuffer, ARUint8, vid->width*vid->height*3 ); // Setup memory mapping memset(&req, 0, sizeof(req)); req.count = 2; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; if (ioctl(vid->fd, VIDIOC_REQBUFS, &req)) { printf("Error calling VIDIOC_REQBUFS\n"); close(vid->fd); if(vid->videoBuffer!=NULL) free(vid->videoBuffer); free( vid ); return 0; } if (req.count < 2) { printf("this device can not be supported by libARvideo.\n"); printf("(req.count < 2)\n"); close(vid->fd); if(vid->videoBuffer!=NULL) free(vid->videoBuffer); free( vid ); return 0; } vid->buffers = (struct buffer*)calloc(req.count , sizeof(*vid->buffers)); if (vid->buffers == NULL ) { printf("ar2VideoOpen: Error allocating buffer memory\n"); close(vid->fd); if(vid->videoBuffer!=NULL) free(vid->videoBuffer); free( vid ); return 0; } for (vid->n_buffers = 0; vid->n_buffers < req.count; ++vid->n_buffers) { struct v4l2_buffer buf; memset(&buf, 0, sizeof(buf)); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = vid->n_buffers; if (ioctl (vid->fd, VIDIOC_QUERYBUF, &buf)) { printf ("error VIDIOC_QUERYBUF\n"); close(vid->fd); if(vid->videoBuffer!=NULL) free(vid->videoBuffer); free( vid ); return 0; } vid->buffers[vid->n_buffers].length = buf.length; vid->buffers[vid->n_buffers].start = mmap (NULL /* start anywhere */, buf.length, PROT_READ | PROT_WRITE /* required */, MAP_SHARED /* recommended */, vid->fd, buf.m.offset); if (MAP_FAILED == vid->buffers[vid->n_buffers].start) { printf("Error mmap\n"); close(vid->fd); if(vid->videoBuffer!=NULL) free(vid->videoBuffer); free( vid ); return 0; } } vid->video_cont_num = -1; return vid; }
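/*
 * Hedged sketch of the capture-start step that normally follows this V4L2 open routine (ARToolKit does this in
 * its ar2VideoCapStart() call; the helper below is illustrative, not the original implementation, and assumes the
 * same <linux/videodev2.h>, <sys/ioctl.h> and <string.h> includes as the file above): queue every mmap'd buffer,
 * then switch streaming on.
 */
int startCaptureV4L2( AR2VideoParamT *vid )
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    unsigned int       i;

    for( i = 0; i < vid->n_buffers; i++ ) {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(buf));
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index  = i;
        /* Hand the empty buffer to the driver to be filled. */
        if( ioctl(vid->fd, VIDIOC_QBUF, &buf) < 0 ) return -1;
    }
    /* Start the capture stream. */
    if( ioctl(vid->fd, VIDIOC_STREAMON, &type) < 0 ) return -1;
    vid->video_cont_num = 0;
    return 0;
}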
ARMultiMarkerInfoT *arMultiReadConfigFile( const char *filename, ARPattHandle *pattHandle ) { FILE *fp; ARMultiEachMarkerInfoT *marker; ARMultiMarkerInfoT *marker_info; ARdouble wpos3d[4][2]; char buf[256], pattPath[2048], dummy; int num; int patt_type = 0; int i, j; if ((fp = fopen(filename, "r")) == NULL) { ARLOGe("Error: unable to open multimarker config file '%s'.\n", filename); ARLOGperror(NULL); return NULL; } get_buff(buf, 256, fp); if( sscanf(buf, "%d", &num) != 1 ) { ARLOGe("Error processing multimarker config file '%s': First line must be number of marker configs to read.\n", filename); fclose(fp); return NULL; } ARLOGd("Reading %d markers from multimarker file '%s'\n", num, filename); arMalloc(marker, ARMultiEachMarkerInfoT, num); for( i = 0; i < num; i++ ) { get_buff(buf, 256, fp); if (sscanf(buf, #if defined(__LP64__) && !defined(__APPLE__) "%lu%c", #else "%llu%c", #endif &(marker[i].globalID), &dummy) != 1) { // Try first as matrix code. if (!pattHandle) { ARLOGe("Error processing multimarker config file '%s': pattern '%s' specified in multimarker configuration while in barcode-only mode.\n", filename, buf); goto bail; } if (!arUtilGetDirectoryNameFromPath(pattPath, filename, sizeof(pattPath), 1)) { // Get directory prefix. ARLOGe("Error processing multimarker config file '%s': Unable to determine directory name.\n", filename); goto bail; } strncat(pattPath, buf, sizeof(pattPath) - strlen(pattPath) - 1); // Add name of file to open. if ((marker[i].patt_id = arPattLoad(pattHandle, pattPath)) < 0) { ARLOGe("Error processing multimarker config file '%s': Unable to load pattern '%s'.\n", filename, pattPath); goto bail; } marker[i].patt_type = AR_MULTI_PATTERN_TYPE_TEMPLATE; patt_type |= 0x01; } else { if ((marker[i].globalID & 0xffff8000ULL) == 0ULL) marker[i].patt_id = (int)(marker[i].globalID & 0x00007fffULL); // If upper 33 bits are zero, use lower 31 bits as regular matrix code. else marker[i].patt_id = 0; ARLOGd("Marker %3d is matrix code %llu.\n", i + 1, marker[i].globalID); marker[i].patt_type = AR_MULTI_PATTERN_TYPE_MATRIX; patt_type |= 0x02; } get_buff(buf, 256, fp); if( sscanf(buf, #ifdef ARDOUBLE_IS_FLOAT "%f", #else "%lf", #endif &marker[i].width) != 1 ) { ARLOGe("Error processing multimarker config file '%s', marker definition %3d: First line must be pattern width.\n", filename, i + 1); goto bail; } j = 0; get_buff(buf, 256, fp); if( sscanf(buf, #ifdef ARDOUBLE_IS_FLOAT "%f %f %f %f", #else "%lf %lf %lf %lf", #endif &marker[i].trans[j][0], &marker[i].trans[j][1], &marker[i].trans[j][2], &marker[i].trans[j][3]) != 4 ) { // Perhaps this is an old ARToolKit v2.x multimarker file? // If so, then the next line is two values (center) and should be skipped. 
float t1, t2; if( sscanf(buf, "%f %f", &t1, &t2) != 2 ) { ARLOGe("Error processing multimarker config file '%s', marker definition %3d: Lines 2 - 4 must be marker transform.\n", filename, i + 1); goto bail; } } else j++; do { get_buff(buf, 256, fp); if( sscanf(buf, #ifdef ARDOUBLE_IS_FLOAT "%f %f %f %f", #else "%lf %lf %lf %lf", #endif &marker[i].trans[j][0], &marker[i].trans[j][1], &marker[i].trans[j][2], &marker[i].trans[j][3]) != 4 ) { ARLOGe("Error processing multimarker config file '%s', marker definition %3d: Lines 2 - 4 must be marker transform.\n", filename, i + 1); goto bail; } j++; } while (j < 3); arUtilMatInv( (const ARdouble (*)[4])marker[i].trans, marker[i].itrans ); wpos3d[0][0] = -marker[i].width/2.0; wpos3d[0][1] = marker[i].width/2.0; wpos3d[1][0] = marker[i].width/2.0; wpos3d[1][1] = marker[i].width/2.0; wpos3d[2][0] = marker[i].width/2.0; wpos3d[2][1] = -marker[i].width/2.0; wpos3d[3][0] = -marker[i].width/2.0; wpos3d[3][1] = -marker[i].width/2.0; for( j = 0; j < 4; j++ ) { marker[i].pos3d[j][0] = marker[i].trans[0][0] * wpos3d[j][0] + marker[i].trans[0][1] * wpos3d[j][1] + marker[i].trans[0][3]; marker[i].pos3d[j][1] = marker[i].trans[1][0] * wpos3d[j][0] + marker[i].trans[1][1] * wpos3d[j][1] + marker[i].trans[1][3]; marker[i].pos3d[j][2] = marker[i].trans[2][0] * wpos3d[j][0] + marker[i].trans[2][1] * wpos3d[j][1] + marker[i].trans[2][3]; } } fclose(fp); arMalloc(marker_info, ARMultiMarkerInfoT, 1); marker_info->marker = marker; marker_info->marker_num = num; marker_info->prevF = 0; if( (patt_type & 0x03) == 0x03 ) marker_info->patt_type = AR_MULTI_PATTERN_DETECTION_MODE_TEMPLATE_AND_MATRIX; else if( patt_type & 0x01 ) marker_info->patt_type = AR_MULTI_PATTERN_DETECTION_MODE_TEMPLATE; else marker_info->patt_type = AR_MULTI_PATTERN_DETECTION_MODE_MATRIX; marker_info->cfPattCutoff = AR_MULTI_CONFIDENCE_PATTERN_CUTOFF_DEFAULT; marker_info->cfMatrixCutoff = AR_MULTI_CONFIDENCE_MATRIX_CUTOFF_DEFAULT; return marker_info; bail: fclose(fp); free(marker); return NULL; }
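/*
 * Illustrative multimarker configuration in the format parsed above (file contents shown as a comment; the values
 * are made up, and the '#' comment convention is assumed to be handled by get_buff()).  Two markers: a template
 * pattern and a barcode (matrix) marker 100 mm to its right, each 40 mm wide, followed by their 3x4 transforms.
 *
 *   2
 *
 *   patt.hiro
 *   40.0
 *   1.0 0.0 0.0    0.0
 *   0.0 1.0 0.0    0.0
 *   0.0 0.0 1.0    0.0
 *
 *   0
 *   40.0
 *   1.0 0.0 0.0  100.0
 *   0.0 1.0 0.0    0.0
 *   0.0 0.0 1.0    0.0
 */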
ARMultiMarkerInfoT* Tracker::arMultiReadConfigFile(const char *filename) { FILE *fp; ARMultiEachMarkerInfoT *marker; ARMultiMarkerInfoT *marker_info; ARFloat wpos3d[4][2]; char buf[256], buf1[256]; int num; int i, j; setlocale(LC_NUMERIC, "C"); if ((fp = fopen(filename, "r")) == NULL) return NULL; get_buff(buf, 256, fp); if (sscanf(buf, "%d", &num) != 1) { fclose(fp); return NULL; } arMalloc(marker, ARMultiEachMarkerInfoT, num); for (i = 0; i < num; i++) { get_buff(buf, 256, fp); if (sscanf(buf, "%s", buf1) != 1) { fclose(fp); free(marker); return NULL; } // Added by Daniel: if the markername is an integer number // we directly interprete this as the marker id (used for // id-based markers) if (isNumber(buf1)) marker[i].patt_id = atoi(buf1); else if ((marker[i].patt_id = arLoadPatt(buf1)) < 0) { fclose(fp); free(marker); return NULL; } get_buff(buf, 256, fp); #ifdef _USE_DOUBLE_ if( sscanf(buf, "%lf", &marker[i].width) != 1 ) { #else if (sscanf(buf, "%f", &marker[i].width) != 1) { #endif fclose(fp); free(marker); return NULL; } get_buff(buf, 256, fp); #ifdef _USE_DOUBLE_ if( sscanf(buf, "%lf %lf", &marker[i].center[0], &marker[i].center[1]) != 2 ) { #else if (sscanf(buf, "%f %f", &marker[i].center[0], &marker[i].center[1]) != 2) { #endif fclose(fp); free(marker); return NULL; } for (j = 0; j < 3; j++) { get_buff(buf, 256, fp); if (sscanf(buf, #ifdef _USE_DOUBLE_ "%lf %lf %lf %lf", #else "%f %f %f %f", #endif &marker[i].trans[j][0], &marker[i].trans[j][1], &marker[i].trans[j][2], &marker[i].trans[j][3]) != 4) { fclose(fp); free(marker); return NULL; } } arUtilMatInv(marker[i].trans, marker[i].itrans); wpos3d[0][0] = marker[i].center[0] - marker[i].width * 0.5f; wpos3d[0][1] = marker[i].center[1] + marker[i].width * 0.5f; wpos3d[1][0] = marker[i].center[0] + marker[i].width * 0.5f; wpos3d[1][1] = marker[i].center[1] + marker[i].width * 0.5f; wpos3d[2][0] = marker[i].center[0] + marker[i].width * 0.5f; wpos3d[2][1] = marker[i].center[1] - marker[i].width * 0.5f; wpos3d[3][0] = marker[i].center[0] - marker[i].width * 0.5f; wpos3d[3][1] = marker[i].center[1] - marker[i].width * 0.5f; for (j = 0; j < 4; j++) { marker[i].pos3d[j][0] = marker[i].trans[0][0] * wpos3d[j][0] + marker[i].trans[0][1] * wpos3d[j][1] + marker[i].trans[0][3]; marker[i].pos3d[j][1] = marker[i].trans[1][0] * wpos3d[j][0] + marker[i].trans[1][1] * wpos3d[j][1] + marker[i].trans[1][3]; marker[i].pos3d[j][2] = marker[i].trans[2][0] * wpos3d[j][0] + marker[i].trans[2][1] * wpos3d[j][1] + marker[i].trans[2][3]; } } fclose(fp); setlocale(LC_NUMERIC, "C"); marker_info = (ARMultiMarkerInfoT *) malloc(sizeof(ARMultiMarkerInfoT)); if (marker_info == NULL) { free(marker); return NULL; } marker_info->marker = marker; marker_info->marker_num = num; marker_info->prevF = 0; return marker_info; } } // namespace ARToolKitPlus
AR2MarkerSetT* ar2ReadMarkerSet(char *filename, char *ext, ARPattHandle *pattHandle) { // COVHI10394 FILE *fp = NULL; AR2MarkerSetT *markerSet = NULL; char buf[256], buf1[256] /*, buf2[256]*/; int i, j; char namebuf[512]; sprintf(namebuf, "%s.%s", filename, ext); if ((fp = fopen(namebuf, "r")) == NULL) return NULL; arMalloc(markerSet, AR2MarkerSetT, 1); if (get_buff(buf, 256, fp) == NULL) { free(markerSet); markerSet = NULL; goto done; } if (sscanf(buf, "%d", &(markerSet->num)) != 1) { free(markerSet); markerSet = NULL; goto done; } if (markerSet->num <= 0) { free(markerSet); markerSet = NULL; goto done; } arMalloc(markerSet->marker, AR2MarkerT, markerSet->num); for (i = 0; i < markerSet->num; i++) { if (get_buff(buf, 256, fp) == NULL) { free(markerSet->marker); free(markerSet); markerSet = NULL; goto done; } if (sscanf(buf, "%s", buf1) != 1) { free(markerSet->marker); free(markerSet); markerSet = NULL; goto done; } // ar2UtilDivideExt(buf1, buf, buf2); if ((markerSet->marker[i].pattId = arPattLoad(pattHandle, buf1)) < 0) { free(markerSet->marker); free(markerSet); markerSet = NULL; goto done; } if (get_buff(buf, 256, fp) == NULL) { free(markerSet->marker); free(markerSet); markerSet = NULL; goto done; } if (sscanf(buf, "%f", &(markerSet->marker[i].width)) != 1) { free(markerSet->marker); free(markerSet); markerSet = NULL; goto done; } for (j = 0; j < 3; j++) { if (get_buff(buf, 256, fp) == NULL) { free(markerSet->marker); free(markerSet); markerSet = NULL; goto done; } if (sscanf(buf, "%f %f %f %f", &(markerSet->marker[i].transI2M[j][0]), &(markerSet->marker[i].transI2M[j][1]), &(markerSet->marker[i].transI2M[j][2]), &(markerSet->marker[i].transI2M[j][3])) != 4) { free(markerSet->marker); free(markerSet); markerSet = NULL; goto done; } } } done: fclose(fp); return markerSet; }
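/*
 * Illustrative marker-set file in the format read above, e.g. the contents of "Data/sample.mrk" when called as
 * ar2ReadMarkerSet("Data/sample", "mrk", pattHandle).  File names and numbers are made up: one 40 mm marker whose
 * image-to-marker transform (transI2M) places it 10/20 mm from the image-set origin.
 *
 *   1
 *
 *   Data/hiro.patt
 *   40.0
 *   1.0 0.0 0.0   10.0
 *   0.0 1.0 0.0   20.0
 *   0.0 0.0 1.0    0.0
 */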
static AR2ImageT *ar2GenImageLayer2( AR2ImageT *src, float dpi ) { AR2ImageT *dst; ARUint8 *p1, *p2; int wx, wy; int sx, sy, ex, ey; int ii, jj, iii, jjj; int co, value; wx = (int)lroundf(src->xsize * dpi / src->dpi); wy = (int)lroundf(src->ysize * dpi / src->dpi); arMalloc( dst, AR2ImageT, 1 ); dst->xsize = wx; dst->ysize = wy; dst->dpi = dpi; #if AR2_CAPABLE_ADAPTIVE_TEMPLATE for( int i = 0; i < AR2_BLUR_IMAGE_MAX; i++ ) { arMalloc( dst->imgBWBlur[i], ARUint8, wx*wy ); } p2 = dst->imgBWBlur[0]; #else arMalloc( dst->imgBW, ARUint8, wx*wy ); p2 = dst->imgBW; #endif for( jj = 0; jj < wy; jj++ ) { sy = (int)lroundf( jj * src->dpi / dpi); ey = (int)lroundf((jj+1) * src->dpi / dpi) - 1; if( ey >= src->ysize ) ey = src->ysize - 1; for( ii = 0; ii < wx; ii++ ) { sx = (int)lroundf( ii * src->dpi / dpi); ex = (int)lroundf((ii+1) * src->dpi / dpi) - 1; if( ex >= src->xsize ) ex = src->xsize - 1; co = value = 0; for( jjj = sy; jjj <= ey; jjj++ ) { #if AR2_CAPABLE_ADAPTIVE_TEMPLATE p1 = &(src->imgBWBlur[0][jjj*src->xsize+sx]); #else p1 = &(src->imgBW[jjj*src->xsize+sx]); #endif for( iii = sx; iii <= ex; iii++ ) { value += *(p1++); co++; } } *(p2++) = value / co; } } #if AR2_CAPABLE_ADAPTIVE_TEMPLATE defocus_image( dst->imgBWBlur[0], wx, wy, 3 ); for( int i = 1; i < AR2_BLUR_IMAGE_MAX; i++ ) { p1 = dst->imgBWBlur[0]; p2 = dst->imgBWBlur[i]; for( int j = 0; j < wx*wy; j++ ) *(p2++) = *(p1++); defocus_image( dst->imgBWBlur[i], wx, wy, 2 ); } #else //defocus_image( dst->imgBW, wx, wy, 3 ); #endif return dst; }
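/*
 * Worked example of the resampling arithmetic above (numbers are illustrative): a 1000 x 800 pixel source at
 * 150 dpi reduced to a 40 dpi layer gives
 *
 *   wx = lroundf(1000 * 40 / 150) = 267
 *   wy = lroundf( 800 * 40 / 150) = 213
 *
 * and each destination pixel is the average of the roughly 4 x 4 block of source pixels that maps onto it,
 * clamped at the right and bottom edges.
 */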
static void init(int argc, char *argv[]) { char *vconfL = NULL; char *vconfR = NULL; char *cparaL = NULL; char *cparaR = NULL; char cparaLDefault[] = "Data/cparaL.dat"; char cparaRDefault[] = "Data/cparaR.dat"; ARParam wparam; ARGViewport viewport; int i, j; int gotTwoPartOption; int screenWidth, screenHeight, screenMargin; double wscalef, hscalef, scalef; chessboardCornerNumX = 0; chessboardCornerNumY = 0; calibImageNum = 0; patternWidth = 0.0f; i = 1; // argv[0] is name of app, so start at 1. while (i < argc) { gotTwoPartOption = FALSE; // Look for two-part options first. if ((i + 1) < argc) { if (strcmp(argv[i], "--vconfL") == 0) { i++; vconfL = argv[i]; gotTwoPartOption = TRUE; } else if (strcmp(argv[i], "--vconfR") == 0) { i++; vconfR = argv[i]; gotTwoPartOption = TRUE; } else if (strcmp(argv[i], "--cparaL") == 0) { i++; cparaL = argv[i]; gotTwoPartOption = TRUE; } else if (strcmp(argv[i], "--cparaR") == 0) { i++; cparaR = argv[i]; gotTwoPartOption = TRUE; } } if (!gotTwoPartOption) { // Look for single-part options. if (strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-help") == 0 || strcmp(argv[i], "-h") == 0) { usage(argv[0]); } else if (strcmp(argv[i], "--version") == 0 || strcmp(argv[i], "-version") == 0 || strcmp(argv[i], "-v") == 0) { ARLOG("%s version %s\n", argv[0], AR_HEADER_VERSION_STRING); exit(0); } else if( strncmp(argv[i], "-cornerx=", 9) == 0 ) { if( sscanf(&(argv[i][9]), "%d", &chessboardCornerNumX) != 1 ) usage(argv[0]); if( chessboardCornerNumX <= 0 ) usage(argv[0]); } else if( strncmp(argv[i], "-cornery=", 9) == 0 ) { if( sscanf(&(argv[i][9]), "%d", &chessboardCornerNumY) != 1 ) usage(argv[0]); if( chessboardCornerNumY <= 0 ) usage(argv[0]); } else if( strncmp(argv[i], "-imagenum=", 10) == 0 ) { if( sscanf(&(argv[i][10]), "%d", &calibImageNum) != 1 ) usage(argv[0]); if( calibImageNum <= 0 ) usage(argv[0]); } else if( strncmp(argv[i], "-pattwidth=", 11) == 0 ) { if( sscanf(&(argv[i][11]), "%f", &patternWidth) != 1 ) usage(argv[0]); if( patternWidth <= 0 ) usage(argv[0]); } else if( strncmp(argv[i], "-cparaL=", 8) == 0 ) { cparaL = &(argv[i][8]); } else if( strncmp(argv[i], "-cparaR=", 8) == 0 ) { cparaR = &(argv[i][8]); } else { ARLOGe("Error: invalid command line argument '%s'.\n", argv[i]); usage(argv[0]); } } i++; } if( chessboardCornerNumX == 0 ) chessboardCornerNumX = CHESSBOARD_CORNER_NUM_X; if( chessboardCornerNumY == 0 ) chessboardCornerNumY = CHESSBOARD_CORNER_NUM_Y; if( calibImageNum == 0 ) calibImageNum = CALIB_IMAGE_NUM; if( patternWidth == 0.0f ) patternWidth = (float)CHESSBOARD_PATTERN_WIDTH; if (!cparaL) cparaL = cparaLDefault; if (!cparaR) cparaR = cparaRDefault; ARLOG("CHESSBOARD_CORNER_NUM_X = %d\n", chessboardCornerNumX); ARLOG("CHESSBOARD_CORNER_NUM_Y = %d\n", chessboardCornerNumY); ARLOG("CHESSBOARD_PATTERN_WIDTH = %f\n", patternWidth); ARLOG("CALIB_IMAGE_NUM = %d\n", calibImageNum); ARLOG("Video parameter Left : %s\n", vconfL); ARLOG("Video parameter Right: %s\n", vconfR); ARLOG("Camera parameter Left : %s\n", cparaL); ARLOG("Camera parameter Right: %s\n", cparaR); if( (vidL=ar2VideoOpen(vconfL)) == NULL ) { ARLOGe("Cannot found the first camera.\n"); exit(0); } if( (vidR=ar2VideoOpen(vconfR)) == NULL ) { ARLOGe("Cannot found the second camera.\n"); exit(0); } if( ar2VideoGetSize(vidL, &xsizeL, &ysizeL) < 0 ) exit(0); if( ar2VideoGetSize(vidR, &xsizeR, &ysizeR) < 0 ) exit(0); if( (pixFormatL=ar2VideoGetPixelFormat(vidL)) < 0 ) exit(0); if( (pixFormatR=ar2VideoGetPixelFormat(vidR)) < 0 ) exit(0); ARLOG("Image size for the left camera = 
(%d,%d)\n", xsizeL, ysizeL); ARLOG("Image size for the right camera = (%d,%d)\n", xsizeR, ysizeR); if( arParamLoad(cparaL, 1, &wparam) < 0 ) { ARLOGe("Camera parameter load error !! %s\n", cparaL); exit(0); } arParamChangeSize( &wparam, xsizeL, ysizeL, ¶mL ); ARLOG("*** Camera Parameter for the left camera ***\n"); arParamDisp( ¶mL ); if( arParamLoad(cparaR, 1, &wparam) < 0 ) { ARLOGe("Camera parameter load error !! %s\n", cparaR); exit(0); } arParamChangeSize( &wparam, xsizeR, ysizeR, ¶mR ); ARLOG("*** Camera Parameter for the right camera ***\n"); arParamDisp( ¶mR ); screenWidth = glutGet(GLUT_SCREEN_WIDTH); screenHeight = glutGet(GLUT_SCREEN_HEIGHT); if (screenWidth > 0 && screenHeight > 0) { screenMargin = (int)(MAX(screenWidth, screenHeight) * SCREEN_SIZE_MARGIN); if ((screenWidth - screenMargin) < (xsizeL + xsizeR) || (screenHeight - screenMargin) < MAX(ysizeL, ysizeR)) { wscalef = (double)(screenWidth - screenMargin) / (double)(xsizeL + xsizeR); hscalef = (double)(screenHeight - screenMargin) / (double)MAX(ysizeL, ysizeR); scalef = MIN(wscalef, hscalef); ARLOG("Scaling %dx%d window by %0.3f to fit onto %dx%d screen (with %2.0f%% margin).\n", xsizeL + xsizeR, MAX(ysizeL, ysizeR), scalef, screenWidth, screenHeight, SCREEN_SIZE_MARGIN*100.0); } else { scalef = 1.0; } } else { scalef = 1.0; } /* open the graphics window */ if( argCreateWindow((int)((xsizeL + xsizeR)*scalef), (int)(MAX(ysizeL, ysizeR)*scalef)) < 0 ) { ARLOGe("Error: argCreateWindow.\n"); exit(0); } viewport.sx = 0; viewport.sy = 0; viewport.xsize = (int)(xsizeL*scalef); viewport.ysize = (int)(ysizeL*scalef); if( (vpL=argCreateViewport(&viewport)) == NULL ) { ARLOGe("Error: argCreateViewport.\n"); exit(0); } viewport.sx = (int)(xsizeL*scalef); viewport.sy = 0; viewport.xsize = (int)(xsizeR*scalef); viewport.ysize = (int)(ysizeR*scalef); if( (vpR=argCreateViewport(&viewport)) == NULL ) { ARLOGe("Error: argCreateViewport.\n"); exit(0); } argViewportSetPixFormat( vpL, pixFormatL ); argViewportSetPixFormat( vpR, pixFormatR ); argViewportSetCparam( vpL, ¶mL ); argViewportSetCparam( vpR, ¶mR ); argViewportSetDispMethod( vpL, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME ); argViewportSetDispMethod( vpR, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME ); argViewportSetDispMode(vpL, AR_GL_DISP_MODE_FIT_TO_VIEWPORT_KEEP_ASPECT_RATIO); argViewportSetDispMode(vpR, AR_GL_DISP_MODE_FIT_TO_VIEWPORT_KEEP_ASPECT_RATIO); calibImageL = cvCreateImage( cvSize(xsizeL, ysizeL), IPL_DEPTH_8U, 1); calibImageR = cvCreateImage( cvSize(xsizeR, ysizeR), IPL_DEPTH_8U, 1); arMalloc(cornersL, CvPoint2D32f, chessboardCornerNumX*chessboardCornerNumY); arMalloc(cornersR, CvPoint2D32f, chessboardCornerNumX*chessboardCornerNumY); arMalloc(worldCoord, ICP3DCoordT, chessboardCornerNumX*chessboardCornerNumY); for( i = 0; i < chessboardCornerNumX; i++ ) { for( j = 0; j < chessboardCornerNumY; j++ ) { worldCoord[i*chessboardCornerNumY+j].x = patternWidth*i; worldCoord[i*chessboardCornerNumY+j].y = patternWidth*j; worldCoord[i*chessboardCornerNumY+j].z = 0.0; } } arMalloc(calibData, ICPCalibDataT, calibImageNum); for( i = 0; i < calibImageNum; i++ ) { arMalloc(calibData[i].screenCoordL, ICP2DCoordT, chessboardCornerNumX*chessboardCornerNumY); arMalloc(calibData[i].screenCoordR, ICP2DCoordT, chessboardCornerNumX*chessboardCornerNumY); calibData[i].worldCoordL = worldCoord; calibData[i].worldCoordR = worldCoord; calibData[i].numL = chessboardCornerNumX*chessboardCornerNumY; calibData[i].numR = chessboardCornerNumX*chessboardCornerNumY; } return; }
static void mainLoop(void) { static AR2VideoBufferT buff = {0}; static int oldImageMode = -1; static int oldDispMode = -1; static int oldDistMode = -1; ARdouble patt_trans[3][4]; int i, j; if (!buff.buff) { arMalloc(buff.buff, ARUint8, xsize*ysize*PIXEL_SIZE); } if( oldImageMode != 0 && imageMode == 0 ) { for( i = 0; i < xsize*ysize; i++ ) { buff.buff[i*PIXEL_SIZE+0] = 200; buff.buff[i*PIXEL_SIZE+1] = 200; buff.buff[i*PIXEL_SIZE+2] = 200; } for( j = 190; j < 291; j++ ) { for( i = 280; i < 381; i++ ) { buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 20; buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 20; buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 20; } } i = 0; for( j = 0; j < ysize; j++ ) { buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255; buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0; buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0; } i = 639; for( j = 0; j < ysize; j++ ) { buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255; buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0; buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0; } j = 0; for( i = 0; i < xsize; i++ ) { buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255; buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0; buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0; } j = 479; for( i = 0; i < xsize; i++ ) { buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255; buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0; buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0; } oldImageMode = 0; } if( oldImageMode != 1 && imageMode == 1 ) { for( j = 0; j < 480; j += 2 ) { for( i = 0; i < 640; i++ ) { buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255; buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0; buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0; } } for( j = 1; j < 480; j += 2 ) { for( i = 0; i < 640; i++ ) { buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 0; buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0; buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 255; } } oldImageMode = 1; } if( oldImageMode != 2 && imageMode == 2 ) { for( i = 0; i < 640; i += 2 ) { for( j = 0; j < 480; j++ ) { buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 255; buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0; buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 0; } } for( i = 1; i < 640; i += 2 ) { for( j = 0; j < 480; j++ ) { buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 0; buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 0; buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 255; } } oldImageMode = 2; } if( oldImageMode != 3 && imageMode == 3 ) { for( i = 0; i < xsize*ysize; i++ ) { buff.buff[i*PIXEL_SIZE+0] = 200; buff.buff[i*PIXEL_SIZE+1] = 200; buff.buff[i*PIXEL_SIZE+2] = 200; } for( j = 190; j < 291; j++ ) { for( i = 280; i < 381; i++ ) { buff.buff[(j*xsize+i)*PIXEL_SIZE+0] = 20; buff.buff[(j*xsize+i)*PIXEL_SIZE+1] = 20; buff.buff[(j*xsize+i)*PIXEL_SIZE+2] = 20; } } oldImageMode = 3; } /* detect the markers in the video frame */ if (arDetectMarker(arHandle, &buff) < 0) { cleanup(); exit(0); } glClearColor( 0.0f, 0.0f, 0.0f, 0.0f ); glClearDepth( 1.0f ); glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT); if( oldDispMode != 0 && dispMode == 0 ) { argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_GL_DRAW_PIXELS ); oldDispMode = 0; debugReportMode(vp); } else if( oldDispMode != 1 && dispMode == 1 ) { argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME ); oldDispMode = 1; debugReportMode(vp); } else if( oldDispMode != 2 && dispMode == 2 ) { argViewportSetDispMethod( vp, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FIELD ); oldDispMode = 2; debugReportMode(vp); } if( oldDistMode != 0 && distMode == 0 ) { argViewportSetDistortionMode( vp, AR_GL_DISTORTION_COMPENSATE_DISABLE ); oldDistMode = 0; } if( oldDistMode != 1 && distMode == 1 ) { argViewportSetDistortionMode( vp, AR_GL_DISTORTION_COMPENSATE_ENABLE ); oldDistMode = 
1; } argDrawMode2D(vp); argDrawImage(buff.buff); if( imageMode == 3 ) { glLineWidth( 1.0f ); glColor3f( 0.0f, 1.0f, 0.0f ); argDrawSquareByIdealPos( arHandle->markerInfo[0].vertex ); glColor3f( 1.0f, 0.0f, 0.0f ); argDrawLineByIdealPos( 0.0, 0.0, 640.0, 0.0 ); argDrawLineByIdealPos( 0.0, 479.0, 640.0, 479.0 ); argDrawLineByIdealPos( 0.0, -1.0, 0.0, 479.0 ); argDrawLineByIdealPos( 639.0, -1.0, 639.0, 479.0 ); argDrawLineByIdealPos( 0.0, 188.0, 639.0, 188.0 ); argDrawLineByIdealPos( 0.0, 292.0, 639.0, 292.0 ); argDrawLineByIdealPos( 278.0, 0.0, 278.0, 479.0 ); argDrawLineByIdealPos( 382.0, 0.0, 382.0, 479.0 ); } if( arHandle->marker_num == 0 ) { argSwapBuffers(); return; } arGetTransMatSquare(ar3DHandle, &(arHandle->markerInfo[0]), SQUARE_WIDTH, patt_trans); draw(patt_trans); argSwapBuffers(); }
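/*
 * Hedged sketch (not the application's actual draw()) of what the draw() call above typically does: convert the
 * 3x4 marker transform returned by arGetTransMatSquare() into a column-major OpenGL modelview matrix and render
 * something at the marker.  The helper name drawAtMarker(), the cube, and the use of argDrawMode3D() for the
 * projection are illustrative assumptions.
 */
static void drawAtMarker( ARdouble patt_trans[3][4] )
{
    double m[16];
    int    r, c;

    for( c = 0; c < 4; c++ ) {          /* Column-major: m[c*4 + r] = patt_trans[r][c]. */
        for( r = 0; r < 3; r++ ) m[c*4 + r] = (double)patt_trans[r][c];
        m[c*4 + 3] = (c == 3) ? 1.0 : 0.0;
    }
    argDrawMode3D(vp);                  /* Projection from the viewport's camera parameters. */
    glMatrixMode(GL_MODELVIEW);
    glLoadMatrixd(m);
    glTranslatef(0.0f, 0.0f, 20.0f);    /* Lift the cube so it sits on the marker plane. */
    glutSolidCube(40.0);
}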