bool videoAndroidNativeCaptureClose(VIDEO_ANDROID_NATIVE_CAPTURE **nc_p)
{
    ARLOGd("videoAndroidNativeCaptureClose().\n");

    if (!nc_p || !*nc_p) return (false); // Sanity check.

    if ((*nc_p)->frameBuffers[0] || (*nc_p)->frameBuffers[1]) {
        ARLOGw("Warning: videoAndroidNativeCaptureClose called without call to videoAndroidNativeCaptureStop.\n");
        videoAndroidNativeCaptureStop(*nc_p);
    }

    pthread_mutex_destroy(&((*nc_p)->frameLock));
    pthread_cond_destroy(&((*nc_p)->frameReadyNotifierThreadCondGo));

    if ((*nc_p)->ca) {
        // ca->disconnect() will be called automatically inside the destructor.
        delete((*nc_p)->ca);
        (*nc_p)->ca = NULL;
    }

    free(*nc_p);
    *nc_p = NULL;

    ARLOGd("/videoAndroidNativeCaptureClose.\n");
    return (true);
}
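/*
    Usage sketch (hypothetical caller, not part of this file). A typical
    shutdown path stops capture first, then closes; as shown above, close
    will call stop itself (with a warning) if frame buffers are still
    allocated:

        VIDEO_ANDROID_NATIVE_CAPTURE *nc = ...; // Previously opened capture context.
        videoAndroidNativeCaptureStop(nc);
        if (!videoAndroidNativeCaptureClose(&nc)) {
            ARLOGe("Error closing native capture.\n");
        }
        // nc is now NULL.
*/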
static int setupCamera(const char *cparam_name, char *vconf, ARParamLT **cparamLT_p)
{
    ARParam         cparam;
    int             xsize, ysize;
    AR_PIXEL_FORMAT pixFormat;

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        ARLOGe("setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }

    // Find the size of the window.
    if (arVideoGetSize(&xsize, &ysize) < 0) {
        ARLOGe("setupCamera(): Unable to determine camera frame size.\n");
        arVideoClose();
        return (FALSE);
    }
    ARLOGi("Camera image size (x,y) = (%d,%d)\n", xsize, ysize);

    // Get the format in which the camera is returning pixels.
    pixFormat = arVideoGetPixelFormat();
    if (pixFormat == AR_PIXEL_FORMAT_INVALID) {
        ARLOGe("setupCamera(): Camera is using unsupported pixel format.\n");
        arVideoClose();
        return (FALSE);
    }

    // Load the camera parameters, resize for the window and init.
    if (arParamLoad(cparam_name, 1, &cparam) < 0) {
        ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
        arVideoClose();
        return (FALSE);
    }
    if (cparam.xsize != xsize || cparam.ysize != ysize) {
        ARLOGw("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
        arParamChangeSize(&cparam, xsize, ysize, &cparam);
    }
#ifdef DEBUG
    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&cparam);
#endif
    if ((*cparamLT_p = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
        ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
        arVideoClose();
        return (FALSE);
    }

    return (TRUE);
}
static int setupCamera(const char *cparam_name, char *vconf, ARParamLT **cparamLT_p, ARHandle **arhandle, AR3DHandle **ar3dhandle)
{
    ARParam         cparam;
    int             xsize, ysize;
    AR_PIXEL_FORMAT pixFormat;

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        ARLOGe("setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }

    // Find the size of the window.
    if (arVideoGetSize(&xsize, &ysize) < 0) {
        ARLOGe("setupCamera(): Unable to determine camera frame size.\n");
        arVideoClose();
        return (FALSE);
    }
    ARLOGi("Camera image size (x,y) = (%d,%d)\n", xsize, ysize);

    // Get the format in which the camera is returning pixels.
    pixFormat = arVideoGetPixelFormat();
    if (pixFormat == AR_PIXEL_FORMAT_INVALID) {
        ARLOGe("setupCamera(): Camera is using unsupported pixel format.\n");
        arVideoClose();
        return (FALSE);
    }

    // Load the camera parameters, resize for the window and init.
    if (arParamLoad(cparam_name, 1, &cparam) < 0) {
        ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
        arVideoClose();
        return (FALSE);
    }
    if (cparam.xsize != xsize || cparam.ysize != ysize) {
        ARLOGw("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
        arParamChangeSize(&cparam, xsize, ysize, &cparam);
    }
#ifdef DEBUG
    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&cparam);
#endif
    if ((*cparamLT_p = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
        ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
        arVideoClose();
        return (FALSE);
    }

    if ((*arhandle = arCreateHandle(*cparamLT_p)) == NULL) {
        ARLOGe("setupCamera(): Error: arCreateHandle.\n");
        return (FALSE);
    }
    if (arSetPixelFormat(*arhandle, pixFormat) < 0) {
        ARLOGe("setupCamera(): Error: arSetPixelFormat.\n");
        return (FALSE);
    }
    if (arSetDebugMode(*arhandle, AR_DEBUG_DISABLE) < 0) {
        ARLOGe("setupCamera(): Error: arSetDebugMode.\n");
        return (FALSE);
    }
    if (arSetImageProcMode(*arhandle, AR_IMAGE_PROC_FRAME_IMAGE) < 0) { // Change to AR_IMAGE_PROC_FIELD_IMAGE if using a DVCam.
        ARLOGe("setupCamera(): Error: arSetImageProcMode.\n");
        return (FALSE);
    }

    if ((*ar3dhandle = ar3DCreateHandle(&(*cparamLT_p)->param)) == NULL) {
        ARLOGe("setupCamera(): Error: ar3DCreateHandle.\n");
        return (FALSE);
    }

    if (arVideoCapStart() != 0) {
        ARLOGe("setupCamera(): Unable to begin camera data capture.\n");
        return (FALSE);
    }

    return (TRUE);
}
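/*
    Usage sketch (hypothetical caller). The camera parameter path and the
    empty video configuration string are placeholders, not values defined in
    this source. The extended setupCamera() variant above leaves the tracking
    and 3D handles initialised and capture started:

        static ARParamLT  *gCparamLT   = NULL;
        static ARHandle   *gARHandle   = NULL;
        static AR3DHandle *gAR3DHandle = NULL;

        if (!setupCamera("Data/camera_para.dat", "", &gCparamLT, &gARHandle, &gAR3DHandle)) {
            ARLOGe("main(): Unable to set up AR camera.\n");
            exit(-1);
        }
*/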
AR2ImageSetT *ar2ReadImageSet( char *filename )
{
    FILE          *fp;
    AR2JpegImageT *jpgImage;
    AR2ImageSetT  *imageSet;
    float          dpi;
    int            i, k1;
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    int            j, k2;
    ARUint8       *p1, *p2;
#endif
    size_t         len;
    const char     ext[] = ".iset";
    char          *buf;

    len = strlen(filename) + strlen(ext) + 1; // +1 for nul terminator.
    arMalloc(buf, char, len);
    sprintf(buf, "%s%s", filename, ext);
    fp = fopen(buf, "rb");
    free(buf);
    if (!fp) {
        ARLOGe("Error: unable to open file '%s%s' for reading.\n", filename, ext);
        return (NULL);
    }

    arMalloc( imageSet, AR2ImageSetT, 1 );

    if( fread(&(imageSet->num), sizeof(imageSet->num), 1, fp) != 1 || imageSet->num <= 0) {
        ARLOGe("Error reading imageSet.\n");
        goto bail;
    }
    ARLOGi("Imageset contains %d images.\n", imageSet->num);

    arMalloc( imageSet->scale, AR2ImageT*, imageSet->num );
    arMalloc( imageSet->scale[0], AR2ImageT, 1 );

    jpgImage = ar2ReadJpegImage2(fp); // Caller must free result.
    if( jpgImage == NULL || jpgImage->nc != 1 ) {
        ARLOGw("Falling back to reading '%s%s' in ARToolKit v4.x format.\n", filename, ext);
        free(imageSet->scale[0]);
        free(imageSet->scale);
        free(imageSet);
        if( jpgImage == NULL ) {
            rewind(fp);
            return ar2ReadImageSetOld(fp);
        }
        free(jpgImage); //COVHI10396
        fclose(fp);
        return NULL;
    }

    imageSet->scale[0]->xsize = jpgImage->xsize;
    imageSet->scale[0]->ysize = jpgImage->ysize;
    imageSet->scale[0]->dpi   = jpgImage->dpi; // The dpi value is not read correctly by jpeglib embedded in OpenCV 2.2.x.

#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    imageSet->scale[0]->imgBWBlur[0] = jpgImage->image;

    // Create the blurred images: copy the unblurred image, then defocus the copy in place.
    for( j = 1; j < AR2_BLUR_IMAGE_MAX; j++ ) {
        arMalloc( imageSet->scale[0]->imgBWBlur[j], ARUint8, imageSet->scale[0]->xsize * imageSet->scale[0]->ysize);
        p1 = imageSet->scale[0]->imgBWBlur[0];
        p2 = imageSet->scale[0]->imgBWBlur[j];
        for( k1 = 0; k1 < imageSet->scale[0]->xsize * imageSet->scale[0]->ysize; k1++ ) *(p2++) = *(p1++);
        defocus_image( imageSet->scale[0]->imgBWBlur[j], imageSet->scale[0]->xsize, imageSet->scale[0]->ysize, 3 );
    }
#else
    imageSet->scale[0]->imgBW = jpgImage->image;
#endif

    free(jpgImage);

    // Minify for the other scales.
    // First, find the list of scales we wrote into the file.
    fseek(fp, (long)(-(int)sizeof(dpi)*(imageSet->num - 1)), SEEK_END);
    for( i = 1; i < imageSet->num; i++ ) {
        if( fread(&dpi, sizeof(dpi), 1, fp) != 1 ) {
            for( k1 = 0; k1 < i; k1++ ) {
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
                for( k2 = 0; k2 < AR2_BLUR_IMAGE_MAX; k2++ ) free(imageSet->scale[k1]->imgBWBlur[k2]);
#else
                free(imageSet->scale[k1]->imgBW);
#endif
                free(imageSet->scale[k1]);
            }
            goto bail1;
        }
        imageSet->scale[i] = ar2GenImageLayer2( imageSet->scale[0], dpi );
        if( imageSet->scale[i] == NULL ) {
            for( k1 = 0; k1 < i; k1++ ) {
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
                for( k2 = 0; k2 < AR2_BLUR_IMAGE_MAX; k2++ ) free(imageSet->scale[k1]->imgBWBlur[k2]);
#else
                free(imageSet->scale[k1]->imgBW);
#endif
                free(imageSet->scale[k1]);
            }
            goto bail1;
        }
    }

    fclose(fp);

    return imageSet;

bail1:
    free(imageSet->scale);
bail:
    free(imageSet);
    fclose(fp);
    return NULL;
}
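/*
    Usage sketch (hypothetical caller). The basename below is a placeholder,
    and pairing with ar2FreeImageSet() as the matching release call is an
    assumption about the surrounding AR2 API rather than something defined in
    this file:

        AR2ImageSetT *imageSet = ar2ReadImageSet("Data/pinball"); // Reads "Data/pinball.iset".
        if (!imageSet) {
            ARLOGe("Error reading image set.\n");
        } else {
            // ... use imageSet->num and imageSet->scale[i] ...
            ar2FreeImageSet(&imageSet);
        }
*/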