// FIXME: error handling static void opencv_exec(unsigned char* src, int src_size, unsigned char* path, int path_size, int w, int h, /*out*/ void** descriptor) { char ext[256]; // NULL terminated cstr int ext_pos = (sizeof(ext) < (path_size+1)) ? path_size+1 - sizeof(ext) : 0; memcpy(ext, path, path_size - ext_pos); ext[path_size - ext_pos] = 0; CvMat tmp = cvMat(16384, 16384, CV_8UC4, (void*)src); CvMat* src_img = cvDecodeImageM(&tmp, CV_LOAD_IMAGE_COLOR); CvMat dst_img = cvMat(h, w, src_img->type, NULL); cvCreateData(&dst_img); cvResize(src_img, &dst_img, CV_INTER_AREA); // FIXME: using CV_INTER_AREA only if got scaled down CvMat* enc_img = cvEncodeImage(ext, &dst_img, 0); //out: cvReleaseMat(&src_img); cvReleaseData(&dst_img); *descriptor = (void*)enc_img; }
// Make *this a deep copy of `another`, reallocating this image's pixel
// buffer only when it is absent or the dimensions differ.
// NOTE(review): the reallocation condition checks width/height only — a
// depth or nChannels mismatch at equal size does NOT trigger reallocation,
// so cvCopy would then operate on incompatible images; confirm callers
// never pass differently-typed images of the same size.
CvImage& copy( const CvImage& another )
{
    if( !imageData || width != another.width || height != another.height )
    {
        cvReleaseData( this );      // drop the old pixel buffer (if any)
        cvResetImageROI( this );    // clear stale ROI before re-initializing
        cvInitImageHeader( this, cvSize( another.width, another.height),
                           another.depth, another.nChannels,
                           another.origin, another.align );
        cvCreateImageData( this );  // allocate buffer for the new geometry
        if( another.roi )
            cvSetImageROI( this, cvGetImageROI( &another ));
    }
    cvCopy( (IplImage *)&another, this ); // copy pixel data (honours ROI)
    return *this;
}
int main(int argc, char *argv[], char *env[]) { IplImage *src; struct intern_bitmap *bm; int i; /* Argumente testen */ if (argc == 1) { printf("usage: \n\t%s <ein Bild>\n",argv[0]); return 1; } /* init OpenCV */ #ifdef DEBUG cvInitSystem(argc, argv); #endif for (i = 1; i < argc; i++ ) { /* Bild Lesen */ src = cvLoadImage(argv[i], CV_LOAD_IMAGE_GRAYSCALE); if (!src) { printf("Error: cvLoadImage()\n"); return 1; } /* das original Bild anzeigen */ #ifdef DEBUG cvNamedWindow("Demo Window", CV_WINDOW_AUTOSIZE); cvShowImage("Demo Window", src); cvWaitKey(-1); cvDestroyWindow("Demo Window"); #endif bm = preprocess(src); ocr_bestpassend(bm, ergebnis, ERGEBNIS_LAENGE); bm_release(bm); cvReleaseData(src); printf("Ergebnis: %s\n", ergebnis); } return 0; }
// Release an IplImage created with cvCreateImage: frees the pixel data,
// then the header, and NULLs the caller's pointer. A NULL *image is a
// no-op; a NULL `image` pointer itself raises CV_StsNullPtr.
CV_IMPL void
cvReleaseImage( IplImage ** image )
{
    CV_FUNCNAME( "cvReleaseImage" );

    __BEGIN__

    if( !image )
        CV_ERROR( CV_StsNullPtr, "" );

    if( *image )
    {
        IplImage* img = *image;
        *image = 0;                   // clear caller's pointer before freeing
                                      // (defends against re-entrant use)
        cvReleaseData( img );         // free the pixel buffer
        cvReleaseImageHeader( &img ); // free the header struct
    }

    __END__;
}
// deallocate the CvMat structure and underlying date CV_IMPL void cvReleaseMat( CvMat** array ) { CV_FUNCNAME( "cvReleaseMat" ); __BEGIN__; if( !array ) CV_ERROR_FROM_CODE( CV_HeaderIsNull ); if( *array ) { CvMat* arr = *array; if( !_CV_IS_ARR( arr )) CV_ERROR_FROM_CODE( !arr ? CV_StsNullPtr : CV_StsBadFlag ); *array = 0; CV_CALL( cvReleaseData( arr )); CV_CALL( cvReleaseMatHeader( &arr )); } __END__; }
// resize capture->frame appropriately depending on camera and capture settings static int icvResizeFrame(CvCaptureCAM_DC1394 * capture){ if(capture->convert){ // resize if sizes are different, formats are different // or conversion option has changed if(capture->camera->frame_width != capture->frame.width || capture->camera->frame_height != capture->frame.height || capture->frame.depth != 8 || capture->frame.nChannels != 3 || capture->frame.imageData == NULL || capture->buffer_is_writeable == 0) { if(capture->frame.imageData && capture->buffer_is_writeable){ cvReleaseData( &(capture->frame)); } cvInitImageHeader( &capture->frame, cvSize( capture->camera->frame_width, capture->camera->frame_height ), IPL_DEPTH_8U, 3, IPL_ORIGIN_TL, 4 ); cvCreateData( &(capture->frame) ); capture->buffer_is_writeable = 1; } } else { // free image data if allocated by opencv if(capture->buffer_is_writeable){ cvReleaseData(&(capture->frame)); } // figure out number of channels and bpp int bpp = 8; int nch = 3; int width = capture->camera->frame_width; int height = capture->camera->frame_height; double code = CV_FOURCC('B','G','R',0); switch(capture->color_mode){ case COLOR_FORMAT7_YUV422: nch = 2; code = CV_FOURCC('Y','4','2','2'); break; case COLOR_FORMAT7_MONO8: code = CV_FOURCC('Y',0,0,0); nch = 1; break; case COLOR_FORMAT7_YUV411: code = CV_FOURCC('Y','4','1','1'); width *= 2; nch = 3; //yy[u/v] break; case COLOR_FORMAT7_YUV444: code = CV_FOURCC('Y','U','V',0); nch = 3; break; case COLOR_FORMAT7_MONO16: code = CV_FOURCC('Y',0,0,0); bpp = IPL_DEPTH_16S; nch = 1; break; case COLOR_FORMAT7_RGB16: bpp = IPL_DEPTH_16S; nch = 3; break; default: break; } // reset image header cvInitImageHeader( &capture->frame,cvSize( width, height ), bpp, nch, IPL_ORIGIN_TL, 4 ); //assert(capture->frame.imageSize == capture->camera->quadlets_per_frame*4); capture->buffer_is_writeable = 0; } return 1; }
void processImagePair(const char *file1, const char *file2, CvVideoWriter *out, struct CvMat *currentOrientation) { // Load two images and allocate other structures IplImage* imgA = cvLoadImage(file1, CV_LOAD_IMAGE_GRAYSCALE); IplImage* imgB = cvLoadImage(file2, CV_LOAD_IMAGE_GRAYSCALE); IplImage* imgBcolor = cvLoadImage(file2); CvSize img_sz = cvGetSize( imgA ); int win_size = 15; // Get the features for tracking IplImage* eig_image = cvCreateImage( img_sz, IPL_DEPTH_32F, 1 ); IplImage* tmp_image = cvCreateImage( img_sz, IPL_DEPTH_32F, 1 ); int corner_count = MAX_CORNERS; CvPoint2D32f* cornersA = new CvPoint2D32f[ MAX_CORNERS ]; cvGoodFeaturesToTrack( imgA, eig_image, tmp_image, cornersA, &corner_count, 0.05, 3.0, 0, 3, 0, 0.04 ); fprintf(stderr, "%s: Corner count = %d\n", file1, corner_count); cvFindCornerSubPix( imgA, cornersA, corner_count, cvSize( win_size, win_size ), cvSize( -1, -1 ), cvTermCriteria( CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 50, 0.03 ) ); // Call Lucas Kanade algorithm char features_found[ MAX_CORNERS ]; float feature_errors[ MAX_CORNERS ]; CvSize pyr_sz = cvSize( imgA->width+8, imgB->height/3 ); IplImage* pyrA = cvCreateImage( pyr_sz, IPL_DEPTH_32F, 1 ); IplImage* pyrB = cvCreateImage( pyr_sz, IPL_DEPTH_32F, 1 ); CvPoint2D32f* cornersB = new CvPoint2D32f[ MAX_CORNERS ]; calcNecessaryImageRotation(imgA); cvCalcOpticalFlowPyrLK( imgA, imgB, pyrA, pyrB, cornersA, cornersB, corner_count, cvSize( win_size, win_size ), 5, features_found, feature_errors, cvTermCriteria( CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.3 ), 0 ); CvMat *transform = cvCreateMat(3,3, CV_32FC1); CvMat *invTransform = cvCreateMat(3,3, CV_32FC1); // Find a homography based on the gradient CvMat cornersAMat = cvMat(1, corner_count, CV_32FC2, cornersA); CvMat cornersBMat = cvMat(1, corner_count, CV_32FC2, cornersB); cvFindHomography(&cornersAMat, &cornersBMat, transform, CV_RANSAC, 15, NULL); cvInvert(transform, invTransform); cvMatMul(currentOrientation, invTransform, 
currentOrientation); // save the translated image IplImage* trans_image = cvCloneImage(imgBcolor); cvWarpPerspective(imgBcolor, trans_image, currentOrientation, CV_INTER_CUBIC+CV_WARP_FILL_OUTLIERS); printf("%s:\n", file1); PrintMat(currentOrientation); // cvSaveImage(out, trans_image); cvWriteFrame(out, trans_image); cvReleaseImage(&eig_image); cvReleaseImage(&tmp_image); cvReleaseImage(&trans_image); cvReleaseImage(&imgA); cvReleaseImage(&imgB); cvReleaseImage(&imgBcolor); cvReleaseImage(&pyrA); cvReleaseImage(&pyrB); cvReleaseData(transform); delete [] cornersA; delete [] cornersB; }
int main(void){ IplImage *theFrame, *theFrame2, *theFrame3; CvCapture *theCam=cvCreateCameraCapture(-1); char theChar; CvSize size; CvPoint theCentroid; CvSeq* theContour; ttModels theModels; ttInit(&theModels); if(!theCam) { printf("\nCamera not found\n"); return(0); } /*theFrame=cvLoadImage("colormap.png",1); theFrame2=cvLoadImage("colormap.png",1);//*/ size=cvSize(cvGetCaptureProperty(theCam,CV_CAP_PROP_FRAME_WIDTH),cvGetCaptureProperty(theCam,CV_CAP_PROP_FRAME_HEIGHT)); theFrame2=cvCreateImage(size,IPL_DEPTH_8U,1); theFrame3=cvCreateImage(size,IPL_DEPTH_8U,3); cvNamedWindow("win1",1); cvNamedWindow("win2",1); cvMoveWindow("win1",0,0); cvMoveWindow("win2",700,0); /*cvNamedWindow("H",0); cvNamedWindow("S",0); cvNamedWindow("V",0); cvMoveWindow("H",0,500); cvMoveWindow("S",350,500); cvMoveWindow("V",700,500);//*/ while ((theChar=cvWaitKey(20))!=0x1B){ theFrame=cvQueryFrame(theCam); cvCopy(theFrame,theFrame3,NULL); cvZero(theFrame2); ttSegmenter(theFrame,theFrame2,1); ttImprover(theFrame2,theFrame2); //printf("nchannels %d\n",theFrame2->nChannels); //printf("chingao!\n"); cvShowImage("win2",theFrame2); theContour=ttContours(theFrame2,2,&theModels,NULL); //cvZero(out); if (theContour==NULL) continue; theCentroid=ttFindCentroid(theContour); cvCircle(theFrame3,theCentroid,1,CV_RGB(255,255,255),1,8,0); cvCircle(theFrame3,theCentroid,6,CV_RGB(255,0,0),1,8,0); //cvCircle(theFrame3,theCentroid,11,CV_RGB(255,255,255),1,8,0); //cvCircle(theFrame3,theCentroid,16,CV_RGB(255,0,0),1,8,0); //cvCircle(theFrame3,ttFindCentroid(theContour),15,CV_RGB(255,255,255),1,8,0); cvDrawContours(theFrame3,theContour,CV_RGB(255,255,255),CV_RGB(255,255,255),1,5,8,cvPoint(0,0)); cvShowImage("win1",theFrame3);//*/ } cvDestroyAllWindows(); cvReleaseData(theFrame); cvReleaseData(theFrame2); cvReleaseData(theFrame3); cvReleaseCapture(&theCam); return(0); }