Example #1
// --------------------------------------------------------------------------
void BlobModel::UpdateHeatMap(IplImage* motionmap) {
	// for each head candidate draw a circle on the floor; add it to the heatmap image
	// NOTE: due to scaling, circle becomes an ellipse
	double BLOB_RADIUS = 50; // average radius of a blob's projection on the floor, in centimeters
	bool oneclose = false;
	for (int i=0;i<blob.GetCount();i++) {
		CvSeq* heads = doc->blobmodel.blob[i]->heads;
		for (int j=0;j<heads->total;j++) {
			BlobRay* br = (BlobRay*)cvGetSeqElem(heads, j);
			CvPoint3D32f head, foot;
			doc->cameramodel.coordsImage2RealSameXY_Feet2Floor(cvPointTo32f(br->p1), cvPointTo32f(br->p2), &head, &foot);
			// ignore short candidates
			if (head.z < doc->bodymodel.m_minHeight)
				continue;
		// ignore repeated nearby candidates (likely artifacts)
			if (oneclose)
				continue;
			if (d(foot) < BLOB_RADIUS*2)
				oneclose = true;

			CvPoint c = doc->floormodel.coordsReal2Floor(foot);
			CvSize axes = doc->floormodel.sizeReal2Floor(cvSize2D32f(BLOB_RADIUS, BLOB_RADIUS)); 
			cvZero(motionmaptemp);
			cvEllipse(motionmaptemp, c, axes, 0, 0, 360, cvScalar(1), CV_FILLED);
			cvAcc(motionmaptemp, motionmap);
		}
	}
}
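The accumulation above depends on members of the surrounding class (motionmaptemp, d(), the camera and floor models), so it cannot run on its own. Below is a minimal, self-contained sketch of just the accumulation pattern: draw each candidate as a filled ellipse into a scratch image, then add it into a 32-bit float heat map with cvAcc. All sizes and positions are assumed for illustration.

// Sketch of the heat-map accumulation pattern (sizes/positions are made up for illustration).
CvSize floorSize = cvSize(320, 240);
IplImage* heatmap = cvCreateImage(floorSize, IPL_DEPTH_32F, 1);  // running sum
IplImage* scratch = cvCreateImage(floorSize, IPL_DEPTH_8U, 1);   // per-candidate mask
cvZero(heatmap);

CvPoint c    = cvPoint(160, 120);   // candidate position on the floor grid (assumed)
CvSize  axes = cvSize(25, 18);      // per-axis radius after floor scaling (assumed)

cvZero(scratch);
cvEllipse(scratch, c, axes, 0, 0, 360, cvScalar(1), CV_FILLED);
cvAcc(scratch, heatmap);            // heatmap += scratch, so overlapping detections pile up

cvReleaseImage(&scratch);
cvReleaseImage(&heatmap);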
static char triangleInRectangleTest(CvSeq* c, struct Triangle* t) {
    // the triangle lies inside the contour only if all three of its vertices are inside;
    // the original nested-if version fell off the end without a return value in some paths
    if( cvPointPolygonTest( c, cvPointTo32f( t->pt[0] ), 0 ) > 0 &&
        cvPointPolygonTest( c, cvPointTo32f( t->pt[1] ), 0 ) > 0 &&
        cvPointPolygonTest( c, cvPointTo32f( t->pt[2] ), 0 ) > 0 )
        return 1;
    return 0;
}
Example #3
// ------------------------------------------------------------------------
void FloorModel::GetMinMaxXY(CvPoint2D32f& cammin, CvPoint2D32f& cammax) {
	if (m_extreme.GetCount() == 0)
		return;
	// project each extreme image point (the image corners) onto the floor and track the min/max
	CvPoint3D32f p = doc->cameramodel.coordsImage2Real(cvPointTo32f(m_extreme[0]), 0);
	cammin.x = p.x; cammin.y = p.y;	cammax.x = p.x; cammax.y = p.y;
	for (int i=1;i<m_extreme.GetCount();i++) {
		p = doc->cameramodel.coordsImage2Real(cvPointTo32f(m_extreme[i]), 0);
		cammin.x = min(p.x, cammin.x); cammin.y = min(p.y, cammin.y);
		cammax.x = max(p.x, cammax.x); cammax.y = max(p.y, cammax.y);
	}
}
void CornerPointMatchTwoViewPointTracker::compute(const IplImage* im1, 
												  const IplImage* im2, 
												  std::vector<CvPoint2D32f>& x1s, 
												  std::vector<CvPoint2D32f>& x2s) 
{
	vector<CvPoint> tx1s, tx2s;
	compute(im1, im2, tx1s, tx2s);
	size_t size = corners1.size();
	x1s.resize(size);
	x2s.resize(size);
	for (size_t i = 0; i < size; i++) {
		x1s[i] = cvPointTo32f(corners1[i]);
		x2s[i] = cvPointTo32f(corners2[i]);
	}
}
int main( int argc, char* argv[] ) {

    // Choose a negative floating point number.  Take its absolute value,
    // round it, and then take its ceiling and floor.
    double a = -1.23;
    printf( "CV_IABS(a) = %d\n", CV_IABS(a) );
    printf( "cvRound(a) = %d\n", cvRound(a) );
    printf( "cvCeil(a) = %d\n", cvCeil(a) );
    printf( "cvFloor(a) = %d\n", cvFloor(a) );


    // Generate some random numbers.
    CvRNG rngState = cvRNG(-1);
    for (int i = 0; i < 10; i++) {
        printf( "%u %f\n", cvRandInt( &rngState ),
                           cvRandReal( &rngState ) );
    }

    // Create a floating point CvPoint2D32f and convert it to an integer
    // CvPoint.
    CvPoint2D32f point_float1 = cvPoint2D32f(1.0, 2.0);
    CvPoint point_int1 = cvPointFrom32f( point_float1 );

    // Convert a CvPoint to a CvPoint2D32f.
    CvPoint point_int2 = cvPoint(3, 4);
    CvPoint2D32f point_float2 = cvPointTo32f( point_int2 );

}
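For reference, the two conversion helpers exercised in this example are tiny inline functions in OpenCV's C API. The sketch below shows their approximate behavior (a plain float cast one way, cvRound the other); the functions are renamed for illustration and this is not the verbatim library source.

// Approximate behavior of cvPointTo32f / cvPointFrom32f (illustrative, not library source).
CvPoint2D32f myPointTo32f( CvPoint p )
{
    return cvPoint2D32f( (float)p.x, (float)p.y );  // widen to float; exact for typical image coordinates
}

CvPoint myPointFrom32f( CvPoint2D32f p )
{
    CvPoint ip;
    ip.x = cvRound( p.x );                          // round to the nearest integer
    ip.y = cvRound( p.y );
    return ip;
}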
Example #6
CvScalar contour_average(CvContour* contour, IplImage* image)
{
    CvRect rect = contour->rect;
    
    CvScalar average = cvScalarAll(0);
    int count = 0;
    for(int x = rect.x; x < (rect.x+rect.width); x++) {
        for(int y = rect.y; y < (rect.y+rect.height); y++) {
            // with measure_dist == 0 the test returns +100 (inside), -100 (outside) or 0 (on the edge)
            if(cvPointPolygonTest(contour, cvPointTo32f(cvPoint(x,y)), 0) > 0) {
                CvScalar s = cvGet2D(image,y,x);
                average.val[0] += s.val[0];
                average.val[1] += s.val[1];
                average.val[2] += s.val[2];
                
                count++;
            }
        }
    }
    
    // avoid dividing by zero when the contour encloses no pixels
    if (count > 0) {
        for(int i = 0; i < 3; i++){
            average.val[i] /= count;
        }
    }
    
    return average;
}
void MatchTemplatePlugin::ProcessStatic
( int i, ImagePlus *img, ImagePlus *oimg,
 int method, CvSize winsize, IplImage* &map){
	CvRect orect = cvBoundingRect(oimg->contourArray[i],1);
	RestrictRectLoc(orect, cvRect(0,0,img->orig->width,img->orig->height));
	cvSetImageROI(oimg->orig, orect);
	CvRect rect = cvRect(MAX(0,orect.x-winsize.width), MAX(0,orect.y-winsize.height),orect.width+2*winsize.width, orect.height+2*winsize.height);
	rect.width = MIN(rect.width,oimg->orig->width-rect.x);
	rect.height = MIN(rect.height,oimg->orig->height-rect.y);
	cvSetImageROI(img->orig, rect);

	CvSize mapsize = MyPoint(MyPoint(rect)-MyPoint(orect)+wxPoint(1,1)).ToCvSize();
	if (map && MyPoint(cvGetSize(map))!=MyPoint(mapsize))
		cvReleaseImage(&map);
	if( !map )
        map = cvCreateImage(mapsize, IPL_DEPTH_32F, 1);

	cvMatchTemplate( img->orig, oimg->orig, map, method );
	cvResetImageROI(img->orig);
	cvResetImageROI(oimg->orig);
	CvPoint minloc;
	CvPoint maxloc;
	double minval, maxval;
	cvMinMaxLoc( map, &minval, &maxval, &minloc, &maxloc);
	bool minisbest = (method == CV_TM_SQDIFF || method==CV_TM_SQDIFF_NORMED);
	rect.x = rect.x + (minisbest ? minloc.x : maxloc.x);
	rect.y = rect.y + (minisbest ? minloc.y : maxloc.y);

	CvPoint shift = cvPoint(rect.x - orect.x, rect.y - orect.y);
	ShiftContour(oimg->contourArray[i],img->contourArray[i],shift);
	ShiftFeatPoints(oimg->feats[i], img->feats[i], cvPointTo32f(shift));
}
Example #8
    int InitNextImage(IplImage* img)
    {
        CvSize sz = cvSize(img->width, img->height);
        ReallocImage(&imgGray, sz, 1);
        ReallocImage(&imgThresh, sz, 1);
        ptRotate = face[MOUTH].ptCenter;
        float m[6];
        CvMat mat = cvMat( 2, 3, CV_32FC1, m );

        if (NULL == imgGray || NULL == imgThresh)
            return 0;

        /*m[0] = (float)cos(-dbRotateAngle*CV_PI/180.);
        m[1] = (float)sin(-dbRotateAngle*CV_PI/180.);
        m[2] = (float)ptRotate.x;
        m[3] = -m[1];
        m[4] = m[0];
        m[5] = (float)ptRotate.y;*/
        cv2DRotationMatrix( cvPointTo32f(ptRotate), -dbRotateAngle, 1., &mat );
        cvWarpAffine( img, imgGray, &mat );

        if (NULL == mstgContours)
            mstgContours = cvCreateMemStorage();
        else
            cvClearMemStorage(mstgContours);
        if (NULL == mstgContours)
            return 0;
        return 1;
    }
/*!
    \fn CvFaceSegment::rotate(IplImage *img)
 */
IplImage* CvFaceSegment::rotate(IplImage *img)
{
  int xl = lefteye.x;
  int yl = lefteye.y;
  int xr = righteye.x;
  int yr = righteye.y;
  
  double angle = atan((double)(yr-yl)/(double)(xr-xl));
  angle = 180*angle/CV_PI;
  
  double distance = sqrt((double)(pow((xl-xr),2)+pow((yl-yr),2)));
  int dis = (int)round(distance);
  
  CvMat* map_matrix = cvCreateMat(2,3,CV_32FC1);
  cv2DRotationMatrix( cvPointTo32f( righteye ), angle, 1.0, map_matrix);
  
  IplImage* newimg = cvCreateImage( cvGetSize(img), IPL_DEPTH_8U, 3 );
  cvWarpAffine( img, newimg, map_matrix, CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS, cvScalarAll(0) );
  
  lefteye.y = righteye.y;
  lefteye.x = righteye.x+dis;
  cvReleaseMat( &map_matrix );
  
  /*
  cvCircle( newimg, righteye, 4, cvScalar(0, 255, 0, 0), 1, 8, 0 );
  cvCircle( newimg, lefteye, 4, cvScalar(0, 255, 0, 0), 1, 8, 0 );
  cvNamedWindow("Scale", CV_WINDOW_AUTOSIZE);
  cvShowImage("Scale", newimg);
  cvWaitKey(0);
  */
  return newimg;
}
Example #10
IplImage *square_puzzle(IplImage *in, const CvPoint2D32f *location) {
    int xsize = location[1].x - location[0].x;
    int ysize = xsize;

    CvPoint2D32f warped_coordinates[4];
    warped_coordinates[0] = cvPointTo32f(cvPoint(0,       0));
    warped_coordinates[1] = cvPointTo32f(cvPoint(xsize-1, 0));
    warped_coordinates[2] = cvPointTo32f(cvPoint(xsize-1, ysize-1));
    warped_coordinates[3] = cvPointTo32f(cvPoint(0,       ysize-1));

    CvMat *map_matrix = cvCreateMat(3, 3, CV_64FC1);
    cvGetPerspectiveTransform(location, warped_coordinates, map_matrix);

    IplImage *warped_image = cvCreateImage(cvSize(xsize, ysize), 8, in->nChannels);
    CvScalar fillval=cvScalarAll(0);
    cvWarpPerspective(in, warped_image, map_matrix, CV_WARP_FILL_OUTLIERS, fillval);

    return warped_image;
}
asm_shape CWrapper::main_fit(IplImage * image) {
	asm_shape retval, detshape;

	double t = (double) cvGetTickCount();

	detshape.Resize(2);

	detshape[0] = cvPointTo32f(cvPoint(0, 0));
	detshape[1] = cvPointTo32f(cvPoint(image->width, image->height));

	InitShapeFromDetBox(retval, detshape, fit_asm.GetMappingDetShape(),
		fit_asm.GetMeanFaceWidth());

	fit_asm.Fitting(retval, image);

	t = ((double) cvGetTickCount() - t) / (cvGetTickFrequency()*1000.);
	printf("ASM fitting time cost: %.2f millisec\n", t);

	return retval;
}
Example #12
// A function to calculate the transformation matrix used for perspective transformation
void calculateTransformationMatrix( BoundingBox* from, BoundingBox* to, CvMat* transMat ) {
    CvPoint2D32f from_arr[] = { 
        cvPointTo32f( from->topLeft ),
        cvPointTo32f( from->topRight ),
        cvPointTo32f( from->bottomRight ),
        cvPointTo32f( from->bottomLeft )
    };
    CvPoint2D32f to_arr[] = {
        cvPointTo32f( to->topLeft ),
        cvPointTo32f( to->topRight ),
        cvPointTo32f( to->bottomRight ),
        cvPointTo32f( to->bottomLeft )
    };
    cvGetPerspectiveTransform( from_arr, to_arr, transMat );
}
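One possible way to use the helper above: fill the two boxes, compute the 3x3 matrix, and pass it to cvWarpPerspective. The corner values and the src/dst images below are assumptions for illustration, not part of the original code.

// Hypothetical usage sketch; corner values and the src/dst images are assumed, not from the original code.
BoundingBox from, to;
from.topLeft = cvPoint(10, 10);       from.topRight = cvPoint(310, 20);
from.bottomRight = cvPoint(300, 250); from.bottomLeft = cvPoint(5, 240);
to.topLeft = cvPoint(0, 0);           to.topRight = cvPoint(320, 0);
to.bottomRight = cvPoint(320, 240);   to.bottomLeft = cvPoint(0, 240);

CvMat* transMat = cvCreateMat(3, 3, CV_32FC1);
calculateTransformationMatrix(&from, &to, transMat);

// warp the source image into the rectified view
cvWarpPerspective(src, dst, transMat, CV_INTER_LINEAR + CV_WARP_FILL_OUTLIERS, cvScalarAll(0));
cvReleaseMat(&transMat);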
Example #13
// Takes a contour, computes its minimum-area bounding rectangle minRect, then rotates the source image about the rectangle's center by minRect.angle degrees to obtain a deskewed image.
// http://blog.csdn.net/include1224/article/details/4384855
CvBox2D RegionRotate(IplImage *src, IplImage *dst, CvSeq *contour) {
	//dst is obtained by cloning src (e.g. cvCloneImage(src))
	CvMat *mat_contour = cvCreateMat(1, contour->total, CV_32FC2); 	//two channels (x, y)
	CvPoint2D32f *ptr_mat = (CvPoint2D32f*) (mat_contour->data.ptr);
	for (int i = 0; i != contour->total; ++i) {
		CvPoint *ptr_seq = (CvPoint*) (cvGetSeqElem(contour, i));
		*ptr_mat = cvPointTo32f(*ptr_seq); 			//explicitly convert CvPoint to CvPoint2D32f
		ptr_mat++;
	} //turn the contour into a matrix
	CvBox2D minRect = cvMinAreaRect2(mat_contour); 			//get the minimum-area bounding rectangle
	//CvMat *rot = cvCreateMat(2,3,CV_32FC1);
	//cv2DRotationMatrix(cvPoint2D32f(src->width*0.5f,src->height*0.5f),minRect.angle,0.6,rot); // compute a rotation matrix; a matrix built this way does not rotate the image to the desired result
	float factor = 1.0; //scale factor
	float angle = -minRect.angle;
	float w = 0, h = 0;
	w = minRect.center.x;
	h = minRect.center.y;
	RotateImage(src, dst, cvPoint(w, h), angle, factor);
	//cvEllipseBox(dst,minRect,cvScalar(0,0,255));
	cvReleaseMat(&mat_contour);
	return minRect; //return the best (minimum-area) bounding box
}
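RotateImage is called above but not shown in this example. A minimal sketch of what such a helper could look like with the legacy C API (rotation about a given center via cv2DRotationMatrix followed by cvWarpAffine) is given below; this is an assumption, not the original implementation.

// Assumed sketch of a RotateImage-style helper: rotate src about 'center' by 'angle'
// degrees with uniform scale 'factor', writing the result into dst (same size as src).
void RotateImage(const IplImage* src, IplImage* dst, CvPoint center, float angle, float factor)
{
	float m[6];
	CvMat map = cvMat(2, 3, CV_32FC1, m);
	cv2DRotationMatrix(cvPointTo32f(center), angle, factor, &map);
	cvWarpAffine(src, dst, &map, CV_INTER_LINEAR + CV_WARP_FILL_OUTLIERS, cvScalarAll(0));
}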
IplImage* cutSign(IplImage* origImg, CvPoint* corners, int numcorners, bool drawcircles)
{

	// convert corners to CvPoint2D32f.
        CvPoint2D32f cornersf[numcorners];
        for (int i=0; i<numcorners; ++i)
                cornersf[i] = cvPointTo32f(corners[i]);

	if (_debug) printf("Corners: %d,%d %d,%d %d,%d %d,%d\n",corners[0].x,corners[0].y,corners[1].x,corners[1].y,corners[2].x,corners[2].y,corners[3].x,corners[3].y);

	// Create target-image with right size.
        double xDiffBottom = pointDist(corners[0], corners[1]);
        double yDiffLeft = pointDist(corners[0], corners[3]);
        IplImage* cut = cvCreateImage(cvSize(xDiffBottom,yDiffLeft), IPL_DEPTH_8U, 3);

	// target points for perspective correction.
        CvPoint2D32f cornerstarget[numcorners];
        cornerstarget[0] = cvPoint2D32f(0,0);
        cornerstarget[1] = cvPoint2D32f(cut->width-1,0);
        cornerstarget[2]= cvPoint2D32f(cut->width-1,cut->height-1);
        cornerstarget[3] = cvPoint2D32f(0,cut->height-1);
	if (_debug) printf("Corners: %f,%f %f,%f %f,%f %f,%f\n",cornerstarget[0].x,cornerstarget[0].y,cornerstarget[1].x,cornerstarget[1].y,cornerstarget[2].x,cornerstarget[2].y,cornerstarget[3].x,cornerstarget[3].y);
        
	// Apply perspective correction to the image.
        CvMat* transmat = cvCreateMat(3, 3, CV_32FC1); // cvCreateMat(rows, cols, type)
        transmat = cvGetPerspectiveTransform(cornersf,cornerstarget,transmat);
        cvWarpPerspective(origImg,cut,transmat);
        cvReleaseMat(&transmat);

	// Draw yellow circles around the corners.
	if (drawcircles)
		for (int i=0; i<numcorners; ++i)
			cvCircle(origImg, corners[i],5,CV_RGB(255,255,0),2);

        return cut;
}
Example #15
int main(int argc, char * argv[])
{
	if(argc < 3) {
		fprintf(stderr, "usage: %s image1 image2\n", argv[0]);
		return 1;
	}

	char * im1fname = argv[1];
	char * im2fname = argv[2];

	IplImage * image1 = cvLoadImage(im1fname, CV_LOAD_IMAGE_GRAYSCALE);

	IplImage * eigenvalues = cvCreateImage(cvGetSize(image1), 32, 1);
	IplImage * temp = cvCreateImage(cvGetSize(image1), 32, 1);

	int count = MAX_COUNT;
	double quality = 0.5;
	// double min_distance = 2;
	double min_distance = 50;
	int block_size = 7;
	int use_harris = 0;
	int win_size = 10;
	int flags = 0;

	CvPoint2D32f * source_points = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(CvPoint2D32f));
	CvPoint2D32f * dest_points = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(CvPoint2D32f));
	CvPoint2D32f * delaunay_points = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(CvPoint2D32f));

	cvGoodFeaturesToTrack( image1, eigenvalues, temp, source_points, &count,
			quality, min_distance, 0, block_size, use_harris, 0.04 );

	printf("%d features\n",count);

	setbuf(stdout, NULL);

	printf("Finding corner subpix...");
	cvFindCornerSubPix( image1, source_points, count,
			cvSize(win_size,win_size), cvSize(-1,-1),
			cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS, 20, 0.03));
	printf("done.\n");

	cvReleaseImage(&eigenvalues);
	cvReleaseImage(&temp);

	IplImage * image2 = cvLoadImage(im2fname, CV_LOAD_IMAGE_GRAYSCALE);

	char * status = (char*)cvAlloc(sizeof(char)*MAX_COUNT);

	IplImage * pyramid = cvCreateImage( cvGetSize(image1), IPL_DEPTH_8U, 1 );
	IplImage * second_pyramid = cvCreateImage( cvGetSize(image2), IPL_DEPTH_8U, 1 );

	printf("Computing optical flow...");	
	cvCalcOpticalFlowPyrLK(image1, image2, pyramid, second_pyramid, source_points,
		dest_points, count, cvSize(win_size,win_size), 4, status, 0,
		cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS, 20, 0.03),
		flags);
	printf("done.\n");

	int num_matches = 0;
	int num_out_matches = 0;
	int max_dist = 30;
	int offset = 200;
	
	CvMemStorage * storage = cvCreateMemStorage(0);
	CvSubdiv2D * delaunay = cvCreateSubdivDelaunay2D( cvRect(0,0,image1->width,image1->height), storage);

	cvReleaseImage(&image1);
	cvReleaseImage(&image2);
	
	image1 = cvLoadImage(im1fname, CV_LOAD_IMAGE_COLOR);
	image2 = cvLoadImage(im2fname, CV_LOAD_IMAGE_COLOR);

	cvSet( image1, cvScalarAll(255) );

	std::map<CvPoint, CvPoint> point_lookup_map;
	std::vector<std::pair<CvPoint, CvPoint> > point_lookup;

	// put corners in the point lookup as going to themselves
	point_lookup_map[cvPoint(0,0)] = cvPoint(0,0);
	point_lookup_map[cvPoint(0,image1->height-1)] = cvPoint(0,image1->height-1);
	point_lookup_map[cvPoint(image1->width-1,0)] = cvPoint(image1->width-1,0);
	point_lookup_map[cvPoint(image1->width-1,image1->height-1)] = cvPoint(image1->width-1,image1->height-1);

	point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(0,0), cvPoint(0,0)));
	point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(0,image1->height-1), cvPoint(0,image1->height-1)));
	point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(image1->width-1,0), cvPoint(image1->width-1,0)));
	point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(image1->width-1,image1->height-1), cvPoint(image1->width-1,image1->height-1)));

	printf("Inserting corners...");
	// put corners in the Delaunay subdivision
	for(unsigned int i = 0; i < point_lookup.size(); i++) {
		cvSubdivDelaunay2DInsert( delaunay, cvPointTo32f(point_lookup[i].first) );
	}
	printf("done.\n");

	CvSubdiv2DEdge proxy_edge;
	for(int i = 0; i < count; i++) {
		if(status[i]) {
			CvPoint source = cvPointFrom32f(source_points[i]);
			CvPoint dest = cvPointFrom32f(dest_points[i]);
	
			if((((int)fabs((double)(source.x - dest.x))) > max_dist) ||
				 (((int)fabs((double)(source.y - dest.y))) > max_dist)) {	
				num_out_matches++;
			}
			else if((dest.x >= 0) && (dest.y >= 0) && (dest.x < (image1->width)) && (dest.y < (image1->height))) {
				if(point_lookup_map.find(source) == point_lookup_map.end()) {
					num_matches++;
				
					point_lookup_map[source] = dest;
					point_lookup.push_back(std::pair<CvPoint,CvPoint>(source,dest));
					delaunay_points[i] = (cvSubdivDelaunay2DInsert( delaunay, cvPointTo32f(source) ))->pt;
					cvSetImageROI( image1, cvRect(source.x-8,source.y-8,8*2,8*2) );
					cvResetImageROI( image2 );
					cvGetRectSubPix( image2, image1, dest_points[i] );
				}
				/*
				cvSet2D( image1, source.y, source.x, cvGet2D( image2, dest.y, dest.x ) );
				cvSet2D( image1, source.y, source.x+1, cvGet2D( image2, dest.y, dest.x+1 ) );
				cvSet2D( image1, source.y, source.x-1, cvGet2D( image2, dest.y, dest.x-1 ) );
				cvSet2D( image1, source.y+1, source.x, cvGet2D( image2, dest.y+1, dest.x ) );
				cvSet2D( image1, source.y-1, source.x, cvGet2D( image2, dest.y-1, dest.x ) );
				cvSet2D( image1, source.y+1, source.x+1, cvGet2D( image2, dest.y+1, dest.x+1 ) );
				cvSet2D( image1, source.y-1, source.x-1, cvGet2D( image2, dest.y-1, dest.x-1 ) );
				cvSet2D( image1, source.y+1, source.x-1, cvGet2D( image2, dest.y+1, dest.x-1 ) );
				cvSet2D( image1, source.y-1, source.x+1, cvGet2D( image2, dest.y-1, dest.x+1 ) );
				*/

				// cvCircle( image1, source, 4, CV_RGB(255,0,0), 2, CV_AA );
				// cvCircle( image2, dest, 4, CV_RGB(255,0,0), 2, CV_AA );
			}

			/*
			cvSetImageROI( image1, cvRect(source.x-offset,source.y-offset,offset*2,offset*2) );
			cvSetImageROI( image2, cvRect(dest.x-offset,dest.y-offset,offset*2,offset*2) );
			cvNamedWindow("image1",0);
			cvNamedWindow("image2",0);
			cvShowImage("image1",image1);
			cvShowImage("image2",image2);
			printf("%d,%d -> %d,%d\n",source.x,source.y,dest.x,dest.y);
			cvWaitKey(0);
			cvDestroyAllWindows();
			*/
		}
	}
	printf("%d %d\n",num_matches,num_out_matches);
	printf("%d lookups\n",point_lookup_map.size());

	cvResetImageROI( image1 );

	cvSaveImage("sparse.jpg", image1);

	cvReleaseImage(&image1);
	image1 = cvLoadImage(im1fname, CV_LOAD_IMAGE_COLOR);
	cvSet( image1, cvScalarAll(255) );
	printf("Warping image...");

	CvSeqReader  reader;
	int total = delaunay->edges->total;
	int elem_size = delaunay->edges->elem_size;

	cvStartReadSeq( (CvSeq*)(delaunay->edges), &reader, 0 );

	std::vector<Triangle> trivec;
	std::vector<CvMat *> baryinvvec;

	for( int i = 0; i < total; i++ ) {
		CvQuadEdge2D* edge = (CvQuadEdge2D*)(reader.ptr);

		if( CV_IS_SET_ELEM( edge ))	{
			CvSubdiv2DEdge curedge = (CvSubdiv2DEdge)edge;
			CvSubdiv2DEdge t = curedge;
			Triangle temptri;
			int count = 0;
			
			// construct a triangle from this edge
			do {
				CvSubdiv2DPoint* pt = cvSubdiv2DEdgeOrg( t );
				if(count < 3) {
					pt->pt.x = pt->pt.x >= image1->width ? image1->width-1 : pt->pt.x;
					pt->pt.y = pt->pt.y >= image1->height ? image1->height-1 : pt->pt.y;
					pt->pt.x = pt->pt.x < 0 ? 0 : pt->pt.x;
					pt->pt.y = pt->pt.y < 0 ? 0 : pt->pt.y;

					temptri.points[count] = cvPointFrom32f( pt->pt );
				}
				else {
					printf("More than 3 edges\n");
				}
				count++;
				t = cvSubdiv2DGetEdge( t, CV_NEXT_AROUND_LEFT );
			} while( t != curedge );
			
			// check that triangle is not already in
			if( std::find(trivec.begin(), trivec.end(), temptri) == trivec.end() ) {
				// push triangle in and draw
				trivec.push_back(temptri);
				cvLine( image1, temptri.points[0], temptri.points[1], CV_RGB(255,0,0), 1, CV_AA, 0 );
				cvLine( image1, temptri.points[1], temptri.points[2], CV_RGB(255,0,0), 1, CV_AA, 0 );
				cvLine( image1, temptri.points[2], temptri.points[0], CV_RGB(255,0,0), 1, CV_AA, 0 );

				// compute barycentric computation vector for this triangle
				CvMat * barycen = cvCreateMat( 3, 3, CV_32FC1 );
				CvMat * baryceninv = cvCreateMat( 3, 3, CV_32FC1 );

				barycen->data.fl[3*0+0] = temptri.points[0].x;
				barycen->data.fl[3*0+1] = temptri.points[1].x;
				barycen->data.fl[3*0+2] = temptri.points[2].x;
				barycen->data.fl[3*1+0] = temptri.points[0].y;
				barycen->data.fl[3*1+1] = temptri.points[1].y;
				barycen->data.fl[3*1+2] = temptri.points[2].y;
				barycen->data.fl[3*2+0] = 1;
				barycen->data.fl[3*2+1] = 1;
				barycen->data.fl[3*2+2] = 1;

				cvInvert( barycen, baryceninv, CV_LU );
				baryinvvec.push_back(baryceninv);

				cvReleaseMat( &barycen );
			}
		}

		CV_NEXT_SEQ_ELEM( elem_size, reader );
	}
	printf("%d triangles...", trivec.size());
	cvSaveImage("triangles.jpg", image1);
	
	cvSet( image1, cvScalarAll(255) );
	IplImage * clean_nonthresh = cvLoadImage( "conhull-clean.jpg", CV_LOAD_IMAGE_COLOR );

	// for each triangle
	for(unsigned int i = 0; i < trivec.size(); i++) {
		Triangle curtri = trivec[i];
		CvMat * curpoints = cvCreateMat( 1, 3, CV_32SC2 );
		Triangle target;
		std::map<CvPoint,CvPoint>::iterator piter[3];
		
		printf("Triangle %d / %d\n",i,trivec.size());
		bool is_corner = false;
		for(int j = 0; j < 3; j++) {
			/*
			curpoints->data.i[2*j+0] = curtri.points[j].x;
			curpoints->data.i[2*j+1] = curtri.points[j].y;
			*/
			CV_MAT_ELEM( *curpoints, CvPoint, 0, j ) = curtri.points[j];
			printf("%d,%d\n",curtri.points[j].x,curtri.points[j].y);
	
			/*	
			if((curtri.points[j] == cvPoint(0,0)) ||  (curtri.points[j] == cvPoint(0,image1->height)) ||(curtri.points[j] == cvPoint(image1->width,0)) ||(curtri.points[j] == cvPoint(image1->width,image1->height))) {
				is_corner = true;
				break;
			}
			*/

			for(unsigned int k = 0; k < point_lookup.size(); k++) {
				std::pair<CvPoint,CvPoint> thispair = point_lookup[k];
				if(thispair.first == curtri.points[j]) {
					target.points[j] = thispair.second;
					break;
				}
			}

			/*
			piter[j] = point_lookup_map.find(curtri.points[j]);
			if(piter[j] != point_lookup_map.end() ) {
				target.points[j] = piter[j]->second;
			}
			*/
		}
			
		// if((piter[0] != point_lookup_map.end()) && (piter[1] != point_lookup_map.end()) && (piter[2] != point_lookup_map.end())) {
		if(!is_corner) {
			CvMat * newcorners = cvCreateMat( 3, 3, CV_32FC1 );
			newcorners->data.fl[3*0+0] = target.points[0].x;
			newcorners->data.fl[3*0+1] = target.points[1].x;
			newcorners->data.fl[3*0+2] = target.points[2].x;
			newcorners->data.fl[3*1+0] = target.points[0].y;
			newcorners->data.fl[3*1+1] = target.points[1].y;
			newcorners->data.fl[3*1+2] = target.points[2].y;
			newcorners->data.fl[3*2+0] = 1;
			newcorners->data.fl[3*2+1] = 1;
			newcorners->data.fl[3*2+2] = 1;

			CvContour hdr;
			CvSeqBlock blk;
			CvRect trianglebound = cvBoundingRect( cvPointSeqFromMat(CV_SEQ_KIND_CURVE+CV_SEQ_FLAG_CLOSED, curpoints, &hdr, &blk), 1 );
			printf("Bounding box: %d,%d,%d,%d\n",trianglebound.x,trianglebound.y,trianglebound.width,trianglebound.height);
			for(int y = trianglebound.y; (y < (trianglebound.y + trianglebound.height)) && ( y < image1->height); y++) {
				for(int x = trianglebound.x; (x < (trianglebound.x + trianglebound.width)) && (x < image1->width); x++) {
					// check to see if we're inside this triangle
					/*
					CvPoint v0 = cvPoint( curtri.points[2].x - curtri.points[0].x, curtri.points[2].y - curtri.points[0].y );
					CvPoint v1 = cvPoint( curtri.points[1].x - curtri.points[0].x, curtri.points[1].y - curtri.points[0].y );
					CvPoint v2 = cvPoint( x - curtri.points[0].x, y - curtri.points[0].y );
					
					int dot00 = v0.x * v0.x + v0.y * v0. y;
					int dot01 = v0.x * v1.x + v0.y * v1. y;
					int dot02 = v0.x * v2.x + v0.y * v2. y;
					int dot11 = v1.x * v1.x + v1.y * v1. y;
					int dot12 = v1.x * v2.x + v1.y * v2. y;

					double invDenom = 1.0 / (double)(dot00 * dot11 - dot01 * dot01);
					double u = (double)(dot11 * dot02 - dot01 * dot12) * invDenom;
					double v = (double)(dot00 * dot12 - dot01 * dot02) * invDenom;
					*/

					CvMat * curp = cvCreateMat(3, 1, CV_32FC1);
					CvMat * result = cvCreateMat(3, 1, CV_32FC1);
					curp->data.fl[0] = x;
					curp->data.fl[1] = y;
					curp->data.fl[2] = 1;
					cvMatMul( baryinvvec[i], curp, result );
					// double u = result->data.fl[0]/result->data.fl[2];
					// double v = result->data.fl[1]/result->data.fl[2];
			

					if( (result->data.fl[0] > 0) && (result->data.fl[1] > 0) && (fabs(1.0 - (result->data.fl[0]+result->data.fl[1]+result->data.fl[2])) <= 0.01) ) {
					// if((u > 0) || (v > 0) /*&& ((u +v) < 1)*/ ) {
						// printf("Barycentric: %f %f %f\n", result->data.fl[0], result->data.fl[1], result->data.fl[2]);
						// this point is inside this triangle
						// printf("Point %d,%d inside %d,%d %d,%d %d,%d\n",x,y,trivec[i].points[0].x,trivec[i].points[0].y,
						//	trivec[i].points[1].x,trivec[i].points[1].y,trivec[i].points[2].x,trivec[i].points[2].y);

						CvMat * sourcepoint = cvCreateMat(3, 1, CV_32FC1);
						cvMatMul( newcorners, result, sourcepoint );
						double sourcex = sourcepoint->data.fl[0]/*/sourcepoint->data.fl[2]*/;
						double sourcey = sourcepoint->data.fl[1]/*/sourcepoint->data.fl[2]*/;
						if((sourcex >= 0) && (sourcey >= 0) && (sourcex < (image1->width)) && (sourcey < (image1->height))) {
							// printf("%d,%d %d,%d\n",x,y,(int)sourcex,(int)sourcey);
							cvSet2D( image1, y, x, cvGet2D( clean_nonthresh, (int)sourcey, (int)sourcex ) );
						}
	
						/*
						if((i == 143) && (y == 3577) && (x > 2055) && (x < 2087)) {
							printf("%d: %f, %f, %f\t%f, %f, %f\n",x,result->data.fl[0],result->data.fl[1],result->data.fl[2],
									sourcepoint->data.fl[0],sourcepoint->data.fl[1],sourcepoint->data.fl[2]);
						}
						*/
	
						cvReleaseMat( &sourcepoint );
						
						// printf("Point %d,%d inside %d,%d %d,%d %d,%d\n",x,y,trivec[i].points[0].x,trivec[i].points[0].y,
						//		trivec[i].points[1].x,trivec[i].points[1].y,trivec[i].points[2].x,trivec[i].points[2].y);

					}
					cvReleaseMat( &result );
					cvReleaseMat( &curp );
				}
			}
			cvReleaseMat( &newcorners );
		}
		cvReleaseMat( &curpoints );
	}
	/*
	for(int y = 0; y < image1->height; y++) {
		for(int x = 0; x < image1->width; x++) {
			CvMat * curp = cvCreateMat(3, 1, CV_32FC1);
			CvMat * result = cvCreateMat(3, 1, CV_32FC1);
			curp->data.fl[0] = x;
			curp->data.fl[1] = y;
			curp->data.fl[2] = 1;
			for(unsigned int i = 0; i < baryinvvec.size(); i++) {
				cvMatMul( baryinvvec[i], curp, result );
				double u = result->data.fl[0]/result->data.fl[2];
				double v = result->data.fl[1]/result->data.fl[2];
				if((u > 0) && (v > 0) && (u + v < 1)) {
					// printf("Point %d,%d inside %d,%d %d,%d %d,%d\n",x,y,trivec[i].points[0].x,trivec[i].points[0].y,
					//		trivec[i].points[1].x,trivec[i].points[1].y,trivec[i].points[2].x,trivec[i].points[2].y);

					break;
				}
			}
			cvReleaseMat( &result );
			cvReleaseMat( &curp );
		}
	}
	*/

	cvReleaseImage( &clean_nonthresh );

#ifdef OLD_BUSTED
	for(int y = 0; y < image1->height; y++) {
		for(int x = 0; x < image1->width; x++) {
			CvSubdiv2DPointLocation locate_result;
			CvSubdiv2DEdge on_edge;
			CvSubdiv2DPoint * on_vertex;
			CvPoint curpoint = cvPoint( x, y );
			locate_result = cvSubdiv2DLocate( delaunay, cvPointTo32f( curpoint ),
				&on_edge, &on_vertex );
			if( (locate_result != CV_PTLOC_OUTSIDE_RECT) && (locate_result != CV_PTLOC_ERROR) ) {
				if( locate_result == CV_PTLOC_VERTEX ) { // this point is on a vertex
					for(int i = 0; i < count; i++) {
						if(((on_vertex->pt).x == delaunay_points[i].x) && ((on_vertex->pt).y == delaunay_points[i].y)) {
							cvSet2D( image1, y, x, cvGet2D( image2, cvPointFrom32f(dest_points[i]).y, cvPointFrom32f(dest_points[i]).x ) );
							break;
						}
					}
				}
				else if( locate_result == CV_PTLOC_ON_EDGE ) { // this point is on an edge
					CvSubdiv2DPoint* org_pt;
					CvSubdiv2DPoint* dst_pt;
					CvPoint org_pt_warp;
					CvPoint dst_pt_warp;
						
					org_pt = cvSubdiv2DEdgeOrg(on_edge);
					dst_pt = cvSubdiv2DEdgeDst(on_edge);

					for(int i = 0; i < count; i++) {
						if(((org_pt->pt).x == delaunay_points[i].x) && ((org_pt->pt).y == delaunay_points[i].y)) {
							org_pt_warp = cvPointFrom32f(dest_points[i]);
						}
						if(((dst_pt->pt).x == delaunay_points[i].x) && ((dst_pt->pt).y == delaunay_points[i].y)) {
							dst_pt_warp = cvPointFrom32f(dest_points[i]);
						}
					}

					// compute vector length of original edge and current point
					double original_length;
					double cur_length; 
					if( (int)((org_pt->pt).x) == curpoint.x ) { // vertical line
						original_length = fabs((org_pt->pt).y - (dst_pt->pt).y);
						cur_length = fabs((org_pt->pt).y - curpoint.y);
					}
					else if( (int)((org_pt->pt).y) == curpoint.y ) { // horizontal line
						original_length = fabs((org_pt->pt).x - (dst_pt->pt).x);
						cur_length = fabs((org_pt->pt).x - curpoint.x);
					}
					else { // sloped line
				 		original_length = sqrt(pow((org_pt->pt).x - (dst_pt->pt).x, 2.0) + pow((org_pt->pt).y - (dst_pt->pt).y, 2.0));
						cur_length = sqrt(pow((org_pt->pt).x - curpoint.x, 2.0) + pow((org_pt->pt).y - curpoint.y, 2.0));
					}
					// compute ratio of this point on the edge
					double ratio = cur_length / original_length;
					// copy this point from the destination edge
					CvPoint point_in_original;
					int warped_x = (int)(org_pt_warp.x - dst_pt_warp.x);
					int warped_y = (int)(org_pt_warp.y - dst_pt_warp.y);
					if( org_pt_warp.x == curpoint.x ) { // vertical line
						point_in_original.y = (int)(org_pt_warp.y + (ratio * (org_pt_warp.y - dst_pt_warp.y)));
						point_in_original.x = org_pt_warp.x;
					}
					else if(org_pt_warp.y == curpoint.y) { // horizontal line
						point_in_original.x = (int)(org_pt_warp.x + (ratio * (org_pt_warp.x - dst_pt_warp.x)));
						point_in_original.y = org_pt_warp.y;
					}
					else { // sloped line
						double destination_length = sqrt(pow((org_pt_warp).x - (dst_pt_warp).x, 2.0) + pow((org_pt_warp).y - (dst_pt_warp).y, 2.0));
						double scaled_length = ratio * destination_length;
						double dest_angle = atan(fabs( (double)warped_y / (double)warped_x ));
						double xdist = scaled_length * cos(dest_angle);
						double ydist = scaled_length * sin(dest_angle);
						xdist = warped_x > 0 ? xdist : xdist * -1;
						ydist = warped_y > 0 ? ydist : ydist * -1;
						point_in_original.x = (int)( org_pt_warp.x + xdist);
						point_in_original.y = (int)( org_pt_warp.y + ydist);
					}
					
					if((point_in_original.x >= 0) && (point_in_original.y >= 0) && (point_in_original.x < (image1->width)) && (point_in_original.y < (image1->height))) {
						cvSet2D( image1, y, x, cvGet2D( image2, point_in_original.y, point_in_original.x ) );
					}
					else {
						printf("Edge point outside image\n");
					}
					// cvSet2D( image1, y, x, cvGet2D( image2, (int)(org_pt_warp.x + (ratio * (org_pt_warp.x - dst_pt_warp.x))), 
					//			(int)(org_pt_warp.y + (ratio * (org_pt_warp.y - dst_pt_warp.y))) ) );
				}
				else if( locate_result == CV_PTLOC_INSIDE ) { // this point is inside a facet (triangle)
					/*
					printf("Point inside facet: %d, %d\n",curpoint.x,curpoint.y);
					int count = 0;
					CvPoint * origins = (CvPoint*)malloc(sizeof(CvPoint)*3);
					CvSubdiv2DEdge t = on_edge;
					// count number of edges
					do {
						CvSubdiv2DPoint* pt = cvSubdiv2DEdgeOrg( t );
						if(count < 3) {
							origins[count] = cvPoint( cvRound(pt->pt.x), cvRound(pt->pt.y));
							printf("%d,%d\t",origins[count].x,origins[count].y);
						}
						count++;
						t = cvSubdiv2DGetEdge( t, CV_NEXT_AROUND_LEFT );
					} while(t != on_edge);
					printf("\n");

					free(origins);
					*/
				}
			}
		}
	}
#endif // OLD_BUSTED
	printf("done.\n");

	cvSaveImage("fullwarp.jpg", image1);

	printf("Drawing subdivisions on warped image...");
	draw_subdiv( image1, delaunay, NULL, NULL, 0, NULL );
	// draw_subdiv( image1, delaunay, delaunay_points, source_points, count, status );
	printf("done.\n");
	
	cvSaveImage("edgeswarp.jpg", image1);

	cvReleaseImage(&image2);

	image2 = cvLoadImage(im2fname, CV_LOAD_IMAGE_COLOR);
	// cvCreateImage( cvGetSize(image2), IPL_DEPTH_8U, 3 );

	// cvCalcSubdivVoronoi2D( delaunay );
	printf("Drawing subdivisions on unwarped image...");
	draw_subdiv( image2, delaunay, delaunay_points, dest_points, count, status );
	// draw_subdiv( image2, delaunay, NULL, NULL, 0, NULL );
	printf("done.\n");

	cvSaveImage("edges.jpg",image2);

	cvReleaseImage(&image1);
	cvFree(&source_points);
	cvFree(&dest_points);
	cvFree(&status);
	cvReleaseMemStorage(&storage);
	cvFree(&delaunay_points);

	cvReleaseImage(&image2);

	return 0;
}
Example #16
int opticaltri( CvMat * &clean_texture, int verts )
{
	char * im1fname = "conhull-dirty-thresh.jpg";
	char * im2fname = "conhull-clean-thresh.jpg";

	int count = MAX_COUNT;
	char * status;
	
	CvPoint2D32f * source_points;
	CvPoint2D32f * dest_points;
	CvPoint2D32f * delaunay_points = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(CvPoint2D32f));

	// count = opticalflow( im1fname, im2fname, source_points, dest_points, status ); 
	count = findsiftpoints( "conhull-dirty.jpg", "conhull-clean.jpg", source_points, dest_points, status ); 

	IplImage * image1 = cvLoadImage(im1fname, CV_LOAD_IMAGE_COLOR);

	CvMemStorage * storage = cvCreateMemStorage(0);
	CvSubdiv2D * delaunay = cvCreateSubdivDelaunay2D( cvRect(0,0,image1->width,image1->height), storage);

	IplImage * image2 = cvLoadImage(im2fname, CV_LOAD_IMAGE_COLOR);

	cvSet( image1, cvScalarAll(255) );

	std::map<CvPoint, CvPoint> point_lookup_map;
	std::vector<std::pair<CvPoint, CvPoint> > point_lookup;

	int num_matches = 0;
	int num_out_matches = 0;
	int max_dist = 50;
	int offset = 200;	

	// put corners in the point lookup as going to themselves
	point_lookup_map[cvPoint(0,0)] = cvPoint(0,0);
	point_lookup_map[cvPoint(0,image1->height-1)] = cvPoint(0,image1->height-1);
	point_lookup_map[cvPoint(image1->width-1,0)] = cvPoint(image1->width-1,0);
	point_lookup_map[cvPoint(image1->width-1,image1->height-1)] = cvPoint(image1->width-1,image1->height-1);

	point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(0,0), cvPoint(0,0)));
	point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(0,image1->height-1), cvPoint(0,image1->height-1)));
	point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(image1->width-1,0), cvPoint(image1->width-1,0)));
	point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(image1->width-1,image1->height-1), cvPoint(image1->width-1,image1->height-1)));

	printf("Inserting corners...");
	// put corners in the Delaunay subdivision
	for(unsigned int i = 0; i < point_lookup.size(); i++) {
		cvSubdivDelaunay2DInsert( delaunay, cvPointTo32f(point_lookup[i].first) );
	}
	printf("done.\n");

	CvSubdiv2DEdge proxy_edge;
	for(int i = 0; i < count; i++) {
		if(status[i]) {
			CvPoint source = cvPointFrom32f(source_points[i]);
			CvPoint dest = cvPointFrom32f(dest_points[i]);
	
			if((((int)fabs((double)(source.x - dest.x))) > max_dist) ||
				 (((int)fabs((double)(source.y - dest.y))) > max_dist)) {	
				num_out_matches++;
			}
			else if((dest.x >= 0) && (dest.y >= 0) && (dest.x < (image1->width)) && (dest.y < (image1->height))) {
				if(point_lookup_map.find(source) == point_lookup_map.end()) {
					num_matches++;
				
					point_lookup_map[source] = dest;
					point_lookup.push_back(std::pair<CvPoint,CvPoint>(source,dest));
					// delaunay_points[i] = 
					(cvSubdivDelaunay2DInsert( delaunay, cvPointTo32f(source) ))->pt;
					cvSetImageROI( image1, cvRect(source.x-8,source.y-8,8*2,8*2) );
					cvResetImageROI( image2 );
					cvGetRectSubPix( image2, image1, dest_points[i] );
				}
				/*
				cvSet2D( image1, source.y, source.x, cvGet2D( image2, dest.y, dest.x ) );
				cvSet2D( image1, source.y, source.x+1, cvGet2D( image2, dest.y, dest.x+1 ) );
				cvSet2D( image1, source.y, source.x-1, cvGet2D( image2, dest.y, dest.x-1 ) );
				cvSet2D( image1, source.y+1, source.x, cvGet2D( image2, dest.y+1, dest.x ) );
				cvSet2D( image1, source.y-1, source.x, cvGet2D( image2, dest.y-1, dest.x ) );
				cvSet2D( image1, source.y+1, source.x+1, cvGet2D( image2, dest.y+1, dest.x+1 ) );
				cvSet2D( image1, source.y-1, source.x-1, cvGet2D( image2, dest.y-1, dest.x-1 ) );
				cvSet2D( image1, source.y+1, source.x-1, cvGet2D( image2, dest.y+1, dest.x-1 ) );
				cvSet2D( image1, source.y-1, source.x+1, cvGet2D( image2, dest.y-1, dest.x+1 ) );
				*/

				// cvCircle( image1, source, 4, CV_RGB(255,0,0), 2, CV_AA );
				// cvCircle( image2, dest, 4, CV_RGB(255,0,0), 2, CV_AA );
			}

			/*
			cvSetImageROI( image1, cvRect(source.x-offset,source.y-offset,offset*2,offset*2) );
			cvSetImageROI( image2, cvRect(dest.x-offset,dest.y-offset,offset*2,offset*2) );
			cvNamedWindow("image1",0);
			cvNamedWindow("image2",0);
			cvShowImage("image1",image1);
			cvShowImage("image2",image2);
			printf("%d,%d -> %d,%d\n",source.x,source.y,dest.x,dest.y);
			cvWaitKey(0);
			cvDestroyAllWindows();
			*/
		}
	}
	printf("%d %d\n",num_matches,num_out_matches);
	printf("%d lookups\n",point_lookup_map.size());

	cvResetImageROI( image1 );

	cvSaveImage("sparse.jpg", image1);

	cvReleaseImage(&image1);
	image1 = cvLoadImage(im1fname, CV_LOAD_IMAGE_COLOR);
	cvSet( image1, cvScalarAll(255) );
	printf("Warping image...");

	CvSeqReader  reader;
	int total = delaunay->edges->total;
	int elem_size = delaunay->edges->elem_size;


	std::vector<Triangle> trivec;
	std::vector<CvMat *> baryinvvec;

	for( int i = 0; i < total*2; i++ ) {
		if((i == 0) || (i == total)) {
			cvStartReadSeq( (CvSeq*)(delaunay->edges), &reader, 0 );
		}
		CvQuadEdge2D* edge = (CvQuadEdge2D*)(reader.ptr);

		if( CV_IS_SET_ELEM( edge ))	{
			CvSubdiv2DEdge curedge = (CvSubdiv2DEdge)edge;
			CvSubdiv2DEdge t = curedge;
			Triangle temptri;
			int count = 0;
			
			// construct a triangle from this edge
			do {
				CvSubdiv2DPoint* pt = cvSubdiv2DEdgeOrg( t );
				if(count < 3) {
					pt->pt.x = pt->pt.x >= image1->width ? image1->width-1 : pt->pt.x;
					pt->pt.y = pt->pt.y >= image1->height ? image1->height-1 : pt->pt.y;
					pt->pt.x = pt->pt.x < 0 ? 0 : pt->pt.x;
					pt->pt.y = pt->pt.y < 0 ? 0 : pt->pt.y;

					temptri.points[count] = cvPointFrom32f( pt->pt );
				}
				else {
					printf("More than 3 edges\n");
				}
				count++;
				if(i < total)
					t = cvSubdiv2DGetEdge( t, CV_NEXT_AROUND_LEFT );
				else
					t = cvSubdiv2DGetEdge( t, CV_NEXT_AROUND_RIGHT );
			} while( t != curedge );
			
			// check that triangle is not already in
			if( std::find(trivec.begin(), trivec.end(), temptri) == trivec.end() ) {
				// push triangle in and draw
				trivec.push_back(temptri);
				cvLine( image1, temptri.points[0], temptri.points[1], CV_RGB(255,0,0), 1, CV_AA, 0 );
				cvLine( image1, temptri.points[1], temptri.points[2], CV_RGB(255,0,0), 1, CV_AA, 0 );
				cvLine( image1, temptri.points[2], temptri.points[0], CV_RGB(255,0,0), 1, CV_AA, 0 );

				// compute barycentric computation vector for this triangle
				CvMat * barycen = cvCreateMat( 3, 3, CV_32FC1 );
				CvMat * baryceninv = cvCreateMat( 3, 3, CV_32FC1 );

				barycen->data.fl[3*0+0] = temptri.points[0].x;
				barycen->data.fl[3*0+1] = temptri.points[1].x;
				barycen->data.fl[3*0+2] = temptri.points[2].x;
				barycen->data.fl[3*1+0] = temptri.points[0].y;
				barycen->data.fl[3*1+1] = temptri.points[1].y;
				barycen->data.fl[3*1+2] = temptri.points[2].y;
				barycen->data.fl[3*2+0] = 1;
				barycen->data.fl[3*2+1] = 1;
				barycen->data.fl[3*2+2] = 1;

				cvInvert( barycen, baryceninv, CV_LU );
				baryinvvec.push_back(baryceninv);

				cvReleaseMat( &barycen );
			}
		}

		CV_NEXT_SEQ_ELEM( elem_size, reader );
	}
	printf("%d triangles...", trivec.size());
	cvSaveImage("triangles.jpg", image1);
	
	cvSet( image1, cvScalarAll(255) );
	IplImage * clean_nonthresh = cvLoadImage( "conhull-clean.jpg", CV_LOAD_IMAGE_COLOR );

	// for each triangle
	for(unsigned int i = 0; i < trivec.size(); i++) {
		Triangle curtri = trivec[i];
		CvMat * curpoints = cvCreateMat( 1, 3, CV_32SC2 );
		Triangle target;
		std::map<CvPoint,CvPoint>::iterator piter[3];
		
		printf("Triangle %d / %d\n",i,trivec.size());
		int is_corner = 0;
		for(int j = 0; j < 3; j++) {
			/*
			curpoints->data.i[2*j+0] = curtri.points[j].x;
			curpoints->data.i[2*j+1] = curtri.points[j].y;
			*/
			CV_MAT_ELEM( *curpoints, CvPoint, 0, j ) = curtri.points[j];
			printf("%d,%d\n",curtri.points[j].x,curtri.points[j].y);
	
			if((curtri.points[j] == cvPoint(0,0)) ||  (curtri.points[j] == cvPoint(0,image1->height - 1)) ||(curtri.points[j] == cvPoint(image1->width - 1,0)) ||(curtri.points[j] == cvPoint(image1->width - 1,image1->height - 1))) {
				is_corner++;
			}
			

			for(unsigned int k = 0; k < point_lookup.size(); k++) {
				std::pair<CvPoint,CvPoint> thispair = point_lookup[k];
				if(thispair.first == curtri.points[j]) {
					target.points[j] = thispair.second;
					break;
				}
			}

			/*
			piter[j] = point_lookup_map.find(curtri.points[j]);
			if(piter[j] != point_lookup_map.end() ) {
				target.points[j] = piter[j]->second;
			}
			*/
		}
			
		// if((piter[0] != point_lookup_map.end()) && (piter[1] != point_lookup_map.end()) && (piter[2] != point_lookup_map.end())) {
		if(is_corner < 3) {
			CvMat * newcorners = cvCreateMat( 3, 3, CV_32FC1 );
			newcorners->data.fl[3*0+0] = target.points[0].x;
			newcorners->data.fl[3*0+1] = target.points[1].x;
			newcorners->data.fl[3*0+2] = target.points[2].x;
			newcorners->data.fl[3*1+0] = target.points[0].y;
			newcorners->data.fl[3*1+1] = target.points[1].y;
			newcorners->data.fl[3*1+2] = target.points[2].y;
			newcorners->data.fl[3*2+0] = 1;
			newcorners->data.fl[3*2+1] = 1;
			newcorners->data.fl[3*2+2] = 1;

			CvContour hdr;
			CvSeqBlock blk;
			CvRect trianglebound = cvBoundingRect( cvPointSeqFromMat(CV_SEQ_KIND_CURVE+CV_SEQ_FLAG_CLOSED, curpoints, &hdr, &blk), 1 );
			printf("Bounding box: %d,%d,%d,%d\n",trianglebound.x,trianglebound.y,trianglebound.width,trianglebound.height);
			for(int y = trianglebound.y; (y < (trianglebound.y + trianglebound.height)) && ( y < image1->height); y++) {
				for(int x = trianglebound.x; (x < (trianglebound.x + trianglebound.width)) && (x < image1->width); x++) {
					// check to see if we're inside this triangle
					/*
					CvPoint v0 = cvPoint( curtri.points[2].x - curtri.points[0].x, curtri.points[2].y - curtri.points[0].y );
					CvPoint v1 = cvPoint( curtri.points[1].x - curtri.points[0].x, curtri.points[1].y - curtri.points[0].y );
					CvPoint v2 = cvPoint( x - curtri.points[0].x, y - curtri.points[0].y );
					
					int dot00 = v0.x * v0.x + v0.y * v0. y;
					int dot01 = v0.x * v1.x + v0.y * v1. y;
					int dot02 = v0.x * v2.x + v0.y * v2. y;
					int dot11 = v1.x * v1.x + v1.y * v1. y;
					int dot12 = v1.x * v2.x + v1.y * v2. y;

					double invDenom = 1.0 / (double)(dot00 * dot11 - dot01 * dot01);
					double u = (double)(dot11 * dot02 - dot01 * dot12) * invDenom;
					double v = (double)(dot00 * dot12 - dot01 * dot02) * invDenom;
					*/

					CvMat * curp = cvCreateMat(3, 1, CV_32FC1);
					CvMat * result = cvCreateMat(3, 1, CV_32FC1);
					curp->data.fl[0] = x;
					curp->data.fl[1] = y;
					curp->data.fl[2] = 1;
					cvMatMul( baryinvvec[i], curp, result );
					// double u = result->data.fl[0]/result->data.fl[2];
					// double v = result->data.fl[1]/result->data.fl[2];

					/*
					if((i == 3019) && (y == 1329) && (x > 2505) && (x < 2584)) {
						printf("Range %d: %f, %f, %f\t%f, %f, %f\n",x,result->data.fl[0],result->data.fl[1],result->data.fl[2],
								sourcepoint->data.fl[0],sourcepoint->data.fl[1],sourcepoint->data.fl[2]);
					}
					*/

					if( (result->data.fl[0] > MIN_VAL) && (result->data.fl[1] > MIN_VAL) && (result->data.fl[2] > MIN_VAL) && (fabs(1.0 - (result->data.fl[0]+result->data.fl[1]+result->data.fl[2])) <= 0.01) ) {
					// if((u > 0) || (v > 0) /*&& ((u +v) < 1)*/ )
						// printf("Barycentric: %f %f %f\n", result->data.fl[0], result->data.fl[1], result->data.fl[2]);
						// this point is inside this triangle
						// printf("Point %d,%d inside %d,%d %d,%d %d,%d\n",x,y,trivec[i].points[0].x,trivec[i].points[0].y,
						//	trivec[i].points[1].x,trivec[i].points[1].y,trivec[i].points[2].x,trivec[i].points[2].y);
						
						CvMat * sourcepoint = cvCreateMat(3, 1, CV_32FC1);
						cvMatMul( newcorners, result, sourcepoint );	
					
						double sourcex = sourcepoint->data.fl[0]/*/sourcepoint->data.fl[2]*/;
						double sourcey = sourcepoint->data.fl[1]/*/sourcepoint->data.fl[2]*/;
						if((sourcex >= 0) && (sourcey >= 0) && (sourcex < (image1->width)) && (sourcey < (image1->height))) {
							// printf("%d,%d %d,%d\n",x,y,(int)sourcex,(int)sourcey);
							cvSet2D( image1, y, x, cvGet2D( clean_nonthresh, (int)sourcey, (int)sourcex ) );
						}
	
						
						// printf("Point %d,%d inside %d,%d %d,%d %d,%d\n",x,y,trivec[i].points[0].x,trivec[i].points[0].y,
						//		trivec[i].points[1].x,trivec[i].points[1].y,trivec[i].points[2].x,trivec[i].points[2].y);

						cvReleaseMat( &sourcepoint );
					}
					cvReleaseMat( &result );
					cvReleaseMat( &curp );
				}
			}
			
			for(int k = 0; k < verts; k++) {
				double x = clean_texture->data.fl[2*k+0];
				double y = clean_texture->data.fl[2*k+1];
				
				// check to see if we're inside this triangle
				CvMat * curp = cvCreateMat(3, 1, CV_32FC1);
				CvMat * result = cvCreateMat(3, 1, CV_32FC1);
				curp->data.fl[0] = x;
				curp->data.fl[1] = y;
				curp->data.fl[2] = 1;
				cvMatMul( baryinvvec[i], curp, result );
			
				if( (result->data.fl[0] > MIN_VAL) && (result->data.fl[1] > MIN_VAL) && (result->data.fl[2] > MIN_VAL) && (fabs(1.0 - (result->data.fl[0]+result->data.fl[1]+result->data.fl[2])) <= 0.01) ) {
					
					CvMat * sourcepoint = cvCreateMat(3, 1, CV_32FC1);
					cvMatMul( newcorners, result, sourcepoint );	
				
					double sourcex = sourcepoint->data.fl[0]/*/sourcepoint->data.fl[2]*/;
					double sourcey = sourcepoint->data.fl[1]/*/sourcepoint->data.fl[2]*/;
					if((sourcex >= 0) && (sourcey >= 0) && (sourcex < (image1->width)) && (sourcey < (image1->height))) {
						clean_texture->data.fl[2*k+0] = sourcex;
						clean_texture->data.fl[2*k+1] = sourcey;
						// cvSet2D( image1, y, x, cvGet2D( clean_nonthresh, (int)sourcey, (int)sourcex ) );
					}

					cvReleaseMat( &sourcepoint );
				}
				cvReleaseMat( &result );
				cvReleaseMat( &curp );
			}
			cvReleaseMat( &newcorners );
		}
		cvReleaseMat( &curpoints );
	}

	cvReleaseImage( &clean_nonthresh );

	printf("done.\n");

	cvSaveImage("fullwarp.jpg", image1);

	printf("Drawing subdivisions on warped image...");
	draw_subdiv( image1, delaunay, NULL, NULL, 0, NULL );
	// draw_subdiv( image1, delaunay, delaunay_points, source_points, count, status );
	printf("done.\n");
	
	cvSaveImage("edgeswarp.jpg", image1);

	cvReleaseImage(&image2);

	image2 = cvLoadImage(im2fname, CV_LOAD_IMAGE_COLOR);
	// cvCreateImage( cvGetSize(image2), IPL_DEPTH_8U, 3 );

	// cvCalcSubdivVoronoi2D( delaunay );
	printf("Drawing subdivisions on unwarped image...");
	// draw_subdiv( image2, delaunay, delaunay_points, dest_points, count, status );
	// draw_subdiv( image2, delaunay, NULL, NULL, 0, NULL );
	printf("done.\n");

	cvSaveImage("edges.jpg",image2);

	cvReleaseImage(&image1);
	cvFree(&source_points);
	cvFree(&dest_points);
	cvFree(&status);
	cvReleaseMemStorage(&storage);
	cvFree(&delaunay_points);

	cvReleaseImage(&image2);

	return 0;
}
CV_IMPL double
cvPointPolygonTest( const CvArr* _contour, CvPoint2D32f pt, int measure_dist )
{
    double result = 0;
    CV_FUNCNAME( "cvCheckPointPolygon" );

    __BEGIN__;
    
    CvSeqBlock block;
    CvContour header;
    CvSeq* contour = (CvSeq*)_contour;
    CvSeqReader reader;
    int i, total, counter = 0;
    int is_float;
    double min_dist_num = FLT_MAX, min_dist_denom = 1;
    CvPoint ip = {0,0};

    if( !CV_IS_SEQ(contour) )
    {
        CV_CALL( contour = cvPointSeqFromMat( CV_SEQ_KIND_CURVE + CV_SEQ_FLAG_CLOSED,
                                              _contour, &header, &block ));
    }
    else if( CV_IS_SEQ_POLYGON(contour) )
    {
        if( contour->header_size == sizeof(CvContour) && !measure_dist )
        {
            CvRect r = ((CvContour*)contour)->rect;
            if( pt.x < r.x || pt.y < r.y ||
                pt.x >= r.x + r.width || pt.y >= r.y + r.height )
                return -100;
        }
    }
    else if( CV_IS_SEQ_CHAIN(contour) )
    {
        CV_ERROR( CV_StsBadArg,
            "Chains are not supported. Convert them to polygonal representation using cvApproxChains()" );
    }
    else
        CV_ERROR( CV_StsBadArg, "Input contour is neither a valid sequence nor a matrix" );

    total = contour->total;
    is_float = CV_SEQ_ELTYPE(contour) == CV_32FC2;
    cvStartReadSeq( contour, &reader, -1 );

    if( !is_float && !measure_dist && (ip.x = cvRound(pt.x)) == pt.x && (ip.y = cvRound(pt.y)) == pt.y )
    {
        // the fastest "pure integer" branch
        CvPoint v0, v;
        CV_READ_SEQ_ELEM( v, reader );

        for( i = 0; i < total; i++ )
        {
            int dist;
            v0 = v;
            CV_READ_SEQ_ELEM( v, reader );

            if( (v0.y <= ip.y && v.y <= ip.y) ||
                (v0.y > ip.y && v.y > ip.y) ||
                (v0.x < ip.x && v.x < ip.x) )
            {
                if( ip.y == v.y && (ip.x == v.x || (ip.y == v0.y &&
                    ((v0.x <= ip.x && ip.x <= v.x) || (v.x <= ip.x && ip.x <= v0.x)))) )
                    EXIT;
                continue;
            }

            dist = (ip.y - v0.y)*(v.x - v0.x) - (ip.x - v0.x)*(v.y - v0.y);
            if( dist == 0 )
                EXIT;
            if( v.y < v0.y )
                dist = -dist;
            counter += dist > 0;
        }

        result = counter % 2 == 0 ? -100 : 100;
    }
    else
    {
        CvPoint2D32f v0, v;
        CvPoint iv;

        if( is_float )
        {
            CV_READ_SEQ_ELEM( v, reader );
        }
        else
        {
            CV_READ_SEQ_ELEM( iv, reader );
            v = cvPointTo32f( iv );
        }

        if( !measure_dist )
        {
            for( i = 0; i < total; i++ )
            {
                double dist;
                v0 = v;
                if( is_float )
                {
                    CV_READ_SEQ_ELEM( v, reader );
                }
                else
                {
                    CV_READ_SEQ_ELEM( iv, reader );
                    v = cvPointTo32f( iv );
                }

                if( (v0.y <= pt.y && v.y <= pt.y) ||
                    (v0.y > pt.y && v.y > pt.y) ||
                    (v0.x < pt.x && v.x < pt.x) )
                {
                    if( pt.y == v.y && (pt.x == v.x || (pt.y == v0.y &&
                        ((v0.x <= pt.x && pt.x <= v.x) || (v.x <= pt.x && pt.x <= v0.x)))) )
                        EXIT;
                    continue;
                }

                dist = (double)(pt.y - v0.y)*(v.x - v0.x) - (double)(pt.x - v0.x)*(v.y - v0.y);
                if( dist == 0 )
                    EXIT;
                if( v.y < v0.y )
                    dist = -dist;
                counter += dist > 0;
            }

            result = counter % 2 == 0 ? -100 : 100;
        }
        else
        {
            for( i = 0; i < total; i++ )
            {
                double dx, dy, dx1, dy1, dx2, dy2, dist_num, dist_denom = 1;
        
                v0 = v;
                if( is_float )
                {
                    CV_READ_SEQ_ELEM( v, reader );
                }
                else
                {
                    CV_READ_SEQ_ELEM( iv, reader );
                    v = cvPointTo32f( iv );
                }
        
                dx = v.x - v0.x; dy = v.y - v0.y;
                dx1 = pt.x - v0.x; dy1 = pt.y - v0.y;
                dx2 = pt.x - v.x; dy2 = pt.y - v.y;
        
                if( dx1*dx + dy1*dy <= 0 )
                    dist_num = dx1*dx1 + dy1*dy1;
                else if( dx2*dx + dy2*dy >= 0 )
                    dist_num = dx2*dx2 + dy2*dy2;
                else
                {
                    dist_num = (dy1*dx - dx1*dy);
                    dist_num *= dist_num;
                    dist_denom = dx*dx + dy*dy;
                }

                if( dist_num*min_dist_denom < min_dist_num*dist_denom )
                {
                    min_dist_num = dist_num;
                    min_dist_denom = dist_denom;
                    if( min_dist_num == 0 )
                        break;
                }

                if( (v0.y <= pt.y && v.y <= pt.y) ||
                    (v0.y > pt.y && v.y > pt.y) ||
                    (v0.x < pt.x && v.x < pt.x) )
                    continue;

                dist_num = dy1*dx - dx1*dy;
                if( dy < 0 )
                    dist_num = -dist_num;
                counter += dist_num > 0;
            }

            result = sqrt(min_dist_num/min_dist_denom);
            if( counter % 2 == 0 )
                result = -result;
        }
    }

    __END__;

    return result;
}
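As the implementation shows, when measure_dist is zero the function only reports the side: +100 for a point inside the contour, -100 outside, and 0 exactly on an edge (the early bounding-rect shortcut also returns -100). When measure_dist is non-zero it returns the signed distance to the nearest contour edge. A small illustrative check, with the contour and the point assumed to exist elsewhere:

// contour, x and y are assumed to be defined elsewhere; only the call pattern is illustrated
double side = cvPointPolygonTest( contour, cvPointTo32f( cvPoint(x, y) ), 0 );
if( side > 0 )
    printf( "point is inside the contour\n" );

// with a non-zero third argument the return value is the signed distance to the contour
double signedDist = cvPointPolygonTest( contour, cvPointTo32f( cvPoint(x, y) ), 1 );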
Example #18
void mexFunction(int output_size, mxArray *output[], int input_size, const mxArray *input[]) {
    
    char* input_buf;
    /* copy the string data from input[0] into a C string input_buf. */
    input_buf = mxArrayToString(I_IN);
    CvCapture* capture = 0;

    capture = cvCaptureFromAVI(input_buf);
    if (!capture) {
        fprintf(stderr, "Could not initialize capturing...\n");
        return;
    }

    cvNamedWindow( "LkDemo", 0 );

    for(;;) {
        init = clock();
        IplImage* frame = 0;
        int i, k, c;
        
        frame = cvQueryFrame( capture );
        if (!frame)
            break;

        if (!image) {
            /* allocate all the buffers */
            image = cvCreateImage(cvGetSize(frame), 8, 3);
            image->origin = frame->origin;
            grey = cvCreateImage( cvGetSize(frame), 8, 1 );
            prev_grey = cvCreateImage( cvGetSize(frame), 8, 1 );
            pyramid = cvCreateImage( cvGetSize(frame), 8, 1 );
            prev_pyramid = cvCreateImage( cvGetSize(frame), 8, 1 );
            points[0] = (CvPoint2D32f*)cvAlloc(MAX_COUNT * sizeof(points[0][0]));
            points[1] = (CvPoint2D32f*)cvAlloc(MAX_COUNT * sizeof(points[0][0]));
            pointadd[0] = (CvPoint2D32f*)cvAlloc(MAX_COUNT * sizeof(points[0][0]));
            ptcolor = (int*)cvAlloc(MAX_COUNT*sizeof(ptcolor[0]));
            status = (char*)cvAlloc(MAX_COUNT);
            flags = 0;
        }

        cvCopy( frame, image, 0 );
        cvCvtColor( image, grey, CV_BGR2GRAY );
        //CvRect rect = cvRect(image->width/2-50, 0, 100,image->height*0.6);
        
        if (night_mode)
            cvZero( image );

        countlast = ptcount;
        if (need_to_add) {
            /* automatic initialization */
            IplImage* eig = cvCreateImage(cvGetSize(grey), 32, 1);
            IplImage* temp = cvCreateImage(cvGetSize(grey), 32, 1);
            double quality = 0.01;
            double min_distance = 10;
            
            countadd = MAX_COUNT;
            //cvSetImageROI(grey, rect);
            //cvSetImageROI(eig, rect);
            //cvSetImageROI(temp, rect);
            
            cvGoodFeaturesToTrack(grey, eig, temp, pointadd[0], &countadd, quality, min_distance, 0, 3, 0, 0.04);
            cvFindCornerSubPix(grey, pointadd[0], countadd, cvSize(win_size, win_size), cvSize(-1, -1), cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS, 20, 0.03));

            //for(l=0;l<countadd;l++)
            //	pointadd[0][l].x = pointadd[0][l].x + image->width/2-50;
            cvReleaseImage( &eig );
            cvReleaseImage( &temp );
            //cvResetImageROI(grey);
            for (m = 0; m < countadd; m++) {
                flag = 1;
                for (i = 0; i < countlast; i++) {
                    double dx = pointadd[0][m].x - points[0][i].x;
                    double dy = pointadd[0][m].y - points[0][i].y;

                    if( dx*dx + dy*dy <= 100 ) {
                        flag = 0;
                        break;
                    }
                }

                if (flag==1) {
                    points[0][ptcount++] = pointadd[0][m];
                    cvCircle(image, cvPointFrom32f(points[1][ptcount-1]), 3, CV_RGB(255, 0, 0), -1, 8, 0);
                }
                if (ptcount >= MAX_COUNT) {
                    break;
                }
            }
        }

        if (need_to_init) {
            /* automatic initialization */
            IplImage* eig = cvCreateImage( cvGetSize(grey), 32, 1 );
            IplImage* temp = cvCreateImage( cvGetSize(grey), 32, 1 );
            double quality = 0.01;
            double min_distance = 10;
            
            ptcount = MAX_COUNT;
            cvGoodFeaturesToTrack(grey, eig, temp, points[1], &ptcount, quality, min_distance, 0, 3, 0, 0.04);
            cvFindCornerSubPix(grey, points[1], ptcount, cvSize(win_size, win_size), cvSize(-1, -1), cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS, 20, 0.03));
            cvReleaseImage( &eig );
            cvReleaseImage( &temp );
            add_remove_pt = 0;
            /* set the point color (cycle through the 5 palette colors) */
            for( i = 0; i < ptcount; i++ )
                ptcolor[i] = i % 5;
        }
        else if( ptcount > 0 ) {
            cvCalcOpticalFlowPyrLK( prev_grey, grey, prev_pyramid, pyramid,
                    points[0], points[1], ptcount, cvSize(win_size, win_size), 3, status, 0,
                    cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS, 20, 0.03), flags );
            flags |= CV_LKFLOW_PYR_A_READY;
            for( i = k = 0; i < ptcount; i++ ) {
                if( add_remove_pt ) {
                    double dx = pt.x - points[1][i].x;
                    double dy = pt.y - points[1][i].y;

                    if( dx*dx + dy*dy <= 25 ) {
                        add_remove_pt = 0;
                        continue;
                    }
                }

                pt = cvPointFrom32f(points[1][i]);
                pttl.x = pt.x-3; pttl.y = pt.y-3; // point top left
                ptdr.x = pt.x+3; ptdr.y = pt.y+3; // point down right

                if( !status[i] ){
                    pt = cvPointFrom32f(points[0][i]);
                    cvCircle( image, pt, 3, CV_RGB(0, 0, 255), -1, 8, 0);
                    continue;
                }

                pt = cvPointFrom32f(points[1][i]);
                points[1][k] = points[1][i];
                if(i<countlast){
                    /* matched feats */
                    ptcolor[k] = ptcolor[i];
                    switch (ptcolor[k]) {
                        case 0:
                            cvCircle( image, pt, 3, CV_RGB(0, 255, 0), -1, 8, 0);
                            break;
                        case 1:
                            cvCircle( image, pt, 3, CV_RGB(255, 255, 0), -1, 8, 0);
                            break;
                        case 2:
                            cvCircle( image, pt, 3, CV_RGB(0, 255, 255), -1, 8, 0);
                            break;
                        case 3:
                            cvCircle( image, pt, 3, CV_RGB(255, 0, 255), -1, 8, 0);
                            break;
                        case 4:
                            cvCircle( image, pt, 3, CV_RGB(255, 0, 0), -1, 8, 0);                            
                            break;
                        default:
                            cvCircle( image, pt, 3, CV_RGB(0, 255, 0), -1, 8, 0);
                    }
                }
                else {
                    /* new feats */
                    switch (k%5) {
                        case 0:
                            //  void cvRectangle( CvArr* img, CvPoint pt1, CvPoint pt2, CvScalar color, int thickness=1, int line_type=8, int shift=0 );
                            cvRectangle( image, pttl, ptdr, CV_RGB(0, 255, 0), -1, 8, 0);
                            ptcolor[k] = 0;
                            break;
                        case 1:
                            cvRectangle( image, pttl, ptdr, CV_RGB(255, 255, 0), -1, 8, 0);
                            ptcolor[k] = 1;
                            break;
                        case 2:
                            cvRectangle( image, pttl, ptdr, CV_RGB(0, 255, 255), -1, 8, 0);
                            ptcolor[k] = 2;
                            break;
                        case 3:
                            cvRectangle( image, pttl, ptdr, CV_RGB(255, 0, 255), -1, 8, 0);
                            ptcolor[k] = 3;
                            break;
                        case 4:
                            cvRectangle( image, pttl, ptdr, CV_RGB(255, 0, 0), -1, 8, 0);
                            ptcolor[k] = 4;
                            break;
                        default:
                            cvRectangle( image, pttl, ptdr, CV_RGB(0, 255, 0), -1, 8, 0);
                    }
                }
                k++;
            }
            ptcount = k;
        }

        if( add_remove_pt && ptcount < MAX_COUNT ) {
            points[1][ptcount++] = cvPointTo32f(pt);
            cvFindCornerSubPix( grey, points[1] + ptcount - 1, 1,
                    cvSize(win_size, win_size), cvSize(-1, -1),
                    cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS, 20, 0.03));
            add_remove_pt = 0;
        }

        CV_SWAP( prev_grey, grey, swap_temp );
        CV_SWAP( prev_pyramid, pyramid, swap_temp );
        CV_SWAP( points[0], points[1], swap_points );
        need_to_init = 0;
        cvShowImage( "LkDemo", image );

        /* save the annotated frame as Rst/Rst<imgseq>.jpg */
        std::ostringstream fs;
        fs << "Rst/Rst" << imgseq << ".jpg";
        cvSaveImage(fs.str().c_str(), image);
        imgseq++;
        if(imgseq>500)
            break;

        c = cvWaitKey(10);
        if( (char)c == 27 )
            break;
        switch( (char) c ) {
            case 'r':
                need_to_init = 1;
                break;
            case 'c':
                ptcount = 0;
                break;
            case 'n':
                night_mode ^= 1;
                break;
            default:
                ;
        }
        if (ptcount < 100) {
            need_to_init = 1;
        }
        if (ptcount > 50 && ptcount < MAX_COUNT) {
            need_to_add = 1;
        }
        final = clock()-init;
    }
    cvReleaseCapture( &capture );
    mxFree(input_buf);
}
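
For reference, the merge step above keeps a freshly detected corner only if it lies at least 10 pixels from every point already being tracked. A minimal standalone sketch of that test (the helper name and the min_dist parameter are illustrative, not part of the original listing):

#include <opencv/cv.h>

/* Return 1 if `candidate` is farther than `min_dist` pixels from every
   point in tracked[0..count-1], 0 otherwise. */
static int is_far_from_existing(CvPoint2D32f candidate,
                                const CvPoint2D32f* tracked, int count,
                                double min_dist)
{
    double min_dist2 = min_dist * min_dist;
    for (int i = 0; i < count; i++) {
        double dx = candidate.x - tracked[i].x;
        double dy = candidate.y - tracked[i].y;
        if (dx * dx + dy * dy <= min_dist2)
            return 0;   /* too close to an existing feature */
    }
    return 1;
}

With min_dist = 10 this reproduces the dx*dx + dy*dy <= 100 test in the loop above.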
Exemple #19
0
void Points2::retreiveFrame(cv::Mat & frame) {

	bobs.clear();
	double horizProp = (double) 640 / frame.cols;
	double vertProp = (double) 480 / frame.rows;

    CvSize frameSize;
    frameSize.width = frame.size().width;
    frameSize.height = frame.size().height;

    checkSize(frameSize);

    IplImage *dupa = new IplImage(frame);

    cvCopy(dupa,tempImage,0);

    cvCvtColor(tempImage, currGreyImage, CV_BGR2GRAY);

    int i, k;

    if( count > 0 )
    {
        cvCalcOpticalFlowPyrLK( prevGreyImage, currGreyImage, prevPyramid, currPyramid,
            points[0], points[1], count, cvSize(20,20), 3, status, 0,
            cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03), flags );

        flags |= CV_LKFLOW_PYR_A_READY;
        for( i = k = 0; i < count; i++ )
        {
            if( add_remove_pt )
            {
                double dx = pt.x - points[1][i].x;
                double dy = pt.y - points[1][i].y;

                if( dx*dx + dy*dy <= 25 )
                {
                    add_remove_pt = 0;
                    continue;
                }
            }

            if( !status[i] )
                continue;

            points[1][k++] = points[1][i];
            cv::circle( frame, cvPointFrom32f(points[1][i]), 3, CV_RGB(0,255,0), -1, 8,0);
			bobs.append(BOb((quint16) (horizProp * points[1][i].x),
							(quint16) (vertProp * points[1][i].y),
							1,1));
        }
        count = k;
    }

    if( add_remove_pt && count < max_count )
    {

        points[1][count++] = cvPointTo32f(pt);

        cvFindCornerSubPix( currGreyImage, points[1] + count - 1, 1,
            cvSize(10,10), cvSize(-1,-1),
            cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03));
        add_remove_pt = 0;

    }

    CV_SWAP( prevGreyImage, currGreyImage, swapImage );
    CV_SWAP( prevPyramid, currPyramid, swapImage );
    CV_SWAP( points[0], points[1], swap_points );
    delete dupa;

	if(!bobs.empty())
		emit bobjects(&bobs);
}
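
retreiveFrame reports every tracked point in a fixed 640x480 reference space regardless of the capture resolution, which is what horizProp and vertProp are for. A small sketch of that rescaling on its own (the helper name is illustrative):

#include <opencv2/core/core.hpp>

// Map a point from frame coordinates into a fixed 640x480 reference space,
// mirroring the horizProp/vertProp scaling used above.
static cv::Point2f toReferenceSpace(const cv::Point2f& p, const cv::Size& frameSize)
{
    double horizProp = 640.0 / frameSize.width;
    double vertProp  = 480.0 / frameSize.height;
    return cv::Point2f((float)(horizProp * p.x), (float)(vertProp * p.y));
}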
Exemple #20
0
/* main code */
int main(int argc, char** argv ){
	CvCapture   *capture;
	int i, key = 0;
	//*struct tm *newtime; 
	time_t second,milsecond;
	char rightfilename[100], leftfilename[100];

	// load the boosted classifier cascades
//	cascade = (CvHaarClassifierCascade *) cvLoad (cascade_name, 0, 0, 0);
	righteye_cascade = (CvHaarClassifierCascade *) cvLoad (righteye_cascade_name, 0, 0, 0);
	lefteye_cascade = (CvHaarClassifierCascade *) cvLoad (lefteye_cascade_name, 0, 0, 0);

	/* initialize camera */
	capture = cvCaptureFromCAM( 0 );

	/* always check */
	if( !capture ) return 1;

	/* get video properties, needed by template image */
	frame = cvQueryFrame( capture );
	if ( !frame ) return 1;
    
	/* create template image */
	tpl = cvCreateImage( cvSize( TPL_WIDTH, TPL_HEIGHT ), 
                         frame->depth, frame->nChannels );
    
	/* create image for template matching result */
	tm = cvCreateImage( cvSize( WINDOW_WIDTH  - TPL_WIDTH  + 1,
                                WINDOW_HEIGHT - TPL_HEIGHT + 1 ),
                        IPL_DEPTH_32F, 1 );

	//eyezone
	eyezone1 = cvCreateImage(cvSize(50,50), IPL_DEPTH_8U, 1);
	minieyezone1 = cvCreateImage(cvSize(16,16), IPL_DEPTH_8U, 1);
	output1 = cvCreateImage(cvSize(512,512), IPL_DEPTH_8U, 1);
   	eyezone2 = cvCreateImage(cvSize(50,50), IPL_DEPTH_8U, 1);
	minieyezone2 = cvCreateImage(cvSize(16,16), IPL_DEPTH_8U, 1);
	output2 = cvCreateImage(cvSize(512,512), IPL_DEPTH_8U, 1);

	/* create a window and install mouse handler */
	cvNamedWindow( "video", CV_WINDOW_AUTOSIZE );
	cvSetMouseCallback( "video", mouseHandler, NULL );
	cvNamedWindow("output1", CV_WINDOW_AUTOSIZE); 
	cvNamedWindow("output2", CV_WINDOW_AUTOSIZE);

	gray = cvCreateImage (cvGetSize (frame), IPL_DEPTH_8U, 1);
	righteye_storage = cvCreateMemStorage (0);
	lefteye_storage = cvCreateMemStorage (0);
	CvPoint righteye_center, lefteye_center;
    
	// eye candidate
	CvRect righteye_cand1, righteye_cand2, lefteye_cand1, lefteye_cand2;
	CvRect right = cvRect(0, 0, 0, 0), left = cvRect(0, 0, 0, 0);
	int eye_candidate_num = 0;	



	while( key != 'q' ) {
		eye_candidate_num = 0;
		/* get a frame */
		frame = cvQueryFrame( capture );

		/* always check */
		if( !frame ) break;

		/* 'fix' frame */
		/*   cvFlip( frame, frame, -1 ); */
		frame->origin = 0;
        
		/* perform tracking if template is available */
		if( is_tracking ) trackObject();
        

		cvClearMemStorage (righteye_storage);
		cvClearMemStorage (lefteye_storage);
		cvCvtColor (frame, gray, CV_BGR2GRAY);
		cvEqualizeHist (gray, gray);
		righteye = cvHaarDetectObjects (gray, righteye_cascade, righteye_storage, 1.11, 4, 0, cvSize (40, 40), cvSize(40,40));
		lefteye = cvHaarDetectObjects (gray, lefteye_cascade, lefteye_storage, 1.11, 4, 0, cvSize (40, 40), cvSize(40,40));


		// draw right-eye detections as circles
		for (i = 0; i < (righteye ? righteye->total : 0); i++) {
			CvRect *r = (CvRect *) cvGetSeqElem (righteye, i);
			CvPoint center;
			int radius;
			center.x = cvRound (r->x + r->width * 0.5);
			center.y = cvRound (r->y + r->height * 0.5);
			radius = cvRound ((r->width + r->height) * 0.25);
			cvCircle (frame, center, radius, colors[i % 8], 3, 8, 0);
		// right-eye candidates
			if(i == 0){
				righteye_cand1 = *r;
				}
			if(i == 1){
				righteye_cand2 = *r;
				}
			}
		// draw left-eye detections as rectangles
		for (i = 0; i < (lefteye ? lefteye->total : 0); i++) {
			CvRect *r = (CvRect *) cvGetSeqElem (lefteye, i);
			CvPoint apex1, apex2;
			apex1 = cvPoint(r->x, r->y);
			apex2.x = cvRound(r->x + r->width);
			apex2.y = cvRound(r->y + r->height);
			cvRectangle (frame,apex1, apex2, colors[i % 8], 3, 8, 0);
			
		// left-eye candidates
			if(i == 0){
				lefteye_cand1 = *r;
				}
			if(i == 1){
				lefteye_cand2 = *r;
				}
			}
		// narrow down the candidates
			if(righteye->total >= 1){
				if(righteye->total >= 2){
					if(righteye_cand1.x <= righteye_cand2.x){
						right = righteye_cand1;
						righteye_center.x = cvRound(right.x + right.width*0.5);
						righteye_center.y = cvRound(right.y + right.height*0.5);
						}			
					else{
						right = righteye_cand2;
						righteye_center.x = cvRound(right.x + right.width*0.5);
						righteye_center.y = cvRound(right.y + right.height*0.5);
						}
					}
				else{
					right = righteye_cand1;
					righteye_center.x = cvRound(right.x + right.width*0.5);
					righteye_center.y = cvRound(right.y + right.height*0.5);
					}
				cvReleaseImage(&eyezone1);
				eyezone1 = cvCreateImage(cvSize(right.width, right.height), IPL_DEPTH_8U, 1);
				cvGetRectSubPix(gray, eyezone1, cvPointTo32f(righteye_center));
				cvEqualizeHist(eyezone1, eyezone1);
				cvResize(eyezone1, minieyezone1, CV_INTER_LINEAR);
				cvResize(minieyezone1, output1, CV_INTER_NN);
			}



			if(lefteye->total >= 1){
				if(lefteye->total >= 2){
					if(lefteye_cand1.x >= lefteye_cand2.x){
						left = lefteye_cand1;
						lefteye_center.x = cvRound(left.x + left.width*0.5);
						lefteye_center.y = cvRound(left.y + left.height*0.5);
						}			
					else{
						left = lefteye_cand2;
						lefteye_center.x = cvRound(left.x + left.width*0.5);
						lefteye_center.y = cvRound(left.y + left.height*0.5);
						}
					}
				else{
					left = lefteye_cand1;
					lefteye_center.x = cvRound(left.x + left.width*0.5);
					lefteye_center.y = cvRound(left.y + left.height*0.5);
					}
				cvReleaseImage(&eyezone2);
				eyezone2 = cvCreateImage(cvSize(left.width, left.height), IPL_DEPTH_8U, 1);
				cvGetRectSubPix(gray, eyezone2, cvPointTo32f(lefteye_center));
				cvEqualizeHist(eyezone2, eyezone2);
				cvResize(eyezone2, minieyezone2, CV_INTER_LINEAR);
				cvResize(minieyezone2, output2, CV_INTER_NN);
			}
			printf("righteye width = %d, height = %d\n", right.width, right.height); 
			printf("lefteye width = %d, height = %d\n", left.width, left.height);
	//		printf("righteye x = %d\n", right.x);
	//		printf("lefteye x = %d\n", left.x);





		/* display frame */
		cvShowImage( "video", frame);
		//cvShowImage( "eyezone1", eyezone1);
		//cvShowImage( "eyezone2", eyezone2);
		cvShowImage( "output1", output1);
		cvShowImage( "output2", output2);

		// file output and time measurement
		time(&second);
		milsecond = clock();
	//	printf("time [sec] = %ld\n", second);
		printf("elapsed time [usec] = %ld\n", milsecond);
		//sprintf(filename, "%ld.bmp",second);
		//printf("sprintf = %s\n", filename);
		//cvSaveImage(filename, frame,0); 
	   	if(key == 'n'){
			sprintf(rightfilename, "n_right%ld.bmp", milsecond);
			sprintf(leftfilename, "n_left%ld.bmp", milsecond);
	      		printf("fileoutput %s, %s\n", rightfilename, leftfilename);
	  		cvSaveImage(rightfilename, minieyezone1, 0); 		
	  		cvSaveImage(leftfilename, minieyezone2, 0); 		
		}
	   	if(key == 'h'){
			sprintf(rightfilename, "h_right%ld.bmp", milsecond);
			sprintf(leftfilename, "h_left%ld.bmp", milsecond);
	      		printf("fileoutput %s, %s\n", rightfilename, leftfilename);
	  		cvSaveImage(rightfilename, minieyezone1, 0); 		
	  		cvSaveImage(leftfilename, minieyezone2, 0); 		
		}
	   	if(key == 'j'){
			sprintf(rightfilename, "j_right%ld.bmp", milsecond);
			sprintf(leftfilename, "j_left%ld.bmp", milsecond);
	      		printf("fileoutput %s, %s\n", rightfilename, leftfilename);
	  		cvSaveImage(rightfilename, minieyezone1, 0); 		
	  		cvSaveImage(leftfilename, minieyezone2, 0); 		
		}
	   	if(key == 'k'){
			sprintf(rightfilename, "k_right%ld.bmp", milsecond);
			sprintf(leftfilename, "k_left%ld.bmp", milsecond);
	      		printf("fileoutput %s, %s\n", rightfilename, leftfilename);
	  		cvSaveImage(rightfilename, minieyezone1, 0); 		
	  		cvSaveImage(leftfilename, minieyezone2, 0); 		
		}
	   	if(key == 'l'){
			sprintf(rightfilename, "l_right%ld.bmp", milsecond);
			sprintf(leftfilename, "l_left%ld.bmp", milsecond);
	      		printf("fileoutput %s, %s\n", rightfilename, leftfilename);
	  		cvSaveImage(rightfilename, minieyezone1, 0); 		
	  		cvSaveImage(leftfilename, minieyezone2, 0); 		
		}








		
		/* exit if user press 'q' */
		key = cvWaitKey( 1 );
		}

	/* free memory */
	cvDestroyWindow( "video" );
	cvDestroyWindow( "output1");
	cvDestroyWindow( "output2");
	cvReleaseCapture( &capture );
	cvReleaseImage( &tpl );
	cvReleaseImage( &tm );
	cvReleaseImage( &gray);
   	cvReleaseImage( &eyezone1);
	cvReleaseImage( &eyezone2);
	cvReleaseImage( &minieyezone1);
	cvReleaseImage( &minieyezone2);
	cvReleaseImage( &output1);
	cvReleaseImage( &output2);
	return 0;
	}
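
Each detected eye above goes through the same chain: crop a patch centred on the detection, equalize its histogram, shrink it to 16x16, and enlarge it again for display. A sketch of that chain as one helper under the same legacy API (the function name is illustrative); the caller owns the 16x16 destination image and the temporary patch is released on every call:

#include <opencv/cv.h>

/* Cut a rect.width x rect.height patch centred on `center` out of the
   grayscale frame, equalize it, and shrink it into the caller's 16x16 image. */
static void extract_eye_patch(const IplImage* gray, CvRect rect, CvPoint center,
                              IplImage* mini /* 16x16, IPL_DEPTH_8U, 1 channel */)
{
    IplImage* patch = cvCreateImage(cvSize(rect.width, rect.height), IPL_DEPTH_8U, 1);
    cvGetRectSubPix(gray, patch, cvPointTo32f(center));
    cvEqualizeHist(patch, patch);
    cvResize(patch, mini, CV_INTER_LINEAR);
    cvReleaseImage(&patch);
}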
Exemple #21
0
int main( int argc, char** argv )
{
    CvCapture* capture = 0;

    if( argc == 1 || (argc == 2 && strlen(argv[1]) == 1 && isdigit(argv[1][0])))
        capture = cvCaptureFromCAM( argc == 2 ? argv[1][0] - '0' : 0 );
    else if( argc == 2 )
        capture = cvCaptureFromAVI( argv[1] );

    if( !capture )
    {
        fprintf(stderr,"Could not initialize capturing...\n");
        return -1;
    }

    /* print a welcome message, and the OpenCV version */
    printf ("Welcome to lkdemo, using OpenCV version %s (%d.%d.%d)\n",
	    CV_VERSION,
	    CV_MAJOR_VERSION, CV_MINOR_VERSION, CV_SUBMINOR_VERSION);

    printf( "Hot keys: \n"
            "\tESC - quit the program\n"
            "\tr - auto-initialize tracking\n"
            "\tc - delete all the points\n"
            "\tn - switch the \"night\" mode on/off\n"
            "To add/remove a feature point click it\n" );

    cvNamedWindow( "LkDemo", 0 );
    cvSetMouseCallback( "LkDemo", on_mouse, 0 );

    for(;;)
    {
        IplImage* frame = 0;
        int i, k, c;

        frame = cvQueryFrame( capture );
        if( !frame )
            break;

        if( !image )
        {
            /* allocate all the buffers */
            image = cvCreateImage( cvGetSize(frame), 8, 3 );
            image->origin = frame->origin;
            grey = cvCreateImage( cvGetSize(frame), 8, 1 );
            prev_grey = cvCreateImage( cvGetSize(frame), 8, 1 );
            pyramid = cvCreateImage( cvGetSize(frame), 8, 1 );
            prev_pyramid = cvCreateImage( cvGetSize(frame), 8, 1 );
            points[0] = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(points[0][0]));
            points[1] = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(points[0][0]));
            status = (char*)cvAlloc(MAX_COUNT);
            flags = 0;
        }

        cvCopy( frame, image, 0 );
        cvCvtColor( image, grey, CV_BGR2GRAY );

        if( night_mode )
            cvZero( image );

        if( need_to_init )
        {
            /* automatic initialization */
            IplImage* eig = cvCreateImage( cvGetSize(grey), 32, 1 );
            IplImage* temp = cvCreateImage( cvGetSize(grey), 32, 1 );
            double quality = 0.01;
            double min_distance = 10;

            count = MAX_COUNT;
            cvGoodFeaturesToTrack( grey, eig, temp, points[1], &count,
                                   quality, min_distance, 0, 3, 0, 0.04 );
            cvFindCornerSubPix( grey, points[1], count,
                cvSize(win_size,win_size), cvSize(-1,-1),
                cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03));
            cvReleaseImage( &eig );
            cvReleaseImage( &temp );

            add_remove_pt = 0;
        }
        else if( count > 0 )
        {
            cvCalcOpticalFlowPyrLK( prev_grey, grey, prev_pyramid, pyramid,
                points[0], points[1], count, cvSize(win_size,win_size), 3, status, 0,
                cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03), flags );
            flags |= CV_LKFLOW_PYR_A_READY;
            for( i = k = 0; i < count; i++ )
            {
                if( add_remove_pt )
                {
                    double dx = pt.x - points[1][i].x;
                    double dy = pt.y - points[1][i].y;

                    if( dx*dx + dy*dy <= 25 )
                    {
                        add_remove_pt = 0;
                        continue;
                    }
                }

                if( !status[i] )
                    continue;

                points[1][k++] = points[1][i];
                cvCircle( image, cvPointFrom32f(points[1][i]), 3, CV_RGB(0,255,0), -1, 8,0);
            }
            count = k;
        }

        if( add_remove_pt && count < MAX_COUNT )
        {
            points[1][count++] = cvPointTo32f(pt);
            cvFindCornerSubPix( grey, points[1] + count - 1, 1,
                cvSize(win_size,win_size), cvSize(-1,-1),
                cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03));
            add_remove_pt = 0;
        }

        CV_SWAP( prev_grey, grey, swap_temp );
        CV_SWAP( prev_pyramid, pyramid, swap_temp );
        CV_SWAP( points[0], points[1], swap_points );
        need_to_init = 0;
        cvShowImage( "LkDemo", image );

        c = cvWaitKey(10);
        if( (char)c == 27 )
            break;
        switch( (char) c )
        {
        case 'r':
            need_to_init = 1;
            break;
        case 'c':
            count = 0;
            break;
        case 'n':
            night_mode ^= 1;
            break;
        default:
            ;
        }
    }

    cvReleaseCapture( &capture );
    cvDestroyWindow("LkDemo");

    return 0;
}
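
For comparison only, a rough sketch of the same tracking loop written against the OpenCV C++ API (cv::Mat buffers, no manual pyramid management); this is a modernized approximation, not part of the original lkdemo, and the feature count of 500 is an assumption:

#include <opencv2/opencv.hpp>
#include <vector>

int trackSketch(cv::VideoCapture& cap)
{
    cv::Mat frame, gray, prevGray;
    std::vector<cv::Point2f> prevPts, nextPts;
    std::vector<uchar> status;
    std::vector<float> err;

    while (cap.read(frame)) {
        cv::cvtColor(frame, gray, cv::COLOR_BGR2GRAY);
        if (prevPts.empty()) {
            // (re)detect features, analogous to the need_to_init branch
            cv::goodFeaturesToTrack(gray, prevPts, 500, 0.01, 10);
        } else {
            cv::calcOpticalFlowPyrLK(prevGray, gray, prevPts, nextPts, status, err);
            std::vector<cv::Point2f> kept;
            for (size_t i = 0; i < nextPts.size(); i++) {
                if (!status[i])
                    continue;           // drop points the flow lost
                kept.push_back(nextPts[i]);
                cv::circle(frame, nextPts[i], 3, cv::Scalar(0, 255, 0), -1);
            }
            prevPts = kept;
        }
        cv::swap(prevGray, gray);       // current frame becomes the previous one
        cv::imshow("LkDemo", frame);
        if ((char)cv::waitKey(10) == 27)
            break;
    }
    return 0;
}

Typical use would be cv::VideoCapture cap(0); trackSketch(cap);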
Exemple #22
0
CV_IMPL int
cvMinEnclosingCircle( const void* array, CvPoint2D32f * _center, float *_radius )
{
    const int max_iters = 100;
    const float eps = FLT_EPSILON*2;
    CvPoint2D32f center = { 0, 0 };
    float radius = 0;
    int result = 0;

    if( _center )
        _center->x = _center->y = 0.f;
    if( _radius )
        *_radius = 0;

    CV_FUNCNAME( "cvMinEnclosingCircle" );

    __BEGIN__;

    CvSeqReader reader;
    int i, k, count;
    CvPoint2D32f pts[8];
    CvContour contour_header;
    CvSeqBlock block;
    CvSeq* sequence = 0;
    int is_float;

    if( !_center || !_radius )
        CV_ERROR( CV_StsNullPtr, "Null center or radius pointers" );

    if( CV_IS_SEQ(array) )
    {
        sequence = (CvSeq*)array;
        if( !CV_IS_SEQ_POINT_SET( sequence ))
            CV_ERROR( CV_StsBadArg, "The passed sequence is not a valid contour" );
    }
    else
    {
        CV_CALL( sequence = cvPointSeqFromMat(
            CV_SEQ_KIND_GENERIC, array, &contour_header, &block ));
    }

    if( sequence->total <= 0 )
        CV_ERROR_FROM_STATUS( CV_BADSIZE_ERR );

    CV_CALL( cvStartReadSeq( sequence, &reader, 0 ));

    count = sequence->total;
    is_float = CV_SEQ_ELTYPE(sequence) == CV_32FC2;

    if( !is_float )
    {
        CvPoint *pt_left, *pt_right, *pt_top, *pt_bottom;
        CvPoint pt;
        pt_left = pt_right = pt_top = pt_bottom = (CvPoint *)(reader.ptr);
        CV_READ_SEQ_ELEM( pt, reader );

        for( i = 1; i < count; i++ )
        {
            CvPoint* pt_ptr = (CvPoint*)reader.ptr;
            CV_READ_SEQ_ELEM( pt, reader );

            if( pt.x < pt_left->x )
                pt_left = pt_ptr;
            if( pt.x > pt_right->x )
                pt_right = pt_ptr;
            if( pt.y < pt_top->y )
                pt_top = pt_ptr;
            if( pt.y > pt_bottom->y )
                pt_bottom = pt_ptr;
        }

        pts[0] = cvPointTo32f( *pt_left );
        pts[1] = cvPointTo32f( *pt_right );
        pts[2] = cvPointTo32f( *pt_top );
        pts[3] = cvPointTo32f( *pt_bottom );
    }
    else
    {
        CvPoint2D32f *pt_left, *pt_right, *pt_top, *pt_bottom;
        CvPoint2D32f pt;
        pt_left = pt_right = pt_top = pt_bottom = (CvPoint2D32f *) (reader.ptr);
        CV_READ_SEQ_ELEM( pt, reader );

        for( i = 1; i < count; i++ )
        {
            CvPoint2D32f* pt_ptr = (CvPoint2D32f*)reader.ptr;
            CV_READ_SEQ_ELEM( pt, reader );

            if( pt.x < pt_left->x )
                pt_left = pt_ptr;
            if( pt.x > pt_right->x )
                pt_right = pt_ptr;
            if( pt.y < pt_top->y )
                pt_top = pt_ptr;
            if( pt.y > pt_bottom->y )
                pt_bottom = pt_ptr;
        }

        pts[0] = *pt_left;
        pts[1] = *pt_right;
        pts[2] = *pt_top;
        pts[3] = *pt_bottom;
    }

    for( k = 0; k < max_iters; k++ )
    {
        double min_delta = 0, delta;
        CvPoint2D32f ptfl;
        
        icvFindEnslosingCicle4pts_32f( pts, &center, &radius );
        cvStartReadSeq( sequence, &reader, 0 );

        for( i = 0; i < count; i++ )
        {
            if( !is_float )
            {
                ptfl.x = (float)((CvPoint*)reader.ptr)->x;
                ptfl.y = (float)((CvPoint*)reader.ptr)->y;
            }
            else
            {
                ptfl = *(CvPoint2D32f*)reader.ptr;
            }
            CV_NEXT_SEQ_ELEM( sequence->elem_size, reader );

            delta = icvIsPtInCircle( ptfl, center, radius );
            if( delta < min_delta )
            {
                min_delta = delta;
                pts[3] = ptfl;
            }
        }
        result = min_delta >= 0;
        if( result )
            break;
    }

    if( !result )
    {
        cvStartReadSeq( sequence, &reader, 0 );
        radius = 0.f;

        for( i = 0; i < count; i++ )
        {
            CvPoint2D32f ptfl;
            float t, dx, dy;

            if( !is_float )
            {
                ptfl.x = (float)((CvPoint*)reader.ptr)->x;
                ptfl.y = (float)((CvPoint*)reader.ptr)->y;
            }
            else
            {
                ptfl = *(CvPoint2D32f*)reader.ptr;
            }

            CV_NEXT_SEQ_ELEM( sequence->elem_size, reader );
            dx = center.x - ptfl.x;
            dy = center.y - ptfl.y;
            t = dx*dx + dy*dy;
            radius = MAX(radius,t);
        }

        radius = (float)(sqrt(radius)*(1 + eps));
        result = 1;
    }

    __END__;

    *_center = center;
    *_radius = radius;

    return result;
}
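
A short usage sketch: the function accepts either a point sequence or a plain matrix of points (the matrix is wrapped with cvPointSeqFromMat internally), so it can be called on a small stack buffer; the values below are illustrative:

#include <opencv/cv.h>
#include <stdio.h>

int main(void)
{
    CvPoint2D32f buf[4] = { {0, 0}, {10, 0}, {10, 10}, {0, 10} };
    CvMat pts = cvMat(1, 4, CV_32FC2, buf);
    CvPoint2D32f center;
    float radius;

    if (cvMinEnclosingCircle(&pts, &center, &radius))
        printf("center = (%.1f, %.1f), radius = %.2f\n", center.x, center.y, radius);
    return 0;
}

For this 10x10 square the circle is centred at (5, 5) with radius sqrt(50), about 7.07.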
Exemple #23
0
int main(int argc, char* args[])
{
	printf("Hello world\n");

	TracksterRenderer renderer;
	TracksterAVI trackster;
	
	renderer.hTrackster = &trackster;
	
	renderer.init();

	HANDLE trackingThread = CreateThread(NULL, 0, TrackingThread, &trackster, 0, NULL);

	IplImage* image = cvCreateImage({ 1000, 1000 }, IPL_DEPTH_32F, 4);
	CvPoint trainingPoints[5] = { { 500, 500 }, { 100, 100 }, { 900, 900 }, { 100, 900 }, { 900, 100 } };

	int trainingIndex = -1;
	int trainingPass = 0;

	CvPoint2D32f trainingDeltas[3][5];
	int trainingFrames[3][5];

	//cvSetMouseCallback(h_trainingView, mouseClickCallback, &trackster);

	SDL_Event event;

	while (runTracking) {
		
		while (SDL_PollEvent(&event)) {
			
			char key = (event.type == SDL_KEYDOWN) ? event.key.keysym.sym : -1;
			 
			switch (key) {
			case('x') :
				runTracking = false;

				if (testPointIndex > 0) {

					std::ofstream outFile;
					outFile.open("results.txt");

					for (int j = 0; j < 3; j++) {
						for (int i = 0; i < 5; i++) {
							outFile << trainingFrames[j][i] << ","
								<< trainingPoints[i].x << "," << trainingPoints[i].y << ","
								<< trainingDeltas[j][i].x << "," << trainingDeltas[j][i].y;
							outFile << "\n";
						}
					}

					for (int i = 0; i < testPointIndex; i++) {
						outFile << testFrames[i] << "," << testPoints[i].x << "," << testPoints[i].y;
						outFile << "," << testProjections[i].x << "," << testProjections[i].y;
						outFile << "\n";
					}

					outFile.close();
				}

				break;

			case('q') :
				trackster.pupilThreshold = MIN(255, MAX(0, trackster.pupilThreshold + 1));
				break;
			case('a') :
				trackster.pupilThreshold = MIN(255, MAX(0, trackster.pupilThreshold - 1));
				break;

			case('w') :
				trackster.glintThreshold = MIN(255, MAX(0, trackster.glintThreshold + 1));
				break;
			case('s') :
				trackster.glintThreshold = MIN(255, MAX(0, trackster.glintThreshold - 1));
				break;

			case(' ') :

				// Clear this to let us take back our training image screen
				trackster.trained = false;

				if (trainingIndex < 0) {
					trackster.displayStaticCrosshair = true;
					trackster.staticCrosshairCoord = cvPointTo32f(trainingPoints[++trainingIndex]);
				}

				else if (trainingIndex < 4) {
					// Capture points 0, 1, 2, and 3
					trainingDeltas[trainingPass][trainingIndex].x = trackster.delta_x;
					trainingDeltas[trainingPass][trainingIndex].y = trackster.delta_y;
					trainingFrames[trainingPass][trainingIndex] = trackster.frameCount;

					//showCrosshair(h_trainingView, image, cvPointTo32f(trainingPoints[++trainingIndex]));
					trackster.displayStaticCrosshair = true;
					trackster.staticCrosshairCoord = cvPointTo32f(trainingPoints[++trainingIndex]);
				}

				else if (trainingIndex == 4) {
					trackster.displayStaticCrosshair = false;

					// Capture point 4
					trainingDeltas[trainingPass][trainingIndex].x = trackster.delta_x;
					trainingDeltas[trainingPass][trainingIndex].y = trackster.delta_y;
					trainingFrames[trainingPass][trainingIndex] = trackster.frameCount;

					// Train
					trainingPass++;
 					if (trainingPass < 3) {
						trainingIndex = -1;
						trackster.displayStaticCrosshair = true;
						trackster.staticCrosshairCoord = cvPointTo32f(trainingPoints[++trainingIndex]);
					}
					else {
						CvPoint2D32f averageDeltas[5] = { { 0, 0 }, { 0, 0 }, { 0, 0 }, { 0, 0 }, { 0, 0 } };
						for (int i = 0; i < 5; i++) {
							for (int j = 0; j < trainingPass; j++) {
								averageDeltas[i].x += trainingDeltas[j][i].x;
								averageDeltas[i].y += trainingDeltas[j][i].y;
							}

							averageDeltas[i].x /= 3;
							averageDeltas[i].y /= 3;
						}

  						trackster.Train(averageDeltas, trainingPoints);
						// Reset in case we want to retrain
						trainingIndex = -1;
						trainingPass = 0;
					}
				}
				break;
			}


			if ((event.type == SDL_MOUSEBUTTONDOWN) && trackster.trained && testPointIndex < 10) {

				testFrames[testPointIndex] = trackster.frameCount;

				CvPoint2D32f projection = trackster.GetProjection();
				testProjections[testPointIndex] = projection;

				testPoints[testPointIndex].x = event.button.x;
				testPoints[testPointIndex].y = event.button.y;

				testPointIndex++;
			}

		}


		renderer.render();

	}
	
	WaitForSingleObject(trackingThread, INFINITE);

	std::cout << "done!\n";

	cvWaitKey();

	SDL_Quit();

	return 0;
}
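
The listing calls trackster.Train(averageDeltas, trainingPoints) without showing its body. Purely as an illustration of one plausible calibration (not the actual Train implementation), an affine map from averaged pupil-glint deltas to screen coordinates can be fitted by least squares:

#include <opencv2/core/core.hpp>

// Hypothetical sketch: fit screen_x = a*dx + b*dy + c (and likewise for y)
// over the five calibration targets.
static void fitAffineGazeMap(const CvPoint2D32f deltas[5], const CvPoint screenPts[5],
                             cv::Mat& coeffX, cv::Mat& coeffY)
{
    cv::Mat A(5, 3, CV_32F), bx(5, 1, CV_32F), by(5, 1, CV_32F);
    for (int i = 0; i < 5; i++) {
        A.at<float>(i, 0) = deltas[i].x;
        A.at<float>(i, 1) = deltas[i].y;
        A.at<float>(i, 2) = 1.0f;
        bx.at<float>(i, 0) = (float)screenPts[i].x;
        by.at<float>(i, 0) = (float)screenPts[i].y;
    }
    // Overdetermined (5 equations, 3 unknowns): solve in the least-squares sense.
    cv::solve(A, bx, coeffX, cv::DECOMP_SVD);
    cv::solve(A, by, coeffY, cv::DECOMP_SVD);
    // A projection for a new delta (dx, dy) would then be
    //   x = coeffX(0)*dx + coeffX(1)*dy + coeffX(2), and similarly for y.
}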
Exemple #24
0
CV_IMPL int
cvMinEnclosingCircle( const void* array, CvPoint2D32f * _center, float *_radius )
{
    const int max_iters = 100;
    const float eps = FLT_EPSILON*2;
    CvPoint2D32f center = { 0, 0 };
    float radius = 0;
    int result = 0;

    if( _center )
        _center->x = _center->y = 0.f;
    if( _radius )
        *_radius = 0;

    CvSeqReader reader;
    int k, count;
    CvPoint2D32f pts[8];
    CvContour contour_header;
    CvSeqBlock block;
    CvSeq* sequence = 0;
    int is_float;

    if( !_center || !_radius )
        CV_Error( CV_StsNullPtr, "Null center or radius pointers" );

    if( CV_IS_SEQ(array) )
    {
        sequence = (CvSeq*)array;
        if( !CV_IS_SEQ_POINT_SET( sequence ))
            CV_Error( CV_StsBadArg, "The passed sequence is not a valid contour" );
    }
    else
    {
        sequence = cvPointSeqFromMat(
            CV_SEQ_KIND_GENERIC, array, &contour_header, &block );
    }

    if( sequence->total <= 0 )
        CV_Error( CV_StsBadSize, "" );

    cvStartReadSeq( sequence, &reader, 0 );

    count = sequence->total;
    is_float = CV_SEQ_ELTYPE(sequence) == CV_32FC2;

    if( !is_float )
    {
        CvPoint *pt_left, *pt_right, *pt_top, *pt_bottom;
        CvPoint pt;
        pt_left = pt_right = pt_top = pt_bottom = (CvPoint *)(reader.ptr);
        CV_READ_SEQ_ELEM( pt, reader );

        for(int i = 1; i < count; i++ )
        {
            CvPoint* pt_ptr = (CvPoint*)reader.ptr;
            CV_READ_SEQ_ELEM( pt, reader );

            if( pt.x < pt_left->x )
                pt_left = pt_ptr;
            if( pt.x > pt_right->x )
                pt_right = pt_ptr;
            if( pt.y < pt_top->y )
                pt_top = pt_ptr;
            if( pt.y > pt_bottom->y )
                pt_bottom = pt_ptr;
        }

        pts[0] = cvPointTo32f( *pt_left );
        pts[1] = cvPointTo32f( *pt_right );
        pts[2] = cvPointTo32f( *pt_top );
        pts[3] = cvPointTo32f( *pt_bottom );
    }
    else
    {
        CvPoint2D32f *pt_left, *pt_right, *pt_top, *pt_bottom;
        CvPoint2D32f pt;
        pt_left = pt_right = pt_top = pt_bottom = (CvPoint2D32f *) (reader.ptr);
        CV_READ_SEQ_ELEM( pt, reader );

        for(int i = 1; i < count; i++ )
        {
            CvPoint2D32f* pt_ptr = (CvPoint2D32f*)reader.ptr;
            CV_READ_SEQ_ELEM( pt, reader );

            if( pt.x < pt_left->x )
                pt_left = pt_ptr;
            if( pt.x > pt_right->x )
                pt_right = pt_ptr;
            if( pt.y < pt_top->y )
                pt_top = pt_ptr;
            if( pt.y > pt_bottom->y )
                pt_bottom = pt_ptr;
        }

        pts[0] = *pt_left;
        pts[1] = *pt_right;
        pts[2] = *pt_top;
        pts[3] = *pt_bottom;
    }

    for( k = 0; k < max_iters; k++ )
    {
        double min_delta = 0, delta;
        CvPoint2D32f ptfl, farAway = { 0, 0};
        /* only for the first iteration, because the point set is repaired at the loop's foot */
        if(k==0)
            icvFindEnslosingCicle4pts_32f( pts, &center, &radius );

        cvStartReadSeq( sequence, &reader, 0 );

        for(int i = 0; i < count; i++ )
        {
            if( !is_float )
            {
                ptfl.x = (float)((CvPoint*)reader.ptr)->x;
                ptfl.y = (float)((CvPoint*)reader.ptr)->y;
            }
            else
            {
                ptfl = *(CvPoint2D32f*)reader.ptr;
            }
            CV_NEXT_SEQ_ELEM( sequence->elem_size, reader );

            delta = icvIsPtInCircle( ptfl, center, radius );
            if( delta < min_delta )
            {
                min_delta = delta;
                farAway = ptfl;
            }
        }
        result = min_delta >= 0;
        if( result )
            break;

        CvPoint2D32f ptsCopy[4];
        /* find a good replacement partner for the point that is farthest away,
        starting with the one that lies in the actual circle (i=3) */
        for(int i = 3; i >=0; i-- )
        {
            for(int j = 0; j < 4; j++ )
            {
                ptsCopy[j]=(i != j)? pts[j]: farAway;
            }

            icvFindEnslosingCicle4pts_32f(ptsCopy, &center, &radius );
            if( icvIsPtInCircle( pts[i], center, radius )>=0){ // replaced one again in the new circle?
                pts[i] = farAway;
                break;
            }
        }
    }

    if( !result )
    {
        cvStartReadSeq( sequence, &reader, 0 );
        radius = 0.f;

        for(int i = 0; i < count; i++ )
        {
            CvPoint2D32f ptfl;
            float t, dx, dy;

            if( !is_float )
            {
                ptfl.x = (float)((CvPoint*)reader.ptr)->x;
                ptfl.y = (float)((CvPoint*)reader.ptr)->y;
            }
            else
            {
                ptfl = *(CvPoint2D32f*)reader.ptr;
            }

            CV_NEXT_SEQ_ELEM( sequence->elem_size, reader );
            dx = center.x - ptfl.x;
            dy = center.y - ptfl.y;
            t = dx*dx + dy*dy;
            radius = MAX(radius,t);
        }

        radius = (float)(sqrt(radius)*(1 + eps));
        result = 1;
    }

    *_center = center;
    *_radius = radius;

    return result;
}
Exemple #25
0
int lk_work(CAMOBJ * st)
{
        int i, k;
		float mx,my,cx,cy;

    //    frame = cvQueryFrame( capture );
        if( !frame )
            return(1);

        if( !image ) 	            // allocate all the buffers 
		{
            image = cvCreateImage( cvGetSize(frame), 8, 3 );
            image->origin = frame->origin;
            grey = cvCreateImage( cvGetSize(frame), 8, 1 );
            prev_grey = cvCreateImage( cvGetSize(frame), 8, 1 );
            save_grey = cvCreateImage( cvGetSize(frame), 8, 1 );

            pyramid = cvCreateImage( cvGetSize(frame), 8, 1 );
            prev_pyramid = cvCreateImage( cvGetSize(frame), 8, 1 );
            save_pyramid = cvCreateImage( cvGetSize(frame), 8, 1 );

            points[0] = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(points[0][0]));
            points[1] = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(points[0][0]));
            save_points = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(points[0][0]));

            status = (char*)cvAlloc(MAX_COUNT);
			for (i=0;i<MAX_COUNT;i++) pt_mode[i]=0;
            flags = 0;
			statuscount++;
		}
        cvCopy( frame, image, 0 );

		if (st->mode==1)
		{
		  if (!video_writer)
		       video_writer = cvCreateVideoWriter(st->videofilename,-1,15,cvGetSize(image));
		  	
		  cvWriteFrame(video_writer,image);
  		}


        if (st->enable_tracking)
		{
		cvCvtColor( image, grey, CV_BGR2GRAY );

        if( night_mode )
            cvZero( image );

		if (need_to_init)
		{
		  need_to_init=0;
		  init_flag=0;

		  if (st->trackface)
		  {
			if (detect_face())
			{
				int x;

				count=2;

				cvFindCornerSubPix( grey, points[1], count,
					cvSize(win_size,win_size), cvSize(-1,-1),
					cvTermCriteria(CV_TERMCRIT_ITER,1,1.0));
//					cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03));
  
	            cvCopy(grey,save_grey,0 );
		        cvCopy(pyramid,save_pyramid,0 );
				cvCopy(grey,prev_grey,0 );
		        cvCopy(pyramid,prev_pyramid,0 );

			    for (x=0;x<count;x++)
				{
					save_points[x].x=points[1][x].x;
					save_points[x].y=points[1][x].y;
					points[0][x].x=points[1][x].x;
					points[0][x].y=points[1][x].y;
					save_pt_mode[x]=pt_mode[x];
				}
				calc_distances(1);
				save_count=count;
				add_remove_pt = 0;
	            flags = 0;
				time_to_restore=0;
				
			} 
		  }
		  else
		  {
			    save_points[0].x=PT1_xpos*100;
				save_points[0].y=PT1_ypos*100;
				points[0][0].x=PT1_xpos*100;
				points[0][0].y=PT1_ypos*100;
				save_pt_mode[0]=0;
				count=1;MAX_COUNT=1;
				calc_distances(1);

  				cvFindCornerSubPix( grey, points[1], 1,
					cvSize(win_size,win_size), cvSize(-1,-1),
					cvTermCriteria(CV_TERMCRIT_ITER,1,1.0));
	            
				// report("hallo");
				cvCopy(grey,save_grey,0 );
		        cvCopy(pyramid,save_pyramid,0 );
				cvCopy(grey,prev_grey,0 );
		        cvCopy(pyramid,prev_pyramid,0 );
				
				save_count=1;
				add_remove_pt = 0;
	            flags = 0;
				//time_to_restore=0;
				
		  }

		}        

		if(count < MAX_COUNT) need_to_init=1;
		else
        {
			
            cvCalcOpticalFlowPyrLK( prev_grey, grey, prev_pyramid, pyramid,
                points[0], points[1], count, cvSize(win_size,win_size), 5, status, 0,
                cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03), flags );
            flags |= CV_LKFLOW_PYR_A_READY;

			mx=0;my=0;
			cx=0;cy=0;mcount=0;ccount=0;
            for( i = k = 0; i < count; i++ )
            {

                if( add_remove_pt )
                {
                    double dx = pt.x - points[1][i].x;
                    double dy = pt.y - points[1][i].y;

                    if( dx*dx + dy*dy <= 25 )
                    {
                        add_remove_pt = 0;
                        if (pt_mode[i]==1) {pt_mode[i]=0; continue;}
						pt_mode[i]=1;
                    }
                }
                
                if( !status[i] ) { need_to_init=1; status[i]=true; }
                    

				if (pt_mode[i]==1)
				{
					cx+= (points[0][i].x - points[1][i].x);
					cy+= (points[0][i].y - points[1][i].y);
					ccount++;
				}
				else
				{
					mx += (points[0][i].x - points[1][i].x);
					my += (points[0][i].y - points[1][i].y);
					mcount++;
				}
				
				points[1][k] = points[1][i];
				pt_mode[k++]=pt_mode[i];
				if (need_to_init)
				  cvCircle( image, cvPointFrom32f(points[1][i]), 4, CV_RGB(255,0,0), 2, 8,0);
				else if (pt_mode[i]==1)
                  cvCircle( image, cvPointFrom32f(points[1][i]), 4, CV_RGB(255,255,0), 2, 8,0);
				  else
				   cvCircle( image, cvPointFrom32f(points[1][i]), 4, CV_RGB(0,210,0), 2, 8,0);
            }
            count = k;
			if (k==MAX_COUNT)
			{
				if (init_flag>1)
				{
					if (mcount) { x_move=mx/mcount; y_move=my/mcount; }
					if (ccount) { x_click=cx/ccount; y_click=cy/ccount; }
				}
				if (st->trackface) calc_distances(0); else calc_distances(2);
				
				
				if ((autorestore)) // && (init_flag>5))
				{
				  if (st->trackface)
				  {
					if ((dist_error>=dist_threshold) || (angle_error>=angle_threshold))
						time_to_restore++;
					else time_to_restore=0;

					if (time_to_restore>threshold_time)
					{ need_to_init=1; time_to_restore=0; }
				  }
				  else
				  {
					if ((dist_error>=dist_threshold))
						time_to_restore++;
					else time_to_restore=0;

					if (time_to_restore>threshold_time)
					{ need_to_init=1; time_to_restore=0; }

				  }
				  
				}
				
					
			} 
        }

        if( add_remove_pt && count < MAX_COUNT )
        {
            points[1][count++] = cvPointTo32f(pt);
            cvFindCornerSubPix( grey, points[1] + count - 1, 1,
                cvSize(win_size,win_size), cvSize(-1,-1),
                cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03));
            add_remove_pt = 0;
        }

	  }

	  CV_SWAP( prev_grey, grey, swap_temp );
	  CV_SWAP( prev_pyramid, pyramid, swap_temp );
	  CV_SWAP( points[0], points[1], swap_points );
		
	  if (init_flag<1000) init_flag++;

	  if (st->showlive) cvShowImage( "Camera", image );
	

	return(0);
}
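
lk_work splits the tracked features into "move" points (pt_mode 0) and "click" points (pt_mode 1) and turns each group's average frame-to-frame displacement into cursor motion. A compact sketch of just that averaging step (the helper name is illustrative):

#include <opencv/cv.h>

/* Average displacement (prev - curr) of the points whose pt_mode equals `mode`.
   Returns 0 and leaves *dx, *dy untouched when no point matches. */
static int mean_displacement(const CvPoint2D32f* prev, const CvPoint2D32f* curr,
                             const int* pt_mode, int count, int mode,
                             float* dx, float* dy)
{
    float sx = 0.f, sy = 0.f;
    int n = 0;
    for (int i = 0; i < count; i++) {
        if (pt_mode[i] != mode)
            continue;
        sx += prev[i].x - curr[i].x;
        sy += prev[i].y - curr[i].y;
        n++;
    }
    if (n == 0)
        return 0;
    *dx = sx / n;
    *dy = sy / n;
    return 1;
}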
int main( int argc, char** argv ) 
{ 
     
    FILE *ptr; 
    ptr=fopen("dataerr.dat","w+"); 
    CvCapture* capture = 0; 
 
    int counter1=0; 
 
    IplImage* image2 = 0; 
 
    float sumX=0; 
    float sumY=0; 
 
    float err_X; 
    float err_Y; 
 
    int XX=0; 
    int YY=0; 
 
    CvPoint ipt1; 
 
    int tempxx1=0; 
    int tempyy1=0; 
    int tempxx2=0; 
    int tempyy2=0; 
 
     
 
    char *imgFmt="pgm"; 
    char str1[100]; 
 
    /* Initialize the error array */ 
    for(int kk=0;kk<=400;kk++) 
    { 
        optical_flow_error[0][kk]=0; 
        optical_flow_errorP[0][kk]=0; 
        optical_flow_error[1][kk]=0; 
        optical_flow_errorP[1][kk]=0; 
    } 
 
    //capturing frame from video 
    capture = cvCaptureFromAVI("soccer_track.mpeg"); 
 
    cvNamedWindow( "KLT-Tracking Group_R", 0 ); 
    cvSetMouseCallback( "KLT-Tracking Group_R", on_mouse, 0 ); 
 
    if(add_remove_pt==1) 
    { 
        flagg=1; 
    } 
 
    for(;;) 
    { 
        IplImage* frame = 0; 
         
        int i, k, c; 
 
        //creating file name 
        sprintf(str1,"%d.%s",counter1,imgFmt); 
        err_X=0; 
        err_Y=0; 
        sumX=0; 
        sumY=0; 
 
        // grab and decode the next frame from the video 
 
        frame = cvQueryFrame( capture ); 
 
     
        if( !frame ) 
            break; 
 
        if( !image ) 
            // first frame: allocate the buffers and do some initialization work 
        { 
            // allocate all the image buffers  
            image = cvCreateImage( cvGetSize(frame), 8, 3 ); 
            image->origin = frame->origin; 
            grey = cvCreateImage( cvGetSize(frame), 8, 1 );//make it grey 
            prev_grey = cvCreateImage( cvGetSize(frame), 8, 1 );//the previous frame in grey mode 
            pyramid = cvCreateImage( cvGetSize(frame), 8, 1 );//pyramid frame 
            prev_pyramid = cvCreateImage( cvGetSize(frame), 8, 1 );//previous pyramid frame 
            /* Define two pointers */ 
            points[0] = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(points[0][0])); 
            points[1] = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(points[0][0])); 
            status = (char*)cvAlloc(MAX_COUNT); 
            flags = 0; 
        } 
 
        cvCopy( frame, image, 0 );//frame->image 
 
        //converting the image into gray scale for further computation 
        cvCvtColor( image, grey, CV_BGR2GRAY ); 
         
        if( need_to_init ) 
        { 
             
            IplImage* eig = cvCreateImage( cvGetSize(grey), 32, 1 ); 
            IplImage* temp = cvCreateImage( cvGetSize(grey), 32, 1 ); 
            double quality = 0.01; 
            double min_distance = 10; 
         
 
            //using good features to track 
            count = MAX_COUNT; 
            cvGoodFeaturesToTrack( grey, eig, temp, points[1], &count, 
                                   quality, min_distance, 0, 3, 0, 0.04 ); 
            cvFindCornerSubPix( grey, points[1], count, 
            cvSize(win_size,win_size), cvSize(-1,-1), 
            cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03)); 
            cvReleaseImage( &eig ); 
            cvReleaseImage( &temp ); 
 
 
 
            add_remove_pt = 0; 
        } 
        else if( count > 0 ) 
        { 
            //using pyramidal optical flow method 
            cvCalcOpticalFlowPyrLK(  
                    prev_grey, grey,  
                    prev_pyramid, pyramid, 
                    points[0], points[1],  
                    count, cvSize(win_size,win_size),  
                    5, status,0, 
                    cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03), flags ); 
             
            flags |= CV_LKFLOW_PYR_A_READY|CV_LKFLOW_PYR_B_READY; 
 
            for( i = k = 0; i < count; i++ ) 
            { 
                /* When need to add or remove the point */ 
                if( add_remove_pt ) 
                { 
 
                    double dx = pt.x - points[1][i].x; 
                    double dy = pt.y - points[1][i].y; 
                    /* calculate the distance between the selected point and the tracked point; 
                       if it is closer than 5 pixels, cancel the add/remove action */ 
                    if( dx*dx + dy*dy <= 25 ) 
                    { 
                        add_remove_pt = 0; 
                        continue; 
                    } 
                } 
                 
                if( !status[i] )    // if the point is not tracked correctly, skip it 
                    continue; 
                
                points[1][k++] = points[1][i]; 
 
                ipt1=cvPointFrom32f(points[1][i]);//get a point 
                 
            // calculate the error here; update the error array 
                optical_flow_error[0][i]=ipt1.x; 
                optical_flow_error[1][i]=ipt1.y; 
 
 
            } 
            //taking average error for moving the window 
 
            for(int zz=0; zz<count; zz++) 
                { 
                    errX[zz]=optical_flow_error[0][zz]- optical_flow_errorP[0][zz]; 
                    errY[zz]=optical_flow_error[1][zz]- optical_flow_errorP[1][zz]; 
 
                    sumX=sumX+errX[zz]; 
                    sumY=sumY+errY[zz]; 
 
                    optical_flow_errorP[0][zz]=optical_flow_error[0][zz]; 
                    optical_flow_errorP[1][zz]=optical_flow_error[1][zz]; 
 
                } 
 
                fprintf(ptr,"%d\n",count); 
                 
                err_X=sumX/count; 
                err_Y=sumY/count; 
 
            if(flagg==1) 
            { 
              int static startonce=0; 
 
            if(startonce==0) 
            { 
                 
             
            tempxx1=pt.x-20; 
            tempyy1=pt.y-20; 
 
            tempxx2=pt.x+20; 
            tempyy2=pt.y+20; 
 
            XX=pt.x; 
            YY=pt.y; 
 
            startonce=1; 
 
            } 
            if(err_X<3) 
            { 
                tempxx1=tempxx1+err_X; 
                tempyy1=tempyy1+err_Y; 
                tempxx2=tempxx2+err_X; 
                tempyy2=tempyy2+err_Y; 
 
                XX=XX+err_X; 
                YY=YY+err_Y; 
                fprintf(ptr,"%f %f\n",err_X,err_Y); 
            } 
 
            printf("\n%f",err_X); 
 
            //moving window 
 
            cvRectangle(image, cvPoint(tempxx1,tempyy1), cvPoint(tempxx2,tempyy2), cvScalar(255,0,0), 1); 
            cvCircle(image, cvPoint(XX,YY), 3, cvScalar(0,0,255), 1); 
        } 
            count = k; 
        } 
 
 
        if( add_remove_pt && count < MAX_COUNT ) 
        { 
            points[1][count++] = cvPointTo32f(pt); 
            cvFindCornerSubPix( grey, points[1] + count - 1, 1, 
                cvSize(win_size,win_size), cvSize(-1,-1), 
                cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03)); 
            add_remove_pt = 0; 
        } 
 
        CV_SWAP( prev_grey, grey, swap_temp ); 
        CV_SWAP( prev_pyramid, pyramid, swap_temp ); 
        CV_SWAP( points[0], points[1], swap_points ); 
        need_to_init = 0; 
 
       
        //writing image file to the file 
        //if(!cvSaveImage(str1,image)) printf("Could not save: %s\n",str1); 
        //storing in a video also 
  
         
        cvShowImage( "KLT-Tracking Group_R", image ); 
 
        c = cvWaitKey(100); 
        if( (char)c == 27 ) 
            break; 
        switch( (char) c ) 
        { 
        case 's': 
            need_to_init = 1; 
          } 
 
        counter1++; 
    } 
 
    cvReleaseCapture( &capture ); 
    cvDestroyWindow("KLT-Tracking Group_R"); 
 
    fclose(ptr); 
     
    return 0; 
}
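
The loop above moves a fixed 40x40 window by the average point displacement (err_X, err_Y) each frame. A sketch of that window update with bounds clamping added (the clamping is an addition for illustration, not in the original):

#include <opencv/cv.h>

/* Shift the tracking window by the average flow and keep it inside the image. */
static CvRect shift_window(CvRect win, float err_x, float err_y, CvSize img)
{
    win.x += cvRound(err_x);
    win.y += cvRound(err_y);
    if (win.x < 0) win.x = 0;
    if (win.y < 0) win.y = 0;
    if (win.x + win.width  > img.width)  win.x = img.width  - win.width;
    if (win.y + win.height > img.height) win.y = img.height - win.height;
    return win;
}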