IplImage * BouyBaseObject::TemplateMask(const IplImage * imgIn, const IplImage * threshold, const IplImage * tmplt) const
{
    // Annotate a copy of imgIn with the template-similarity score of every
    // blob found in `threshold`; blobs that beat mTemplateThreshold also get
    // a circle. The caller owns the returned image.
    IplImage * annotated = cvCloneImage(imgIn);

    // Candidate blobs covering between 0.5% and 95% of the frame.
    std::list<CvBox2D> candidates =
        Zebulon::Vision::VisionUtils::GetBlobBoxes(threshold, .005, .95);

    CvFont font;
    cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX, 1, 1);

    std::list<CvBox2D>::iterator box;
    for(box = candidates.begin(); box != candidates.end(); box++)
    {
        // Score the cropped candidate region against the template.
        IplImage * patch = Zebulon::Vision::VisionUtils::Crop(imgIn, *box);
        double score = 0;
        Zebulon::Vision::VisionUtils::GetSimpleTemplateSimilarity(patch, tmplt, score, false);

        // Label the blob with its score and outline it.
        std::ostringstream label;
        label << "(" << score << ")";
        cvPutText(annotated, label.str().c_str(), cvPointFrom32f(box->center), &font, CV_RGB(255,255,255));
        Zebulon::Vision::VisionUtils::DrawSquare(annotated, *box);

        // Circle the candidates that clear the template threshold.
        if(score > mTemplateThreshold)
        {
            cvDrawCircle(annotated, cvPointFrom32f(box->center), (patch->width/2.0), CV_RGB(255,255,255));
        }
        cvReleaseImage(&patch);
    }
    return annotated;
}
        void run()
        {
        	// Copy the input frame, overlay every tracked tag that is
        	// currently visible (its axes, id label and outline), then
        	// display the result; pressing 'q' stops the pipeline.
        	cvCopyImage(*mInputImage, mOutputImage);
        	for (int i = mFromId; i < mToId; ++i) {
        		Craftag *tTag = mTags[i-mFromId];
				if (tTag->isPresent()){
					wykobi::point2d<float> tCenter = tTag->getCenter();
					wykobi::point2d<float> tTop = tCenter+tTag->getYUnit();
					wykobi::point2d<float> tRight = tCenter+tTag->getXUnit();

					// Draw with 2^scShift sub-pixel precision.
					static const int scShift = 2;
					static const float scPrecision = 1<<scShift;
					cvLine(mOutputImage, cvPoint(tCenter.x*scPrecision, tCenter.y*scPrecision), cvPoint(tTop.x*scPrecision, tTop.y*scPrecision), sColor, 1, CV_AA, scShift);
					cvLine(mOutputImage, cvPoint(tCenter.x*scPrecision, tCenter.y*scPrecision), cvPoint(tRight.x*scPrecision, tRight.y*scPrecision), sColor, 1, CV_AA, scShift);
					//char tText[256];
					//sprintf(tText, "%d:(%.2f,%.2f),(%.2f,%.2f),(%.2f,%.2f)", i, tCenter.x, tCenter.y, tTop.x, tTop.y, tRight.x, tRight.y);
					// snprintf cannot overflow the buffer, unlike the
					// original sprintf into a 5-byte array (ids >= 10000
					// would have smashed the stack).
					char tText[16];
					snprintf(tText, sizeof(tText), "%d", i);
					cvPutText(mOutputImage, tText, cvPoint(tCenter.x, tCenter.y), &mFont, sColor);

					CvPoint2D32f tCvCorners[4];
					CvWykobiBridge::convertQuad(wykobi::scale(scPrecision, scPrecision, tTag->getCorners()), tCvCorners);
					// tCorner avoids shadowing the outer tag index i.
					for (int tCorner = 0; tCorner < 4; ++tCorner) {
						cvLine(mOutputImage, cvPointFrom32f(tCvCorners[tCorner]), cvPointFrom32f(tCvCorners[(tCorner+1)%4]), sColor, 1, CV_AA, scShift);
					}
				}
			}
        	cvShowImage("CraftagIdentifier", mOutputImage);
        	char tKeyPressed = cvWaitKey(1);
        	if (tKeyPressed == 'q') stop();
        }
Example #3
0
/**
 * @author      JIA Pei
 * @version     2010-05-07
 * @brief       draw a straight line A*x + B*y + C = 0 on the image, clipped
 *              to the extent of a shape (plus an offset at both ends)
 * @param       iShape          Input -- shape whose extent clips the line when ws is true
 * @param       theSubshape     Input -- shape whose extent clips the line when ws is false
 * @param       iLine           Input -- the line coefficients {A, B, C}
 * @param       oImg            Output -- the image the line is drawn onto
 * @param       dir             Input -- direction; VERTICAL solves x from y,
 *                              HORIZONTAL (and any other value) solves y from x
 * @param       ws              Input -- true: clip against iShape; false: clip against theSubshape
 * @param       offset          Input -- add some offset at both ends of the line segment itself
 * @param       ci              Input -- color index into colors[]
 * @return      void
 */
void VO_Fitting2DSM::VO_DrawAline(  const VO_Shape& iShape,
                                    const VO_Shape& theSubshape,
                                    const std::vector<float>& iLine,
                                    cv::Mat& oImg,
                                    unsigned int dir,
                                    bool ws,
                                    unsigned int offset,
                                    unsigned int ci)
{
    switch(dir)
    {
    case VERTICAL:
    {
        // Line coefficients: A*x + B*y + C = 0.
        float A = iLine[0];
        float B = iLine[1];
        float C = iLine[2];
        cv::Point2f ptf1, ptf2;
        // Clip the segment to the vertical extent of the chosen shape.
        if(ws)
        {
            ptf1.y = iShape.MinY() - offset;
            ptf2.y = iShape.MaxY() + offset;
        }
        else
        {
            ptf1.y = theSubshape.MinY() - offset;
            ptf2.y = theSubshape.MaxY() + offset;
        }
        // Solve x from y at both endpoints (assumes A != 0 for a near-vertical line).
        ptf1.x = -(C + B*ptf1.y)/A;
        ptf2.x = -(C + B*ptf2.y)/A;
        cv::Point pt1 = cvPointFrom32f( ptf1 );
        cv::Point pt2 = cvPointFrom32f( ptf2 );
        cv::line( oImg, pt1, pt2, colors[ci], 2, 0, 0);
    }
    break;
    case HORIZONTAL:
    default:
    {
        float A = iLine[0];
        float B = iLine[1];
        float C = iLine[2];
        cv::Point2f ptf1, ptf2;
        // Clip the segment to the horizontal extent of the chosen shape.
        if(ws)
        {
            ptf1.x = iShape.MinX() - offset;
            ptf2.x = iShape.MaxX() + offset;
        }
        else
        {
            ptf1.x = theSubshape.MinX() - offset;
            ptf2.x = theSubshape.MaxX() + offset;
        }
        // Solve y from x at both endpoints (assumes B != 0 for a near-horizontal line).
        ptf1.y = -(C + A*ptf1.x)/B;
        ptf2.y = -(C + A*ptf2.x)/B;
        cv::Point pt1 = cvPointFrom32f( ptf1 );
        cv::Point pt2 = cvPointFrom32f( ptf2 );
        cv::line( oImg, pt1, pt2, colors[ci], 2, 0, 0);
    }
    break;
    }
}
Example #4
0
void Lines::Line::drawInfiniteLine(IplImage* img, CvScalar color)
{
	// Draw the line across the image. For a vertical line Slope is
	// undefined, so only evaluate the slope/intercept form when the line is
	// non-vertical — the original computed (and cvRound'ed) those endpoints
	// unconditionally, feeding a meaningless Slope into cvPointFrom32f.
	if (isVertical == false)
	{
		CvPoint pt1 = cvPointFrom32f(cvPoint2D32f(-1000, -1000*Slope + Intercept));
		CvPoint pt2 = cvPointFrom32f(cvPoint2D32f(1000, 1000*Slope + Intercept));
		cvLine(img, pt1, pt2, color, 1, 8, 0);
	}
	else
	{
		// Vertical: drop a line straight down at the stored x coordinate.
		cvLine(img, cvPoint(point1.x, 0), cvPoint(point1.x, 1000), color, 1, 8, 0);
	}
}
Example #5
0
void Bot::make()
{
	// Grab the next frame from the camera, run the detector on it, and
	// display the frame with either the detected object marked or a
	// failure message overlaid.
	camera->update();
	if(camera->getFrame()){
		//
		// fetch the frame from the camera
		//
		if(image==0){
			image = cvCloneImage(camera->getFrame());
		}
		else{
			cvCopy(camera->getFrame(), image);
		}

		// storage for the object's centre
		CvPoint2D32f point;

		//
		// hand the frame to the detector for processing
		//
		bool result = detector->getCenter( image, point, BOT_THRESHOLD);

		char buf[128]; // for the on-screen status text

		if(result){
			//
			// object detected
			//

			printf("[i][Bot] point: %.2f : %.2f \n", point.x, point.y);

			if(gimbal){
				gimbal->make( cvPoint(image->width/2, image->height/2), cvPointFrom32f(point) );
			}

			// mark the centre
			cvCircle(image, cvPointFrom32f(point), 3, CV_RGB(0, 255, 0), 2);

			// print the message in the top-left corner of the picture
			sprintf(buf, "Object detected: %.2f : %.2f", point.x, point.y);
			cvPutText(image, buf, cvPoint(10, 20), &font, CV_RGB(0, 255, 0));
		}
		else{
			//
			// object not detected
			//

			// print the message in the top-left corner of the picture
			sprintf(buf, "Cant find object!");
			cvPutText(image, buf, cvPoint(10, 20), &font, CV_RGB(255 , 0, 0));
		}

		cvShowImage(window_name, image);
	}
}
Example #6
0
// Draws the four edges of the rotated box and a filled circle at each
// corner (shaded by corner index).
void * cvBox(CvBox2D box, IplImage *image, CvScalar color, int thickness)
{
    CvPoint2D32f pt[4];
    
    cvBoxPoints(box, pt);
    
    for(int i = 0; i < 4; i++) {
        cvLine(image, cvPointFrom32f(pt[i]), cvPointFrom32f(pt[(i+1)%4]), color, thickness);
        cvCircle(image, cvPointFrom32f(pt[i]), thickness, cvScalarAll((255/4)*i), -1);
    }
    // The function is declared void* but produced no value: flowing off the
    // end of a non-void function is undefined behaviour in C++, so return
    // NULL explicitly (the signature is kept for existing callers).
    return NULL;
}
// the function draws all the squares in the image
void drawSquares(IplImage* imgSrc, CvSeq* squares)
{
	CvSeqReader reader;
	IplImage* imgCopy = cvCloneImage(imgSrc);
	int i;

	// initialize reader of the sequence
	cvStartReadSeq(squares, &reader, 0);

	// read 4 sequence elements at a time (all vertices of a square)
	printf("Found %d rectangles in image\n", squares->total / 4);

	for (i = 0; i < squares->total; i += 4)
	{
		CvPoint* pntRect = gPnt;
		int pntCount = 4;
		// NOTE(review): a fresh sequence is allocated into gStorage for
		// every square and never individually released; the caller is
		// expected to clear gStorage between frames — confirm.
		CvSeq* seqRect = cvCreateSeq(CV_32SC2, sizeof(CvSeq), sizeof(CvPoint), gStorage);

		// read 4 vertices
		memcpy(gPnt, reader.ptr, squares->elem_size);
		CV_NEXT_SEQ_ELEM(squares->elem_size, reader);
		cvSeqPush(seqRect, &pntRect[0]);

		memcpy(gPnt + 1, reader.ptr, squares->elem_size);
		CV_NEXT_SEQ_ELEM(squares->elem_size, reader);
		cvSeqPush(seqRect, &pntRect[1]);

		memcpy(gPnt + 2, reader.ptr, squares->elem_size);
		CV_NEXT_SEQ_ELEM(squares->elem_size, reader);
		cvSeqPush(seqRect, &pntRect[2]);

		memcpy(gPnt + 3, reader.ptr, squares->elem_size);
		CV_NEXT_SEQ_ELEM(squares->elem_size, reader);
		cvSeqPush(seqRect, &pntRect[3]);

		// draw the square as a closed polyline
		cvPolyLine(imgCopy, &pntRect, &pntCount, 1, 1, CV_RGB(0, 255, 0), 1, CV_AA, 0);

		// draw the min outter rect
		CvBox2D box = cvMinAreaRect2(seqRect, NULL);
	    CvPoint2D32f ptBox[4];
	    cvBoxPoints(box, ptBox);
	    // k avoids shadowing the outer counter i; the original index
	    // expression ((k+1)%4)?(k+1):0 was just (k+1)%4 written redundantly.
	    for(int k = 0; k < 4; ++k) {
	        cvLine(imgCopy, cvPointFrom32f(ptBox[k]), cvPointFrom32f(ptBox[(k+1)%4]), CV_RGB(255,0,0));
	    }

	}

	// show the resultant image
	cvShowImage(wndname, imgCopy);
	cvReleaseImage(&imgCopy);
}
Example #8
0
std::list<CvBox2D> PathObject::GetBounding(const IplImage * imgIn, IplImage * debugOut) const
{

    std::list<CvBox2D> blobList;
    if(imgIn == NULL) return blobList;
    IplImage * imgOut1 = GetMask(imgIn);
    //cvShowImage("imgout1",imgOut1);
    CvFont font;
    cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX, 1,1);
    if(imgOut1)
    {
        blobList = Zebulon::Vision::VisionUtils::GetBlobBoxes(imgOut1,.001,.95);
        if (debugOut)
        {
            for(std::list<CvBox2D>::iterator it = blobList.begin(); it != blobList.end(); it++)
            {
//                CvPoint2D32f boxCorners[4];
//                cvBoxPoints(*it,boxCorners);

                std::ostringstream s;
                s << "(" << it->angle << ")";
                cvPutText(debugOut,s.str().c_str(),cvPointFrom32f(it->center),&font,mNearColor);
                Zebulon::Vision::VisionUtils::DrawSquare(debugOut,*it,mNearColor);
            }
        }
        cvReleaseImage(&imgOut1);
    }

    return blobList;

}
Example #9
0
// Fit and draw the minimum-area ellipse for `contour` onto srcImg; returns
// the fitted box (all zeros when the contour has fewer than 6 points).
CvBox2D DrawMinAreaEllipse(IplImage *srcImg,CvSeq *contour,CvScalar color/*=CV_RGB(255,0,0)*/)
{
	int count = contour->total; // number of points in the contour
	CvPoint center;
	CvSize size;
	// Value-initialize so the early-return path below hands back a
	// well-defined zero box instead of indeterminate stack garbage.
	CvBox2D box = CvBox2D();

	// cvFitEllipse2 requires at least 6 points.
	if( count < 6 )
		return box;

	// Copy the integer contour points into a matrix, then convert to float.
	CvMat* points_f = cvCreateMat( 1, count, CV_32FC2 );
	CvMat points_i = cvMat( 1, count, CV_32SC2, points_f->data.ptr );
	cvCvtSeqToArray( contour, points_f->data.ptr, CV_WHOLE_SEQ );
	cvConvert( &points_i, points_f );
	// ellipse fitting
	box = cvFitEllipse2( points_f );
	cout<<"拟合的椭圆参数:angle="<<box.angle<<",center=("<<box.center.x<<","
		<<box.center.y<<")"<<",size(w,h)=("<<box.size.width<<","<<box.size.height<<")"<<endl;
	// extract the ellipse parameters (half-axes, +1 so tiny fits stay visible)
	center = cvPointFrom32f(box.center);
	size.width = cvRound(box.size.width*0.5)+1;
	size.height = cvRound(box.size.height*0.5)+1;
	// draw the ellipse
	cvEllipse(srcImg, center, size,
		-box.angle, 0, 360,	color, 1, CV_AA, 0);
	cvReleaseMat(&points_f);
	return box;
}
void Build_Vocabulary::drawKmeans(String vocabularyfile, String descriptorsfile){// can only draw 2D points.
   
    // Load the training descriptors and the k-means vocabulary from disk,
    // then plot each descriptor as a small filled circle shaded by its
    // cluster index.
    cout << "Loading Files..." << endl;
	FileStorage fs(descriptorsfile, FileStorage::READ);
    Mat training_descriptors;
	fs["training_descriptors"] >> training_descriptors;
	fs.release();	
    
    FileStorage fs1(vocabularyfile, FileStorage::READ);
    Mat vocabulary;
	fs1["vocabulary"] >> vocabulary;
	fs1.release();	

    IplImage* img = cvCreateImage( cvSize( 500, 500 ), 8, 3 );
    cvZero( img );
    
    for( int i = 0; i < training_descriptors.rows; i++ )
    {
        // NOTE(review): reinterprets the descriptor matrix memory as packed
        // CvPoint2D32f — only valid for continuous two-column CV_32F data;
        // confirm the descriptor layout.
        CvPoint2D32f pt = ((CvPoint2D32f*)training_descriptors.data)[i];
        int cluster_idx = vocabulary.data[i];
        cvCircle( img, cvPointFrom32f(pt), 2, CV_RGB(cluster_idx%255 , cluster_idx%255 , cluster_idx%255), CV_FILLED );
    }
    
    training_descriptors.release(); 
    vocabulary.release();
    
    cvShowImage( "clusters", img );
    
    waitKey(0);
    // The original leaked `img`; release it once the window is dismissed.
    cvReleaseImage( &img );
}
Example #11
0
// Demonstration of OpenCV's rounding helpers, RNG and point conversions.
// The original declared main with an implicit int return type, which is not
// valid C++; the explicit `int` and final `return 0` fix that.
int main( int argc, char* argv[] ) {

    // Choose a negative floating point number.  Take its absolute value,
    // round it, and then take its ceiling and floor.
    // NOTE(review): CV_IABS is an integer abs macro — applying it to a
    // double relies on conversion; confirm against your cxcore version.
    double a = -1.23;
    printf( "CV_IABS(a) = %d\n", CV_IABS(a) );
    printf( "cvRound(a) = %d\n", cvRound(a) );
    printf( "cvCeil(a) = %d\n", cvCeil(a) );
    printf( "cvFloor(a) = %d\n", cvFloor(a) );


    // Generate some random numbers.
    CvRNG rngState = cvRNG(-1);
    for (int i = 0; i < 10; i++) {
        printf( "%u %f\n", cvRandInt( &rngState ),
                           cvRandReal( &rngState ) );
    }

    // Create a floating point CvPoint2D32f and convert it to an integer
    // CvPoint.
    CvPoint2D32f point_float1 = cvPoint2D32f(1.0, 2.0);
    CvPoint point_int1 = cvPointFrom32f( point_float1 );

    // Convert a CvPoint to a CvPoint2D32f.
    CvPoint point_int2 = cvPoint(3, 4);
    CvPoint2D32f point_float2 = cvPointTo32f( point_int2 );

    return 0;
}
Example #12
0
// Define trackbar callback functon. This function find contours,
// draw it and approximate it by ellipses.
void process_image(int h)
{
    CvMemStorage* storage;
    CvSeq* contour;

    // Create dynamic structure and sequence.
    storage = cvCreateMemStorage(0);
    contour = cvCreateSeq(CV_SEQ_ELTYPE_POINT, sizeof(CvSeq), sizeof(CvPoint) , storage);

    // Threshold the source image. This needful for cvFindContours().
    cvThreshold( image03, image02, slider_pos, 255, CV_THRESH_BINARY );

    // Find all contours.
    cvFindContours( image02, storage, &contour, sizeof(CvContour),
                    CV_RETR_LIST, CV_CHAIN_APPROX_NONE, cvPoint(0,0));

    // Clear images. IPL use.
    cvZero(image02);
    cvZero(image04);

    // This cycle draw all contours and approximate it by ellipses.
    for(;contour;contour = contour->h_next)
    {
        int count = contour->total; // This is number point in contour
        CvPoint center;
        CvSize size;
        CvBox2D box;

        // Number point must be more than or equal to 6 (for cvFitEllipse_32f).
        if( count < 6 )
            continue;

        CvMat* points_f = cvCreateMat( 1, count, CV_32FC2 );
        CvMat points_i = cvMat( 1, count, CV_32SC2, points_f->data.ptr );
        cvCvtSeqToArray( contour, points_f->data.ptr, CV_WHOLE_SEQ );
        cvConvert( &points_i, points_f );

        // Fits ellipse to current contour.
        box = cvFitEllipse2( points_f );

        // Draw current contour.
        cvDrawContours(image04,contour,CV_RGB(255,255,255),CV_RGB(255,255,255),0,1,8,cvPoint(0,0));

        // Convert ellipse data from float to integer representation.
        center = cvPointFrom32f(box.center);
        size.width = cvRound(box.size.width*0.5);
        size.height = cvRound(box.size.height*0.5);

        // Draw ellipse.
        cvEllipse(image04, center, size,
                  -box.angle, 0, 360,
                  CV_RGB(0,0,255), 1, CV_AA, 0);

        cvReleaseMat(&points_f);
    }

    // Show image. HighGUI use.
    cvShowImage( "Result", image04 );
}
Example #13
0
/**
 * @author      JIA Pei
 * @version     2010-05-07
 * @brief       draw the model's triangulated mesh on the image: one line per
 *              edge plus a small filled circle at each edge endpoint
 * @param       iShape          Input -- the shape supplying the 2D point positions
 * @param       iModel          Input -- the model supplying the edge topology
 * @param       ioImg           Input and Output -- the image
 * @return      void
 */
void VO_Fitting2DSM::VO_DrawMesh(const VO_Shape& iShape, const VO_AXM* iModel, cv::Mat& ioImg)
{
    cv::Point iorg,idst;
    std::vector<VO_Edge> edges = iModel->GetEdge();
    unsigned int NbOfEdges = iModel->GetNbOfEdges();

    for (unsigned int i = 0; i < NbOfEdges; i++)
    {
        // Look up the two shape points this edge connects.
        iorg = cvPointFrom32f( iShape.GetA2DPoint( edges[i].GetIndex1() ) );
        idst = cvPointFrom32f( iShape.GetA2DPoint( edges[i].GetIndex2() ) );
        // Edge
        cv::line( ioImg, iorg, idst, colors[8], 1, 0, 0 );
        // Key points
        cv::circle( ioImg, iorg, 2, colors[0], -1, 8, 0 );
        cv::circle( ioImg, idst, 2, colors[0], -1, 8, 0 );
    }
}
Example #14
0
void Utils::drawFeatureMatchLines(IplImage* &img, CvSeq *img1Keypoints, CvSeq *img2Keypoints, vector<int> ptpairs, int widthOffset)
{
    // ptpairs holds (index-into-img1, index-into-img2) pairs flattened into
    // one vector. The `i + 1 < size` guard keeps ptpairs[i+1] in bounds even
    // if a caller passes an odd-sized vector (the original would read past
    // the end).
    for(int i = 0; i + 1 < (int)ptpairs.size(); i += 2 ){
        CvSURFPoint* r1 = (CvSURFPoint*)cvGetSeqElem( img1Keypoints, ptpairs[i] );
        CvSURFPoint* r2 = (CvSURFPoint*)cvGetSeqElem( img2Keypoints, ptpairs[i+1] );
        // The second image is assumed to be drawn shifted right by widthOffset.
        cvLine( img, cvPointFrom32f(r1->pt), cvPoint(cvRound(r2->pt.x + widthOffset), cvRound(r2->pt.y)), cvScalar(255, 0, 150) );
    }
}
Example #15
0
void KLT::drawFeatures (IplImage *draw)
{
    // Draw a filled blue circle at each currently tracked feature.
    printf("KLT :: drawing %d features...\n", lkCount);
    // The original loop also performed lkPoints[1][k++] = lkPoints[1][i]
    // with k always equal to i — a leftover compaction step with no filter,
    // i.e. a guaranteed self-assignment — so it has been removed.
    for (int i = 0; i < lkCount; i++) {
        cvCircle(draw, cvPointFrom32f(lkPoints[1][i]), 3, CV_RGB(0,0,255), -1, 8, 0);
    }

} // end drawFeatures
Example #16
0
		void operator = (CvSURFPoint &surfp)
		{
			// Copy a SURF keypoint into this record, rounding the sub-pixel
			// position to the nearest integer pixel.
			CvPoint rounded = cvPointFrom32f(surfp.pt);
			m_ix = rounded.x;
			m_iy = rounded.y;
			m_laplacian = surfp.laplacian;
			m_size = surfp.size;
			m_dir = surfp.dir;
			m_hessian = surfp.hessian;
		}
Example #17
0
		//add 2.18
		void operator = (KeyPoint &keypt)
		{
			// Copy an OpenCV KeyPoint into this record, rounding the
			// sub-pixel position to the nearest integer pixel.
			CvPoint pixel = cvPointFrom32f(keypt.pt);
			m_ix = pixel.x;
			m_iy = pixel.y;
			//m_ix = keypt.pt.x;
			//m_iy = keypt.pt.y;
			m_laplacian = 0;//keypt.octave;//?? need to be revised!
			m_size = keypt.size;
			m_dir = keypt.angle;
			m_hessian = 0;//keypt.class_id;
		}
Example #18
0
void SCSM::TransformVector(int angle, float scale, CvPoint2D32f orig_vector,
    CvPoint &transform_point, CvPoint2D32f &transform_vector) {
    // Rotate orig_vector by `angle` (via the precomputed cosrota/sinrota
    // tables), scale it, then translate by fixedCenter to produce both the
    // float vector (transform_vector) and the rounded pixel location
    // (transform_point).
    // NOTE: the transformed location may not be integral.
    // first rotation
    transform_vector.x = orig_vector.x*cosrota[angle] - orig_vector.y*sinrota[angle];
    transform_vector.y = orig_vector.x*sinrota[angle] + orig_vector.y*cosrota[angle];
    // then scaling
    transform_vector.x *= scale;
    transform_vector.y *= scale;
    // transformed location, rounded to the nearest pixel
    double trans_x = static_cast<double>(transform_vector.x + fixedCenter.x);
    double trans_y = static_cast<double>(transform_vector.y + fixedCenter.y);
    transform_point = cvPointFrom32f(cvPoint2D32f(trans_x, trans_y));
}
Example #19
0
void Utils::drawTriangle(IplImage* &img, CvSeq *imgKeypoints, vector<int> imgTri)
{
    // Fetch the three SURF keypoints named by imgTri and connect every pair
    // of vertices with a green line.
    CvSURFPoint* vertexA = (CvSURFPoint*)cvGetSeqElem( imgKeypoints, imgTri[0] );
    CvSURFPoint* vertexB = (CvSURFPoint*)cvGetSeqElem( imgKeypoints, imgTri[1] );
    CvSURFPoint* vertexC = (CvSURFPoint*)cvGetSeqElem( imgKeypoints, imgTri[2] );
    CvPoint a = cvPointFrom32f(vertexA->pt);
    CvPoint b = cvPointFrom32f(vertexB->pt);
    CvPoint c = cvPointFrom32f(vertexC->pt);
    CvScalar green = cvScalar(0, 255, 0);
    cvLine(img, a, b, green );
    cvLine(img, a, c, green );
    cvLine(img, b, c, green );
}
Example #20
0
// Locates the quartz/wafer position: edge-detects src, walks the external
// contours, and among the polygon-approximated contours whose perimeter
// falls in the calibrated [WaferPxLow, WaferPxHigh] range picks the
// enclosing-circle centre with the smallest y below row 336. Draws the
// qualifying contours and the chosen centre on dst and returns the centre.
// NOTE(review): pt starts at the sentinel (0, 1000); if no contour
// qualifies, that sentinel is drawn and returned unchanged — confirm
// callers handle it.
CvPoint CTools::QuartzPostion(IplImage* src, IplImage* dst)
{
	CvMemStorage * storage = cvCreateMemStorage(0);
	CvSeq * contour = 0;
	int mode = CV_RETR_EXTERNAL;
	double length;
	CvPoint2D32f center;
	float r;
	CvPoint pt; 

	pt.y = 1000;
	pt.x = 0;

	CalibrateData m_CalDat;

	GetCalirateParam(&m_CalDat);

	IplImage* temp = cvCreateImage(cvGetSize(src), 8, 1);
	cvCanny(src, temp, 50, 100);

	cvFindContours(temp, storage, &contour, sizeof(CvContour), mode);

	// NOTE(review): the loop variable is reassigned to the polygon
	// approximation, so the h_next iteration continues from the
	// approximated sequence rather than the original contour list.
	for( CvSeq* c = contour; c != NULL; c = c->h_next)
	{
		c = cvApproxPoly( c, sizeof(CvContour), storage, CV_POLY_APPROX_DP, 5, 1 );
		length = cvArcLength(c, CV_WHOLE_SEQ, -1);
		if ((length > m_CalDat.WaferPxLow) && (length < m_CalDat.WaferPxHigh))
		{
			cvDrawContours(dst, c, CV_RGB(0,0,255), CV_RGB(255, 0, 0), -1, 2, 8);
			cvMinEnclosingCircle(c, &center, &r);
			// Keep the topmost qualifying centre that lies below row 336.
			if ((center.y > 336) && (center.y < pt.y))
			{
				pt = cvPointFrom32f(center);
			}
			//pt[num] = cvPointFrom32f(center);
			//cvCircle(pContoursImg, pt[num], 3, CV_RGB(0,0,255), -1);
			//cvCircle(pContoursImg, pt[num], r, CV_RGB(0,0,255), 2);
		}
	}
	cvCircle(dst, pt, 10, CV_RGB(255,0, 0), -1);
	cvReleaseImage(&temp);
	cvClearMemStorage( storage );
	cvReleaseMemStorage( &storage );

	return pt;

}
Example #21
0
// Visualize the optical-flow result: draw a line between each matched
// feature pair on top of frame B, mark the tracked centre, and show both
// input frames plus the overlay in HighGUI windows.
void OpticalFlowLK::show()
{
	if(!imgA || !imgB)
	{
		return;
	}

	//
	// draw lines between the matched feature points
	//
	IplImage* imgC = cvCreateImage(cvGetSize(imgB), IPL_DEPTH_8U, 3);
	cvConvertImage(imgB, imgC, CV_GRAY2BGR);

	for( int i=0; i<cornerCount; i++ )
	{
		// skip features that were not found or whose error is too large
		if( featuresFound[i]==0 || featureErrors[i]>LK_MAX_FEATURE_ERROR )
		{
			//printf("Error is %f/n",feature_errors[i]);
			continue;
		}
		//    printf("Got it/n");
		CvPoint p0 = cvPoint(
			cvRound( cornersA[i].x ),
			cvRound( cornersA[i].y )
			);
		CvPoint p1 = cvPoint(
			cvRound( cornersB[i].x ),
			cvRound( cornersB[i].y )
			);
		cvLine( imgC, p0, p1, CV_RGB(255,0,0), 1);
	}

	// the tracked centre
	cvCircle(imgC, cvPointFrom32f(center), 5, CV_RGB(255, 98, 0), -1);

	// display everything
	cvNamedWindow("ImageA");
	cvNamedWindow("ImageB");
	cvNamedWindow("LKpyr_OpticalFlow");
	cvShowImage("ImageA", imgA);
	cvShowImage("ImageB", imgB);
	cvShowImage("LKpyr_OpticalFlow", imgC);

	cvReleaseImage(&imgC);
}
Example #22
0
QTransform Features::getTransformation(const CvSeq* objectKeypoints, const CvSeq* imageKeypoints, vector<int> &objTri, vector<int> &imgTri)
{
    // Solves for the affine transformation mapping the triangle formed by
    // three object keypoints onto the triangle formed by three image
    // keypoints, returned as a QTransform.
    // Transformation matrix:        |a b c|
    //                               |d e f|
    //                               |0 0 1|

    // Rounded integer coordinates of the two triangles' vertices.
    CvPoint v1 = cvPointFrom32f(((CvSURFPoint*)cvGetSeqElem( objectKeypoints, objTri[0] ))->pt);
    CvPoint v2 = cvPointFrom32f(((CvSURFPoint*)cvGetSeqElem( objectKeypoints, objTri[1] ))->pt);
    CvPoint v3 = cvPointFrom32f(((CvSURFPoint*)cvGetSeqElem( objectKeypoints, objTri[2] ))->pt);
    CvPoint u1 = cvPointFrom32f(((CvSURFPoint*)cvGetSeqElem( imageKeypoints, imgTri[0] ))->pt);
    CvPoint u2 = cvPointFrom32f(((CvSURFPoint*)cvGetSeqElem( imageKeypoints, imgTri[1] ))->pt);
    CvPoint u3 = cvPointFrom32f(((CvSURFPoint*)cvGetSeqElem( imageKeypoints, imgTri[2] ))->pt);    

    // x** = source (object) vertices, y** = destination (image) vertices.
    double 	x11 = v1.x,
            x12 = v1.y,
            x21 = v2.x,
            x22 = v2.y,
            x31 = v3.x,
            x32 = v3.y,
            y11 = u1.x,
            y12 = u1.y,
            y21 = u2.x,
            y22 = u2.y,
            y31 = u3.x,
            y32 = u3.y;

    // Closed-form solution of the 6-unknown affine system (Cramer-style).
    // NOTE(review): degenerate (collinear) triangles make the denominators
    // zero — confirm callers never pass such triangles.
    double a1 = ((y11-y21)*(x12-x32)-(y11-y31)*(x12-x22))/
                ((x11-x21)*(x12-x32)-(x11-x31)*(x12-x22));
    double a2 = ((y11-y21)*(x11-x31)-(y11-y31)*(x11-x21))/
                ((x12-x22)*(x11-x31)-(x12-x32)*(x11-x21));
    double a3 = y11-a1*x11-a2*x12;
    double a4 = ((y12-y22)*(x12-x32)-(y12-y32)*(x12-x22))/
                ((x11-x21)*(x12-x32)-(x11-x31)*(x12-x22));
    double a5 = ((y12-y22)*(x11-x31)-(y12-y32)*(x11-x21))/
                ((x12-x22)*(x11-x31)-(x12-x32)*(x11-x21));
    double a6 = y12-a4*x11-a5*x12;

    // Round the coefficients to 3 decimal places.
    a1 = round(a1*1000)/1000.0;
    a2 = round(a2*1000)/1000.0;
    a3 = round(a3*1000)/1000.0;
    a4 = round(a4*1000)/1000.0;
    a5 = round(a5*1000)/1000.0;
    a6 = round(a6*1000)/1000.0;

    if(VERBOSE)
        cout << "Transformacion hallada: [" << a1 << ", " << a4 << ", 0, " << a2 << ", " << a5 << ", 0, " << a3 << ", " << a6 << ", 1]" << endl << endl;

    QTransform tr = QTransform(a1, a4, 0, a2, a5, 0, a3, a6);
//    qDebug() << "m11:" << tr.m11();
//    qDebug() << "m22:" << tr.m22();

    return tr;
}
void rspfOpenCVSURFFeatures::runUcharTransformation(rspfImageData* tile)
{   
	// Run SURF keypoint extraction on the tile's first band, record each
	// keypoint (offset by the tile origin) in theKeyPoints, and draw a
	// small circle at each detection on the output tile.

	IplImage *input;
	IplImage *output;

	char* bSrc;
	char* bDst;

	//int nChannels = tile->getNumberOfBands();

	//for(int k=0; k<nChannels; k++) {
	input = cvCreateImageHeader(cvSize(tile->getWidth(),tile->getHeight()),8,1);
	output = cvCreateImageHeader(cvSize(tile->getWidth(),tile->getHeight()),8,1);

	CvMemStorage* storage = cvCreateMemStorage(0);
	
	// Wrap the tile buffers without copying.
	bSrc = static_cast<char*>(tile->getBuf(0));
	input->imageData=bSrc;
	bDst = static_cast<char*>(theTile->getBuf());
	output->imageData=bDst;
    
	CvSeq *imageKeypoints = NULL;
	cvCopy(input,output);
	
	CvSURFParams params = cvSURFParams(theHessianThreshold, 1);

	cvExtractSURF(input,NULL,&imageKeypoints,NULL,storage,params);

	int numKeyPoints = imageKeypoints->total;

	for (int i=0;i<numKeyPoints;i++){
		CvSURFPoint* corner = (CvSURFPoint*)cvGetSeqElem(imageKeypoints,i);
		theKeyPoints.push_back(rspfDpt(corner->pt.x,corner->pt.y)+tile->getOrigin());         
		cvCircle(output,cvPointFrom32f(corner->pt),1,cvScalar(0),1);
	}
	cvReleaseImageHeader(&input);
	cvReleaseImageHeader(&output);
	// The keypoint sequence lives inside `storage`; the original leaked it
	// on every tile. (An unused 32-bit `temp` image has also been removed.)
	cvReleaseMemStorage(&storage);
	//}

	theTile->validate(); 
}
Example #24
0
void EnclosingCircle(IplImage* _image, IplImage *dem)
{
        // Edge-detect _image and draw the minimal enclosing circle of every
        // contour onto dem.
        assert(_image != 0);

        IplImage* bin = cvCreateImage( cvGetSize(_image), IPL_DEPTH_8U, 1);

        // convert to grayscale
        cvConvertImage(_image, bin, CV_BGR2GRAY);
        // detect edges
        cvCanny(bin, bin, 100, 200);

        //cvNamedWindow( "bin", 1 );
        //cvShowImage("bin", bin);

        // memory storage for the contours
        CvMemStorage* storage = cvCreateMemStorage(0);
        CvSeq* contours = 0;

        // find the contours (the returned count is not needed; the
        // original stored it in an unused variable)
        cvFindContours( bin, storage, &contours, sizeof(CvContour), CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0));

        // An image with no detected edges legitimately yields no contours.
        // The original assert()ed here, aborting the whole program in that
        // case; with the assert removed the loop below simply draws nothing.
        CvPoint2D32f center;
        float radius = 0;
        // walk every contour
        for( CvSeq* current = contours; current != NULL; current = current->h_next )
        {
                // fit the minimal enclosing circle
                cvMinEnclosingCircle(current, &center, &radius);
        
                // draw it
                cvCircle(dem, cvPointFrom32f(center), radius, CV_RGB(255, 0, 0), 1, 1);
        }

        // release resources
        cvReleaseMemStorage(&storage);
        cvReleaseImage(&bin);
}
Example #25
0
bool BouyObject::GetVisionReturn(const IplImage * imgIn, Vision::Return& result, IplImage * debugOut) const
{
    // Fill `result` from the bounding blobs found in imgIn; returns false
    // (with result.mValid = false) when nothing is detected.
    // NOTE(review): each loop iteration overwrites `result`, so only the
    // LAST blob returned by GetBounding survives — presumably the list
    // ordering makes that the desired one; verify against GetBlobBoxes.
    std::list<CvBox2D> blobList = GetBounding(imgIn);
    result.mValid = false;
    if(blobList.size() == 0) return false;
    CvFont font;
    cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX, 1,1);
    for(std::list<CvBox2D>::iterator it = blobList.begin(); it != blobList.end(); it++)
    {
        result.mCenterI = it->center.x;
        result.mCenterJ = it->center.y;
        result.mArea = it->size.width * it->size.height;
        result.mValid = true;
        // Flip the sign so positive angles point to the right.
        result.mAngle = VisionUtils::GetAngle(*it);
        result.mAngle = -result.mAngle;
        if(debugOut)
        {
            std::ostringstream s;
            s << "Area(" << result.mArea << ")";
            cvPutText(debugOut,s.str().c_str(),cvPointFrom32f(it->center),&font,mNearColor);
            Zebulon::Vision::VisionUtils::DrawSquare(debugOut,*it,mNearColor);
        }
    }
    return true;
//    double mCenterI;    ///< I is like X 0 to width = left to right
//    double mCenterJ;    ///< J is like Y 0 to height  top to bottom
//    int mArea;          ///< Area in pixels, either pixel count or width*height of bounding box
//    double mAngle;      ///< Angle in degrees [-90, 90] positive to the right, negative left.

//    int mValid;         ///< Valid is an on or off (or boolean) if something is detected
//    double mConfidence; ///< Confidence in identification of target, higher is better

//    double mMinI;   ///< Bounding Box furthest left column index.
//    double mMinJ;   ///< Bounding Box lowest row value (lower value higher up).
//    double mMaxI;   ///< Bounding Box furthest right column index (higher value right).
//    double mMaxJ;   ///< Bounding Box highest row value (higher value is towards picture bottom).
}
Example #26
0
/**
 * @author      JIA Pei
 * @version     2010-05-07
 * @brief       draw a point on the image
 * @param       pt      Input -- the point (sub-pixel position, rounded for drawing)
 * @param       ioImg   Input and Output -- the image drawn with the point
 * @return      void
 */
void VO_Fitting2DSM::VO_DrawAPoint(const cv::Point2f& pt, cv::Mat& ioImg)
{
    // Round the sub-pixel location, then stamp a small filled circle there.
    const cv::Point rounded = cvPointFrom32f(pt);
    cv::circle( ioImg, rounded, 2, colors[5], -1, 8, 0 );
}
Example #27
0
IplImage* BouyBaseObject::GetMask(const IplImage * imgIn, IplImage * debugOut) const
{
    // Fuse the enabled sub-masks (histogram, color, segmentation, GV color)
    // into one normalized mask, erase small noise blobs from its binary
    // threshold, then run template matching over the survivors.
    // Returns a newly allocated image the caller must release, or NULL when
    // imgIn is NULL.
    if(imgIn == NULL) return NULL;
    IplImage* colormask = NULL;
    IplImage* gvcolormask = NULL;
    //IplImage* shapemask = ShapeMask(imgIn);
    IplImage* segmentationmask = NULL;
    IplImage* histogrammask = NULL;
    //IplImage* edgemask = EdgeMask(imgIn);

    IplImage * imgOut = cvCreateImage(cvGetSize(imgIn),IPL_DEPTH_8U, 1);
    IplImage * threshold = cvCreateImage(cvGetSize(imgIn),IPL_DEPTH_8U, 1);
    cvZero(imgOut);
    if(mEnableHist)
    {
        histogrammask = HistogramMask(imgIn);
    }
    if(mEnableColor)
    {
        colormask = ColorMask(imgIn);
    }
    if(mEnableSegment)
    {
         segmentationmask = SegmentationMask(imgIn);
    }
    if(mEnableGVColor)
    {
         gvcolormask = GVColorMask(imgIn);
    }
    // Blend each available mask into imgOut; count tracks how many masks
    // have been combined so the running weighting stays correct.
    int count = 1;
    if(VisionUtils::CombineMasks(imgOut,histogrammask,imgOut,count,mHistWeight))
    {
        count++;
    }
    if(VisionUtils::CombineMasks(imgOut,colormask,imgOut, count, mColorWeight))
    {
        count++;
    }
    if(VisionUtils::CombineMasks(imgOut,segmentationmask,imgOut,count,mSegmentWeight))
    {
        count++;
    }
    if(VisionUtils::CombineMasks(imgOut,gvcolormask,imgOut,count,mGVColorWeight))
    {
        count++;
    }

    cvNormalize(imgOut,imgOut,255,0,CV_MINMAX);
    if(mDebug)
    {
        cvShowImage("combined", imgOut);
    }
    cvThreshold(imgOut,threshold,mMainThreshold,255,CV_THRESH_BINARY );
    // Black out every blob below the noise-size threshold so template
    // matching only sees plausible candidates.
    std::list<CvBox2D> blobList;
    blobList = Zebulon::Vision::VisionUtils::GetBlobBoxes(threshold,0,mMinNoiseSizePercent);
    for(std::list<CvBox2D>::iterator it = blobList.begin(); it != blobList.end(); it++)
    {
        CvPoint2D32f boxCorners32[4];
        CvPoint boxCorners[4];
        cvBoxPoints(*it,boxCorners32);
        for(int i = 0; i < 4; i ++)
        {
            boxCorners[i] = cvPointFrom32f(boxCorners32[i]);
        }
        cvFillConvexPoly(threshold,boxCorners,4,cvScalar(0,0,0),4);
    }
    // TemplateMask allocates a fresh image; keep the old pointer so the
    // combined mask can be released (the original leaked it by overwriting
    // imgOut in place).
    IplImage * combined = imgOut;
    imgOut = TemplateMask(combined, threshold, mTemplate);
    cvReleaseImage(&combined);
    if(mDebug)
    {
        cvShowImage("clean", threshold);
        cvShowImage("final", imgOut);
        // Only show the masks that were actually produced; the original
        // also passed a never-assigned NULL "template" image to
        // cvShowImage, which raises an OpenCV error.
        if(colormask) cvShowImage("color", colormask);
        if(histogrammask) cvShowImage("hist", histogrammask);
        if(segmentationmask) cvShowImage("segment", segmentationmask);
        if(gvcolormask) cvShowImage("gvcolor", gvcolormask);
    }
    // Release every intermediate (the original also leaked `threshold`).
    cvReleaseImage(&threshold);
    cvReleaseImage(&colormask);
    cvReleaseImage(&segmentationmask);
    cvReleaseImage(&histogrammask);
    cvReleaseImage(&gvcolormask);
    return imgOut;
}
Example #28
0
/**
 * Draw one Delaunay subdivision edge into img, optionally remapping its
 * endpoints through a point-correspondence table (unwarped -> warped).
 *
 * @param img              destination image
 * @param subdiv           subdivision the edge belongs to (kept for API symmetry)
 * @param edge             edge to draw
 * @param color            line color
 * @param unwarped_points  source positions of tracked points (ignored when count == 0)
 * @param warped_points    corresponding warped positions
 * @param count            number of entries in the correspondence arrays; 0 disables remapping
 * @param status           per-point validity flags; only points with status[i] != 0 are matched
 */
void draw_subdiv_edge( IplImage* img, CvSubdiv2D* subdiv, CvSubdiv2DEdge edge, CvScalar color, CvPoint2D32f * unwarped_points, CvPoint2D32f * warped_points, int count, char * status )
{
    CvSubdiv2DPoint* org_pt = cvSubdiv2DEdgeOrg(edge);
    CvSubdiv2DPoint* dst_pt = cvSubdiv2DEdgeDst(edge);

    if( !org_pt || !dst_pt )
        return;

    CvPoint2D32f org = org_pt->pt;
    CvPoint2D32f dst = dst_pt->pt;

    CvPoint iorg = cvPoint( cvRound( org.x ), cvRound( org.y ));
    CvPoint idst = cvPoint( cvRound( dst.x ), cvRound( dst.y ));

    if( count ) {
        // Look both endpoints up in the correspondence table.  Exact float
        // equality is intentional: the subdivision was seeded with these
        // exact coordinates, so matched points compare bit-identical.
        bool found_org = false, found_dst = false;
        int found_org_loc = -1, found_dst_loc = -1;
        for(int i = 0; i < count; i++) {
            if(!status[i])
                continue;
            if(!found_org && (org.x == unwarped_points[i].x) && (org.y == unwarped_points[i].y)) {
                found_org = true;
                found_org_loc = i;
            }
            if(!found_dst && (dst.x == unwarped_points[i].x) && (dst.y == unwarped_points[i].y)) {
                found_dst = true;
                found_dst_loc = i;
            }
        }

        if(!found_dst && !found_org) {
            printf("Unfound point correspondence!\n");
        }
        else {
            // Remap whichever endpoints were matched to their warped positions.
            if(found_org)
                iorg = cvPointFrom32f(warped_points[found_org_loc]);
            if(found_dst)
                idst = cvPointFrom32f(warped_points[found_dst_loc]);
        }
    }

    // Clamp endpoints to valid pixel coordinates.  (The previous code
    // clamped to width/height, which is one past the last valid pixel.)
    iorg.x = iorg.x >= img->width  ? img->width  - 1 : (iorg.x < 0 ? 0 : iorg.x);
    iorg.y = iorg.y >= img->height ? img->height - 1 : (iorg.y < 0 ? 0 : iorg.y);
    idst.x = idst.x >= img->width  ? img->width  - 1 : (idst.x < 0 ? 0 : idst.x);
    idst.y = idst.y >= img->height ? img->height - 1 : (idst.y < 0 ? 0 : idst.y);

    cvLine( img, iorg, idst, color, 1, CV_AA, 0 );
}
Example #29
0
/**
 * Warp the "clean" convex-hull image onto the geometry of the "dirty" one.
 *
 * Matches feature points between the two threshold images, builds a Delaunay
 * triangulation over the matched points (plus the four image corners), and
 * for every triangle remaps interior pixels — and the texture coordinates in
 * clean_texture — via barycentric interpolation.  Writes several debug images
 * (sparse.jpg, triangles.jpg, fullwarp.jpg, edgeswarp.jpg, edges.jpg).
 *
 * @param clean_texture  CV_32FC1 matrix of interleaved (x,y) texture
 *                       coordinates, verts entries; updated in place
 * @param verts          number of vertices stored in clean_texture
 * @return 0 on completion
 */
int opticaltri( CvMat * &clean_texture, int verts )
{
	const char * im1fname = "conhull-dirty-thresh.jpg";
	const char * im2fname = "conhull-clean-thresh.jpg";

	int count = MAX_COUNT;
	char * status;

	CvPoint2D32f * source_points;
	CvPoint2D32f * dest_points;
	CvPoint2D32f * delaunay_points = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(CvPoint2D32f));

	// findsiftpoints allocates and fills the point arrays and status flags
	// (out-params passed by reference) and returns the number of matches.
	count = findsiftpoints( "conhull-dirty.jpg", "conhull-clean.jpg", source_points, dest_points, status );

	IplImage * image1 = cvLoadImage(im1fname, CV_LOAD_IMAGE_COLOR);

	CvMemStorage * storage = cvCreateMemStorage(0);
	CvSubdiv2D * delaunay = cvCreateSubdivDelaunay2D( cvRect(0,0,image1->width,image1->height), storage);

	IplImage * image2 = cvLoadImage(im2fname, CV_LOAD_IMAGE_COLOR);

	cvSet( image1, cvScalarAll(255) );

	std::map<CvPoint, CvPoint> point_lookup_map;
	std::vector<std::pair<CvPoint, CvPoint> > point_lookup;

	int num_matches = 0;
	int num_out_matches = 0;
	int max_dist = 50;	// matches that moved farther than this are treated as outliers

	// Pin the four image corners to themselves so the triangulation always
	// covers the whole image.
	point_lookup_map[cvPoint(0,0)] = cvPoint(0,0);
	point_lookup_map[cvPoint(0,image1->height-1)] = cvPoint(0,image1->height-1);
	point_lookup_map[cvPoint(image1->width-1,0)] = cvPoint(image1->width-1,0);
	point_lookup_map[cvPoint(image1->width-1,image1->height-1)] = cvPoint(image1->width-1,image1->height-1);

	point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(0,0), cvPoint(0,0)));
	point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(0,image1->height-1), cvPoint(0,image1->height-1)));
	point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(image1->width-1,0), cvPoint(image1->width-1,0)));
	point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(image1->width-1,image1->height-1), cvPoint(image1->width-1,image1->height-1)));

	printf("Inserting corners...");
	// put corners in the Delaunay subdivision
	for(unsigned int i = 0; i < point_lookup.size(); i++) {
		cvSubdivDelaunay2DInsert( delaunay, cvPointTo32f(point_lookup[i].first) );
	}
	printf("done.\n");

	for(int i = 0; i < count; i++) {
		if(status[i]) {
			CvPoint source = cvPointFrom32f(source_points[i]);
			CvPoint dest = cvPointFrom32f(dest_points[i]);

			if((((int)fabs((double)(source.x - dest.x))) > max_dist) ||
				 (((int)fabs((double)(source.y - dest.y))) > max_dist)) {
				num_out_matches++;
			}
			else if((dest.x >= 0) && (dest.y >= 0) && (dest.x < (image1->width)) && (dest.y < (image1->height))) {
				if(point_lookup_map.find(source) == point_lookup_map.end()) {
					num_matches++;

					point_lookup_map[source] = dest;
					point_lookup.push_back(std::pair<CvPoint,CvPoint>(source,dest));
					cvSubdivDelaunay2DInsert( delaunay, cvPointTo32f(source) );
					// Paste a 16x16 patch from the clean image around each
					// match, producing a sparse preview of the warp.
					cvSetImageROI( image1, cvRect(source.x-8,source.y-8,8*2,8*2) );
					cvResetImageROI( image2 );
					cvGetRectSubPix( image2, image1, dest_points[i] );
				}
			}
		}
	}
	printf("%d %d\n",num_matches,num_out_matches);
	printf("%d lookups\n",(int)point_lookup_map.size());

	cvResetImageROI( image1 );

	cvSaveImage("sparse.jpg", image1);

	cvReleaseImage(&image1);
	image1 = cvLoadImage(im1fname, CV_LOAD_IMAGE_COLOR);
	cvSet( image1, cvScalarAll(255) );
	printf("Warping image...");

	CvSeqReader  reader;
	int total = delaunay->edges->total;
	int elem_size = delaunay->edges->elem_size;

	std::vector<Triangle> trivec;
	std::vector<CvMat *> baryinvvec;

	// Walk the quad-edge sequence twice: the first pass turns around the
	// left face of each edge, the second around the right face, so every
	// triangle of the subdivision is visited.
	for( int i = 0; i < total*2; i++ ) {
		if((i == 0) || (i == total)) {
			cvStartReadSeq( (CvSeq*)(delaunay->edges), &reader, 0 );
		}
		CvQuadEdge2D* edge = (CvQuadEdge2D*)(reader.ptr);

		if( CV_IS_SET_ELEM( edge ))	{
			CvSubdiv2DEdge curedge = (CvSubdiv2DEdge)edge;
			CvSubdiv2DEdge t = curedge;
			Triangle temptri;
			int count = 0;

			// construct a triangle from this edge
			do {
				CvSubdiv2DPoint* pt = cvSubdiv2DEdgeOrg( t );
				if(count < 3) {
					// clamp vertices into the image (subdivision vertices can
					// lie on the virtual outer triangle, far outside)
					pt->pt.x = pt->pt.x >= image1->width ? image1->width-1 : pt->pt.x;
					pt->pt.y = pt->pt.y >= image1->height ? image1->height-1 : pt->pt.y;
					pt->pt.x = pt->pt.x < 0 ? 0 : pt->pt.x;
					pt->pt.y = pt->pt.y < 0 ? 0 : pt->pt.y;

					temptri.points[count] = cvPointFrom32f( pt->pt );
				}
				else {
					printf("More than 3 edges\n");
				}
				count++;
				if(i < total)
					t = cvSubdiv2DGetEdge( t, CV_NEXT_AROUND_LEFT );
				else
					t = cvSubdiv2DGetEdge( t, CV_NEXT_AROUND_RIGHT );
			} while( t != curedge );

			// check that triangle is not already in
			if( std::find(trivec.begin(), trivec.end(), temptri) == trivec.end() ) {
				// push triangle in and draw
				trivec.push_back(temptri);
				cvLine( image1, temptri.points[0], temptri.points[1], CV_RGB(255,0,0), 1, CV_AA, 0 );
				cvLine( image1, temptri.points[1], temptri.points[2], CV_RGB(255,0,0), 1, CV_AA, 0 );
				cvLine( image1, temptri.points[2], temptri.points[0], CV_RGB(255,0,0), 1, CV_AA, 0 );

				// Barycentric matrix of this triangle; its inverse maps a
				// homogeneous pixel (x, y, 1) to barycentric coordinates.
				CvMat * barycen = cvCreateMat( 3, 3, CV_32FC1 );
				CvMat * baryceninv = cvCreateMat( 3, 3, CV_32FC1 );

				barycen->data.fl[3*0+0] = temptri.points[0].x;
				barycen->data.fl[3*0+1] = temptri.points[1].x;
				barycen->data.fl[3*0+2] = temptri.points[2].x;
				barycen->data.fl[3*1+0] = temptri.points[0].y;
				barycen->data.fl[3*1+1] = temptri.points[1].y;
				barycen->data.fl[3*1+2] = temptri.points[2].y;
				barycen->data.fl[3*2+0] = 1;
				barycen->data.fl[3*2+1] = 1;
				barycen->data.fl[3*2+2] = 1;

				cvInvert( barycen, baryceninv, CV_LU );
				baryinvvec.push_back(baryceninv);

				cvReleaseMat( &barycen );
			}
		}

		CV_NEXT_SEQ_ELEM( elem_size, reader );
	}
	printf("%d triangles...", (int)trivec.size());
	cvSaveImage("triangles.jpg", image1);

	cvSet( image1, cvScalarAll(255) );
	IplImage * clean_nonthresh = cvLoadImage( "conhull-clean.jpg", CV_LOAD_IMAGE_COLOR );

	// for each triangle
	for(unsigned int i = 0; i < trivec.size(); i++) {
		Triangle curtri = trivec[i];
		CvMat * curpoints = cvCreateMat( 1, 3, CV_32SC2 );
		Triangle target;

		printf("Triangle %u / %d\n",i,(int)trivec.size());
		int is_corner = 0;
		for(int j = 0; j < 3; j++) {
			CV_MAT_ELEM( *curpoints, CvPoint, 0, j ) = curtri.points[j];
			printf("%d,%d\n",curtri.points[j].x,curtri.points[j].y);

			if((curtri.points[j] == cvPoint(0,0)) ||  (curtri.points[j] == cvPoint(0,image1->height - 1)) ||(curtri.points[j] == cvPoint(image1->width - 1,0)) ||(curtri.points[j] == cvPoint(image1->width - 1,image1->height - 1))) {
				is_corner++;
			}

			// find where this vertex maps to in the clean image
			for(unsigned int k = 0; k < point_lookup.size(); k++) {
				std::pair<CvPoint,CvPoint> thispair = point_lookup[k];
				if(thispair.first == curtri.points[j]) {
					target.points[j] = thispair.second;
					break;
				}
			}
		}

		// skip degenerate triangles whose vertices are all fixed corners
		if(is_corner < 3) {
			CvMat * newcorners = cvCreateMat( 3, 3, CV_32FC1 );
			newcorners->data.fl[3*0+0] = target.points[0].x;
			newcorners->data.fl[3*0+1] = target.points[1].x;
			newcorners->data.fl[3*0+2] = target.points[2].x;
			newcorners->data.fl[3*1+0] = target.points[0].y;
			newcorners->data.fl[3*1+1] = target.points[1].y;
			newcorners->data.fl[3*1+2] = target.points[2].y;
			newcorners->data.fl[3*2+0] = 1;
			newcorners->data.fl[3*2+1] = 1;
			newcorners->data.fl[3*2+2] = 1;

			CvContour hdr;
			CvSeqBlock blk;
			CvRect trianglebound = cvBoundingRect( cvPointSeqFromMat(CV_SEQ_KIND_CURVE+CV_SEQ_FLAG_CLOSED, curpoints, &hdr, &blk), 1 );
			printf("Bounding box: %d,%d,%d,%d\n",trianglebound.x,trianglebound.y,trianglebound.width,trianglebound.height);
			for(int y = trianglebound.y; (y < (trianglebound.y + trianglebound.height)) && ( y < image1->height); y++) {
				for(int x = trianglebound.x; (x < (trianglebound.x + trianglebound.width)) && (x < image1->width); x++) {
					CvMat * curp = cvCreateMat(3, 1, CV_32FC1);
					CvMat * result = cvCreateMat(3, 1, CV_32FC1);
					curp->data.fl[0] = x;
					curp->data.fl[1] = y;
					curp->data.fl[2] = 1;
					cvMatMul( baryinvvec[i], curp, result );

					// inside-test: all barycentric coordinates positive and
					// summing to ~1
					if( (result->data.fl[0] > MIN_VAL) && (result->data.fl[1] > MIN_VAL) && (result->data.fl[2] > MIN_VAL) && (fabs(1.0 - (result->data.fl[0]+result->data.fl[1]+result->data.fl[2])) <= 0.01) ) {
						// interpolate the corresponding clean-image position
						CvMat * sourcepoint = cvCreateMat(3, 1, CV_32FC1);
						cvMatMul( newcorners, result, sourcepoint );

						double sourcex = sourcepoint->data.fl[0];
						double sourcey = sourcepoint->data.fl[1];
						if((sourcex >= 0) && (sourcey >= 0) && (sourcex < (image1->width)) && (sourcey < (image1->height))) {
							cvSet2D( image1, y, x, cvGet2D( clean_nonthresh, (int)sourcey, (int)sourcex ) );
						}

						cvReleaseMat( &sourcepoint );
					}
					cvReleaseMat( &result );
					cvReleaseMat( &curp );
				}
			}

			// remap the texture coordinates that fall inside this triangle
			for(int k = 0; k < verts; k++) {
				double x = clean_texture->data.fl[2*k+0];
				double y = clean_texture->data.fl[2*k+1];

				CvMat * curp = cvCreateMat(3, 1, CV_32FC1);
				CvMat * result = cvCreateMat(3, 1, CV_32FC1);
				curp->data.fl[0] = x;
				curp->data.fl[1] = y;
				curp->data.fl[2] = 1;
				cvMatMul( baryinvvec[i], curp, result );

				if( (result->data.fl[0] > MIN_VAL) && (result->data.fl[1] > MIN_VAL) && (result->data.fl[2] > MIN_VAL) && (fabs(1.0 - (result->data.fl[0]+result->data.fl[1]+result->data.fl[2])) <= 0.01) ) {

					CvMat * sourcepoint = cvCreateMat(3, 1, CV_32FC1);
					cvMatMul( newcorners, result, sourcepoint );

					double sourcex = sourcepoint->data.fl[0];
					double sourcey = sourcepoint->data.fl[1];
					if((sourcex >= 0) && (sourcey >= 0) && (sourcex < (image1->width)) && (sourcey < (image1->height))) {
						clean_texture->data.fl[2*k+0] = sourcex;
						clean_texture->data.fl[2*k+1] = sourcey;
					}

					cvReleaseMat( &sourcepoint );
				}
				cvReleaseMat( &result );
				cvReleaseMat( &curp );
			}
			cvReleaseMat( &newcorners );
		}
		cvReleaseMat( &curpoints );
	}

	cvReleaseImage( &clean_nonthresh );

	printf("done.\n");

	cvSaveImage("fullwarp.jpg", image1);

	printf("Drawing subdivisions on warped image...");
	draw_subdiv( image1, delaunay, NULL, NULL, 0, NULL );
	printf("done.\n");

	cvSaveImage("edgeswarp.jpg", image1);

	cvReleaseImage(&image2);

	image2 = cvLoadImage(im2fname, CV_LOAD_IMAGE_COLOR);

	printf("Drawing subdivisions on unwarped image...");
	printf("done.\n");

	cvSaveImage("edges.jpg",image2);

	// release the per-triangle inverse barycentric matrices (previously leaked)
	for(unsigned int i = 0; i < baryinvvec.size(); i++) {
		cvReleaseMat( &baryinvvec[i] );
	}

	cvReleaseImage(&image1);
	cvFree(&source_points);
	cvFree(&dest_points);
	cvFree(&status);
	cvReleaseMemStorage(&storage);
	cvFree(&delaunay_points);

	cvReleaseImage(&image2);

	return 0;
}
Example #30
0
int main( int argc, char* argv[] ) {

	//IplImage* img = cvCreateImage(imSize,IPL_DEPTH_8U,3);
	IplImage* img = cvLoadImage(imcd0,CV_LOAD_IMAGE_UNCHANGED);
	IplImage* imgA = cvLoadImage(imcd0,CV_LOAD_IMAGE_GRAYSCALE);
	IplImage* imgB = cvLoadImage(imcd1,CV_LOAD_IMAGE_GRAYSCALE);
	imSize = cvSize(img->width,img->height);
	rmax=0.8*((imSize.width>imSize.height)?imSize.height/2:imSize.width/2);
	rmin=0.2*((imSize.width>imSize.height)?imSize.height/2:imSize.width/2);
	lx=0.5*imSize.width;
	ly=0.5*imSize.height;
	int win_siz	= 7;
	int arr_siz	= NUMX*NUMY;
	CvPoint2D32f p0 = cvPoint2D32f(imSize.width/2,imSize.height/2);
	IplImage*	pyr = cvCreateImage(imSize,8,1);
	IplImage*	pyr_old = cvCreateImage(imSize,8,1);
	char* status	=0;
	status = (char*)cvAlloc(arr_siz);


	cvNamedWindow("testWindow");
	cvNamedWindow("ImgA");
	cvShowImage("ImgA", imgA);
	cvNamedWindow("ImgB");
	cvShowImage("ImgB", imgB);

	CvPoint2D32f*	arrg		= new CvPoint2D32f[arr_siz];
	CvPoint2D32f*	arrg_old	= new CvPoint2D32f[arr_siz];

	int counter=0;
	for(int x=0; x<NUMX; x++) {
		for(int y=0; y<NUMY; y++) {
			arrg_old[counter].x = p0.x + (-lx/2) + lx*x/NUMX;
			arrg_old[counter].y = p0.y + (-ly/2) + lx*y/NUMY;
			counter++;
		}
	}
	cout << "f**k-0" << endl;
	for(int i=0; i<arr_siz; i++) {
		cvLine(img,cvPointFrom32f(arrg_old[i]),cvPointFrom32f(arrg_old[i]),CV_RGB(0,0,0),4);
	}
	cvShowImage("testWindow",img);
	cvWaitKey(100);
	cout << "f**k-1" << endl;

	cvFindCornerSubPix(imgA,
	        			arrg_old,
	        			arr_siz,
	        			cvSize(win_siz,win_siz),
	        			cvSize(2,2),
	        			cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03));
	//cvReleaseImage(&img);
	//img = cvLoadImage(imcd0,CV_LOAD_IMAGE_UNCHANGED);
	cout << "f**k-2" << endl;
	for(int i=0; i<arr_siz; i++) {
		cvLine(img,cvPointFrom32f(arrg_old[i]),cvPointFrom32f(arrg_old[i]),CV_RGB(255,0,255),4);
	}
	cvShowImage("testWindow",img);
	cvWaitKey(100);
	cout << "f**k-3" << endl;

	float errors[arr_siz];
	cvCalcOpticalFlowPyrLK(imgA,imgB,
	        			pyr_old, pyr,
	        			arrg_old,
	        			arrg,
	        			arr_siz,
	        			cvSize(win_siz,win_siz),
	        			5,
	        			status,
	        			errors,
	        			cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.3),
	        			0);

	CvPoint2D32f dp, dp2;
	CvPoint2D32f center = cvPoint2D32f(0., 0.);
	bool arr_draw[arr_siz];
	int count = 0;
	for(int i=0; i<arr_siz; i++) {
		cvLine(img,cvPointFrom32f(arrg[i]),cvPointFrom32f(arrg[i]),CV_RGB(0,255,0),4);
		CvScalar color = CV_RGB(255,0,0);
		dp = getDp(arrg[i],arrg_old[i]);
		double len = getLength(dp);
//		if(errors[i]<50) {
		if(getLength(dp)>3) {
			color = CV_RGB(255,0,0);
		} else {
			color = CV_RGB(100,255,100);
		}
		int nc = i+1;
		arr_draw[i] = false;
		if((nc>-1) && (nc<arr_siz) && len>3) {
			dp2=getDp(arrg[nc],arrg_old[nc]);
			if(getLength(dp2)>2) {
				CvPoint2D32f ctmp = getCrossPoint(arrg_old[i],getOrtoVec(dp), arrg_old[nc],getOrtoVec(dp2));
//				cvLine(img,cvPointFrom32f(arrg_old[i]),cvPointFrom32f(ctmp),CV_RGB(0,0,0),1);
//				cvLine(img,cvPointFrom32f(arrg[i]),cvPointFrom32f(ctmp),CV_RGB(0,0,0),1);
				center = getSum(center,ctmp);
				count++;
				arr_draw[i] = true;
			}
		}
		drawArrow(img,arrg_old[i],arrg[i],color,2,15.);
		cout << "status=[" << (int)status[i] << "], error=[" << errors[i] << "]" << endl;
//		cout << "[" << arrg[i].x << "," << arrg[i].y << "]" << endl;

	}
	center=getDiv(center,count);

	cvCircle(img,cvPointFrom32f(center),10,CV_RGB(0,200,0),1);
	double df = 0;
	for(int i=0; i<arr_siz; i++) {
		if(arr_draw[i]) {
			cvLine(img, cvPointFrom32f(center), cvPointFrom32f(arrg_old[i]),CV_RGB(0,0,0),1);
			cvLine(img, cvPointFrom32f(center), cvPointFrom32f(arrg[i]),CV_RGB(0,0,0),1);
			df += 180.0*(getLength(getDel(arrg[i],arrg_old[i])))
			/(CV_PI*getLength(getDel(arrg_old[i],center)));
		}
	}
	CvFont font, fontbg;
	cvInitFont(&font,CV_FONT_HERSHEY_PLAIN, 2, 2, 0.0, 2, CV_AA);
	cvInitFont(&fontbg,CV_FONT_HERSHEY_PLAIN, 2, 2, 0.0, 8, CV_AA);
	char buff[100];
	bzero(buff,sizeof(buff));
	sprintf(buff,"angle=%0.1f degres",(df/count));
	cvPutText(img,buff,cvPoint(10,25),&fontbg,CV_RGB(0,0,0));
	cvPutText(img,buff,cvPoint(10,25),&font,CV_RGB(255,0,0));

/*
	for(int r=0; r<NUMR; r++) {
		for(int f=0; f<NUMF; f++) {
			double pfi = 2*CV_PI*f/NUMF;
			double ro	= rmin + (rmax-rmin)*r/NUMR;
			p1.x = p0.x + ro*cos(pfi);
			p1.y = p0.y + ro*sin(pfi);
			//cvLine(img,cvPointFrom32f(p1),cvPointFrom32f(p1),CV_RGB(0,0,255),2);
			drawArrow(img,p0,p1,CV_RGB(255,0,0));
		}
	}
*/
	cvShowImage("testWindow",img);
	cvWaitKey(0);

	cvDestroyWindow("testWindow");
	cvReleaseImage(&img);
	cout << "Shutdown" << endl;
	return 0;
}