Example #1
0
int main(int argc, char *argv[])
{
    char *path = "sequences/cokecan/img";
    char *ext = "png";
    int numLength = 5;
    char numString[numLength+1];
    char filename[255];
    int start = 0;
    int end = 291;
    cv::Point2f initTopLeft(150,82);
    cv::Point2f initBottomDown(170,118);

    CMT cmt;
    //cmt.estimateRotation = false;
    for(int i = start; i <= end; i++)
    {
        num2str(numString, numLength+1, i);
        sprintf(filename, "%s%s.%s", path, numString, ext);

#ifdef DEBUG_MODE
        qDebug() << filename;
#endif

        cv::Mat img = cv::imread(filename);
        cv::Mat im_gray;
        cv::cvtColor(img, im_gray, CV_RGB2GRAY);

        if(i == start)
            cmt.initialise(im_gray, initTopLeft, initBottomDown);
        cmt.processFrame(im_gray);

        for(int i = 0; i<cmt.trackedKeypoints.size(); i++)
            cv::circle(img, cmt.trackedKeypoints[i].first.pt, 3, cv::Scalar(255,255,255));
        cv::line(img, cmt.topLeft, cmt.topRight, cv::Scalar(255,255,255));
        cv::line(img, cmt.topRight, cmt.bottomRight, cv::Scalar(255,255,255));
        cv::line(img, cmt.bottomRight, cmt.bottomLeft, cv::Scalar(255,255,255));
        cv::line(img, cmt.bottomLeft, cmt.topLeft, cv::Scalar(255,255,255));

#ifdef DEBUG_MODE
        qDebug() << "trackedKeypoints";
        for(int i = 0; i<cmt.trackedKeypoints.size(); i++)
            qDebug() << cmt.trackedKeypoints[i].first.pt.x << cmt.trackedKeypoints[i].first.pt.x << cmt.trackedKeypoints[i].second;
        qDebug() << "box";
        qDebug() << cmt.topLeft.x << cmt.topLeft.y;
        qDebug() << cmt.topRight.x << cmt.topRight.y;
        qDebug() << cmt.bottomRight.x << cmt.bottomRight.y;
        qDebug() << cmt.bottomLeft.x << cmt.bottomLeft.y;
#endif

        imshow("frame", img);
        cv::waitKey(1);
    }
    return 0;
}
Example #2
0
    // Read one frame from imgInPort, run a single CMT tracking step, and
    // publish the bounding-box corners on dataOutPort plus an annotated copy
    // of the frame on imgOutPort. Always returns true so the module keeps
    // being scheduled (a NULL read just skips this cycle).
    bool updateModule()
    {
        ImageOf<PixelBgr> *img=imgInPort.read();
        if (img==NULL)
            return true;

        // Guard tracker state (tl, br, initBoundingBox, doCMT) -- presumably
        // set concurrently from another thread; TODO confirm.
        // NOTE(review): plain lock/unlock is not exception-safe (no RAII guard).
        mutex.lock();

        // Grayscale working image with the same geometry as the input frame.
        ImageOf<PixelMono> imgMono;
        imgMono.resize(img->width(),img->height());

        // Wrap both YARP images as cv::Mat headers (no pixel copy), then
        // convert BGR -> grayscale directly into imgMono's buffer.
        cv::Mat imgMat=cv::cvarrToMat((IplImage*)img->getIplImage());
        cv::Mat imgMonoMat=cv::cvarrToMat((IplImage*)imgMono.getIplImage());
        cv::cvtColor(imgMat,imgMonoMat,CV_BGR2GRAY);

        // One-shot (re)initialisation of the tracker with the requested box;
        // the flag is cleared so subsequent frames only track.
        if (initBoundingBox)
        {
            tracker->initialise(imgMonoMat,tl,br);
            initBoundingBox=false;
        }

        if (doCMT)
        {
            if (tracker->processFrame(imgMonoMat))
            {
                // Publish the four box corners, clockwise from top-left,
                // as integer pixel coordinates.
                if (dataOutPort.getOutputCount()>0)
                {
                    Bottle &data=dataOutPort.prepare();
                    data.clear();

                    data.addInt((int)tracker->topLeft.x);
                    data.addInt((int)tracker->topLeft.y);
                    data.addInt((int)tracker->topRight.x);
                    data.addInt((int)tracker->topRight.y);
                    data.addInt((int)tracker->bottomRight.x);
                    data.addInt((int)tracker->bottomRight.y);
                    data.addInt((int)tracker->bottomLeft.x);
                    data.addInt((int)tracker->bottomLeft.y);

                    dataOutPort.write();
                }

                // Draw the box and tracked keypoints onto imgMat, which
                // aliases img's pixels, so the annotations appear in the
                // frame forwarded below.
                if (imgOutPort.getOutputCount()>0)
                {
                    cv::line(imgMat,tracker->topLeft,tracker->topRight,cv::Scalar(255,0,0),2);
                    cv::line(imgMat,tracker->topRight,tracker->bottomRight,cv::Scalar(255,0,0),2);
                    cv::line(imgMat,tracker->bottomRight,tracker->bottomLeft,cv::Scalar(255,0,0),2);
                    cv::line(imgMat,tracker->bottomLeft,tracker->topLeft,cv::Scalar(255,0,0),2);

                    for (size_t i=0; i<tracker->trackedKeypoints.size(); i++) 
                        cv::circle(imgMat,tracker->trackedKeypoints[i].first.pt,3,cv::Scalar(0,255,0));
                }
            }            
        }

        // Forward the (possibly annotated) frame to any connected viewer.
        if (imgOutPort.getOutputCount()>0)
        {
            imgOutPort.prepare()=*img;
            imgOutPort.write();
        }

        mutex.unlock();
        return true;
    }
int main(int argc, char *argv[])
{
   ros::init(argc,argv,"Vision");
   ROS_INFO("node creation");
   ros::NodeHandle nh_v;
   ros::Rate loop_rate(10);

   
    int start = 0;
    bool SelectROI=false;//For bounding box
    cout<<"Do you  want to give the ROI ?"<<endl<<"Press y for YES and n for NO"<<endl;
    char input='n';
    cin>>input;
    if(input!='n')
	SelectROI=true; 

    cout<<"SelectROI : "<<SelectROI<<endl;
	
    CMT cmt;
    
    //Code for normal camera using OpenCV
    /*VideoCapture cap(0);
    cap.set(CV_CAP_PROP_FRAME_WIDTH, 640);
    cap.set(CV_CAP_PROP_FRAME_HEIGHT, 480); 
    bool open=true;
    if (!cap.isOpened())  // if not success, exit program
    {
        cout << "Cannot open the video cam" << endl;
        return -1;
        open=false;
    }
    cvNamedWindow("Initial frame",CV_WINDOW_AUTOSIZE );*/
 

    //Code for using Kinect input using OpenCV
    bool open=true;
    VideoCapture cap;
    cap.open(CV_CAP_OPENNI);
    cap.set( CV_CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, CV_CAP_OPENNI_VGA_30HZ );
    if( !cap.isOpened() )
    {
        cout << "Can not open a capture object." << endl;
        return -1;
    }
    cout << " Device Open " << endl;
 
    //capture.set(CV_CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, CV_CAP_OPENNI_VGA_30HZ);
    if( !cap.grab() )
    	      {
    	              cout << "Can not grab images." << endl;
    	              return -1;
    	      }
   

    cvNamedWindow("Initial frame",CV_WINDOW_AUTOSIZE ); 
    	
    	
 
    
    int Keyhit=-1;//Used to get Enter input from the keyboard for ROI selection
    
    while(Keyhit==-1){

    	//cap.read(img);//used for normal cameras
	cap.grab();
	cap.retrieve(img,CV_CAP_OPENNI_BGR_IMAGE);
	
	if(SelectROI==true)
		putText(img, "Press enter when the object is in the field of view ", cvPoint(20,30),FONT_HERSHEY_COMPLEX_SMALL, 0.6, cvScalar(255,255,255), 1, CV_AA);
	else {
		putText(img, "Using Default Bounding box at the center of frame ", cvPoint(20,30),FONT_HERSHEY_COMPLEX_SMALL, 0.6, cvScalar(255,255,255), 1, CV_AA);
		putText(img, "Press enter to start tracking ", cvPoint(20,60),FONT_HERSHEY_COMPLEX_SMALL, 0.6, cvScalar(255,255,255), 1, CV_AA);
	
	     }

	imshow("Initial frame",img);
	Keyhit=cv::waitKey(1);


    }

    while(open && ros::ok())
    {

    	//Capturing images from the  Kinect OpenCV
	//cap.read(img);//used for normal cameras
	cap.grab();
	cap.retrieve(img,CV_CAP_OPENNI_BGR_IMAGE);
	cv::cvtColor(img, im_gray, CV_RGB2GRAY);
	//imshow("Initial frame",img);
	//cv::waitKey(0);
	

	while(start==0 && SelectROI==true){

		cout<<"Inside ROI selection"<<endl;
	    //set the callback function for any mouse event
		static CvFont font;
		cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX, 0.5, 0.5, 0, 1, 8);
		
		//cout<<"Starting CMT initialization"<<endl;
		setMouseCallback("Initial frame", getRect, NULL); // Mouse Callback function with image as param
 			
		putText(img, "Use mouse to select the rectangle around object to track ", cvPoint(20,30),FONT_HERSHEY_COMPLEX_SMALL, 0.6, cvScalar(255,255,255), 1, CV_AA);
		putText(img, "You can reselect till you press Enter ", cvPoint(20,50),FONT_HERSHEY_COMPLEX_SMALL, 0.6, cvScalar(255,255,255), 1, CV_AA);
		putText(img, "After selecting PRESS ENTER", cvPoint(20,70),FONT_HERSHEY_COMPLEX_SMALL, 0.6, cvScalar(255,255,255), 1, CV_AA);
		
		imshow("Initial frame",img);
		cv::waitKey(0);
		start=1;
		cout<<"ROI Selected Manually"<<endl;
		cout<<"Rectangular Edge Points : "<<initBottomDown<<","<<initTopLeft<<endl;
		cout<<"CMT Initialising "<<endl;
		cmt.initialise(im_gray, initTopLeft, initBottomDown);//Using the Initialise Method with predefined bounding box
		setMouseCallback("Initial frame", NULL, NULL);//Disabling the Callback function
			}

	if(start==0 && SelectROI==false	){//If manual ROI is not selected the default case 
		
		initTopLeft.x=270;
		initTopLeft.y=190;	//Default bounding box positions - Frame Center
		initBottomDown.x=370;
		initBottomDown.y=290;

		cout<<"Rectangular Edge Points : "<<initBottomDown<<","<<initTopLeft<<endl;
		cout<<"CMT Initialising "<<endl;
		cmt.initialise(im_gray, initTopLeft, initBottomDown);//Using the Initialise Method with default bounding box
		start=1;
	}


	//cout<<endl<<"Starting CMT frame processing"<<endl;
    	cmt.processFrame(im_gray);//Using the process frame method
	
	cout<<"The Center is : "<<cmt.CENTER<<endl;

	cout<<"The scaling is : "<<cmt.SCALE<<endl;
	
	cout<<"The orientation is :"<<cmt.ROTATION<<endl;
	



    //cout<<"CMT Frame Processing Done"<<endl;

    for(int i = 0; i<cmt.trackedKeypoints.size(); i++)
    	cv::circle(img, cmt.trackedKeypoints[i].first.pt, 3, cv::Scalar(255,255,255));//draw circles around track points
        
    cv::line(img, cmt.topLeft, cmt.topRight, cv::Scalar(255,255,255));
    cv::line(img, cmt.topRight, cmt.bottomRight, cv::Scalar(255,255,255));
    cv::line(img, cmt.bottomRight, cmt.bottomLeft, cv::Scalar(255,255,255));
    cv::line(img, cmt.bottomLeft, cmt.topLeft, cv::Scalar(255,255,255));

    putText(img, "Tracking ", cvPoint(20,30),FONT_HERSHEY_COMPLEX_SMALL, 0.8, cvScalar(255,255,255), 1, CV_AA);
    imshow("Initial frame", img);//Show the image frame with circles of key points
    cv::waitKey(1);

    }//close for open while loop

   
    return 0;
}