// Entry point for the nmpt_saliency ROS node: subscribes to a camera
// stream, publishes salient-point targets, and spins until shutdown.
// Relies on file-scope globals defined elsewhere: debug_mode, pub,
// salientSpot, lqrpt, bt, and imageCallback.
int main(int argc, char **argv)
   {
        ros::init(argc, argv, "nmpt_saliency");
        // Private node handle ("~") so the "gui" parameter is read from
        // this node's private namespace.
        ros::NodeHandle nh("~");
        nh.getParam("gui",debug_mode);
        // Only create the debug window when the gui parameter asks for it;
        // the window thread is started unconditionally (harmless without a window).
        if (debug_mode) cvNamedWindow("view");
        cvStartWindowThread();
        image_transport::ImageTransport it(nh);

        // All processing happens inside imageCallback (defined elsewhere in this file).
        image_transport::Subscriber sub = it.subscribe("/camera/image_raw", 1, imageCallback);
        pub = nh.advertise<ros_nmpt_saliency::targets>("/nmpt_saliency_point", 50);

        // Seed the tracker with the initial target (lqrpt's initial value is
        // set elsewhere — presumably image center; confirm at its definition).
        salientSpot.setTrackerTarget(lqrpt);
        // Start timer block 1; imageCallback reads it as total per-frame time.
        bt.blockRestart(1);

        ros::spin();
   }
Example #2
0
int main (int argc, char * const argv[]) 
{	
	
	
	string WINDOW_NAME = "Object Tracker"; 
	
	Size patchSize = Size(30,30); 
	
	string GENTLEBOOST_FILE= "data/GenkiSZSLCascade.txt"; 
	
	Size minSearchSize(0,0); 
	Size maxSearchSize(0,0); 
	int numFeaturesToUse = -1;//-1; //3; //-1 means "All"
	int NMSRadius = 15; //15; 
	double patchThresh = -INFINITY;  //-INFINITY means "All", 0 means "p>.5"
	int maxObjects = 20; 
	int skipFrames = 0; 
	int useFast = 1; 
	
	Size imSize(320,240); 
	int key=0;
	
	/* Open capture */ 
	VideoCapture capture; 
	int usingCamera = NMPTUtils::getVideoCaptureFromCommandLineArgs(capture, argc, (const char**) argv); 
	if (!usingCamera--) return 0;  
	
	/* Set capture to desired width/height */ 
	if (usingCamera) {
		capture.set(CV_CAP_PROP_FRAME_WIDTH, imSize.width); 
		capture.set(CV_CAP_PROP_FRAME_HEIGHT, imSize.height); 
	}

	
    namedWindow (WINDOW_NAME, CV_WINDOW_AUTOSIZE); //Create the graphical algorithm display
    Mat current_frame, color_image, gray_image;
	
	BlockTimer bt; 
		
	GentleBoostCascadedClassifier* booster = new GentleBoostCascadedClassifier(); 
	
	ifstream in; 
	in.open(GENTLEBOOST_FILE.c_str()); 
	in >> booster; 
	in.close(); 
	booster->setSearchParams(useFast, minSearchSize, maxSearchSize,1.2, 1, 1); 
	booster->setNumFeaturesUsed(numFeaturesToUse); 
	
	int i = 0; 
    while (key != 'q' && key != 'Q') //Loop until user enters 'q'
    {
		
		//cout<< "Getting camera frame " << endl; 
		capture >> current_frame; 
		if (i++%(skipFrames+1) > 0 && !usingCamera) continue; 
		
		current_frame.copyTo(color_image); 
		cvtColor(current_frame, gray_image, CV_RGB2GRAY); 
				
		vector<SearchResult> boxes; 
		
		bt.blockRestart(2); 
		Mat img = gray_image; 
		booster->searchImage(img, boxes, NMSRadius, patchThresh); 
		cout << "Image Search Time was " << bt.getCurrTime(2)<< endl; 		
		
		if (boxes.size() > (unsigned int) maxObjects) {
			boxes.resize(maxObjects); 
		} 
		
		for (size_t i = 0; i < boxes.size(); i++) {
			Rect imgloc = boxes[i].imageLocation; 
			Point center = Point(imgloc.x + imgloc.width/2.0, imgloc.y + imgloc.height/2.0); 
			Scalar color; 
			if (boxes[i].value > 0 ) 
				color = (i== 0) ? Scalar(0,0,255): Scalar(0,255,255); 
			else color = Scalar(0,0,0); 
			circle(color_image, center, imgloc.width/2.0, color, 3); 
			circle(color_image, center, 2, color, 3); 
		}
		
		imshow(WINDOW_NAME, color_image);
		
		key = cvWaitKey(5);
	} 
	
	return 0;
}
// Per-frame ROS image callback: converts the incoming message to BGR,
// downscales it, runs the NMPT saliency tracker, publishes the tracked
// salient point on "pub", and optionally shows a side-by-side debug view.
// Uses file-scope globals defined elsewhere: im2, im, sal, viz, imSize,
// salTracker, salientSpot, lqrpt, pt, pub, bt, debug_mode.
void imageCallback(const sensor_msgs::ImageConstPtr& msg)
{
	double saltime, tottime, degree;
	//	capture >> im2;
   	cv_bridge::CvImagePtr cv_ptr;
	//sensor_msgs::Image salmap_;
   //CvBridge bridge;
   try
   {
        cv_ptr = cv_bridge::toCvCopy(msg, "bgr8");
        im2=cv_ptr->image;
        /* Transforming down the image: scale so width matches imSize.width. */
        double ratio = imSize.width * 1. / im2.cols;
        resize(im2, im, Size(0,0), ratio, ratio, INTER_NEAREST);

        // viz holds two panels side by side: input (left) and saliency map (right).
        viz.create(im.rows, im.cols*2, CV_32FC3);

        bt.blockRestart(0);
        vector<KeyPoint> pts;
        salTracker.detect(im, pts);
        // saltime is computed but never used below — presumably left over
        // from debug logging; NOTE(review): confirm before removing.
        saltime = bt.getCurrTime(0) ;

        salTracker.getSalImage(sal);

        // Locate the single most salient pixel in the saliency map.
        double min, max;
        Point minloc, maxloc;
        minMaxLoc(sal, &min, &max, &minloc, &maxloc);

        // Normalize the peak location to [0,1] before feeding the tracker.
        lqrpt[0] = maxloc.x*1.0 / sal.cols;
        lqrpt[1] = maxloc.y*1.0 / sal.rows;

        salientSpot.setTrackerTarget(lqrpt);

        // Right panel: saliency map rendered as BGR.
        Mat vizRect = viz(Rect(im.cols,0,im.cols, im.rows));
        cvtColor(sal, vizRect, CV_GRAY2BGR);

        // Left panel: input frame converted to float in [0,1).
        vizRect = viz(Rect(0, 0, im.cols, im.rows));
        im.convertTo(vizRect,CV_32F, 1./256.);

        // Advance the tracker one step and read back the smoothed position.
        salientSpot.updateTrackerPosition();
        lqrpt = salientSpot.getCurrentPosition();

        /* assign the geometric coordinates of points identified as salient to the var. pt(Point type)*/
        pt.x=lqrpt[0];
        pt.y=lqrpt[1];
        pt.z=0;
        /* call a function to get the degree of saliency of the current point*/
        // NOTE(review): degree is hard-coded to 0 — the saliency-degree
        // function was never hooked up, so only the final (red) branch
        // below ever executes.
        degree = 0;


        /* for the purpose of visualizing points above degree 7:the most certain
        green for degree >=7 features, yellow for degree <7 and >=4 ...
         */
        // Scale normalized coords back to pixel space (320x240 hard-coded,
        // matching imSize; vizRect currently aliases the left panel).
        double realx = 320*pt.x;
        double realy = 240*pt.y;

        if(degree >= 7) {       circle(vizRect,cvPoint(realx,realy),5,CV_RGB(0,255,0),-1);        }
        else if(degree >= 4) {       circle(vizRect,cvPoint(realx,realy),5,CV_RGB(128,128,0),-1);        }
        else {       circle(vizRect,cvPoint(realx,realy),5,CV_RGB(255,0,0),-1);        }

        //end: discard me

        // Publish the tracked point (plus its — currently stubbed — degree).
        ros_nmpt_saliency::targets trg;
        trg.positions.push_back(pt);
        trg.degree=degree;
        pub.publish(trg);
        // Re-render the right panel after drawing (overwrites the marker there).
        vizRect = viz(Rect(im.cols,0,im.cols, im.rows));
        cvtColor(sal, vizRect, CV_GRAY2BGR);

        // tottime measures the full interval since the previous frame
        // (block 1 is restarted here and in main); currently unused.
        tottime = bt.getCurrTime(1);
        bt.blockRestart(1);

        if (debug_mode){
            imshow("view",viz);
            waitKey(1);
        }

   }
   catch (...)
        {
            // Catch-all keeps the node alive on conversion/processing errors;
            // the message assumes the failure was the cv_bridge conversion.
            ROS_ERROR("Could not convert from '%s' to 'bgr8'.", msg->encoding.c_str());
        }
}