Example #1
// Decide whether two views form a usable stereo pair: estimate the essential
// matrix from the matched keypoints and check that enough matches survive as inliers.
bool Epipolar::isValidPair(vector<DMatch>& matches, vector<KeyPoint>& key1, vector<KeyPoint>& key2, Mat& cam, Mat& distor, Mat& ess, Mat& inliersMask, double inlierPercent){
	vector<Point2f> pts1, pts2;
	inliersMask.release();
	Mat rot, trans;
	size_t n = matches.size();
	// Gather the matched keypoint coordinates into two aligned point lists
	Utility::getPointMatches(key1, key2, matches, pts1, pts2);
	// Undistort and normalize the points so the essential matrix can be estimated
	// with an identity camera (focal length 1.0, principal point at the origin)
	undistortPoints(pts1, pts1, cam, distor);
	undistortPoints(pts2, pts2, cam, distor);
	ess = findEssentialMat(pts1, pts2, 1.0, Point2d(0, 0), RANSAC, 0.999, 1.25, inliersMask);
	// Recover the relative pose and count the matches consistent with it
	int inliers = recoverPose(ess, pts1, pts2, rot, trans, 1.0, Point2d(0, 0), inliersMask);
	return ((double)inliers / n) > inlierPercent;
}
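For context, a minimal sketch of how this helper might be invoked. The ORB detector, the brute-force matcher, the input images img1/img2 and the calibration matrices K and dist below are illustrative assumptions, not part of the original snippet; only matches, key1/key2 and the call itself mirror the signature above.

#include <opencv2/features2d/features2d.hpp>
#include <opencv2/calib3d/calib3d.hpp>

// img1, img2: two views of the scene; K, dist: intrinsics and distortion
// coefficients. All four are assumed to be loaded elsewhere.
vector<KeyPoint> key1, key2;
Mat desc1, desc2;
Ptr<ORB> orb = ORB::create();
orb->detectAndCompute(img1, noArray(), key1, desc1);
orb->detectAndCompute(img2, noArray(), key2, desc2);

vector<DMatch> matches;
BFMatcher(NORM_HAMMING, true).match(desc1, desc2, matches);

Mat ess, inliersMask;
Epipolar epi;
// Accept the pair if more than half of the matches are inliers of the recovered pose.
bool usable = epi.isValidPair(matches, key1, key2, K, dist, ess, inliersMask, 0.5);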
Example #2
// Image callback: process every other frame
void imgcb(const sensor_msgs::Image::ConstPtr& msg)
{
	oneTwo = !oneTwo;  // toggle the frame-skip flag
	if(oneTwo)
	{
    try
    {
      // Convert the ROS image to a cv::Mat (toCvShare avoids a copy)
      cv_bridge::CvImageConstPtr cv_ptr;
      cv_ptr = cv_bridge::toCvShare(msg);
      
      // Erosion/dilation parameters (declared but not used in this callback)
      int erosion_size = 0;
      int dilation_size = 0;

      Mat elementE;
      Mat elementD;
      
      // First frame: allocate the working images and reset the color counters
      if (!initial)
      {
        prevImg = cv::Mat(cv_ptr->image.rows, cv_ptr->image.cols, cv_ptr->image.type());
        currImg = cv::Mat(cv_ptr->image.rows, cv_ptr->image.cols, cv_ptr->image.type());

        nbGreen  = 0;
        nbOrange = 0;
        nbFrames = 1;

        initial = true;
      }
      
      // Working matrices (the grayscale buffers are single-channel CV_8UC1)
      cv::Mat *prevImgBlur = new cv::Mat(cv_ptr->image.rows, cv_ptr->image.cols, cv_ptr->image.type());
      cv::Mat *currImgBlur = new cv::Mat(cv_ptr->image.rows, cv_ptr->image.cols, cv_ptr->image.type());
      cv::Mat *diffImg     = new cv::Mat(cv_ptr->image.rows, cv_ptr->image.cols, CV_8UC1);
      cv::Mat *prevImgGrey = new cv::Mat(cv_ptr->image.rows, cv_ptr->image.cols, CV_8UC1);
      cv::Mat *currImgGrey = new cv::Mat(cv_ptr->image.rows, cv_ptr->image.cols, CV_8UC1);
      
      // Blur component
      cv::Point myAnchor = cv::Point(-1,-1);
      cv::Size sizeBlur = Size(10,10);

      cv::imshow("Initial", cv_ptr->image);
      cv::waitKey(1);  // Update screen
      
      /*********************************/
      
      ///////////////////
      //               //
      //    Tracker    //
      //               //
      ///////////////////
      
      // Get current image and update previous one
      currImg.copyTo(prevImg);
      cv_ptr->image.copyTo(currImg);
      
      cvtColor(prevImg, *prevImgGrey, CV_BGR2GRAY );
      cvtColor(currImg, *currImgGrey, CV_BGR2GRAY );
      
      // Blur both images
      blur(*prevImgGrey, *prevImgBlur, sizeBlur, myAnchor, BORDER_DEFAULT );
      blur(*currImgGrey, *currImgBlur, sizeBlur, myAnchor, BORDER_DEFAULT );
      
      // Diff between both images
      absdiff(*prevImgBlur, *currImgBlur, *diffImg);
              
      // Threshold on the diff
      threshold( *diffImg, *diffImg, 3, 255, 0 );
      
      // Morphological opening (remove small objects from the foreground)
      erode(*diffImg, *diffImg, getStructuringElement(MORPH_ELLIPSE, Size(5, 5)) );
      dilate( *diffImg, *diffImg, getStructuringElement(MORPH_ELLIPSE, Size(5, 5)) );

      // Morphological closing (fill small holes in the foreground)
      dilate( *diffImg, *diffImg, getStructuringElement(MORPH_ELLIPSE, Size(20, 20)) );
      erode(*diffImg, *diffImg, getStructuringElement(MORPH_ELLIPSE, Size(20, 20)) );

      cv::imshow("diffImg", *diffImg);
      
      vector<vector<Point> > contours;
      vector<Vec4i> hierarchy;
      
      // Find contours
      findContours( *diffImg, contours, hierarchy, CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );

      // Approximate contours to polygons and compute their bounding rects
      vector<vector<Point> > contours_poly( contours.size() );
      vector<Rect> boundRect( contours.size() );
			
      for( int i = 0; i < contours.size(); i++ )
      {
        approxPolyDP( Mat(contours[i]), contours_poly[i], 3, true );
        boundRect[i] = boundingRect( Mat(contours_poly[i]) );
      }
      
      int j = 0;
      vector<Rect> lBoundRect( contours.size() );
      for( int i = 0; i < contours.size(); i++ )
      {
        if( (boundRect[i].area()) > MIN_AREA_RECTANGLE)
        {
          lBoundRect[j] = boundRect[i];
          j++;
        }
      }
			
			// If there is some movement
			if(j != 0)
			{
		    // Draw the retained bounding rects as filled rectangles on a black mask
		    Mat drawing = Mat::zeros( diffImg->size(), CV_8UC3 );
		    Mat andFilter = Mat::zeros( diffImg->size(), CV_8UC3 );
		    
		    for( int i = 0; i< j; i++ )
		    {
		      //Scalar color = Scalar( rng.uniform(0, 255), rng.uniform(0,255), rng.uniform(0,255) );
		      rectangle( drawing, lBoundRect[i].tl(), lBoundRect[i].br(), Scalar(255,255,255), CV_FILLED, 8, 0 );
		    }
		    
		    // AND filter on the initial image
		    bitwise_and(drawing, currImg, andFilter, noArray());
		    
		    // Find color
		    bool isGreen =  detect_color(Scalar(70,64,0), Scalar(100,139,255), &andFilter);
		    bool isOrange =  detect_color(Scalar(0,128,0), Scalar(20,200,255), &andFilter);
		    
		    if(isGreen)
		    {
		    	nbGreen += 1;
		    }
		    if(isOrange)
		    {
		    	nbOrange += 1;
		    }
		    
		    // Send data

		    /// Show in a window
		    cv::imshow("final", andFilter);
		    
      	// drawing and andFilter are released automatically when they go out of scope
	    }
	    else
	    {
	    	// Find color
		    bool isGreen =  detect_color(Scalar(70,64,0), Scalar(100,139,255), &currImg);
		    bool isOrange =  detect_color(Scalar(0,128,0), Scalar(20,200,255), &currImg);
		    
		    if(isGreen)
		    {
		    	nbGreen += 1;
		    }
		    if(isOrange)
		    {
		    	nbOrange += 1;
		    }
	    }
      
      // Free the heap-allocated working matrices (delete invokes the cv::Mat
      // destructor, which also releases the pixel buffers)
      delete prevImgBlur;
      delete currImgBlur;
      delete diffImg;
      delete currImgGrey;
      delete prevImgGrey;
      
      // After roughly one second of frames, report which colors were seen in
      // more than half of them, then reset the counters
      if(nbFrames > FRAME_PER_SEC)
      {
        if(nbGreen > FRAME_PER_SEC_DIV_2)
        {
          if(nbOrange > FRAME_PER_SEC_DIV_2)
          {
            ROS_INFO("Green & Orange");
          }
          else
          {
            ROS_INFO("Green");
          }
        }
        else
        {
          if(nbOrange > FRAME_PER_SEC_DIV_2)
          {
            ROS_INFO("Orange");
          }
        }

        nbGreen  = 0;
        nbOrange = 0;
        nbFrames = 0;
      }
      else
      {
        nbFrames += 1;
      }
        
    } catch (const cv_bridge::Exception& e)
    {
        ROS_ERROR("cv_bridge exception: %s", e.what());
    }
  }
}
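The detect_color helper used above is not shown in this example. Here is a minimal sketch of what such a helper might look like, assuming it thresholds the image in HSV space and reports whether enough pixels fall between the two bounds; the color conversion, the inRange test and the pixel-count threshold are illustrative assumptions, not the original implementation.

// Hypothetical detect_color: true if enough pixels of *img fall inside [lower, upper] in HSV space.
bool detect_color(const cv::Scalar& lower, const cv::Scalar& upper, cv::Mat* img)
{
  cv::Mat hsv, mask;
  cv::cvtColor(*img, hsv, CV_BGR2HSV);   // the callback works on BGR images
  cv::inRange(hsv, lower, upper, mask);  // 255 where a pixel lies between the bounds
  return cv::countNonZero(mask) > 200;   // illustrative pixel-count threshold
}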
Example #3
File: main.cpp Project: hangqiu/ZED
int main(int argc, char **argv) {

    if (argc > 2) {
        std::cout << "Only one file path can be passed as an argument" << std::endl;
        return -1;
    }

    Camera* zed;

    if (argc == 1) zed = new Camera(HD720);
    else zed = new Camera(argv[1]);

    // Initialize the computation mode of the ZED
    ERRCODE err = zed->init(MODE::QUALITY, -1, true); // QUALITY mode needs quite a powerful graphics card

    // ERRCODE display
    cout << errcode2str(err) << endl;

    // Quit if an error occurred
    if (err != SUCCESS) {
        delete zed;
        return 1;
    }

    // print on screen the keys that can be used
    bool printHelp = false;
    std::string helpString = "[p] increase distance, [m] decrease distance, [q] quit";

    // get the focal length and the baseline of the ZED
    float fx = zed->getParameters()->RightCam.fx; // here we work with the right camera
    float baseline = zed->getParameters()->baseline;

    // get width and height of the ZED images
    int width = zed->getImageSize().width;
    int height = zed->getImageSize().height;

    // create and alloc GPU memory for the disparity matrix
    Mat disparityRightGPU;
    disparityRightGPU.data = (unsigned char*) nppiMalloc_32f_C1(width, height, &disparityRightGPU.step);
    disparityRightGPU.setUp(width, height, 1, sl::zed::FLOAT, GPU);

    // create and alloc GPU memory for the depth matrix
    Mat depthRightGPU;
    depthRightGPU.data = (unsigned char*) nppiMalloc_32f_C1(width, height, &depthRightGPU.step);
    depthRightGPU.setUp(width, height, 1, sl::zed::FLOAT, GPU);

    // create and alloc GPU memory for the image matrix
    Mat imageDisplayGPU;
    imageDisplayGPU.data = (unsigned char*) nppiMalloc_8u_C4(width, height, &imageDisplayGPU.step);
    imageDisplayGPU.setUp(width, height, 4, sl::zed::UCHAR, GPU);

    // create a CPU image for display purpose
    cv::Mat imageDisplay(height, width, CV_8UC4);

    float depthMax = 6.; // meters
    bool depthMaxAsChanged = true;

    char key = ' ';

    // launch a loop
    bool run = true;
    while (run) {

        // Grab the current images and compute the disparity
        bool res = zed->grab(RAW, 0, 1);

        // get the right image
        // !! WARNING !! this is not a copy: we work directly with the data allocated by the zed object.
        // This is only valid if the method is called ONCE before the next grab; make a copy if you need to keep multiple images.
        Mat imageRightGPU = zed->getView_gpu(STEREO_RIGHT);

        // get the disparity
        // !! WARNING !! this is not a copy: we work directly with the data allocated by the zed object.
        // This is only valid if the method is called ONCE before the next grab; make a copy if you need to keep multiple measures.
        Mat disparityGPU = zed->retrieveMeasure_gpu(DISPARITY);

        // Call the CUDA function that converts the disparity from left to right
        cuConvertDisparityLeft2Right(disparityGPU, disparityRightGPU);

        // Call the CUDA function that converts disparity to depth
        cuConvertDisparity2Depth(disparityRightGPU, depthRightGPU, fx, baseline);

        // Call the CUDA function that converts depth to a color map and merges it with the current right image
        cuOverlayImageAndDepth(depthRightGPU, imageRightGPU, imageDisplayGPU, depthMax);

        // Copy the processed image from the GPU to the CPU for display
        cudaMemcpy2D((uchar*) imageDisplay.data, imageDisplay.step, (Npp8u*) imageDisplayGPU.data, imageDisplayGPU.step, imageDisplayGPU.getWidthByte(), imageDisplayGPU.height, cudaMemcpyDeviceToHost);

        if (printHelp) // write help text on the image if needed
            cv::putText(imageDisplay, helpString, cv::Point(20, 20), CV_FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(111, 111, 111, 255), 2);

        // display the result
        cv::imshow("Image right Overlay", imageDisplay);
        key = cv::waitKey(20);

        switch (key) // handle the pressed key
        {
            case 'q': // close the program
            case 'Q':
                run = false;
                break;

            case 'p': // increase the distance threshold
            case 'P':
                depthMax += 1;
                depthMaxAsChanged = true;
                break;

            case 'm': // decrease the distance threshold
            case 'M':
                depthMax = (depthMax > 1 ? depthMax - 1 : 1);
                depthMaxAsChanged = true;
                break;

            case 'h': // print help
            case 'H':
                printHelp = !printHelp;
                cout << helpString << endl;
                break;
            default:
                break;
        }

        if (depthMaxAsChanged) {
            cout << "New distance max " << depthMax << "m" << endl;
            depthMaxAsChanged = false;
        }
    }

    // free all the allocated memory before quitting
    imageDisplay.release();
    disparityRightGPU.deallocate();
    depthRightGPU.deallocate();
    imageDisplayGPU.deallocate();
    delete zed;

    return 0;
}
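The cuConvertDisparityLeft2Right, cuConvertDisparity2Depth and cuOverlayImageAndDepth kernels belong to the sample project itself. The depth conversion relies on the standard stereo relation depth = fx * baseline / disparity. Below is a rough CPU sketch of that step, assuming a single-channel 32-bit float disparity map; the function name and the handling of invalid values are illustrative assumptions, not part of the sample.

// Hypothetical CPU equivalent of the disparity-to-depth step, for illustration only.
#include <cmath>
#include <opencv2/core/core.hpp>

cv::Mat disparityToDepth(const cv::Mat& disparity, float fx, float baseline)
{
    CV_Assert(disparity.type() == CV_32FC1);
    cv::Mat depth(disparity.size(), CV_32FC1);
    for (int y = 0; y < disparity.rows; ++y) {
        for (int x = 0; x < disparity.cols; ++x) {
            // Sign conventions differ between SDKs, so use the disparity magnitude.
            float d = std::fabs(disparity.at<float>(y, x));
            // Zero disparity carries no depth information; mark it as 0 here.
            depth.at<float>(y, x) = (d > 0.f) ? fx * baseline / d : 0.f;
        }
    }
    return depth;
}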