Example #1
//Calculates the total matching cost between two graphs given their node signatures
float GraphMatch::matchTwoImages(vector<NodeSig> ns1, vector<NodeSig> ns2,
                                 Mat& assignment, Mat& cost)
{
    //Construct a [rows x cols] cost matrix using the pairwise
    //distance between node signatures
    int rows = ns1.size();
    int cols = ns2.size();

    int** cost_matrix = (int**)calloc(rows,sizeof(int*));
    for(int i = 0; i < rows; i++)
    {
        cost_matrix[i] = (int*)calloc(cols,sizeof(int));
        for(int j = 0; j < cols; j++)
        {
            //Multiplied by a constant because the hungarian algorithm accepts an integer
            //cost matrix; we later divide by the same constant for correction
            cost_matrix[i][j] = GraphMatch::calcN2NDistance(ns1[i], ns2[j])*MULTIPLIER_1000;
        }
    }

    hungarian_problem_t p;

    // initialize the hungarian_problem using the cost matrix
    hungarian_init(&p, cost_matrix , rows, cols, HUNGARIAN_MODE_MINIMIZE_COST);

    // solve the assignment problem
    hungarian_solve(&p);

    //Convert results to OpenCV format
    assignment = array2Mat8U(p.assignment,rows,cols);
    cost = array2Mat32F(cost_matrix,rows,cols);
    //Divide for correction
    cost = cost / MULTIPLIER_1000;

    //free the hungarian problem and the caller-owned cost matrix
    hungarian_free(&p);
    for(int i = 0; i < rows; i++)
        free(cost_matrix[i]);
    free(cost_matrix);

    // Calculate match score
    float matching_cost = 0;


    //Get the non-zero indices, which define the optimal match (assignment)
    vector<Point> nonzero_locs;
    findNonZero(assignment,nonzero_locs);

    //Find optimal match cost
    for(int i = 0; i < nonzero_locs.size(); i++)
    {
        matching_cost = matching_cost + cost.at<float>(nonzero_locs[i].y, nonzero_locs[i].x);
    }

    return matching_cost;

}
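The lookup at the end of this example depends on how cv::findNonZero reports positions: one cv::Point per set entry of the binary assignment matrix, with x as the column (an index into ns2) and y as the row (an index into ns1). A minimal, self-contained sketch of that pattern, with a hand-built 0/1 matrix standing in for the Hungarian output:

#include <opencv2/core.hpp>
#include <cstdio>
#include <vector>

int main()
{
    //3x3 binary "assignment" matrix: each row is matched to the column holding a 1
    cv::Mat assignment = (cv::Mat_<uchar>(3, 3) << 0, 1, 0,
                                                   1, 0, 0,
                                                   0, 0, 1);

    std::vector<cv::Point> nonzero_locs;
    cv::findNonZero(assignment, nonzero_locs);   //one Point (x = col, y = row) per non-zero cell

    for (size_t i = 0; i < nonzero_locs.size(); i++)
        std::printf("row %d -> col %d\n", nonzero_locs[i].y, nonzero_locs[i].x);
    return 0;
}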
Example #2
/**
 * Create the hough accumulation matrix, and vote for each pair of symmetrical edges
 */
void FastSymmetryDetector::vote( Mat& image, int min_pair_dist, int max_pair_dist ) {
    float min_dist = min_pair_dist * 0.5;
    float max_dist = max_pair_dist * 0.5;
    
    /* Make sure that we reset the accumulation matrix and rotated edges matrix */
    accum    = Scalar::all(0);
    rotEdges = Scalar::all(0);
    
    /* Find all the pixels of the edges */
    vector<Point> temp_edges;
    findNonZero( image, temp_edges );
    
    /* Translate them in relation to center of the image */
    vector<Point2f> edges;
    for( Point point: temp_edges )
        edges.push_back( Point2f( point.x - center.x, point.y - center.y ) );
    
    /* For each degree of rotation */
    for( int t = 0; t < thetaMax; t++ ) {
        float * accum_ptr = accum.ptr<float>(t);
        
        /* Rotate edge to that degree */
        rotateEdges( edges, t );
        
        for( int i = 0; i < rhoDivision; i++ ) {
            float * col_start   = rotEdges.ptr<float>(i);
            float * col_end     = reRows[i];
            
            /* Skip rows of the rotated-edge table with fewer than two entries: no pairs to vote for */
            if( (col_end - col_start) <= 1 ) {
                continue;
            }
            
            /* Vote for Hough matrix */
            for( float * x0 = col_start; x0 != col_end - 1; x0++ ) {
                for( float * x1 = x0 + 1; x1 != col_end; x1++ ) {
                    float dist = fabs( *x1 - *x0 );

                    if( !within(dist, min_dist, max_dist) )
                        break;
                    
                    int rho_index = static_cast<int>(*x0 + *x1);
                    accum_ptr[rho_index]++;
                }
            }
        }
    }
}
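The voting loop assumes the edge pixels have already been gathered with findNonZero and shifted relative to the image centre, which is what the first half of vote() does. A minimal, self-contained sketch of just that collection step (the helper name and the Canny thresholds are placeholders, not part of FastSymmetryDetector):

#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <vector>

/* Hypothetical helper: edge pixels expressed relative to the image centre */
static std::vector<cv::Point2f> centeredEdges( const cv::Mat& gray )
{
    cv::Mat edgeMap;
    cv::Canny( gray, edgeMap, 50, 150 );          /* placeholder thresholds */

    std::vector<cv::Point> raw;
    cv::findNonZero( edgeMap, raw );              /* one Point per edge pixel */

    cv::Point2f center( gray.cols * 0.5f, gray.rows * 0.5f );
    std::vector<cv::Point2f> edges;
    edges.reserve( raw.size() );
    for( cv::Point point: raw )
        edges.push_back( cv::Point2f( point.x - center.x, point.y - center.y ) );
    return edges;
}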
Example #3
void Img::boundarize() {
	Mat image_gray, canny_output, nonzeros;
	int thresh = 100;
	vector<vector<Point> > contours;
	vector<Vec4i> hierarchy;
	RNG rng(12345);
	
	cvtColor( this->image, image_gray, CV_BGR2GRAY );
	blur( image_gray, image_gray, Size(3,3) );

	Canny( image_gray, canny_output, thresh, thresh*2, 3 );
	findContours( canny_output, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0) );

	this->image =   Mat::zeros( canny_output.size(), CV_8UC3 );

	for( unsigned int i = 0; i< contours.size(); i++ )
	{
	       Scalar color = Scalar( rng.uniform(0, 255), rng.uniform(0,255), rng.uniform(0,255) );
	       drawContours( this->image, contours, i, color, 2, 8, hierarchy, 0, Point() );
	}

	cvtColor( this->image, this->image, CV_BGR2GRAY );
	threshold( this->image, this->image, 0, 255, 0 );	// type 0 == THRESH_BINARY
	findNonZero(this->image, nonzeros);
	
	this->points = nonzeros.rows;
	
	delete [] vx;
	delete [] vy;

	std::cout << "\n[+] Initiallizing vectors : "<<points<<" points detected";

	vx = new int[points+1];
	std::cout << "\n[+] Vx initiallized";
	vy = new int[points+1];
	std::cout << "\n[+] Vy Initiallized";
	
	this->sampling();
	
	
}
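When findNonZero is handed a cv::Mat instead of a std::vector, as above, it fills an N x 1 matrix of type CV_32SC2, so nonzeros.rows is simply the number of white pixels. A minimal sketch of that counting idiom, assuming a single-channel mask with at least one set pixel (cv::countNonZero returns the same number without storing the coordinates):

#include <opencv2/core.hpp>

int countWhitePixels( const cv::Mat& binary )
{
	cv::Mat nonzeros;                     // N x 1, CV_32SC2: one (x, y) pair per non-zero pixel
	cv::findNonZero( binary, nonzeros );
	CV_Assert( nonzeros.rows == cv::countNonZero( binary ) );
	return nonzeros.rows;
}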
Example #4
void caasCLR4Tx1::RefineIsolator()
{
	//int width = isolatorRightEdge - isolatorLeftEdge;
	//int height = isolatorBottomEdge - isolatorTopEdge;
	//Expand the ROI: double the width to the right (capped near the target's left edge)
	//and add a quarter of the height above and below

	int left = isolatorLeftEdge;
	int right = left + 2 * isolatorWidth;
	if (right > targetLeftEdge - 20) right = targetLeftEdge - 20;
	int top = isolatorTopEdge - isolatorHeight / 4;
	int bottom = top + isolatorHeight + isolatorHeight / 2;
	Rect roi = Rect(left, top, right - left, bottom - top);
	imageIsolatorRoi = imageGray(roi);
#if _DEBUG
	imwrite("5.1.IsolatorRoi.jpg", imageIsolatorRoi);
#endif

	int scale = 4;
	resize(imageIsolatorRoi, imageIsolatorRoi, Size(imageIsolatorRoi.cols / scale, imageIsolatorRoi.rows / scale));

	Mat imageBlurred, imageGraySharpened;
	double GAUSSIAN_RADIUS = 4.0;
	GaussianBlur(imageIsolatorRoi, imageBlurred, Size(0, 0), GAUSSIAN_RADIUS);
	addWeighted(imageIsolatorRoi, 2.5, imageBlurred, -1.5, 0, imageGraySharpened);
#if _DEBUG
	imwrite("5.2.IsolatorRoiSharpened.jpg", imageGraySharpened);
#endif

	int median = Median(imageGraySharpened);
	Mat imageCanny;
	Canny(imageGraySharpened, imageCanny, 0.66 * median, 1.33 * median);
#if _DEBUG
	imwrite("5.3.IsolatorRoiCanny.jpg", imageCanny);
#endif

	Mat Points;
	findNonZero(imageCanny, Points);	//locations of all edge pixels
	Rect Min_Rect = boundingRect(Points);
	//Map the bounding box's right edge from the downscaled ROI back to full-image coordinates
	isolatorRightEdge = roi.x + (Min_Rect.x + Min_Rect.width) * scale;
	RotatedRect rect = minAreaRect(Points);
	isolatorAngle = rect.angle;
}
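boundingRect and minAreaRect both accept the N x 1 CV_32SC2 matrix that findNonZero produces, so the edge locations never need to be copied into a vector. A minimal standalone sketch of that pattern, assuming a non-empty single-channel mask (the helper name is illustrative, not part of caasCLR4Tx1):

#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
using namespace cv;

//Hypothetical helper: tightest upright and rotated boxes around the white pixels of a mask
static void boxesAroundMask(const Mat& mask, Rect& upright, RotatedRect& rotated)
{
	Mat Points;
	findNonZero(mask, Points);          //N x 1, CV_32SC2
	upright = boundingRect(Points);     //axis-aligned bounding box
	rotated = minAreaRect(Points);      //minimum-area rotated box (angle in degrees)
}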
Example #5
void findEyeCenterByColorSegmentation(const Mat& image, Point2f & eyeCoord, float coordinateWeight, int kmeansIterations, int kmeansRepeats, int blurSize)  {
    
    Mat img, gray_img;
    Mat colorpoints, kmeansPoints;
    
    img = equalizeImage(image);
    
    medianBlur(img, img, blurSize);
    cvtColor(image, gray_img, CV_BGR2GRAY);
    gray_img = imcomplement(gray_img);
    vector<Mat> layers(3);
    split(img, layers);
    for (int i = 0 ; i < layers.size(); i++) {
        layers[i] = layers[i].reshape(1,1).t();
    }
    hconcat(layers, colorpoints);
    
    // add coordinates
    colorpoints.convertTo(colorpoints, CV_32FC1);
    Mat coordinates = matrixPointCoordinates(img.rows,img.cols,false) *coordinateWeight;
    hconcat(colorpoints, coordinates, kmeansPoints);
    
    Mat locIndex(img.size().area(),kmeansIterations,CV_32FC1,Scalar::all(-1));
    linspace(0, img.size().area(), 1).copyTo(locIndex.col(0));
    Mat index_img(img.rows,img.cols,CV_32FC1,Scalar::all(0));
    Mat bestLabels, centers, clustered , colorsum , minColorPtIndex;
    for(int it = 1 ; it < kmeansIterations ; it++) {
        if (kmeansPoints.rows < 2) {
            break;
        }
        kmeans(kmeansPoints,2,bestLabels,TermCriteria(CV_TERMCRIT_EPS+CV_TERMCRIT_ITER, kmeansRepeats, 0.001),kmeansRepeats,KMEANS_PP_CENTERS,centers);
        reduce(centers.colRange(0, 3), colorsum, 1, CV_REDUCE_SUM);
        
        // keep the points assigned to the cluster whose centre has the smaller
        // colour sum, i.e. the darker of the two clusters
        if (colorsum.at<float>(0) < colorsum.at<float>(1)) {
            findNonZero(bestLabels==0, minColorPtIndex);
        }
        else {
            findNonZero(bestLabels==1, minColorPtIndex);
        }
        
        // reshape the Nx1 CV_32SC2 point list to Nx2 and keep the y column,
        // which holds the row indices of the surviving samples
        minColorPtIndex = minColorPtIndex.reshape(1).col(1);

        // carry the original pixel index of each surviving sample into this iteration
        for (int i = 0; i < minColorPtIndex.rows; i++) {
            locIndex.at<float>(i,it) = locIndex.at<float>(minColorPtIndex.at<int>(i),it-1);
        }
        // keep only the surviving samples for the next k-means pass
        Mat temp;
        for (int i = 0; i < minColorPtIndex.rows; i++) {
            temp.push_back(kmeansPoints.row(minColorPtIndex.at<int>(i)));
        }
        temp.copyTo(kmeansPoints);
        temp.release();
        // accumulate a vote at every pixel that survived this iteration
        for (int i = 0; i < minColorPtIndex.rows; i++) {
            int r, c;
            ind2sub(locIndex.at<float>(i,it), index_img.cols, index_img.rows, r, c);
            index_img.at<float>(r,c) += 1;
        }
    }
    //    imagesc("layered",mat2gray(index_img));
    Mat layerweighted_img = index_img.mul(index_img);
    layerweighted_img = mat2gray(layerweighted_img);
    gray_img.convertTo(gray_img, CV_32FC1,1/255.0);
    Mat composed  = gray_img.mul(layerweighted_img);
    float zoomRatio = 5.0f;
    Mat zoomed;
    imresize(composed, zoomRatio, zoomed);
    Mat score = calculateImageSymmetryScore(zoomed);
//    imagesc("score", score);
    Mat scoresum;
    reduce(score.rowRange(0, zoomed.cols/6), scoresum, 0, CV_REDUCE_SUM,CV_32FC1);
//    plotVectors("scoresum", scoresum.t());
    double minVal , maxVal;
    Point minLoc, maxLoc;
    minMaxLoc(scoresum,&minVal,&maxVal,&minLoc,&maxLoc);
    float initialHC = (float)maxLoc.x/zoomRatio;
    line(zoomed, Point(maxLoc.x,0), Point(maxLoc.x,zoomed.rows-1), Scalar::all(255));
//    imshow("zoomed", zoomed);
    int bestx = 0,bestlayer = 0;
    Mat bestIndex_img = index_img >=1;
    minMaxLoc(index_img,&minVal,&maxVal,&minLoc,&maxLoc);
    for (int i = 1 ; i<=maxVal; i++) {
        Mat indexlayer_img = index_img >= i;
        medianBlur(indexlayer_img, indexlayer_img, 5);
        // erode twice with a blurSize x blurSize rectangular kernel
        Mat erodeKernel = getStructuringElement(MORPH_RECT, Size(blurSize, blurSize));
        erode(indexlayer_img, indexlayer_img, erodeKernel);
        erode(indexlayer_img, indexlayer_img, erodeKernel);
        indexlayer_img = removeSmallBlobs(indexlayer_img);
        
        indexlayer_img = fillHoleInBinary(indexlayer_img);
        indexlayer_img = fillConvexHulls(indexlayer_img);
        Mat score = calculateImageSymmetryScore(indexlayer_img);
        Mat scoresum;
        reduce(score.rowRange(0, indexlayer_img.cols/6), scoresum, 0, CV_REDUCE_SUM,CV_32FC1);
        minMaxLoc(scoresum,&minVal,&maxVal,&minLoc,&maxLoc);
        if (abs(maxLoc.x - initialHC) < abs(bestx - initialHC)) {
            // accept the layer only if it covers between roughly 1/6 and 2/5 of the image area
            if (sum(indexlayer_img)[0]/255 < indexlayer_img.size().area()/5*2 &&
                sum(indexlayer_img)[0]/255 > indexlayer_img.size().area()/6) {
                bestx = maxLoc.x;
                bestlayer = i;
                bestIndex_img = indexlayer_img.clone();
                
            }
            
        }
    }
    
    Point2f massCenter = findMassCenter_BinaryBiggestBlob(bestIndex_img);
    
    
    eyeCoord =  Point2f(initialHC,massCenter.y);
}
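The findNonZero(bestLabels == k, ...) idiom above turns a label comparison into a list of row indices: bestLabels is N x 1 and CV_32S, the comparison yields an N x 1 mask, and the y coordinate of each returned point is a matching row, which is why the code keeps column 1 after the reshape. A minimal sketch of just that idiom (the helper name is illustrative):

#include <opencv2/core.hpp>
#include <vector>

// Hypothetical helper: indices of the samples that cv::kmeans assigned to cluster k
static std::vector<int> rowsWithLabel(const cv::Mat& bestLabels, int k)
{
    cv::Mat locs;
    cv::findNonZero(bestLabels == k, locs);   // mask is N x 1, so every x is 0
    std::vector<int> rows;
    rows.reserve(locs.rows);
    for (int i = 0; i < locs.rows; i++)
        rows.push_back(locs.at<cv::Point>(i).y);
    return rows;
}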
Example #6
//Calculates the matching cost and shows the matching results on the window
float GraphMatch::drawMatches(vector<NodeSig> ns1, vector<NodeSig> ns2,
                              Mat img1, Mat img2)
{
    Mat assignment, cost;
    //Construct a [rows x cols] cost matrix using the pairwise
    //distance between node signatures
    int rows = ns1.size();
    int cols = ns2.size();

    int** cost_matrix = (int**)calloc(rows,sizeof(int*));
    for(int i = 0; i < rows; i++)
    {
        cost_matrix[i] = (int*)calloc(cols,sizeof(int));
        for(int j = 0; j < cols; j++)
        {
            //Multiplied by a constant because the hungarian algorithm accepts an integer
            //cost matrix; we later divide by the same constant for correction
            cost_matrix[i][j] = GraphMatch::calcN2NDistance(ns1[i], ns2[j])*MULTIPLIER_1000;
        }
    }

    hungarian_problem_t p;

    // initialize the hungarian_problem using the cost matrix
    hungarian_init(&p, cost_matrix , rows, cols, HUNGARIAN_MODE_MINIMIZE_COST);

    // solve the assignment problem
    hungarian_solve(&p);

    //Convert results to OpenCV format
    assignment = array2Mat8U(p.assignment,rows,cols);
    cost = array2Mat32F(cost_matrix,rows,cols);
    //Divide for correction
    cost = cost / MULTIPLIER_1000;

    //free the hungarian problem and the caller-owned cost matrix
    hungarian_free(&p);
    for(int i = 0; i < rows; i++)
        free(cost_matrix[i]);
    free(cost_matrix);

    // Calculate match score and
    // show matching results given assignment and cost matrix
    Mat img_merged;
    float matching_cost = 0;

    //Concatenate two images
    hconcat(img1, img2, img_merged);

    //Get the non-zero indices, which define the optimal match (assignment)
    vector<Point> nonzero_locs;
    findNonZero(assignment,nonzero_locs);

    //Draw optimal match
    for(int i = 0; i < nonzero_locs.size(); i++)
    {
        Point p1 = ns1[nonzero_locs[i].y].center;
        Point p2 = ns2[nonzero_locs[i].x].center+Point(img1.size().width,0);
        circle(img_merged,p1,MATCH_CIRCLE_RADIUS,MATCH_LINE_COLOR);
        circle(img_merged,p2,MATCH_CIRCLE_RADIUS,MATCH_LINE_COLOR);
        line(img_merged,p1,p2,MATCH_LINE_COLOR,MATCH_LINE_WIDTH);

        float dist = cost.at<float>(nonzero_locs[i].y, nonzero_locs[i].x);

        matching_cost = matching_cost + dist;

        //Draw cost on match image
        stringstream ss;
        ss << dist;
        string cost_str = ss.str();
        Point center_pt((p1.x + p2.x) / 2.0, (p1.y + p2.y) / 2.0);
        //putText(img_merged, cost_str, center_pt, cv::FONT_HERSHEY_SIMPLEX, 1.0, Scalar(255, 0, 0), 1);
    }

    //Show matching results image on the window
    emit showMatchImage(mat2QImage(img_merged));

    return matching_cost;

}
Example #7
std::vector<int> LightplaneCalibrator::AddLightImage(
    std::vector<cv::Mat>& srcs_light) {

  std::vector<int> return_value;
  std::vector<cv::Point>::iterator begin_100, end_900, itr;
  ims_points_.clear();
  for (int i = 0; i < srcs_light.size(); i++) {
    std::vector<cv::Point> pts; std::vector<cv::Point2f> ptfs;
    cv::Mat im = srcs_light[i], gray;
    if (im.channels() == 3) {
      cvtColor(im, gray, CV_BGR2GRAY);
    }
    else {
      gray = im;
    }
    cam_->UndistorImage(gray, gray);
    medianBlur(gray, gray, 11);
    cv::Mat threshold_out;
    cv::threshold(gray, threshold_out, 25, 255, CV_THRESH_BINARY);
    cv::Mat dialateStructure = cv::getStructuringElement(
      cv::MorphShapes::MORPH_RECT, cv::Size(15, 15));

    dilate(threshold_out, threshold_out, dialateStructure, cv::Point(-1, -1));
#ifdef _DEBUG_JIANG_  // for debug
    cv::namedWindow("threshold_medianBlur_out");
    cv::imshow("threshold_medianBlur_out", threshold_out);
    /*imwrite("threshold_medianBlur_out.bmp",threshold_out);*/
    cv::waitKey(200);
#endif
    std::vector<std::vector<cv::Point> > contours;
    std::vector<cv::Vec4i> hierarchy;
    findContours(threshold_out, contours, hierarchy, CV_RETR_EXTERNAL,
      CV_CHAIN_APPROX_NONE, cv::Point(0, 0));

    /// Draw contours
    cv::Mat drawing = cv::Mat::zeros(threshold_out.size(), CV_8U);
    for (size_t i = 0; i < contours.size(); i++) {
      if (contours[i].size() > 200) {
        //Scalar color = Scalar( rng.uniform(0, 255), rng.uniform(0,255), rng.uniform(0,255) );
        drawContours(drawing, contours, (int)i, cv::Scalar(255),
          CV_FILLED, 8, hierarchy, 0, cv::Point());

      }
    }
    cv::Mat  thining_output;
    Thinning(drawing, thining_output);
#ifdef _DEBUG_JIANG_
    cv::imshow("thining_output", thining_output);
    cv::waitKey(200);
#endif
    findNonZero(thining_output, pts);
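    // findNonZero scans the image row by row, so pts is ordered by non-decreasing y;
    // the loop below remembers the last point with y <= 100 and the last with y <= 900,
    // and the assign() keeps roughly that band of the thinned line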
    for (itr = pts.begin(); itr != pts.end(); ++itr) {
      if ((*itr).y <= 100) {
        begin_100 = itr;
      }
      if ((*itr).y <= 900) {
        end_900 = itr;
      }
    }
    ptfs.assign(begin_100, end_900);
    ims_points_.push_back(ptfs);
  }
  //std::ofstream log;
  //log.open("ims_points.txt");
  //for (int i = 0; i < ims_points_.size(); i++) {
  //  for (int j = 0; j < ims_points_[i].size(); j++) {
  //    log << ims_points_[i][j].x << " " << ims_points_[i][j].y << " " << i + 1 
  //        << std::endl;

  //  }
  //  log << std::endl;
  //}
  return return_value;
}
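The iterator bookkeeping above relies on that row-major ordering; the same band of points can also be selected directly. A minimal sketch under that assumption (the helper name and the y bounds are illustrative, not part of LightplaneCalibrator):

#include <opencv2/core.hpp>
#include <vector>

// Hypothetical helper: non-zero pixels whose row lies within [y_min, y_max]
static std::vector<cv::Point2f> pointsInBand(const cv::Mat& binary, int y_min, int y_max) {
  std::vector<cv::Point> pts;
  cv::findNonZero(binary, pts);                  // row-major scan: y is non-decreasing
  std::vector<cv::Point2f> band;
  for (size_t i = 0; i < pts.size(); i++) {
    if (pts[i].y >= y_min && pts[i].y <= y_max)
      band.push_back(cv::Point2f((float)pts[i].x, (float)pts[i].y));
  }
  return band;
}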