Example #1
int TrainingSet::getTrainingSet(cv::vector<cv::Mat> &images, cv::vector<int> &labels, cv::vector<QString> &names){
  QStringList directoriesList = getAvailableFaces();

  labels.clear();
  images.clear();
  names.clear();
  // each subdirectory of mainDirectory holds one person's photos;
  // the directory index i is used as that person's numeric label
  for(int i=0; i<directoriesList.size(); i++){
    QString person = directoriesList.at(i);
    QDir faceDirectory;
    faceDirectory.setPath(mainDirectory.path() + "/" + person);
    QStringList photosList = faceDirectory.entryList(QDir::Files);
    foreach(QString photo, photosList){
      images.push_back(cv::imread(faceDirectory.path().toStdString() + "/" + photo.toStdString(), CV_LOAD_IMAGE_GRAYSCALE));
      labels.push_back(i);
    }
    names.push_back(person);
  }
  // the excerpt ends without a return statement; returning the number of
  // loaded samples is an assumption
  return images.size();
}
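A minimal usage sketch, assuming OpenCV 2.4's contrib module and a TrainingSet instance; the LBPH recognizer is an illustrative choice, not something the excerpt prescribes:

#include <opencv2/contrib/contrib.hpp>
#include <QString>

void trainRecognizer(TrainingSet &trainingSet){
  cv::vector<cv::Mat> images;
  cv::vector<int> labels;
  cv::vector<QString> names;
  trainingSet.getTrainingSet(images, labels, names);

  // labels[i] indexes into names, so a prediction can be mapped back to a person
  cv::Ptr<cv::FaceRecognizer> model = cv::createLBPHFaceRecognizer();
  model->train(images, labels);
}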
Example #2
// fill worldPoints with the chessboard's 3-D corner grid (z = 0 plane),
// one copy per calibration image; g_squareSize is the side length of one square
void setWorldPoints(cv::vector<cv::vector<cv::Point3f> > &worldPoints, 
		    const cv::Size patternSize,
		    const int &fileNum){
  worldPoints.clear();
  worldPoints.resize(fileNum);
  for (int i = 0; i < fileNum; i++) {
    for (int j = 0; j < patternSize.height; j++)
      for (int k = 0; k < patternSize.width; k++)
        worldPoints[i].push_back(cv::Point3f(k * g_squareSize, j * g_squareSize, 0));
  }
}
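A sketch of how these object points are typically paired with detected chessboard corners for calibration; the image list and the wrapper function name are assumptions:

#include <opencv2/calib3d/calib3d.hpp>

void calibrateFromImages(const cv::vector<cv::Mat> &grayImages,
                         const cv::Size patternSize){
  cv::vector<cv::vector<cv::Point2f> > imagePoints;
  for (size_t i = 0; i < grayImages.size(); i++) {
    cv::vector<cv::Point2f> corners;
    if (cv::findChessboardCorners(grayImages[i], patternSize, corners))
      imagePoints.push_back(corners);
  }

  // one grid of world points per successfully detected view
  cv::vector<cv::vector<cv::Point3f> > worldPoints;
  setWorldPoints(worldPoints, patternSize, (int)imagePoints.size());

  cv::Mat cameraMatrix, distCoeffs;
  cv::vector<cv::Mat> rvecs, tvecs;
  cv::calibrateCamera(worldPoints, imagePoints, grayImages[0].size(),
                      cameraMatrix, distCoeffs, rvecs, tvecs);
}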
Example #3
void loadImages(cv::vector<cv::Mat> &rgb, 
		cv::vector<cv::Mat> &depth, 
		const int &fileNum){
  rgb.clear();
  depth.clear();
  
  for(int i = 0; i < fileNum; ++i){
    std::stringstream rgbfilename, depthfilename;
    rgbfilename << FLAGS_folder << "/" << FLAGS_color << i << FLAGS_type;
    depthfilename << FLAGS_folder << "/" << FLAGS_depth << i << FLAGS_type;

    std::cout << "loading : " << rgbfilename.str() << " and " << depthfilename.str() << std::endl;

    // load the color frame (flag 0 makes imread return a single-channel grayscale image)
    cv::Mat tempRGB = cv::imread(rgbfilename.str(), 0);
    rgb.push_back(tempRGB);


    // load depth image (CV_LOAD_IMAGE_ANYDEPTH keeps the raw bit depth)
    cv::Mat tempDepth = cv::imread(depthfilename.str(), CV_LOAD_IMAGE_ANYDEPTH);
    // scale raw depth into 8 bit: a raw value of 1000 maps to 255
    tempDepth.convertTo(tempDepth, CV_8U, 255.0/1000.0);
    // cv::Mat maxDist = cv::Mat::ones(tempDepth.rows, tempDepth.cols, CV_8U) * MAX_DEPTH;
    // cv::Mat minDist = cv::Mat::ones(tempDepth.rows, tempDepth.cols, CV_8U) * MIN_DEPTH;
    // cv::min(tempDepth, maxDist, tempDepth);
    // tempDepth -= minDist;
    cv::resize(tempDepth, tempDepth, cv::Size(), 2.0, 2.0);

    // crop a fixed, roughly 4:3 region and rescale it to 640x480 so the depth
    // map lines up approximately with the color frame
    cv::Mat roiTempDepth;
    cv::resize(tempDepth(cv::Rect(40, 43, 498, 498 / 4 * 3)), roiTempDepth, cv::Size(640, 480));

    depth.push_back(roiTempDepth);

    std::cout << "loaded" << std::endl;

    cv::imshow("rgb",rgb[i]);
    cv::imshow("depth",depth[i]);
    cv::waitKey(100);
  }

}
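The FLAGS_* variables used above come from gflags; a plausible set of definitions (the default values are assumptions, only the flag names are taken from the code):

#include <gflags/gflags.h>

DEFINE_string(folder, "data",   "directory that holds the captured frames");
DEFINE_string(color,  "rgb_",   "file name prefix of the color images");
DEFINE_string(depth,  "depth_", "file name prefix of the depth images");
DEFINE_string(type,   ".png",   "file extension of the images");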
Example #4
void loadImages(cv::vector<cv::Mat> &lefts, cv::vector<cv::Mat> &rights,
                const int &fileNum) {
    cv::namedWindow("Left", CV_WINDOW_AUTOSIZE | CV_WINDOW_FREERATIO);
    cv::namedWindow("Right", CV_WINDOW_AUTOSIZE | CV_WINDOW_FREERATIO);

    lefts.clear();
    rights.clear();

    for (int i = 0; i < fileNum; i++) {
        std::stringstream lfile, rfile;
        lfile << FLAGS_dir << "/left_" << i << FLAGS_suffix;
        rfile << FLAGS_dir << "/right_" << i << FLAGS_suffix;
        std::cout << "load: " << lfile.str() << ", " << rfile.str() << std::endl;

        cv::Mat left = cv::imread(lfile.str(), 0);
        cv::Mat right = cv::imread(rfile.str(), 0);
        lefts.push_back(left);
        rights.push_back(right);

        cv::imshow("Left", left);
        cv::imshow("Right", right);
        cv::waitKey(100);
    }
}
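A possible follow-up sketch, assuming the loaded pairs are already rectified; it only illustrates feeding them into OpenCV 2.x block matching, which the excerpt itself does not do:

#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/highgui/highgui.hpp>

void computeDisparities(const cv::vector<cv::Mat> &lefts,
                        const cv::vector<cv::Mat> &rights) {
    cv::StereoBM bm(cv::StereoBM::BASIC_PRESET, 64, 21);  // 64 disparities, 21x21 SAD window
    for (size_t i = 0; i < lefts.size(); i++) {
        cv::Mat disparity, display;
        bm(lefts[i], rights[i], disparity, CV_32F);
        // rescale for display only
        cv::normalize(disparity, display, 0, 255, cv::NORM_MINMAX, CV_8U);
        cv::imshow("Disparity", display);
        cv::waitKey(100);
    }
}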
Example #5
void SiftGPUWrapper::detect(const cv::Mat& image, cv::vector<cv::KeyPoint>& keypoints, std::vector<float>& descriptors, const Mat& mask) const {
    if (error) {
        keypoints.clear();
        ROS_FATAL("SiftGPU cannot be used. Detection of keypoints failed");
        return; //nothing useful can be computed without a working SiftGPU instance
    }

    //get image
    cvMatToSiftGPU(image, data);

    int num_features = 0;
    SiftGPU::SiftKeypoint* keys = 0;

    ROS_DEBUG("SIFTGPU: cols: %d, rows: %d", image.cols, image.rows);
    if (siftgpu->RunSIFT(image.cols, image.rows, data, GL_LUMINANCE, GL_UNSIGNED_BYTE)) {
        num_features = siftgpu->GetFeatureNum();
        ROS_INFO("Number of features found: %i", num_features);
        keys = new SiftGPU::SiftKeypoint[num_features];
        descriptors.resize(128 * num_features);
        //descriptors = new float[128 * num_features];
        siftgpu->GetFeatureVector(&keys[0], &descriptors[0]);
    } else {
        ROS_WARN("SIFTGPU->RunSIFT() failed!");
    }

    //copy to opencv structure
    keypoints.clear();
    for (int i = 0; i < num_features; ++i) {
        KeyPoint key(keys[i].x, keys[i].y, keys[i].s, keys[i].o);
        keypoints.push_back(key);
    }
    delete[] keys; //the temporary keypoint buffer is no longer needed once copied

    //	FILE *fp = fopen("bla.pgm", "w");
    //	WritePGM(fp, data, image.cols, image.rows);
    //	fclose(fp);

}
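The flat descriptor vector produced above can be reshaped into a matrix for OpenCV's matchers; a small sketch (the function name is illustrative, SiftGPU writes 128 floats per keypoint back to back):

#include <opencv2/core/core.hpp>

cv::Mat descriptorsToMat(const std::vector<float>& descriptors) {
    int num_features = static_cast<int>(descriptors.size()) / 128;
    // the constructor only wraps the existing buffer; clone() makes an owning copy
    return cv::Mat(num_features, 128, CV_32F, (void*)descriptors.data()).clone();
}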
Example #6
void paperRegistration::detectFigures(cv::vector<cv::vector<cv::Point>>& squares, cv::vector<cv::vector<cv::Point>>& triangles,
	float minLength, float maxLength, int tresh_binary)
{
	if (currentDeviceImg.empty())
		return;
	
	//cv::Mat image = currentDeviceImg;
	//cv::Mat image = cv::imread("C:/Users/sophie/Desktop/meinz.png", CV_LOAD_IMAGE_GRAYSCALE);// cv::imread(path, CV_LOAD_IMAGE_GRAYSCALE);  
	//resize(image, image, cv::Size(500,700));

	squares.clear();  
	triangles.clear();	
	
	cv::Mat gray;
	cv::Mat element = getStructuringElement(cv::MORPH_RECT, cv::Size(7,7));
	cv::vector<cv::vector<cv::Point> > contours;

	//compute binary image
	//use dilatation and erosion to improve edges
	threshold(currentDeviceImg, gray, tresh_binary, 255, cv::THRESH_BINARY_INV);	
	dilate(gray, gray, element, cv::Point(-1,-1));
	erode(gray, gray, element, cv::Point(-1,-1));
	
	// find contours and store them all as a list
	cv::findContours(gray, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);
	
	//test each contour
	cv::vector<cv::Point> approx;
	cv::vector<cv::vector<cv::Point> >::iterator iterEnd = contours.end();
	for(cv::vector<cv::vector<cv::Point> >::iterator iter = contours.begin(); iter != iterEnd; ++iter)
	{
		// approximate contour with accuracy proportional
		// to the contour perimeter
		cv::approxPolyDP(*iter, approx, arcLength(*iter, true)*0.03, true);
	     
		//contours should be convex
		if (isContourConvex(approx))
		{
			// square contours should have 4 vertices after approximation and 
			// relatively large length (to filter out noisy contours)
			if( approx.size() == 4)
			{
				bool rectangular = true;	 
				for( int j = 3; j < 6; j++ )
				{
					// computeAngle is expected to return the corner angle in degrees:
					// if every checked angle is within 7 degrees of 90, the
					// quadrilateral is treated as rectangular
					 if (fabs(90 - fabs(computeAngle(approx[j%4], approx[j-3], approx[j-2]))) > 7)
					 {
						rectangular = false;
						break;
					 }
				}
				
				if (!rectangular)
					continue;
				
				float side1 = computeLength(approx[0], approx[1]);
				float side2 = computeLength(approx[1], approx[2]);
					
				if (side1 > minLength && side1 < maxLength && 
					side2 > minLength && side2 < maxLength)
					squares.push_back(approx);
			}		
			// triangle contours should have 3 vertices after approximation and 
			// relatively large length (to filter out noisy contours)
			else if ( approx.size() == 3)
			{
				float side1 = computeLength(approx[0], approx[1]);
				float side2 = computeLength(approx[1], approx[2]);
				float side3 = computeLength(approx[2], approx[0]);
				
				if (side1 > minLength && side1 < maxLength && 
					side2 > minLength && side2 < maxLength &&
					side3 > minLength && side3 < maxLength)
					triangles.push_back(approx);
			}
		}
	}
}
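computeAngle and computeLength are used above but are not part of the excerpt; a plausible implementation, assuming computeAngle returns the angle at the middle point in degrees and computeLength the Euclidean distance (whether they are free functions or paperRegistration members is not shown):

#include <opencv2/core/core.hpp>
#include <cmath>

float computeLength(const cv::Point &a, const cv::Point &b)
{
	float dx = (float)(a.x - b.x);
	float dy = (float)(a.y - b.y);
	return std::sqrt(dx*dx + dy*dy);
}

float computeAngle(const cv::Point &a, const cv::Point &b, const cv::Point &c)
{
	cv::Point2f v1((float)(a.x - b.x), (float)(a.y - b.y));
	cv::Point2f v2((float)(c.x - b.x), (float)(c.y - b.y));
	float cosine = (v1.x*v2.x + v1.y*v2.y) /
		(std::sqrt(v1.x*v1.x + v1.y*v1.y) * std::sqrt(v2.x*v2.x + v2.y*v2.y) + 1e-10f);
	return std::acos(cosine) * 180.0f / (float)CV_PI; //angle at b, in degrees
}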