Example #1
0
// L2-normalise the histogram (each inner vector holds the keypoint indices of one bin, so its size is that bin's count)
Mat normalise(const vector<vector<int> > &hist)
{
	Mat histb;
	for (unsigned i=0; i<hist.size(); i++)
	{
		histb.push_back(float(hist[i].size()));
	}
	Mat nhist;
	normalize(histb, nhist);
	return nhist;
}
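A minimal usage sketch for the helper above (a hypothetical driver, not part of the original example; it assumes normalise() is in scope and that the inner vectors hold per-cluster keypoint indices, e.g. the pointIdxsOfClusters output of BOWImgDescriptorExtractor::compute as in Example #11):
#include <opencv2/core/core.hpp>
#include <iostream>
#include <vector>
using namespace cv;
using namespace std;

int main()
{
    // three clusters with 2, 5 and 3 assigned keypoints (the index values themselves do not matter here)
    vector<vector<int> > words(3);
    words[0].assign(2, 0);
    words[1].assign(5, 0);
    words[2].assign(3, 0);
    Mat h = normalise(words); // 3x1 CV_32F column with unit L2 norm
    cout << h << endl;
    return 0;
}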
Example #2
0
Mat AAM::sampleMat(Mat mat, int x)
{
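    // Take every x-th element of the first row of 'mat' (converted to float) and return them
    // as a single row of length mat.cols / x.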
    Mat result;
    Mat converted;
    mat.convertTo(converted, CV_32F);
    for(int i=0; i<mat.cols/x; i++)
    {

        result.push_back(converted.at<float>(0, i*x));
    }
    return result.t();
}
Example #3
0
  void CharsIdentify::classifyChinese(std::vector<CCharacter>& charVec){
    size_t charVecSize = charVec.size();

    if (charVecSize == 0)
      return;

    Mat featureRows;
    for (size_t index = 0; index < charVecSize; index++) {
      Mat charInput = charVec[index].getCharacterMat();
      Mat feature = charFeatures(charInput, kChineseSize);
      featureRows.push_back(feature);
    }

    cv::Mat output(charVecSize, kChineseNumber, CV_32FC1);
    annChinese_->predict(featureRows, output);

    for (size_t output_index = 0; output_index < charVecSize; output_index++) {
      CCharacter& character = charVec[output_index];
      Mat output_row = output.row(output_index);
      bool isChinese = true;

      float maxVal = -2;
      int result = -1;

      for (int j = 0; j < kChineseNumber; j++) {
        float val = output_row.at<float>(j);
        //std::cout << "j:" << j << "val:" << val << std::endl;
        if (val > maxVal) {
          maxVal = val;
          result = j;
        }
      }

      // no match
      if (-1 == result) {
        result = 0;
        maxVal = 0;
        isChinese = false;
      }

      auto index = result + kCharsTotalNumber - kChineseNumber;
      const char* key = kChars[index];
      std::string s = key;
      std::string province = kv_->get(s);

      /*std::cout << "result:" << result << std::endl;
      std::cout << "maxVal:" << maxVal << std::endl;*/

      character.setCharacterScore(maxVal);
      character.setCharacterStr(province);
      character.setIsChinese(isChinese);
    }
  }
Example #4
0
void find_decision_boundary_EM()
{
    img.copyTo( imgDst );

    Mat trainSamples, trainClasses;
    prepare_train_data( trainSamples, trainClasses );

    vector<cv::EM> em_models(classColors.size());

    CV_Assert((int)trainClasses.total() == trainSamples.rows);
    CV_Assert((int)trainClasses.type() == CV_32SC1);

    for(size_t modelIndex = 0; modelIndex < em_models.size(); modelIndex++)
    {
        const int componentCount = 3;
        em_models[modelIndex] = EM(componentCount, cv::EM::COV_MAT_DIAGONAL);

        Mat modelSamples;
        for(int sampleIndex = 0; sampleIndex < trainSamples.rows; sampleIndex++)
        {
            if(trainClasses.at<int>(sampleIndex) == (int)modelIndex)
                modelSamples.push_back(trainSamples.row(sampleIndex));
        }

        // learn models
        if(!modelSamples.empty())
            em_models[modelIndex].train(modelSamples);
    }

    // classify coordinate plane points using the bayes classifier, i.e.
    // y(x) = arg max_i=1_modelsCount likelihoods_i(x)
    Mat testSample(1, 2, CV_32FC1 );
    for( int y = 0; y < img.rows; y += testStep )
    {
        for( int x = 0; x < img.cols; x += testStep )
        {
            testSample.at<float>(0) = (float)x;
            testSample.at<float>(1) = (float)y;

            Mat logLikelihoods(1, em_models.size(), CV_64FC1, Scalar(-DBL_MAX));
            for(size_t modelIndex = 0; modelIndex < em_models.size(); modelIndex++)
            {
                if(em_models[modelIndex].isTrained())
                    logLikelihoods.at<double>(modelIndex) = em_models[modelIndex].predict(testSample)[0];
            }
            Point maxLoc;
            minMaxLoc(logLikelihoods, 0, 0, 0, &maxLoc);

            int response = maxLoc.x;
            circle( imgDst, Point(x,y), 2, classColors[response], 1 );
        }
    }
}
Example #5
0
    int train(const String &imgdir)
    {
        string last_n("");
        int label(-1);

        Mat features;
        Mat labels;

        vector<String> vec;
        glob(imgdir,vec,true);
        if ( vec.empty())
            return 0;

        for (size_t i=0; i<vec.size(); i++)
        {
            // extract name from filepath:
            String v = vec[i];
            int r1 = v.find_last_of(SEP);
            String v2 = v.substr(0,r1);
            int r2 = v2.find_last_of(SEP);
            String n = v2.substr(r2+1);
            if (n!=last_n)
            {
                last_n=n;
                label++;
            }
            persons[label] = n;

            // process img & add to trainset:
            Mat img=imread(vec[i],0);

            Mat feature;
            extractor->extract(pre.process(img), feature);
            if (!filter.empty())
                filter->filter(feature.reshape(1,1), feature);
            features.push_back(feature);
            labels.push_back(label);
        }
        return classifier->train(features, labels);
    }
Example #6
0
Mat BOWMSCTrainer::cluster(const Mat& _descriptors) const {

    CV_Assert(!_descriptors.empty());
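    // Greedy centre selection: a descriptor becomes a new centre when its Mahalanobis distance
    // (identity inverse covariance here, i.e. Euclidean) to every existing centre exceeds
    // clusterSize; the remaining descriptors are then assigned to the nearest centre and the
    // per-cluster means form the vocabulary.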

    // TODO: sort the descriptors before clustering.


    Mat icovar = Mat::eye(_descriptors.cols,_descriptors.cols,_descriptors.type());

    std::vector<Mat> initialCentres;
    initialCentres.push_back(_descriptors.row(0));
    for (int i = 1; i < _descriptors.rows; i++) {
        double minDist = DBL_MAX;
        for (size_t j = 0; j < initialCentres.size(); j++) {
            minDist = std::min(minDist,
                cv::Mahalanobis(_descriptors.row(i),initialCentres[j],
                icovar));
        }
        if (minDist > clusterSize)
            initialCentres.push_back(_descriptors.row(i));
    }

    std::vector<std::list<cv::Mat> > clusters;
    clusters.resize(initialCentres.size());
    for (int i = 0; i < _descriptors.rows; i++) {
        int index = 0; double dist = 0, minDist = DBL_MAX;
        for (size_t j = 0; j < initialCentres.size(); j++) {
            dist = cv::Mahalanobis(_descriptors.row(i),initialCentres[j],icovar);
            if (dist < minDist) {
                minDist = dist;
                index = (int)j;
            }
        }
        clusters[index].push_back(_descriptors.row(i));
    }

    // TODO: throw away small clusters.

    Mat vocabulary;
    Mat centre = Mat::zeros(1,_descriptors.cols,_descriptors.type());
    for (size_t i = 0; i < clusters.size(); i++) {
        centre.setTo(0);
        for (std::list<cv::Mat>::iterator Ci = clusters[i].begin(); Ci != clusters[i].end(); Ci++) {
            centre += *Ci;
        }
        centre /= (double)clusters[i].size();
        vocabulary.push_back(centre);
    }

    return vocabulary;
}
Example #7
0
  Mat read_csv_double(const string&filename,char separator)
  {
    Mat m;
    cout << "++read_csv_double: " << filename << endl;
    // check cache
    {
      ifstream cache(filename + ".bin");
      if(false && cache.good())
      {
	cout << "getting from bin cache" << endl;
	assert(readFileToMat(m, filename + ".bin") == 0);
	return m;
      }
    }
    
    ifstream ifs(filename);
    if(not ifs)
    {
      cout << "error: failed to open " << filename << endl;
      assert(false);
    }

    while(ifs)
    {
      string line;
      std::getline(ifs,line);
      istringstream iss(line);
      vector<double> line_values;
      string svalue;
      while(getline(iss,svalue,separator))
      {
	double value = fromString<double>(svalue);
	line_values.push_back(value);
      } // read all numbers from line
      //cout << "[";
      //for(auto && vec : line_values)
      //cout << "(" << vec << ")";
      //cout << "]" << endl;
      Mat row = Mat(line_values).t();
      if(m.cols == 0 or row.cols == m.cols)
	m.push_back(row);
      else
	break;
    }        

    assert(writeMatToFile(m, filename + ".bin") == 0);
    cout << "--read_csv_double: " << m.size() << endl;
    return m;
  }
Example #8
0
void
read(Mat &train, Mat &test) {

    QDir dir(QString("db/"));
    QStringList files = dir.entryList({"*.png"}, QDir::Files);

    if(files.empty()){
        cout << "empty db" << endl;
        exit(1);
    }

    // 15 subjects
    for(int i = 0; i < 15; i++) {
        vector<Mat> v;
        v.reserve(11);
        // 11 faces
        for(int j = 0; j < 11; j++){
            int k = (i*11) + j;
            Mat img = imread(QString("db/" + files[k]).toStdString(), 0);
            img = img.reshape(1, 1);

            Mat m;
            img.convertTo(m, CV_32F);
            v.push_back(m);
        }
        random_shuffle(v.begin(), v.end());

        while(v.size() > 0){
            Mat m = v.back();
            if(v.size() <= 3) {
                test.push_back(m);
            }else{
                train.push_back(m);
            }
            v.pop_back();
        }
    }
}
Example #9
0
Mat AAM::countVariationMatrix(Mat matrix, Mat mean)
{
    matrix.convertTo(matrix, mean.type());
    Mat variationMatrix;
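    // Subtract the mean row from every row of the input to build the matrix of variations.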
    for (int i=0; i<matrix.rows; i++)
    {
        Mat row=matrix.row(i) - mean;
        //cout<<"mean: "<<mean<<endl;
        //cout<<"row:  "<<matrixCopy.row(i)<<endl;
        //cout<<"resul:"<<row<<endl;
        variationMatrix.push_back(row);
    }
    return variationMatrix;
}
Example #10
0
Mat EkfSlam::step(Mat u)
{
	Mat observations;
	Mat n = (Mat_<double>(2,1) << 0, 0);
	Mat v = (Mat_<double>(2,1) << 0, 0);
	Mat Y;

	// motion prediction updates covariance and state vector
	move(mState(Rect(0,0,1,3)), u, n);

	// landmark correction
	for(int i=0; i < (mState.rows-3)/2; i++) {
		v.at<double>(0) = s.at<double>(0)*r->gaussian(.5);
		v.at<double>(1) = s.at<double>(1)*r->gaussian(.5);

		Mat yi = scan(i);
		if(yi.rows) {
			observations.push_back(yi);
			yi+=v;
			correct(scan(i), v, i);
		}
	}

	// initialize any new landmarks discovered
	for(int i=(mState.rows-3)/2;;i++) { 
		Mat f = scan(i);
		if(f.rows == 2) {
			addFeature(f,i);
			observations.push_back(f);
		}
		else
			break;
	}

	// return raw sensor data for simple navigation tasks
	return observations;
}
Example #11
0
/* Bag the words
* imagesDirectory: folder of the images
* descriptorsDirectory: directory of the descriptors to bag
* vocFileName: path to the file of the vocabulary
* histsFileName: path to the outputFile of the bags of words
*/
void bag(char* imagesDirectory, string descriptorsDirectory, string vocFileName, string histsFileName)
{
	DIR *dir;
	dir = opendir(imagesDirectory);
	struct dirent *ent;
	
	//The histograms
	Mat hists;
	
	//BOW
	DescriptorExtractor* de = new ORB(1000);
	Ptr<DescriptorExtractor> descex(de);
	Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");
	BOWImgDescriptorExtractor bow(descex, matcher);
	
	//ORB vocabulary
	Mat vocabulary;
	FileStorage voc(vocFileName, FileStorage::READ);
	voc["centers"] >> vocabulary;
	voc.release();
	bow.setVocabulary(vocabulary);
	cout << vocabulary.rows << endl;
	cin.ignore();
	//ORB
	OrbDescriptorExtractor ORBde(1000);
	while ((ent = readdir (dir)) != NULL)
	{
		if(ent->d_name[0] == '.') //FIX ME (code to avoid "." and "..")
			continue;
		cout << ent->d_name << endl;
		Mat input = imread(string(imagesDirectory)+(ent->d_name), CV_LOAD_IMAGE_GRAYSCALE);
		vector<vector<int> > words;
		vector<KeyPoint> keypoints;
		ORBde(input, Mat(), keypoints);
		Mat bowDescriptor; // compute() needs an lvalue for the descriptor output; only 'words' is used here
		bow.compute(input, keypoints, bowDescriptor, &words);
		cout << words.size() << endl;
		Mat words2;
		transpose(Mat(normalise(words)), words2);
		hists.push_back(words2);
		cout << hists.rows << " " << hists.cols << endl;
	}
	SparseMat sparsehists(hists);
	FileStorage fs(histsFileName, FileStorage::WRITE);
	fs << "hists" << sparsehists;
	fs.release();
	cout << "with clustering" << endl;
	cin.ignore();
	return;
}
Example #12
0
void lbp_xy(const Mat_<uchar> &img, Mat &hist, const Rect &rec) {
    Mat_<float> H(1,59, 0.0f);
    const int m = 1;
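    // For every interior pixel, build the 8-neighbour LBP code and map it through the
    // 'uniform' lookup table into one of the 59 histogram bins.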
    for (int c=rec.x+m; c<rec.x+rec.width-m; c++) {
        for (int r=rec.y+m; r<rec.y+rec.height-m; r++) {
            uchar v = 0;
            uchar cen = img(r,c);
            for (int o=0; o<8; o++)
                v |= (img(r + off_9[o*2+1], c + off_9[o*2]) > cen) << o;
            H(uniform[v])++;
        }
    }
    normalize(H,H);
    hist.push_back(H);
}
Example #13
0
void lbp_xz(const Sequence &seq, Mat &hist, const Rect &rec) {
    Mat_<float> H(1,59, 0.0f);
    const int m = 1, fixed = seq[0].rows / 2;
    for (int c=rec.x+m; c<rec.x+rec.width-m; c++) {
        for (int z=m; z<int(seq.size())-m; z++) {
            uchar v = 0;
            uchar cen = seq[z](fixed,c);
            for (int o=0; o<8; o++)
                v |= (seq[z + off_9[o*2+1]](fixed, c + off_9[o*2]) > cen) << o;
            H(uniform[v])++;
        }
    }
    normalize(H,H);
    hist.push_back(H);
}
Example #14
0
void lbp_yz(const Sequence &seq, Mat &hist, const Rect &rec) {
    Mat_<float> H(1,59,0.0f);
    const int m = 1, fixed = seq[0].cols / 2;
    for (int r=rec.y+m; r<rec.y+rec.height-m; r++) {
        for (int z=m; z<int(seq.size())-m; z++) {
            uchar v = 0;
            uchar cen = seq[z](r,fixed);
            for (int o=0; o<8; o++)
                v |= (seq[z + off_9[o*2]](r + off_9[o*2+1], fixed) > cen) << o;
            H(uniform[v])++;
        }
    }
    normalize(H,H);
    hist.push_back(H);
}
Example #15
0
int main()
{
	Mat A;
	Mat row = Mat::ones(1, 3, CV_32F);
	A.push_back(row);
	cout << "A.size()=" << A.size() << endl;
	cout << "A.type()=" << A.type() << endl;
	cout << "A=" << A << endl;

	A.push_back((Mat)Mat::zeros(1, 3, CV_32F));
	A.push_back((Mat)Mat::ones(1, 3, CV_32F));
	A.push_back((Mat)Mat(Vec3f(10.0f, 20.0f, 30.0f)).t());
	cout << "A=" << A.size() << "=" << A << endl;

	A.pop_back();
	cout << "A=" << A.size() << "=" << A << endl;
	A.pop_back(2);
	cout << "A=" << A.size() << "=" << A << endl;

	Mat B;
	B.push_back(10);
	B.push_back(20);
	B.push_back(30);
	B.push_back(40);
	cout << "B.size()=" << B.size() << endl;
	cout << "B.type()=" << B.type() << endl;
	cout << "B=" << B << endl;

	Mat C;
	C.push_back(10.0f);
	C.push_back(20.0f);
	C.push_back(30.0f);
	C.push_back(40.0f);
	cout << "C.size()=" << C.size() << endl;
	cout << "C.type()=" << C.type() << endl;
	cout << "C=" << C << endl;

	C.pop_back();
	cout << "C=" << C.size() << "=" << C << endl;
	C.pop_back(2);
	cout << "C=" << C.size() << "=" << C << endl;


	return 0;
}
Example #16
0
 virtual int addTraining(const Mat & img, int label)
 {
     Mat feat = extract(img);
     if (! fil.empty())
     {
         fil->filter(feat,feat);
     }
     if ( features.empty() )
     {
         features = Mat(nimg, feat.total(), feat.type());
     }
     feat.copyTo(features.row(labels.rows));
     labels.push_back(label);
     cerr << feat.cols << " i_" << labels.rows << "\r";
     return labels.rows;
 }
Example #17
0
void writeImages(const vector<Mat> &images, const string &path, int testIdx, int translationIdx, int rotationIdx, const string &postfix)
{
  stringstream name;
  name << path << "/" << testIdx << "_" << translationIdx << "_" << rotationIdx << "_" << postfix << ".png" ;

  Mat commonImage;
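  // Concatenate the images side by side: Mat::push_back can only append rows, so transpose
  // each image, stack the transposed copies, and transpose the result back.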
  commonImage = images[0].t();
  for(size_t i=1; i<images.size(); i++)
  {
    Mat transposedImage = images[i].t();
    commonImage.push_back(transposedImage);
  }
  commonImage = commonImage.t();

  imwrite(name.str(), commonImage);
}
Example #18
0
	void train(cv::Mat parameters, cv::Mat initialisations, cv::Mat templates, ProjectionFunction projection, OnTrainingEpochCallback onTrainingEpochCallback)
	{
		using cv::Mat;
		Mat currentX = initialisations;
		for (size_t regressorLevel = 0; regressorLevel < regressors.size(); ++regressorLevel) {
			// 1) Project current parameters x to feature space:
			// Enqueue all tasks in a thread pool:
			auto concurentThreadsSupported = std::thread::hardware_concurrency();
			if (concurentThreadsSupported == 0) {
				concurentThreadsSupported = 4;
			}
			utils::ThreadPool threadPool(concurentThreadsSupported);
			std::vector<std::future<typename std::result_of<ProjectionFunction(Mat, size_t, int)>::type>> results; // will be float or Mat. I might remove float for the sake of code clarity, as it's only useful for very simple examples.
			results.reserve(currentX.rows);
			for (int sampleIndex = 0; sampleIndex < currentX.rows; ++sampleIndex) {
				results.emplace_back(
					threadPool.enqueue(projection, currentX.row(sampleIndex), regressorLevel, sampleIndex)
				);
			}
			// Gather the results from all threads and store the features:
			Mat features;
			for (auto&& result : results) {
				features.push_back(result.get());
			}
			// Set the observed values, depending on if a template y is used:
			Mat observedValues;
			if (templates.empty()) { // unknown template training case
				observedValues = features;
			}
			else { // known template
				observedValues = features - templates;
			}
			Mat b = currentX - parameters; // currentX - x;
			// 2) Learn using that data:
			regressors[regressorLevel].learn(observedValues, b);
			// 3) Apply the learned regressor and use the predictions to learn the next regressor in next loop iteration:
			Mat x_k; // x_k = currentX - R * (h(currentX) - y):
			for (int sampleIndex = 0; sampleIndex < currentX.rows; ++sampleIndex) {
				// No need to re-extract the features, we already did so in step 1)
				x_k.push_back(Mat(currentX.row(sampleIndex) - regressors[regressorLevel].predict(observedValues.row(sampleIndex))));
			}
			currentX = x_k;
			onTrainingEpochCallback(currentX);
		}
	};
Example #19
0
// ratio must be <= 0.5
void getEdgeKeypoint(int w, int h, double ratio,const vector<KeyPoint>& kp,const Mat& des, vector<KeyPoint>& kpEdge, Mat& desEdge)
{
	kpEdge.clear();
	desEdge.release();

	Mat mask = Mat::zeros(h, w, CV_8U);
	Rect roi(w*ratio, h*ratio, w*(1.0f - 2.0f*ratio), h*(1.0f - 2.0f*ratio));
	mask(roi) = 1;
	for (int i = 0; i < kp.size(); ++i)
	{
		if (mask.at<uchar>(kp[i].pt) == 0)
		{
			kpEdge.push_back(kp[i]);
			desEdge.push_back(des.row(i));
		}
	}
	return;
}
Example #20
0
void LBP::Procura(Mat &Query)
{
    Mat ROI(Size(WIDTH,HEIGHT),CV_32FC1, Scalar::all(0));
    Mat LBP;
    Point roi;  // Stores the coordinates of the detected features
    int raio=1; int vizinhaca=8;
    DetectFace df;
    faces.clear();
    IMAGENSPOSITIVAS=0;IMAGENSNEGATIVAS=0;

    // sliding-window pass over the 320 x 240 px image, generating 25 x 30 px NON-face candidate windows
    for(int i = 0; i <= Query.rows - HEIGHT ; i++)
    {
        roi.y = i;

        for(int j =0; j <= Query.cols - WIDTH ; j++)
        {
            roi.x = j;
            Query(Rect(roi.x, roi.y, WIDTH, HEIGHT)).convertTo(ROI, CV_32FC1, 1, 0);

            PadraoLocal(ROI,LBP,raio,vizinhaca);

            Mat temp;
            MatConstIterator_<float> it = LBP.begin<float>(), it_end = LBP.end<float>();

            for(; it != it_end; ++it) temp.push_back(*it);

            PREDICAO = boost.predict( temp, Mat(),Range::all(),false,true);
            //Bboost.predict( const float* row_sample, int row_len, bool returnDFVal=false ) const;
            //svm.predict()
            QString nome = QString("ROI00%1-%2.jpg").arg(i).arg(j);
            string result2 = nome.toUtf8().constData();

            if ( PREDICAO > 12 )
            {
                df.predicao = PREDICAO; df.ponto = roi;
                faces.push_back( df );
                IMAGENSPOSITIVAS++;
            }
            else
                IMAGENSNEGATIVAS++;
        }
    }
}
Example #21
0
Mat FeatureExtract::tomyFeature(string file)
{
	printf("tomygraph feature ...\n");

	//file or directory
    bool directory = directoryAutoSet(file);    
    //record total number of images  
    int readNumber = 0;  

    Mat frame;
    Mat allFrames;
    // allFrames takes its type (CV_8UC1) from the first row pushed below; Mat::type() is not assignable.

    imgStrs.clear();
    if(!initialVideo(file, directory))
    {
        printf("!!!!!!!!!!!\n");
        return allFrames; // return the empty Mat; 'continue' is invalid outside a loop
    }

    while(1)
    {
        if(!getFrame(countss, directory, frame))
            break;
        readNumber ++;
        printf("* ");
        //printf("readNumber = %d\n", readNumber);
        if(frame.channels() > 1)
            cvtColor(frame, frame, CV_BGR2GRAY);
            
		allFrames.push_back(sampleTomy(frame)); // push the sampled row itself, not its address
    }
	
	if(allFrames.empty())
	{
		printf("feature extration error\n");
		exit(1);
	}
	
    if(vc.isOpened())
        vc.release();
    
    return allFrames;
}
Example #22
0
void EkfSlam::addFeature(Mat yi, int i)
{
	printf("Discovered Landmark %d\n", i);
	Mat s2 = s.clone();
	s2.at<double>(0) *= s2.at<double>(0);
	s2.at<double>(1) *= s2.at<double>(1);

	Mat R = Mat::diag(s2);

	Mat Gr = dg_dxb(mState(Rect(0,0,1,3)), yi);
	Mat Gy = dg_dyi(mState(Rect(0,0,1,3)), yi);

	Mat L = g(mState(Rect(0,0,1,3)), yi);
	mState.push_back(L);

	Mat Pll = Gr*Prr*Gr.t() + Gy*R*Gy.t();
	Mat Plr = Gr*Prr;
	Mat Plm;
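	// Grow the covariance matrix: Pll is the new landmark's covariance, Plr/Pmr the cross
	// terms with the robot state, and Plm the cross terms with previously mapped landmarks.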

	if(!i) {
		Pmr = Plr.clone();
		Prm = Pmr.t();
		Pmm = Pll.clone();
	}
	else {
		Plm = Gr*Prm;
		Pmr.push_back(Plr);
		Prm = Pmr.t();

		// append Pml to right of Pmm
		Pmm = Pmm.t();
		Pmm.push_back(Plm);
		Pmm = Pmm.t();
		// append Pll to right of Plm
		Plm = Plm.t();
		Pll = Pll.t();
		Plm.push_back(Pll);
		Plm = Plm.t();

		// append [Plm|Pll] to bottom of Pmm
		Pmm.push_back(Plm);
	}
}
Example #23
0
void ASiftDetector::detectAndCompute(const Mat& img, std::vector< KeyPoint >& keypoints, Mat& descriptors)
{
    keypoints.clear();
    descriptors = Mat(0, 128, CV_32F);
    for(int tl = 1; tl < 6; tl++)
    {
        double t = pow(2, 0.5*tl);
        for(int phi = 0; phi < 180; phi += 72.0/t)
        {
            std::vector<KeyPoint> kps;
            Mat desc;

            Mat timg, mask, Ai;
            img.copyTo(timg);

            affineSkew(t, phi, timg, mask, Ai);

#if 0
            Mat img_disp;
            bitwise_and(mask, timg, img_disp);
            namedWindow( "Skew", WINDOW_AUTOSIZE );// Create a window for display.
            imshow( "Skew", img_disp );
            waitKey(0);
#endif

            SiftFeatureDetector detector;
            detector.detect(timg, kps, mask);

            SiftDescriptorExtractor extractor;
            extractor.compute(timg, kps, desc);

            for(unsigned int i = 0; i < kps.size(); i++)
            {
                Point3f kpt(kps[i].pt.x, kps[i].pt.y, 1);
                Mat kpt_t = Ai*Mat(kpt);
                kps[i].pt.x = kpt_t.at<float>(0,0);
                kps[i].pt.y = kpt_t.at<float>(1,0);
            }
            keypoints.insert(keypoints.end(), kps.begin(), kps.end());
            descriptors.push_back(desc);
        }
    }
}
Example #24
0
Mat Classifier::svm_test(cv::Mat& testData, cv::Mat& testClasses, std::string fileName)
{
    // SVM load
    CvSVM svm;

    svm.load(fileName.c_str());

    //Mat prediccion(testClasses.rows,testClasses.cols,CV_32FC1);
    Mat prediccion;

    for (int i=0; i<=testClasses.rows-1; ++i)
    {
        prediccion.push_back(svm.predict(testData.row(i)));
    }

    cout << "Matriz Clases: " << testClasses.t() << endl;
    cout << "Matriz Predic: " << prediccion.t() << endl;
    return prediccion;
}
Example #25
0
void generateData( Mat& query, Mat& train )
{
  RNG& rng = theRNG();

  Mat buf( QUERY_DES_COUNT, DIM, CV_8UC1 );
  rng.fill( buf, RNG::UNIFORM, Scalar( 0 ), Scalar( 255 ) );
  buf.convertTo( query, CV_8UC1 );
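  // For each query row, append COUNT_FACTOR near-duplicate rows to 'train' that differ from
  // the query only in one randomly chosen column, whose byte gets some of its bits inverted.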

  for ( int i = 0; i < query.rows; i++ )
  {
    for ( int j = 0; j < COUNT_FACTOR; j++ )
    {
      train.push_back( query.row( i ) );
      int randCol = rand() % 32;
      uchar u = query.at<uchar>( i, randCol );
      uchar modified_u = invertSingleBits( u, j + 1 );
      train.at<uchar>( i * COUNT_FACTOR + j, randCol ) = modified_u;
    }
  }
}
Example #26
0
	cv::Mat test(cv::Mat initialisations, cv::Mat templates, ProjectionFunction projection, OnRegressorIterationCallback onRegressorIterationCallback)
	{
		using cv::Mat;
		Mat currentX = initialisations;
		for (size_t regressorLevel = 0; regressorLevel < regressors.size(); ++regressorLevel) {
			// Enqueue all tasks in a thread pool:
			auto concurentThreadsSupported = std::thread::hardware_concurrency();
			if (concurentThreadsSupported == 0) {
				concurentThreadsSupported = 4;
			}
			utils::ThreadPool threadPool(concurentThreadsSupported);
			std::vector<std::future<typename std::result_of<ProjectionFunction(Mat, size_t, int)>::type>> results; // will be float or Mat. I might remove float for the sake of code clarity, as it's only useful for very simple examples.
			results.reserve(currentX.rows);
			for (int sampleIndex = 0; sampleIndex < currentX.rows; ++sampleIndex) {
				results.emplace_back(
					threadPool.enqueue(projection, currentX.row(sampleIndex), regressorLevel, sampleIndex)
					);
			}
			// Gather the results from all threads and store the features:
			Mat features;
			for (auto&& result : results) {
				features.push_back(result.get());
			}

			Mat observedValues;
			if (templates.empty()) { // unknown template case
				observedValues = features;
			}
			else { // known template
				observedValues = features - templates;
			}
			Mat x_k;
			// Calculate x_k = currentX - R * (h(currentX) - y):
			for (int sampleIndex = 0; sampleIndex < currentX.rows; ++sampleIndex) {
				x_k.push_back(Mat(currentX.row(sampleIndex) - regressors[regressorLevel].predict(observedValues.row(sampleIndex)))); // we need Mat() because the subtraction yields a (non-persistent) MatExpr
			}
			currentX = x_k;
			onRegressorIterationCallback(currentX);
		}
		return currentX; // Return the final predictions
	};
Example #27
0
static Mat vlad_feature(Ptr<Feature2D> f2d, Ptr<DescriptorMatcher> matcher, const vector<Mat> &vocabs, const Mat &img) {
    PROFILE
    std::vector<KeyPoint> kp;
    Mat feat, desc;
    {
        PROFILEX("vlad:detect")
        f2d->detectAndCompute(img, Mat(), kp, desc);
    }
    if (desc.rows>0) {
        PROFILEX("vlad:compute")
        rootsift(desc);

        Mat feat;
        for (size_t v=0; v<vocabs.size(); v++) {
            vector<DMatch> matches;
            matcher->match(desc, vocabs[v], matches);
            Mat f = Mat(vocabs[v].size(), CV_32F, 0.0f);
            {
                PROFILEX("vlad:vlad")
                for (size_t j=0; j<matches.size(); j++) {
                    Mat dr = desc.row(matches[j].queryIdx);
                    Mat vr = vocabs[v].row(matches[j].trainIdx);
                    Mat re = vr - dr;
                    normalize(re,re); // innorm
                    f.row(matches[j].trainIdx) += re / vocabs[v].rows;
                }
            }
            feat.push_back(f);
        }
        {
            PROFILEX("vlad:post")
            // power normalization
            Mat f2;
            sqrt(abs(feat), f2);
            feat = sign(feat).mul(f2);
            // L2 normalization
            normalize(feat,feat);
        }
        return feat.reshape(1,1);
    }
    return Mat(); // no keypoints/descriptors found: return an empty feature
}
Example #28
0
    virtual int addTraining(const Mat & img, int label) 
    {
        Mat feat1;
        ext->extract(pre.process(img), feat1);

        Mat fr = feat1.reshape(1,1);
        //if (fr.type() != CV_32F)
        //    fr.convertTo(fr,CV_32F);

        if (! fil.empty())
            fil->filter(fr,fr);

        //features.push_back(fr); // damn memory problems
        if ( features.empty() )
        {
            features = Mat(nimg, feat1.total(), feat1.type()); 
        }
        feat1.copyTo(features.row(labels.rows));
        labels.push_back(label);
        cerr << fr.cols << " i_" << labels.rows << "\r";
        return labels.rows;
    }
Example #29
0
static void make_vocabulary()
{
    if(flag==1)
    {
        return ;
    }
    cout<<" MAKING VOCABULARY...."<<endl;
    for(int i=1; i<=20; i++)
    {
        cout<<" Reading File "<<i<<endl;
        stringstream ss;
        ss << path_People << "person_"<<setfill('0') << setw(3) << i <<".image.png";
        cout<<ss.str()<<endl;
        img=imread(ss.str(),0);
        Mat tempp=imread(ss.str(),1);
        //vector< vector<Point > > superpixel=make_superpixels(tempp);
        //cout<<superpixel.size()<<" Superpixel size "<<endl;
        for(int  k=0; k<1; k++)
        {
            /*   int x1=superpixel[k][0].x;
               int y1=superpixel[k][0].y;
               int x2=superpixel[k][1].x;
               int y2=superpixel[k][1].y;
               Mat newimg=Mat(x2-x1+1,y2-y1+1,0,Scalar(255,255,255));
               for(int l=2; l<superpixel[k].size(); l++)
               {
                  int x=superpixel[k][l].x;
                  int y=superpixel[k][l].y;
                  newimg.at<uchar>(x-x1,y-y1)=img.at<uchar>(x,y);
               }*/
            keypoints.clear();
            detector.detect(img,keypoints);
            detector.compute(img,keypoints,descriptor);
            features_unclustered.push_back(descriptor);
        }
    }
    cout<<"VOCABULARY BUILT...."<<endl;
    cout<<endl;
}
Example #30
0
int face_recognition::similarityOrderedList(Mat image,
                                            vector<Mat>* imageList,
                                            int (*similarityFunction)(Mat, Mat, float*),
                                            OutputArray sortedIndexList) {

	long listLength = imageList->size();
	Mat unsortedList;
	float similarityValue;
	int err_code;
	for(Mat image_2 : *imageList){
		similarityValue = 0.0;
		if((err_code = similarityFunction(image,image_2,&(similarityValue) )) != 0){
			if(DEBUG)
				cerr << "similarityFunction returned: " << err_code << endl;
			return ERROR_SIMILRITY_LIST_FUNCTION;
		}
		unsortedList.push_back(similarityValue);
	}
	Mat sortIdxList;
	sortIdx(unsortedList, sortedIndexList, CV_SORT_EVERY_COLUMN + CV_SORT_DESCENDING);
	// sort(unsortedList, sortedList, CV_SORT_EVERY_COLUMN + CV_SORT_DESCENDING);
	return 0;
}