Example No. 1
// static
void CrossValidator::splitIntoPositiveAndNegativeClasses( const cv::Mat_<float>& xs, const cv::Mat_<int>& labels,
                                                          vector<cv::Mat_<float> >& pset,
                                                          vector<cv::Mat_<float> >& nset)
{
    // Partition the row vectors of xs into the positive (label 1) and negative (label 0) sets.
    const int *labsVec = labels.ptr<int>(0);
    for ( int i = 0; i < xs.rows; ++i)
    {
        assert( labsVec[i] == 0 || labsVec[i] == 1);
        if (labsVec[i] == 1)
            pset.push_back(xs.row(i));
        else if (labsVec[i] == 0)
            nset.push_back(xs.row(i));
    }   // end for
}   // end splitIntoPositiveAndNegativeClasses
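A minimal usage sketch, assuming the CrossValidator class declaration is available and that labels is a 1xN row of 0/1 ints as the code above expects (header name and data values are illustrative):

#include "CrossValidator.h"   // assumed header name
#include <opencv2/core.hpp>
#include <vector>

int main()
{
    // Four 3-dimensional feature vectors with alternating binary labels.
    cv::Mat_<float> xs = (cv::Mat_<float>(4, 3) << 0.1f, 0.2f, 0.3f,
                                                   0.4f, 0.5f, 0.6f,
                                                   0.7f, 0.8f, 0.9f,
                                                   1.0f, 1.1f, 1.2f);
    cv::Mat_<int> labels = (cv::Mat_<int>(1, 4) << 1, 0, 1, 0);

    std::vector<cv::Mat_<float> > pset, nset;
    CrossValidator::splitIntoPositiveAndNegativeClasses(xs, labels, pset, nset);
    // pset now holds rows 0 and 2 of xs; nset holds rows 1 and 3.
    return 0;
}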
Example No. 2
void EncoderBoFSoft::encode(const cv::Mat_<float>& descriptors, cv::Mat_<float>& encoded)
{
	int ndata = descriptors.rows;
	int ndim = descriptors.cols;

	if (ndim != this->_m_nDim)
	{
		throw std::runtime_error("descriptor dimension does not match the encoder dimension");
	}

	encoded.create(ndata, this->_m_nCode);
	encoded.setTo(0.0f);

#pragma omp parallel for
	for (int i = 0; i < ndata; i++)
	{
		// Find the _m_nz codewords nearest to this descriptor.
		Mat index, dist;
		this->_m_pTree->findNearest(descriptors.row(i), _m_nz, INT_MAX, index, noArray(), dist);

		// Soft assignment: weight each selected codeword by std(dist) / dist,
		// so that nearer codewords receive larger weights.
		Scalar mean, std;
		cv::meanStdDev(dist, mean, std);
		cv::divide(std(0), dist, dist);

		for (int j = 0; j < _m_nz; j++)
		{
			encoded(i, index.at<int>(j)) = dist.at<float>(j);
		}
	}
}
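The members _m_pTree, _m_nz and _m_nCode make this snippet hard to run on its own. Below is a minimal standalone sketch of the same soft-assignment idea, using cv::flann::Index in place of the member KD-tree; softEncode, codebook and k are illustrative names and not part of the original class (note that FLANN returns squared L2 distances by default):

#include <opencv2/core.hpp>
#include <opencv2/flann.hpp>

// Soft-assign each descriptor to its k nearest codewords, weighting them by
// std(distances) / distance, as in EncoderBoFSoft::encode above.
cv::Mat_<float> softEncode(const cv::Mat_<float>& codebook,
                           const cv::Mat_<float>& descriptors, int k)
{
    cv::flann::Index tree(codebook, cv::flann::KDTreeIndexParams(4));
    cv::Mat_<float> encoded(descriptors.rows, codebook.rows, 0.0f);

    for (int i = 0; i < descriptors.rows; ++i)
    {
        cv::Mat indices, dists;   // 1 x k (CV_32S / CV_32F)
        tree.knnSearch(descriptors.row(i), indices, dists, k, cv::flann::SearchParams());

        cv::Scalar mean, stddev;
        cv::meanStdDev(dists, mean, stddev);
        cv::divide(stddev[0], dists, dists);   // weight = std / distance

        for (int j = 0; j < k; ++j)
            encoded(i, indices.at<int>(0, j)) = dists.at<float>(0, j);
    }
    return encoded;
}

With, say, a 1000-word codebook and k = 5, each descriptor is mapped to a 1000-dimensional vector with five non-zero weights.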
Example No. 3
	void AddDescriptor(cv::Mat_<double>& descriptors, cv::Mat_<double> new_descriptor, int curr_frame, int num_frames_to_keep)
	{
		// Lazily allocate a fixed-size ring buffer with one row per kept frame.
		if(descriptors.empty())
		{
			descriptors = Mat_<double>(num_frames_to_keep, new_descriptor.cols, 0.0);
		}

		// Overwrite the slot for the current frame (the oldest entry once the buffer is full).
		int row_to_change = curr_frame % num_frames_to_keep;

		new_descriptor.copyTo(descriptors.row(row_to_change));
	}
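A small usage sketch of the ring-buffer behaviour, assuming the AddDescriptor function above is visible in scope (the descriptor contents are dummy values):

#include <opencv2/core.hpp>
using namespace cv;

int main()
{
    Mat_<double> history;                 // empty: allocated on the first call
    const int num_frames_to_keep = 5;

    for (int frame = 0; frame < 12; ++frame)
    {
        Mat_<double> descriptor(1, 8, (double)frame);   // dummy per-frame descriptor
        AddDescriptor(history, descriptor, frame, num_frames_to_keep);
    }
    // history keeps only the 5 most recent descriptors;
    // frame 11 occupies row 11 % 5 == 1.
    return 0;
}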
Example No. 4
cv::Mat_<float> DescriptorJoiner::loadDescriptors( const string& dfile, int* label) throw (DescriptorLengthException)
{
    const cv::Mat_<float> vecs = RFeatures::readDescriptors( dfile, false);
    const int numVecs = vecs.rows;
    const int lab = (int)_labCounts.size();  // Label for these descriptors
    _labCounts.push_back(numVecs);  // Store the number of descriptors for this class label (vector index)
    if ( label != NULL)
        *label = lab;   // Report the class label assigned to this file's descriptors

    // Add vecs to _xs
    for ( int i = 0; i < numVecs; ++i)
    {
        _xs.push_back( vecs.row(i));
        _labs.push_back(lab);
    }   // end for

    return vecs.clone();
}   // end loadDescriptors
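A hypothetical usage sketch; the header name, the file names and the default construction of DescriptorJoiner are assumptions. Each loaded file receives the next class label in load order (0 for the first file, 1 for the second, and so on):

#include "DescriptorJoiner.h"    // assumed header name
#include <opencv2/core.hpp>

void joinTwoClasses()
{
    DescriptorJoiner joiner;              // assumes a default constructor
    int posLabel = -1, negLabel = -1;
    cv::Mat_<float> posVecs = joiner.loadDescriptors( "positives.dsc", &posLabel);  // first file -> class label 0
    cv::Mat_<float> negVecs = joiner.loadDescriptors( "negatives.dsc", &negLabel);  // second file -> class label 1
}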
Example No. 5
   cv::Mat_<int> labels;
   randomGaussianBlobs(3, 20, 2, 0.4f, centers, features, labels, -10.f, 10.f);

   cv::Mat_<float> msCenters;
   cv::Mat_<int> msLabels;
   
   // We use the weights here just to implicitly sort the cluster results
   // in the same order as the inputs.
   cv::Mat_<float> msWeights(1, features.rows);
   msWeights.colRange(0, 20).setTo(2);
   msWeights.colRange(20, 40).setTo(1);
   msWeights.colRange(40, 60).setTo(0.5);

   // Note: 300 iterations are far more than needed (5-10 suffice). This also exercises binning.
   meanShift(features, cv::noArray(), msWeights, msCenters, msLabels, cv::noArray(), 1.2f, 300);

   REQUIRE(msCenters.rows == 3);
   REQUIRE(cv::norm(centers, msCenters) < 1);
   REQUIRE(cv::countNonZero(msLabels == 0) == 20);
   REQUIRE(cv::countNonZero(msLabels == 1) == 20);
   REQUIRE(cv::countNonZero(msLabels == 2) == 20);

   // Run again, but provide just one center.
   cv::Mat_<float> oneCenter(1, 2);
   oneCenter << 12, 12;
   meanShift(features, oneCenter, msWeights, msCenters, msLabels, cv::noArray(), 3.0f, 300);

   REQUIRE(msCenters.rows == 1);
   REQUIRE(cv::norm(centers.row(0), msCenters) < 1);

}
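The test relies on a randomGaussianBlobs() helper that is not shown. One plausible implementation, inferred purely from the call site (the parameter names and the row-major point layout are assumptions):

#include <opencv2/core.hpp>

static void randomGaussianBlobs( int nBlobs, int nPointsPerBlob, int nDims, float stddev,
                                 cv::Mat_<float>& centers, cv::Mat_<float>& features,
                                 cv::Mat_<int>& labels, float minCenter, float maxCenter)
{
    // Draw one center per blob uniformly from [minCenter, maxCenter] in each dimension.
    centers.create(nBlobs, nDims);
    cv::randu(centers, cv::Scalar(minCenter), cv::Scalar(maxCenter));

    features.create(nBlobs * nPointsPerBlob, nDims);
    labels.create(nBlobs * nPointsPerBlob, 1);

    // Scatter points around each center with isotropic Gaussian noise.
    for (int b = 0; b < nBlobs; ++b)
    {
        for (int p = 0; p < nPointsPerBlob; ++p)
        {
            const int r = b * nPointsPerBlob + p;
            cv::Mat_<float> noise(1, nDims);
            cv::randn(noise, cv::Scalar(0), cv::Scalar(stddev));
            cv::Mat_<float> point = centers.row(b) + noise;
            point.copyTo(features.row(r));
            labels(r) = b;
        }
    }
}

A real helper would likely also enforce a minimum separation between the random centers so that the assertions in the test above hold reliably.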