Example #1
0
// Draw two uniform points in [0,1], order them, and return the lengths of
// the three segments they cut the unit interval into. The three entries are
// non-negative and always sum to exactly 1.
std::vector<double> sampleAlpha( std::mt19937& rng )
{
	const double u = (double)rng() / rng.max();
	const double v = (double)rng() / rng.max();
	const double lo = std::min( u, v );
	const double hi = std::max( u, v );
	std::vector<double> res(3);
	res[0] = lo;        // [0, lo]
	res[1] = hi - lo;   // [lo, hi]
	res[2] = 1.0 - hi;  // [hi, 1]
	return res;
}
void SeedFeatureFactory::train( const std::vector< std::shared_ptr<ImageOverSegmentation> > &ios, const std::vector<VectorXs> & lbl ) {
	printf("  * Training SeedFeature\n");
	static std::mt19937 rand;
	const int N_SAMPLES = 5000;
	int n_pos=0, n_neg=0;
	for( VectorXs l: lbl ) {
		n_pos += (l.array()>=0).cast<int>().sum();
		n_neg += (l.array()==-1).cast<int>().sum();
	}
	
	// Collect training examples
	float sampling_freq[] = {0.5f*N_SAMPLES / n_neg, 0.5f*N_SAMPLES / n_pos};
	std::vector<RowVectorXf> f;
	std::vector<float> l;
#pragma omp parallel for
	for( int i=0; i<ios.size(); i++ ) {
		RMatrixXf ftr = SeedFeature::computeObjFeatures( *ios[i] );
		for( int j=0; j<ios[i]->Ns(); j++ )
			if( lbl[i][j] >= -1 && rand() < rand.max()*sampling_freq[ lbl[i][j]>=0 ] ) {
#pragma omp critical
				{
					l.push_back( lbl[i][j]>=0 );
					f.push_back( ftr.row(j) );
				}
			}
	}
	
	printf("    - Computing parameters\n");
	// Fit the ranking functions
	RMatrixXf A( f.size(), f[0].size() );
	VectorXf b( l.size() );
	for( int i=0; i<f.size(); i++ ) {
		A.row(i) = f[i];
		b[i] = l[i];
	}
	
	// Solve A*x = b
	param_ = A.colPivHouseholderQr().solve(b);
	printf("    - done %f\n",(A*param_-b).array().abs().mean());
}
Example #3
0
// Uniform pseudo-random double in [0, 1], drawn from the file-global `rng`.
// NOTE(review): rng() yields an unsigned value, so the original fabs() was a
// no-op; replaced with an explicit cast that states the intent.
inline double rndf()
{
	return (double)rng() / rng.max();
}