Example No. 1
 template <typename Tp1, typename Alloc1, typename Tp2, typename Alloc2>
 inline
 Tp1 dot_product(const WeightVector<Tp1, Alloc1>& x, const WeightVector<Tp2, Alloc2>& y)
 {
   // restrict the inner product to the overlapping prefix of the two vectors
   const size_t size = utils::bithack::min(x.size(), y.size());
   
   return std::inner_product(x.begin(), x.begin() + size, y.begin(), Tp1());
 }
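The snippet above relies on the templated WeightVector and utils::bithack::min from its surrounding codebase, neither of which is shown here. A minimal self-contained sketch of the same prefix-limited inner product, with std::vector and std::min as stand-ins (illustrative assumptions, not the original types):

#include <algorithm>
#include <iostream>
#include <numeric>
#include <vector>

// Stand-in types: std::vector instead of the codebase's WeightVector,
// std::min instead of utils::bithack::min.
int main() {
  std::vector<double> x = {1.0, 2.0, 3.0};
  std::vector<double> y = {4.0, 5.0};        // shorter vector: only the first two terms contribute

  const std::size_t size = std::min(x.size(), y.size());
  const double dot = std::inner_product(x.begin(), x.begin() + size, y.begin(), 0.0);

  std::cout << dot << "\n";                  // 1*4 + 2*5 = 14
  return 0;
}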
Example No. 2
int main(int argc, char** argv) {
	// handle parameters
	po::variables_map cfg;
	if (!init_params(argc,argv, &cfg)) exit(1); // something is wrong

	// init weights
	DenseWeightVector dense_weights;
	WeightVector weights;
	if (cfg.count("weights")) Weights::InitFromFile(cfg["weights"].as<string>(), &dense_weights);
	Weights::InitSparseVector(dense_weights, &weights);
	cerr << "Current Weight Vector:\n";
	for (WeightVector::iterator i=weights.begin(); i!=weights.end(); ++i)
		cerr << i->first << " " << FD::Convert(i->first) << "=" << i->second << endl;
	/*cerr << "\nDense Weights:\n";
	for (int i = 0 ; i < dense_weights.size(); ++i)
		cerr << i << " " << dense_weights[i] << endl;*/
	cerr << "# of features: " << FD::NumFeats() << " (-1 dummy feature @ idx 0)\n\n";

	// load instances
	vector<TrainingInstance> instances;
	loadInstances(cfg["input"].as<string>(), instances);

 	// setup output directory
	//MkDirP(cfg["output"].as<string>());
	//stringstream outss;
	//outss << cfg["output"].as<string>() << "/";
	//const string out_path = outss.str();

	// setup loss function
	ListwiseLossFunction* lossfunc = set_loss(&cfg);
	cerr << "listwise loss function: " << cfg["loss"].as<string>() << "\n";

	// run AdaRank optimizer
	AdaRank adarank(
			instances,
			instances.size(),
			cfg["iterations"].as<int>(),
			cfg["epsilon"].as<double>(),
			dense_weights,
			lossfunc,
			cfg.count("verbose")
			);

	adarank.run();

	// write output weight vector
	DenseWeightVector new_dense_weights = adarank.GetWeightVector();
	WeightVector new_weights;
	Weights::InitSparseVector(new_dense_weights, &new_weights);

	cerr << "Final Weight Vector:\n";
	for (WeightVector::iterator i=new_weights.begin(); i!=new_weights.end(); ++i)
		cerr << i->first << " " << FD::Convert(i->first) << "=" << i->second << endl;
	Weights::WriteToFile(cfg["output"].as<string>(), new_dense_weights, true, NULL);

}
Example No. 3
  template <typename Tp1, typename Alloc1, typename Tp2, typename Alloc2, typename BinaryOp>
  inline
  Tp1 dot_product(const WeightVector<Tp1, Alloc1>& x, const WeightVector<Tp2, Alloc2>& y, BinaryOp op)
  {
    typedef WeightVector<Tp1, Alloc1> weight_vector1_type;
    typedef WeightVector<Tp2, Alloc2> weight_vector2_type;
    
    const size_t size = utils::bithack::min(x.size(), y.size());
    
    // combine the overlapping prefix of the two vectors with op, accumulating with plus
    Tp1 __dot = std::inner_product(x.begin(), x.begin() + size, y.begin(), Tp1(), std::plus<Tp1>(), op);
    
    // fold in the tail of the longer vector, pairing each element with a default-constructed value
    if (x.size() > y.size()) {
      typename weight_vector1_type::const_iterator iter_end = x.end();
      for (typename weight_vector1_type::const_iterator iter = x.begin() + size; iter != iter_end; ++ iter)
        __dot += op(*iter, Tp2());
    } else {
      typename weight_vector2_type::const_iterator iter_end = y.end();
      for (typename weight_vector2_type::const_iterator iter = y.begin() + size; iter != iter_end; ++ iter)
        __dot += op(Tp1(), *iter);
    }
    return __dot;
  }
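A self-contained sketch of what this BinaryOp overload computes when the vectors differ in length, again with std::vector standing in for WeightVector: the overlapping prefix is combined with op, and each tail element of the longer vector is paired with a default-constructed (zero) value, so with std::multiplies the tail contributes nothing. Types and values below are illustrative assumptions, not the original API:

#include <algorithm>
#include <functional>
#include <iostream>
#include <numeric>
#include <vector>

int main() {
  std::vector<double> x = {1.0, 2.0, 3.0};   // longer vector
  std::vector<double> y = {4.0, 5.0};

  const std::size_t size = std::min(x.size(), y.size());

  // overlapping prefix: accumulated with plus, combined element-wise with multiplies
  double dot = std::inner_product(x.begin(), x.begin() + size, y.begin(), 0.0,
                                  std::plus<double>(), std::multiplies<double>());

  // tail of the longer vector paired with zero, mirroring the loop in the overload above
  for (std::size_t i = size; i < x.size(); ++i)
    dot += x[i] * 0.0;

  std::cout << dot << "\n";                  // still 14: the tail adds nothing under multiplication
  return 0;
}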
Example No. 4
int main(int argc, char** argv) {

	if (argc < 2 || argc > 3) {
		cerr << "USAGE: view-instances <binary instance file> [<weights>]\n";
		exit(1);
	}

	bool has_w = (argc==3);
		
	// load instances
	vector<Instance> instances;
	loadInstances(string(argv[1]), instances);
	
	// weights
	WeightVector weights;
	if (has_w) {
		DenseWeightVector dense_weights;
		Weights::InitFromFile(string(argv[2]), &dense_weights);
		Weights::InitSparseVector(dense_weights, &weights);
		cerr << "Current Weight Vector:\n";
		for (WeightVector::iterator i=weights.begin(); i!=weights.end(); ++i)
			cerr << i->first << " " << FD::Convert(i->first) << "=" << i->second << endl;
	}

	double likelihood = 0.0;
	for (size_t i = 0; i < instances.size(); ++i) {
		if (instances[i].ir_sorted) {
			double likelihood_i = 0.0;
			if (has_w)
				likelihood_i = PlackettLuce::pl_likelihood(instances[i], weights);
			else
				likelihood_i = PlackettLuce::pl_likelihood(instances[i]);
			cout << "P(y|x;";
			if (has_w) cout << "w)=";
			else cout << "D)=";
			cout << likelihood_i << "\n";
			// accumulate only for instances that actually produced a likelihood
			likelihood += likelihood_i;
		}

		if (has_w)
			cout << instances[i].AsString(weights) << endl;
		else
			cout << instances[i].AsString() << endl;
	}
	
	cerr << "Likelihood=" << likelihood << "\n";

}
Example No. 5
boost::shared_ptr<HypothesisInfo> MakeHypothesisInfo(Hypergraph& hg) {
	/*
	 * create a HypothesisInfo with its feature vector, translation, and relevance score;
	 * the relevance feature values are then removed from the feature vector
	 * (and optionally any frozen features)
	 */
	boost::shared_ptr<HypothesisInfo> h(new HypothesisInfo);
	h->features = ViterbiFeatures(hg);
	h->rel = h->features.dot(relw);
	// clean relevance weights from feature vector
	for (WeightVector::iterator it=relw.begin(); it!=relw.end(); ++it) { h->features.set_value(it->first, .0); }
	ViterbiESentence(hg, &(h->hyp));
	if (freeze) { for (unsigned x=0;x<frozen_features.size();++x) { h->features.set_value(frozen_features[x], .0); } }
	// for rel scaling:
	if (h->rel > MAX_REL) MAX_REL = h->rel;
	if (h->rel < MIN_REL) MIN_REL = h->rel;
	return h;
}