Example #1
int main(int const argc, char const * const * const argv) {
    Option opt;
    try {
        opt = parse_option(argv_to_args(argc, argv));
    }
    catch(std::invalid_argument const &e) {
        std::cout << e.what();
        return EXIT_FAILURE;
    }

    std::cout << "reading data..." << std::flush;
    Problem const Va = read_problem(opt.Va_path);
    Problem const Tr = read_problem(opt.Tr_path);
    std::cout << "done\n" << std::flush;

    std::cout << "initializing model..." << std::flush;
    Model model(Tr.nr_feature, opt.nr_factor, Tr.nr_field);
    init_model(model);
    std::cout << "done\n" << std::flush;

    omp_set_num_threads(static_cast<int>(opt.nr_threads));

    train(Tr, Va, model, opt);

    omp_set_num_threads(1);

    if(opt.do_prediction) predict(Va, model, opt.Va_path+".out");

    return EXIT_SUCCESS;
}
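Example #1 relies on several project-specific helpers (argv_to_args, parse_option, read_problem, train, predict) that are not shown. Purely as a hedged illustration, argv_to_args presumably packs the raw arguments into a container with value semantics; a minimal sketch, assuming it returns a std::vector<std::string>:

#include <string>
#include <vector>

// Hypothetical helper: collect argv[1..argc-1] into a vector of strings so that
// parse_option() can work on owned strings instead of raw char pointers.
std::vector<std::string> argv_to_args(int const argc, char const * const * const argv)
{
    std::vector<std::string> args;
    if(argc > 1)
        args.reserve(static_cast<std::size_t>(argc - 1));
    for(int i = 1; i < argc; ++i)   // skip argv[0], the program name
        args.emplace_back(argv[i]);
    return args;
}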
Example #2
int main(int argc, char **argv)
{
	char input_file_name[1024];
	char model_file_name[1024];
	const char *error_msg;

	parse_command_line(argc, argv, input_file_name, model_file_name);
	read_problem(input_file_name);
	error_msg = svm_check_parameter(&prob,&param);

	if(error_msg)
	{
		fprintf(stderr,"Error: %s\n",error_msg);
		exit(1);
	}

	if(cross_validation)
	{
		do_cross_validation();
	}
	else
	{
		model = svm_train(&prob,&param);
		svm_save_model(model_file_name,model);
		svm_destroy_model(model);
	}
	svm_destroy_param(&param);
	free(prob.y);
	free(prob.x);
	free(x_space);

	return 0;
}
Example #3
File: train.cpp Project: cental/stc
int train_fs(const char* input_file_name, const char* model_file_name){
	// Initialization
	const char* error_msg;
	set_default_params();
	read_problem(input_file_name);
	error_msg = check_parameter(&prob,&param);
	if(error_msg){
		fprintf(stderr,"Error: %s\n",error_msg);
		return -1;
	}

	// Do the cross-validation and save accuracy
	double accuracy = do_cross_validation(nr_fold);
	std::string info_fpath = std::string(model_file_name) + ".info";
	FILE* info = fopen(info_fpath.c_str(), "w");
	fprintf(info, "Accuracy : %f", accuracy);
	//fflush(info);	
	fclose(info);

	// Train a model on the whole dataset
	model_train=train(&prob, &param);
	if(save_model(model_file_name, model_train)){
		fprintf(stderr,"can't save model to file %s\n",model_file_name);
		return -1;
	}

	// Free resources
	destroy_param(&param);
	free(prob.y);
	free(prob.x);
	free(x_space);
	free(line);
	
	return 0;
}
Example #4
int main(int argc, char **argv)
{
	char input_file_name[1024];
	char model_file_name[1024];
	const char *error_msg;

	parse_command_line(argc, argv, input_file_name, model_file_name);
	read_problem(input_file_name);
	error_msg = check_parameter(&prob,&param);

	if(error_msg)
	{
		fprintf(stderr,"ERROR: %s\n",error_msg);
		exit(1);
	}
	if( flag_find_C && flag_warm_start)
	{
		fprintf(stderr,"ERROR: Option -C and -i can't both exist\n");
		exit(1);
	}
	if (flag_find_C)
	{
		do_find_parameter_C();
	}
	else if(flag_cross_validation)
	{
		do_cross_validation();
	}
	else
	{
		if(flag_warm_start)
		{
			if(prob.n != initial_model->nr_feature)
				fprintf(stderr,"WARNING: The number of features in the input file does not match that in the initial model\n");
			model_=warm_start_train(&prob, &param, initial_model);
			free_and_destroy_model(&initial_model);
		}
		else
			model_=train(&prob, &param);
		if(save_model(model_file_name, model_))
		{
			fprintf(stderr,"can't save model to file %s\n",model_file_name);
			exit(1);
		}
		free_and_destroy_model(&model_);
	}
	destroy_param(&param);
	free(prob.y);
	free(prob.x);
	free(x_space);
	free(line);

	return 0;
}
Example #5
bool CmySvmArth::LoadTrainData(const char* pPath)
{
	if(pPath==NULL)
		return false;
	if(!ChangeDataFormat(pPath))
		return false;
	Stringoper oper;
	char p[1024];
	oper.CStringtoChars(TmpFileName , p);
	read_problem(p);
	return true;
}
Example #6
int main() {
	int n_problems, vertex_id, max_flow;
	t_node vertex;
	read_graph();

	initialize_preflow(graph, graph->vertexs[0]);

	scanf("%d", &n_problems);

	while (n_problems > 0) {
		links_to_cut = -1;
		reset_crit_points(graph);
		if (read_problem(graph) == 0) { /* fewer than 2 critical points */
			printf("0\n");
			--n_problems;
			continue;
		}
		for (vertex_id = 0; vertex_id < graph->max_vertex; ++vertex_id) {
			vertex = graph->vertexs[vertex_id];
			if (vertex->is_critical) {
				max_flow = relabel_to_front(graph, vertex);
				if (max_flow == 0) {
					links_to_cut = 0;
					break;
				} else if (max_flow < links_to_cut || links_to_cut < 0) {
					links_to_cut = max_flow;
				}
			}

		}
		--n_problems;
		printf("%d\n", links_to_cut);
	}

	/* n_problems = read the problem line count

	   for each line (problem):
	     reset critical points
	     read_problem();  <- sets the critical points
	     for each V in critical points:
	       maxflow = relabel_to_front()
	       if maxflow == 0:
	         links_to_cut = 0
	         break
	       else if maxflow < links_to_cut:
	         links_to_cut = maxflow

	   print links_to_cut */

	return 0;
}
Example #7
File: train.c Project: Joelone/MLEA
int main(int argc, char **argv)
{
	char input_file_name[1024];
	char model_file_name[1024];
	const char *error_msg;

	parse_command_line(argc, argv, input_file_name, model_file_name);
	read_problem(input_file_name);
	error_msg = check_parameter(&prob,&param);

	if(error_msg)
	{
		fprintf(stderr,"Error: %s\n",error_msg);
		exit(1);
	}

	if(flag_cross_validation)
	{
		if (nr_fold <= 10)
		{
			do_cross_validation();
		}
		else
		{
			double cv;
			nr_fold = nr_fold - 10;
			cv =  binary_class_cross_validation(&prob, &param, nr_fold);
			printf("Cross Validation = %g%%\n",100.0*cv);
		}
	}
	else
	{
		model_=train(&prob, &param);
		if(save_model(model_file_name, model_))
		{
			fprintf(stderr,"can't save model to file %s\n",model_file_name);
			exit(1);
		}
		free_and_destroy_model(&model_);
	}
	destroy_param(&param);
	free(prob.y);
	free(prob.x);
	free(prob.W);
	free(x_space);
	free(line);

	return 0;
}
Example #8
int main(int argc, char **argv)
{
	char input_file_name[1024];
	char model_file_name[1024];
	const char *error_msg;

	parse_command_line(argc, argv, input_file_name, model_file_name);
	read_problem(input_file_name);
#ifdef CUSTOM_SOLVER
    if (param.svm_type == ONE_CLASS)
    {
        param.strong_footlier_indexes = filter_strong_footliers(input_file_name);
    }
#endif
	error_msg = svm_check_parameter(&prob,&param);

	if(error_msg)
	{
		fprintf(stderr,"Error: %s\n",error_msg);
		exit(1);
	}

	if(cross_validation)
	{
		if(param.svm_type == R2 || param.svm_type == R2q)
			fprintf(stderr, "\"R^2\" cannot do cross validation.\n");
		else
			do_cross_validation();
	}
	else
	{
		model = svm_train(&prob,&param);
		if(param.svm_type == R2 || param.svm_type == R2q)
			fprintf(stderr, "\"R^2\" does not generate model.\n");
		else if(svm_save_model(model_file_name,model))
		{
			fprintf(stderr, "can't save model to file %s\n", model_file_name);
			exit(1);
		}
		svm_free_and_destroy_model(&model);
	}
	svm_destroy_param(&param);
	free(prob.y);
	free(prob.x);
	free(x_space);
	free(line);

	return 0;
}
Example #9
static void
run_controller(list<string>& files, list<string>& phrases, int time_limit, int memory_limit)
{
  Json::Value result(Json::arrayValue);

  for (string fn : files)
  {
    problem p = read_problem(fn);
    list<solution> solved = solve_problem(p);
    for (solution& r : solved)
      result.append(toJson(r));
  }

  cout << result << endl;
}
Example #10
int main(int argc, char **argv)
{
	char input_file_name[1024];
	char model_file_name[1024];
	const char *error_msg;

	parse_command_line(argc, argv, input_file_name, model_file_name);
	read_problem(input_file_name);
	param.train_file = Malloc(char,1024);
	strcpy(param.train_file, input_file_name);
	error_msg = check_parameter(&prob,&param);

	if(error_msg)
	{
		fprintf(stderr,"ERROR: %s\n",error_msg);
		exit(1);
	}

	if(flag_cross_validation)
	{
		do_cross_validation();
	}
	else
	{
		clock_t start_cpu, end_cpu;
		double cpu_time_used;
     	start_cpu = clock();
		model_=train(&prob, &param);
		end_cpu = clock();
     	cpu_time_used = ((double) (end_cpu - start_cpu)) / CLOCKS_PER_SEC;
		if(save_model(model_file_name, model_))
		{
			fprintf(stderr,"can't save model to file %s\n",model_file_name);
			exit(1);
		}
		free_and_destroy_model(&model_);
	}
	destroy_param(&param);
	free(prob.y);
	free(prob.x);
	free(x_space);
	free(line);

	return 0;
}
Example #11
int main(int argc, char* argv[])
{
	boostStruct		bst;
	char			input_file_name[1024], model_file_name[1024];

	boost_config(&bst);

	parse_command_line(argc, argv, &input_file_name[0], &model_file_name[0], &bst);

	read_problem(input_file_name, &bst);

	adaBoost(&bst);

	writeModel(&bst, model_file_name);

	boost_destroy(&bst);

}
Example #12
	struct model* main(int argc, char **argv)
	{
		char input_file_name[1024];
		char model_file_name[1024];
		const char *error_msg;

		parse_command_line(argc, argv, input_file_name, model_file_name);
		auto prob = read_problem(input_file_name);
		error_msg = check_parameter(&prob, &param);

		if (error_msg)
		{
			fprintf(stderr, "ERROR: %s\n", error_msg);
			exit(1);
		}

		struct model *pmodel = NULL;

		if (flag_find_C)
		{
			do_find_parameter_C(&prob);
		}
		else if (flag_cross_validation)
		{
			do_cross_validation(&prob);
		}
		else
		{
			pmodel = train(&prob, &param);
			/*if (save_model(model_file_name, pmodel))
			{
				fprintf(stderr, "can't save model to file %s\n", model_file_name);
				exit(1);
			}
			free_and_destroy_model(&pmodel);*/
		}
		destroy_param(&param);
		free(prob.y);
		free(prob.x);
		free(x_space);
		free(line);

		return pmodel;
	}
Example #13
int main(int argc, char **argv)
{
	char input_file_name[1024];
	char model_file_name[1024];
	const char *error_msg;
	//srand(time(0));

	parse_command_line(argc, argv, input_file_name, model_file_name);
	read_problem(input_file_name);
	error_msg = svm_check_parameter(&prob,&param);

	if(error_msg)
	{
		fprintf(stderr,"ERROR: %s\n",error_msg);
		exit(1);
	}

	if(cross_validation)
	{
		//do_cross_validation();
		double cv = binary_class_cross_validation(&prob, &param, nr_fold);
		printf("Cross Validation = %g%%\n",100.0*cv); //modified
	}
	else
	{
		model = svm_train(&prob,&param);
		if(svm_save_model(model_file_name,model))
		{
			fprintf(stderr, "can't save model to file %s\n", model_file_name);
			exit(1);
		}
		svm_free_and_destroy_model(&model);
	}
	svm_destroy_param(&param);
	free(prob.y);
	free(prob.x);
	free(x_space);
	free(line);

	return 0;
}
Example #14
// Interface function of matlab
// now assume prhs[0]: label prhs[1]: features
int main(int argc, char **argv)
{
	const char *error_msg;
	// fix random seed to have same results for each run
	// (for cross validation)
	srand(1);

	char input_file_name[1024];
	char model_file_name[1024];

   
	parse_command_line(argc, argv, input_file_name, model_file_name);
	read_problem(input_file_name);
	error_msg = check_parameter(&prob,&param);
	if(error_msg)
	{
		fprintf(stderr,"Error: %s\n",error_msg);
		destroy_param(&param);
		free(prob.y);
		free(prob.x);
		free(x_space);
		exit(1);
	}

	if(cross_validation_flag)
	{
		do_cross_validation();
	}
	else
	{
		model_=FGM_train(&prob, &param);
		printf("training is done!\n");
		save_model_poly(model_file_name, model_);
		printf("model is saved!\n");
		destroy_model(model_);
	}
	destroy_param(&param);
	free(prob.y);
	free(prob.x);
	free(x_space);
}
Example #15
int main(int argc, char **argv)
{
	char input_file_name[1024];
	char model_file_name[1024];
	const char *error_msg;

	parse_command_line(argc, argv, input_file_name, model_file_name);
	read_problem(input_file_name);
	error_msg = svm_check_parameter(&prob,&param);
	if(error_msg)
	{
		fprintf(stderr,"ERROR: %s\n",error_msg);
		exit(1);
	}

	if(cross_validation)
	{
		do_cross_validation();
	}
	else
	{
		model = svm_train(&prob,&param);
		if(svm_save_model(model_file_name,model))
		{
			fprintf(stderr, "can't save model to file %s\n", model_file_name);
			exit(1);
		}
		svm_free_and_destroy_model(&model);
	}
	svm_destroy_param(&param);
	free(prob.y);
#ifdef _DENSE_REP
	for (int i = 0; i < prob.l; ++i)
		free((prob.x+i)->values);
#else
	free(x_space);
#endif
	free(prob.x);
	free(line);

	return 0;
}
Example #16
/*
 * Given a stream to the input file in the *.gr-format, this reads from the file
 * the graph represented by this file.  If the file is not conforming to the
 * format, it throws a corresponding std::invalid_argument with one of the error
 * messages defined above.
 */
void read_graph(std::ifstream& fin, graph& g) {
  current_state = COMMENT_SECTION;
  n_edges = -1;
  n_vertices = -1;

  if(!fin.is_open()){
    throw std::invalid_argument(FILE_ERROR);
  }

  std::string line;
  std::string delimiter = " ";

  while(std::getline(fin, line)) {
    if(line == "" || line == "\n") {
      throw std::invalid_argument(EMPTY_LINE);
    }

    std::vector<std::string> tokens;
    size_t oldpos = 0;
    size_t newpos = 0;

    while(newpos != std::string::npos) {
      newpos = line.find(delimiter, oldpos);
      tokens.push_back(line.substr(oldpos, newpos-oldpos));
      oldpos = newpos + delimiter.size();
    }

    if (tokens[0] == "c") {
      continue;
    } else if (tokens[0] == "p") {
      read_problem(tokens,g);
    } else {
      read_graph_edge(tokens, g);
    }
  }

  if (g.num_edges != n_edges) {
    throw std::invalid_argument(INV_PROB);
  }
}
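The doc comment above pins down what read_problem(tokens, g) must do in Example #16: consume the "p" line of the *.gr file and record the announced sizes that are checked against g.num_edges at the end. A minimal sketch under those assumptions; the globals n_vertices and n_edges and the error constant INV_PROB come from the surrounding file, while the exact "p <descriptor> <n_vertices> <n_edges>" layout is an assumption:

// Hypothetical sketch, reusing the globals and error constants from read_graph above.
void read_problem(std::vector<std::string> const& tokens, graph& g) {
  if(n_vertices >= 0 || tokens.size() != 4) {
    throw std::invalid_argument(INV_PROB);  // duplicate or malformed problem line
  }
  n_vertices = std::stoi(tokens[2]);
  n_edges = std::stoi(tokens[3]);
  (void)g;  // assumption: vertices and edges are added later by read_graph_edge
}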
Example #17
int svmtrain(int argc, char **argv)
{
	char input_file_name[1024];
	char model_file_name[1024];
	const char *error_msg;

	parse_command_line(argc, argv, input_file_name, model_file_name);
	read_problem(input_file_name);
	error_msg = svm_check_parameter(&prob,&param);

	if(error_msg)
	{
		LOGD("ERROR: %s\n",error_msg);
		exit(1);
	}

	if(cross_validation)
	{
		do_cross_validation();
	}
	else
	{
		modelt = svm_train(&prob,&param);
		if(svm_save_model(model_file_name,modelt))
		{
			LOGD("can't save model to file %s\n", model_file_name);
			exit(1);
		}
		svm_free_and_destroy_model(&modelt);
	}
	svm_destroy_param(&param);
	free(prob.y);
	free(prob.x);
	free(x_space);
	free(line);

	return 0;
}
Example #18
int main(int argc, char **argv)
{
	char input_file_name[1024];
	char model_file_name[1024];
	const char *error_msg;

	parse_command_line(argc, argv, input_file_name, model_file_name);
	read_problem(input_file_name);
	error_msg = check_parameter(&prob,&param);

	if(error_msg)
	{
		fprintf(stderr,"ERROR: %s\n",error_msg);
		exit(1);
	}

	if(flag_cross_validation)
	{
		do_cross_validation();
	}
	else
	{
		model_=train(&prob, &param);
		if(save_model(model_file_name, model_))
		{
			fprintf(stderr,"can't save model to file %s\n",model_file_name);
			exit(1);
		}
		free_and_destroy_model(&model_);
	}
	destroy_param(&param);
	free(prob.y);
	free(prob.x);
	free(x_space);
	free(line);

	return 0;
}
Example #19
void mexFunction( int nlhs, mxArray *plhs[],
		int nrhs, const mxArray *prhs[] )
{
	if(nrhs == 1)
	{
		char filename[256];

		mxGetString(prhs[0], filename, mxGetN(prhs[0]) + 1);

		if(filename == NULL)
		{
			mexPrintf("Error: filename is NULL\n");
			return;
		}

		read_problem(filename, plhs);
	}
	else
	{
		exit_with_help();
		fake_answer(plhs);
		return;
	}
}
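fake_answer() is not shown in Example #19. In the stock LIBSVM/LIBLINEAR MATLAB readers it simply hands back empty outputs so the caller fails gracefully; a sketch along those lines (the number and meaning of the outputs, a label vector plus an instance matrix, is an assumption here):

// Hypothetical sketch: return empty matrices so MATLAB still gets well-defined outputs.
static void fake_answer(mxArray *plhs[])
{
	plhs[0] = mxCreateDoubleMatrix(0, 0, mxREAL);	// label vector
	plhs[1] = mxCreateDoubleMatrix(0, 0, mxREAL);	// instance matrix
}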
Example #20
int main(int argc, char** argv)
{
  struct svm_parameter param;                // set by parse_command_line
  struct svm_problem prob;                // set by read_problem

  // Instantiate a ModelManager:
  ModelManager manager("Test SVM");

  // Parse command-line:
  if (manager.parseCommandLine((const int)argc, (const char**)argv, "", 0, 0) == false)
    return(1);

  manager.start();

  //default parameters
  param.svm_type = C_SVC;
  param.kernel_type = RBF;
  param.degree = 3;
  param.gamma = 0;        // 1/k
  param.coef0 = 0;
  param.nu = 0.5;
  param.cache_size = 100;
  param.C = 1;
  param.eps = 1e-3;
  param.p = 0.1;
  param.shrinking = 1;
  param.probability = 0;
  param.nr_weight = 0;
  param.weight_label = NULL;
  param.weight = NULL;


  read_problem("tests/train.1.scale", prob, param);

  struct svm_model *model = svm_train(&prob,&param);
  //if((model=svm_load_model(argv[i+1]))==0)
  //{
  //        fprintf(stderr,"can't open model file %s\n",argv[i+1]);
  //        exit(1);
  //}

  //svm_save_model(model_file_name,model);
  //svm_destroy_model(model);

  //predict

  LINFO("Predicting");
  predict("tests/test.1.scale", model);
  LINFO("Done");

  svm_destroy_param(&param);
  free(prob.y);
  free(prob.x);
  //free(x_space);


  // stop all our ModelComponents
  manager.stop();

  // all done!
  return 0;
}
Example #21
int main(int argc, char* argv[]) {
	bool verbose;
	bool interior_point;
	double nu;
	bool weight_sharing;
	bool force;
	std::string train_filename;
	std::string output_filename;
	std::string solver;
	std::string mpsfile;

	// Command line options
	po::options_description generic("Generic Options");
	generic.add_options()
		("help", "Produce help message")
		("verbose", "Verbose output")
		;

	po::options_description input_options("Input/Output Options");
	input_options.add_options()
		("train", po::value<std::string>
			(&train_filename)->default_value("training.txt"),
			"Training file in \"label s0-m0.txt s0-m1.txt ...\" format, "
			"one sample per row.")
		("output", po::value<std::string>
			(&output_filename)->default_value("output.txt"),
			"File to write weight matrix to.  If \"--weight_sharing 1\" is "
			"used, this is a single line containing the alpha vector.  If "
			"no weight sharing is used, it is a matrix with number-of-classes "
			"rows and number-of-weak-learners columns.")
		("force", po::value<bool>(&force)->default_value(false),
			"Force overwriting the output file.  Otherwise, if the "
			"output file already exists, the program is aborted immediately.")
		("writemps", po::value<std::string>(&mpsfile)->default_value(""),
			"Write linear programming problem as MPS file.")
		;

	po::options_description lpboost_options("LPBoost Options");
	lpboost_options.add_options()
		("nu", po::value<double>(&nu)->default_value(0.1),
			"nu-parameter for 2-class LPBoost.  A larger value "
			"indicates stronger regularization")
		("weight_sharing", po::value<bool>(&weight_sharing)->default_value(true),
			"Share classifier weights among all classes.")
		("interior_point",
			po::value<bool>(&interior_point)->default_value(true),
			"Use interior point (true) or simplex method (false) to "
			"solve the LPBoost master problem")
		("solver", po::value<std::string>(&solver)->default_value("clp"),
			"LP solver to use.  One of \"clp\" or \"mosek\".")
		;

	po::options_description all_options;
	all_options.add(generic).add(input_options).add(lpboost_options);
	po::variables_map vm;
	po::store(po::command_line_parser(argc, argv).options(all_options).run(), vm);
	po::notify(vm);

	// Boolean flags
	verbose = vm.count("verbose");

	if (vm.count("help")) {
		std::cerr << "mclp $Id: mclp.cpp 1229 2008-03-10 10:26:34Z nowozin $" << std::endl;
		std::cerr << "===================================================="
			<< "===========================" << std::endl;
		std::cerr << "Copyright (C) 2008 -- "
			<< "Sebastian Nowozin <*****@*****.**>"
			<< std::endl;
		std::cerr << std::endl;
		std::cerr << "Usage: mclp [options]" << std::endl;
		std::cerr << std::endl;
		std::cerr << "Train a multiclass LPBoost model for given and fixed multiclass "
			<< "weak learners." << std::endl;
		std::cerr << all_options << std::endl;

		exit(EXIT_SUCCESS);
	}

	// Check if output file already exists
	if (boost::filesystem::exists(boost::filesystem::path(output_filename))
		&& force == false) {
		std::cout << "Output file \"" << output_filename << "\" "
			<< "already exists, exiting." << std::endl;
		exit(EXIT_SUCCESS);
	}

	// Read in training data
	std::cout << "Training file: " << train_filename << std::endl;
	std::vector<int> labels;	// discrete class labels, >= 0, < K.
	std::vector<std::vector<std::string> > data_S_M;	// [n][m]
	int number_classes = read_problem(train_filename, labels, data_S_M);
	if (number_classes <= 0) {
		std::cerr << "Failed to read in training data." << std::endl;
		exit(EXIT_FAILURE);
	}
	std::cout << labels.size() << " samples, "
		<< number_classes << " classes." << std::endl;

	// Instantiate multiclass classifier and fill it with training data
	Boosting::LPBoostMulticlassClassifier mlp(number_classes, nu, weight_sharing);
	mlp.InitializeBoosting(labels, interior_point, solver);
	read_problem_data(mlp, data_S_M, number_classes);

	if (mpsfile.empty() == false)
		mlp.WriteMPS(mpsfile);

	// Solve
	std::cout << "Solving linear program..." << std::endl;
	mlp.Update();
	std::cout << "Done." << std::endl;
	std::cout << "Soft margin " << mlp.Rho() << ", objective "
		<< mlp.Gamma() << std::endl;

	// Print weights
	const std::vector<std::vector<double> >& clw = mlp.ClassifierWeights();
	std::cout << "Writing (K,M) weight matrix to \""
		<< output_filename << "\", K = "
		<< (weight_sharing ? 1 : number_classes)
		<< ", M = " << clw[0].size() << std::endl;

	std::ofstream wout(output_filename.c_str());
	if (wout.fail()) {
		std::cerr << "Failed to open \"" << output_filename
			<< "\" for writing." << std::endl;
		exit(EXIT_FAILURE);
	}
	wout << std::setprecision(12);
	for (unsigned int aidx = 0; aidx < clw.size(); ++aidx) {
		for (unsigned int bidx = 0; bidx < clw[aidx].size(); ++bidx) {
			wout << (bidx == 0 ? "" : " ") << clw[aidx][bidx];
		}
		wout << std::endl;
	}
	wout.close();

	exit(EXIT_SUCCESS);
}
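The --train help text in Example #21 fixes the input format ("label s0-m0.txt s0-m1.txt ...", one sample per row), and main() treats a non-positive return value from read_problem as failure. A minimal sketch consistent with that contract; the parsing details and error handling are assumptions, not the project's actual reader:

#include <algorithm>
#include <fstream>
#include <sstream>
#include <string>
#include <vector>

// Hypothetical sketch: parse "label file0 file1 ..." rows; return the number of
// classes K on success (labels assumed to be 0..K-1), non-positive on failure.
int read_problem(const std::string& filename, std::vector<int>& labels,
	std::vector<std::vector<std::string> >& data_S_M)
{
	std::ifstream in(filename.c_str());
	if (in.fail())
		return -1;

	int max_label = -1;
	std::string line;
	while (std::getline(in, line)) {
		if (line.empty())
			continue;
		std::istringstream iss(line);
		int label;
		if (!(iss >> label) || label < 0)
			return 0;	// malformed row
		std::vector<std::string> files;
		std::string fn;
		while (iss >> fn)
			files.push_back(fn);
		labels.push_back(label);
		data_S_M.push_back(files);
		max_label = std::max(max_label, label);
	}
	return max_label + 1;
}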
Example #22
int main(int argc, char *argv[])
{
   char *ftest = NULL;
   struct timeval t0, t1, diff;
   problem *train, *test;
   int regpath_flag = 0, backtracking_flag = 0, std_flag = 1, verbose_flag = 0;
   int iter = 1000, c, crossval_flag = 0, nr_folds = 10, nval = 100, nzerow;
   double *w, *y_hat, *mean, *var;
   double lambda_1 = 1e-6, lambda_2 = 0, tol = 1e-9, epsilon, fret;

   while (1)
   {
      static struct option long_options[] =
      {
         /* These options don't set a flag.
          We distinguish them by their indices. */
         {"help",                   no_argument, 0, 'h'},
         {"verbose",                no_argument, 0, 'v'},
         {"backtracking",           no_argument, 0, 'b'},
         {"original",               no_argument, 0, 'o'},
         {"test",             required_argument, 0, 't'},
         {"l1",               required_argument, 0, 'l'},
         {"l2",               required_argument, 0, 'r'},
         {"cross-validation", optional_argument, 0, 'c'},
         {"tolerance       ", optional_argument, 0, 'e'},
         {"regpath",          optional_argument, 0, 'p'},
         /*{"stop",             optional_argument, 0, 's'},*/
         {"max-iters",        optional_argument, 0, 'i'},
         {0, 0, 0, 0}
      };

      int option_index = 0;

      c = getopt_long (argc, argv, "vhbot:r:l:p::c::e::s::i::", long_options, &option_index);

      /* Detect the end of the options. */
      if (c == -1)
         break;

      switch(c)
      {
         case 'h':
            exit_with_help(argv[PROG]);
            break;

         case 'b':
            backtracking_flag = 1;
            break;

         case 'v':
            verbose_flag = 1;
            break;

         case 'o':
            std_flag = 0;
            break;

         case 't':
            ftest = optarg;
            break;

         case 'c':
            crossval_flag = 1;
            if (optarg)
               if (sscanf(optarg, "%d", &nr_folds) != 1)
               {
                  fprintf(stderr, "%s: option -c requires an int\n", argv[PROG]);
                  exit_without_help(argv[PROG]);
               }
            break;

         case 'e':
            if (optarg)
               if (sscanf(optarg, "%lf", &tol) != 1)
               {
                  fprintf(stderr, "%s: option -e requires a double\n", argv[PROG]);
                  exit_without_help(argv[PROG]);
               }
            break;

         case 'p':
            regpath_flag = 1;
            if (optarg)
               if (sscanf(optarg, "%d", &nval) != 1)
               {
                  fprintf(stderr, "%s: option -p requires an int\n", argv[PROG]);
                  exit_without_help(argv[PROG]);
               }
            break;

         //case 's':
         //   search_flag = 1;
         //   if (optarg)
         //      if (sscanf(optarg, "%lf:%d:%lf", &lmax, &nval, &lmin) != 3)
         //      {
         //         printf("%s\n", optarg);
         //         fprintf(stderr, "%s: option -s requires a range in the format MAX:NVAL:MIN\n", argv[PROG]);
         //         exit_without_help(argv[PROG]);
         //      }
         //   break;

         case 'l':
            if (sscanf(optarg, "%lf", &lambda_1) != 1)
            {
               fprintf(stderr, "%s: option -l requires a float\n", argv[PROG]);
               exit_without_help(argv[PROG]);
            }
            break;

         case 'r':
            if (sscanf(optarg, "%lf", &lambda_2) != 1)
            {
               fprintf(stderr, "%s: option -r requires a float\n", argv[PROG]);
               exit_without_help(argv[PROG]);
            }
            break;

         case 'i':
            if (optarg)
               if (sscanf(optarg, "%d", &iter) != 1)
               {
                  fprintf(stderr, "%s: option -i requires an int\n", argv[PROG]);
                  exit_without_help(argv[PROG]);
               }
            break;

         case '?':
            /* getopt_long already printed an error message. */
            exit_without_help(argv[PROG]);
            break;

         default:
            printf("?? getopt returned character code 0%o ??\n", c); 
      }
   }

   if ((argc - optind) < ARGC_MIN || (argc - optind) > ARGC_MAX)
   {
      fprintf(stderr, "%s: missing file operand\n", argv[PROG]);
      exit_without_help(argv[PROG]);
   }

   /* start time */
   gettimeofday(&t0, 0);

   train = read_problem(argv[optind]);

   fprintf(stdout, "n:%d dim:%d\n", train->n, train->dim);

   /* alloc vector for means and variances, plus 1 for output */
   if (std_flag)
   {
      fprintf(stdout, "Standarizing train set...\n");
      mean = dvector(1, train->dim+1);
      var = dvector(1, train->dim+1);
      standarize(train, 1, mean, var);
   }

   if (ftest)
   {
      test = read_problem(ftest);
      if (std_flag)
         standarize(test, 0, mean, var);
   }

   if (regpath_flag)
   {
      fprintf(stdout, "Regularization path...\n");
      /* in glmnet package they use 0.0001 instead of 0.001 ? */
      epsilon = train->n > train->dim ? 0.001 : 0.01;
      lambda_1 = regularization_path(train, epsilon, nval);
   }

   fprintf(stdout, "lambda_1: %g\n", lambda_1);
   fprintf(stdout, "lambda_2: %g\n", lambda_2);

   /* initialize weight vector to 0 */
   w = dvector(1, train->dim);
   dvset(w, train->dim, 0);

   fprintf(stdout, "Training model...\n");
   if (backtracking_flag)
      /*fista_backtrack(train, w, lambda_1, lambda_2, tol, &iter, &fret);*/
      fista_nocov(train, w, lambda_1, lambda_2, tol, &iter, &fret);
   else
      fista(train, w, lambda_1, lambda_2, tol, verbose_flag, &iter, &fret);

   y_hat = dvector(1, train->n);
   fista_predict(train, w, y_hat);

   nzerow = dvnotzero(w, train->dim);

   fprintf(stdout, "Iterations: %d\n", iter);
   fprintf(stdout, "Active weights: %d/%d\n", nzerow, train->dim);
   if (std_flag)
      fprintf(stdout, "MAE train: %g\n", var[train->dim+1]*mae(train->y, train->n, y_hat));
   fprintf(stdout, "MAE train (standarized): %g\n", mae(train->y, train->n, y_hat));

   free_dvector(y_hat, 1, train->n);

   if (crossval_flag)
   {
      dvset(w, train->dim, 0);
      y_hat = dvector(1, train->n);
      cross_validation(train, w, lambda_1, lambda_2, nr_folds, y_hat);
      fprintf(stdout, "MAE cross-validation: %lf\n",
              mae(train->y, train->n, y_hat));
      free_dvector(y_hat, 1, train->n);
   }

   if (ftest)
   {
      /* we alloc memory again since test size is different from train size */
      y_hat = dvector(1, test->n);
      fista_predict(test, w, y_hat);
      fprintf(stdout, "MAE test: %g\n", mae(test->y, test->n, y_hat));
      free_dvector(y_hat, 1, test->n);
   }

   /* stop time */
   gettimeofday(&t1, 0);
   timeval_subtract(&t1, &t0, &diff);
   fprintf(stdout, "Time(h:m:s.us): %02d:%02d:%02d.%06ld\n",
           diff.tv_sec/3600, (diff.tv_sec/60)%60, diff.tv_sec%60, diff.tv_usec);

   if (verbose_flag)
   {
      fprintf(stdout, "Weights: ");
      dvprint(stdout, w, train->dim);
   }

   free_dvector(w, 1, train->dim);

   if (std_flag)
   {
      free_dvector(mean, 1, train->dim+1);
      free_dvector(var, 1, train->dim+1);
   }

   if (ftest)
   {
      free_dvector(test->y, 1, test->n);
      free_dmatrix(test->X, 1, test->n, 1, test->dim);
      free(test);
   }

   free_dvector(train->y, 1, train->n);
   free_dmatrix(train->X, 1, train->n, 1, train->dim);
   free(train);

   return 0;
}
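Example #22 never shows the problem type returned by read_problem, but the way it is indexed and freed (dvector/dmatrix with 1-based bounds) implies a Numerical-Recipes-style layout. A guessed definition, for orientation only:

/* Assumed layout, inferred from how Example #22 indexes and frees `train` and `test`. */
typedef struct {
   int n;       /* number of samples                      */
   int dim;     /* number of features                     */
   double *y;   /* targets, indexed y[1..n]               */
   double **X;  /* design matrix, indexed X[1..n][1..dim] */
} problem;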
Example #23
int main(int argc, char **argv)
{
    char input_file_name[1024];
    char model_file_name[1024];
    const char *error_msg;
    /*
     * Some bookkeeping variables for MPI. The 'rank' of a process is its numeric id
     * in the process pool. For example, if we run a program via `mpirun -np 4 foo', then
     * the process ranks are 0 through 3 and param.size is the total number of
     * processes running (in this example, 4).
     */

    
    start_t = time(NULL);     
    MPI_Init(&argc, &argv);               // Initialize the MPI execution environment
    MPI_Comm_rank(MPI_COMM_WORLD, &param.rank); // Determine current running process
    MPI_Comm_size(MPI_COMM_WORLD, &param.size); // Total number of processes
    //double N = (double) size;             // Number of subsystems/slaves for ADMM
    if (param.rank==param.root)
        printf ("Number of subsystems: %d \n", param.size);
    
    parse_command_line(argc, argv, input_file_name, model_file_name);
    // Read the meta data
    bprob.read_metadata(input_file_name);
    bprob.set_bias(bias);
    error_msg = block_check_parameter(&bprob,&param);
    
    if(error_msg)
    {
        fprintf(stderr,"Error: %s\n",error_msg);
        exit(1);
    }
    
    if (param.rank==param.root)
    {    
        if (param.solver_type == L2R_L2LOSS_SVC)
            printf("ADMM + Primal trust region Newton's method for L2 loss SVM:\n");
        else if (param.solver_type == L2R_L2LOSS_SVC_DUAL)
            printf("ADMM + Dual coordinate descent for L2 loss SVM: \n");
        else if (param.solver_type ==  L2R_L1LOSS_SVC_DUAL)
            printf("ADMM + Dual coordinate descent for L1 loss SVM:\n");
        else
            printf("Not supported. \n"); 
    }
    
    srand(1);
    // Now read the local data 
    problem  * prob = read_problem(&bprob, &param);
    
    
    if(flag_cross_validation)
        do_cross_validation(prob);
    else
    {
        model_=block_train(prob, &param);   
        save_model(model_file_name, model_);  
        free_and_destroy_model(&model_);
    }
    destroy_param(&param);
    MPI_Finalize(); 
    return 0;
}
Example #24
int main(int argc, char **argv)
{
	#ifdef WIN32
		// Send all reports to STDOUT
		_CrtSetReportMode( _CRT_WARN, _CRTDBG_MODE_FILE );
		_CrtSetReportFile( _CRT_WARN, _CRTDBG_FILE_STDOUT );
		_CrtSetReportMode( _CRT_ERROR, _CRTDBG_MODE_FILE );
		_CrtSetReportFile( _CRT_ERROR, _CRTDBG_FILE_STDOUT );
		_CrtSetReportMode( _CRT_ASSERT, _CRTDBG_MODE_FILE );
		_CrtSetReportFile( _CRT_ASSERT, _CRTDBG_FILE_STDOUT );

		// enable the options
		SET_CRT_DEBUG_FIELD( _CRTDBG_DELAY_FREE_MEM_DF );
		SET_CRT_DEBUG_FIELD( _CRTDBG_LEAK_CHECK_DF );
	#endif
		
	printf("int %d, short int %d, char %d, double %d, float %d, node %d\n",sizeof(int),sizeof(short int), sizeof(char), sizeof(double), sizeof(float), sizeof(svm_node));

	char input_file_name[FILENAME_LEN];    
	char model_file_name[FILENAME_LEN];
	const char *error_msg;

	parse_command_line(argc, argv, input_file_name, model_file_name);
    read_problem(input_file_name);
	param.modelFile = model_file_name;

	printf ("Finish reading input files!\n");

	error_msg = svm_check_parameter(&prob,&param);	

	#ifdef WIN32
		assert(_CrtCheckMemory());
	#endif

	if(error_msg)
	{
		fprintf(stderr,"Error: %s\n",error_msg);
		exit(1);
	}

    double duration = 0;
	double start = getRunTime();
	if(cross_validation)
	{
		do_cross_validation();
	}
	else
	{
		printf("kernel: %d\n",param.kernel_type);
		model = svm_train(&prob,&param);
        double finish = getRunTime();	
        duration = (double)(finish - start);

    #ifdef WIN32
		assert(_CrtCheckMemory());
	#endif

		svm_save_model(model_file_name,model);
		svm_destroy_model(model);
	}
	
	printf("CPU Time = %f second\n", duration);
    FILE* fModel = fopen(model_file_name, "a+t");					// append mode
	fprintf(fModel, "CPU Time = %f second\n", duration);
	fclose(fModel);
	    
    svm_destroy_param(&param);
	free(prob.y);
	free(prob.x);
	free(x_space);

	#ifdef WIN32
		assert(_CrtCheckMemory());
	#endif

    return 0;
}
Example #25
int main(int argc, char **argv)
{
	//set the mpi settings
	int threadprovided;
	MPI_Init_thread(&argc, &argv, MPI_THREAD_MULTIPLE, &threadprovided);
	if(threadprovided != MPI_THREAD_MULTIPLE)
	{
		printf("MPI multiple thread isn't provided!\n");
		fflush(stdout);
		mpi_exit(1);
	}
	int current_rank = mpi_get_rank();
	int nr_ranks = mpi_get_size();
	param.nr_ranks = nr_ranks;
	
	char hostname[1024];
	int hostname_len;
	MPI_Get_processor_name(hostname, &hostname_len);
    printf("processor name: %s, number of processed: %d, rank: %d\n", hostname, nr_ranks, current_rank);
	fflush(stdout);
	//
	int global_l;
	char input_file_name[1024];
	char model_file_name[1024];
	const char *error_msg;
	parse_command_line(argc, argv, input_file_name, model_file_name);
	
	//set the number of threads for the shared-memory system
	int nr_threads = param.thread_count;
	int max_thread_count = omp_get_max_threads();

	if(nr_threads > max_thread_count)
	{
		printf("[rank %d], please enter the correct number of threads: 1~%d\n", current_rank, max_thread_count);
		mpi_exit(1);
	}
	omp_set_num_threads(nr_threads);

	//set the cpu affnity
	/*int ithread, err, cpu;
	cpu_set_t cpu_mask;
#pragma omp parallel private(ithread, cpu_mask, err, cpu)
	{
		ithread = omp_get_thread_num();
		CPU_ZERO(&cpu_mask);//set mask to zero
		CPU_SET(ithread, &cpu_mask);//set mask with ithread
		err = sched_setaffinity((pid_t)0, sizeof(cpu_mask), &cpu_mask);
		cpu = sched_getcpu();
		printf("thread_id %d on CPU %d\n", ithread, cpu);
	}*/
	//now, read the problem from the input file
	read_problem(input_file_name);
	error_msg = rksvm_check_parameter(&prob,&param);

	if(error_msg)
	{
		fprintf(stderr,"ERROR: %s\n",error_msg);
		mpi_exit(1);
	}

	//distributed code
	global_l = prob.l;
	mpi_allreduce(&global_l, 1, MPI_INT, MPI_SUM);//MPI_INT :int;MPI_SUM:sum
	prob.global_l = global_l;
	
	printf("#local instances = %d, #global instances = %d\n", prob.l, prob.global_l);
	fflush(stdout);

	if(current_rank==0){
	puts("Start to train!");
	}
	model = rksvm_train(&prob,&param);
	if(rksvm_save_model(model_file_name,model))
	{
		fprintf(stderr,"[rank %d] can't save model to file %s\n",mpi_get_rank(), model_file_name);
		mpi_exit(1);
	}
	rksvm_free_and_destroy_model(&model);
	free(prob.y);
	free(prob.x);
	free(prob.query);
	free(x_space);
	free(prob.length_of_each_rksvm_node);
	free(line);

	MPI_Finalize();
	return 0;
}
Example #26
void parse_command_line(int argc, char **argv)
{
	int i;
	void (*print_func)(const char*) = NULL;	// default printing to stdout
	char input_file_name[1024];
	char test_file_name[1024];

	// default values
	param.solver_type = L2R_L2LOSS_SVC_DUAL;
	param.C = 1;
	param.eps = INF; // see setting below
	param.nr_weight = 0;
	param.weight_label = NULL;
	param.weight = NULL;
	bias = -1;

	// parse options
	for(i=1;i<argc;i++)
	{
		if(argv[i][0] != '-') break;
		if(++i>=argc)
		  exit_with_help();
		switch(argv[i-1][1])
		  {
		  case 'b':
		    flag_predict_probability = atoi(argv[i]);
		    break;

		  case 's':
		    param.solver_type = atoi(argv[i]);
		    break;
		    
		  case 'c':
		    param.C = atof(argv[i]);
		    break;
		    
		  case 'e':
		    param.eps = atof(argv[i]);
		    break;
		    
		  case 'B':
		    bias = atof(argv[i]);
		    break;

		  case 'w':
		    ++param.nr_weight;
		    param.weight_label = (int *) realloc(param.weight_label,sizeof(int)*param.nr_weight);
		    param.weight = (double *) realloc(param.weight,sizeof(double)*param.nr_weight);
		    param.weight_label[param.nr_weight-1] = atoi(&argv[i-1][2]);
		    param.weight[param.nr_weight-1] = atof(argv[i]);
		    break;
		    
		  case 'q':
		    print_func = &print_null;
		    i--;
		    break;
		    
		  case 'r':
		    nb_runs = atoi(argv[i]);            
		    break;

		  case 'l':
		    trnsz = atoi(argv[i]);            
		    break;

		  default:
		    fprintf(stderr,"unknown option: -%c\n", argv[i-1][1]);
		    exit_with_help();
		    break;
		  }
	}
	
	set_print_string_function(print_func);
	
	// determine filenames
	if(i+2>=argc)
		exit_with_help();
	
	printf("reading train file %s\n",argv[i]);
	strcpy(input_file_name, argv[i]);
	prob=read_problem(input_file_name);

	printf("reading test file %s\n",argv[i+1]);
	strcpy(test_file_name, argv[i+1]);
	tprob=read_problem(test_file_name);

	output = fopen(argv[i+2],"a");
	if(output == NULL)
	{
		fprintf(stderr,"can't open output file %s\n",argv[i+2]);
		exit(1);
	}

	if(param.eps == INF)
	  {
	    if(param.solver_type == L2R_LR || param.solver_type == L2R_L2LOSS_SVC)
	      param.eps = 0.01;
	    else if(param.solver_type == L2R_L2LOSS_SVC_DUAL || param.solver_type == L2R_L1LOSS_SVC_DUAL || param.solver_type == MCSVM_CS)
	      param.eps = 0.1;
	    else if(param.solver_type == L1R_L2LOSS_SVC || param.solver_type == L1R_LR)
	      param.eps = 0.01;
	  }
}
Example #27
//---------------------------- global variables -------------------------------
int main(int argc, char **argv)
{
	char input_file_name[1024];
	char model_file_name[1024];
	const char *error_msg;
	
#ifdef FIGURE56
	char test_file_name[1024];
	parse_command_line(argc, argv, input_file_name, test_file_name);
#else
	parse_command_line(argc, argv, input_file_name, model_file_name);//initialize global struct param according to the command line
	//_parse_command_line(argc, argv, input_file_name, model_file_name);
#endif
	read_problem(input_file_name);//get all possible information about the train file into global struct prob
#ifdef FIGURE56
	read_problem_test(test_file_name);
#endif
	error_msg = check_parameter(&prob,&param);

	if(error_msg)
	{
		fprintf(stderr,"ERROR: %s\n",error_msg);
		exit(1);
	}
	//	struct model
//{
//	struct parameter param;
//	int nr_class;		/* number of classes */
//	int nr_feature;
//	double *w;
//	int *label;		/* label of each class */
//};
//	model_=train(&prob, &param);
//-------- allocate memory for the V matrix --------------
	int i=0;
	double * p = Malloc(double,param.col_size * prob.l);
	//srand( (unsigned)time( NULL ) );  // seed the random number generator
	for (i=0;i<param.col_size * prob.l;i++)
	{		
		p[i]=rand()/(RAND_MAX+1.0);  // generate a random number in [0,1)
		//p[i]=rand();
	}
	double ** v_pp = Malloc(double* ,prob.l);
	param.v_pp = v_pp;
	
	for (i=0;i<prob.l;i++)
		param.v_pp[i] = &p[param.col_size * i];
	model_=_train(&prob, &param);

#ifdef FIGURE56
#else
	if(save_model(model_file_name, model_))
	{
		fprintf(stderr,"can't save model to file %s\n",model_file_name);
		exit(1);
	}
#endif
	free_and_destroy_model(&model_);
	destroy_param(&param);
	free(prob.y);
	free(prob.x);
	free(prob.query);
	free(x_space);
	////////free the variable
	free(v_pp);
	free(p);
#ifdef FIGURE56
	free(probtest.y);
	free(probtest.x);
	free(x_spacetest);
#endif
	free(line);
	return 0;
}
Example #28
void merge_problems(const char *srcs[], const int num_srcs, INT64* offsets, const char *output_filename, char training, INT64 *error_code) { 
    int i, j;
    const double bias = -1;
    SVMProblem *svmproblems = Malloc(SVMProblem, num_srcs);
    FILE *fp = NULL;

    /**
     * error_code:
     * 0	no error
     * > 0	input format error. The error_code value
     * 	indicates the line number.
     * -1	can not open file
     * -2	memory exhausted
     * -3	input files contain different numbers of instances
     * -4   no file given
     */

    if(num_srcs <= 0) {
        *error_code = -4;
        return;
    }

    for(i=0; i < num_srcs; i++)
    {
        svmproblems[i] = read_problem(srcs[i], bias, error_code);
        if(*error_code != 0) {
            switch (*error_code) {
                case -1:
                    fprintf(stderr,"ERROR: Cannot open input file: %s\n", srcs[i]);
                    break;
                case -2:
                    fprintf(stderr,"ERROR: Memory exhausted when reading %s\n", srcs[i]);
                    break;
                default: /* error_code  > 0 input format error*/
                    fprintf(stderr,"ERROR: input format error at line %ld in %s\n", (long)*error_code, srcs[i]);
                    break;
            }
            return;
        }
    }


    // Overwrite offsets
    if(training) {
        offsets[0] = svmproblems[0].prob.n;
        for(i = 1; i < num_srcs; i++)
            offsets[i] = offsets[i-1] + svmproblems[i].prob.n;
    }

    // Make sure # of instances are all equal.
    for(i = 1; i < num_srcs; i++)
    {
        if(svmproblems[i].prob.l != svmproblems[i-1].prob.l)
        {
            *error_code = -3;
            fprintf(stderr,"ERROR: #insts in %s = %ld, but #insts in %s = %ld\n",
                    srcs[i], (long)svmproblems[i].prob.l, srcs[i-1], (long)svmproblems[i-1].prob.l);
            return;
        }
    }

    fp = fopen(output_filename, "w");
    if(fp == NULL)
    {
        *error_code = -1;
        fprintf(stderr,"ERROR: Cannot open output file: %s \n", srcs[i]);
        return;
    }

    for(j = 0; j < svmproblems[0].prob.l; j++)
    {
        INT64 base = 0;

        fprintf(fp, "%g", svmproblems[0].prob.y[j]);
        for(i = 0; i < num_srcs; i++)
        {
            struct feature_node* node;

            for(node = svmproblems[i].prob.x[j]; node->index != -1; node++)
            {
                INT64 index = base+node->index;
                if(index <= offsets[i])
                    fprintf(fp, " %ld:%.17g", (long)index, node->value);
                else
                    break;
            }
            base = offsets[i];
        }
        fprintf(fp,"\n");
    }
    fclose(fp);

    for(i = 0; i < num_srcs; i++)
        freeSVMProblem(svmproblems[i]);
}
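The error_code table at the top of Example #28 defines the whole contract of merge_problems; a hedged usage sketch follows (the file names are placeholders, INT64 is the project's own typedef, and the declarations from the example are assumed to be visible):

// Hypothetical call site: merge two feature files column-wise into one training file.
static void merge_example(void)
{
    const char *srcs[] = { "features_a.txt", "features_b.txt" };  // placeholder paths
    INT64 offsets[2];       // filled in by merge_problems when training != 0
    INT64 error_code = 0;

    merge_problems(srcs, 2, offsets, "merged_train.txt", 1 /* training */, &error_code);
    if (error_code != 0)
        fprintf(stderr, "merge_problems failed, error_code = %ld\n", (long)error_code);
}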
Example #29
int main(int ac, char **av) {
    int val = 0;
    int i;
    wctproblem problem;
    wctproblem_init(&problem);
    CCcheck_val(val, "Failed in wctproblem_init");
    wctparms *parms = &(problem.parms);
    wctdata *pd = &(problem.root_pd);
    val = program_header(ac, av);
    CCcheck_val(val, "Failed in programheader");
    CCutil_start_timer(&(problem.tot_cputime));
    double start_time = CCutil_zeit();
    wctdata_init(pd);
    pd->id = 0;
    problem.nwctdata = 1;
    val = parseargs(ac, av, parms);
    problem.real_time = getRealTime();

    if (val) {
        goto CLEAN;
    }

    get_problem_name(pd->pname, parms->jobfile);

    if (dbg_lvl() > 1) {
        printf("Debugging turned on\n");
    }

    fflush(stdout);
    /** Reading and preprocessing the data */
    val  = read_problem(parms->jobfile, &(pd->njobs), &(problem.duration),
                        &(problem.weight));
    pd->nmachines = parms->nmachines;
    CCcheck_val(val, "read_adjlist failed");
    pd->orig_node_ids = (int *)CC_SAFE_MALLOC(pd->njobs, int);
    CCcheck_NULL_2(pd->orig_node_ids, "No memory to allocated orig_node_ids\n");

    for (i = 0; i < pd->njobs; i++) {
        pd->orig_node_ids[i] = i;
    }

    Preprocessdata(&problem, pd);
    printf("Reading and preprocessing of the data took %f seconds\n",
           CCutil_zeit() - start_time);
    /** Computing initial lowerbound */
    CCutil_start_timer(&(problem.tot_lb));
    problem.global_lower_bound = lowerbound_eei(pd->jobarray, pd->njobs,
                                 pd->nmachines);
    problem.global_lower_bound = CC_MAX(problem.global_lower_bound,
                                        lowerbound_cp(pd->jobarray, pd->njobs, pd->nmachines));
    problem.global_lower_bound = CC_MAX(problem.global_lower_bound,
                                        lowerbound_cw(pd->jobarray, pd->njobs, pd->nmachines));
    CCutil_stop_timer(&(problem.tot_lb), 0);
    printf("Computing lowerbound EEI, CP and CW took %f seconds\n",
           problem.tot_lb.cum_zeit);
    /** Construction Pricersolver at the root node */
    CCutil_start_resume_time(&(problem.tot_build_dd));
    pd->solver = newSolver(pd->duration, pd->weights, pd->releasetime, pd->duetime,
                           pd->njobs, pd->H_min, pd->H_max);
    CCutil_suspend_timer(&(problem.tot_build_dd));

    /** Construct Feasible solutions */
    if (parms->nb_feas_sol > 0) {
        construct_feasible_solutions(&problem);
    }

    /** Compute Schedule with Branch and Price */
    compute_schedule(&problem);

    problem.real_time = getRealTime() - problem.real_time;
    CCutil_stop_timer(&(problem.tot_cputime), 0);
    /** Print all the information to screen and csv */
    if (problem.parms.print) {
        print_to_csv(&problem);
    }

    print_to_screen(&problem);

CLEAN:
    wctproblem_free(&problem);
    return val;
}
Example #30
File: train.cpp Project: fgtlss/sol
//void copy_parameter(param_temp,&param)
//{
//	param_t
//}
// Interface function of matlab
// now assume prhs[0]: label prhs[1]: features
int main(int argc, char **argv)
{
	const char *error_msg;
	// fix random seed to have same results for each run
	// (for cross validation)
	srand(1);

	char input_file_name[1024];
	char model_file_name[1024];
	char param_file_name[1024];

    
	int n_flag = 0;
	parse_command_line(argc, argv, input_file_name, model_file_name,n_flag,param_file_name);
	char char_para;
	int length_param = 0;
	double *para_entry; // 
	//n_flag = 1;
	//int para_B[20] = {2, 3, 4, 5, 6, 7, 8, 9, 10, 12,14,16, 18, 20, 25, 30,35,40,45,50};
	int para_B[40] = {2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 14, 16, 18, 20, 24, 25, 26, 30,32, 35, 38, 40, 42, 45, 48, 50, 55, 60, 65,70,75,80, 85, 90, 95, 100, 105, 110, 115, 120};
	//int para_B[32] = { 20, 24, 25, 26, 30,32, 35, 38, 40, 42, 45, 48, 50, 55,  60, 80, 100, 120, 140, 160, 180, 200, 220, 240, 260, 280};

	if (n_flag==1)
	{
		read_parameter(param_file_name, para_entry, char_para, length_param);
	}

	read_problem(input_file_name);
	


	error_msg = check_parameter(&prob,&param);
//parameter *param_temp = new parameter[1];
//copy_parameter(param_temp,&param);
	if(error_msg)
	{
		fprintf(stderr,"Error: %s\n",error_msg);
		exit(1);
	}

	if(cross_validation_flag)
	{
		do_cross_validation();
	}
	else
	{
		if(n_flag==0)
		{
			model_ = FGM_train(&prob, &param);
			printf("training is done!\n");
			save_model_poly(model_file_name, model_);
			printf("model is saved!\n");
			destroy_model(model_);
		}
		else
		{
			int i;
			if (char_para=='C')
			{
				length_param = length_param;
			}else
			{
				length_param = 40;
			}
			for (i=0;i<length_param;i++)
			{
				char param_char[1000];
			    char model_file[1024];
				strcpy(model_file,model_file_name);
				if (char_para=='C')
				{
					param.C = para_entry[i];
					sprintf(param_char, "%.10lf ", para_entry[i]); 
					strcat(model_file,".c.");
					strcat(model_file,param_char);
				    model_=FGM_train(&prob, &param);
				}
				else
				{
					int B = para_B[i];
					param.B = B;
					sprintf(param_char, "%d ", param.B); 
					printf("%d\n ", param.B); 
					strcat(model_file,".B.");
					strcat(model_file,param_char);
				    model_=FGM_train(&prob, &param);
				}
				
				printf("training is done!\n");
				save_model_poly(model_file, model_);
				printf("model is saved!\n");
				int feature_pair = model_->feature_pair;  // read before destroying the model
				destroy_model(model_);
				if(feature_pair>600)
				{
					break;
				}
			}
		}

	}
	if (n_flag==1)
	{
		delete []para_entry;
	}
	
	destroy_param(&param);
	free(prob.y);
	free(prob.x);
	free(x_space);

}