Example No. 1
/* Try to compute a trajectory in the given region.
 *  l       Vector of length {params.N} that will receive the left thruster
 *          values for each time step.
 *  r       Vector of length {params.N} that will receive the right thruster
 *          values for each time step.
 *  params  A structure containing the problem to solve.
 */
region_result_t compute_trajectory(double *l, double *r, region_params_t *params)
{
    lbfgsfloatval_t fx;
    lbfgs_parameter_t param;
    int i;
    int n = params->N;
    lbfgsfloatval_t *x = lbfgs_malloc(n*2);
    int ret = 0;

    if (!x) {
        printf("ERROR: Failed to allocate a memory block for variables.\n");
        return REGION_STUCK;   /* allocation failure: report the region as unsolved */
    }

    for (i = 0; i < n*2; i++) {
        x[i] = 0;
    }

    /* Initialize the parameters for the L-BFGS optimization. */
    lbfgs_parameter_init(&param);
    param.linesearch = LBFGS_LINESEARCH_BACKTRACKING_STRONG_WOLFE;
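    /* past/delta enable liblbfgs's delta-based convergence test: stop once
     * the relative decrease of fx over the last `past` iterations falls
     * below delta. */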
    param.past = 100;
    param.delta = 1e-4;
    param.max_linesearch = 1000;
    param.m = 5;

    ret = lbfgs(n*2, x, &fx, evaluate, progress, params, &param);
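    /* x packs both control sequences: entries [0, n) are the left-thruster
     * values, entries [n, 2n) the right. */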
    memcpy(l, x, sizeof(l[0])*n);
    memcpy(r, x+n, sizeof(r[0])*n);

    printf("\nL-BFGS optimization terminated with status code = %d\n", ret);
    printf("fx = %f\n", fx);
    printf("\n");

    printf("MATLAB variables:\n");
    printf("l = [ ");
    for (i = 0; i < n; i++) { printf("%f ", l[i]); }
    printf(" ];\nr = [ ");
    for (i = 0; i < n; i++) { printf("%f ", r[i]); }
    printf(" ];\n");

    printf("rx = [ ");
    for (i = 0; i < params->polycount; i++) { printf("%f ", params->poly[i].x); }
    printf(" ];\nry = [ ");
    for (i = 0; i < params->polycount; i++) { printf("%f ", params->poly[i].y); }
    printf(" ];\n");
    printf("parm = [ %f %f %f %f %f %f %f %f %f ];\n",
        params->p_0.x, params->p_0.y, params->v_0.x, params->v_0.y,
        params->d_0.x, params->d_0.y, params->omega_0, params->mass,
        params->radius);
    printf("dt = %f\n", params->dt);

    lbfgs_free(x);
    
    if (ret == LBFGS_SUCCESS || ret == LBFGS_STOP || ret == LBFGS_ALREADY_MINIMIZED) {
        return REGION_SOLVED;
    } else {
        return REGION_STUCK;
    }
}
Example No. 2
double Inference::get_bright_logprob(const int spot_index)
{
    active_spot_index = spot_index;
    // set E = 1 before optimizing this spot
    frame.E[active_spot_index] = 1;
    
    int N = 3;
    double logp;
    // new[] throws std::bad_alloc on failure rather than returning NULL,
    // so use the nothrow form to make the check below meaningful.
    double *x = new (std::nothrow) double[N];
    if (x == NULL)
    {
        std::cout << "Allocating storage FAILED!" << "\n";
        return -1;
    }

    for (int i = 0; i < N; i++)
    {
        x[i] = 1.0;
    }
    lbfgs_parameter_t param;
    lbfgs_parameter_init(&param);
    param.m = 10;
    //param.epsilon = 1e-5;
    param.max_iterations = 20000;
    param.linesearch = LBFGS_LINESEARCH_BACKTRACKING_WOLFE;
    int status = lbfgs(N, x, &logp, inner_evaluate, inner_progress, this, &param);
    printf("Inner L-BFGS optimization terminated with status code = %d, logp=%f\n", status, logp);
    if (status != 0)
    {
        getchar();   // pause on failure so the status is visible
    }

    // Make sure to set E = 0 before exiting so that we don't leave the
    // bright spot set. Note that the optimization still changes the
    // parameters A, B, and phi.
    frame.E[active_spot_index] = 0;

    // double logp_piece;
    // double logp = 0.0;
    // for(std::vector<Evidence>::iterator iter = evidence_list.begin(); 
    // 	iter != evidence_list.end(); iter++)
    // {
    // 	logp_piece = 
    // 	    iter->s[active_spot_index] * log(frame.mu[active_spot_index])
    // 	    - frame.mu[active_spot_index];
    // 	logp = logp + logp_piece;
    // }
    delete [] x;
    return logp;
}
Example No. 3
void init_lbfgs_predicates( void ) 
{ 
  fcall3 = YAP_MkFunctor(YAP_LookupAtom("$lbfgs_callback_evaluate"), 3);
  fprogress8 = YAP_MkFunctor(YAP_LookupAtom("$lbfgs_callback_progress"), 8);

  //Initialize the parameters for the L-BFGS optimization.
  lbfgs_parameter_init(&param);


  YAP_UserCPredicate("optimizer_reserve_memory",optimizer_initialize,1);
  YAP_UserCPredicate("optimizer_run",optimizer_run,2);
  YAP_UserCPredicate("optimizer_free_memory",optimizer_finalize,0);

  YAP_UserCPredicate("optimizer_set_x",set_x_value,2);
  YAP_UserCPredicate("optimizer_get_x",get_x_value,2);
  YAP_UserCPredicate("optimizer_set_g",set_g_value,2);
  YAP_UserCPredicate("optimizer_get_g",get_g_value,2);

  YAP_UserCPredicate("optimizer_set_parameter",optimizer_set_parameter,2);
  YAP_UserCPredicate("optimizer_get_parameter",optimizer_get_parameter,2);
}  
Example No. 4
Matrix *runLbfgsOptim(Matrix *y, doubleVector *target, int L, int jobs, double epsilon) {
    int M = y->nrow;
    Matrix *w = createZeroMatrix(M, L);
    
    lbfgsfloatval_t fx;
    lbfgsfloatval_t *x = lbfgs_malloc(w->nrow*w->ncol);
    lbfgs_parameter_t param;

    if (x == NULL) {
        printf("ERROR: Failed to allocate a memory block for variables.\n");
        exit(1);
    }

    /* Initialize the variables. */
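    /* Copy w into x in column-major order: x[j*nrow + i] = w(i, j). */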
    for (size_t i = 0; i < w->nrow; i++) {
        for (size_t j = 0; j < w->ncol;j++) {
            x[j*w->nrow + i] = getMatrixElement(w, i, j);
        }    
    }
    lbfgs_parameter_init(&param);
    param.epsilon = epsilon;
    param.m = 20;

    LbfgsInput inp;
    inp.target = target;
    inp.y = y;
    inp.jobs = jobs;
    inp.L = w->ncol;
    int ret = lbfgs(w->nrow*w->ncol, x, &fx, evaluate, progress, (void*)&inp, &param);

    printf("L-BFGS optimization terminated with status code = %d\n", ret);
    printf("  fx = %f\n", fx);
    Matrix *w_opt = formMatrixFromFloatVal(x, w->nrow, w->ncol);
    
    lbfgs_free(x);

    return(w_opt);
}
Example No. 5
int main(int argc, char *argv[])
{
    int i, ret = 0;
    int n = 2;
    lbfgsfloatval_t fx;
    lbfgsfloatval_t *x = lbfgs_malloc(n);
    lbfgs_parameter_t param;

    if (!x) {
        printf("ERROR: Failed to allocate a memory block for variables.\n");
        return 1;
    }

    for (i = 0; i < n; i++) {
        x[i] = 10;
    }

    /* Initialize the parameters for the L-BFGS optimization. */
    lbfgs_parameter_init(&param);
    //param.linesearch = LBFGS_LINESEARCH_BACKTRACKING_STRONG_WOLFE;

    ret = lbfgs(n, x, &fx, evaluate, progress, NULL, &param);

    printf("L-BFGS optimization terminated with status code = %d\n", ret);
    printf("  fx = %f; ", fx);
    for (i = 0; i < n; i ++){
        printf("x[%d] = %f; ", i, x[i]);
    }
    printf("\n");

    /* Answer according to Wolfram Alpha:
     *  1.00607 x^2 - 0.363643 x + 0.554
     */

    lbfgs_free(x);
    return 0;
}
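Every example here passes `evaluate` and `progress` callbacks to lbfgs() without showing them. Below is a minimal sketch of the pair using liblbfgs's lbfgs_evaluate_t and lbfgs_progress_t signatures from lbfgs.h; the objective (a toy quadratic, sum of (x[i]-1)^2) is an assumption chosen for illustration, not the objective of any example above.

#include <stdio.h>
#include <lbfgs.h>

/* Objective/gradient callback: write the gradient into g and return f(x).
 * The quadratic below is a placeholder objective for illustration only. */
static lbfgsfloatval_t evaluate(void *instance, const lbfgsfloatval_t *x,
    lbfgsfloatval_t *g, const int n, const lbfgsfloatval_t step)
{
    lbfgsfloatval_t fx = 0.0;
    for (int i = 0; i < n; i++) {
        fx += (x[i] - 1.0) * (x[i] - 1.0);
        g[i] = 2.0 * (x[i] - 1.0);
    }
    return fx;
}

/* Progress callback: called once per iteration; return nonzero to abort. */
static int progress(void *instance, const lbfgsfloatval_t *x,
    const lbfgsfloatval_t *g, const lbfgsfloatval_t fx,
    const lbfgsfloatval_t xnorm, const lbfgsfloatval_t gnorm,
    const lbfgsfloatval_t step, int n, int k, int ls)
{
    printf("Iteration %d: fx = %f, xnorm = %f, gnorm = %f\n", k, fx, xnorm, gnorm);
    return 0;
}

The `instance` argument is whatever pointer was passed as the sixth argument to lbfgs() (NULL in this example, `params` in Example No. 1), which is how the callbacks receive problem data without globals.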
Example No. 6
File: RAE.cpp Project: zerkh/RAE
void RAE::training()
{
	x = lbfgs_malloc(getRAEWeightSize());
	Map<MatrixLBFGS>(x, getRAEWeightSize(), 1).setRandom();
	lbfgs_parameter_t param;
	iterTimes = atoi(para->getPara("IterationTime").c_str());

	loadTrainingData();
	lbfgs_parameter_init(&param);
	param.max_iterations = iterTimes;

	lbfgsfloatval_t fx = 0;
	int ret;

	ret = lbfgs(getRAEWeightSize(), x, &fx, RAELBFGS::evaluate, RAELBFGS::progress, this, &param);

	cout << "L-BFGS optimization terminated with status code = " << ret << endl;
	cout << " fx = " << fx << endl;

	updateWeights(x);
	logWeights(para);
	trainingData.clear();
	lbfgs_free(x);
}
Example No. 7
Eigen::VectorXf minimizeLBFGS( EnergyFunction & efun, int restart, bool verbose ) {
	Eigen::VectorXf x0 = efun.initialValue();
	const int n = x0.rows();
	
	lbfgsfloatval_t *x = lbfgs_malloc(n);
	if (x == NULL) {
		printf("ERROR: Failed to allocate a memory block for variables.\n");
		return x0;
	}
	std::copy( x0.data(), x0.data()+n, x );
	
	lbfgs_parameter_t param;
	lbfgs_parameter_init(&param);
	// You might want to adjust the parameters to your problem
	param.epsilon = 1e-6;
	param.max_iterations = 50;
	
	double last_f = 1e100;
	int ret;
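	// Restart L-BFGS up to `restart` times, continuing only while each
	// run improves on the best objective value seen so far.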
	for( int i=0; i<=restart; i++ ) {
		lbfgsfloatval_t fx;
		ret = lbfgs(n, x, &fx, evaluate, verbose?progress:NULL, &efun, &param);
		if( last_f > fx )
			last_f = fx;
		else
			break;
	}
	
	if ( verbose ) {
		printf("L-BFGS optimization terminated with status code = %d\n", ret);
	}
	
	std::copy( x, x+n, x0.data() );
	lbfgs_free(x);
	return x0;
}
Example No. 8
void EstimatePairModelMAP(numeric_t *x, numeric_t *lambdas, alignment_t *ali,
    options_t *options) {
    /* Computes Maximum a posteriori (MAP) estimates for the parameters of
       an undirected graphical model by L-BFGS */

    /* Start timer */
    gettimeofday(&ali->start, NULL);

    /* Initialize L-BFGS */
    lbfgs_parameter_t param;
    lbfgs_parameter_init(&param);
    param.epsilon = 1E-3;
    param.max_iterations = options->maxIter; /* 0 is unbounded */

    /* Array of void pointers provides relevant data structures */
    void *d[3] = {(void *)ali, (void *)options, (void *)lambdas};

    /* Estimate parameters by optimization */
    static lbfgs_evaluate_t algo;
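    /* Pick the objective/gradient callback for this estimator; all of the
     * candidates below share liblbfgs's lbfgs_evaluate_t signature. */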
    switch(options->estimatorMAP) {
        case INFER_MAP_PLM:
            algo = PLMNegLogPosterior;
            break;
        case INFER_MAP_PLM_GAPREDUCE:
            algo = PLMNegLogPosteriorGapReduce;
            break;
        case INFER_MAP_PLM_BLOCK:
            algo = PLMNegLogPosteriorBlock;
            break;
        case INFER_MAP_PLM_DROPOUT:
            algo = PLMNegLogPosteriorDO;
            break;
        default:
            algo = PLMNegLogPosterior;
    }

    if (options->zeroAPC == 1) fprintf(stderr,
            "Estimating coupling hyperparameters le = 1/2 inverse variance\n");

    int ret = 0;
    lbfgsfloatval_t fx;
    ret = lbfgs(ali->nParams, x, &fx, algo, ReportProgresslBFGS,
        (void*)d, &param);
    fprintf(stderr, "Gradient optimization: %s\n", LBFGSErrorString(ret));

    /* Optionally re-estimate parameters with adjusted hyperparameters */
    if (options->zeroAPC == 1) {
        /* Form new priors on the variances */
        ZeroAPCPriors(ali, options, lambdas, x);

        /* Reinitialize coupling parameters */
        for (int i = 0; i < ali->nSites - 1; i++)
            for (int j = i + 1; j < ali->nSites; j++)
                for (int ai = 0; ai < ali->nCodes; ai++)
                    for (int aj = 0; aj < ali->nCodes; aj++)
                        xEij(i, j, ai, aj) = 0.0;

        /* Iterate estimation with new hyperparameter estimates */
        options->zeroAPC = 2;
        ret = lbfgs(ali->nParams, x, &fx, algo,
            ReportProgresslBFGS, (void*)d, &param);
        fprintf(stderr, "Gradient optimization: %s\n", LBFGSErrorString(ret));
    }
}
Example No. 9
int main(int argc, char *argv[]) {
  char *program_name = argv[0];
  double l2_sigma_sq = 0.0;
  int grafting = 0;
  int grafting_light = 0;

  lbfgs_parameter_t params;
  lbfgs_parameter_init(&params);
  params.past = 1;
  params.delta = 1e-7;

  int ch;
  while ((ch = getopt_long(argc, argv, "", longopts, NULL)) != -1) {
    switch (ch) {
    case OPTION_FTOL:
      params.ftol = str_to_double(optarg);
      break;
    case OPTION_GTOL:
      params.gtol = str_to_double(optarg);
      break;
    case OPTION_GRAFTING:
      grafting = str_to_int(optarg);
      break;
    case OPTION_GRAFTING_LIGHT:
      grafting_light = str_to_int(optarg);
      break;
    case OPTION_L1:
      params.orthantwise_c = str_to_double(optarg);
      break;
    case OPTION_L2:
      l2_sigma_sq = str_to_double(optarg);
      break;
    case OPTION_LINESEARCH:
      if (strcmp(optarg, "armijo") == 0)
        params.linesearch = LBFGS_LINESEARCH_BACKTRACKING_ARMIJO;
      else if (strcmp(optarg, "backtracking") == 0)
        params.linesearch = LBFGS_LINESEARCH_BACKTRACKING;
      else if (strcmp(optarg, "wolfe") == 0)
        params.linesearch = LBFGS_LINESEARCH_BACKTRACKING_WOLFE;
      else if (strcmp(optarg, "strong_wolfe") == 0)
        params.linesearch = LBFGS_LINESEARCH_BACKTRACKING_STRONG_WOLFE;
      else {
        usage(program_name);
        return 1;
      }
      break;
    case OPTION_MINSTEP:
      params.min_step = str_to_double(optarg);
      break;
    case OPTION_MAXSTEP:
      params.max_step = str_to_double(optarg);
      break;
    case '?':
    default:
      usage(program_name);
      return 1;
    }
  }

  argc -= optind;
  argv += optind;

  if (argc != 0 && argc != 1) {
    usage(program_name);
    return 1;
  }

  if (grafting && grafting_light) {
    fprintf(stderr, "Grafting and grafting-light cannot be used simultaneously...");
    return 1;
  }

  if ((grafting || grafting_light) && params.orthantwise_c == 0.) {
    fprintf(stderr, "Grafting requires a l1 norm coefficient...");
    return 1;
  }

  fprintf(stderr, "l1 norm coefficient: %.4e\n", params.orthantwise_c); 
  fprintf(stderr, "l2 prior sigma^2: %.4e\n\n", l2_sigma_sq);

  dataset_t ds;
  
  int fd = 0;
  if (argc == 1 && (fd = open(argv[0], O_RDONLY)) == -1) {
    fprintf(stderr, "Could not open %s\n", argv[0]);
    return 1;
  }

  int r = read_tadm_dataset(fd, &ds);

  if (r != TADM_OK) {
    fprintf(stderr, "Error reading data...\n");
    return 1;
  }
  
  fprintf(stderr, "Features: %zu\n", ds.n_features);
  fprintf(stderr, "Contexts: %zu\n\n", ds.n_contexts);

  if (params.orthantwise_c != 0.0) {
    params.orthantwise_end = ds.n_features;
    // l1 prior only works with backtracking linesearch.
    params.linesearch = LBFGS_LINESEARCH_BACKTRACKING;
  }

  model_t model;
  if (grafting || grafting_light)
    model_new(&model, ds.n_features, true);
  else
    model_new(&model, ds.n_features, false);

  fprintf(stderr, "Iter\t-LL\t\txnorm\t\tgnorm\n\n");

  if (grafting)
    r = maxent_lbfgs_grafting(&ds, &model, &params, l2_sigma_sq, grafting);
  else if (grafting_light)
    r = maxent_lbfgs_grafting_light(&ds, &model, &params, l2_sigma_sq,
        grafting_light);
  else
    r = maxent_lbfgs_optimize(&ds, &model, &params, l2_sigma_sq);

  dataset_free(&ds);

  if (r != LBFGS_STOP && r != LBFGS_SUCCESS && r != LBFGS_ALREADY_MINIMIZED) {
    fprintf(stderr, "%s\n\n", err_to_string(lbfgs_errs, r));
    model_free(&model);
    return 1;
  }

  for (int i = 0; i < ds.n_features; ++i)
    printf("%.8f\n", model.params[i]);

  model_free(&model);

  return 0;
}
Example No. 10
 void setup(void) {
     lbfgs_parameter_init(&_params);
     //_params.max_step=0.1;
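     // Note: 1e-320 is a subnormal double (far below DBL_MIN, ~2.2e-308),
     // so this effectively disables the minimum-step check.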
     _params.min_step=1e-320;
 }
Example No. 11
/**
 * @brief Use the open source implement of OWL-QN to optimize the feature weights.
 *
 * @param weights  feature weights
 */
void MStep_Trainer::OptimizeFeatureWeights_jp(vector<double>& weights)
{

    // If expect_count == 0, the L1-norm L-BFGS run fails, so clamp it to a small positive value.
    for(int i=0;i<this->m_scfg.phrase_rules_count;i++)
    {
        if(this->m_scfg.phrase_rules[i]->expect_count==0)
        {
            this->m_scfg.phrase_rules[i]->expect_count=1e-20;
            this->m_scfg.N_cnt[3]+=1e-20;
        }
    }

    cout<<"begin optimize the lambda!"<<endl;
    int N = FeatureManager::GetSingleton().GetFeatureCount();
    cout<<"the number of features is: "<<N<<endl;
    int  ret = 0;
    lbfgsfloatval_t fx=0;
    lbfgsfloatval_t *x = lbfgs_malloc(N);
    lbfgs_parameter_t param;

    if (x == NULL) {
        cout<<"ERROR: Failed to allocate a memory block for variables"<<endl;
        return;
    }

    /* Initialize the variables; fall back to zeros when no previous
       weights are available (lbfgs_malloc does not zero the block). */
    for(int i=0;i<N;i++)
    {
        x[i] = (weights.size()>0) ? weights[i] : 0.0;
    }

    /* Initialize the parameters for the L-BFGS optimization. */
    lbfgs_parameter_init(&param);
    /*param.linesearch = LBFGS_LINESEARCH_BACKTRACKING;*/

    /*
       Start the L-BFGS optimization; this will invoke the callback functions
       evaluate() and progress() when necessary.
       */

    // we precompute some values used in the gradient function
    LBFGS_Param* lbfgs_param=new LBFGS_Param();
    lbfgs_param->scfg=&this->m_scfg;
    //int thread_count = this->config->configfile->read<int>("thread_count");
    double L2_coefficient=this->m_pconfig->configfile->read<double>("L2_coefficient");

    lbfgs_param->L2_coefficient= L2_coefficient;
    PreComputeLBFGS_Param(lbfgs_param,N);

    int max_iteration = this->m_pconfig->configfile->read<int>("max_iteration");
    int past=this->m_pconfig->configfile->read<int>("past");
    double delta=this->m_pconfig->configfile->read<double>("delta");
    int linesearch =this->m_pconfig->configfile->read<int>("linesearch");
    double gtol =this->m_pconfig->configfile->read<double>("gtol");
    double epsilon=this->m_pconfig->configfile->read<double>("epsilon");
    double L1_coefficient = this->m_pconfig->configfile->read<double>("L1_coefficient");
    int orthantwise_start = this->m_pconfig->configfile->read<int>("orthantwise_start");

    param.orthantwise_start = orthantwise_start;
    param.orthantwise_end = N-1;
    param.orthantwise_c=L1_coefficient;
    param.linesearch = linesearch;
    param.max_linesearch = 20;
    param.max_iterations =max_iteration;
    param.past=past;
    param.delta=delta;
    param.epsilon=epsilon;
    param.gtol=gtol;

    ret = lbfgs(N, x, &fx, evaluate_main, progress, static_cast<void*>(lbfgs_param), &param);

    Loger::LogTime();
    Loger::mylogfile<<"L-BFGS optimization terminated with status code = "<<ret<<endl;
    Loger::mylogfile<<"L-BFGS optimization log value fx = "<<fx<<endl;
    cout<<"L-BFGS optimization terminated with status code = "<<ret<<endl;
    cout<<"L-BFGS optimization log value fx = "<<fx<<endl;


    weights.clear();
    weights.resize(N,0);

    for(int i=0;i<N;i++)
    {
        weights[i]=x[i];
    }
    lbfgs_free(x);
    delete lbfgs_param;
}
Example No. 12
void parse_command_line(int argc, char* argv[], 
						boost::shared_ptr<lbfgs_parameter_t>& param,
						boost::shared_ptr<derivative_parameter_t>& derivative_param){

	bool bforget = false;
	namespace po = boost::program_options;
	po::options_description desc("Program options for train");
	desc.add_options()
		("help,h", "produce help message")
		("feat,f", po::value<std::string>()->required(), "feature files for training")
		("l1start", po::value<int>()->default_value(1), "start feature index for l1 norm")
		("l1end", po::value<int>()->default_value(-1), "end feature index for l1 norm")
		("l1c", po::value<double>()->default_value(1.0f), "default value for regularization parameter")
		("l2start", po::value<int>()->default_value(1), "start feature index for l2 norm")
		("l2end", po::value<int>()->default_value(-1), "end feature index for l2 norm")
		("l2c",  po::value<double>()->default_value(1.0f), "regularization parameter value for l2 norm")
		("iter", po::value<int>()->default_value(40), "number of iterations for optimization")
		("output", po::value<std::string>()->default_value("output.model"), "file store model");

	po::variables_map vm;

	try {
		po::store(po::parse_command_line(argc, argv, desc), vm);
	} catch(std::exception& e){
		std::cerr << e.what() << std::endl;
	}

	try {
		po::notify(vm);
	} catch(std::exception& e){
		std::cerr << e.what() << std::endl;
		bforget = true;
	}

	if( vm.count("help") ){
		std::cout << desc << std::endl;
		return;
	}

	if( bforget ){
		std::exit(-1);
	}
	
	int l1start = vm["l1start"].as<int>(), l1end = vm["l1end"].as<int>();
	int l2start = vm["l2start"].as<int>(), l2end = vm["l2end"].as<int>();
	double l1c = vm["l1c"].as<double>(), l2c = vm["l2c"].as<double>();
	int iter = vm["iter"].as<int>();
	std::string featfile = vm["feat"].as<std::string>(); 
	std::string	outmodel = vm["output"].as<std::string>();

	/* sanity check first */
	boost::filesystem::path p(featfile);
	if( !boost::filesystem::exists(p) ){
		
		std::cerr << "file " << featfile << " does not exist" << std::endl;
		std::abort();
	}
		
	if( !boost::filesystem::is_regular_file(p) ){
		std::cerr << "file " << p.filename() << " is not a regular file" << std::endl;
		std::abort();
	}

	boost::filesystem::path m(outmodel);
	if(!boost::filesystem::is_directory(m)){
		std::cerr << "output prefix must be a directory" << std::endl;
		std::abort();
	}

	if( (l1start >= l1end) || (l2start >= l2end) ){
		std::cerr << "end index must be larger than start index" << std::endl;
		std::abort();
	}

	if( (l1c < 0 ) || (l2c < 0) ){
		std::cerr << "regularization parameter can not be negative" << std::endl;
		std::abort();
	}

	/* Start to print out the parameters */
	std::cout << "Feature range for L1 normalization : [" 
		<< l1start << ", " 
		<< l1end << "]"
		<< std::endl;

	std::cout << "Regularization parameter lambda_0  : " 
		<< l1c << std::endl;

	std::cout << "Feature range for L2 normalization : ["
		<< l2start << ", "
		<< l2end << "]"
		<< std::endl;

	std::cout << "Regularization parameter lambda_1  : "
		<< l2c << std::endl;

	std::cout << " **** Reading training data from   : " 
		<< featfile << std::endl;

	std::cout << " **** Train iterations             : "
		<< iter << std::endl;

	std::cout << " **** model will output to **      : "
		<< outmodel << std::endl;

	/* set up parameters for L-BFGS */
	lbfgs_parameter_init(param.get());
	param->linesearch = LBFGS_LINESEARCH_BACKTRACKING;
	if( std::abs(l1c) > 1e-3 ){   // std::abs: plain abs() would truncate the double to int
		param->orthantwise_c = l1c;
		param->orthantwise_start = l1start;
		param->orthantwise_end = l1end;
	}

	param->max_iterations = iter;

	/* set up parameters for function evaluation */
	derivative_param->l2start = l2start;
	derivative_param->l2end = l2end;
	derivative_param->l2c = l2c;
	derivative_param->featfile = featfile;
	derivative_param->outmodel = outmodel;
	derivative_param->l1start = l2start;
	derivative_param->l1end = l1end;
}