Example #1
/* Try to compute a trajectory in the given region.
 *  l       Vector of length params->N that will receive the left thruster
 *          values for each time step.
 *  r       Vector of length params->N that will receive the right thruster
 *          values for each time step.
 *  params  A structure describing the problem to solve.
 */
region_result_t compute_trajectory(double *l, double *r, region_params_t *params)
{
    lbfgsfloatval_t fx;
    lbfgs_parameter_t param;
    int i;
    int n = params->N;
    lbfgsfloatval_t *x = lbfgs_malloc(n*2);
    int ret = 0;

    if (!x) {
        printf("ERROR: Failed to allocate a memory block for variables.\n");
        return 1;
    }

    /* Start from all-zero values for both thrusters. */
    for (i = 0; i < n*2; i++) {
        x[i] = 0;
    }

    /* Initialize the parameters for the L-BFGS optimization. */
    lbfgs_parameter_init(&param);
    param.linesearch = LBFGS_LINESEARCH_BACKTRACKING_STRONG_WOLFE;
    param.past = 100;
    param.delta = 1e-4;
    param.max_linesearch = 1000;
    param.m = 5;

    /* Run the optimization; the solution vector packs the left thruster
     * values in x[0..n-1] and the right thruster values in x[n..2n-1]. */
    ret = lbfgs(n*2, x, &fx, evaluate, progress, params, &param);
    memcpy(l, x, sizeof(l[0])*n);
    memcpy(r, x+n, sizeof(r[0])*n);

    printf("\nL-BFGS optimization terminated with status code = %d\n", ret);
    printf("fx = %f\n", fx);
    printf("\n");

    printf("MATLAB variables:\n");
    printf("l = [ ");
    for (i = 0; i < n; i++) { printf("%f ", l[i]); }
    printf(" ];\nr = [ ");
    for (i = 0; i < n; i++) { printf("%f ", r[i]); }
    printf(" ];\n");

    printf("rx = [ ");
    for (i = 0; i < params->polycount; i++) { printf("%f ", params->poly[i].x); }
    printf(" ];\nry = [ ");
    for (i = 0; i < params->polycount; i++) { printf("%f ", params->poly[i].y); }
    printf(" ];\n");
    printf("parm = [ %f %f %f %f %f %f %f %f %f ];\n",
        params->p_0.x, params->p_0.y, params->v_0.x, params->v_0.y,
        params->d_0.x, params->d_0.y, params->omega_0, params->mass,
        params->radius);
    printf("dt = %f\n", params->dt);

    lbfgs_free(x);

    /* Status codes 0-2 (LBFGS_SUCCESS, LBFGS_STOP, LBFGS_ALREADY_MINIMIZED)
     * all indicate an acceptable solution. */
    if (ret == 0 || ret == 1 || ret == 2) {
        return REGION_SOLVED;
    } else {
        return REGION_STUCK;
    }
}
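The doc comment above only describes the interface, so a hypothetical caller sketch may help. The definition of region_params_t is not shown in the example; only the N field documented above is used here, and everything else is an assumption.

#include <stdlib.h>

/* Hypothetical caller for compute_trajectory(); region_params_t,
 * region_result_t and REGION_SOLVED are assumed to come from a project
 * header that this example does not show. */
int solve_region(region_params_t *params)
{
    double *l = malloc(sizeof(double) * params->N);
    double *r = malloc(sizeof(double) * params->N);
    if (!l || !r) {
        free(l);
        free(r);
        return -1;
    }

    region_result_t res = compute_trajectory(l, r, params);
    if (res == REGION_SOLVED) {
        /* l[0..N-1] and r[0..N-1] now hold the per-step thruster values. */
    }

    free(l);
    free(r);
    return res == REGION_SOLVED ? 0 : -1;
}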
Example #2
void model_new(model_t *model, size_t n_params, bool f_restrict)
{
  model->params = lbfgs_malloc(n_params);
  model->n_params = n_params;

  if (f_restrict)
    model->f_restrict = bitvector_alloc(n_params);
  else
    model->f_restrict = NULL;
}
Example #3
int LbfgsWrap::run()
{
	lbfgsfloatval_t fx;
	if (m_x)
	{
		lbfgs_free(m_x);
	}
	m_x = lbfgs_malloc(m_unknow_x.rows() * m_unknow_x.cols());
	
	if (!m_x)
	{
		EAGLEEYE_ERROR("failed to allocate a memory block for variables. \n");
		return 1;
	}

	int rows = m_unknow_x.rows();
	int cols = m_unknow_x.cols();
	for (int i = 0; i < rows; ++i)
	{
		float* tr_data = m_unknow_x.row(i);
		for (int j = 0; j < cols; ++j)
		{
			m_x[i * cols + j] = tr_data[j];
		}
	}

	/*
        Start the L-BFGS optimization; this will invoke the callback functions
        evaluate() and progress() when necessary.
     */
	int ret = lbfgs(rows * cols, m_x, &fx, _evaluate, _progress, this, &m_param);

	for (int i = 0; i < rows; ++i)
	{
		float* tr_data = m_unknow_x.row(i);
		for (int j = 0; j < cols; ++j)
		{
			tr_data[j] = float(m_x[i * cols + j]);
		}
	}

	// Report the result.
	EAGLEEYE_INFO("L-BFGS optimization terminated with status code = %d\n", ret);
	EAGLEEYE_INFO("loss = %f\n", fx);

	return ret;
}
Example #4
int run(int N)
{
    lbfgsfloatval_t fx;
    lbfgsfloatval_t *m_x = lbfgs_malloc(N);
    if (m_x == NULL) {
        printf("ERROR: Failed to allocate a memory block for variables.\n");
        return 1;
    }

    /* Initialize the variables. */
    for (int i = 0; i < N; ++i) {
        m_x[i] = -1.2;
    }

    /*
        Start the L-BFGS optimization; this will invoke the callback functions
        evaluate() and progress() when necessary.
     */
    int ret = lbfgs(N, m_x, &fx, _evaluate, _progress, this, NULL);

    /* Report the result. */
    printf("L-BFGS optimization terminated with status code = %d\n", ret);
    printf("  fx = %f, x[0] = %f, x[1] = %f\n", fx, m_x[0], m_x[1]);

    lbfgs_free(m_x);
    return ret;
}
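Example #4 (like several of the examples above) passes evaluate and progress callbacks that are not shown. The sketch below follows the callback signatures and the Rosenbrock-style objective from the stock liblbfgs sample program, which Example #4 appears to mirror (note the -1.2 starting point); the objective and the plain-function names are assumptions, not code taken from the example itself.

#include <stdio.h>
#include <lbfgs.h>

/* Sketch of liblbfgs callbacks, modeled on the library's sample program.
 * The objective is the classic pairwise Rosenbrock-style sum; it is an
 * assumption, not part of Example #4. */
static lbfgsfloatval_t evaluate(void *instance, const lbfgsfloatval_t *x,
                                lbfgsfloatval_t *g, const int n,
                                const lbfgsfloatval_t step)
{
    lbfgsfloatval_t fx = 0.0;
    for (int i = 0; i < n; i += 2) {
        /* f += (1 - x[i])^2 + 100 * (x[i+1] - x[i]^2)^2, with its gradient */
        lbfgsfloatval_t t1 = 1.0 - x[i];
        lbfgsfloatval_t t2 = 10.0 * (x[i+1] - x[i] * x[i]);
        g[i+1] = 20.0 * t2;
        g[i]   = -2.0 * (x[i] * g[i+1] + t1);
        fx += t1 * t1 + t2 * t2;
    }
    return fx;
}

static int progress(void *instance, const lbfgsfloatval_t *x,
                    const lbfgsfloatval_t *g, const lbfgsfloatval_t fx,
                    const lbfgsfloatval_t xnorm, const lbfgsfloatval_t gnorm,
                    const lbfgsfloatval_t step, int n, int k, int ls)
{
    /* Report progress; returning a non-zero value cancels the optimization. */
    printf("Iteration %d: fx = %f, xnorm = %f, gnorm = %f, step = %f\n",
           k, fx, xnorm, gnorm, step);
    return 0;
}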
Example #5
Matrix *runLbfgsOptim(Matrix *y, doubleVector *target, int L, int jobs, double epsilon) {
    int M = y->nrow;
    Matrix *w = createZeroMatrix(M, L);
    
    lbfgsfloatval_t fx;
    lbfgsfloatval_t *x = lbfgs_malloc(w->nrow*w->ncol);
    lbfgs_parameter_t param;

    if (x == NULL) {
        printf("ERROR: Failed to allocate a memory block for variables.\n");
        exit(1);
    }

    /* Initialize the variables (pack w into x in column-major order). */
    for (size_t i = 0; i < w->nrow; i++) {
        for (size_t j = 0; j < w->ncol;j++) {
            x[j*w->nrow + i] = getMatrixElement(w, i, j);
        }    
    }
    lbfgs_parameter_init(&param);
    param.epsilon = epsilon;
    param.m = 20;

    LbfgsInput inp;
    inp.target = target;
    inp.y = y;
    inp.jobs = jobs;
    inp.L = w->ncol;
    int ret = lbfgs(w->nrow*w->ncol, x, &fx, evaluate, progress, (void*)&inp, &param);

    printf("L-BFGS optimization terminated with status code = %d\n", ret);
    printf("  fx = %f\n", fx);
    Matrix *w_opt = formMatrixFromFloatVal(x, w->nrow, w->ncol);
    
    lbfgs_free(x);

    return(w_opt);
}
Example #6
int main(int argc, char *argv[])
{
    int i, ret = 0;
    int n = 2;
    lbfgsfloatval_t fx;
    lbfgsfloatval_t *x = lbfgs_malloc(n);
    lbfgs_parameter_t param;

    if (!x) {
        printf("ERROR: Failed to allocate a memory block for variables.\n");
        return 1;
    }

    for (i = 0; i < n; i++) {
        x[i] = 10;
    }

    /* Initialize the parameters for the L-BFGS optimization. */
    lbfgs_parameter_init(&param);
    //param.linesearch = LBFGS_LINESEARCH_BACKTRACKING_STRONG_WOLFE;

    ret = lbfgs(n, x, &fx, evaluate, progress, NULL, &param);

    printf("L-BFGS optimization terminated with status code = %d\n", ret);
    printf("  fx = %f; ", fx);
    for (i = 0; i < n; i ++){
        printf("x[%d] = %f; ", i, x[i]);
    }
    printf("\n");

    /* Answer according to Wolfram Alpha:
     *  1.00607 x^2 - 0.363643 x + 0.554
     */

    lbfgs_free(x);
    return 0;
}
Example #7
File: RAE.cpp Project: zerkh/RAE
void RAE::training()
{
	x = lbfgs_malloc(getRAEWeightSize());
	Map<MatrixLBFGS>(x, getRAEWeightSize(), 1).setRandom();
	lbfgs_parameter_t param;
	iterTimes = atoi(para->getPara("IterationTime").c_str());

	loadTrainingData();
	lbfgs_parameter_init(&param);
	param.max_iterations = iterTimes;

	lbfgsfloatval_t fx = 0;
	int ret;

	ret = lbfgs(getRAEWeightSize(), x, &fx, RAELBFGS::evaluate, RAELBFGS::progress, this, &param);

	cout << "L-BFGS optimization terminated with status code = " << ret << endl;
	cout << " fx = " << fx << endl;

	updateWeights(x);
	logWeights(para);
	trainingData.clear();
	lbfgs_free(x);
}
Example #8
Eigen::VectorXf minimizeLBFGS( EnergyFunction & efun, int restart, bool verbose ) {
	Eigen::VectorXf x0 = efun.initialValue();
	const int n = x0.rows();
	
	lbfgsfloatval_t *x = lbfgs_malloc(n);
	if (x == NULL) {
		printf("ERROR: Failed to allocate a memory block for variables.\n");
		return x0;
	}
	std::copy( x0.data(), x0.data()+n, x );
	
	lbfgs_parameter_t param;
	lbfgs_parameter_init(&param);
	// You might want to adjust the parameters to your problem
	param.epsilon = 1e-6;
	param.max_iterations = 50;
	
	double last_f = 1e100;
	int ret = 0; // keep ret defined even if restart < 0 and the loop never runs
	for( int i=0; i<=restart; i++ ) {
		lbfgsfloatval_t fx;
		ret = lbfgs(n, x, &fx, evaluate, verbose?progress:NULL, &efun, &param);
		if( last_f > fx )
			last_f = fx;
		else
			break;
	}
	
	if ( verbose ) {
		printf("L-BFGS optimization terminated with status code = %d\n", ret);
	}
	
	std::copy( x, x+n, x0.data() );
	lbfgs_free(x);
	return x0;
}
Example #9
File: RAE.cpp Project: zerkh/RAE
lbfgsfloatval_t RAE::_evaluate(const lbfgsfloatval_t* x, lbfgsfloatval_t* g, const int n, const lbfgsfloatval_t step)
{
	lbfgsfloatval_t fx = 0;

	int RAEThreadNum = atoi(para->getPara("RAEThreadNum").c_str());
	RAEThreadPara* threadpara = new RAEThreadPara[RAEThreadNum];
	int batchsize = trainingData.size() / RAEThreadNum;
	updateWeights(x);

	for(int i = 0; i < RAEThreadNum; i++)
	{
		threadpara[i].cRAE = this->copy();
		threadpara[i].g = lbfgs_malloc(getRAEWeightSize());
		for(int j = 0; j < getRAEWeightSize(); j++)
		{
			threadpara[i].g[j] = 0;
		}

		if(i == RAEThreadNum-1)
		{
			map<string, int>::iterator s;
			s = trainingData.begin();
			for(int d = 0; d < i*batchsize; d++)
			{
				s++;
			}

			threadpara[i].cRAE->trainingData.insert(s, trainingData.end());
			threadpara[i].instance_num = trainingData.size()%batchsize;
		}
		else
		{
			map<string, int>::iterator s, e;
			s = trainingData.begin();
			e = trainingData.begin();

			for(int d = 0; d < i*batchsize; d++)
			{
				s++;
			}
			for(int d = 0; d < (i+1)*batchsize; d++)
			{
				e++;
			}

			threadpara[i].cRAE->trainingData.insert(s, e);
			threadpara[i].instance_num = batchsize;
		}
	}

	pthread_t* pt = new pthread_t[RAEThreadNum];
	for (int a = 0; a < RAEThreadNum; a++) pthread_create(&pt[a], NULL, RAELBFGS::deepThread, (void *)(threadpara + a));
	for (int a = 0; a < RAEThreadNum; a++) pthread_join(pt[a], NULL);

	/* liblbfgs does not clear the gradient buffer before calling evaluate,
	   so zero it before accumulating the per-thread partial gradients. */
	for(int elem = 0; elem < getRAEWeightSize(); elem++)
		g[elem] = 0;

	for(int i = 0; i < RAEThreadNum; i++)
	{
		fx += threadpara[i].lossVal;
		for(int elem = 0; elem < getRAEWeightSize(); elem++)
		{
			g[elem] += threadpara[i].g[elem];
		}
	}

/*
	int internal_node_num = 0;
	for(map<string, int>::iterator it = trainingData.begin(); it != trainingData.end(); it++)
	{
		internal_node_num += getInternalNode(it->first);
	}*/

	fx /= trainingData.size();

	fx += ZETA * decay();
	for(int elem = 0; elem < getRAEWeightSize(); elem++)
	{
		g[elem] /= trainingData.size();
	}

	delWeight1 += ZETA * weights1;
	delWeight1_b.setZero();
	delWeight2 += ZETA * weights2;
	delWeight2_b.setZero();

	update(g);

	delete[] pt;
	pt = NULL;

	for(int i  = 0; i < RAEThreadNum; i++)
	{
		lbfgs_free(threadpara[i].g);
		delete threadpara[i].cRAE;
	}
	delete[] threadpara;
	threadpara = NULL;

	return fx;
}
Example #10
/**
 * @brief Use the open-source OWL-QN implementation (liblbfgs) to optimize the feature weights.
 *
 * @param weights  feature weights
 */
void MStep_Trainer::OptimizeFeatureWeights_jp(vector<double>& weights)
{

    // If expect_count == 0, the L1-regularized (OWL-QN) L-BFGS run fails, so clamp it to a tiny positive value.
    for(int i=0;i<this->m_scfg.phrase_rules_count;i++)
    {
        if(this->m_scfg.phrase_rules[i]->expect_count==0)
        {
            this->m_scfg.phrase_rules[i]->expect_count=1e-20;
            this->m_scfg.N_cnt[3]+=1e-20;
        }
    }

    cout<<"Begin optimizing the feature weights (lambda)."<<endl;
    int N = FeatureManager::GetSingleton().GetFeatureCount();
    cout<<"The number of features is: "<<N<<endl;
    int  ret = 0;
    lbfgsfloatval_t fx=0;
    lbfgsfloatval_t *x = lbfgs_malloc(N);
    lbfgs_parameter_t param;

    if (x == NULL) {
        cout<<"ERROR: Failed to allocate a memory block for variables"<<endl;
        return;
    }

    /* Initialize the variables. */
    if(weights.size()>0)
    {
        for(int i=0;i<N;i++)
        {
            x[i]= weights[i];
        }
    }

    /* Initialize the parameters for the L-BFGS optimization. */
    lbfgs_parameter_init(&param);
    /*param.linesearch = LBFGS_LINESEARCH_BACKTRACKING;*/

    /*
       Start the L-BFGS optimization; this will invoke the callback functions
       evaluate() and progress() when necessary.
       */

    // Pre-compute some values that are used in the gradient function.
    LBFGS_Param* lbfgs_param=new LBFGS_Param();
    lbfgs_param->scfg=&this->m_scfg;
    //int thread_count = this->config->configfile->read<int>("thread_count");
    double L2_coefficient=this->m_pconfig->configfile->read<double>("L2_coefficient");

    lbfgs_param->L2_coefficient= L2_coefficient;
    PreComputeLBFGS_Param(lbfgs_param,N);

    int max_iteration = this->m_pconfig->configfile->read<int>("max_iteration");
    int past=this->m_pconfig->configfile->read<int>("past");
    double delta=this->m_pconfig->configfile->read<double>("delta");
    int linesearch =this->m_pconfig->configfile->read<int>("linesearch");
    double gtol =this->m_pconfig->configfile->read<double>("gtol");
    double epsilon=this->m_pconfig->configfile->read<double>("epsilon");
    double L1_coefficient = this->m_pconfig->configfile->read<double>("L1_coefficient");
    int orthantwise_start = this->m_pconfig->configfile->read<int>("orthantwise_start");

    param.orthantwise_start = orthantwise_start;
    param.orthantwise_end = N-1;
    param.orthantwise_c=L1_coefficient;
    param.linesearch = linesearch;
    param.max_linesearch = 20;
    param.max_iterations =max_iteration;
    param.past=past;
    param.delta=delta;
    param.epsilon=epsilon;
    param.gtol=gtol;

    ret = lbfgs(N, x, &fx, evaluate_main, progress, static_cast<void*>(lbfgs_param), &param);

    Loger::LogTime();
    Loger::mylogfile<<"L-BFGS optimization terminated with status code = "<<ret<<endl;
    Loger::mylogfile<<"L-BFGS optimization log value fx = "<<fx<<endl;
    cout<<"L-BFGS optimization terminated with status code = "<<ret<<endl;
    cout<<"L-BFGS optimization log value fx = "<<fx<<endl;


    weights.clear();
    weights.resize(N,0);

    for(int i=0;i<N;i++)
    {
        weights[i]=x[i];
    }
    lbfgs_free(x);
}
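Example #10 drives OWL-QN entirely through configuration-file lookups, which makes it hard to see which liblbfgs settings actually enable it. The sketch below isolates those few parameters; the helper name and the l1_weight argument are illustrative assumptions, not code from the example.

#include <lbfgs.h>

/* Minimal OWL-QN parameter setup sketch (not taken from Example #10). */
static void init_owlqn_params(lbfgs_parameter_t *param, double l1_weight)
{
    lbfgs_parameter_init(param);
    param->orthantwise_c     = l1_weight; /* a nonzero L1 coefficient selects OWL-QN */
    param->orthantwise_start = 0;         /* first variable subject to the L1 penalty */
    param->orthantwise_end   = -1;        /* -1 (the default) means through the last variable */
    /* liblbfgs only accepts the backtracking line search when orthantwise_c != 0. */
    param->linesearch = LBFGS_LINESEARCH_BACKTRACKING;
}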