Example #1
static PetscErrorCode TaoSetFromOptions_SQPCON(Tao tao)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #2
static int TaoSetOptions_BCG(TAO_SOLVER tao, void*solver)
{
  TAO_BCG *cg = (TAO_BCG *)solver;
  double   dtmp;
  int      info;
  char     method[20];
  PetscTruth flg;

  TaoFunctionBegin;

  info = OptionsHead("Nonlinear Conjugate Gradient method for unconstrained optimization");CHKERRQ(info);

  info = OptionsDouble("-tao_cg_restart","Restart method based on orthogonality of successive gradients","TaoBCGSetRestartTol",cg->eta,&dtmp, &flg);CHKERRQ(info);
  if (flg){
    info = TaoCGSetRestartTol(tao,dtmp);CHKERRQ(info);
  }

  info = OptionsString("-tao_cg_type [fletcher-reeves,polak-ribiere,polak-ribiere-plus]","Set Nonlinear BCG method","TaoBCGSetMethod","polak-ribiere-plus",method,19,&flg);CHKERRQ(info);
  if (flg){
    /*
    info = TaoBCGSetMethod(tao, method);CHKERRQ(info);
    */
  }
  info = TaoLineSearchSetFromOptions(tao);CHKERRQ(info);
  info = OptionsTail();CHKERRQ(info);

  TaoFunctionReturn(0);
}
Example #3
static PetscErrorCode TaoSetFromOptions_BQNLS(PetscOptionItems *PetscOptionsObject,Tao tao)
{
  TAO_BNK        *bnk = (TAO_BNK *)tao->data;
  TAO_BQNK       *bqnk = (TAO_BQNK*)bnk->ctx;
  PetscErrorCode ierr;
  KSPType        ksp_type;
  PetscBool      is_spd;

  PetscFunctionBegin;
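  /* Register BQNLS-specific option keys, then propagate options to the inner BNCG solver, the line search, the KSP, and the LMVM matrix */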
  ierr = PetscOptionsHead(PetscOptionsObject,"Quasi-Newton-Krylov method for bound constrained optimization");CHKERRQ(ierr);
  ierr = PetscOptionsEList("-tao_bqnls_as_type", "active set estimation method", "", BNK_AS, BNK_AS_TYPES, BNK_AS[bnk->as_type], &bnk->as_type, 0);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bqnls_epsilon", "(developer) tolerance used when computing actual and predicted reduction", "", bnk->epsilon, &bnk->epsilon,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bqnls_as_tol", "(developer) initial tolerance used when estimating actively bounded variables", "", bnk->as_tol, &bnk->as_tol,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bqnls_as_step", "(developer) step length used when estimating actively bounded variables", "", bnk->as_step, &bnk->as_step,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-tao_bqnls_max_cg_its", "number of BNCG iterations to take for each Newton step", "", bnk->max_cg_its, &bnk->max_cg_its,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  ierr = TaoSetFromOptions(bnk->bncg);CHKERRQ(ierr);
  ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr);
  ierr = KSPSetFromOptions(tao->ksp);CHKERRQ(ierr);
  ierr = KSPGetType(tao->ksp,&ksp_type);CHKERRQ(ierr);
  bnk->is_nash = bnk->is_gltr = bnk->is_stcg = PETSC_FALSE;
  ierr = MatSetFromOptions(bqnk->B);CHKERRQ(ierr);
  ierr = MatGetOption(bqnk->B, MAT_SPD, &is_spd);CHKERRQ(ierr);
  if (!is_spd) SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_INCOMP, "LMVM matrix must be symmetric positive-definite");
  PetscFunctionReturn(0);
}
Example #4
static PetscErrorCode TaoSetFromOptions_BLMVM(PetscOptionItems* PetscOptionsObject,Tao tao)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Limited-memory variable-metric method for bound constrained optimization");CHKERRQ(ierr);
  ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #5
/* ---------------------------------------------------------- */
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "TaoCreate_ASILS"
PetscErrorCode TaoCreate_ASILS(Tao tao)
{
  TAO_SSLS       *asls;
  PetscErrorCode ierr;
  const char     *armijo_type = TAOLINESEARCHARMIJO;

  PetscFunctionBegin;
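  /* Allocate the shared SSLS context and install the ASILS-specific callbacks; the view and setfromoptions routines are reused from SSLS */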
  ierr = PetscNewLog(tao,&asls);CHKERRQ(ierr);
  tao->data = (void*)asls;
  tao->ops->solve = TaoSolve_ASILS;
  tao->ops->setup = TaoSetUp_ASILS;
  tao->ops->view = TaoView_SSLS;
  tao->ops->setfromoptions = TaoSetFromOptions_SSLS;
  tao->ops->destroy = TaoDestroy_ASILS;
  tao->subset_type = TAO_SUBSET_SUBVEC;
  asls->delta = 1e-10;
  asls->rho = 2.1;
  asls->fixed = NULL;
  asls->free = NULL;
  asls->J_sub = NULL;
  asls->Jpre_sub = NULL;
  asls->w = NULL;
  asls->r1 = NULL;
  asls->r2 = NULL;
  asls->r3 = NULL;
  asls->t1 = NULL;
  asls->t2 = NULL;
  asls->dxfree = NULL;

  asls->identifier = 1e-5;

  ierr = TaoLineSearchCreate(((PetscObject)tao)->comm, &tao->linesearch);CHKERRQ(ierr);
  ierr = TaoLineSearchSetType(tao->linesearch, armijo_type);CHKERRQ(ierr);
  ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr);

  ierr = KSPCreate(((PetscObject)tao)->comm, &tao->ksp);CHKERRQ(ierr);
  ierr = KSPSetFromOptions(tao->ksp);CHKERRQ(ierr);
  tao->max_it = 2000;
  tao->max_funcs = 4000;
  tao->fatol = 0;
  tao->frtol = 0;
  tao->gttol = 0;
  tao->grtol = 0;
#if defined(PETSC_USE_REAL_SINGLE)
  tao->gatol = 1.0e-6;
  tao->fmin = 1.0e-4;
#else
  tao->gatol = 1.0e-16;
  tao->fmin = 1.0e-8;
#endif
  PetscFunctionReturn(0);
}
Example #6
static PetscErrorCode TaoSetFromOptions_OWLQN(Tao tao)
{
  TAO_OWLQN      *lmP = (TAO_OWLQN *)tao->data;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Orthant-Wise Limited-memory method for Quasi-Newton unconstrained optimization");CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_owlqn_lambda", "regulariser weight","", 100,&lmP->lambda,&flg); CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #7
static PetscErrorCode TaoSetFromOptions_GPCG(Tao tao)
{
  TAO_GPCG       *gpcg = (TAO_GPCG *)tao->data;
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Gradient Projection, Conjugate Gradient method for bound constrained optimization");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-gpcg_maxpgits","maximum number of gradient projections per GPCG iterate",0,gpcg->maxgpits,&gpcg->maxgpits,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #8
static PetscErrorCode TaoSetFromOptions_TRON(PetscOptionItems *PetscOptionsObject,Tao tao)
{
  TAO_TRON       *tron = (TAO_TRON *)tao->data;
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Newton Trust Region Method for bound constrained optimization");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-tao_tron_maxgpits","maximum number of gradient projections per TRON iterate","TaoSetMaxGPIts",tron->maxgpits,&tron->maxgpits,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr);
  ierr = KSPSetFromOptions(tao->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #9
PetscErrorCode TaoSetFromOptions_SSLS(PetscOptionItems *PetscOptionsObject,Tao tao)
{
  TAO_SSLS       *ssls = (TAO_SSLS *)tao->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Semismooth method with a linesearch for complementarity problems");CHKERRQ(ierr);
  ierr = PetscOptionsReal("-ssls_delta", "descent test fraction", "",ssls->delta, &ssls->delta, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-ssls_rho", "descent test power", "",ssls->rho, &ssls->rho, NULL);CHKERRQ(ierr);
  ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr);
  ierr = KSPSetFromOptions(tao->ksp);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #10
static int TaoSetOptions_BNLS(TAO_SOLVER tao, void*solver)
{
  int        info;
  TaoInt     ival;
  TaoTruth flg;

  TaoFunctionBegin;

  info = TaoOptionsHead("Newton Line Search Method for bound constrained optimization");CHKERRQ(info);

  info = TaoOptionInt("-redistribute","Redistribute Free variables (> 1 processors, only)","TaoPetscISType",1,&ival,&flg); CHKERRQ(info);

  info = TaoOptionName("-submatrixfree","Mask full matrix instead of extract submatrices","TaoPetscISType",&flg); CHKERRQ(info);

  info = TaoOptionsTail();CHKERRQ(info);
  info = TaoLineSearchSetFromOptions(tao);CHKERRQ(info);

  TaoFunctionReturn(0);
}
Example #11
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "TaoCreate_SSFLS"
PetscErrorCode TaoCreate_SSFLS(Tao tao)
{
  TAO_SSLS       *ssls;
  PetscErrorCode ierr;
  const char     *armijo_type = TAOLINESEARCHARMIJO;

  PetscFunctionBegin;
  ierr = PetscNewLog(tao,&ssls);CHKERRQ(ierr);
  tao->data = (void*)ssls;
  tao->ops->solve=TaoSolve_SSFLS;
  tao->ops->setup=TaoSetUp_SSFLS;
  tao->ops->view=TaoView_SSLS;
  tao->ops->setfromoptions=TaoSetFromOptions_SSLS;
  tao->ops->destroy = TaoDestroy_SSFLS;

  ssls->delta = 1e-10;
  ssls->rho = 2.1;

  ierr = TaoLineSearchCreate(((PetscObject)tao)->comm,&tao->linesearch);CHKERRQ(ierr);
  ierr = TaoLineSearchSetType(tao->linesearch,armijo_type);CHKERRQ(ierr);
  ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr);
  /* Linesearch objective and objective-gradient routines are set in the solve routine */
  ierr = KSPCreate(((PetscObject)tao)->comm,&tao->ksp);CHKERRQ(ierr);

  tao->max_it = 2000;
  tao->max_funcs = 4000;
  tao->fatol = 0;
  tao->frtol = 0;
  tao->gttol = 0;
  tao->grtol = 0;
#if defined(PETSC_USE_REAL_SINGLE)
  tao->gatol = 1.0e-6;
  tao->fmin = 1.0e-4;
#else
  tao->gatol = 1.0e-16;
  tao->fmin = 1.0e-8;
#endif
  PetscFunctionReturn(0);
}
Example #12
PETSC_EXTERN PetscErrorCode TaoCreate_SSFLS(Tao tao)
{
  TAO_SSLS       *ssls;
  PetscErrorCode ierr;
  const char     *armijo_type = TAOLINESEARCHARMIJO;

  PetscFunctionBegin;
  ierr = PetscNewLog(tao,&ssls);CHKERRQ(ierr);
  tao->data = (void*)ssls;
  tao->ops->solve=TaoSolve_SSFLS;
  tao->ops->setup=TaoSetUp_SSFLS;
  tao->ops->view=TaoView_SSLS;
  tao->ops->setfromoptions = TaoSetFromOptions_SSLS;
  tao->ops->destroy = TaoDestroy_SSFLS;

  ssls->delta = 1e-10;
  ssls->rho = 2.1;

  ierr = TaoLineSearchCreate(((PetscObject)tao)->comm,&tao->linesearch);CHKERRQ(ierr);
  ierr = TaoLineSearchSetType(tao->linesearch,armijo_type);CHKERRQ(ierr);
  ierr = TaoLineSearchSetOptionsPrefix(tao->linesearch,tao->hdr.prefix);CHKERRQ(ierr);
  ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr);
  /* Linesearch objective and objective-gradient routines are set in the solve routine */
  ierr = KSPCreate(((PetscObject)tao)->comm,&tao->ksp);CHKERRQ(ierr);
  ierr = KSPSetOptionsPrefix(tao->ksp,tao->hdr.prefix);CHKERRQ(ierr);

  /* Override default settings (unless already changed) */
  if (!tao->max_it_changed) tao->max_it = 2000;
  if (!tao->max_funcs_changed) tao->max_funcs = 4000;
  if (!tao->fatol_changed) tao->fatol = 0;
  if (!tao->frtol_changed) tao->frtol = 0;
  if (!tao->gttol_changed) tao->gttol = 0;
  if (!tao->grtol_changed) tao->grtol = 0;
#if defined(PETSC_USE_REAL_SINGLE)
  if (!tao->gatol_changed) tao->gatol = 1.0e-6;
  if (!tao->fmin_changed)  tao->fmin = 1.0e-4;
#else
  if (!tao->gatol_changed) tao->gatol = 1.0e-16;
  if (!tao->fmin_changed)  tao->fmin = 1.0e-8;
#endif
  PetscFunctionReturn(0);
}
Example #13
static int TaoSetFromOptions_GPCG(TAO_SOLVER tao, void*solver)
{
  TAO_GPCG *gpcg = (TAO_GPCG *)solver;
  int      info;
  TaoInt   ival;
  TaoTruth flg;

  TaoFunctionBegin;
  info = TaoOptionsHead("Gradient Projection, Conjugate Gradient method for bound constrained optimization");CHKERRQ(info);

  info = TaoOptionInt("-gpcg_maxpgits","maximum number of gradient projections per GPCG iterate",0,gpcg->maxgpits,&gpcg->maxgpits,&flg);CHKERRQ(info);

  info = TaoOptionInt("-redistribute","Redistribute Free variables (> 1 processors, only)","TaoPetscISType",1,&ival,&flg); CHKERRQ(info);

  info = TaoOptionName("-submatrixfree","Mask full matrix instead of extract submatrices","TaoPetscISType",&flg); CHKERRQ(info);


  info = TaoOptionsTail();CHKERRQ(info);
  info = TaoLineSearchSetFromOptions(tao);CHKERRQ(info);

  TaoFunctionReturn(0);
}