Example No. 1
/*@
  TaoComputeObjective - Computes the objective function value at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. f - Objective value at X

  Notes: TaoComputeObjective() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoComputeGradient(), TaoComputeObjectiveAndGradient(), TaoSetObjectiveRoutine()
@*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  PetscErrorCode ierr;
  Vec            temp;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscCheckSameComm(tao,1,X,2);
  if (tao->ops->computeobjective) {
    ierr = PetscLogEventBegin(Tao_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective evaluation routine");
    ierr = (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(Tao_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    tao->nfuncs++;
  } else if (tao->ops->computeobjectiveandgradient) {
    ierr = PetscInfo(tao,"Duplicating variable vector in order to call func/grad routine\n");CHKERRQ(ierr);
    ierr = VecDuplicate(X,&temp);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(Tao_ObjGradientEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective/gradient evaluation routine");
    ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,f,temp,tao->user_objgradP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(Tao_ObjGradientEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    ierr = VecDestroy(&temp);CHKERRQ(ierr);
    tao->nfuncgrads++;
  }  else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() has not been called");
  ierr = PetscInfo1(tao,"TAO Function evaluation: %14.12e\n",(double)(*f));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
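
For context, here is a minimal sketch of the user-side registration that the dispatch above relies on. It assumes the TaoSetObjectiveRoutine() interface from the same PETSc/TAO generation as this snippet; the AppCtx type and MyObjective callback are hypothetical placeholders.

typedef struct { PetscReal alpha; } AppCtx;  /* hypothetical user context */

static PetscErrorCode MyObjective(Tao tao, Vec X, PetscReal *f, void *ctx)
{
  AppCtx         *user = (AppCtx*)ctx;
  PetscErrorCode ierr;
  PetscReal      nrm;

  PetscFunctionBegin;
  ierr = VecNorm(X,NORM_2,&nrm);CHKERRQ(ierr);  /* toy objective f(x) = alpha*||x||^2 */
  *f   = user->alpha*nrm*nrm;
  PetscFunctionReturn(0);
}

/* in the application's setup code (tao and user assumed to exist there);
   afterwards TaoComputeObjective() dispatches to MyObjective through
   tao->ops->computeobjective */
ierr = TaoSetObjectiveRoutine(tao,MyObjective,(void*)&user);CHKERRQ(ierr);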
Example No. 2
/*@
  TaoComputeGradient - Computes the gradient of the objective function

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. G - gradient vector

  Notes: TaoComputeGradient() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoComputeObjective(), TaoComputeObjectiveAndGradient(), TaoSetGradientRoutine()
@*/
PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
{
  PetscErrorCode ierr;
  PetscReal      dummy;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(G,VEC_CLASSID,3);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,G,3);
  if (tao->ops->computegradient) {
    ierr = PetscLogEventBegin(Tao_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user gradient evaluation routine");
    ierr = (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(Tao_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    tao->ngrads++;
  } else if (tao->ops->computeobjectiveandgradient) {
    ierr = PetscLogEventBegin(Tao_ObjGradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective/gradient evaluation routine");
    ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,&dummy,G,tao->user_objgradP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(Tao_ObjGradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    tao->nfuncgrads++;
  }  else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetGradientRoutine() has not been called");
  PetscFunctionReturn(0);
}
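
The fallback branch above kicks in when only a combined objective/gradient callback is registered; when a dedicated gradient routine is available it is registered with TaoSetGradientRoutine(). A minimal sketch, reusing the hypothetical AppCtx from the previous example:

static PetscErrorCode MyGradient(Tao tao, Vec X, Vec G, void *ctx)
{
  AppCtx         *user = (AppCtx*)ctx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCopy(X,G);CHKERRQ(ierr);               /* gradient of alpha*||x||^2 is 2*alpha*x */
  ierr = VecScale(G,2.0*user->alpha);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

ierr = TaoSetGradientRoutine(tao,MyGradient,(void*)&user);CHKERRQ(ierr);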
Example No. 3
/*@
  TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameters:
+ f - Objective value at X
- G - Gradient vector at X

  Notes: TaoComputeObjectiveAndGradient() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoComputeObjective(), TaoComputeGradient(), TaoSetObjectiveRoutine()
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(G,VEC_CLASSID,4);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,G,4);
  if (tao->ops->computeobjectiveandgradient) {
    ierr = PetscLogEventBegin(Tao_ObjGradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective/gradient evaluation routine");
    ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,f,G,tao->user_objgradP);CHKERRQ(ierr);
    PetscStackPop;
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      /* Overwrite gradient with finite difference gradient */
      ierr = TaoDefaultComputeGradient(tao,X,G,tao->user_objgradP);CHKERRQ(ierr);
    }
    ierr = PetscLogEventEnd(Tao_ObjGradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    ierr = PetscLogEventBegin(Tao_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective evaluation routine");
    ierr = (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(Tao_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    tao->nfuncs++;
    ierr = PetscLogEventBegin(Tao_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user gradient evaluation routine");
    ierr = (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(Tao_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    tao->ngrads++;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() or TaoSetGradientRoutine() not set");
  ierr = PetscInfo1(tao,"TAO Function evaluation: %14.12e\n",(double)(*f));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
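
Solvers that need the value and the gradient at the same point prefer the combined path taken by the first branch above, which avoids a duplicated evaluation. A minimal sketch of registering such a routine with TaoSetObjectiveAndGradientRoutine(); MyObjGrad and AppCtx are hypothetical:

static PetscErrorCode MyObjGrad(Tao tao, Vec X, PetscReal *f, Vec G, void *ctx)
{
  AppCtx         *user = (AppCtx*)ctx;
  PetscErrorCode ierr;
  PetscReal      nrm;

  PetscFunctionBegin;
  ierr = VecNorm(X,NORM_2,&nrm);CHKERRQ(ierr);     /* value and gradient in one pass */
  *f   = user->alpha*nrm*nrm;
  ierr = VecCopy(X,G);CHKERRQ(ierr);
  ierr = VecScale(G,2.0*user->alpha);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

ierr = TaoSetObjectiveAndGradientRoutine(tao,MyObjGrad,(void*)&user);CHKERRQ(ierr);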
Example No. 4
/*@
  TaoLineSearchComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

  Collective on TaoLineSearch

  Input Parameters:
+ ls - the TaoLineSearch context
- x - input vector

  Output Parameters:
+ f - Objective value at x
- g - Gradient vector at x

  Notes: TaoLineSearchComputeObjectiveAndGradient() is typically used within line searches,
  so most users would not generally call this routine themselves.

  Level: developer

.seealso: TaoLineSearchComputeGradient(), TaoLineSearchComputeObjectiveAndGradient(), TaoLineSearchSetObjectiveRoutine()
@*/
PetscErrorCode TaoLineSearchComputeObjectiveAndGradient(TaoLineSearch ls, Vec x, PetscReal *f, Vec g)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ls,TAOLINESEARCH_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidPointer(f,3);
  PetscValidHeaderSpecific(g,VEC_CLASSID,4);
  PetscCheckSameComm(ls,1,x,2);
  PetscCheckSameComm(ls,1,g,4);
  if (ls->usetaoroutines) {
    ierr = TaoComputeObjectiveAndGradient(ls->tao,x,f,g);CHKERRQ(ierr);
  } else {
    ierr = PetscLogEventBegin(TaoLineSearch_EvalEvent,ls,0,0,0);CHKERRQ(ierr);
    if (!ls->ops->computeobjective && !ls->ops->computeobjectiveandgradient) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Line Search does not have objective function set");
    if (!ls->ops->computegradient && !ls->ops->computeobjectiveandgradient) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Line Search does not have gradient function set");

    PetscStackPush("TaoLineSearch user objective/gradient routine");
    if (ls->ops->computeobjectiveandgradient) {
      ierr = (*ls->ops->computeobjectiveandgradient)(ls,x,f,g,ls->userctx_funcgrad);CHKERRQ(ierr);
    } else {
      ierr = (*ls->ops->computeobjective)(ls,x,f,ls->userctx_func);CHKERRQ(ierr);
      ierr = (*ls->ops->computegradient)(ls,x,g,ls->userctx_grad);CHKERRQ(ierr);
    }
    PetscStackPop;
    ierr = PetscLogEventEnd(TaoLineSearch_EvalEvent,ls,0,0,0);CHKERRQ(ierr);
    ierr = PetscInfo1(ls,"TaoLineSearch Function evaluation: %14.12e\n",(double)(*f));CHKERRQ(ierr);
    ls->nfgeval++;
  }
  PetscFunctionReturn(0);
}
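
The ls->usetaoroutines branch above is enabled with TaoLineSearchUseTaoRoutines(), which tells the line search to evaluate f and g through the callbacks registered on the outer Tao rather than through its own. A minimal sketch, assuming the Tao-side callbacks were registered as in the earlier examples:

TaoLineSearch ls;

/* route line-search evaluations through the outer Tao, so the call above
   lands in TaoComputeObjectiveAndGradient() */
ierr = TaoGetLineSearch(tao,&ls);CHKERRQ(ierr);
ierr = TaoLineSearchUseTaoRoutines(ls,tao);CHKERRQ(ierr);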
Example No. 5
/*@
  TaoComputeSeparableObjective - Computes a separable objective function vector at a given point (for least-squares applications)

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. F - Objective vector at X

  Notes: TaoComputeSeparableObjective() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoSetSeparableObjectiveRoutine()
@*/
PetscErrorCode TaoComputeSeparableObjective(Tao tao, Vec X, Vec F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(F,VEC_CLASSID,3);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,F,3);
  if (tao->ops->computeseparableobjective) {
    ierr = PetscLogEventBegin(Tao_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user separable objective evaluation routine");
    ierr = (*tao->ops->computeseparableobjective)(tao,X,F,tao->user_sepobjP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(Tao_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    tao->nfuncs++;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetSeparableObjectiveRoutine() has not been called");
  ierr = PetscInfo(tao,"TAO separable function evaluation.\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example No. 6
/*@
  TaoLineSearchComputeObjective - Computes the objective function value at a given point

  Collective on TaoLineSearch

  Input Parameters:
+ ls - the TaoLineSearch context
- x - input vector

  Output Parameter:
. f - Objective value at x

  Notes: TaoLineSearchComputeObjective() is typically used within line searches,
  so most users would not generally call this routine themselves.

  Level: developer

.seealso: TaoLineSearchComputeGradient(), TaoLineSearchComputeObjectiveAndGradient(), TaoLineSearchSetObjectiveRoutine()
@*/
PetscErrorCode TaoLineSearchComputeObjective(TaoLineSearch ls, Vec x, PetscReal *f)
{
  PetscErrorCode ierr;
  Vec            gdummy;
  PetscReal      gts;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ls,TAOLINESEARCH_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidPointer(f,3);
  PetscCheckSameComm(ls,1,x,2);
  if (ls->usetaoroutines) {
    ierr = TaoComputeObjective(ls->tao,x,f);CHKERRQ(ierr);
  } else {
    ierr = PetscLogEventBegin(TaoLineSearch_EvalEvent,ls,0,0,0);CHKERRQ(ierr);
    if (!ls->ops->computeobjective && !ls->ops->computeobjectiveandgradient && !ls->ops->computeobjectiveandgts) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Line Search does not have objective function set");
    PetscStackPush("TaoLineSearch user objective routine");
    if (ls->ops->computeobjective) {
      ierr = (*ls->ops->computeobjective)(ls,x,f,ls->userctx_func);CHKERRQ(ierr);
    } else if (ls->ops->computeobjectiveandgradient) {
      ierr = VecDuplicate(x,&gdummy);CHKERRQ(ierr);
      ierr = (*ls->ops->computeobjectiveandgradient)(ls,x,f,gdummy,ls->userctx_funcgrad);CHKERRQ(ierr);
      ierr = VecDestroy(&gdummy);CHKERRQ(ierr);
    } else {
      ierr = (*ls->ops->computeobjectiveandgts)(ls,x,ls->stepdirection,f,&gts,ls->userctx_funcgts);CHKERRQ(ierr);
    }
    PetscStackPop;
    ierr = PetscLogEventEnd(TaoLineSearch_EvalEvent,ls,0,0,0);CHKERRQ(ierr);
  }
  ls->nfeval++;
  PetscFunctionReturn(0);
}
Example No. 7
/*@
  TaoLineSearchComputeGradient - Computes the gradient of the objective function

  Collective on TaoLineSearch

  Input Parameters:
+ ls - the TaoLineSearch context
- x - input vector

  Output Parameter:
. g - gradient vector

  Notes: TaoLineSearchComputeGradient() is typically used within line searches,
  so most users would not generally call this routine themselves.

  Level: developer

.seealso: TaoLineSearchComputeObjective(), TaoLineSearchComputeObjectiveAndGradient(), TaoLineSearchSetGradient()
@*/
PetscErrorCode TaoLineSearchComputeGradient(TaoLineSearch ls, Vec x, Vec g)
{
  PetscErrorCode ierr;
  PetscReal      fdummy;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ls,TAOLINESEARCH_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(g,VEC_CLASSID,3);
  PetscCheckSameComm(ls,1,x,2);
  PetscCheckSameComm(ls,1,g,3);
  if (ls->usetaoroutines) {
    ierr = TaoComputeGradient(ls->tao,x,g);CHKERRQ(ierr);
  } else {
    ierr = PetscLogEventBegin(TaoLineSearch_EvalEvent,ls,0,0,0);CHKERRQ(ierr);
    if (!ls->ops->computegradient && !ls->ops->computeobjectiveandgradient) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Line Search does not have gradient functions set");
    PetscStackPush("TaoLineSearch user gradient routine");
    if (ls->ops->computegradient) {
      ierr = (*ls->ops->computegradient)(ls,x,g,ls->userctx_grad);CHKERRQ(ierr);
    } else {
      ierr = (*ls->ops->computeobjectiveandgradient)(ls,x,&fdummy,g,ls->userctx_funcgrad);CHKERRQ(ierr);
    }
    PetscStackPop;
    ierr = PetscLogEventEnd(TaoLineSearch_EvalEvent,ls,0,0,0);CHKERRQ(ierr);
  }
  ls->ngeval++;
  PetscFunctionReturn(0);
}
Example No. 8
PetscErrorCode STApplyTranspose_Shell(ST st,Vec x,Vec y)
{
  PetscErrorCode ierr;
  ST_SHELL       *shell = (ST_SHELL*)st->data;

  PetscFunctionBegin;
  if (!shell->applytrans) SETERRQ(PetscObjectComm((PetscObject)st),PETSC_ERR_USER,"No applytranspose() routine provided to Shell ST");
  PetscStackPush("STSHELL applytranspose() user function");
  CHKMEMQ;
  ierr = (*shell->applytrans)(st,x,y);CHKERRQ(ierr);
  CHKMEMQ;
  PetscStackPop;
  PetscFunctionReturn(0);
}
Example No. 9
static PetscErrorCode PCApplyRichardson_Shell(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt it,PetscTruth guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PetscErrorCode ierr;
  PC_Shell       *shell;

  PetscFunctionBegin;
  shell = (PC_Shell*)pc->data;
  if (!shell->applyrich) SETERRQ(PETSC_ERR_USER,"No applyrichardson() routine provided to Shell PC");
  PetscStackPush("PCSHELL user function applyrichardson()");
  CHKMEMQ;
  ierr  = (*shell->applyrich)(pc,x,y,w,rtol,abstol,dtol,it,guesszero,outits,reason);CHKERRQ(ierr);
  CHKMEMQ;
  PetscStackPop;
  PetscFunctionReturn(0);
}
Example No. 10
static PetscErrorCode PCPostSolve_Shell(PC pc,KSP ksp,Vec b,Vec x)
{
  PC_Shell       *shell;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  shell = (PC_Shell*)pc->data;
  if (!shell->postsolve) SETERRQ(PETSC_ERR_USER,"No postsolve() routine provided to Shell PC");
  PetscStackPush("PCSHELL user function postsolve()");
  CHKMEMQ;
  ierr  = (*shell->postsolve)(pc,ksp,b,x);CHKERRQ(ierr);
  CHKMEMQ;
  PetscStackPop;
  PetscFunctionReturn(0);
}
Example No. 11
static PetscErrorCode PCApplyTranspose_Shell(PC pc,Vec x,Vec y)
{
  PC_Shell       *shell;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  shell = (PC_Shell*)pc->data;
  if (!shell->applytranspose) SETERRQ(PETSC_ERR_USER,"No applytranspose() routine provided to Shell PC");
  PetscStackPush("PCSHELL user function applytranspose()");
  CHKMEMQ;
  ierr  = (*shell->applytranspose)(pc,x,y);CHKERRQ(ierr);
  CHKMEMQ;
  PetscStackPop;
  PetscFunctionReturn(0);
}
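
The shell->applytranspose callback invoked above is supplied by the user through the PCSHELL interface. A minimal sketch, assuming the PCShell API generation that matches these snippets (PCShellGetContext taking a void** here); MyApplyTranspose and myctx are hypothetical:

static PetscErrorCode MyApplyTranspose(PC pc, Vec x, Vec y)
{
  PetscErrorCode ierr;
  void           *ctx;

  PetscFunctionBegin;
  ierr = PCShellGetContext(pc,&ctx);CHKERRQ(ierr);  /* user data set with PCShellSetContext() */
  ierr = VecCopy(x,y);CHKERRQ(ierr);                /* identity "preconditioner" as a placeholder */
  PetscFunctionReturn(0);
}

ierr = PCSetType(pc,PCSHELL);CHKERRQ(ierr);
ierr = PCShellSetContext(pc,&myctx);CHKERRQ(ierr);
ierr = PCShellSetApplyTranspose(pc,MyApplyTranspose);CHKERRQ(ierr);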
Example No. 12
static PetscErrorCode PCSetUp_Shell(PC pc)
{
  PC_Shell       *shell;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  shell = (PC_Shell*)pc->data;
  if (!shell->setup) SETERRQ(PETSC_ERR_USER,"No setup() routine provided to Shell PC");
  PetscStackPush("PCSHELL user function setup()");
  CHKMEMQ;
  ierr  = (*shell->setup)(pc);CHKERRQ(ierr);
  CHKMEMQ;
  PetscStackPop;
  PetscFunctionReturn(0);
}
Example No. 13
static PetscErrorCode PCApplyBA_Shell(PC pc,PCSide side,Vec x,Vec y,Vec w)
{
  PC_Shell       *shell;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  shell = (PC_Shell*)pc->data;
  if (!shell->applyBA) SETERRQ(PETSC_ERR_USER,"No applyBA() routine provided to Shell PC");
  PetscStackPush("PCSHELL user function applyBA()");
  CHKMEMQ;
  ierr  = (*shell->applyBA)(pc,side,x,y,w);CHKERRQ(ierr);
  CHKMEMQ;
  PetscStackPop;
  PetscFunctionReturn(0);
}
Example No. 14
PetscErrorCode STBackTransform_Shell(ST st,PetscInt n,PetscScalar *eigr,PetscScalar *eigi)
{
  PetscErrorCode ierr;
  ST_SHELL       *shell = (ST_SHELL*)st->data;

  PetscFunctionBegin;
  if (shell->backtransform) {
    PetscStackPush("STSHELL backtransform() user function");
    CHKMEMQ;
    ierr = (*shell->backtransform)(st,n,eigr,eigi);CHKERRQ(ierr);
    CHKMEMQ;
    PetscStackPop;
  }
  PetscFunctionReturn(0);
}
Example No. 15
/*@C
   TaoComputeJacobianInequality - Computes the Jacobian matrix that has been
   set with TaoSetJacobianInequalityRoutine().

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
-  X - input vector

   Output Parameters:
+  J - Jacobian matrix
-  Jpre - Preconditioning matrix

   Notes:
   Most users should not need to explicitly call this routine, as it
   is used internally within the minimization solvers.

   Level: developer

.seealso:  TaoComputeObjective(), TaoComputeObjectiveAndGradient(), TaoSetJacobianStateRoutine(), TaoComputeJacobianDesign(), TaoSetStateDesignIS()

@*/
PetscErrorCode TaoComputeJacobianInequality(Tao tao, Vec X, Mat J, Mat Jpre)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X, VEC_CLASSID,2);
  PetscCheckSameComm(tao,1,X,2);

  if (!tao->ops->computejacobianinequality) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call TaoSetJacobianInequality() first");
  ++tao->njac_inequality;
  ierr = PetscLogEventBegin(Tao_JacobianEval,tao,X,J,Jpre);CHKERRQ(ierr);
  PetscStackPush("Tao user Jacobian(inequality) function");
  ierr = (*tao->ops->computejacobianinequality)(tao,X,J,Jpre,tao->user_jac_inequalityP);CHKERRQ(ierr);
  PetscStackPop;
  ierr = PetscLogEventEnd(Tao_JacobianEval,tao,X,J,Jpre);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example No. 16
/*@C
   TaoComputeHessian - Computes the Hessian matrix that has been
   set with TaoSetHessianRoutine().

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
-  X - input vector

   Output Parameters:
+  H - Hessian matrix
-  Hpre - Preconditioning matrix

   Notes:
   Most users should not need to explicitly call this routine, as it
   is used internally within the minimization solvers.

   Level: developer

.seealso:  TaoComputeObjective(), TaoComputeObjectiveAndGradient(), TaoSetHessian()

@*/
PetscErrorCode TaoComputeHessian(Tao tao, Vec X, Mat H, Mat Hpre)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X, VEC_CLASSID,2);
  PetscCheckSameComm(tao,1,X,2);

  if (!tao->ops->computehessian) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call TaoSetHessian() first");
  ++tao->nhess;
  ierr = PetscLogEventBegin(Tao_HessianEval,tao,X,H,Hpre);CHKERRQ(ierr);
  PetscStackPush("Tao user Hessian function");
  ierr = (*tao->ops->computehessian)(tao,X,H,Hpre,tao->user_hessP);CHKERRQ(ierr);
  PetscStackPop;
  ierr = PetscLogEventEnd(Tao_HessianEval,tao,X,H,Hpre);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
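
A minimal sketch of the registration that fills tao->ops->computehessian, assuming the TaoSetHessianRoutine() interface generation in which the callback receives Mat arguments directly (matching the dispatch above); MyHessian, H, and user are hypothetical:

static PetscErrorCode MyHessian(Tao tao, Vec X, Mat H, Mat Hpre, void *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* fill H (and Hpre if it is a different matrix) based on X, then assemble */
  ierr = MatAssemblyBegin(H,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(H,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (Hpre != H) {
    ierr = MatAssemblyBegin(Hpre,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(Hpre,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* the same matrix is reused as the preconditioning matrix here */
ierr = TaoSetHessianRoutine(tao,H,H,MyHessian,(void*)&user);CHKERRQ(ierr);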
Example No. 17
PetscErrorCode TaoComputeVariableBounds(Tao tao)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  if (!tao->ops->computebounds) PetscFunctionReturn(0);
  if (!tao->XL || !tao->XU) {
    if (!tao->solution) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetInitialVector must be called before TaoComputeVariableBounds");
    ierr = VecDuplicate(tao->solution, &tao->XL);CHKERRQ(ierr);
    ierr = VecSet(tao->XL, PETSC_NINFINITY);CHKERRQ(ierr);
    ierr = VecDuplicate(tao->solution, &tao->XU);CHKERRQ(ierr);
    ierr = VecSet(tao->XU, PETSC_INFINITY);CHKERRQ(ierr);
  }
  PetscStackPush("Tao compute variable bounds");
  ierr = (*tao->ops->computebounds)(tao,tao->XL,tao->XU,tao->user_boundsP);CHKERRQ(ierr);
  PetscStackPop;
  PetscFunctionReturn(0);
}
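
TaoComputeVariableBounds() dispatches to a callback registered with TaoSetVariableBoundsRoutine(). A minimal sketch with a hypothetical MyBounds that imposes x >= 0:

static PetscErrorCode MyBounds(Tao tao, Vec XL, Vec XU, void *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecSet(XL,0.0);CHKERRQ(ierr);             /* lower bound: x >= 0 */
  ierr = VecSet(XU,PETSC_INFINITY);CHKERRQ(ierr);  /* no upper bound */
  PetscFunctionReturn(0);
}

ierr = TaoSetVariableBoundsRoutine(tao,MyBounds,NULL);CHKERRQ(ierr);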
Example No. 18
/*@C
   TaoAppComputeFunction - Compute the constraint vector using the
   routine set by TaoApplicationSetConstraintsRoutine().

   Collective on TAO_APPLICATION

   Input Parameters:
+  taoapp - the TAO_APPLICATION context
-  X - the point where the constraints should be evaluated

   Output Parameter:
.  R - constraint vector

   Level: developer

.keywords: TAO_APPLICATION, objective

.seealso: TaoAppComputeJacobian(), TaoAppSetConstraintRoutine()
@*/
int TaoAppComputeFunction(TAO_APPLICATION taoapp, Vec X, Vec R){
  int     info;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(taoapp,TAO_APP_COOKIE,1);
  PetscValidHeaderSpecific(X,VEC_COOKIE,2);
  PetscValidHeaderSpecific(R,VEC_COOKIE,3);
  if (taoapp->computevfunc){
    info = PetscLogEventBegin(Tao_FunctionEval,taoapp,X,R,0);CHKERRQ(info);
    PetscStackPush("Tao user ConstraintsFunction");
    info = (*taoapp->computevfunc)(taoapp,X,R,taoapp->usrfvctx);
    CHKERRQ(info);
    PetscStackPop;
    info = PetscLogEventEnd(Tao_FunctionEval,taoapp,X,R,0);CHKERRQ(info);
  } else {
    SETERRQ(1,"TAO ERROR: Must set Constraint function");
  }
  PetscFunctionReturn(0);
}
Example No. 19
/*@
  TaoLineSearchComputeObjectiveAndGTS - Computes the objective function value and inner product of gradient and step direction at a given point

  Collective on TaoLineSearch

  Input Parameters:
+ ls - the TaoLineSearch context
- x - input vector

  Output Parameters:
+ f - Objective value at x
- gts - inner product of gradient and step direction at x

  Notes: TaoLineSearchComputeObjectiveAndGTS() is typically used within line searches,
  so most users would not generally call this routine themselves.

  Level: developer

.seealso: TaoLineSearchComputeGradient(), TaoLineSearchComputeObjectiveAndGradient(), TaoLineSearchSetObjectiveRoutine()
@*/
PetscErrorCode TaoLineSearchComputeObjectiveAndGTS(TaoLineSearch ls, Vec x, PetscReal *f, PetscReal *gts)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ls,TAOLINESEARCH_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidPointer(f,3);
  PetscValidPointer(gts,4);
  PetscCheckSameComm(ls,1,x,2);
  ierr = PetscLogEventBegin(TaoLineSearch_EvalEvent,ls,0,0,0);CHKERRQ(ierr);
  if (!ls->ops->computeobjectiveandgts) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Line Search does not have objective and gts function set");
  PetscStackPush("TaoLineSearch user objective/gts routine");
  ierr = (*ls->ops->computeobjectiveandgts)(ls,x,ls->stepdirection,f,gts,ls->userctx_funcgts);CHKERRQ(ierr);
  PetscStackPop;
  ierr = PetscLogEventEnd(TaoLineSearch_EvalEvent,ls,0,0,0);CHKERRQ(ierr);
  ierr = PetscInfo1(ls,"TaoLineSearch Function evaluation: %14.12e\n",(double)(*f));CHKERRQ(ierr);
  ls->nfeval++;
  PetscFunctionReturn(0);
}
Example No. 20
PetscErrorCode TaoComputeInequalityConstraints(Tao tao, Vec X, Vec CI)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(CI,VEC_CLASSID,3);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,CI,3);

  if (!tao->ops->computeinequalityconstraints) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetInequalityConstraintsRoutine() has not been called");
  if (!tao->solution) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetInitialVector must be called before TaoComputeInequalityConstraints");
  ierr = PetscLogEventBegin(Tao_ConstraintsEval,tao,X,CI,NULL);CHKERRQ(ierr);
  PetscStackPush("Tao inequality constraints evaluation routine");
  ierr = (*tao->ops->computeinequalityconstraints)(tao,X,CI,tao->user_con_inequalityP);CHKERRQ(ierr);
  PetscStackPop;
  ierr = PetscLogEventEnd(Tao_ConstraintsEval,tao,X,CI,NULL);CHKERRQ(ierr);
  tao->nconstraints++;
  PetscFunctionReturn(0);
}
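
The callback invoked above is registered with TaoSetInequalityConstraintsRoutine(), together with a user-created vector that will hold the constraint values. A minimal sketch; MyInequalities and CI are hypothetical, and the constraint encoded is simply ci(x) = x >= 0:

static PetscErrorCode MyInequalities(Tao tao, Vec X, Vec CI, void *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCopy(X,CI);CHKERRQ(ierr);  /* ci(x) = x, interpreted as ci(x) >= 0 */
  PetscFunctionReturn(0);
}

ierr = TaoSetInequalityConstraintsRoutine(tao,CI,MyInequalities,NULL);CHKERRQ(ierr);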
Example No. 21
/*@C
   TaoAppComputeJacobian - Compute the Jacobian of the nonlinear equations using the
   routine set by TaoApplicationSetJacobianRoutine().

   Collective on TAO_APPLICATION

   Input Parameters:
+  taoapp - the TAO_APPLICATION context
-  X - the variable vector

   Output Parameters:
+  JJ - the Jacobian matrix
.  JJPre - the preconditioner for the Jacobian matrix
-  flag - flag used in KSPSetOperators()

   Level: developer

.keywords: TAO_APPLICATION, objective

.seealso: TaoAppComputeFunction(), TaoAppSetJacobianRoutine()
@*/
int TaoAppComputeJacobian(TAO_APPLICATION taoapp, Vec X, Mat *JJ, Mat *JJPre, MatStructure *flag){

  int     info;
  Mat J=*JJ,JPre=*JJPre;
  MatStructure pflag=*flag;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(X,VEC_COOKIE,2);
  PetscValidHeaderSpecific(J,MAT_COOKIE,3);
  PetscValidHeaderSpecific(taoapp,TAO_APP_COOKIE,1);
  if (taoapp->computejacobian){
    PetscStackPush("TAO User Jacobian Evaluation");
    info = PetscLogEventBegin(Tao_JacobianEval,taoapp,X,J,0);CHKERRQ(info);
    info = (*taoapp->computejacobian)(taoapp,X,&J,&JPre, &pflag, taoapp->usrjctx);
    CHKERRQ(info);
    info = PetscLogEventEnd(Tao_JacobianEval,taoapp,X,J,0);CHKERRQ(info);
    PetscStackPop;
  } else {
    SETERRQ(1,"TAO Error:  No Jacobian Routine Available.");
  }
  *JJ=J;*JJPre=JPre; *flag=pflag;
  PetscFunctionReturn(0);
}