Example #1
 void updateIterate(Vector<Real> &xnew, const Vector<Real> &x, const Vector<Real> &s, Real alpha,
                    BoundConstraint<Real> &con ) {
   xnew.set(x);
   xnew.axpy(alpha,s);
   if ( con.isActivated() ) {
     con.project(xnew);
   }
 }
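
As a point of reference, updateIterate computes the projected trial point x_new = P(x + alpha*s). Below is a minimal standalone sketch for elementwise box bounds [lo, hi]; the std::vector representation and the lo/hi arrays are illustrative assumptions, not ROL's Vector or BoundConstraint interfaces.

#include <algorithm>
#include <vector>

// Hypothetical helper: trial iterate x + alpha*s, projected onto box bounds.
std::vector<double> projectedTrialIterate(const std::vector<double> &x,
                                          const std::vector<double> &s,
                                          double alpha,
                                          const std::vector<double> &lo,
                                          const std::vector<double> &hi) {
  std::vector<double> xnew(x.size());
  for (std::size_t i = 0; i < x.size(); ++i) {
    // xnew_i = x_i + alpha*s_i, clipped back into [lo_i, hi_i].
    xnew[i] = std::clamp(x[i] + alpha*s[i], lo[i], hi[i]);
  }
  return xnew;
}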
 void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g, 
                  Objective<Real> &obj, BoundConstraint<Real> &con, 
                  AlgorithmState<Real> &algo_state ) {
   Step<Real>::initialize(x,s,g,obj,con,algo_state);
   Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
   lineSearch_->initialize(x, s, *(step_state->gradientVec),obj,con);
   if ( edesc_ == DESCENT_NEWTONKRYLOV || edesc_ == DESCENT_NEWTON || edesc_ == DESCENT_SECANT ) {
     Teuchos::RCP<Objective<Real> > obj_ptr = Teuchos::rcp(&obj, false);
     Teuchos::RCP<BoundConstraint<Real> > con_ptr = Teuchos::rcp(&con, false);
     hessian_ = Teuchos::rcp(
       new ProjectedHessian<Real>(secant_,obj_ptr,con_ptr,algo_state.iterateVec,step_state->gradientVec,
                                  useSecantHessVec_));
     precond_ = Teuchos::rcp(
       new ProjectedPreconditioner<Real>(secant_,obj_ptr,con_ptr,algo_state.iterateVec,
                                         step_state->gradientVec,useSecantPrecond_));
   }
   if ( con.isActivated() ) {
     d_ = s.clone();
   }
   if ( con.isActivated() || edesc_ == DESCENT_SECANT 
                          || (edesc_ == DESCENT_NEWTONKRYLOV && useSecantPrecond_) ) {
     gp_ = g.clone();
   }
 }
  /** \brief Update step, if successful.

      Given a trial step, \f$s_k\f$, this function updates \f$x_{k+1}=x_k+s_k\f$. 
      This function also updates the secant approximation.

      @param[in,out]   x          is the updated iterate
      @param[in]       s          is the computed trial step
      @param[in]       obj        is the objective function
      @param[in]       con        are the bound constraints
      @param[in]       algo_state contains the current state of the algorithm
  */
  void update( Vector<Real> &x, const Vector<Real> &s, Objective<Real> &obj, BoundConstraint<Real> &con,
               AlgorithmState<Real> &algo_state ) {
    Real tol = std::sqrt(ROL_EPSILON);
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();

    // Update iterate
    algo_state.iter++;
    x.axpy(1.0, s);
    // Compute new gradient
    if ( edesc_ == DESCENT_SECANT || 
        (edesc_ == DESCENT_NEWTONKRYLOV && useSecantPrecond_) ) {
      gp_->set(*(step_state->gradientVec));
    }
    obj.gradient(*(step_state->gradientVec),x,tol);
    algo_state.ngrad++;

    // Update Secant Information
    if ( edesc_ == DESCENT_SECANT || 
        (edesc_ == DESCENT_NEWTONKRYLOV && useSecantPrecond_) ) {
      secant_->update(*(step_state->gradientVec),*gp_,s,algo_state.snorm,algo_state.iter+1);
    }

    // Update algorithm state
    (algo_state.iterateVec)->set(x);
    if ( con.isActivated() ) {
      if ( useProjectedGrad_ ) {
        gp_->set(*(step_state->gradientVec));
        con.computeProjectedGradient( *gp_, x );
        algo_state.gnorm = gp_->norm();
      }
      else {
        d_->set(x);
        d_->axpy(-1.0,(step_state->gradientVec)->dual());
        con.project(*d_);
        d_->axpy(-1.0,x);
        algo_state.gnorm = d_->norm();
      }
    }
    else {
      algo_state.gnorm = (step_state->gradientVec)->norm();
    }
  }
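
When bounds are active, the else-branch above measures criticality by the projected gradient: gnorm = ||P(x - g) - x||, which vanishes exactly at a stationary point of the bound-constrained problem. A minimal sketch for box bounds, again assuming a plain std::vector representation rather than ROL's interfaces:

#include <algorithm>
#include <cmath>
#include <vector>

// Hypothetical helper: || P(x - g) - x || for box bounds [lo, hi].
double projectedGradientNorm(const std::vector<double> &x,
                             const std::vector<double> &g,
                             const std::vector<double> &lo,
                             const std::vector<double> &hi) {
  double nrm2 = 0.0;
  for (std::size_t i = 0; i < x.size(); ++i) {
    const double pi = std::clamp(x[i] - g[i], lo[i], hi[i]); // P(x - g)_i
    nrm2 += (pi - x[i])*(pi - x[i]);
  }
  return std::sqrt(nrm2);
}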
Example #4
 Real GradDotStep(const Vector<Real> &g, const Vector<Real> &s,
                  const Vector<Real> &x,
                  BoundConstraint<Real> &bnd, Real eps = 0) {
   Real gs(0), one(1);
   if (!bnd.isActivated()) {
     gs = s.dot(g.dual());
   }
   else {
     d_->set(s);
     bnd.pruneActive(*d_,g,x,eps);
     gs = d_->dot(g.dual());
     d_->set(x);
     d_->axpy(-one,g.dual());
     bnd.project(*d_);
     d_->scale(-one);
     d_->plus(x);
     bnd.pruneInactive(*d_,g,x,eps);
     gs -= d_->dot(g.dual());
   }
   return gs;
 }
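
In words, GradDotStep splits the directional derivative by the eps-active set: inactive components contribute g_i*s_i directly, while active components contribute through the projected gradient path x - P(x - g). The standalone sketch below uses a simplified proximity-to-the-bounds test in place of BoundConstraint::pruneActive/pruneInactive (which also account for the gradient sign), so treat it as an assumption.

#include <algorithm>
#include <vector>

// Hypothetical helper mirroring GradDotStep for box bounds [lo, hi].
double gradDotStepBox(const std::vector<double> &g, const std::vector<double> &s,
                      const std::vector<double> &x,
                      const std::vector<double> &lo, const std::vector<double> &hi,
                      double eps = 0.0) {
  double gs = 0.0;
  for (std::size_t i = 0; i < x.size(); ++i) {
    const bool active = (x[i] <= lo[i] + eps) || (x[i] >= hi[i] - eps);
    if (!active) {
      gs += g[i]*s[i];                                   // inactive: use the step
    }
    else {
      const double pi = std::clamp(x[i] - g[i], lo[i], hi[i]);
      gs -= g[i]*(x[i] - pi);                            // active: projected path
    }
  }
  return gs;
}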
  /** \brief Compute step.

      Computes a trial step, \f$s_k\f$, as defined by the enum EDescent.  Once the
      trial step is determined, this function computes an approximate minimizer
      of the 1D function \f$\phi_k(t) = f(x_k+ts_k)\f$.  This approximate
      minimizer must satisfy sufficient decrease and curvature conditions.

      @param[out]      s          is the computed trial step
      @param[in]       x          is the current iterate
      @param[in]       obj        is the objective function
      @param[in]       con        are the bound constraints
      @param[in]       algo_state contains the current state of the algorithm
  */
  void compute( Vector<Real> &s, const Vector<Real> &x, Objective<Real> &obj, BoundConstraint<Real> &con, 
                AlgorithmState<Real> &algo_state ) {
    Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();

    Real tol = std::sqrt(ROL_EPSILON);

    // Set active set parameter
    Real eps = 0.0;
    if ( con.isActivated() ) {
      eps = algo_state.gnorm;
    }
    lineSearch_->setData(eps);
    if ( hessian_ != Teuchos::null ) {
      hessian_->setData(eps);
    }
    if ( precond_ != Teuchos::null ) {
      precond_->setData(eps);
    }

    // Compute step s
    switch(edesc_) {
      case DESCENT_NEWTONKRYLOV:
        flagKrylov_ = 0;
        krylov_->run(s,*hessian_,*(step_state->gradientVec),*precond_,iterKrylov_,flagKrylov_);
        break;
      case DESCENT_NEWTON:
      case DESCENT_SECANT:
        hessian_->applyInverse(s,*(step_state->gradientVec),tol);
        break;
      case DESCENT_NONLINEARCG:
        nlcg_->run(s,*(step_state->gradientVec),x,obj);
        break;
      case DESCENT_STEEPEST:
        s.set(step_state->gradientVec->dual());
        break;
      default: break;
    }

    // Compute g.dot(s)
    Real gs = 0.0;
    if ( !con.isActivated() ) {
      gs = -s.dot((step_state->gradientVec)->dual());
    }
    else {
      if ( edesc_ == DESCENT_STEEPEST ) {
        d_->set(x);
        d_->axpy(-1.0,s);
        con.project(*d_);
        d_->scale(-1.0);
        d_->plus(x);
        //d->set(s);
        //con.pruneActive(*d,s,x,eps);
        //con.pruneActive(*d,*(step_state->gradientVec),x,eps);
        gs = -d_->dot((step_state->gradientVec)->dual());
      }
      else {
        d_->set(s);
        con.pruneActive(*d_,*(step_state->gradientVec),x,eps);
        gs = -d_->dot((step_state->gradientVec)->dual());
        d_->set(x);
        d_->axpy(-1.0,(step_state->gradientVec)->dual());
        con.project(*d_);
        d_->scale(-1.0);
        d_->plus(x);
        con.pruneInactive(*d_,*(step_state->gradientVec),x,eps);
        gs -= d_->dot((step_state->gradientVec)->dual());
      }
    }

    // Check that s is a descent direction, i.e., g.dot(s) < 0
    if ( gs >= 0.0 || (flagKrylov_ == 2 && iterKrylov_ <= 1) ) {
      s.set((step_state->gradientVec)->dual());
      if ( con.isActivated() ) {
        d_->set(s);
        con.pruneActive(*d_,s,x);
        gs = -d_->dot((step_state->gradientVec)->dual());
      }
      else {
        gs = -s.dot((step_state->gradientVec)->dual());
      }
    }
    s.scale(-1.0);

    // Perform line search
    Real fnew  = algo_state.value;
    ls_nfval_ = 0;
    ls_ngrad_ = 0;
    lineSearch_->run(step_state->searchSize,fnew,ls_nfval_,ls_ngrad_,gs,s,x,obj,con);

    // Make correction if maximum function evaluations reached
    if ( !acceptLastAlpha_ ) {
      lineSearch_->setMaxitUpdate(step_state->searchSize,fnew,algo_state.value);
    }

    algo_state.nfval += ls_nfval_;
    algo_state.ngrad += ls_ngrad_;

    // Compute scaled descent direction
    s.scale(step_state->searchSize);
    if ( con.isActivated() ) {
      s.plus(x);
      con.project(s);
      s.axpy(-1.0,x);
    }

    // Update step state information
    (step_state->descentVec)->set(s);

    // Update algorithm state information
    algo_state.snorm = s.norm();
    algo_state.value = fnew;
  }
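
The call to lineSearch_->run() hides the 1D search itself. As a minimal illustration of the sufficient-decrease test it enforces, here is a backtracking sketch on phi(t) = f(x + t*s); the signature and the defaults for c1, rho, and maxit are illustrative assumptions, not ROL's LineSearch API.

#include <functional>

// Shrink alpha until phi(alpha) <= phi(0) + c1*alpha*gs (Armijo condition).
double backtrack(const std::function<double(double)> &phi, // phi(t) = f(x + t*s)
                 double phi0,                              // phi(0) = f(x)
                 double gs,                                // <g,s>, negative for descent
                 double alpha = 1.0, double c1 = 1e-4,
                 double rho = 0.5, int maxit = 20) {
  for (int k = 0; k < maxit; ++k) {
    if (phi(alpha) <= phi0 + c1*alpha*gs) break; // sufficient decrease holds
    alpha *= rho;                                // otherwise shrink the step
  }
  return alpha;
}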
Example #6
  virtual bool status( const ELineSearch type, int &ls_neval, int &ls_ngrad, const Real alpha, 
                       const Real fold, const Real sgold, const Real fnew, 
                       const Vector<Real> &x, const Vector<Real> &s, 
                       Objective<Real> &obj, BoundConstraint<Real> &con ) { 
    Real tol = std::sqrt(ROL_EPSILON);

    // Check Armijo Condition
    bool armijo = false;
    if ( con.isActivated() ) {
      Real gs = 0.0;
      if ( edesc_ == DESCENT_STEEPEST ) {
        updateIterate(*d_,x,s,alpha,con);
        d_->scale(-1.0);
        d_->plus(x);
        gs = -s.dot(*d_);
      }
      else {
        d_->set(s);
        d_->scale(-1.0);
        con.pruneActive(*d_,*(grad_),x,eps_);
        gs = alpha*(grad_)->dot(*d_);
        d_->zero();
        updateIterate(*d_,x,s,alpha,con);
        d_->scale(-1.0);
        d_->plus(x);
        con.pruneInactive(*d_,*(grad_),x,eps_);
        gs += d_->dot(grad_->dual());
      }
      if ( fnew <= fold - c1_*gs ) {
        armijo = true;
      }
    }
    else {
      if ( fnew <= fold + c1_*alpha*sgold ) {
        armijo = true;
      }
    }

    // Check Maximum Iteration
    bool itcond = false;
    if ( ls_neval >= maxit_ ) { 
      itcond = true;
    }

    // Check Curvature Condition
    bool curvcond = false;
    if ( armijo && ((type != LINESEARCH_BACKTRACKING && type != LINESEARCH_CUBICINTERP) ||
                    (edesc_ == DESCENT_NONLINEARCG)) ) {
      if (econd_ == CURVATURECONDITION_GOLDSTEIN) {
        if (fnew >= fold + (1.0-c1_)*alpha*sgold) {
          curvcond = true;
        }
      }
      else if (econd_ == CURVATURECONDITION_NULL) {
        curvcond = true;
      }
      else { 
        updateIterate(*xtst_,x,s,alpha,con);
        obj.update(*xtst_);
        obj.gradient(*g_,*xtst_,tol);
        Real sgnew = 0.0;
        if ( con.isActivated() ) {
          d_->set(s);
          d_->scale(-alpha);
          con.pruneActive(*d_,s,x);
          sgnew = -d_->dot(g_->dual());
        }
        else {
          sgnew = s.dot(g_->dual());
        }
        ls_ngrad++;
   
        if (    ((econd_ == CURVATURECONDITION_WOLFE)       
                     && (sgnew >= c2_*sgold))
             || ((econd_ == CURVATURECONDITION_STRONGWOLFE) 
                     && (std::abs(sgnew) <= c2_*std::abs(sgold)))
             || ((econd_ == CURVATURECONDITION_GENERALIZEDWOLFE) 
                     && (c2_*sgold <= sgnew && sgnew <= -c3_*sgold))
             || ((econd_ == CURVATURECONDITION_APPROXIMATEWOLFE) 
                     && (c2_*sgold <= sgnew && sgnew <= (2.0*c1_ - 1.0)*sgold)) ) {
          curvcond = true;
        }
      }
    }

    if (type == LINESEARCH_BACKTRACKING || type == LINESEARCH_CUBICINTERP) {
      if (edesc_ == DESCENT_NONLINEARCG) {
        return ((armijo && curvcond) || itcond);
      }
      else {
        return (armijo || itcond);
      }
    }
    else {
      return ((armijo && curvcond) || itcond);
    }
  }
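
The curvature tests in the final else-branch can be restated compactly. Below, sgold = <g_k, s> (negative for a descent direction), sgnew = <g(x_k + alpha*s), s>, and c1, c2, c3 are the usual line-search parameters; the enum and function names are local assumptions, but the inequalities mirror the ones checked above.

#include <cmath>

enum class Curvature { Wolfe, StrongWolfe, GeneralizedWolfe, ApproximateWolfe };

bool curvatureHolds(Curvature cond, double sgold, double sgnew,
                    double c1, double c2, double c3) {
  switch (cond) {
    case Curvature::Wolfe:            // sgnew >= c2*sgold
      return sgnew >= c2*sgold;
    case Curvature::StrongWolfe:      // |sgnew| <= c2*|sgold|
      return std::abs(sgnew) <= c2*std::abs(sgold);
    case Curvature::GeneralizedWolfe: // c2*sgold <= sgnew <= -c3*sgold
      return c2*sgold <= sgnew && sgnew <= -c3*sgold;
    case Curvature::ApproximateWolfe: // c2*sgold <= sgnew <= (2*c1 - 1)*sgold
      return c2*sgold <= sgnew && sgnew <= (2.0*c1 - 1.0)*sgold;
  }
  return false;
}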
Example #7
  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g, 
                   Objective<Real> &obj, BoundConstraint<Real> &bnd, 
                   AlgorithmState<Real> &algo_state ) {
    d_ = x.clone();

    // Initialize unglobalized step
    Teuchos::ParameterList& list
      = parlist_.sublist("Step").sublist("Line Search").sublist("Descent Method");
    EDescent edesc = StringToEDescent(list.get("Type","Quasi-Newton Method") );
    if (bnd.isActivated()) {
      switch(edesc) {
        case DESCENT_STEEPEST: {
          desc_ = Teuchos::rcp(new GradientStep<Real>(parlist_,computeObj_));
          break;
        }
        case DESCENT_NONLINEARCG: {
          desc_ = Teuchos::rcp(new NonlinearCGStep<Real>(parlist_,nlcg_,computeObj_));
          break;
        }
        case DESCENT_SECANT: {
          desc_ = Teuchos::rcp(new ProjectedSecantStep<Real>(parlist_,secant_,computeObj_));
          break;
        }
        case DESCENT_NEWTON: {
          desc_ = Teuchos::rcp(new ProjectedNewtonStep<Real>(parlist_,computeObj_));
          break;
        }
        case DESCENT_NEWTONKRYLOV: {
          desc_ = Teuchos::rcp(new ProjectedNewtonKrylovStep<Real>(parlist_,krylov_,secant_,computeObj_));
          break;
        }
        default:
          TEUCHOS_TEST_FOR_EXCEPTION(true,std::invalid_argument,
            ">>> (LineSearchStep::Initialize): Undefined descent type!");
      }
    }
    else {
      switch(edesc) {
        case DESCENT_STEEPEST: {
          desc_ = Teuchos::rcp(new GradientStep<Real>(parlist_,computeObj_));
          break;
        }
        case DESCENT_NONLINEARCG: {
          desc_ = Teuchos::rcp(new NonlinearCGStep<Real>(parlist_,nlcg_,computeObj_));
          break;
        }
        case DESCENT_SECANT: {
          desc_ = Teuchos::rcp(new SecantStep<Real>(parlist_,secant_,computeObj_));
          break;
        }
        case DESCENT_NEWTON: {
          desc_ = Teuchos::rcp(new NewtonStep<Real>(parlist_,computeObj_));
          break;
        }
        case DESCENT_NEWTONKRYLOV: {
          desc_ = Teuchos::rcp(new NewtonKrylovStep<Real>(parlist_,krylov_,secant_,computeObj_));
          break;
        }
        default:
          TEUCHOS_TEST_FOR_EXCEPTION(true,std::invalid_argument,
            ">>> (LineSearchStep::Initialize): Undefined descent type!");
      }
    }
    desc_->initialize(x,s,g,obj,bnd,algo_state);

    // Initialize line search
    lineSearch_->initialize(x,s,g,obj,bnd);
    //Teuchos::RCP<const StepState<Real> > desc_state = desc_->getStepState();
    //lineSearch_->initialize(x,s,*(desc_state->gradientVec),obj,bnd);
  }
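
The two switch statements implement a simple factory: a string read from the parameter list selects the unglobalized step, and a projected variant is substituted when bounds are active. A stripped-down sketch of the same dispatch pattern, with all type and string names reduced to illustrative placeholders:

#include <memory>
#include <stdexcept>
#include <string>

struct StepBase { virtual ~StepBase() = default; };
struct GradStep        : StepBase {}; // placeholder for GradientStep
struct QuasiNewton     : StepBase {}; // placeholder for SecantStep
struct ProjQuasiNewton : StepBase {}; // placeholder for ProjectedSecantStep

std::unique_ptr<StepBase> makeStep(const std::string &type, bool boundsActive) {
  if (type == "Steepest Descent") {
    return std::make_unique<GradStep>();          // same step with or without bounds
  }
  if (type == "Quasi-Newton Method") {
    if (boundsActive) {
      return std::make_unique<ProjQuasiNewton>(); // projected variant
    }
    return std::make_unique<QuasiNewton>();       // unconstrained variant
  }
  throw std::invalid_argument(">>> makeStep: Undefined descent type!");
}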