コード例 #1
0
// Re-synchronize internal state after a change of parameter values.
// Transition probabilities are recomputed only for the nodes actually
// affected by the changed parameters, then the likelihood (and, if
// enabled, its first/second-order derivatives) is refreshed.
void DRNonHomogeneousTreeLikelihood::fireParameterChanged(const ParameterList & params)
{
  // Push the new parameter values into the model set, rate distribution
  // and branch lengths.
  applyParameters();

  // A change in any rate-distribution parameter affects every branch:
  if(params.getCommonParametersWith(_rateDistribution->getIndependentParameters()).size() > 0)
  {
    computeAllTransitionProbabilities();
  }
  else
  {
    // Otherwise, collect only the nodes impacted by the changed parameters.

    // Nodes attached to changed substitution-model parameters:
    vector<int> ids;
    vector<string> tmp = params.getCommonParametersWith(_modelSet->getNodeParameters()).getParameterNames();
    for(unsigned int i = 0; i < tmp.size(); i++)
    {
      vector<int> tmpv = _modelSet->getNodesWithParameter(tmp[i]);
      ids = VectorTools::vectorUnion(ids, tmpv);
    }
    // Branch-length parameters that changed:
    tmp = params.getCommonParametersWith(_brLenParameters).getParameterNames();
    vector<const Node *> nodes;
    for(unsigned int i = 0; i < ids.size(); i++)
    {
      nodes.push_back(_idToNode[ids[i]]);
    }
    vector<const Node *> tmpv;
    bool test = false;
    for(unsigned int i = 0; i < tmp.size(); i++)
    {
      if(tmp[i] == "BrLenRoot" || tmp[i] == "RootPosition")
      {
        // The root branch is reparametrized by its total length (BrLenRoot)
        // and a relative position (RootPosition), so a change in either one
        // affects both sons of the root.
        if(!test)
        {
          tmpv.push_back(_tree->getRootNode()->getSon(0));
          tmpv.push_back(_tree->getRootNode()->getSon(1));
          test = true; //Add only once.
        }
      }
      else
        // Other names are assumed to be of the form "BrLen<i>"; substr(5)
        // strips the "BrLen" prefix to recover the node index <i>.
        tmpv.push_back(_nodes[TextTools::to<unsigned int>(tmp[i].substr(5))]);
    }
    nodes = VectorTools::vectorUnion(nodes, tmpv);

    // Recompute transition probabilities for the affected nodes only.
    for(unsigned int i = 0; i < nodes.size(); i++)
    {
      computeTransitionProbabilitiesForNode(nodes[i]);
    }
    // Root frequencies may depend on model parameters; refresh them.
    _rootFreqs = _modelSet->getRootFrequencies();
  }
  computeTreeLikelihood();
  if(_computeFirstOrderDerivatives)
  {
    computeTreeDLikelihoods();  
  }
  if(_computeSecondOrderDerivatives)
  {
    computeTreeD2Likelihoods();
  }
}
コード例 #2
0
ファイル: OptimizationTools.cpp プロジェクト: matsen/bpp-phyl
/**
 * Optimize all selected parameters of a clock-constrained likelihood at once,
 * using numerical derivatives.
 *
 * @param cl             The clock tree-likelihood function to optimize.
 * @param parameters     The parameters to optimize.
 * @param listener       Optional optimization listener (not owned).
 * @param tolerance      Stop-condition tolerance.
 * @param tlEvalMax      Maximum number of function evaluations.
 * @param messageHandler Stream for messages (may be null).
 * @param profiler       Stream for profiling output (may be null).
 * @param verbose        Verbosity level.
 * @param optMethodDeriv Derivative-based method (gradient or Newton).
 * @return The number of function evaluations performed.
 * @throws Exception If optMethodDeriv is not a known method.
 */
unsigned int OptimizationTools::optimizeNumericalParametersWithGlobalClock2(
  DiscreteRatesAcrossSitesClockTreeLikelihood* cl,
  const ParameterList& parameters,
  OptimizationListener* listener,
  double tolerance,
  unsigned int tlEvalMax,
  OutputStream* messageHandler,
  OutputStream* profiler,
  unsigned int verbose,
  const std::string& optMethodDeriv)
throw (Exception)
{
  // Numerical-derivative wrapper around the likelihood function.
  AbstractNumericalDerivative* fun = 0;

  // Build optimizer:
  Optimizer* optimizer = 0;
  if (optMethodDeriv == OPTIMIZATION_GRADIENT)
  {
    fun = new TwoPointsNumericalDerivative(cl);
    fun->setInterval(0.0000001);
    optimizer = new ConjugateGradientMultiDimensions(fun);
  }
  else if (optMethodDeriv == OPTIMIZATION_NEWTON)
  {
    fun = new ThreePointsNumericalDerivative(cl);
    fun->setInterval(0.0001);
    optimizer = new PseudoNewtonOptimizer(fun);
  }
  else
    // Fixed: the message previously named optimizeBranchLengthsParameters.
    throw Exception("OptimizationTools::optimizeNumericalParametersWithGlobalClock2. Unknown optimization method: " + optMethodDeriv);

  // Numerical derivatives:
  ParameterList tmp = parameters.getCommonParametersWith(cl->getParameters());
  fun->setParametersToDerivate(tmp.getParameterNames());

  optimizer->setVerbose(verbose);
  optimizer->setProfiler(profiler);
  optimizer->setMessageHandler(messageHandler);
  optimizer->setMaximumNumberOfEvaluations(tlEvalMax);
  optimizer->getStopCondition()->setTolerance(tolerance);

  // Optimize TreeLikelihood function:
  optimizer->setConstraintPolicy(AutoParameter::CONSTRAINTS_AUTO);
  if (listener)
    optimizer->addOptimizationListener(listener);
  optimizer->init(parameters);
  optimizer->optimize();
  if (verbose > 0)
    ApplicationTools::displayMessage("\n");

  // We're done.
  unsigned int n = optimizer->getNumberOfEvaluations();
  delete optimizer;
  // The optimizer does not own the derivative wrapper; release it here
  // (previously leaked).
  delete fun;
  return n;
}
コード例 #3
0
ファイル: OptimizationTools.cpp プロジェクト: matsen/bpp-phyl
/**
 * Optimize only the branch-length parameters of a likelihood function,
 * using the analytical derivatives provided by the likelihood object.
 *
 * @param tl             The tree-likelihood function to optimize.
 * @param parameters     Candidate parameters; only those that are branch
 *                       lengths are actually optimized.
 * @param listener       Optional optimization listener (not owned).
 * @param tolerance      Stop-condition tolerance.
 * @param tlEvalMax      Maximum number of function evaluations.
 * @param messageHandler Stream for messages (may be null).
 * @param profiler       Stream for profiling output (may be null).
 * @param verbose        Verbosity level.
 * @param optMethodDeriv Derivative-based method (gradient, Newton or BFGS).
 * @return The number of function evaluations performed.
 * @throws Exception If optMethodDeriv is not a known method.
 */
unsigned int OptimizationTools::optimizeBranchLengthsParameters(
  DiscreteRatesAcrossSitesTreeLikelihood* tl,
  const ParameterList& parameters,
  OptimizationListener* listener,
  double tolerance,
  unsigned int tlEvalMax,
  OutputStream* messageHandler,
  OutputStream* profiler,
  unsigned int verbose,
  const std::string& optMethodDeriv)
throw (Exception)
{
  // Reject unknown methods up front.
  bool isGradient = (optMethodDeriv == OPTIMIZATION_GRADIENT);
  bool isNewton   = (optMethodDeriv == OPTIMIZATION_NEWTON);
  bool isBfgs     = (optMethodDeriv == OPTIMIZATION_BFGS);
  if (!isGradient && !isNewton && !isBfgs)
    throw Exception("OptimizationTools::optimizeBranchLengthsParameters. Unknown optimization method: " + optMethodDeriv);

  // All three methods need first-order derivatives; only Newton also uses
  // second-order ones.
  tl->enableFirstOrderDerivatives(true);
  tl->enableSecondOrderDerivatives(isNewton);

  // Build the optimizer matching the requested method:
  Optimizer* opt = 0;
  if (isGradient)
    opt = new ConjugateGradientMultiDimensions(tl);
  else if (isNewton)
    opt = new PseudoNewtonOptimizer(tl);
  else
    opt = new BfgsMultiDimensions(tl);

  opt->setVerbose(verbose);
  opt->setProfiler(profiler);
  opt->setMessageHandler(messageHandler);
  opt->setMaximumNumberOfEvaluations(tlEvalMax);
  opt->getStopCondition()->setTolerance(tolerance);

  // Restrict the optimization to branch-length parameters and run:
  ParameterList blParams = parameters.getCommonParametersWith(tl->getBranchLengthsParameters());
  opt->setConstraintPolicy(AutoParameter::CONSTRAINTS_AUTO);
  if (listener)
    opt->addOptimizationListener(listener);
  opt->init(blParams);
  opt->optimize();
  if (verbose > 0)
    ApplicationTools::displayMessage("\n");

  // Collect the evaluation count and clean up.
  unsigned int nbEval = opt->getNumberOfEvaluations();
  delete opt;
  return nbEval;
}
コード例 #4
0
ファイル: OptimizationTools.cpp プロジェクト: matsen/bpp-phyl
/**
 * Optimize the parameters of a clock-constrained likelihood with a
 * meta-optimizer: derivative-based optimization of branch lengths
 * (via numerical derivatives), Brent/simplex for model and rate
 * distribution parameters.
 *
 * @param cl             The clock tree-likelihood function to optimize.
 * @param parameters     The parameters to optimize.
 * @param listener       Optional optimization listener (not owned).
 * @param nstep          Number of progressive steps for the meta-optimizer.
 * @param tolerance      Stop-condition tolerance.
 * @param tlEvalMax      Maximum number of function evaluations.
 * @param messageHandler Stream for messages (may be null).
 * @param profiler       Stream for profiling output (may be null).
 * @param verbose        Verbosity level.
 * @param optMethodDeriv Derivative-based method (gradient or Newton).
 * @return The number of function evaluations performed.
 * @throws Exception If optMethodDeriv is not a known method.
 */
unsigned int OptimizationTools::optimizeNumericalParametersWithGlobalClock(
  DiscreteRatesAcrossSitesClockTreeLikelihood* cl,
  const ParameterList& parameters,
  OptimizationListener* listener,
  unsigned int nstep,
  double tolerance,
  unsigned int tlEvalMax,
  OutputStream* messageHandler,
  OutputStream* profiler,
  unsigned int verbose,
  const std::string& optMethodDeriv)
throw (Exception)
{
  // Numerical-derivative wrapper around the likelihood function.
  AbstractNumericalDerivative* fun = 0;

  // Build optimizer:
  MetaOptimizerInfos* desc = new MetaOptimizerInfos();
  if (optMethodDeriv == OPTIMIZATION_GRADIENT)
  {
    fun = new TwoPointsNumericalDerivative(cl);
    fun->setInterval(0.0000001);
    desc->addOptimizer("Branch length parameters", new ConjugateGradientMultiDimensions(fun), cl->getBranchLengthsParameters().getParameterNames(), 2, MetaOptimizerInfos::IT_TYPE_FULL);
  }
  else if (optMethodDeriv == OPTIMIZATION_NEWTON)
  {
    fun = new ThreePointsNumericalDerivative(cl);
    fun->setInterval(0.0001);
    desc->addOptimizer("Branch length parameters", new PseudoNewtonOptimizer(fun), cl->getBranchLengthsParameters().getParameterNames(), 2, MetaOptimizerInfos::IT_TYPE_FULL);
  }
  else
    throw Exception("OptimizationTools::optimizeNumericalParametersWithGlobalClock. Unknown optimization method: " + optMethodDeriv);

  // Numerical derivatives:
  ParameterList tmp = parameters.getCommonParametersWith(cl->getBranchLengthsParameters());
  fun->setParametersToDerivate(tmp.getParameterNames());

  // Few model parameters: one-dimensional Brent per parameter; many: simplex.
  ParameterList plsm = parameters.getCommonParametersWith(cl->getSubstitutionModelParameters());
  if (plsm.size() < 10)
    desc->addOptimizer("Substitution model parameter", new SimpleMultiDimensions(cl), plsm.getParameterNames(), 0, MetaOptimizerInfos::IT_TYPE_STEP);
  else
    desc->addOptimizer("Substitution model parameters", new DownhillSimplexMethod(cl), plsm.getParameterNames(), 0, MetaOptimizerInfos::IT_TYPE_FULL);

  ParameterList plrd = parameters.getCommonParametersWith(cl->getRateDistributionParameters());
  if (plrd.size() < 10)
    desc->addOptimizer("Rate distribution parameter", new SimpleMultiDimensions(cl), plrd.getParameterNames(), 0, MetaOptimizerInfos::IT_TYPE_STEP);
  else
    // Fixed typo in display label: "Rate dsitribution parameters".
    desc->addOptimizer("Rate distribution parameters", new DownhillSimplexMethod(cl), plrd.getParameterNames(), 0, MetaOptimizerInfos::IT_TYPE_FULL);

  unsigned int n = 0;
  {
    // Scope ensures the optimizer is destroyed before we release fun.
    MetaOptimizer optimizer(fun, desc, nstep);
    optimizer.setVerbose(verbose);
    optimizer.setProfiler(profiler);
    optimizer.setMessageHandler(messageHandler);
    optimizer.setMaximumNumberOfEvaluations(tlEvalMax);
    optimizer.getStopCondition()->setTolerance(tolerance);

    // Optimize TreeLikelihood function:
    optimizer.setConstraintPolicy(AutoParameter::CONSTRAINTS_AUTO);
    if (listener)
      optimizer.addOptimizationListener(listener);
    optimizer.init(parameters);
    optimizer.optimize();
    if (verbose > 0)
      ApplicationTools::displayMessage("\n");

    n = optimizer.getNumberOfEvaluations();
  }

  // The optimizer does not own the derivative wrapper; release it here
  // (previously leaked).
  delete fun;

  // We're done.
  return n;
}
コード例 #5
0
ファイル: OptimizationTools.cpp プロジェクト: matsen/bpp-phyl
/**
 * Optimize all numerical parameters of a likelihood function with a
 * meta-optimizer: derivative-based optimization of branch lengths, and
 * either Brent (per-parameter) or BFGS (joint, via numerical derivatives)
 * for substitution-model and rate-distribution parameters.
 *
 * @param tl                The tree-likelihood function to optimize.
 * @param parameters        The parameters to optimize.
 * @param listener          Optional optimization listener (not owned).
 * @param nstep             Number of progressive steps for the meta-optimizer.
 * @param tolerance         Stop-condition tolerance.
 * @param tlEvalMax         Maximum number of function evaluations.
 * @param messageHandler    Stream for messages (may be null).
 * @param profiler          Stream for profiling output (may be null).
 * @param reparametrization Whether to reparametrize to remove constraints.
 * @param verbose           Verbosity level.
 * @param optMethodDeriv    Branch-length method (gradient, Newton or BFGS).
 * @param optMethodModel    Model-parameter method (Brent or BFGS).
 * @return The number of function evaluations performed.
 * @throws Exception If either method name is unknown.
 */
unsigned int OptimizationTools::optimizeNumericalParameters(
  DiscreteRatesAcrossSitesTreeLikelihood* tl,
  const ParameterList& parameters,
  OptimizationListener* listener,
  unsigned int nstep,
  double tolerance,
  unsigned int tlEvalMax,
  OutputStream* messageHandler,
  OutputStream* profiler,
  bool reparametrization,
  unsigned int verbose,
  const std::string& optMethodDeriv,
  const std::string& optMethodModel)
throw (Exception)
{
  DerivableSecondOrder* f = tl;
  ParameterList pl = parameters;

  // Shall we reparametrize the function to remove constraints?
  auto_ptr<DerivableSecondOrder> frep;
  if (reparametrization)
  {
    frep.reset(new ReparametrizationDerivableSecondOrderWrapper(f, parameters));
    f = frep.get();

    // Reset parameters to remove constraints:
    pl = f->getParameters().subList(parameters.getParameterNames());
  }

  // ///////////////
  // Build optimizer:

  // Branch lengths

  MetaOptimizerInfos* desc = new MetaOptimizerInfos();
  MetaOptimizer* poptimizer = 0;
  // Numerical-derivative wrapper; only used by the BFGS model path, but
  // created unconditionally. It does not own f.
  AbstractNumericalDerivative* fnum = new ThreePointsNumericalDerivative(f);

  if (optMethodDeriv == OPTIMIZATION_GRADIENT)
    desc->addOptimizer("Branch length parameters", new ConjugateGradientMultiDimensions(f), tl->getBranchLengthsParameters().getParameterNames(), 2, MetaOptimizerInfos::IT_TYPE_FULL);
  else if (optMethodDeriv == OPTIMIZATION_NEWTON)
    desc->addOptimizer("Branch length parameters", new PseudoNewtonOptimizer(f), tl->getBranchLengthsParameters().getParameterNames(), 2, MetaOptimizerInfos::IT_TYPE_FULL);
  else if (optMethodDeriv == OPTIMIZATION_BFGS)
    desc->addOptimizer("Branch length parameters", new BfgsMultiDimensions(f), tl->getBranchLengthsParameters().getParameterNames(), 2, MetaOptimizerInfos::IT_TYPE_FULL);
  else
    throw Exception("OptimizationTools::optimizeNumericalParameters. Unknown optimization method: " + optMethodDeriv);

  // Other parameters

  if (optMethodModel == OPTIMIZATION_BRENT)
  {
    // One-dimensional optimization of each model/rate parameter in turn.
    ParameterList plsm = parameters.getCommonParametersWith(tl->getSubstitutionModelParameters());
    desc->addOptimizer("Substitution model parameter", new SimpleMultiDimensions(f), plsm.getParameterNames(), 0, MetaOptimizerInfos::IT_TYPE_STEP);


    ParameterList plrd = parameters.getCommonParametersWith(tl->getRateDistributionParameters());
    desc->addOptimizer("Rate distribution parameter", new SimpleMultiDimensions(f), plrd.getParameterNames(), 0, MetaOptimizerInfos::IT_TYPE_STEP);
    poptimizer = new MetaOptimizer(f, desc, nstep);
  }
  else if (optMethodModel == OPTIMIZATION_BFGS)
  {
    // Joint BFGS optimization of model + rate parameters, using numerical
    // derivatives.
    vector<string> vNameDer;

    ParameterList plsm = parameters.getCommonParametersWith(tl->getSubstitutionModelParameters());
    vNameDer = plsm.getParameterNames();

    ParameterList plrd = parameters.getCommonParametersWith(tl->getRateDistributionParameters());

    vector<string> vNameDer2 = plrd.getParameterNames();

    vNameDer.insert(vNameDer.begin(), vNameDer2.begin(), vNameDer2.end());
    fnum->setParametersToDerivate(vNameDer);

    desc->addOptimizer("Rate & model distribution parameters", new BfgsMultiDimensions(fnum), vNameDer, 1, MetaOptimizerInfos::IT_TYPE_FULL);
    poptimizer = new MetaOptimizer(fnum, desc, nstep);
  }
  else
    throw Exception("OptimizationTools::optimizeNumericalParameters. Unknown optimization method: " + optMethodModel);

  poptimizer->setVerbose(verbose);
  poptimizer->setProfiler(profiler);
  poptimizer->setMessageHandler(messageHandler);
  poptimizer->setMaximumNumberOfEvaluations(tlEvalMax);
  poptimizer->getStopCondition()->setTolerance(tolerance);

  // Optimize TreeLikelihood function:
  poptimizer->setConstraintPolicy(AutoParameter::CONSTRAINTS_AUTO);
  // Watchdog that restores sane parameter values if the likelihood turns NaN.
  NaNListener* nanListener = new NaNListener(poptimizer, tl);
  poptimizer->addOptimizationListener(nanListener);
  if (listener)
    poptimizer->addOptimizationListener(listener);
  poptimizer->init(pl);
  poptimizer->optimize();

  if (verbose > 0)
    ApplicationTools::displayMessage("\n");

  // We're done.
  unsigned int nb = poptimizer->getNumberOfEvaluations();
  delete poptimizer;
  // The optimizer owns neither its listeners nor the derivative wrapper;
  // release both here (previously leaked).
  delete nanListener;
  delete fnum;
  return nb;
}
コード例 #6
0
 // Re-synchronize internal state after a change of parameter values.
 // Behavior differs between the "main" likelihood object and the
 // sub-likelihoods it owns (one per hypernode of the mixed model):
 // the main object applies parameters and forwards them to its
 // sub-likelihoods; sub-objects update their own branch lengths and
 // transition probabilities.
 void RNonHomogeneousMixedTreeLikelihood::fireParameterChanged(const ParameterList& params)
{
  if (main_)
    applyParameters();
  else {
    // Sub-likelihood: update branch lengths directly from the parameters.
    for (size_t i = 0; i < nbNodes_; i++)
      {
        int id = nodes_[i]->getId();
        if (reparametrizeRoot_ && id == root1_)
          {
            // First root son: length = BrLenRoot * RootPosition.
            const Parameter* rootBrLen = &getParameter("BrLenRoot");
            const Parameter* rootPos = &getParameter("RootPosition");
            nodes_[i]->setDistanceToFather(rootBrLen->getValue() * rootPos->getValue());
          }
        else if (reparametrizeRoot_ && id == root2_)
          {
            // Second root son: length = BrLenRoot * (1 - RootPosition).
            const Parameter* rootBrLen = &getParameter("BrLenRoot");
            const Parameter* rootPos = &getParameter("RootPosition");
            nodes_[i]->setDistanceToFather(rootBrLen->getValue() * (1. - rootPos->getValue()));
          }
        else
          {
            // NOTE(review): getParameter returns a reference, so brLen can
            // never be null here — the `if (brLen)` guard is always true.
            const Parameter* brLen = &getParameter(string("BrLen") + TextTools::toString(i));
            if (brLen) nodes_[i]->setDistanceToFather(brLen->getValue());
          }
      }
  }

  // Propagate the (possibly changed) hypernode probabilities of the mixed
  // model set to every sub-likelihood.
  map<int, vector<RNonHomogeneousMixedTreeLikelihood*> >::const_iterator it2;
  for (it2 = mvTreeLikelihoods_.begin(); it2 != mvTreeLikelihoods_.end(); it2++)
    for (size_t i = 0; i < it2->second.size(); i++){
      (it2->second)[i]->setProbability((dynamic_cast<MixedSubstitutionModelSet*>(modelSet_))->getHyperNodeProbability((it2->second)[i]->getHyperNode()));
    }

  if (main_){
    // Main object: forward parameter values to the sub-likelihoods attached
    // to the upper node and refresh the root frequencies.
    for (size_t i=0;i< mvTreeLikelihoods_[upperNode_].size(); i++)
      mvTreeLikelihoods_[upperNode_][i]->matchParametersValues(params);
    rootFreqs_ = modelSet_->getRootFrequencies();
  }
  else {
    // Sub-object: recompute transition probabilities, all of them if a
    // rate-distribution parameter changed, otherwise only for the affected
    // nodes.
    if (params.getCommonParametersWith(rateDistribution_->getIndependentParameters()).size() > 0)
      {
        computeAllTransitionProbabilities();
      }
    else
      {
        // Nodes whose substitution-model parameters changed:
        vector<int> ids;
        vector<string> tmp = params.getCommonParametersWith(modelSet_->getNodeParameters()).getParameterNames();
        for (size_t i = 0; i < tmp.size(); i++)
          {
            vector<int> tmpv = modelSet_->getNodesWithParameter(tmp[i]);
            ids = VectorTools::vectorUnion(ids, tmpv);
          }
        // Branch-length parameters that changed:
        tmp = params.getCommonParametersWith(brLenParameters_).getParameterNames();
        vector<const Node*> nodes;
        for (size_t i = 0; i < ids.size(); i++)
          {
            nodes.push_back(idToNode_[ids[i]]);
          }
        vector<const Node*> tmpv;
        bool test = false;
        for (size_t i = 0; i < tmp.size(); i++)
          {
            if (tmp[i] == "BrLenRoot" || tmp[i] == "RootPosition")
              {
                // Either root parameter affects both sons of the root.
                if (!test)
                  {
                    tmpv.push_back(tree_->getRootNode()->getSon(0));
                    tmpv.push_back(tree_->getRootNode()->getSon(1));
                    test = true; // Add only once.
                  }
              }
            else
              // Names are assumed to be "BrLen<i>"; substr(5) extracts <i>.
              tmpv.push_back(nodes_[TextTools::to < size_t > (tmp[i].substr(5))]);
          }
        nodes = VectorTools::vectorUnion(nodes, tmpv);
        
        for (size_t i = 0; i < nodes.size(); i++){
          computeTransitionProbabilitiesForNode(nodes[i]);
        }
      }

    // Forward the parameter values to all nested sub-likelihoods.
    map<int, vector<RNonHomogeneousMixedTreeLikelihood*> >::iterator it;
    for (it = mvTreeLikelihoods_.begin(); it != mvTreeLikelihoods_.end(); it++)
      {
        for (size_t i = 0; i < it->second.size(); i++)
          {
            it->second[i]->matchParametersValues(params);
          }
      }
  }
  
  // Only the main object recomputes the global likelihood value.
  if (main_)
    {
      computeTreeLikelihood();
      minusLogLik_ = -getLogLikelihood();
    }
}