Example 1
/**
 * Update the cost function, derivatives and Hessian by adding values calculated
 * on a domain.
 * @param function :: Function to use to calculate the value and the derivatives
 * @param domain :: The domain.
 * @param values :: The fit function values
 * @param evalFunction :: Flag to evaluate the function
 * @param evalDeriv :: Flag to evaluate the derivatives (currently unused; the derivatives are always calculated)
 * @param evalHessian :: Flag to evaluate the Hessian
 */
void CostFuncLeastSquares::addValDerivHessian(
  API::IFunction_sptr function,
  API::FunctionDomain_sptr domain,
  API::FunctionValues_sptr values,
  bool evalFunction, bool evalDeriv, bool evalHessian) const
{
  UNUSED_ARG(evalDeriv);
  size_t np = function->nParams();  // number of parameters 
  size_t ny = domain->size(); // number of data points
  Jacobian jacobian(ny,np);
  if (evalFunction)
  {
    function->function(*domain,*values);
  }
  function->functionDeriv(*domain,jacobian);

  size_t iActiveP = 0;
  double fVal = 0.0;
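  // NOTE: "debug" is not declared in this excerpt; it is assumed to be a
  // file-scope flag defined elsewhere in this source file that enables the
  // Jacobian printout below.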
  if (debug)
  {
    std::cerr << "Jacobian:\n";
    for(size_t i = 0; i < ny; ++i)
    {
      for(size_t ip = 0; ip < np; ++ip)
      {
        if ( !function->isActive(ip) ) continue;
        std::cerr << jacobian.get(i,ip) << ' ';
      }
      std::cerr << std::endl;
    }
  }
  for(size_t ip = 0; ip < np; ++ip)
  {
    if ( !function->isActive(ip) ) continue;
    double d = 0.0;
    for(size_t i = 0; i < ny; ++i)
    {
      double calc = values->getCalculated(i);
      double obs = values->getFitData(i);
      double w = values->getFitWeight(i);
      double y = ( calc - obs ) * w;
      d += y * jacobian.get(i,ip) * w;
      if (iActiveP == 0 && evalFunction)
      {
        fVal += y * y;
      }
    }
    PARALLEL_CRITICAL(der_set)
    {
      double der = m_der.get(iActiveP);
      m_der.set(iActiveP, der + d);
    }
    //std::cerr << "der " << ip << ' ' << der[iActiveP] << std::endl;
    ++iActiveP;
  }

  if (evalFunction)
  {
    PARALLEL_ATOMIC
    m_value += 0.5 * fVal;
  }

  if (!evalHessian) return;

  //size_t na = m_der.size(); // number of active parameters

  size_t i1 = 0; // active parameter index
  for(size_t i = 0; i < np; ++i) // over parameters
  {
    if ( !function->isActive(i) ) continue;
    size_t i2 = 0; // active parameter index
    for(size_t j = 0; j <= i; ++j) // over ~ half of parameters
    {
      if ( !function->isActive(j) ) continue;
      double d = 0.0;
      for(size_t k = 0; k < ny; ++k) // over fitting data
      {
        double w = values->getFitWeight(k);
        d += jacobian.get(k,i) * jacobian.get(k,j) * w * w;
      }
      PARALLEL_CRITICAL(hessian_set)
      {
        double h = m_hessian.get(i1,i2);
        m_hessian.set(i1,i2, h + d);
        //std::cerr << "hess " << i1 << ' ' << i2 << std::endl;
        if (i1 != i2)
        {
          m_hessian.set(i2,i1,h + d);
        }
      }
      ++i2;
    }
    ++i1;
  }
}
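For reference, the sums accumulated above are the standard weighted least-squares value, gradient and Gauss-Newton Hessian. The notation below is introduced here for clarity and does not appear in the code: r_i = calc_i - obs_i is the residual, w_i the fit weight and J_{ij} = jacobian.get(i, j) the derivative of the i-th calculated value with respect to the j-th parameter:

$$
\chi^2 = \frac{1}{2}\sum_i (w_i\, r_i)^2, \qquad
\frac{\partial \chi^2}{\partial p_j} = \sum_i w_i^2\, r_i\, J_{ij}, \qquad
H_{jk} \approx \sum_i w_i^2\, J_{ij}\, J_{ik}.
$$

The second-derivative terms of the exact Hessian are dropped (Gauss-Newton approximation), which is why only products of first derivatives appear in the innermost loop, and the lower-triangle result is mirrored into the upper triangle via m_hessian.set(i2, i1, h + d).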
Example 2
/** Fit background function
  */
void ProcessBackground::fitBackgroundFunction(std::string bkgdfunctiontype) {
  // Get background type and create background function
  BackgroundFunction_sptr bkgdfunction =
      createBackgroundFunction(bkgdfunctiontype);

  int bkgdorder = getProperty("OutputBackgroundOrder");
  bkgdfunction->setAttributeValue("n", bkgdorder);

  if (bkgdfunctiontype == "Chebyshev") {
    double xmin = m_outputWS->readX(0).front();
    double xmax = m_outputWS->readX(0).back();
    g_log.information() << "Chebyshev Fit range: " << xmin << ", " << xmax
                        << "\n";
    bkgdfunction->setAttributeValue("StartX", xmin);
    bkgdfunction->setAttributeValue("EndX", xmax);
  }

  g_log.information() << "Fit selected background " << bkgdfunctiontype
                      << " to data workspace with "
                      << m_outputWS->getNumberHistograms() << " spectra."
                      << "\n";

  // Fit a few input background points to get an initial guess
  API::IAlgorithm_sptr fit;
  try {
    fit = this->createChildAlgorithm("Fit", 0.9, 1.0, true);
  } catch (Exception::NotFoundError &) {
    g_log.error() << "Requires CurveFitting library." << std::endl;
    throw;
  }

  g_log.information() << "Fitting background function: "
                      << bkgdfunction->asString() << "\n";

  double startx = m_lowerBound;
  double endx = m_upperBound;
  fit->setProperty("Function",
                   boost::dynamic_pointer_cast<API::IFunction>(bkgdfunction));
  fit->setProperty("InputWorkspace", m_outputWS);
  fit->setProperty("WorkspaceIndex", 0);
  fit->setProperty("MaxIterations", 500);
  fit->setProperty("StartX", startx);
  fit->setProperty("EndX", endx);
  fit->setProperty("Minimizer", "Levenberg-MarquardtMD");
  fit->setProperty("CostFunction", "Least squares");

  fit->executeAsChildAlg();

  // Get fit status and chi^2
  std::string fitStatus = fit->getProperty("OutputStatus");
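  // A fit status that contains both "cannot" and "tolerance" is treated as an
  // acceptable (non-fatal) failure below; note that find(...) < size() is
  // equivalent to find(...) != std::string::npos.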
  bool allowedfailure = (fitStatus.find("cannot") < fitStatus.size()) &&
                        (fitStatus.find("tolerance") < fitStatus.size());
  if (fitStatus.compare("success") != 0 && !allowedfailure) {
    g_log.error() << "ProcessBackground: Fit Status = " << fitStatus
                  << ". The fit result will not be used." << std::endl;
    throw std::runtime_error("Bad Fit");
  }

  const double chi2 = fit->getProperty("OutputChi2overDoF");
  g_log.information() << "Fit background: Fit Status = " << fitStatus
                      << ", chi2 = " << chi2 << "\n";

  // Get out the parameter names
  API::IFunction_sptr funcout = fit->getProperty("Function");
  TableWorkspace_sptr outbkgdparws = boost::make_shared<TableWorkspace>();
  outbkgdparws->addColumn("str", "Name");
  outbkgdparws->addColumn("double", "Value");

  TableRow typerow = outbkgdparws->appendRow();
  typerow << bkgdfunctiontype << 0.;

  vector<string> parnames = funcout->getParameterNames();
  size_t nparam = funcout->nParams();
  for (size_t i = 0; i < nparam; ++i) {
    TableRow newrow = outbkgdparws->appendRow();
    newrow << parnames[i] << funcout->getParameter(i);
  }

  TableRow chi2row = outbkgdparws->appendRow();
  chi2row << "Chi-square" << chi2;

  g_log.information() << "Set table workspace (#rows = "
                      << outbkgdparws->rowCount()
                      << ") to OutputBackgroundParameterWorkspace."
                      << "\n";
  setProperty("OutputBackgroundParameterWorkspace", outbkgdparws);

  // Set output workspace
  const MantidVec &vecX = m_outputWS->readX(0);
  const MantidVec &vecY = m_outputWS->readY(0);
  FunctionDomain1DVector domain(vecX);
  FunctionValues values(domain);

  funcout->function(domain, values);

  MantidVec &dataModel = m_outputWS->dataY(1);
  MantidVec &dataDiff = m_outputWS->dataY(2);
  for (size_t i = 0; i < dataModel.size(); ++i) {
    dataModel[i] = values[i];
    dataDiff[i] = vecY[i] - dataModel[i];
  }

  return;
}
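As a usage note, the parameter table set on OutputBackgroundParameterWorkspace above contains one string/double row per entry: row 0 holds the background function type next to a 0.0 placeholder, the following rows hold one fitted parameter each, and the last row holds "Chi-square". Below is a minimal sketch of reading it back; it assumes the usual Mantid ITableWorkspace accessors (rowCount() and cell<T>()) and is not part of ProcessBackground.

#include "MantidAPI/ITableWorkspace.h"
#include <iostream>

// Illustrative sketch only: dump the name/value rows written by
// ProcessBackground::fitBackgroundFunction.
void printBackgroundParameters(const Mantid::API::ITableWorkspace_sptr &table) {
  for (size_t i = 0; i < table->rowCount(); ++i) {
    std::cout << table->cell<std::string>(i, 0) << " = "
              << table->cell<double>(i, 1) << "\n";
  }
}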
Example 3
/**
 * Update the cost function, derivatives and Hessian by adding values calculated
 * on a domain.
 * @param function :: Function to use to calculate the value and the derivatives
 * @param domain :: The domain.
 * @param values :: The fit function values
 * @param evalDeriv :: Flag to evaluate the derivatives (currently unused; the derivatives are always calculated)
 * @param evalHessian :: Flag to evaluate the Hessian
 */
void CostFuncLeastSquares::addValDerivHessian(API::IFunction_sptr function,
                                              API::FunctionDomain_sptr domain,
                                              API::FunctionValues_sptr values,
                                              bool evalDeriv,
                                              bool evalHessian) const {
  UNUSED_ARG(evalDeriv);
  function->function(*domain, *values);
  size_t np = function->nParams(); // number of parameters
  size_t ny = values->size();      // number of data points
  Jacobian jacobian(ny, np);
  function->functionDeriv(*domain, jacobian);

  size_t iActiveP = 0;
  double fVal = 0.0;
  std::vector<double> weights = getFitWeights(values);
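  // getFitWeights is a private helper not shown in this excerpt; it is
  // assumed to return values->getFitWeight(i) for each data point, cached
  // once here instead of being queried inside the loops as in Example 1.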

  for (size_t ip = 0; ip < np; ++ip) {
    if (!function->isActive(ip))
      continue;
    double d = 0.0;
    for (size_t i = 0; i < ny; ++i) {
      double calc = values->getCalculated(i);
      double obs = values->getFitData(i);
      double w = weights[i];
      double y = (calc - obs) * w;
      d += y * jacobian.get(i, ip) * w;
      if (iActiveP == 0) {
        fVal += y * y;
      }
    }
    PARALLEL_CRITICAL(der_set) {
      double der = m_der.get(iActiveP);
      m_der.set(iActiveP, der + d);
    }
    ++iActiveP;
  }

  PARALLEL_ATOMIC
  m_value += 0.5 * fVal;

  if (!evalHessian)
    return;

  size_t i1 = 0;                  // active parameter index
  for (size_t i = 0; i < np; ++i) // over parameters
  {
    if (!function->isActive(i))
      continue;
    size_t i2 = 0;                  // active parameter index
    for (size_t j = 0; j <= i; ++j) // over ~ half of parameters
    {
      if (!function->isActive(j))
        continue;
      double d = 0.0;
      for (size_t k = 0; k < ny; ++k) // over fitting data
      {
        double w = weights[k];
        d += jacobian.get(k, i) * jacobian.get(k, j) * w * w;
      }
      PARALLEL_CRITICAL(hessian_set) {
        double h = m_hessian.get(i1, i2);
        m_hessian.set(i1, i2, h + d);
        if (i1 != i2) {
          m_hessian.set(i2, i1, h + d);
        }
      }
      ++i2;
    }
    ++i1;
  }
}
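To make the numerics easier to follow outside the Mantid class hierarchy, here is a minimal standalone sketch of the same accumulation performed by addValDerivHessian. Everything in it (the Accumulated struct, the accumulate function, the plain-vector Jacobian layout) is illustrative only and not part of the Mantid API.

#include <cstddef>
#include <vector>

// Standalone sketch (not Mantid code): accumulate the weighted least-squares
// value, gradient and Gauss-Newton Hessian from precomputed inputs.
struct Accumulated {
  double value = 0.0;                       // 0.5 * sum_i (w_i * r_i)^2
  std::vector<double> der;                  // gradient with respect to each parameter
  std::vector<std::vector<double>> hessian; // Gauss-Newton Hessian (np x np)
};

// calc, obs, w : calculated values, observations and weights per data point.
// jac[i][p]    : derivative of calc[i] with respect to parameter p.
Accumulated accumulate(const std::vector<double> &calc,
                       const std::vector<double> &obs,
                       const std::vector<double> &w,
                       const std::vector<std::vector<double>> &jac,
                       std::size_t np) {
  const std::size_t ny = calc.size();
  Accumulated out;
  out.der.assign(np, 0.0);
  out.hessian.assign(np, std::vector<double>(np, 0.0));
  for (std::size_t i = 0; i < ny; ++i) {
    const double y = (calc[i] - obs[i]) * w[i]; // weighted residual
    out.value += 0.5 * y * y;
    for (std::size_t p = 0; p < np; ++p) {
      out.der[p] += y * jac[i][p] * w[i];
      for (std::size_t q = 0; q <= p; ++q) {
        const double d = jac[i][p] * jac[i][q] * w[i] * w[i];
        out.hessian[p][q] += d;
        if (p != q)
          out.hessian[q][p] += d; // mirror the lower triangle
      }
    }
  }
  return out;
}

Compared with the member function above, the loop nesting is inverted (data points outermost), and the active-parameter filtering, PARALLEL_* macros and the m_der/m_hessian member storage are omitted; the accumulated sums are the same.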