Example #1
template<typename DecomposableFunctionType, typename UpdatePolicyType>
double SGD<DecomposableFunctionType, UpdatePolicyType>::Optimize(
    DecomposableFunctionType& function,
    arma::mat& iterate)
{
  // Find the number of functions to use.
  const size_t numFunctions = function.NumFunctions();

  // This is used only if shuffle is true.
  arma::Col<size_t> visitationOrder;
  if (shuffle)
  {
    visitationOrder = arma::shuffle(arma::linspace<arma::Col<size_t>>(0,
        (numFunctions - 1), numFunctions));
  }

  // To keep track of where we are and how things are going.
  size_t currentFunction = 0;
  double overallObjective = 0;
  double lastObjective = DBL_MAX;

  // Calculate the first objective function.
  for (size_t i = 0; i < numFunctions; ++i)
    overallObjective += function.Evaluate(iterate, i);

  // Initialize the update policy.
  updatePolicy.Initialize(iterate.n_rows, iterate.n_cols);

  // Now iterate!
  arma::mat gradient(iterate.n_rows, iterate.n_cols);
  for (size_t i = 1; i != maxIterations; ++i, ++currentFunction)
  {
    // Is this iteration the start of a sequence?
    if ((currentFunction % numFunctions) == 0)
    {
      // Output current objective function.
      Log::Info << "SGD: iteration " << i << ", objective " << overallObjective
          << "." << std::endl;

      if (std::isnan(overallObjective) || std::isinf(overallObjective))
      {
        Log::Warn << "SGD: converged to " << overallObjective << "; terminating"
            << " with failure.  Try a smaller step size?" << std::endl;
        return overallObjective;
      }

      if (std::abs(lastObjective - overallObjective) < tolerance)
      {
        Log::Info << "SGD: minimized within tolerance " << tolerance << "; "
            << "terminating optimization." << std::endl;
        return overallObjective;
      }

      // Reset the counter variables.
      lastObjective = overallObjective;
      overallObjective = 0;
      currentFunction = 0;

      if (shuffle) // Determine order of visitation.
        visitationOrder = arma::shuffle(visitationOrder);
    }

    // Evaluate the gradient for this iteration.
    if (shuffle)
      function.Gradient(iterate, visitationOrder[currentFunction], gradient);
    else
      function.Gradient(iterate, currentFunction, gradient);

    // Use the update policy to take a step.
    updatePolicy.Update(iterate, stepSize, gradient);

    // Now add that to the overall objective function.
    if (shuffle)
    {
      overallObjective += function.Evaluate(iterate,
          visitationOrder[currentFunction]);
    }
    else
    {
      overallObjective += function.Evaluate(iterate, currentFunction);
    }
  }

  Log::Info << "SGD: maximum iterations (" << maxIterations << ") reached; "
      << "terminating optimization." << std::endl;

  // Calculate final objective.
  overallObjective = 0;
  for (size_t i = 0; i < numFunctions; ++i)
    overallObjective += function.Evaluate(iterate, i);
  return overallObjective;
}
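
The SGD example above assumes a DecomposableFunctionType that exposes NumFunctions(), Evaluate(iterate, i), and Gradient(iterate, i, gradient), and an UpdatePolicyType that exposes Initialize(rows, cols) and Update(iterate, stepSize, gradient). The sketch below is illustrative only: ExampleFunction, VanillaUpdate, and the hand-rolled driver loop are assumptions showing what such types might look like, not part of the library code above.

#include <armadillo>

// Illustrative sketch: ExampleFunction and VanillaUpdate are hypothetical
// types satisfying the interface that SGD::Optimize() above expects.

// Sum of squared distances to a set of points: f(x) = sum_i ||x - p_i||^2,
// minimized at the mean of the points.
class ExampleFunction
{
 public:
  explicit ExampleFunction(const arma::mat& points) : points(points) { }

  // Number of separable terms f_i.
  size_t NumFunctions() const { return points.n_cols; }

  // Evaluate only the i-th term.
  double Evaluate(const arma::mat& iterate, const size_t i) const
  {
    return arma::accu(arma::square(iterate - points.col(i)));
  }

  // Gradient of only the i-th term.
  void Gradient(const arma::mat& iterate,
                const size_t i,
                arma::mat& gradient) const
  {
    gradient = 2.0 * (iterate - points.col(i));
  }

 private:
  arma::mat points;
};

// Plain vanilla gradient-descent update policy.
class VanillaUpdate
{
 public:
  void Initialize(const size_t /* rows */, const size_t /* cols */) { }

  void Update(arma::mat& iterate,
              const double stepSize,
              const arma::mat& gradient)
  {
    iterate -= stepSize * gradient;
  }
};

int main()
{
  arma::arma_rng::set_seed(42);
  arma::mat points(2, 100, arma::fill::randu);

  ExampleFunction f(points);
  VanillaUpdate update;

  arma::mat iterate(2, 1, arma::fill::zeros);
  update.Initialize(iterate.n_rows, iterate.n_cols);

  // A few hand-rolled passes over the data, mirroring the loop structure of
  // SGD::Optimize() above.
  const double stepSize = 0.01;
  arma::mat gradient(iterate.n_rows, iterate.n_cols);
  for (size_t i = 0; i < 10 * f.NumFunctions(); ++i)
  {
    f.Gradient(iterate, i % f.NumFunctions(), gradient);
    update.Update(iterate, stepSize, gradient);
  }

  iterate.print("iterate (should approach the mean of the points):");
  return 0;
}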
Example #2
template<typename DecomposableFunctionType>
double CNE::Optimize(DecomposableFunctionType& function, arma::mat& iterate)
{
  // Make sure at least four candidates are present for evolution to work.
  if (populationSize < 4)
  {
    throw std::logic_error("CNE::Optimize(): population size should be at least"
        " 4!");
  }

  // Find the number of elite candidates from the population.
  numElite = floor(selectPercent * populationSize);

  // Make sure we have an even number of candidates to remove and create.
  if ((populationSize - numElite) % 2 != 0)
    numElite--;

  // Terminate if two parents cannot be created.
  if (numElite < 2)
  {
    throw std::logic_error("CNE::Optimize(): unable to select two parents. "
        "Increase selection percentage.");
  }

  // Terminate if at least two children cannot be created.
  if ((populationSize - numElite) < 2)
  {
    throw std::logic_error("CNE::Optimize(): no space to accomodate even 2 "
        "children. Increase population size.");
  }

  // Create the population and fill it with random values in [0, 1].
  population = arma::randu(iterate.n_rows, iterate.n_cols, populationSize);

  // Store the number of elements in a cube slice or a matrix column.
  elements = population.n_rows * population.n_cols;

  // Initialize helper variables.
  fitnessValues.set_size(populationSize);

  Log::Info << "CNE initialized successfully. Optimization started."
      << std::endl;

  // Find the fitness before optimization, using the given iterate parameters.
  double lastBestFitness = function.Evaluate(iterate);

  // Iterate until the maximum number of generations is reached.
  for (size_t gen = 1; gen <= maxGenerations; gen++)
  {
    // Calculate the fitness values of all candidates.
    for (size_t i = 0; i < populationSize; i++)
    {
      // Select a candidate and insert the parameters into the function.
      iterate = population.slice(i);

      // Find the fitness of the candidate.
      fitnessValues[i] = function.Evaluate(iterate);
    }

    Log::Info << "Generation number: " << gen << " best fitness = "
        << fitnessValues.min() << std::endl;

    // Create next generation of species.
    Reproduce();

    // Check for termination: the best fitness has reached the given tolerance.
    if (tolerance >= fitnessValues.min())
    {
      Log::Info << "CNE::Optimize(): terminating. Given fitness criteria "
          << tolerance << " > " << fitnessValues.min() << "." << std::endl;
      break;
    }

    // Check for termination: the objective improved by less than objectiveChange.
    if (lastBestFitness - fitnessValues.min() < objectiveChange)
    {
      Log::Info << "CNE::Optimize(): terminating. Fitness history change "
          << (lastBestFitness - fitnessValues.min())
          << " < " << objectiveChange << "." << std::endl;
      break;
    }

    // Store the best fitness of present generation.
    lastBestFitness = fitnessValues.min();
  }

  // Store the best candidate in the output parameters (iterate).
  iterate = population.slice(index(0));

  return function.Evaluate(iterate);
}
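
The CNE example above evaluates each candidate with a single Evaluate(iterate) call and, after Reproduce() (not shown), selects the best candidate through an index vector. The sketch below is illustrative only: SphereFunction is a hypothetical objective, and the use of arma::sort_index to build the ranking index is an assumption about how such an index could be produced, not the library's code.

#include <armadillo>

// Illustrative sketch: SphereFunction is a hypothetical objective satisfying
// the single-Evaluate() interface that CNE::Optimize() above expects, and the
// ranking step is one plausible way to obtain the `index` vector used above.
class SphereFunction
{
 public:
  // f(x) = sum_j x_j^2, minimized at x = 0.
  double Evaluate(const arma::mat& iterate) const
  {
    return arma::accu(arma::square(iterate));
  }
};

int main()
{
  const size_t populationSize = 8;

  // Candidates are stored one per cube slice, as in the optimizer above.
  arma::cube population = arma::randu<arma::cube>(3, 1, populationSize);
  arma::vec fitnessValues(populationSize);

  SphereFunction f;
  arma::mat iterate;

  // Fitness evaluation pass, mirroring the inner loop of CNE::Optimize().
  for (size_t i = 0; i < populationSize; ++i)
  {
    iterate = population.slice(i);
    fitnessValues[i] = f.Evaluate(iterate);
  }

  // Rank candidates by fitness (ascending) so that index(0) is the best one;
  // this matches how population.slice(index(0)) is used above.
  arma::uvec index = arma::sort_index(fitnessValues);
  iterate = population.slice(index(0));
  iterate.print("best candidate:");

  return 0;
}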