Example #1
template<typename LayerTypes,
         typename OutputLayerType,
         typename InitializationRuleType,
         typename PerformanceFunction>
void FFN<
LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
>::Gradient(const arma::mat& /* unused */,
            const size_t /* unused */,
            arma::mat& gradient)
{
  // Link the output matrix to the per-layer gradient storage.
  NetworkGradients(gradient, network);

  // Backpropagate the stored error (set by a preceding Evaluate() call) and
  // write each layer's gradient into the linked storage.
  Backward<>(error, network);
  UpdateGradients<>(network);
}
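This overload ignores both of its first two arguments: it simply backpropagates whatever error the network currently stores, so a forward pass must already have run. A minimal usage sketch follows, assuming Evaluate() takes the same (parameters, i, deterministic) arguments it takes in the CNN and RNN examples below; ffn, parameters, and i are illustrative names rather than part of the snippet.

// Hedged usage sketch: run the forward pass first so the cached error exists.
// `ffn`, `parameters`, and `i` are assumed names, not part of the example.
arma::mat gradient;
ffn.Evaluate(parameters, i, false);    // Forward pass; caches the error.
ffn.Gradient(parameters, i, gradient); // Backward pass over the cached error.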
Example #2
template<typename LayerTypes,
         typename OutputLayerType,
         typename InitializationRuleType,
         typename PerformanceFunction>
void CNN<
LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
>::Gradient(const arma::mat& /* unused */,
            const size_t i,
            arma::mat& gradient)
{
  // Run the forward pass for the i-th data point to refresh the stored
  // activations and the error.
  Evaluate(parameter, i, false);

  // Link the output matrix to the per-layer gradient storage.
  NetworkGradients(gradient, network);

  // Backpropagate the error and write each layer's gradient.
  Backward<>(error, network);
  UpdateGradients<>(network);
}
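Unlike the FFN overload, this one runs the forward pass itself, so a caller can request the gradient for a single data point in one call. Below is a hedged sketch of a bare stochastic-gradient loop over that interface; cnn, parameters, numFunctions, and stepSize are assumed names. Note that, as in the snippet, the matrix argument is unused and the network reads its own stored parameter member, so a real optimizer has to keep the two in sync.

// Hedged sketch of a plain per-point gradient-descent loop; illustrative only.
arma::mat gradient;
for (size_t i = 0; i < numFunctions; ++i)
{
  cnn.Gradient(parameters, i, gradient); // Forward + backward for point i.
  parameters -= stepSize * gradient;     // Basic gradient-descent update.
}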
Example #3
template<typename LayerTypes,
         typename OutputLayerType,
         typename InitializationRuleType,
         typename PerformanceFunction>
void RNN<
LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
>::Gradient(const arma::mat& /* unused */,
            const size_t i,
            arma::mat& gradient)
{
  // Allocate the overall gradient on the first call; reset it otherwise.
  if (gradient.is_empty())
  {
    gradient = arma::zeros<arma::mat>(parameter.n_rows, parameter.n_cols);
  }
  else
  {
    gradient.zeros();
  }

  // Run the forward pass for the i-th sequence to refresh the stored
  // activations and the error.
  Evaluate(parameter, i, false);

  // currentGradient receives a single time step's gradient: it is linked to
  // the per-layer gradient storage and accumulated into the overall gradient
  // inside the loop below.
  arma::mat currentGradient = arma::mat(gradient.n_rows, gradient.n_cols);
  NetworkGradients(currentGradient, network);

  // Alias the i-th column of the predictors without copying it.
  const arma::mat input = arma::mat(predictors.colptr(i), predictors.n_rows,
      1, false, true);

  // Iterate backward through the input sequence (backpropagation through
  // time). seqNum is unsigned, so the loop exits through the break at the
  // bottom rather than the always-true loop condition.
  for (seqNum = seqLen - 1; seqNum >= 0; seqNum--)
  {
    // Load the network activation for the upcoming backward pass.
    LoadActivations(input.rows(seqNum * inputSize, (seqNum + 1) *
        inputSize - 1), network);

    // Perform the backward pass.
    if (seqOutput)
    {
      arma::mat seqError = error.unsafe_col(seqNum);
      Backward(seqError, network);
    }
    else
    {
      Backward(error, network);
    }

    // Link the parameters and update the gradients.
    LinkParameter(network);
    UpdateGradients<>(network);

    // Update the overall gradient.
    gradient += currentGradient;

    // Stop before the decrement would wrap the unsigned counter past zero.
    if (seqNum == 0) break;
  }
}
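The heart of this method is backpropagation through time: the sequence is walked from the last time step to the first, one gradient is produced per step, and the results are summed. The sketch below isolates that accumulation pattern, including the countdown idiom that keeps an unsigned counter from wrapping; StepGradient is a hypothetical stand-in for the Backward / LinkParameter / UpdateGradients sequence and is not part of the code above.

#include <armadillo>

// Hypothetical per-step gradient; a trivial placeholder so the sketch
// compiles. In the example above this role is played by the backward pass
// filling currentGradient.
static arma::mat StepGradient(const size_t /* t */, const arma::mat& parameter)
{
  return arma::ones<arma::mat>(parameter.n_rows, parameter.n_cols);
}

// Hedged sketch of the BPTT accumulation pattern used above.
arma::mat AccumulateBptt(const size_t seqLen, const arma::mat& parameter)
{
  arma::mat gradient = arma::zeros<arma::mat>(parameter.n_rows,
      parameter.n_cols);

  // Counting down with t > 0 never wraps the unsigned counter; the actual
  // time step is t - 1, so the steps run seqLen - 1, ..., 0.
  for (size_t t = seqLen; t > 0; --t)
    gradient += StepGradient(t - 1, parameter);

  return gradient;
}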