// Beispiel #1 (scraped example-listing header; original score: 0)
// Forward pass through a sequential container: run each stored layer on
// the previous layer's output parameter and copy the last layer's output
// parameter into `output`. On the first pass (while `reset` is false) the
// output width/height reported by each layer are propagated forward as
// the input width/height of the next layer.
void Sequential<InputDataType, OutputDataType, CustomLayers...>::Forward(
    arma::Mat<eT>&& input, arma::Mat<eT>&& output)
{
  // Run the first layer directly on the given input.
  boost::apply_visitor(ForwardVisitor(std::move(input), std::move(
      boost::apply_visitor(outputParameterVisitor, network.front()))),
      network.front());

  if (!reset)
  {
    // Pick up the first layer's output dimensions, if it reports any
    // (a reported value of 0 means "no dimension information").
    if (boost::apply_visitor(outputWidthVisitor, network.front()) != 0)
    {
      width = boost::apply_visitor(outputWidthVisitor, network.front());
    }

    if (boost::apply_visitor(outputHeightVisitor, network.front()) != 0)
    {
      height = boost::apply_visitor(outputHeightVisitor, network.front());
    }
  }

  for (size_t i = 1; i < network.size(); ++i)
  {
    if (!reset)
    {
      // Set the input width.
      boost::apply_visitor(SetInputWidthVisitor(width), network[i]);

      // Set the input height.
      boost::apply_visitor(SetInputHeightVisitor(height), network[i]);
    }

    // Each subsequent layer consumes the previous layer's output
    // parameter and writes into its own.
    boost::apply_visitor(ForwardVisitor(std::move(boost::apply_visitor(
        outputParameterVisitor, network[i - 1])), std::move(
        boost::apply_visitor(outputParameterVisitor, network[i]))),
        network[i]);

    if (!reset)
    {
      // Get the output width.
      if (boost::apply_visitor(outputWidthVisitor, network[i]) != 0)
      {
        width = boost::apply_visitor(outputWidthVisitor, network[i]);
      }

      // Get the output height.
      if (boost::apply_visitor(outputHeightVisitor, network[i]) != 0)
      {
        height = boost::apply_visitor(outputHeightVisitor, network[i]);
      }
    }
  }

  // Dimension propagation only needs to happen once. The original
  // `if (!reset) { reset = true; }` guard was redundant (and
  // mis-indented): assigning unconditionally is identical in behavior.
  reset = true;

  output = boost::apply_visitor(outputParameterVisitor, network.back());
}
// Forward pass of the recurrent attention model: alternate between the
// action module and the recurrent (rnn) module for `rho` time steps,
// then emit the recurrent module's final output parameter.
void RecurrentAttention<InputDataType, OutputDataType>::Forward(
    arma::Mat<eT>&& input, arma::Mat<eT>&& output)
{
  // Initialize the action input lazily on the first call: a zero matrix
  // with one `outSize`-tall column per input column.
  if (initialInput.is_empty())
  {
    initialInput = arma::zeros(outSize, input.n_cols);
  }

  // Propagate through the action and recurrent module.
  for (forwardStep = 0; forwardStep < rho; ++forwardStep)
  {
    if (forwardStep == 0)
    {
      // First step: no recurrent output exists yet, so feed the action
      // module the zero-initialized input.
      boost::apply_visitor(ForwardVisitor(std::move(initialInput), std::move(
          boost::apply_visitor(outputParameterVisitor, actionModule))),
          actionModule);
    }
    else
    {
      // Subsequent steps: the action module consumes the recurrent
      // module's output from the previous step.
      boost::apply_visitor(ForwardVisitor(std::move(boost::apply_visitor(
          outputParameterVisitor, rnnModule)), std::move(boost::apply_visitor(
          outputParameterVisitor, actionModule))), actionModule);
    }

    // Initialize the glimpse input: column 0 holds the raw input;
    // column 1 holds the action module's output, placed at the top and
    // implicitly zero-padded to input.n_elem rows.
    arma::mat glimpseInput = arma::zeros(input.n_elem, 2);
    glimpseInput.col(0) = input;
    glimpseInput.submat(0, 1, boost::apply_visitor(outputParameterVisitor,
        actionModule).n_elem - 1, 1) = boost::apply_visitor(
        outputParameterVisitor, actionModule);

    // Advance the recurrent module on the combined glimpse input.
    boost::apply_visitor(ForwardVisitor(std::move(glimpseInput),
        std::move(boost::apply_visitor(outputParameterVisitor, rnnModule))),
        rnnModule);

    // Save the output parameter when training the module — presumably
    // so per-step state can be restored during the backward pass
    // (NOTE(review): confirm against the matching Backward()).
    if (!deterministic)
    {
      for (size_t l = 0; l < network.size(); ++l)
      {
        boost::apply_visitor(SaveOutputParameterVisitor(
            std::move(moduleOutputParameter)), network[l]);
      }
    }
  }

  // The model output is the recurrent module's output after rho steps.
  output = boost::apply_visitor(outputParameterVisitor, rnnModule);

  // Reset the step counters for the next forward/backward cycle.
  forwardStep = 0;
  backwardStep = 0;
}
// Forward pass of the recurrent wrapper. Step 0 runs the initial
// module; every later step runs the input module, feeds the previous
// transfer output back through the feedback module, and then runs the
// recurrent module. The transfer module's stored output parameter is
// the layer output in both branches.
void Recurrent<InputDataType, OutputDataType>::Forward(
    arma::Mat<eT>&& input, arma::Mat<eT>&& output)
{
  if (forwardStep == 0)
  {
    // First time step: no recurrent state exists yet.
    boost::apply_visitor(ForwardVisitor(std::move(input), std::move(output)),
        initialModule);
  }
  else
  {
    // Pass the current input through the input module.
    boost::apply_visitor(ForwardVisitor(std::move(input), std::move(
        boost::apply_visitor(outputParameterVisitor, inputModule))),
        inputModule);

    // Feed the transfer module's previous output back through the
    // feedback module (the recurrent connection).
    boost::apply_visitor(ForwardVisitor(std::move(boost::apply_visitor(
        outputParameterVisitor, transferModule)), std::move(
        boost::apply_visitor(outputParameterVisitor, feedbackModule))),
        feedbackModule);

    // Combine everything via the recurrent module.
    boost::apply_visitor(ForwardVisitor(std::move(input), std::move(output)),
        recurrentModule);
  }

  // In both branches, the layer output is read from the transfer
  // module's output parameter.
  output = boost::apply_visitor(outputParameterVisitor, transferModule);

  // Save the feedback output parameter when training the module.
  if (!deterministic)
  {
    feedbackOutputParameter.push_back(output);
  }

  forwardStep++;
  if (forwardStep == rho)
  {
    // End of the sequence: reset the step counters and clear any
    // previously accumulated recurrent error.
    forwardStep = 0;
    backwardStep = 0;

    if (!recurrentError.is_empty())
    {
      recurrentError.zeros();
    }
  }
}
// Beispiel #4 (scraped example-listing header; original score: 0)
// Forward pass of the concat layer: run every child layer on the same
// input, then assemble one output column per child. The result matrix
// is as tall as the largest child output; shorter child outputs occupy
// the top of their column and the remainder stays zero.
void Concat<InputDataType, OutputDataType>::Forward(
    arma::Mat<eT>&& input, arma::Mat<eT>&& output)
{
  // First sweep: forward each child layer and track the largest
  // output size seen so far.
  size_t maxElems = 0;

  for (size_t layer = 0; layer < network.size(); ++layer)
  {
    boost::apply_visitor(ForwardVisitor(std::move(input), std::move(
        boost::apply_visitor(outputParameterVisitor, network[layer]))),
        network[layer]);

    const size_t layerElems = boost::apply_visitor(
        outputParameterVisitor, network[layer]).n_elem;

    if (layerElems > maxElems)
    {
      maxElems = layerElems;
    }
  }

  // Second sweep: copy each child's (vectorised) output into its own
  // column of the zero-initialized result.
  output = arma::zeros(maxElems, network.size());

  for (size_t layer = 0; layer < network.size(); ++layer)
  {
    const size_t layerElems = boost::apply_visitor(outputParameterVisitor,
        network[layer]).n_elem;

    if (layerElems == maxElems)
    {
      // Full-height output: assign the whole column.
      output.col(layer) = arma::vectorise(boost::apply_visitor(
          outputParameterVisitor, network[layer]));
    }
    else
    {
      // Shorter output: fill only the top part; the rest is already 0.
      output.submat(0, layer, layerElems - 1, layer) = arma::vectorise(
          boost::apply_visitor(outputParameterVisitor, network[layer]));
    }
  }
}