Example #1
bool CG::step()
{
  OPENANN_CHECK(opt);
  if(iteration < 0)
    initialize();
  OPENANN_CHECK(n > 0);

  try
  {
    while(alglib_impl::mincgiteration(state.c_ptr(), &envState))
    {
      if(state.needf)
      {
        for(unsigned i = 0; i < n; i++)
          parameters(i) = state.x[i];
        opt->setParameters(parameters);
        error = opt->error();
        state.f = error;
        if(iteration != state.c_ptr()->repiterationscount)
        {
          iteration = state.c_ptr()->repiterationscount;
          opt->finishedIteration();
          return true;
        }
        continue;
      }
      if(state.needfg)
      {
        for(unsigned i = 0; i < n; i++)
          parameters(i) = state.x[i];
        opt->setParameters(parameters);
        opt->errorGradient(error, gradient);
        state.f = error;
        for(unsigned i = 0; i < n; i++)
          state.g[i] = (double) gradient(i);
        if(iteration != state.c_ptr()->repiterationscount)
        {
          iteration = state.c_ptr()->repiterationscount;
          opt->finishedIteration();
          return true;
        }
        continue;
      }
      if(state.xupdated)
        continue;
      throw alglib::ap_error("ALGLIB: error in 'mincgoptimize'"
                             " (some derivatives were not provided?)");
    }
    alglib_impl::ae_state_clear(&envState);
  }
  catch(alglib_impl::ae_error_type)
  {
    throw OpenANNException(envState.error_msg);
  }

  reset();
  return false;
}
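step() drives ALGLIB's reverse-communication interface: mincgiteration() suspends whenever it needs the objective value (needf) or the value together with the gradient (needfg). The wrapper copies the current point from state.x into the Optimizable, evaluates it, writes the results back into state.f and state.g, and resumes the loop. Whenever repiterationscount advances, one conjugate-gradient iteration has finished, so step() notifies the Optimizable and returns true; callers can run per-iteration logic between calls, as Example #10 shows.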
Example #2
void merge(DataSetView& merging, std::vector<DataSetView>& groups)
{
  OPENANN_CHECK(!groups.empty());

  for(size_t i = 0; i < groups.size(); ++i)
  {
    OPENANN_CHECK(merging.dataset == groups.at(i).dataset);

    std::copy(groups.at(i).indices.begin(), groups.at(i).indices.end(),
              std::back_inserter(merging.indices));
  }
}
Example #3
void BCIDataSet::loadSignal()
{
  std::ifstream file(fileName("Signal").c_str());
  OPENANN_CHECK(file.is_open());

  for(int e = 0; e < readEpochs; e++)
  {
    for(int c = 0; c < channels; c++)
    {
      for(int t = 0; t < maxT; t++)
      {
        file >> signal[e](c, t);
      }
    }
    if(debugLogger.isActive())
    {
      double progress = 100.0 * (double)(e + 1) / (double) readEpochs;
      if(e % 10 == 0 || e == readEpochs - 1)
      {
        debugLogger << "[";
        int p = 0;
        for(; p < (int)(progress + 0.5); p++)
          debugLogger << "#";
        for(; p < 100; p++)
          debugLogger << " ";
        debugLogger << "] (" << (int)(progress + 0.5) << "%)\n";
      }
    }
  }
  debugLogger << "Loaded signal.\n";
}
Example #4
void split(std::vector<DataSetView>& groups, DataSet& dataset,
           int numberOfGroups, bool shuffling)
{
  OPENANN_CHECK(numberOfGroups > 1);
  std::vector<int> indices;

  indices.reserve(dataset.samples());
  groups.reserve(numberOfGroups);

  for(int i = 0; i < dataset.samples(); ++i)
    indices.push_back(i);

  int samplesPerGroup = (int) std::floor((double) dataset.samples() / numberOfGroups + 0.5);

  if(shuffling)
    std::random_shuffle(indices.begin(), indices.end());

  for(int i = 0; i < numberOfGroups; ++i)
  {
    std::vector<int>::iterator it = indices.begin() + i * samplesPerGroup;

    if(i < numberOfGroups - 1)
      groups.push_back(DataSetView(dataset, it, it + samplesPerGroup));
    else
      groups.push_back(DataSetView(dataset, it, indices.end()));
  }
}
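A minimal usage sketch for split(); the dataset object is a hypothetical stand-in for any concrete OpenANN::DataSet implementation, and the OpenANN namespace qualification is an assumption:

// Hypothetical sketch: partition "dataset" into 5 shuffled folds.
// No samples are copied; each DataSetView only stores indices into dataset.
std::vector<OpenANN::DataSetView> folds;
OpenANN::split(folds, dataset, 5, true); // shuffling == true permutes the indices first
OPENANN_CHECK(folds.size() == 5);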
Example #5
StoppingInterrupt::StoppingInterrupt()
{
  OPENANN_CHECK(!stoppingInterruptSignal);

  if(observers == 0)
    std::signal(SIGINT, StoppingInterrupt::setStoppingInterruptSignal);

  ++observers;
}
Example #6
Net& Net::addLayer(Layer* layer)
{
  OPENANN_CHECK(layer != 0);

  OutputInfo info = layer->initialize(parameters, derivatives);
  layers.push_back(layer);
  infos.push_back(info);
  L++;
  return *this;
}
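Because addLayer() returns *this, layer registration can be chained. A minimal sketch; makeInputLayer() and makeHiddenLayer() are hypothetical factories standing in for code that constructs concrete Layer instances, not OpenANN API:

// Hypothetical sketch: fluent chaining enabled by the Net& return value.
OpenANN::Net net;
net.addLayer(makeInputLayer())   // makeInputLayer(): hypothetical factory
   .addLayer(makeHiddenLayer()); // makeHiddenLayer(): hypothetical factory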
Example #7
void BCIDataSet::loadTargetChar()
{
  std::ifstream file(fileName("TargetChar").c_str());
  OPENANN_CHECK(file.is_open());

  int c = 0;
  for(int e = 0; e < epochs; e++)
  {
    file >> c;
    targetChar[e] = (char)c;
  }
  debugLogger << "Loaded target char.\n";
}
Example #8
void BCIDataSet::loadFlashing()
{
  std::ifstream file(fileName("Flashing").c_str());
  OPENANN_CHECK(file.is_open());

  for(int t = 0; t < maxT; t++)
  {
    for(int e = 0; e < epochs; e++)
    {
      file >> flashing(t, e);
    }
  }
  debugLogger << "Loaded flashing.\n";
}
Example #9
void BCIDataSet::loadStimulusCode()
{
  std::ifstream file(fileName("StimulusCode").c_str());
  OPENANN_CHECK(file.is_open());

  for(int t = 0; t < maxT; t++)
  {
    for(int e = 0; e < epochs; e++)
    {
      file >> stimulusCode(t, e);
    }
  }
  debugLogger << "Loaded stimulus code.\n";
}
Example #10
void CG::optimize()
{
  OPENANN_CHECK(opt);
  StoppingInterrupt interrupt;
  while(step())
  {
    OPENANN_DEBUG << "Iteration #" << iteration << ", training error = "
                  << FloatingPointFormatter(error, 4);
    if(interrupt.isSignaled())
    {
      reset();
      break;
    }
  }
}
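Together with Examples #5 and #13, this completes the interrupt protocol: the StoppingInterrupt constructor installs a SIGINT handler, so pressing Ctrl+C during training merely raises a flag; isSignaled() reports it between iterations, and optimize() resets the optimizer and leaves the loop cleanly instead of killing the process.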
Example #11
void BCIDataSet::determineDimension()
{
  std::ifstream file(fileName("Flashing").c_str());
  OPENANN_CHECK(file.is_open());
  sampling = 240;
  channels = 64;
  epochs = dataType == TEST ? 100 : 85;
  if(dataType == DEMO)
    readEpochs = 1;
  else
    readEpochs = epochs;
  maxT = 7794;
  D = sampling * channels;
  F = 1;
  debugLogger << sampling << " samples, " << channels << " channels, "
              << epochs << " epochs, " << maxT << " steps\n";
  flashing.resize(maxT, epochs);
  stimulusCode.resize(maxT, epochs);
  stimulusType.resize(maxT, epochs);
  targetChar.resize(epochs);
  signal.resize(epochs, Eigen::MatrixXd(channels, maxT));
  tempInstance.resize(channels * sampling);
}
Example #12
void MaxPooling::forwardPropagate(Eigen::MatrixXd* x, Eigen::MatrixXd*& y,
                                  bool dropout)
{
  const int N = x->rows();
  this->y.conservativeResize(N, Eigen::NoChange);
  this->x = x;

  OPENANN_CHECK(x->cols() == fm * inRows * inCols);
  OPENANN_CHECK_EQUALS(this->y.cols(), fm * outRows * outCols);

  #pragma omp parallel for
  for(int n = 0; n < N; n++)
  {
    int outputIdx = 0;
    int inputIdx = 0;
    for(int fmo = 0; fmo < fm; fmo++)
    {
      for(int ri = 0, ro = 0; ri < maxRow; ri += kernelRows, ro++)
      {
        int rowBase = fmo * fmInSize + ri * inCols;
        for(int ci = 0, co = 0; ci < maxCol; ci += kernelCols, co++, outputIdx++)
        {
          double m = -std::numeric_limits<double>::max();
          for(int kr = 0; kr < kernelRows; kr++)
          {
            // Start of kernel row kr within the flattened feature map.
            inputIdx = rowBase + kr * inCols + ci;
            for(int kc = 0; kc < kernelCols; kc++, inputIdx++)
              m = std::max(m, (*x)(n, inputIdx));
          }
          this->y(n, outputIdx) = m;
        }
      }
    }
  }

  y = &(this->y);
}
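With 2×2 kernels, for example, each output unit is the maximum of the four inputs at rows ri and ri + 1 and columns ci and ci + 1 of its feature map; the kr * inCols term in the inner loop is what advances the flattened column index from one kernel row to the next.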
Example #13
bool StoppingInterrupt::isSignaled()
{
  OPENANN_CHECK(observers > 0);
  return stoppingInterruptSignal;
}
Example #14
const OpenANN::ActionSpace::A& DoublePoleBalancing::getDiscreteActionSpace() const
{
  OPENANN_CHECK(false);
  static ActionSpace::A dummy;
  return dummy;
}
Example #15
const OpenANN::StateSpace::S& DoublePoleBalancing::getDiscreteStateSpace() const
{
  OPENANN_CHECK(false);
  static StateSpace::S dummy;
  return dummy;
}
Example #16
Layer& Net::getLayer(unsigned int l)
{
  OPENANN_CHECK(l < L);
  return *layers[l];
}
Example #17
OutputInfo Subsampling::initialize(std::vector<double*>& parameterPointers,
                                   std::vector<double*>& parameterDerivativePointers)
{
  OutputInfo info;
  info.dimensions.push_back(fm);
  outRows = inRows / kernelRows;
  outCols = inCols / kernelCols;
  fmOutSize = outRows * outCols;
  info.dimensions.push_back(outRows);
  info.dimensions.push_back(outCols);
  fmInSize = inRows * inCols;
  maxRow = inRows - kernelRows + 1;
  maxCol = inCols - kernelCols + 1;

  W.resize(fm, Eigen::MatrixXd(outRows, outCols));
  Wd.resize(fm, Eigen::MatrixXd(outRows, outCols));
  int numParams = fm * outRows * outCols;
  if(bias)
  {
    Wb.resize(fm, Eigen::MatrixXd(outRows, outCols));
    Wbd.resize(fm, Eigen::MatrixXd(outRows, outCols));
    numParams += fm * outRows * outCols;
  }
  parameterPointers.reserve(parameterPointers.size() + numParams);
  parameterDerivativePointers.reserve(parameterDerivativePointers.size() + numParams);
  for(int fmo = 0; fmo < fm; fmo++)
  {
    for(int r = 0; r < outRows; r++)
    {
      for(int c = 0; c < outCols; c++)
      {
        parameterPointers.push_back(&W[fmo](r, c));
        parameterDerivativePointers.push_back(&Wd[fmo](r, c));
        if(bias)
        {
          parameterPointers.push_back(&Wb[fmo](r, c));
          parameterDerivativePointers.push_back(&Wbd[fmo](r, c));
        }
      }
    }
  }

  initializeParameters();

  a.resize(1, info.outputs());
  y.resize(1, info.outputs());
  yd.resize(1, info.outputs());
  deltas.resize(1, info.outputs());

  if(info.outputs() < 1)
    throw OpenANNException("Number of outputs in subsampling layer is below"
                           " 1. You should either choose a smaller filter"
                           " size or generate a bigger input.");
  OPENANN_CHECK(fmInSize > 0);
  OPENANN_CHECK(outRows > 0);
  OPENANN_CHECK(outCols > 0);
  OPENANN_CHECK(fmOutSize > 0);
  OPENANN_CHECK(maxRow > 0);
  OPENANN_CHECK(maxCol > 0);

  return info;
}
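As a concrete check of the bookkeeping (numbers chosen purely for illustration): with one feature map, a 16×16 input, and 2×2 kernels, outRows = outCols = 8, so the layer registers 64 weights in W, another 64 bias weights in Wb when bias is enabled, and produces 64 outputs.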
Example #18
bool LMA::step()
{
  OPENANN_CHECK(opt);
  if(iteration < 0)
    initialize();
  OPENANN_CHECK(n > 0);

  try
  {
    while(alglib_impl::minlmiteration(state.c_ptr(), &envState))
    {
      if(state.needfi)
      {
        for(unsigned i = 0; i < n; i++)
          parameters(i) = state.x[i];
        opt->setParameters(parameters);
        for(unsigned i = 0; i < opt->examples(); i++)
          state.fi[i] = opt->error(i);
        if(iteration != state.c_ptr()->repiterationscount)
        {
          iteration = state.c_ptr()->repiterationscount;
          opt->finishedIteration();
          return true;
        }
        continue;
      }
      if(state.needfij)
      {
        for(unsigned i = 0; i < n; i++)
          parameters(i) = state.x[i];
        opt->setParameters(parameters);
        for(int ex = 0; ex < opt->examples(); ex++)
        {
          opt->errorGradient(ex, errorValues(ex), gradient);
          state.fi[ex] = errorValues(ex);
          for(unsigned d = 0; d < opt->dimension(); d++)
            state.j[ex][d] = gradient(d);
        }
        if(iteration != state.c_ptr()->repiterationscount)
        {
          iteration = state.c_ptr()->repiterationscount;
          opt->finishedIteration();
          return true;
        }
        continue;
      }
      if(state.xupdated)
        continue;
      throw alglib::ap_error("ALGLIB: error in 'minlmoptimize' (some "
                             "derivatives were not provided?)");
    }
    alglib_impl::ae_state_clear(&envState);
  }
  catch(alglib_impl::ae_error_type)
  {
    throw OpenANNException(envState.error_msg);
  }

  reset();
  return false;
}
Example #19
OutputInfo Net::getOutputInfo(unsigned int l)
{
  OPENANN_CHECK(l < L);
  return infos[l];
}
Example #20
Eigen::VectorXd LMA::result()
{
  OPENANN_CHECK(opt);
  opt->setParameters(optimum);
  return optimum;
}
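A minimal end-to-end sketch; the problem object is a hypothetical stand-in for any class implementing OpenANN's Optimizable interface, and setOptimizable()/optimize() are assumed from the common Optimizer interface (compare CG::optimize() in Example #10):

// Hypothetical sketch: run LMA to convergence and fetch the optimum.
OpenANN::LMA lma;
lma.setOptimizable(problem); // assumption: the Optimizer interface registers the problem here
lma.optimize();              // repeatedly calls step() until it returns false (Example #18)
Eigen::VectorXd best = lma.result(); // also writes the optimum back into the problem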