LogisticRegression<MatType>::LogisticRegression(
    OptimizerType<LogisticRegressionFunction<MatType>>& optimizer) :
    parameters(optimizer.Function().GetInitialPoint()),
    lambda(optimizer.Function().Lambda())
{
  Train(optimizer);
}
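A minimal usage sketch for this optimizer-taking constructor, not part of the original source: it assumes the mlpack 2.x-era layout, where L_BFGS lives in mlpack::optimization and is templated on the function type; the toy data and the names lrf, lbfgs, and lr are illustrative only.

#include <mlpack/core.hpp>
#include <mlpack/core/optimizers/lbfgs/lbfgs.hpp>
#include <mlpack/methods/logistic_regression/logistic_regression.hpp>

using namespace mlpack::regression;
using namespace mlpack::optimization;

int main()
{
  // Toy two-class data: one point per column, labels in a size_t row vector.
  arma::mat predictors(3, 100, arma::fill::randu);
  arma::Row<size_t> responses(100);
  responses.fill(0);
  responses.subvec(50, 99).fill(1);

  // Build the objective, wrap it in an optimizer, and let the constructor
  // above pull the initial point and lambda out of optimizer.Function().
  LogisticRegressionFunction<arma::mat> lrf(predictors, responses, 0.001);
  L_BFGS<LogisticRegressionFunction<arma::mat>> lbfgs(lrf);
  LogisticRegression<arma::mat> lr(lbfgs);

  return 0;
}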
SoftmaxRegression<OptimizerType>::SoftmaxRegression(
    OptimizerType<SoftmaxRegressionFunction>& optimizer) :
    parameters(optimizer.Function().GetInitialPoint()),
    numClasses(optimizer.Function().NumClasses()),
    lambda(optimizer.Function().Lambda()),
    fitIntercept(optimizer.Function().FitIntercept())
{
  Train(optimizer);
}
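A companion sketch for the SoftmaxRegression optimizer constructor, again not from the original source. The exact SoftmaxRegressionFunction constructor arguments have varied across mlpack releases; this assumes a (data, labels, numClasses, lambda) signature and the mlpack 2.x-style templated L_BFGS.

#include <mlpack/core.hpp>
#include <mlpack/core/optimizers/lbfgs/lbfgs.hpp>
#include <mlpack/methods/softmax_regression/softmax_regression.hpp>

using namespace mlpack::regression;
using namespace mlpack::optimization;

int main()
{
  arma::mat data(5, 100, arma::fill::randu);
  arma::Row<size_t> labels(100);
  labels.fill(0);
  labels.subvec(50, 99).fill(1);

  // The constructor reads numClasses, lambda and fitIntercept back out of the
  // function held by the optimizer, then delegates to Train(optimizer).
  SoftmaxRegressionFunction srf(data, labels, 2, 0.0001);
  L_BFGS<SoftmaxRegressionFunction> lbfgs(srf);
  SoftmaxRegression<> sr(lbfgs);

  // Train() may be invoked again later; as shown further below, it returns
  // the final objective value reported by the optimizer.
  const double objective = sr.Train(lbfgs);
  (void) objective;

  return 0;
}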
LogisticRegression<OptimizerType>::LogisticRegression(
    OptimizerType<LogisticRegressionFunction>& optimizer) :
    parameters(optimizer.Function().GetInitialPoint()),
    lambda(optimizer.Function().Lambda())
{
  Timer::Start("logistic_regression_optimization");
  const double out = optimizer.Optimize(parameters);
  Timer::Stop("logistic_regression_optimization");

  Log::Info << "LogisticRegression::LogisticRegression(): final objective of "
      << "trained model is " << out << "." << std::endl;
}
void LogisticRegression<MatType>::Train(
    OptimizerType<LogisticRegressionFunction<MatType>>& optimizer)
{
  // Train the model.
  parameters = optimizer.Function().GetInitialPoint();

  Timer::Start("logistic_regression_optimization");
  const double out = optimizer.Optimize(parameters);
  Timer::Stop("logistic_regression_optimization");

  Log::Info << "LogisticRegression::LogisticRegression(): final objective of "
      << "trained model is " << out << "." << std::endl;
}
FFN<LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
>::FFN(LayerType &&network,
       OutputType &&outputLayer,
       const arma::mat& predictors,
       const arma::mat& responses,
       OptimizerType<NetworkType>& optimizer,
       InitializationRuleType initializeRule,
       PerformanceFunction performanceFunction) : 
    network(std::forward<LayerType>(network)),
    outputLayer(std::forward<OutputType>(outputLayer)),
    performanceFunc(std::move(performanceFunction)),
    predictors(predictors),
    responses(responses),
    numFunctions(predictors.n_cols)
{
  static_assert(std::is_same<typename std::decay<LayerType>::type,
                  LayerTypes>::value,
                  "The type of network must be LayerTypes.");

  static_assert(std::is_same<typename std::decay<OutputType>::type,
                OutputLayerType>::value,
                "The type of outputLayer must be OutputLayerType.");

  initializeRule.Initialize(parameter, NetworkSize(this->network), 1);
  NetworkWeights(parameter, this->network);

  // Train the model.
  Timer::Start("ffn_optimization");
  const double out = optimizer.Optimize(parameter);
  Timer::Stop("ffn_optimization");

  Log::Info << "FFN::FFN(): final objective of trained model is " << out
      << "." << std::endl;
}
void FFN<
LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
>::Train(OptimizerType<NetworkType>& optimizer)
{
  // Train the model.
  Timer::Start("ffn_optimization");
  const double out = optimizer.Optimize(parameter);
  Timer::Stop("ffn_optimization");

  Log::Info << "FFN::FFN(): final objective of trained model is " << out
      << "." << std::endl;
}
double SoftmaxRegression<OptimizerType>::Train(
    OptimizerType<SoftmaxRegressionFunction>& optimizer)
{
  // Train the model.
  Timer::Start("softmax_regression_optimization");
  const double out = optimizer.Optimize(parameters);
  Timer::Stop("softmax_regression_optimization");

  Log::Info << "SoftmaxRegression::SoftmaxRegression(): final objective of "
            << "trained model is " << out << "." << std::endl;

  return out;
}
void LogisticRegression<MatType>::Train(
    const MatType& predictors,
    const arma::Row<size_t>& responses,
    OptimizerType& optimizer)
{
  LogisticRegressionFunction<MatType> errorFunction(predictors,
                                                    responses,
                                                    lambda);
  errorFunction.InitialPoint() = parameters;

  Timer::Start("logistic_regression_optimization");
  const double out = optimizer.Optimize(errorFunction, parameters);
  Timer::Stop("logistic_regression_optimization");

  Log::Info << "LogisticRegression::LogisticRegression(): final objective of "
      << "trained model is " << out << "." << std::endl;
}
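A usage sketch for this data-taking Train() overload, not from the original source. It assumes the mlpack 3.x-era API, in which L_BFGS is no longer templated on the function type and LogisticRegression offers a (dimensionality, lambda) constructor; both details may differ in other versions.

#include <mlpack/core.hpp>
#include <mlpack/core/optimizers/lbfgs/lbfgs.hpp>
#include <mlpack/methods/logistic_regression/logistic_regression.hpp>

using namespace mlpack::regression;
using namespace mlpack::optimization;

int main()
{
  arma::mat predictors(3, 100, arma::fill::randu);
  arma::Row<size_t> responses(100);
  responses.fill(0);
  responses.subvec(50, 99).fill(1);

  // Train() builds the LogisticRegressionFunction itself, seeds it with the
  // model's current parameters, and passes both to the optimizer.
  LogisticRegression<> lr(predictors.n_rows, 0.001);
  L_BFGS lbfgs;
  lr.Train(predictors, responses, lbfgs);

  return 0;
}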
SparseAutoencoder<OptimizerType>::SparseAutoencoder(
    OptimizerType<SparseAutoencoderFunction> &optimizer) :
    parameters(optimizer.Function().GetInitialPoint()),
    visibleSize(optimizer.Function().VisibleSize()),
    hiddenSize(optimizer.Function().HiddenSize()),
    lambda(optimizer.Function().Lambda()),
    beta(optimizer.Function().Beta()),
    rho(optimizer.Function().Rho())
{
  Timer::Start("sparse_autoencoder_optimization");
  const double out = optimizer.Optimize(parameters);
  Timer::Stop("sparse_autoencoder_optimization");

  Log::Info << "SparseAutoencoder::SparseAutoencoder(): final objective of "
      << "trained model is " << out << "." << std::endl;
}
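A sketch for the SparseAutoencoder optimizer constructor, also not from the original source; it assumes the mlpack 2.x-era mlpack::nn namespace and a SparseAutoencoderFunction(data, visibleSize, hiddenSize) constructor with default lambda, beta, and rho.

#include <mlpack/core.hpp>
#include <mlpack/core/optimizers/lbfgs/lbfgs.hpp>
#include <mlpack/methods/sparse_autoencoder/sparse_autoencoder.hpp>

using namespace mlpack::nn;
using namespace mlpack::optimization;

int main()
{
  const size_t visibleSize = 10;  // input dimensionality
  const size_t hiddenSize = 5;    // number of hidden units
  arma::mat data(visibleSize, 200, arma::fill::randu);

  // The constructor copies visibleSize, hiddenSize, lambda, beta and rho back
  // out of the function owned by the optimizer before running Optimize().
  SparseAutoencoderFunction saf(data, visibleSize, hiddenSize);
  L_BFGS<SparseAutoencoderFunction> lbfgs(saf);
  SparseAutoencoder<> sa(lbfgs);

  return 0;
}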
void FFN<
LayerTypes, OutputLayerType, InitializationRuleType, PerformanceFunction
>::Train(const arma::mat& predictors,
         const arma::mat& responses,
         OptimizerType<NetworkType>& optimizer)
{
  numFunctions = predictors.n_cols;
  this->predictors = predictors;
  this->responses = responses;

  // Train the model.
  Timer::Start("ffn_optimization");
  const double out = optimizer.Optimize(parameter);
  Timer::Stop("ffn_optimization");

  Log::Info << "FFN::FFN(): final objective of trained model is " << out
      << "." << std::endl;
}