/** Main function
 * \param[in] n_args Number of arguments
 * \param[in] args Arguments themselves
 * \return Success of exection. 0 if successful.
 */
int main(int n_args, char** args)
{
  // First argument may be optional directory to write data to
  string directory;
  if (n_args>1)
    directory = string(args[1]);
  bool overwrite = true;
  
  
  // Generate training data 
  int n_input_dims = 1;
  VectorXi n_samples_per_dim = VectorXi::Constant(1,25);
  if (n_input_dims==2) 
    n_samples_per_dim = VectorXi::Constant(2,25);
    
  MatrixXd inputs, targets, outputs;
  targetFunction(n_samples_per_dim,inputs,targets);
  
  
  
  // Locally Weighted Regression
  double overlap = 0.07;
  int n_rfs = 9;
  if (n_input_dims==2) n_rfs = 5;
  VectorXi num_rfs_per_dim = VectorXi::Constant(n_input_dims,n_rfs);
  MetaParametersLWR* meta_parameters_lwr = new MetaParametersLWR(n_input_dims,num_rfs_per_dim,overlap);
  FunctionApproximator* fa = new FunctionApproximatorLWR(meta_parameters_lwr);

  cout << "_____________________________________" << endl << fa->getName() << endl;
  cout << "    Training"  << endl;
  fa->train(inputs,targets,directory+"/"+fa->getName(),overwrite);
  cout << "    Predicting" << endl;
  fa->predict(inputs,outputs);
  meanAbsoluteErrorPerOutputDimension(targets,outputs);
  cout << endl << endl;
  
  delete fa;

  
  // IRFRLS
  int number_of_basis_functions=100;
  double lambda=0.2;
  double gamma=10;
  MetaParametersIRFRLS* meta_parameters_irfrls = new MetaParametersIRFRLS(n_input_dims,number_of_basis_functions,lambda,gamma);
  fa = new FunctionApproximatorIRFRLS(meta_parameters_irfrls);
  
  cout << "_____________________________________" << endl << fa->getName() << endl;
  cout << "    Training"  << endl;
  fa->train(inputs,targets,directory+"/"+fa->getName(),overwrite);
  cout << "    Predicting" << endl;
  fa->predict(inputs,outputs);
  meanAbsoluteErrorPerOutputDimension(targets,outputs);
  cout << endl << endl;
  
  delete fa;
  
  
  
  /*
  // Gaussian Mixture Regression  (TOO SLOW FOR DEMO)
  int number_of_gaussians = 5;
  MetaParametersGMR* meta_parameters_gmr = new MetaParametersGMR(n_input_dims,number_of_gaussians);
  fa = new FunctionApproximatorGMR(meta_parameters_gmr);
    
  cout << "_____________________________________" << endl << fa->getName() << endl;
  cout << "    Training"  << endl;
  fa->train(inputs,targets,directory+"/"+fa->getName(),overwrite);
  cout << "    Predicting" << endl;
  fa->predict(inputs,outputs);
  meanAbsoluteErrorPerOutputDimension(targets,outputs);
  cout << endl << endl;
  
  delete fa;


  
    // Locally Weighted Projection Regression  (LIBRARIES NOT LINKED CORRECTLY IN PYTHON)
#ifdef USE_LWPR
  double   w_gen=0.2;
  double   w_prune=0.8;
  bool     update_D=true;
  double   init_alpha=0.1;
  double   penalty=0.1;
  VectorXd init_D=VectorXd::Constant(n_input_dims,20);
  MetaParametersLWPR* meta_parameters_lwpr = new MetaParametersLWPR(n_input_dims,init_D,w_gen,w_prune,update_D,init_alpha,penalty);
  fa = new FunctionApproximatorLWPR(meta_parameters_lwpr);
    
  cout << "_____________________________________" << endl << fa->getName() << endl;
  cout << "    Training"  << endl;
  fa->train(inputs,targets,directory+"/"+fa->getName(),overwrite);
  cout << "    Predicting" << endl;
  fa->predict(inputs,outputs);
  meanAbsoluteErrorPerOutputDimension(targets,outputs);
  cout << endl << endl;
  
  delete fa;
#endif // USE_LWPR

  */
 
  return 0;
}
// ---------------------------------------------------------------------------
// Example #2
// ---------------------------------------------------------------------------
/** Main function
 * \param[in] n_args Number of arguments
 * \param[in] args Arguments themselves
 * \return Success of exection. 0 if successful.
 */
int main(int n_args, char** args)
{
  string directory, directory_fa;
  if (n_args>1)
    directory = string(args[1]);
  bool overwrite = true;
  
  for (int n_input_dims = 1; n_input_dims<=2; n_input_dims++)
  {
    vector<FunctionApproximator*> function_approximators;
    if (n_args>2)
    {
      // Assume the arguments are names of function approximatores
      for (int i_arg=2; i_arg<n_args; i_arg++)
      {
        FunctionApproximator* fa =  getFunctionApproximatorByName(args[i_arg],n_input_dims);
        if (fa==NULL)
          return -1;
        function_approximators.push_back(fa);
      }
    }
    else
    {
      // No name passed, get all function approximators
      getFunctionApproximatorsVector(n_input_dims,function_approximators);
    }
  
    // Generate training data 
    VectorXi n_samples_per_dim = VectorXi::Constant(1,25);
    if (n_input_dims==2) 
      n_samples_per_dim = VectorXi::Constant(2,10);
      
    MatrixXd inputs, targets, outputs;
    targetFunction(n_samples_per_dim,inputs,targets);
  
    VectorXd min = inputs.colwise().minCoeff();
    VectorXd max = inputs.colwise().maxCoeff();
        
    VectorXi n_samples_per_dim_dense = VectorXi::Constant(n_input_dims,100);
    if (n_input_dims==2)
      n_samples_per_dim = VectorXi::Constant(n_input_dims,40);
            
    
    
    for (unsigned int dd=0; dd<function_approximators.size(); dd++)
    {
      
      FunctionApproximator* fa = function_approximators[dd]; 
    
      cout << "_____________________________________" << endl << fa->getName() << endl;
      cout << "    Training (with " << n_input_dims << "D data)"<< endl;
      if (!directory.empty()) {
        directory_fa =  directory+"/"+fa->getName();
        if (n_input_dims==1)
          directory_fa = directory_fa+"1D";
        else
          directory_fa = directory_fa+"2D";
      }
        
      fa->train(inputs,targets,directory_fa,overwrite);
      fa->predict(inputs,outputs);
      
      cout << "    Converting to UnifiedModel"  << endl;
      UnifiedModel* mp_unified =  fa->getUnifiedModel();
      if (mp_unified!=NULL)
      {
        mp_unified->saveGridData(min, max, n_samples_per_dim_dense, directory_fa+"Unified",overwrite);
      
        fa->saveGridData(min, max, n_samples_per_dim_dense, directory_fa,overwrite);
      
        saveMatrix(directory_fa+"Unified","inputs.txt",inputs,overwrite);
        saveMatrix(directory_fa+"Unified","targets.txt",targets,overwrite);
        //saveMatrix(directory_fa+"Unified","outputs.txt",outputs,overwrite);
      }
      
      delete fa;
      fa = NULL;
      delete mp_unified;
    }
  }
     
  return 0;
}