int main(int n_args, char** args) { double intersection = 0.5; double n_rfs = 9; string fa_name = string(args[1]); string directory = string(args[2]); if (n_args>3) intersection = atof(args[3]); if (n_args>4) n_rfs = atoi(args[4]); // Load training data MatrixXd inputs; MatrixXd targets; directory += "/"; if (!loadMatrix(directory+"inputs.txt", inputs)) return -1; if (!loadMatrix(directory+"targets.txt", targets)) return -1; int input_dim = inputs.cols(); // Initialize function approximator FunctionApproximator* fa; if (fa_name.compare("LWR")==0) { MetaParametersLWR* meta_params = new MetaParametersLWR(input_dim,n_rfs,intersection); fa = new FunctionApproximatorLWR(meta_params); } else { MetaParametersRBFN* meta_params = new MetaParametersRBFN(input_dim,n_rfs,intersection); fa = new FunctionApproximatorRBFN(meta_params); } // Train function approximator with data bool overwrite = true; fa->train(inputs,targets,directory,overwrite); // Make predictions for the targets MatrixXd outputs(inputs.rows(),fa->getExpectedOutputDim()); fa->predict(inputs,outputs); saveMatrix(directory,"outputs.txt",outputs,overwrite); VectorXd min(1); min << 0.0; VectorXd max(1); max << 2.0; VectorXi n_samples_grid(1); n_samples_grid << 201; fa->saveGridData(min, max, n_samples_grid, directory, overwrite); delete fa; return 0; }
/** Main function * \param[in] n_args Number of arguments * \param[in] args Arguments themselves * \return Success of exection. 0 if successful. */ int main(int n_args, char** args) { // First argument may be optional directory to write data to string directory; if (n_args>1) directory = string(args[1]); bool overwrite = true; // Generate training data int n_input_dims = 1; VectorXi n_samples_per_dim = VectorXi::Constant(1,25); if (n_input_dims==2) n_samples_per_dim = VectorXi::Constant(2,25); MatrixXd inputs, targets, outputs; targetFunction(n_samples_per_dim,inputs,targets); // Locally Weighted Regression double overlap = 0.07; int n_rfs = 9; if (n_input_dims==2) n_rfs = 5; VectorXi num_rfs_per_dim = VectorXi::Constant(n_input_dims,n_rfs); MetaParametersLWR* meta_parameters_lwr = new MetaParametersLWR(n_input_dims,num_rfs_per_dim,overlap); FunctionApproximator* fa = new FunctionApproximatorLWR(meta_parameters_lwr); cout << "_____________________________________" << endl << fa->getName() << endl; cout << " Training" << endl; fa->train(inputs,targets,directory+"/"+fa->getName(),overwrite); cout << " Predicting" << endl; fa->predict(inputs,outputs); meanAbsoluteErrorPerOutputDimension(targets,outputs); cout << endl << endl; delete fa; // IRFRLS int number_of_basis_functions=100; double lambda=0.2; double gamma=10; MetaParametersIRFRLS* meta_parameters_irfrls = new MetaParametersIRFRLS(n_input_dims,number_of_basis_functions,lambda,gamma); fa = new FunctionApproximatorIRFRLS(meta_parameters_irfrls); cout << "_____________________________________" << endl << fa->getName() << endl; cout << " Training" << endl; fa->train(inputs,targets,directory+"/"+fa->getName(),overwrite); cout << " Predicting" << endl; fa->predict(inputs,outputs); meanAbsoluteErrorPerOutputDimension(targets,outputs); cout << endl << endl; delete fa; /* // Gaussian Mixture Regression (TOO SLOW FOR DEMO) int number_of_gaussians = 5; MetaParametersGMR* meta_parameters_gmr = new 
MetaParametersGMR(n_input_dims,number_of_gaussians); fa = new FunctionApproximatorGMR(meta_parameters_gmr); cout << "_____________________________________" << endl << fa->getName() << endl; cout << " Training" << endl; fa->train(inputs,targets,directory+"/"+fa->getName(),overwrite); cout << " Predicting" << endl; fa->predict(inputs,outputs); meanAbsoluteErrorPerOutputDimension(targets,outputs); cout << endl << endl; delete fa; // Locally Weighted Projection Regression (LIBRARIES NOT LINKED CORRECTLY IN PYTHON) #ifdef USE_LWPR double w_gen=0.2; double w_prune=0.8; bool update_D=true; double init_alpha=0.1; double penalty=0.1; VectorXd init_D=VectorXd::Constant(n_input_dims,20); MetaParametersLWPR* meta_parameters_lwpr = new MetaParametersLWPR(n_input_dims,init_D,w_gen,w_prune,update_D,init_alpha,penalty); fa = new FunctionApproximatorLWPR(meta_parameters_lwpr); cout << "_____________________________________" << endl << fa->getName() << endl; cout << " Training" << endl; fa->train(inputs,targets,directory+"/"+fa->getName(),overwrite); cout << " Predicting" << endl; fa->predict(inputs,outputs); meanAbsoluteErrorPerOutputDimension(targets,outputs); cout << endl << endl; delete fa; #endif // USE_LWPR */ return 0; }
/** Main function * \param[in] n_args Number of arguments * \param[in] args Arguments themselves * \return Success of exection. 0 if successful. */ int main(int n_args, char** args) { string directory, directory_fa; if (n_args>1) directory = string(args[1]); bool overwrite = true; for (int n_input_dims = 1; n_input_dims<=2; n_input_dims++) { vector<FunctionApproximator*> function_approximators; if (n_args>2) { // Assume the arguments are names of function approximatores for (int i_arg=2; i_arg<n_args; i_arg++) { FunctionApproximator* fa = getFunctionApproximatorByName(args[i_arg],n_input_dims); if (fa==NULL) return -1; function_approximators.push_back(fa); } } else { // No name passed, get all function approximators getFunctionApproximatorsVector(n_input_dims,function_approximators); } // Generate training data VectorXi n_samples_per_dim = VectorXi::Constant(1,25); if (n_input_dims==2) n_samples_per_dim = VectorXi::Constant(2,10); MatrixXd inputs, targets, outputs; targetFunction(n_samples_per_dim,inputs,targets); VectorXd min = inputs.colwise().minCoeff(); VectorXd max = inputs.colwise().maxCoeff(); VectorXi n_samples_per_dim_dense = VectorXi::Constant(n_input_dims,100); if (n_input_dims==2) n_samples_per_dim = VectorXi::Constant(n_input_dims,40); for (unsigned int dd=0; dd<function_approximators.size(); dd++) { FunctionApproximator* fa = function_approximators[dd]; cout << "_____________________________________" << endl << fa->getName() << endl; cout << " Training (with " << n_input_dims << "D data)"<< endl; if (!directory.empty()) { directory_fa = directory+"/"+fa->getName(); if (n_input_dims==1) directory_fa = directory_fa+"1D"; else directory_fa = directory_fa+"2D"; } fa->train(inputs,targets,directory_fa,overwrite); fa->predict(inputs,outputs); cout << " Converting to UnifiedModel" << endl; UnifiedModel* mp_unified = fa->getUnifiedModel(); if (mp_unified!=NULL) { mp_unified->saveGridData(min, max, n_samples_per_dim_dense, directory_fa+"Unified",overwrite); 
fa->saveGridData(min, max, n_samples_per_dim_dense, directory_fa,overwrite); saveMatrix(directory_fa+"Unified","inputs.txt",inputs,overwrite); saveMatrix(directory_fa+"Unified","targets.txt",targets,overwrite); //saveMatrix(directory_fa+"Unified","outputs.txt",outputs,overwrite); } delete fa; fa = NULL; delete mp_unified; } } return 0; }
/** Main function: test FunctionApproximatorLWR by training it on 1D and 2D
 * data and then repeatedly perturbing the selected model parameters
 * (offsets and slopes), saving grid data for each perturbation so the effect
 * can be inspected offline.
 * \param[in] n_args Number of arguments
 * \param[in] args Arguments themselves (optional: directory to write data to)
 * \return Success of execution. 0 if successful.
 */
int main(int n_args, char** args)
{
  // First argument may be an optional directory to write data to
  string directory;
  if (n_args>1)
    directory = string(args[1]);
  //else
  //  usage(args[0],"/tmp/testFunctionApproximatorLWR");

  // Run the whole test once with 1D and once with 2D input data
  for (int input_dim=1; input_dim<=2; input_dim++)
  {
    cout << "________________________________________________________________________" << endl;
    cout << "________________________________________________________________________" << endl;

    // Generate training data from the target function
    VectorXi n_samples_per_dim = VectorXi::Constant(1,10);
    if (input_dim==2)
      n_samples_per_dim = VectorXi::Constant(2,25);
    MatrixXd inputs, targets, outputs;
    targetFunction(n_samples_per_dim,inputs,targets);

    // Meta-parameters: number of receptive fields per dimension and their overlap
    double intersection = 0.5;
    int n_rfs = 9;
    if (input_dim==2) n_rfs = 3;
    VectorXi num_rfs_per_dim = VectorXi::Constant(input_dim,n_rfs);
    MetaParametersLWR* meta_parameters = new MetaParametersLWR(input_dim,num_rfs_per_dim,intersection);

    // Save 1D and 2D results in separate subdirectories (empty = don't save)
    string save_directory;
    if (!directory.empty())
      save_directory = directory+"/"+(input_dim==1?"1D":"2D");

    // Train the function approximator
    FunctionApproximator* fa = new FunctionApproximatorLWR(meta_parameters);
    bool overwrite = true;
    fa->train(inputs,targets,save_directory,overwrite);

    // Now the basic functionality of the LWR FA has been tested.
    // Next, perturb the model parameters.
    const ModelParametersLWR* model_parameters_lwr_const = static_cast< const ModelParametersLWR*>(fa->getModelParameters());

    // Get a clone which is not const so that we can modify it
    ModelParametersLWR* model_parameters_lwr = static_cast< ModelParametersLWR*>(model_parameters_lwr_const->clone());

    // Select which parameter subsets will be exposed in the parameter vector
    set<string> selected;
    selected.insert("offsets");
    selected.insert("slopes");
    model_parameters_lwr->setSelectedParameters(selected);
    model_parameters_lwr->set_lines_pivot_at_max_activation(true);

    // Print the selected parameter values, raw and normalized
    VectorXd values;
    bool normalized = false;
    model_parameters_lwr->getParameterVectorSelected(values,normalized);
    cout << "Original values : " << fixed << setprecision(4) << values.transpose() << endl;
    normalized = true;
    model_parameters_lwr->getParameterVectorSelected(values,normalized);
    cout << "Original values (normalized): " << fixed << setprecision(4) << values.transpose() << endl;

    // Re-read the normalized values; these serve as the fixed baseline for all
    // perturbations below (each perturbation is relative to this baseline, not
    // cumulative).
    // NOTE(review): this call is identical to the one just above and looks
    // redundant — verify before removing.
    normalized = true;
    model_parameters_lwr->getParameterVectorSelected(values,normalized);

    int n_perturbations = 5;
    for (int i_perturbation=0; i_perturbation<n_perturbations; i_perturbation++)
    {
      if (!save_directory.empty())
      {
        // Get min and max of the targetfunction (i.e. generate just 2 samples
        // per dimension)
        MatrixXd inputs;
        MatrixXd targets;
        targetFunction(VectorXi::Constant(input_dim,2), inputs, targets);
        VectorXd min = inputs.colwise().minCoeff();
        VectorXd max = inputs.colwise().maxCoeff();

        // Dense grid for saving model data (coarser in 2D)
        VectorXi n_samples_per_dim = VectorXi::Constant(input_dim,100);
        if (input_dim==2)
          n_samples_per_dim = VectorXi::Constant(input_dim,40);

        string cur_save_directory = save_directory + "/perturbation" + to_string(i_perturbation);

        // Wrap the (currently perturbed) model parameters in a temporary FA,
        // shadowing the outer 'fa', and save its grid data.
        FunctionApproximatorLWR fa(model_parameters_lwr);
        fa.saveGridData(min, max, n_samples_per_dim, cur_save_directory, overwrite);
      }

      // Apply a small random perturbation (scaled by 0.05) to the baseline
      // parameter values and push them back into the model.
      double scale = 0.05;
      VectorXd perturbations = scale*VectorXd::Random(values.size());
      VectorXd values_perturbed = values.array()+perturbations.array();
      model_parameters_lwr->setParameterVectorSelected(values_perturbed,normalized);
      //cout << *model_parameters_lwr << endl;
      cout << "Perturbation " << i_perturbation << ": " << fixed << setprecision(4) << values_perturbed.transpose() << endl;
    }

    // The FA made its own copy of the meta-parameters, so both are deleted here.
    delete meta_parameters;
    delete fa;
    // NOTE(review): the cloned model_parameters_lwr is never deleted. If
    // FunctionApproximatorLWR copies (rather than adopts) the pointer it is
    // given, this is a leak — verify ownership before adding a delete.
  }

  return 0;
}