void GazeTracker::trainNN() { cout << "Getting data" << endl; struct fann_train_data* data = fann_create_train_from_callback(input_count, nn_eyewidth * nn_eyeheight, 2, getTrainingData); //fann_save_train(data, "data.txt"); cout << "Getting left data" << endl; struct fann_train_data* data_left = fann_create_train_from_callback(input_count, nn_eyewidth * nn_eyeheight, 2, getTrainingData_left); //fann_save_train(data_left, "data_left.txt"); fann_set_training_algorithm(ANN, FANN_TRAIN_RPROP); fann_set_learning_rate(ANN, 0.75); fann_set_training_algorithm(ANN_left, FANN_TRAIN_RPROP); fann_set_learning_rate(ANN_left, 0.75); cout << "Training" << endl; fann_train_on_data(ANN, data, 200, 20, 0.01); cout << "Training left" << endl; fann_train_on_data(ANN_left, data_left, 200, 20, 0.01); double mse = fann_get_MSE(ANN); double mse_left = fann_get_MSE(ANN_left); cout << "MSE: " << mse << ", MSE left: " << mse_left << endl; }
void GazeTracker::trainNN() { std::cout << "Getting data" << std::endl; struct fann_train_data *data = fann_create_train_from_callback(_inputCount, _nnEyeWidth * _nnEyeHeight, 2, getTrainingData); //fann_save_train(data, "data.txt"); std::cout << "Getting left data" << std::endl; struct fann_train_data *dataLeft = fann_create_train_from_callback(_inputCount, _nnEyeWidth * _nnEyeHeight, 2, getTrainingDataLeft); //fann_save_train(dataLeft, "dataLeft.txt"); fann_set_training_algorithm(_ANN, FANN_TRAIN_RPROP); fann_set_learning_rate(_ANN, 0.75); fann_set_training_algorithm(_ANNLeft, FANN_TRAIN_RPROP); fann_set_learning_rate(_ANNLeft, 0.75); std::cout << "Training" << std::endl; fann_train_on_data(_ANN, data, 200, 20, 0.01); std::cout << "Training left" << std::endl; fann_train_on_data(_ANNLeft, dataLeft, 200, 20, 0.01); double mse = fann_get_MSE(_ANN); double mseLeft = fann_get_MSE(_ANNLeft); std::cout << "MSE: " << mse << ", MSE left: " << mseLeft << std::endl; }
/*
 * Train a 360-180-90-1 feed-forward network with RPROP on the training file
 * given as the only command-line argument, then save it to ARQ_RNA.
 */
int main(int argc, char **argv)
{
	const unsigned int num_input = 360;
	const unsigned int num_output = 1;
	const unsigned int num_layers = 4;
	const unsigned int num_neurons_hidden = num_input / 2;
	const float desired_error = (const float) 0.001;
	const unsigned int max_epochs = 300;
	const unsigned int epochs_between_reports = 10;
	struct fann_train_data *data = NULL;
	struct fann *ann = fann_create_standard(num_layers, num_input,
	                                        num_neurons_hidden, num_neurons_hidden / 2, num_output);

	if (argc != 2) {
		fprintf(stderr, "Use: %s arquivoTreino\n", argv[0]);
		exit(1);
	}

	fann_set_activation_function_hidden(ann, FANN_ELLIOT);
	fann_set_activation_function_output(ann, FANN_LINEAR);
	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);
	fann_set_learning_rate(ann, 0.1);
	fann_set_learning_momentum(ann, 0.6);

	data = fann_read_train_from_file(argv[1]);
	if (data == NULL) {
		/* Robustness: fann_read_train_from_file returns NULL on failure. */
		fprintf(stderr, "Failed to read training data from %s\n", argv[1]);
		fann_destroy(ann);
		exit(1);
	}

	fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error);

	/* Fix: was free(data) — FANN training data owns internal arrays and must
	 * be released with fann_destroy_train(), not raw free(). */
	fann_destroy_train(data);

	fann_save(ann, ARQ_RNA);
	fann_destroy(ann);
	return 0;
}
/*! ann:set_learning_rate(function)
 *# Sets the learning rate for the various training algorithms.
 *x ann:set_learning_rate(0.7)
 *- */
static int ann_set_learning_rate(lua_State *L)
{
	/* Argument 1 must be a FANN userdata; raises a Lua error otherwise. */
	struct fann **udata;
	float lr;

	udata = luaL_checkudata(L, 1, FANN_METATABLE);
	luaL_argcheck(L, udata != NULL, 1, "'neural net' expected");

	/* Argument 2: the new learning rate. */
	lr = luaL_checknumber(L, 2);
#ifdef FANN_VERBOSE
	printf("Setting learning rate to %g\n", lr);
#endif
	fann_set_learning_rate(*udata, lr);

	/* No values pushed back to Lua. */
	return 0;
}
/*
 * Apply the globally configured learning rate to the network when the caller
 * requested that parameter explicitly, or asked for all parameters at once.
 */
void ann_set(ann_t *net, ann_param_t param)
{
	int wants_rate = (param == ann_all) || (param == ann_learning_rate);

	if (wants_rate)
		fann_set_learning_rate((struct fann *) net, options->learning_rate);
}
/*
 * Cascade-training demo on the parity-8 benchmark: reads train/test sets,
 * grows a shortcut network with fann_cascadetrain_on_data, reports MSE and
 * bit-fail on both sets, then checks every training sample's output sign.
 */
int main()
{
	const float desired_error = (const float) 0.0;
	unsigned int max_neurons = 30;
	unsigned int neurons_between_reports = 1;
	int multi = 0;
	enum fann_train_enum training_algorithm = FANN_TRAIN_RPROP;

	printf("Reading data.\n");
	struct fann_train_data *train_data = fann_read_train_from_file("../benchmarks/datasets/parity8.train");
	struct fann_train_data *test_data = fann_read_train_from_file("../benchmarks/datasets/parity8.test");
	/* Inputs/outputs rescaled to the symmetric range used by the activations. */
	fann_scale_train_data(train_data, -1, 1);
	fann_scale_train_data(test_data, -1, 1);

	printf("Creating network.\n");
	struct fann *ann = fann_create_shortcut(2,
	                                        fann_num_input_train_data(train_data),
	                                        fann_num_output_train_data(train_data));

	fann_set_training_algorithm(ann, training_algorithm);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_LINEAR);
	fann_set_train_error_function(ann, FANN_ERRORFUNC_LINEAR);

	if (multi == 0) {
		/* Single-activation candidate setup: one steepness, one function. */
		fann_type steepness = 1;	/*steepness = 0.5;*/
		fann_set_cascade_activation_steepnesses(ann, &steepness, 1);
		enum fann_activationfunc_enum activation = FANN_SIGMOID_SYMMETRIC;	/*activation = FANN_SIN_SYMMETRIC;*/
		fann_set_cascade_activation_functions(ann, &activation, 1);
		fann_set_cascade_num_candidate_groups(ann, 8);
	}

	/* Dead with RPROP selected above; kept for experimenting with QUICKPROP. */
	if (training_algorithm == FANN_TRAIN_QUICKPROP) {
		fann_set_learning_rate(ann, 0.35);
		fann_randomize_weights(ann, -2.0, 2.0);
	}

	fann_set_bit_fail_limit(ann, 0.9);
	fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
	fann_print_parameters(ann);
	fann_save(ann, "cascade_train2.net");

	printf("Training network.\n");
	fann_cascadetrain_on_data(ann, train_data, max_neurons, neurons_between_reports, desired_error);
	fann_print_connections(ann);

	/* fann_test_data also refreshes the bit-fail counter read right after. */
	float mse_train = fann_test_data(ann, train_data);
	unsigned int bit_fail_train = fann_get_bit_fail(ann);
	float mse_test = fann_test_data(ann, test_data);
	unsigned int bit_fail_test = fann_get_bit_fail(ann);

	printf("\nTrain error: %f, Train bit-fail: %d, Test error: %f, Test bit-fail: %d\n\n",
	       mse_train, bit_fail_train, mse_test, bit_fail_test);

	/* Sanity check: predicted sign must match the target sign on every sample. */
	for (unsigned int i = 0; i < train_data->num_data; i++) {
		fann_type *output = fann_run(ann, train_data->input[i]);
		if ((train_data->output[i][0] >= 0 && output[0] <= 0) ||
		    (train_data->output[i][0] <= 0 && output[0] >= 0)) {
			printf("ERROR: %f does not match %f\n", train_data->output[i][0], output[0]);
		}
	}

	printf("Saving network.\n");
	fann_save(ann, "cascade_train.net");
	printf("Cleaning up.\n");
	fann_destroy_train(train_data);
	fann_destroy_train(test_data);
	fann_destroy(ann);

	return 0;
}
/*
 * Train an image-spam classifier: a 22500-50-50-1 network trained in three
 * stages with a decreasing learning rate and target MSE, then saved to
 * image_spam.net.
 */
int main()
{
	fann_type *calc_out;
	const unsigned int num_input = 22500;
	const unsigned int num_output = 1;
	const unsigned int num_layers = 4;	/* this value can be changed to tweak the network */
	const unsigned int num_neurons_hidden = 50;	//const unsigned int num_neurons_hidden = 150;
	const unsigned int epochs_between_reports = 20;
	struct fann *ann;
	struct fann_train_data *data;
	unsigned int i = 0;

	/* CREATING NETWORK */
	ann = fann_create_standard(num_layers, num_input,
	                           num_neurons_hidden, num_neurons_hidden, num_output);

	/* reading training data */
	data = fann_read_train_from_file("training.data");

	fann_set_activation_steepness_hidden(ann, 1);
	fann_set_activation_steepness_output(ann, 1);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);
	fann_init_weights(ann, data);

	/* TRAINING NETWORK: three stages, each with a lower learning rate and a
	 * tighter target MSE than the last. */
	fann_set_learning_rate(ann, .5);
	fann_train_on_data(ann, data, 5000, epochs_between_reports, .2);
	fann_set_learning_rate(ann, .2);
	fann_train_on_data(ann, data, 1000, epochs_between_reports, .15);
	fann_set_learning_rate(ann, .1);
	fann_train_on_data(ann, data, 5000, epochs_between_reports, .002);

	/* TESTING NETWORK */
	/* Fix: the format string contained a raw line break, which is not a legal
	 * C string literal. */
	printf("Testing network. %f\n", fann_test_data(ann, data));
	for (i = 0; i < fann_length_train_data(data); i++) {
		calc_out = fann_run(ann, data->input[i]);
		/*printf("%f, should be %f, difference=%f\n", calc_out[0], data->output[i][0], fann_abs(calc_out[0] - data->output[i][0])); */
	}

	/* SAVING NETWORK */
	fann_save(ann, "image_spam.net");

	/* CLEANING UP */
	fann_destroy_train(data);
	fann_destroy(ann);
	return 0;
}
/*
 * Train a fully-connected FANN network from the command line.
 *
 * Arguments (all required):
 *   1. data filename
 *   2. topology, as number of neurons per layer separated by dashes
 *   3. epochs (integer)
 *   4. learning rate (0.0-1.0 float)
 *   5. output filename
 */
int main(int argc, char **argv)
{
	// Fix: the original indexed argv[1..5] with no argc check, crashing on
	// missing arguments.
	if (argc != 6) {
		fprintf(stderr, "Usage: %s <data file> <topology, e.g. 4-8-1> <epochs> <learning rate> <output file>\n", argv[0]);
		return 1;
	}

	// Argument 1: data filename.
	const char *datafn = argv[1];

	// Argument 2: topology. Bounded so layer_sizes[] cannot overflow.
	unsigned int layer_sizes[MAX_LAYERS];
	unsigned int num_layers = 0;
	char *token = strtok(argv[2], "-");
	while (token != NULL && num_layers < MAX_LAYERS) {
		layer_sizes[num_layers] = atoi(token);
		++num_layers;
		token = strtok(NULL, "-");
	}

	// Argument 3: epoch count.
	unsigned int max_epochs = atoi(argv[3]);

	// Argument 4: learning rate.
	float learning_rate = atof(argv[4]);

	// Argument 5: output filename.
	const char *outfn = argv[5];

	struct fann *ann = fann_create_standard_array(num_layers, layer_sizes);

	// Misc parameters.
	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);
	fann_set_activation_steepness_hidden(ann, 0.5);
	fann_set_activation_steepness_output(ann, 0.5);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID);
	fann_set_activation_function_output(ann, FANN_SIGMOID);
	//fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
	//fann_set_bit_fail_limit(ann, 0.01f);

	struct fann_train_data *data = fann_read_train_from_file(datafn);
	fann_init_weights(ann, data);

	fann_set_learning_rate(ann, learning_rate);
	fann_train_on_data(
		ann,
		data,
		max_epochs,
		10, // epochs between reports
		DESIRED_ERROR
	);

	printf("Testing network. %f\n", fann_test_data(ann, data));

	fann_type *calc_out;
	for (unsigned int i = 0; i < fann_length_train_data(data); ++i) {
		calc_out = fann_run(ann, data->input[i]);
	}
	printf("RMSE = %f\n", sqrt(fann_get_MSE(ann)));

	fann_save(ann, outfn);
	fann_destroy_train(data);
	fann_destroy(ann);
	return 0;
}
// Logs the new learning rate to stderr, then forwards it to the wrapped
// FANN network.
void Trainer::set_learning_rate(float learning_rate)
{
	std::cerr << "Set learning rate to " << learning_rate << std::endl;

	fann_set_learning_rate(ann_, learning_rate);
}
/*
 * Train a facial-keypoint regressor (48x48 image -> 30 coordinates), evaluate
 * it on the test file printing predicted/expected values per coordinate, then
 * save the network to facial.net.
 */
int main()
{
	const unsigned int max_epochs = 1000;
	const unsigned int epochs_between_reports = 10;
	const unsigned int num_input = 48*48;	/* flattened 48x48 image */
	const unsigned int num_output = 30;	/* 15 (x, y) keypoints — TODO confirm */
	const unsigned int num_layers = 2;	/* input + output only: no hidden layer */
	const float desired_error = (const float) 0.0000;
	fann_type *calc_out;
	unsigned int i;
	float learning_rate = 0.01;

	struct fann *ann = fann_create_standard(num_layers, num_input, num_output);
	/* NOTE(review): with num_layers == 2 there are no hidden neurons, so the
	 * hidden-activation setting below has nothing to act on — confirm intent. */
	fann_set_activation_function_hidden(ann, FANN_SIGMOID);
	fann_set_activation_function_output(ann, FANN_LINEAR);
	fann_set_learning_rate(ann, learning_rate);
	fann_train_on_file(ann, "facial-train.txt", max_epochs, epochs_between_reports, desired_error);

	fann_reset_MSE(ann);
	struct fann_train_data *data = fann_read_train_from_file("facial-test.txt");

	printf("Testing network..\n");
	for(i = 0; i < fann_length_train_data(data); i++) {
		calc_out = fann_test(ann, data->input[i], data->output[i] );
		printf ("%i ", i );
		/* Per coordinate: predicted/expected(error), rescaled by 2*96. */
		for (int n=0; n<30; n++) {
			printf (" %.2f/%.2f(%.2f) ",calc_out[n]*(2*96), data->output[i][n]*(2*96), data->output[i][n]*(2*96) - calc_out[n]*(2*96) );
		}
		printf ("\n");
	}
	printf("Mean Square Error: %f\n", fann_get_MSE(ann));

	fann_save(ann, "facial.net");
	fann_destroy_train(data);
	fann_destroy(ann);
	return 0;
}
/*
 * Train and evaluate one ANN configuration described by a "chromosome"
 * decoded from the command line (a GA fitness evaluator). Prints
 * "Resultado: <error>" for the driver to read.
 */
int main(int argc, char *argv[])
{
	struct fann_train_data *dadosTreino, *dadosTeste;
	struct fann *ANN;
	fann_type *ANN_Answers;
	unsigned int *layers;	/* fix: fann_create_standard_array() takes unsigned int* */
	int i, j, aux;
	chromosome chromo;
	float erro = 0.0;

	checkArgs(argc, argv);
	buildChromosome(argv, &chromo);
	checkDatasetFiles();

	dadosTreino = fann_read_train_from_file(nomeArqTreino);

	/* Layer sizes: input, hidden layers halving in width, output. */
	layers = (unsigned int *) calloc(2+chromo.qntCamadasOcultas, sizeof(unsigned int));
	layers[0] = qntNeuroniosEntrada;
	layers[2+chromo.qntCamadasOcultas-1] = qntNeuroniosSaida;
	aux = chromo.neurOcultos;
	for (i=1; i < 2+chromo.qntCamadasOcultas-1 ; i++) {
		layers[i] = aux;
		aux = aux/2;
	}

	/* Build the network. */
	ANN = fann_create_standard_array(2+chromo.qntCamadasOcultas, layers);
	free(layers);	/* fix: was leaked */

	/* Training. */
	fann_set_learning_rate(ANN, chromo.learnRate);
	fann_set_learning_momentum(ANN, chromo.moment);
	fann_set_activation_function_hidden( ANN, chromo.fcOculta );
	fann_set_activation_function_output( ANN, chromo.fcSaida );
	fann_set_training_algorithm(ANN, chromo.algAprend );
	if (fann_get_training_algorithm(ANN) == FANN_TRAIN_QUICKPROP)
		fann_set_quickprop_decay(ANN, chromo.decay);

	/* In the Python version, training was wrapped in a try; on error it
	 * printed "Resultado: 999.0" and exited. */
	fann_train_on_data(ANN, dadosTreino, chromo.epocasTreino, 50, desiredError);
	fann_destroy_train(dadosTreino);

	/* Testing — same try/except convention as above in the Python version. */
	dadosTeste = fann_read_train_from_file( nomeArqValidacao);
	for(i = 0; i < fann_length_train_data(dadosTeste); i++) {
		ANN_Answers = fann_run(ANN, dadosTeste->input[i]);
		if (ANN_Answers == NULL) {
			printf("Resultado: 999.0\n");
			exit(2);
		}
		/* Sum of squared errors over every output neuron of every sample. */
		for (j=0; j < qntNeuroniosSaida; j++)
			erro += (float) powf(fann_abs(ANN_Answers[j] - dadosTeste->output[i][j]), 2);
	}
	/* NOTE(review): divides by N-1, not N — looks like a sample-variance
	 * convention, but confirm the GA driver expects this. */
	printf("Resultado: %f\n", erro/(fann_length_train_data(dadosTeste)-1));
	fann_destroy_train(dadosTeste);

	saveANN(argc, argv, ANN);
	fann_destroy(ANN);
	return 0;	/* fix: main fell off the end without returning */
}
// Forwards the requested learning rate to the underlying FANN network.
void ViFann::setLearningRate(const qreal &rate)
{
	fann_set_learning_rate(mNetwork, rate);
}