int main() { fann_type *calc_out; const unsigned int num_input = 2; const unsigned int num_output = 1; const unsigned int num_layers = 3; const unsigned int num_neurons_hidden = 9; const float desired_error = (const float) 0; const unsigned int max_epochs = 500000; const unsigned int epochs_between_reports = 1000; struct fann *ann; struct fann_train_data *data; unsigned int i = 0; unsigned int decimal_point; printf("Creating network.\n"); ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output); data = fann_read_train_from_file("osyslec_train.data"); fann_set_activation_steepness_hidden(ann, 1); fann_set_activation_steepness_output(ann, 1); fann_set_activation_function_hidden(ann, FANN_SIGMOID); fann_set_activation_function_output(ann, FANN_SIGMOID); fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT); fann_set_bit_fail_limit(ann, 0.01f); fann_set_training_algorithm(ann, FANN_TRAIN_RPROP); fann_init_weights(ann, data); printf("Training network.\n"); fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error); printf("Testing network. %f\n", fann_test_data(ann, data)); for(i = 0; i < fann_length_train_data(data); i++) { calc_out = fann_run(ann, data->input[i]); printf("GG test (%f,%f) -> %f, should be %f, difference=%f\n", data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0], fann_abs(calc_out[0] - data->output[i][0])); } printf("Saving network.\n"); fann_save(ann, "osyslec_train_float.net"); decimal_point = fann_save_to_fixed(ann, "osyslec_train_fixed.net"); fann_save_train_to_fixed(data, "osyslec_train_fixed.data", decimal_point); printf("Cleaning up.\n"); fann_destroy_train(data); fann_destroy(ann); return 0; }
void CNeuroNetwok::Init(const int nNeirons, const int nResults, const int nEpochs) { const unsigned int nLayersCount = 3; const unsigned int nHiddenNeironsCount = 3; m_nEpochsCount = nEpochs; // Создаем нейросеть // Количество входных нейронов столько же, сколько и входных параметров // Выходных нейронов столько же, сколько и результатов. m_pANN = fann_create_standard(nLayersCount, nNeirons, nHiddenNeironsCount, nResults); if (!m_pANN) throw std::runtime_error("Failed to init fann!"); // Выполняем очень важные инициализации :) fann_set_activation_steepness_hidden(m_pANN, 1); fann_set_activation_steepness_output(m_pANN, 1); fann_set_activation_function_hidden(m_pANN, FANN_SIGMOID_SYMMETRIC); fann_set_activation_function_output(m_pANN, FANN_SIGMOID_SYMMETRIC); fann_set_train_stop_function(m_pANN, FANN_STOPFUNC_BIT); fann_set_bit_fail_limit(m_pANN, 0.01f); m_bIsInited = true; }
/*
 * Train `ann` on the data in `filename`, increasing the activation steepness
 * whenever the epoch error drops below `desired_error`:
 * starts at steepness_start, adds steepness_step each time the target error
 * is reached, and stops once the steepness would exceed steepness_end.
 * Progress is printed every `epochs_between_reports` epochs (0 = silent).
 */
void train_on_steepness_file(struct fann *ann, char *filename,
                             unsigned int max_epochs, unsigned int epochs_between_reports,
                             float desired_error, float steepness_start,
                             float steepness_step, float steepness_end)
{
    float error;
    unsigned int i;

    struct fann_train_data *data = fann_read_train_from_file(filename);
    if (data == NULL) {
        /* previously unchecked: fann_train_epoch would dereference NULL */
        printf("Error: could not read training file.\n");
        return;
    }

    if (epochs_between_reports) {
        printf("Max epochs %8d. Desired error: %.10f\n", max_epochs, desired_error);
    }

    fann_set_activation_steepness_hidden(ann, steepness_start);
    fann_set_activation_steepness_output(ann, steepness_start);

    for (i = 1; i <= max_epochs; i++) {
        /* train one full pass over the data */
        error = fann_train_epoch(ann, data);

        /* print current output */
        if (epochs_between_reports &&
            (i % epochs_between_reports == 0 || i == max_epochs || i == 1 ||
             error < desired_error)) {
            printf("Epochs %8d. Current error: %.10f\n", i, error);
        }

        if (error < desired_error) {
            /* target reached at this steepness: make the activation steeper
               and keep training, or stop when the end value is exceeded */
            steepness_start += steepness_step;
            if (steepness_start <= steepness_end) {
                printf("Steepness: %f\n", steepness_start);
                fann_set_activation_steepness_hidden(ann, steepness_start);
                fann_set_activation_steepness_output(ann, steepness_start);
            } else {
                break;
            }
        }
    }
    fann_destroy_train(data);
}
// Build a fixed 3-layer network (144 inputs, 140 hidden, 1 output) with
// symmetric sigmoid activations, RPROP training, and a bit-fail stop
// criterion (limit 0.01).
NeuralNet::NeuralNet()
{
    cout << "Initializing neural network" << endl;

    this->numInputNeurons = 144;
    this->numOutputNeurons = 1;
    this->numLayers = 3;
    this->numHiddenNeurons = 140;

    this->ptrNeuralNet = fann_create_standard(numLayers, numInputNeurons,
                                              numHiddenNeurons, numOutputNeurons);
    if (!this->ptrNeuralNet) {
        // Previously unchecked: every fann_set_* call below would crash on NULL.
        cout << "Error: failed to create neural network" << endl;
        return;
    }

    fann_set_activation_steepness_hidden(this->ptrNeuralNet, 1);
    fann_set_activation_steepness_output(this->ptrNeuralNet, 1);
    // sigmoidal (symmetric) activation function on hidden and output layers
    fann_set_activation_function_hidden(this->ptrNeuralNet, FANN_SIGMOID_SYMMETRIC);
    fann_set_activation_function_output(this->ptrNeuralNet, FANN_SIGMOID_SYMMETRIC);
    // stop training once all outputs are within the 0.01 bit-fail limit
    fann_set_train_stop_function(this->ptrNeuralNet, FANN_STOPFUNC_BIT);
    fann_set_bit_fail_limit(this->ptrNeuralNet, 0.01f);
    fann_set_training_algorithm(this->ptrNeuralNet, FANN_TRAIN_RPROP);
}
/*! ann:set_activation_steepness_output(function)
 *# Sets the steepness of the activation function for the output neurons.
 *x ann:set_activation_steepness_output(1)
 *-
 */
static int ann_set_activation_steepness_output(lua_State *L)
{
    /* Stack: 1 = neural-net userdata, 2 = steepness number. */
    struct fann **ann = luaL_checkudata(L, 1, FANN_METATABLE);
    fann_type steepness;

    luaL_argcheck(L, ann != NULL, 1, "'neural net' expected");
    if (lua_gettop(L) < 2)
        luaL_error(L, "insufficient parameters");

    steepness = lua_tonumber(L, 2);
#ifdef FANN_VERBOSE
    printf("Setting output layer activation steepness to %f\n", steepness);
#endif
    fann_set_activation_steepness_output(*ann, steepness);
    /* No values returned to Lua. */
    return 0;
}
int main() { fann_type *calc_out; const unsigned int num_input = 22500; const unsigned int num_output = 1; //const unsigned int num_layers = 4; const unsigned int num_layers = 4; /* this value can be changed to tweak the network */ const unsigned int num_neurons_hidden = 50; //const unsigned int num_neurons_hidden = 150; const float desired_error = (const float) 0.02; const unsigned int max_epochs = 15000; const unsigned int epochs_between_reports = 20; float learning_rate = .5; struct fann *ann; struct fann_train_data *data; int num_neurons = 0; unsigned int i = 0; unsigned int decimal_point; /* CREATING NETWORK */ ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_neurons_hidden, num_output); /* reading training data */ data = fann_read_train_from_file("training.data"); fann_set_activation_steepness_hidden(ann, 1); fann_set_activation_steepness_output(ann, 1); fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC); fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC); fann_init_weights(ann, data); /* TRAINING NETWORK run x epochs at learn rate .y */ //fann_set_learning_rate(ann, .7); //fann_train_on_data(ann, data, 200, epochs_between_reports, .4); //fann_train_on_data(ann, data, 500, epochs_between_reports, .002); fann_set_learning_rate(ann, .5); fann_train_on_data(ann, data,5000, epochs_between_reports, .2); //fann_train_on_data(ann, data,50, epochs_between_reports, .2); fann_set_learning_rate(ann, .2); fann_train_on_data(ann, data,1000, epochs_between_reports, .15); //fann_train_on_data(ann, data,100, epochs_between_reports, .15); fann_set_learning_rate(ann, .1); fann_train_on_data(ann, data,5000, epochs_between_reports, .002); //fann_train_on_data(ann, data,200, epochs_between_reports, .00002); /* TESTING NETWORK */ printf("Testing network. 
%f\n", fann_test_data(ann, data)); for(i = 0; i < fann_length_train_data(data); i++) { calc_out = fann_run(ann, data->input[i]); /*printf("%f, should be %f, difference=%f\n", calc_out[0], data->output[i][0], fann_abs(calc_out[0] - data->output[i][0])); */ } /* SAVING NETWORK */ fann_save(ann, "image_spam.net"); /* CLEANING UP */ fann_destroy_train(data); fann_destroy(ann); return 0; }
int main() { printf("Reading XML.. .. ..\n"); ezxml_t f1 = ezxml_parse_file("test.xml"), classification, temp, algo, temp2; classification = ezxml_child(f1, "classification"); temp = ezxml_child(classification, "algorithm"); algo = ezxml_child(temp, "MultiLayerPerceptron"); const unsigned int num_input = atoi(ezxml_child(classification, "input")->txt); const unsigned int num_output = atoi(ezxml_child(classification, "output")->txt); const unsigned int num_layers = atoi(ezxml_child(classification, "numberOfLayers")->txt); const unsigned int num_neurons_hidden = atoi(ezxml_child(algo, "hiddenNeurons")->txt); const float desired_error = (const float) (atof(ezxml_child(algo, "desiredError")->txt)); const unsigned int max_epochs = atoi(ezxml_child(algo, "maxEpochs")->txt); const unsigned int epochs_between_reports = atoi(ezxml_child(algo, "epochsBetweenReports")->txt); fann_type *calc_out; struct fann *ann; struct fann_train_data *data; unsigned int i = 0; unsigned int decimal_point; printf("Creating network.\n"); ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output); data = fann_read_train_from_file(ezxml_child(classification, "datafile")->txt); fann_set_activation_steepness_hidden(ann, atoi(ezxml_child(algo, "hiddenActivationSteepness")->txt)); fann_set_activation_steepness_output(ann, atoi(ezxml_child(algo, "outputActivationSteepness")->txt)); fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC); fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC); temp2 = ezxml_child(algo, "trainStopFuction"); const char *stopFunc = temp2->txt; if(stopFunc == "FANN_STOPFUNC_BIT"){ fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT); } else { fann_set_train_stop_function(ann, FANN_STOPFUNC_MSE); } fann_set_bit_fail_limit(ann, 0.01f); fann_set_training_algorithm(ann, FANN_TRAIN_RPROP); fann_init_weights(ann, data); printf("Training network.\n"); fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error); 
printf("Testing network. %f\n", fann_test_data(ann, data)); for(i = 0; i < fann_length_train_data(data); i++) { calc_out = fann_run(ann, data->input[i]); printf("Test Results (%f,%f,%f) -> %f, should be %f, difference=%f\n", data->input[i][0], data->input[i][1], data->input[i][2], calc_out[0], data->output[i][0], fann_abs(calc_out[0] - data->output[i][0])); } printf("Saving network.\n"); fann_save(ann, "xor_float.net"); decimal_point = fann_save_to_fixed(ann, "xor_fixed.net"); fann_save_train_to_fixed(data, "xor_fixed.data", decimal_point); printf("Cleaning up.\n"); fann_destroy_train(data); fann_destroy(ann); ezxml_free(f1); return 0; }
/* arguments (all required): - data filename - topology, as number of neurons per layer separated by dashes - epochs (integer) - learning rate (0.0-1.0 float) - output filename */ int main(int argc, char **argv) { // Argument 1: data filename. const char *datafn = argv[1]; // Argument 2: topology. unsigned int layer_sizes[MAX_LAYERS]; unsigned int num_layers = 0; char *token = strtok(argv[2], "-"); while (token != NULL) { layer_sizes[num_layers] = atoi(token); ++num_layers; token = strtok(NULL, "-"); } // Argument 3: epoch count. unsigned int max_epochs = atoi(argv[3]); // Argument 4: learning rate. float learning_rate = atof(argv[4]); // Argument 5: output filename. const char *outfn = argv[5]; struct fann *ann; ann = fann_create_standard_array(num_layers, layer_sizes); // Misc parameters. fann_set_training_algorithm(ann, FANN_TRAIN_RPROP); fann_set_activation_steepness_hidden(ann, 0.5); fann_set_activation_steepness_output(ann, 0.5); fann_set_activation_function_hidden(ann, FANN_SIGMOID); fann_set_activation_function_output(ann, FANN_SIGMOID); //fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT); //fann_set_bit_fail_limit(ann, 0.01f); struct fann_train_data *data; data = fann_read_train_from_file(datafn); fann_init_weights(ann, data); fann_set_learning_rate(ann, learning_rate); fann_train_on_data( ann, data, max_epochs, 10, // epochs between reports DESIRED_ERROR ); printf("Testing network. %f\n", fann_test_data(ann, data)); fann_type *calc_out; for(unsigned int i = 0; i < fann_length_train_data(data); ++i) { calc_out = fann_run(ann, data->input[i]); } printf("RMSE = %f\n", sqrt(fann_get_MSE(ann))); fann_save(ann, outfn); fann_destroy_train(data); fann_destroy(ann); return 0; }
/*
 * setup_net: create and configure a FANN network sized to `data`
 * (data->num_input inputs, H_DIM hidden units, data->num_output outputs).
 * Which construction path is used depends on compile-time switches:
 * MIMO_FANN + OPTIMIZE selects one of three MIMO variants, otherwise a
 * plain (optionally SPARSE) standard net is built.  All paths end with the
 * same training setup: bit-fail stop (limit 0.01f), steepness 1, optional
 * weight init per INIT_WEIGHTS, RPROP or batch training per USE_RPROP.
 * NOTE(review): assumes H_DIM, and for OPTIMIZE>=2 layer_type/neuron_type,
 * are defined elsewhere in this file -- not visible here.
 */
struct fann * setup_net(struct fann_train_data * data) {
	struct fann *ann;
#if MIMO_FANN
#if OPTIMIZE == 0
	/* MIMO variant 0: ordinary 3-layer net, symmetric sigmoid activations. */
	ann = fann_create_standard( 3, data->num_input, H_DIM, data->num_output);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);
#endif
#if OPTIMIZE == 1
	/* MIMO variant 1: build the net from an explicit two-layer descriptor
	   (hidden + output), each neuron using the scalar RPROP symmetric-sigmoid
	   implementation.
	   NOTE(review): calloc return is not checked -- TODO confirm intended. */
	unsigned int i, j;
	struct fann_descr *descr=(struct fann_descr*) calloc(1, sizeof(struct fann_descr));
	fann_setup_descr(descr, 2, data->num_input);
	/* layer 0: hidden layer with H_DIM outputs per neuron descriptor */
	i=0;
	fann_setup_layer_descr(
			&(descr->layers_descr[i]),
			"connected_any_any",
			1,
			NULL
			);
	for (j=0; j< descr->layers_descr[i].num_neurons; j++)
	{
		fann_setup_neuron_descr(
				descr->layers_descr[i].neurons_descr+j,
				H_DIM,
				"scalar_rprop_sigmoid_symmetric",
				NULL
				);
	}
	/* layer 1: output layer with data->num_output outputs */
	i=1;
	fann_setup_layer_descr(
			&(descr->layers_descr[i]),
			"connected_any_any",
			1,
			NULL
			);
	for (j=0; j< descr->layers_descr[i].num_neurons; j++)
	{
		fann_setup_neuron_descr(
				descr->layers_descr[i].neurons_descr+j,
				data->num_output,
				"scalar_rprop_sigmoid_symmetric",
				NULL
				);
	}
	ann = fann_create_from_descr( descr );
#endif
#if OPTIMIZE >= 2
	/* MIMO variant 2+: typed creation from a layer-size array; layer_type and
	   neuron_type select the implementation. */
	{
		unsigned int layers[] = { data->num_input, H_DIM, data->num_output };
		/*char *type;
		asprintf(&type, "%s_%s_%s", vals(implementation), vals(algorithm), vals(activation));*/
		ann = fann_create_standard_array_typed(layer_type, neuron_type, 3, layers);
	}
#endif
#else /*MIMO_FANN*/
	/* Non-MIMO build: plain 3-layer net, optionally sparsely connected. */
#ifdef SPARSE
	ann = fann_create_sparse( SPARSE, 3, data->num_input, H_DIM, data->num_output);
#else
	ann = fann_create_standard( 3, data->num_input, H_DIM, data->num_output);
#endif
	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);
#endif /*MIMO_FANN*/
	/* Common training configuration for every construction path above. */
	fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
	fann_set_bit_fail_limit(ann, 0.01f);
	fann_set_activation_steepness_hidden(ann, 1);
	fann_set_activation_steepness_output(ann, 1);
	/* Optional weight initialization: 1 = random in [0,1], 2 = data-based. */
#if INIT_WEIGHTS == 1
	fann_randomize_weights(ann,0,1);
#endif
#if INIT_WEIGHTS == 2
	fann_init_weights(ann, data);
#endif
#ifdef USE_RPROP
	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);
#else
	fann_set_training_algorithm(ann, FANN_TRAIN_BATCH);
#endif
	return ann;
}