int main() { fann_type *calc_out; const unsigned int num_input = 2; const unsigned int num_output = 1; const unsigned int num_layers = 3; const unsigned int num_neurons_hidden = 9; const float desired_error = (const float) 0; const unsigned int max_epochs = 500000; const unsigned int epochs_between_reports = 1000; struct fann *ann; struct fann_train_data *data; unsigned int i = 0; unsigned int decimal_point; printf("Creating network.\n"); ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output); data = fann_read_train_from_file("osyslec_train.data"); fann_set_activation_steepness_hidden(ann, 1); fann_set_activation_steepness_output(ann, 1); fann_set_activation_function_hidden(ann, FANN_SIGMOID); fann_set_activation_function_output(ann, FANN_SIGMOID); fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT); fann_set_bit_fail_limit(ann, 0.01f); fann_set_training_algorithm(ann, FANN_TRAIN_RPROP); fann_init_weights(ann, data); printf("Training network.\n"); fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error); printf("Testing network. %f\n", fann_test_data(ann, data)); for(i = 0; i < fann_length_train_data(data); i++) { calc_out = fann_run(ann, data->input[i]); printf("GG test (%f,%f) -> %f, should be %f, difference=%f\n", data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0], fann_abs(calc_out[0] - data->output[i][0])); } printf("Saving network.\n"); fann_save(ann, "osyslec_train_float.net"); decimal_point = fann_save_to_fixed(ann, "osyslec_train_fixed.net"); fann_save_train_to_fixed(data, "osyslec_train_fixed.data", decimal_point); printf("Cleaning up.\n"); fann_destroy_train(data); fann_destroy(ann); return 0; }
int main( int argc, char** argv ) { fann_type *calc_out; unsigned int i; int ret = 0; struct fann *ann; struct fann_train_data *data; printf("Creating network.\n"); ann = fann_create_from_file("xor_float.net"); if(!ann) { printf("Error creating ann --- ABORTING.\n"); return 0; } fann_print_connections(ann); fann_print_parameters(ann); printf("Testing network.\n"); data = fann_read_train_from_file("5K.txt"); for(i = 0; i < fann_length_train_data(data); i++) { fann_reset_MSE(ann); fann_scale_input( ann, data->input[i] ); calc_out = fann_run( ann, data->input[i] ); fann_descale_output( ann, calc_out ); printf("Result %f original %f error %f\n", calc_out[0], data->output[i][0], (float) fann_abs(calc_out[0] - data->output[i][0])); } printf("Cleaning up.\n"); fann_destroy_train(data); fann_destroy(ann); return ret; }
void cunit_xor_test(void) { fann_type *calc_out = NULL; unsigned int i; int ret = 0; struct fann *ann = NULL; struct fann_train_data *data = NULL; #ifdef FIXEDFANN ann = fann_create_from_file("xor_fixed.net"); #else ann = fann_create_from_file("xor_float.net"); #endif CU_ASSERT_PTR_NOT_NULL_FATAL(ann); #ifdef FIXEDFANN data = fann_read_train_from_file("xor_fixed.data"); #else data = fann_read_train_from_file("xor.data"); #endif CU_ASSERT_PTR_NOT_NULL_FATAL(data); for(i = 0; i < fann_length_train_data(data); i++) { fann_reset_MSE(ann); calc_out = fann_test(ann, data->input[i], data->output[i]); CU_ASSERT_PTR_NOT_NULL_FATAL(calc_out); #ifdef FIXEDFANN /*printf("XOR test (%d, %d) -> %d, should be %d, difference=%f\n", data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0], (float) fann_abs(calc_out[0] - data->output[i][0]) / fann_get_multiplier(ann));*/ if((float) fann_abs(calc_out[0] - data->output[i][0]) / fann_get_multiplier(ann) > 0.2) { CU_FAIL("XOR test failed."); ret = -1; } #else /*printf("XOR test (%f, %f) -> %f, should be %f, difference=%f\n", data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0], (float) fann_abs(calc_out[0] - data->output[i][0]));*/ #endif } fann_destroy_train(data); fann_destroy(ann); }
int main(int argc, char **argv) { fann_type *calc_out; unsigned int i, j; struct fann *ann; struct fann_train_data *data; float error = 0.0; if (argc < 3) { fprintf(stderr, "Use: %s FANN_network.net patternsFile\n", argv[0]); exit(1); } printf("Openning ANN `%s'\n", argv[1]); ann = fann_create_from_file(argv[1]); if (!ann) { fprintf(stderr, "Error creating the ANN.\n"); return (1); } printf("Running ANN.\n"); data = fann_read_train_from_file(argv[2]); for(i = 0; i < fann_length_train_data(data); i++) { calc_out = fann_run(ann, data->input[i]); printf("ANN: %f ", calc_out[0]); printf("Expected: %f ", data->output[i][0]); printf("Error: %f ", (float) (data->output[i][0] -calc_out[0])); printf("Throttle_Effort: %f Brake_Effort: %f Current_Velocity: %f\n", // essa multiplicacao ocorre para desfazer o que o 'gerarEntrada.c' fez data->input[i][360-3]*100.0, data->input[i][360-2]*100.0, data->input[i][360-1]*5.0); error += (float) powf(fann_abs(calc_out[0] - data->output[i][0]),2); } printf("Test:: Squared Error: %f Mean Squared Error: %f\n", error, error/(fann_length_train_data(data)-1)); printf("Cleaning memory.\n"); fann_destroy_train(data); fann_destroy(ann); return (0); }
/* INTERNAL FUNCTION
   Helper function to update the MSE value and return a diff which takes
   symmetric functions into account.

   ann         - network whose MSE_value / num_bit_fail accumulators are updated
   neuron      - output neuron whose activation function decides the scaling
   neuron_diff - raw (desired - actual) difference for that neuron

   Returns the (possibly halved) difference, which callers feed back into
   the error propagation. */
fann_type fann_update_MSE(struct fann *ann, struct fann_neuron* neuron, fann_type neuron_diff)
{
    float neuron_diff2;

    switch (neuron->activation_function)
    {
        /* Symmetric activations are halved — presumably because their
         * output range is twice that of the asymmetric ones, which keeps
         * the accumulated MSE comparable across activation functions. */
        case FANN_LINEAR_PIECE_SYMMETRIC:
        case FANN_THRESHOLD_SYMMETRIC:
        case FANN_SIGMOID_SYMMETRIC:
        case FANN_SIGMOID_SYMMETRIC_STEPWISE:
        case FANN_ELLIOT_SYMMETRIC:
        case FANN_GAUSSIAN_SYMMETRIC:
        case FANN_SIN_SYMMETRIC:
        case FANN_COS_SYMMETRIC:
            neuron_diff /= (fann_type)2.0;
            break;
        /* Asymmetric activations: the difference is used unchanged. */
        case FANN_THRESHOLD:
        case FANN_LINEAR:
        case FANN_SIGMOID:
        case FANN_SIGMOID_STEPWISE:
        case FANN_GAUSSIAN:
        case FANN_GAUSSIAN_STEPWISE:
        case FANN_ELLIOT:
        case FANN_LINEAR_PIECE:
        case FANN_SIN:
        case FANN_COS:
        case FANN_MAXPOOLING:
            break;
    }

#ifdef FIXEDFANN
    /* In fixed point the stored values are scaled by ann->multiplier, so
     * divide it out before squaring to get the real-valued error. */
    neuron_diff2 =
        (neuron_diff / (float) ann->multiplier) * (neuron_diff / (float) ann->multiplier);
#else
    neuron_diff2 = (float) (neuron_diff * neuron_diff);
#endif

    /* Accumulate the squared error; the caller divides by num_MSE later. */
    ann->MSE_value += neuron_diff2;

    /*printf("neuron_diff %f = (%f - %f)[/2], neuron_diff2=%f, sum=%f, MSE_value=%f, num_MSE=%d\n", neuron_diff, *desired_output, neuron_value, neuron_diff2, last_layer_begin->sum, ann->MSE_value, ann->num_MSE); */

    /* Count outputs that miss the target by more than the bit-fail limit
     * (used by the FANN_STOPFUNC_BIT stop criterion). */
    if(fann_abs(neuron_diff) >= ann->bit_fail_limit)
    {
        ann->num_bit_fail++;
    }

    return neuron_diff;
}
int main() { const unsigned int num_input = 2; const unsigned int num_output = 1; const unsigned int num_layers = 3; const unsigned int num_neurons_hidden = 3; const float desired_error = (const float) 0.001; const unsigned int max_epochs = 500000; const unsigned int epochs_between_reports = 1000; unsigned int i; fann_type *calc_out; struct fann_train_data *data; struct fann *ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output); data = fann_read_train_from_file("xor.data"); fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC); fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC); fann_set_training_algorithm(ann, FANN_TRAIN_QUICKPROP); train_on_steepness_file(ann, "xor.data", max_epochs, epochs_between_reports, desired_error, (float) 1.0, (float) 0.1, (float) 20.0); fann_set_activation_function_hidden(ann, FANN_THRESHOLD_SYMMETRIC); fann_set_activation_function_output(ann, FANN_THRESHOLD_SYMMETRIC); for(i = 0; i != fann_length_train_data(data); i++) { calc_out = fann_run(ann, data->input[i]); printf("XOR test (%f, %f) -> %f, should be %f, difference=%f\n", data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0], (float) fann_abs(calc_out[0] - data->output[i][0])); } fann_save(ann, "xor_float.net"); fann_destroy(ann); fann_destroy_train(data); return 0; }
int main(int argc, char **argv) { fann_type *calc_out; unsigned int i, j; struct fann *ann; struct fann_train_data *data; if (argc < 2) { fprintf(stderr, "Use: %s arquivoTeste\n", argv[0]); exit(1); } printf("Abrindo a Rede `%s'\n", ARQ_RNA); ann = fann_create_from_file(ARQ_RNA); if (!ann) { fprintf(stderr, "Erro criando a RNA.\n"); return (1); } //fann_print_connections(ann); //fann_print_parameters(ann); printf("Testando a RNA.\n"); data = fann_read_train_from_file(argv[1]); for(i = 0; i < fann_length_train_data(data); i++) { fann_reset_MSE(ann); calc_out = fann_run(ann, data->input[i]); printf("Resultado: %f ", calc_out[0]); printf("Original: %f " , data->output[i][0]); printf("Erro: %f\n" , (float) fann_abs(calc_out[0] - data->output[i][0])); } printf("Limpando memoria.\n"); fann_destroy_train(data); fann_destroy(ann); return (0); }
int main() { printf("Reading XML.. .. ..\n"); ezxml_t f1 = ezxml_parse_file("test.xml"), classification, temp, algo, temp2; classification = ezxml_child(f1, "classification"); temp = ezxml_child(classification, "algorithm"); algo = ezxml_child(temp, "MultiLayerPerceptron"); const unsigned int num_input = atoi(ezxml_child(classification, "input")->txt); const unsigned int num_output = atoi(ezxml_child(classification, "output")->txt); const unsigned int num_layers = atoi(ezxml_child(classification, "numberOfLayers")->txt); const unsigned int num_neurons_hidden = atoi(ezxml_child(algo, "hiddenNeurons")->txt); const float desired_error = (const float) (atof(ezxml_child(algo, "desiredError")->txt)); const unsigned int max_epochs = atoi(ezxml_child(algo, "maxEpochs")->txt); const unsigned int epochs_between_reports = atoi(ezxml_child(algo, "epochsBetweenReports")->txt); fann_type *calc_out; struct fann *ann; struct fann_train_data *data; unsigned int i = 0; unsigned int decimal_point; printf("Creating network.\n"); ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output); data = fann_read_train_from_file(ezxml_child(classification, "datafile")->txt); fann_set_activation_steepness_hidden(ann, atoi(ezxml_child(algo, "hiddenActivationSteepness")->txt)); fann_set_activation_steepness_output(ann, atoi(ezxml_child(algo, "outputActivationSteepness")->txt)); fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC); fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC); temp2 = ezxml_child(algo, "trainStopFuction"); const char *stopFunc = temp2->txt; if(stopFunc == "FANN_STOPFUNC_BIT"){ fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT); } else { fann_set_train_stop_function(ann, FANN_STOPFUNC_MSE); } fann_set_bit_fail_limit(ann, 0.01f); fann_set_training_algorithm(ann, FANN_TRAIN_RPROP); fann_init_weights(ann, data); printf("Training network.\n"); fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error); 
printf("Testing network. %f\n", fann_test_data(ann, data)); for(i = 0; i < fann_length_train_data(data); i++) { calc_out = fann_run(ann, data->input[i]); printf("Test Results (%f,%f,%f) -> %f, should be %f, difference=%f\n", data->input[i][0], data->input[i][1], data->input[i][2], calc_out[0], data->output[i][0], fann_abs(calc_out[0] - data->output[i][0])); } printf("Saving network.\n"); fann_save(ann, "xor_float.net"); decimal_point = fann_save_to_fixed(ann, "xor_fixed.net"); fann_save_train_to_fixed(data, "xor_fixed.data", decimal_point); printf("Cleaning up.\n"); fann_destroy_train(data); fann_destroy(ann); ezxml_free(f1); return 0; }
int main(int argc, char **argv) { if(argc < 3) { printf("Usage: train_net <input.train> <output.net>\n"); exit(-1); } const unsigned int num_input = 2; const unsigned int num_output = 1; const unsigned int num_layers = 3; const unsigned int num_neurons_hidden = 8; const float desired_error = (const float) 0.000042; const unsigned int max_epochs = 500000; const unsigned int epochs_between_reports = 1000; struct fann *ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output); fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC); fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC); // fann_set_activation_steepness_hidden(ann, 1); // fann_set_activation_steepness_output(ann, 1); // fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC); // fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC); // fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT); fann_set_bit_fail_limit(ann, 0.01f); fann_set_training_algorithm(ann, FANN_TRAIN_RPROP); //fann_train_on_file(ann, argv[1], max_epochs, epochs_between_reports, desired_error); struct fann_train_data *data; data = fann_read_train_from_file(argv[1]); fann_init_weights(ann, data); printf("Training network on data from %s.\n", argv[1]); fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error); printf("Testing network. %f\n", fann_test_data(ann, data)); double error, errorSum = 0; unsigned int i = 0, size = fann_length_train_data(data); fann_type *calc_out; for(i = 0; i < size; i++) { calc_out = fann_run(ann, data->input[i]); error = fann_abs(calc_out[0] - data->output[i][0]) * 1000; printf("Distance test (%d dBm,%f%%) -> %f meters, should be %f meters, difference=%f meters\n", (int)(data->input[i][0] * 150 - 150), data->input[i][1], calc_out[0] * 1000, data->output[i][0] * 1000, error); errorSum += error; } printf("Average Error: %f\n", errorSum / size); fann_save(ann, argv[2]); fann_destroy(ann); return 0; }
// Loads a test set from the given file, reports the network's MSE over it,
// then prints each sample's predicted vs. expected output and the absolute
// difference. The data set is freed before returning; the member network
// (ptrNeuralNet) is left intact for further use.
void NeuralNet::runNet(char* ptrDataFileName){
    // NOTE(review): the result is not checked for NULL — a missing or
    // malformed file would make fann_test_data dereference a null pointer.
    struct fann_train_data *ptrDataTest = fann_read_train_from_file(ptrDataFileName);
    fann_reset_MSE(this->ptrNeuralNet);
    // fann_test_data accumulates the MSE read back just below.
    fann_test_data(this->ptrNeuralNet, ptrDataTest);
    printf("Mean Square Error: %f\n", fann_get_MSE(this->ptrNeuralNet));
    fann_type *calc_out;
    // Per-sample report: prediction, expected value, absolute difference.
    // (signed int loop index vs. unsigned length — fine for normal sizes)
    for(int i = 0; i < fann_length_train_data(ptrDataTest); i++){
        calc_out = fann_run(this->ptrNeuralNet, ptrDataTest->input[i]);
        cout << "Sample testing: "<< calc_out[0] << " " << ptrDataTest->output[i][0] << " " << fann_abs(calc_out[0] - ptrDataTest->output[i][0]) << endl;
    }
    fann_destroy_train(ptrDataTest);
}
int main() { fann_type *calc_out; const unsigned int num_input = 22500; const unsigned int num_output = 1; //const unsigned int num_layers = 4; const unsigned int num_layers = 4; /* this value can be changed to tweak the network */ const unsigned int num_neurons_hidden = 50; //const unsigned int num_neurons_hidden = 150; const float desired_error = (const float) 0.02; const unsigned int max_epochs = 15000; const unsigned int epochs_between_reports = 20; float learning_rate = .5; struct fann *ann; struct fann_train_data *data; int num_neurons = 0; unsigned int i = 0; unsigned int decimal_point; /* CREATING NETWORK */ ann = fann_create_shortcut(2, num_input, num_output); /* reading training data */ data = fann_read_train_from_file("training.data"); //fann_set_cascade_activation_steepness_hidden(ann, 1); //fann_set_cascade_activation_steepness_output(ann, 1); //fann_set_cascade_activation_steepnesses(ann, 1); fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC); fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC); fann_init_weights(ann, data); /* TRAINING NETWORK run x epochs at learn rate .y */ //fann_cascadetrain_on_data(ann, data, 2000, epochs_between_reports, .4); fann_set_learning_rate(ann, .7); fann_cascadetrain_on_data(ann, data, 2000, epochs_between_reports, .0002); //fann_train_on_data(ann, data, 500, epochs_between_reports, .002); /*fann_set_learning_rate(ann, .5); fann_cascadetrain_on_data(ann, data,5000, epochs_between_reports, .2); //fann_train_on_data(ann, data,50, epochs_between_reports, .2); fann_set_learning_rate(ann, .2); fann_cascadetrain_on_data(ann, data,1000, epochs_between_reports, .15); //fann_train_on_data(ann, data,100, epochs_between_reports, .15); fann_set_learning_rate(ann, .1); //fann_train_on_data(ann, data,5000, epochs_between_reports, .002); fann_cascadetrain_on_data(ann, data,200, epochs_between_reports, .00002); */ /* TESTING NETWORK */ printf("Testing network. 
%f\n", fann_test_data(ann, data)); for(i = 0; i < fann_length_train_data(data); i++) { calc_out = fann_run(ann, data->input[i]); printf("%f, should be %f, difference=%f\n", calc_out[0], data->output[i][0], fann_abs(calc_out[0] - data->output[i][0])); } /* SAVING NETWORK */ fann_save(ann, "image_spam.net"); /* CLEANING UP */ fann_destroy_train(data); fann_destroy(ann); return 0; }
int main(int argc, const char* argv[]) { if (argc < 2) { printf("Usage: ./dinneuro filename\n"); return -1; } //подготавливаем выборки if (csv2fann2(argv[1], 59, 50, 100, true)) { printf("Converted\n"); } //получим данные о количестве входных и выходных параметров int *params; const char * filename; const char * normfilename; filename = "data.data"; //filename = "scaling.data"; normfilename = "normalized.train"; params = getparams(filename); unsigned int num_threads = omp_get_thread_num(); float error; const unsigned int num_input = params[1]; const unsigned int num_output = params[2]; //printf("num_input=%d num_output=%d\n", num_input, num_output); const unsigned int num_layers = 4; //const unsigned int num_neurons_hidden = num_output; const unsigned int num_neurons_hidden = 5; const float desired_error = (const float) 0.0001; const unsigned int max_epochs = 5000; const unsigned int epochs_between_reports = 1000; struct fann_train_data * data = NULL; struct fann *ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_neurons_hidden, num_output); fann_set_activation_function_hidden(ann, FANN_LINEAR); fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC); fann_set_training_algorithm(ann, FANN_TRAIN_RPROP); //printf("test\n"); data = fann_read_train_from_file(filename); printf("Readed train from %s\n", filename); fann_set_scaling_params( ann, data, -1, /* New input minimum */ 1, /* New input maximum */ -1, /* New output minimum */ 1); /* New output maximum */ fann_scale_train( ann, data ); printf("Scaled\n"); //сохраним нормализованную обучающу выборку в файл fann_save_train(data, normfilename); printf("Saved scaled file %s\n", normfilename); unsigned int i; printf("Start learning...\n"); for(i = 1; i <= max_epochs; i++) { error = num_threads > 1 ? 
fann_train_epoch_irpropm_parallel(ann, data, num_threads) : fann_train_epoch(ann, data); //если ошибка обучения меньше или равно заданной - выходим из цикла обучения //if (error <= desired_error) { printf ("Desired error detected. Finishing teaching.\n"); break; } //если текущий счетчик делится без остатка на epochs_between_reports - пишем лог //if (i % epochs_between_reports == 0) { printf("Epochs %8d. Current error: %.10f\n", i, error); } } printf("End learning.\n"); printf("MSE = %f\n", fann_get_MSE(ann)); //fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error); fann_destroy_train( data ); fann_save(ann, "scaling.net"); fann_destroy(ann); //проверка printf("Testing...\n"); fann_type *calc_out; //printf("fann_length_train_data=%d\n",fann_length_train_data(data)); printf("Creating network.\n"); ann = fann_create_from_file("scaling.net"); if(!ann) { printf("Error creating ann --- ABORTING.\n"); return 0; } //печатаем параметры сети //fann_print_connections(ann); //fann_print_parameters(ann); printf("Testing network.\n"); data = fann_read_train_from_file(filename); for(i = 0; i < fann_length_train_data(data); i++) { fann_reset_MSE(ann); fann_scale_input( ann, data->input[i] ); calc_out = fann_run( ann, data->input[i] ); fann_descale_output( ann, calc_out ); printf("Result %f original %f error %f or %.2f%%\n", calc_out[0], data->output[i][0], (float) fann_abs(calc_out[0] - data->output[i][0]), (100*(float) fann_abs(calc_out[0] - data->output[i][0]))/(float)calc_out[0]); } fann_destroy_train( data ); fann_destroy(ann); return 0; }
int main() { fann_type *calc_out; unsigned int i; int ret = 0; struct fann *ann; struct fann_train_data *data; printf("Creating network.\n"); #ifdef FIXEDFANN ann = fann_create_from_file("./lib/fann/wc2fann/web_comp_fixed.net"); #else ann = fann_create_from_file("./lib/fann/wc2fann/web_comp_config.net"); #endif if(!ann) { printf("Error creating ann --- ABORTING.\n"); return -1; } fann_print_connections(ann); fann_print_parameters(ann); printf("Testing network.\n"); #ifdef FIXEDFANN data = fann_read_train_from_file("./lib/fann/wc2fann/web_comp_fixed.data"); #else data = fann_read_train_from_file("./lib/fann/wc2fann/data/selection.test"); #endif for(i = 0; i < fann_length_train_data(data); i++) { fann_reset_MSE(ann); calc_out = fann_test(ann, data->input[i], data->output[i]); #ifdef FIXEDFANN printf("Web Comp test (%d, %d) -> %d, should be %d, difference=%f\n", data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0], (float) fann_abs(calc_out[0] - data->output[i][0]) / fann_get_multiplier(ann)); if((float) fann_abs(calc_out[0] - data->output[i][0]) / fann_get_multiplier(ann) > 0.2) { printf("Test failed\n"); ret = -1; } #else printf("Web Comp test (%f, %f) -> %f, should be %f, difference=%f\n", data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0], (float) fann_abs(calc_out[0] - data->output[i][0])); //Web_Comp double answer = fann_abs(calc_out[0] - data->output[0][0]); FILE *output; output = fopen("./lib/fann/wc2fann/data/Web_Comp_Answer.txt","w"); fprintf(output, "%f", answer); fclose(output); #endif } printf("Cleaning up.\n"); fann_destroy_train(data); fann_destroy(ann); return ret; }
/* Runs one forward pass of the network on a single input vector and returns
 * a pointer to the network-owned output array (ann->output), which stays
 * valid until the next call that overwrites it.
 *
 * ann   - network to execute; every neuron's value/sum is overwritten
 * input - array of ann->num_input values copied into the input layer
 *
 * The pass walks each layer after the input layer, computing every neuron's
 * weighted sum over its incoming connections and applying its activation
 * function. The inner products are manually unrolled four at a time; the
 * switch on (num_connections & 3) consumes the remainder first, with
 * intentional case fallthrough. */
FANN_EXTERNAL fann_type *FANN_API fann_run(struct fann * ann, fann_type * input)
{
    struct fann_neuron *neuron_it, *last_neuron, *neurons, **neuron_pointers;
    unsigned int i, num_connections, num_input, num_output;
    fann_type neuron_sum, *output;
    fann_type *weights;
    struct fann_layer *layer_it, *last_layer;
    unsigned int activation_function;
    fann_type steepness;

    /* store some variables locally for fast access */
    struct fann_neuron *first_neuron = ann->first_layer->first_neuron;

#if 0
    /* disabled SSE implementation of the inner products below */
    __m128 xmm_weight, xmm_neurons, xmm_sum;
#endif

#ifdef FIXEDFANN
    int multiplier = ann->multiplier;
    unsigned int decimal_point = ann->decimal_point;

    /* values used for the stepwise linear sigmoid function */
    fann_type r1 = 0, r2 = 0, r3 = 0, r4 = 0, r5 = 0, r6 = 0;
    fann_type v1 = 0, v2 = 0, v3 = 0, v4 = 0, v5 = 0, v6 = 0;

    /* cache of the previous neuron's activation setup so the stepwise
     * parameters are only recomputed when function or steepness changes */
    fann_type last_steepness = 0;
    unsigned int last_activation_function = 0;
#else
    fann_type max_sum;
#endif

    /* first set the input */
    num_input = ann->num_input;
    for(i = 0; i != num_input; i++)
    {
#ifdef FIXEDFANN
        /* in fixed point a magnitude above the multiplier (i.e. above 1.0
         * in real terms) risks overflowing intermediate sums */
        if(fann_abs(input[i]) > multiplier)
        {
            printf
                ("Warning input number %d is out of range -%d - %d with value %d, integer overflow may occur.\n",
                 i, multiplier, multiplier, input[i]);
        }
#endif
        first_neuron[i].value = input[i];
    }
    /* Set the bias neuron in the input layer */
#ifdef FIXEDFANN
    (ann->first_layer->last_neuron - 1)->value = multiplier;
#else
    (ann->first_layer->last_neuron - 1)->value = 1;
#endif

    last_layer = ann->last_layer;
    for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
    {
        last_neuron = layer_it->last_neuron;
        for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
        {
            if(neuron_it->first_con == neuron_it->last_con)
            {
                /* bias neurons: no incoming connections, value is the
                 * constant 1 (the multiplier in fixed point) */
#ifdef FIXEDFANN
                neuron_it->value = multiplier;
#else
                neuron_it->value = 1;
#endif
                continue;
            }

            activation_function = neuron_it->activation_function;
            steepness = neuron_it->activation_steepness;

            neuron_sum = 0;
            num_connections = neuron_it->last_con - neuron_it->first_con;
            weights = ann->weights + neuron_it->first_con;

            if(ann->connection_rate >= 1)
            {
                /* fully connected: source values are a contiguous run of
                 * neurons (whole net so far for shortcut topologies,
                 * otherwise just the previous layer) */
                if(ann->network_type == FANN_NETTYPE_SHORTCUT)
                {
                    neurons = ann->first_layer->first_neuron;
                }
                else
                {
                    neurons = (layer_it - 1)->first_neuron;
                }

                /* unrolled loop start */
                i = num_connections & 3;    /* same as modulo 4 */
                switch (i)
                {
                    case 3:
                        neuron_sum += fann_mult(weights[2], neurons[2].value);
                        /* fallthrough */
                    case 2:
                        neuron_sum += fann_mult(weights[1], neurons[1].value);
                        /* fallthrough */
                    case 1:
                        neuron_sum += fann_mult(weights[0], neurons[0].value);
                        /* fallthrough */
                    case 0:
                        break;
                }

#if 0
                xmm_sum = _mm_setzero_ps();
                for(; i != num_connections; i += 4)
                {
                    xmm_weight = _mm_loadu_ps(weights + i);
                    xmm_neurons = _mm_set_ps(neurons[i + 3].value,neurons[i + 2].value,
                                             neurons[i + 1].value,neurons[i].value);
                    xmm_weight = _mm_mul_ps(xmm_weight,xmm_neurons);
                    xmm_sum =_mm_add_ps(xmm_sum,xmm_weight);
                }
                neuron_sum += xmm_sum.m128_f32[3] + xmm_sum.m128_f32[2] +
                              xmm_sum.m128_f32[1] + xmm_sum.m128_f32[0];
#else
                for(; i != num_connections; i += 4)
                {
                    neuron_sum +=
                        fann_mult(weights[i], neurons[i].value) +
                        fann_mult(weights[i + 1], neurons[i + 1].value) +
                        fann_mult(weights[i + 2], neurons[i + 2].value) +
                        fann_mult(weights[i + 3], neurons[i + 3].value);
                }
#endif
                /* unrolled loop end */

                /*
                 * for(i = 0;i != num_connections; i++){
                 * printf("%f += %f*%f, ", neuron_sum, weights[i], neurons[i].value);
                 * neuron_sum += fann_mult(weights[i], neurons[i].value);
                 * }
                 */
            }
            else
            {
                /* sparse network: source neurons are reached through the
                 * per-connection pointer table */
                neuron_pointers = ann->connections + neuron_it->first_con;

                i = num_connections & 3;    /* same as modulo 4 */
                switch (i)
                {
                    case 3:
                        neuron_sum += fann_mult(weights[2], neuron_pointers[2]->value);
                        /* fallthrough */
                    case 2:
                        neuron_sum += fann_mult(weights[1], neuron_pointers[1]->value);
                        /* fallthrough */
                    case 1:
                        neuron_sum += fann_mult(weights[0], neuron_pointers[0]->value);
                        /* fallthrough */
                    case 0:
                        break;
                }

#if 0
                xmm_sum = _mm_setzero_ps();
                for(; i != num_connections; i += 4)
                {
                    xmm_weight = _mm_loadu_ps(weights + i);
                    xmm_neurons = _mm_set_ps(neuron_pointers[i + 3]->value,neuron_pointers[i + 2]->value,
                                             neuron_pointers[i + 1]->value,neuron_pointers[i + 0]->value);
                    xmm_weight = _mm_mul_ps(xmm_weight,xmm_neurons);
                    xmm_sum =_mm_add_ps(xmm_sum,xmm_weight);
                }
                neuron_sum += xmm_sum.m128_f32[3] + xmm_sum.m128_f32[2] +
                              xmm_sum.m128_f32[1] + xmm_sum.m128_f32[0];
#else
                for(; i != num_connections; i += 4)
                {
                    neuron_sum +=
                        fann_mult(weights[i], neuron_pointers[i]->value) +
                        fann_mult(weights[i + 1], neuron_pointers[i + 1]->value) +
                        fann_mult(weights[i + 2], neuron_pointers[i + 2]->value) +
                        fann_mult(weights[i + 3], neuron_pointers[i + 3]->value);
                }
#endif
            }

#ifdef FIXEDFANN
            neuron_it->sum = fann_mult(steepness, neuron_sum);

            /* reload the stepwise breakpoints only when the activation
             * setup differs from the previous neuron's */
            if(activation_function != last_activation_function || steepness != last_steepness)
            {
                switch (activation_function)
                {
                    case FANN_SIGMOID:
                    case FANN_SIGMOID_STEPWISE:
                        r1 = ann->sigmoid_results[0];
                        r2 = ann->sigmoid_results[1];
                        r3 = ann->sigmoid_results[2];
                        r4 = ann->sigmoid_results[3];
                        r5 = ann->sigmoid_results[4];
                        r6 = ann->sigmoid_results[5];
                        v1 = ann->sigmoid_values[0] / steepness;
                        v2 = ann->sigmoid_values[1] / steepness;
                        v3 = ann->sigmoid_values[2] / steepness;
                        v4 = ann->sigmoid_values[3] / steepness;
                        v5 = ann->sigmoid_values[4] / steepness;
                        v6 = ann->sigmoid_values[5] / steepness;
                        break;
                    case FANN_SIGMOID_SYMMETRIC:
                    case FANN_SIGMOID_SYMMETRIC_STEPWISE:
                        r1 = ann->sigmoid_symmetric_results[0];
                        r2 = ann->sigmoid_symmetric_results[1];
                        r3 = ann->sigmoid_symmetric_results[2];
                        r4 = ann->sigmoid_symmetric_results[3];
                        r5 = ann->sigmoid_symmetric_results[4];
                        r6 = ann->sigmoid_symmetric_results[5];
                        v1 = ann->sigmoid_symmetric_values[0] / steepness;
                        v2 = ann->sigmoid_symmetric_values[1] / steepness;
                        v3 = ann->sigmoid_symmetric_values[2] / steepness;
                        v4 = ann->sigmoid_symmetric_values[3] / steepness;
                        v5 = ann->sigmoid_symmetric_values[4] / steepness;
                        v6 = ann->sigmoid_symmetric_values[5] / steepness;
                        break;
                    case FANN_THRESHOLD:
                        break;
                }
            }

            switch (activation_function)
            {
                case FANN_SIGMOID:
                case FANN_SIGMOID_STEPWISE:
                    neuron_it->value =
                        (fann_type) fann_stepwise(v1, v2, v3, v4, v5, v6,
                                                  r1, r2, r3, r4, r5, r6,
                                                  0, multiplier, neuron_sum);
                    break;
                case FANN_SIGMOID_SYMMETRIC:
                case FANN_SIGMOID_SYMMETRIC_STEPWISE:
                    neuron_it->value =
                        (fann_type) fann_stepwise(v1, v2, v3, v4, v5, v6,
                                                  r1, r2, r3, r4, r5, r6,
                                                  -multiplier, multiplier, neuron_sum);
                    break;
                case FANN_THRESHOLD:
                    neuron_it->value = (fann_type) ((neuron_sum < 0) ? 0 : multiplier);
                    break;
                case FANN_THRESHOLD_SYMMETRIC:
                    neuron_it->value = (fann_type) ((neuron_sum < 0) ? -multiplier : multiplier);
                    break;
                case FANN_LINEAR:
                    neuron_it->value = neuron_sum;
                    break;
                case FANN_LINEAR_PIECE:
                    neuron_it->value = (fann_type)((neuron_sum < 0) ? 0 : (neuron_sum > multiplier) ? multiplier : neuron_sum);
                    break;
                case FANN_LINEAR_PIECE_SYMMETRIC:
                    neuron_it->value = (fann_type)((neuron_sum < -multiplier) ? -multiplier : (neuron_sum > multiplier) ? multiplier : neuron_sum);
                    break;
                case FANN_ELLIOT:
                case FANN_ELLIOT_SYMMETRIC:
                case FANN_GAUSSIAN:
                case FANN_GAUSSIAN_SYMMETRIC:
                case FANN_GAUSSIAN_STEPWISE:
                case FANN_SIN_SYMMETRIC:
                case FANN_COS_SYMMETRIC:
                    /* these activations have no fixed-point implementation */
                    fann_error((struct fann_error *) ann, FANN_E_CANT_USE_ACTIVATION);
                    break;
            }
            last_steepness = steepness;
            last_activation_function = activation_function;
#else
            neuron_sum = fann_mult(steepness, neuron_sum);

            /* clamp the steepened sum to +/-150/steepness — presumably to
             * keep the exponential-based activations in range */
            max_sum = 150/steepness;
            if(neuron_sum > max_sum)
                neuron_sum = max_sum;
            else if(neuron_sum < -max_sum)
                neuron_sum = -max_sum;

            neuron_it->sum = neuron_sum;

            fann_activation_switch(activation_function, neuron_sum, neuron_it->value);
#endif
        }
    }

    /* set the output: copy the last layer's values into ann->output */
    output = ann->output;
    num_output = ann->num_output;
    neurons = (ann->last_layer - 1)->first_neuron;
    for(i = 0; i != num_output; i++)
    {
        output[i] = neurons[i].value;
    }
    return ann->output;
}
int main(int argc, char *argv[]) { struct fann_train_data *dadosTreino, *dadosTeste; struct fann *ANN; fann_type *ANN_Answers; int *layers, i, j, aux; chromosome chromo; float erro = 0.0; checkArgs(argc, argv); buildChromosome(argv, &chromo); checkDatasetFiles(); dadosTreino = fann_read_train_from_file(nomeArqTreino); layers = (int *) calloc(2+chromo.qntCamadasOcultas, sizeof(int)); layers[0] = qntNeuroniosEntrada; layers[2+chromo.qntCamadasOcultas-1] = qntNeuroniosSaida; aux = chromo.neurOcultos; for (i=1; i < 2+chromo.qntCamadasOcultas-1 ; i++) { layers[i] = aux; aux = aux/2; } // CRIANDO A RNA: ANN = fann_create_standard_array(2+chromo.qntCamadasOcultas, layers); // TREINO fann_set_learning_rate(ANN, chromo.learnRate); fann_set_learning_momentum(ANN, chromo.moment); fann_set_activation_function_hidden( ANN, chromo.fcOculta ); fann_set_activation_function_output( ANN, chromo.fcSaida ); fann_set_training_algorithm(ANN, chromo.algAprend ); if (fann_get_training_algorithm(ANN) == FANN_TRAIN_QUICKPROP) fann_set_quickprop_decay(ANN, chromo.decay); // Em python, o treino ficava entre um try. // Se desse erro, escrevia "Resultado: 999.0" e exit fann_train_on_data(ANN, dadosTreino, chromo.epocasTreino, 50, desiredError); fann_destroy_train(dadosTreino); // TESTES: dadosTeste = fann_read_train_from_file( nomeArqValidacao); // Em python, o teste também ficava entre um try. // Se desse erro, escrevia "Resultado: 999.0" e exit for(i = 0; i < fann_length_train_data(dadosTeste); i++) { ANN_Answers = fann_run(ANN, dadosTeste->input[i]); if (ANN_Answers == NULL) { printf("Resultado: 999.0\n"); exit(2); } for (j=0; j < qntNeuroniosSaida; j++) erro += (float) powf(fann_abs(ANN_Answers[j] - dadosTeste->output[i][j]), 2); } printf("Resultado: %f\n", erro/(fann_length_train_data(dadosTeste)-1)); fann_destroy_train(dadosTeste); saveANN(argc, argv, ANN); fann_destroy(ANN); }
int main() { fann_type *calc_out; unsigned int i; int ret = 0; struct fann *ann; struct fann_train_data *data; printf("Creating network.\n"); #ifdef FIXEDFANN ann = fann_create_from_file("digitde_validation_fixed.net"); #else ann = fann_create_from_file("digitde_validation_float.net"); #endif if(!ann) { printf("Error creating ann --- ABORTING.\n"); return -1; } fann_print_connections(ann); fann_print_parameters(ann); printf("Testing network.\n"); #ifdef FIXEDFANN data = fann_read_train_from_file("digitde_validation_fixed.data"); #else data = fann_read_train_from_file("digitde_validation.data"); #endif for(i = 0; i < fann_length_train_data(data); i++) { fann_reset_MSE(ann); calc_out = fann_test(ann, data->input[i], data->output[i]); #ifdef FIXEDFANN printf("GG test (%d, %d) -> %d, should be %d, difference=%f\n", data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0], (float) fann_abs(calc_out[0] - data->output[i][0]) / fann_get_multiplier(ann)); if((float) fann_abs(calc_out[0] - data->output[i][0]) / fann_get_multiplier(ann) > 0.2) { printf("Test failed\n"); ret = -1; } #else printf("GG test (%f, %f) -> %f, should be %f, difference=%f\n", data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0], (float) fann_abs(calc_out[0] - data->output[i][0])); #endif } printf("Cleaning up.\n"); fann_destroy_train(data); fann_destroy(ann); return ret; }
int main() { fann_type *calc_out; unsigned int i; int ret = 0; int max_expected_idx=0,max_predicted_idx=0,count=0; struct fann *ann; struct fann_train_data *data; printf("Creating network.\n"); #ifdef FIXEDFANN ann = fann_create_from_file("mnist_fixed1.net"); #else ann = fann_create_from_file("mnist_float.net"); #endif if(!ann) { printf("Error creating ann --- ABORTING.\n"); return -1; } fann_print_connections(ann); fann_print_parameters(ann); printf("Testing network.\n"); #ifdef FIXEDFANN data = fann_read_train_from_file("mnist.data"); #else data = fann_read_train_from_file("mnist.data"); #endif for(i = 0; i < fann_length_train_data(data); i++) { fann_reset_MSE(ann); calc_out = fann_test(ann, data->input[i], data->output[i]); #ifdef FIXEDFANN printf("XOR test (%d, %d) -> %d, should be %d, difference=%f\n", data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0], (float) fann_abs(calc_out[0] - data->output[i][0]) / fann_get_multiplier(ann)); if((float) fann_abs(calc_out[0] - data->output[i][0]) / fann_get_multiplier(ann) > 0.2) { printf("Test failed\n"); ret = -1; } #else max_expected_idx = 0; max_predicted_idx = 0; for(int k=1;k<10;k++) { if(data->output[i][max_expected_idx] < data->output[i][k]) { max_expected_idx = k; } if(calc_out[max_predicted_idx] < calc_out[k]) { max_predicted_idx = k; } } printf("MNIST test %d Expected %d , returned=%d\n", i,max_expected_idx, max_predicted_idx); if(max_expected_idx == max_predicted_idx) count++; #endif } printf("Cleaning up.\n"); fann_destroy_train(data); fann_destroy(ann); printf("Number correct=%d\n",count); return ret; }