Example #1
FANN_EXTERNAL void FANN_API fann_train_on_data(struct fann *ann, struct fann_train_data *data,
											   unsigned int max_epochs,
											   unsigned int epochs_between_reports,
											   float desired_error)
{
	float error;
	unsigned int i;
	int desired_error_reached;

#ifdef DEBUG
	printf("Training with %s\n", FANN_TRAIN_NAMES[ann->training_algorithm]);
#endif

	if(epochs_between_reports && ann->callback == NULL)
	{
		printf("Max epochs %8d. Desired error: %.10f.\n", max_epochs, desired_error);
	}

	for(i = 1; i <= max_epochs; i++)
	{
		/*
		 * train 
		 */
		error = fann_train_epoch(ann, data);
		desired_error_reached = fann_desired_error_reached(ann, desired_error);

		/*
		 * print current output 
		 */
		if(epochs_between_reports &&
		   (i % epochs_between_reports == 0 || i == max_epochs || i == 1 ||
			desired_error_reached == 0))
		{
			if(ann->callback == NULL)
			{
				printf("Epochs     %8d. Current error: %.10f. Bit fail %d.\n", i, error,
					   ann->num_bit_fail);
			}
			else if(((*ann->callback)(ann, data, max_epochs, epochs_between_reports, 
									  desired_error, i)) == -1)
			{
				/*
				 * you can break the training by returning -1 
				 */
				break;
			}
		}

		if(desired_error_reached == 0)
			break;
	}
}
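For reference, a minimal caller of fann_train_on_data() could look like the sketch below; the 2-3-1 topology, the training file name and the stopping parameters are illustrative assumptions, not taken from the example above.

#include "fann.h"

int main(void)
{
	/* illustrative 2-3-1 network and training file; adjust for your data */
	struct fann *ann = fann_create_standard(3, 2, 3, 1);
	struct fann_train_data *data = fann_read_train_from_file("xor.data");

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

	/* at most 1000 epochs, report every 100, stop when the MSE drops below 0.001 */
	fann_train_on_data(ann, data, 1000, 100, 0.001f);

	fann_destroy_train(data);
	fann_destroy(ann);
	return 0;
}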
Example #2
void testOneEpochTrain(fann *ann, fann_train_data* train, unsigned int trainingAlgorithm, const std::string &header)
{
  printf("TEST: One Epoch %20s ", header.c_str());
  bool passed = true;

  fann *gpunn = fann_copy(ann);
  fann *cpunn = fann_copy(ann);
  gpunn->training_algorithm = (fann_train_enum)trainingAlgorithm;
  cpunn->training_algorithm = (fann_train_enum)trainingAlgorithm;

  gpuann_fann_train_on_data(gpunn, train, 1);

  fann_train_epoch(cpunn, train);

  fann_type *cpuValuesArray = (fann_type *)malloc(cpunn->total_neurons * sizeof(fann_type));
  fann_type *gpuValuesArray = (fann_type *)malloc(cpunn->total_neurons * sizeof(fann_type));  

  fann *tmpnn = cpunn;
  struct fann_neuron * last_neuron = (tmpnn->last_layer - 1)->last_neuron;
  fann_neuron *neuronsArray = tmpnn->first_layer->first_neuron;
  struct fann_neuron * neuron_it   = tmpnn->first_layer->first_neuron;

  for(; neuron_it != last_neuron; neuron_it++)
  {
    unsigned int currentNeuronShift  = neuron_it - neuronsArray;
    cpuValuesArray[currentNeuronShift] = neuron_it->value;
  }

  tmpnn = gpunn;
  last_neuron = (tmpnn->last_layer - 1)->last_neuron;
  neuronsArray = tmpnn->first_layer->first_neuron;
  neuron_it   = tmpnn->first_layer->first_neuron;

  for(; neuron_it != last_neuron; neuron_it++)
  {
    unsigned int currentNeuronShift  = neuron_it - neuronsArray;
    gpuValuesArray[currentNeuronShift] = neuron_it->value;
  }

  passed &= isAlmostSameArrays(gpuValuesArray, cpuValuesArray, cpunn->total_neurons, true, "VALUES:");
  passed &= isAlmostSameArrays(gpunn->weights, cpunn->weights, cpunn->total_connections, true, "WEIGHTS:");

  free(cpuValuesArray);
  free(gpuValuesArray);

  fann_destroy(gpunn);
  fann_destroy(cpunn);

  if(passed)
    printf("PASSED\n");
  else
    printf("FAILED\n");
}
Example #3
void trainMethodsSpeedTestCPU(fann *ann, fann_train_data* train, unsigned int trainingAlgorithm, unsigned int epochCount)
{
    fann *cpunn = fann_copy(ann);
    cpunn->training_algorithm = (fann_train_enum)trainingAlgorithm;

    {
        clock_t start = clock();

        for(unsigned int i = 0; i < epochCount; ++i)
            fann_train_epoch(cpunn, train);

        clock_t ends = clock();
        printf("%10.5f ", (double) (ends - start) / CLOCKS_PER_SEC * 1000.);
    }

    fann_destroy(cpunn);
}
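A driver for this benchmark would typically loop over the algorithms of interest. Below is a minimal sketch; the helper name, the epoch count of 100 and the assumption that ann and train were prepared by the caller (e.g. via fann_create_standard() and fann_read_train_from_file()) are illustrative, not taken from the example.

#include "fann.h"

// Hypothetical driver: time the classic training algorithms for 100 epochs each.
// Assumes trainMethodsSpeedTestCPU() from the example above is visible.
void trainMethodsSpeedTestAllCPU(fann *ann, fann_train_data *train)
{
    for (unsigned int alg = FANN_TRAIN_INCREMENTAL; alg <= FANN_TRAIN_QUICKPROP; ++alg)
        trainMethodsSpeedTestCPU(ann, train, alg, 100);
}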
Example #4
void train_on_steepness_file(struct fann *ann, char *filename,
							 unsigned int max_epochs, unsigned int epochs_between_reports,
							 float desired_error, float steepness_start,
							 float steepness_step, float steepness_end)
{
	float error;
	unsigned int i;

	struct fann_train_data *data = fann_read_train_from_file(filename);

	if(epochs_between_reports)
	{
		printf("Max epochs %8d. Desired error: %.10f\n", max_epochs, desired_error);
	}

	fann_set_activation_steepness_hidden(ann, steepness_start);
	fann_set_activation_steepness_output(ann, steepness_start);
	for(i = 1; i <= max_epochs; i++)
	{
		/* train */
		error = fann_train_epoch(ann, data);

		/* print current output */
		if(epochs_between_reports &&
		   (i % epochs_between_reports == 0 || i == max_epochs || i == 1 || error < desired_error))
		{
			printf("Epochs     %8d. Current error: %.10f\n", i, error);
		}

		if(error < desired_error)
		{
			steepness_start += steepness_step;
			if(steepness_start <= steepness_end)
			{
				printf("Steepness: %f\n", steepness_start);
				fann_set_activation_steepness_hidden(ann, steepness_start);
				fann_set_activation_steepness_output(ann, steepness_start);
			}
			else
			{
				break;
			}
		}
	}
	fann_destroy_train(data);
}
Example #5
int main(int argc, char *argv[])
{
	int i = 0;
	const unsigned int width = 10;
	const unsigned int height = 10;
	const unsigned int num_dimensions = 3;
	
	const float desired_error = (const float) 0.001;
	const unsigned int max_epochs = 500000;
	const unsigned int epochs_between_reports = 1000;

	struct fann *ann = fann_create_som(width, height, num_dimensions);

	ann->som_params->som_topology = FANN_SOM_TOPOLOGY_HEXAGONAL;
	ann->som_params->som_neighborhood = FANN_SOM_NEIGHBORHOOD_DISTANCE;
	ann->som_params->som_learning_decay	= FANN_SOM_LEARNING_DECAY_LINEAR;
	ann->som_params->som_learning_rate = 0.01;
	ann->som_params->som_learning_rate_constant = 0.01;

	/* Example using init_weights */
	struct fann_train_data *data = fann_read_train_from_file("som_train.data");

	for (i = 1; i < argc; i++) {
		if (strcmp(argv[i], "-init") == 0)
			fann_init_weights_som(ann, data);
		else if (strcmp(argv[i], "-epoch") == 0) {
			fann_train_epoch(ann, data);
			fann_destroy_train(data);
			fann_destroy(ann);
			return EXIT_SUCCESS;
		}
	}

	/* Example without init_weights (random initialization)*/
	fann_train_on_file(ann, "som_train.data", max_epochs, epochs_between_reports, desired_error);

	/*fann_save(ann, "som_train.net"); */

	fann_destroy_train(data);
	fann_destroy(ann);

	printf("\n");
	return 0;
}
Example #6
/*
 * Build the best possible .net file (trained network), based on tests run
 * during training, for the number of hidden neurons chosen as input.
 */
void train(struct fann *ann, char *trainFile, char *testFile, char *netFile, unsigned int max_epochs, unsigned int epochs_between_reports, float desired_error, const unsigned int num_neurons_hidden) {

	struct fann_train_data *trainData, *testData;
	struct fann *annBest = fann_copy(ann);
	float error;
	unsigned int i;
	char buffer[1024];
	float testError = 1;
	float testErrorBest = 1;

	trainData = fann_read_train_from_file(trainFile);
	testData = fann_read_train_from_file(testFile);

	for(i = 1; i <= max_epochs; i++){

		fann_shuffle_train_data(trainData); //shuffle the training data
		error = fann_train_epoch(ann, trainData); //run one epoch; error is the training MSE

		//every epochs_between_reports epochs, or once the desired error is reached
		if(i % epochs_between_reports == 0 || error < desired_error){

			fann_test_data(ann, testData); //evaluate the network on the test data

			testError = fann_get_MSE(ann);
			if (testError < testErrorBest) {
				testErrorBest = testError;
				fann_destroy(annBest); //free the previous best copy before replacing it
				annBest = fann_copy(ann);
				printf("Epochs     %8d; trainError : %f; testError : %f;\n", i, error, testError);
				sprintf(buffer, "%s_%u_%u.net", netFile, num_neurons_hidden, i);
				fann_save(annBest, buffer);
			}
		}
		if(error < desired_error){
			break;
		}
	}
	sprintf(buffer, "%s_%u.net", netFile, num_neurons_hidden);
	fann_save(annBest, buffer);
	fann_destroy(annBest);
	fann_destroy_train(trainData);
	fann_destroy_train(testData);
}
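A minimal caller of this helper might look like the sketch below; the layer sizes, file names and hyper-parameters are illustrative assumptions, not taken from the example.

int main(void)
{
	const unsigned int num_neurons_hidden = 16; /* illustrative */
	struct fann *ann = fann_create_standard(3, 10, num_neurons_hidden, 2); /* illustrative layer sizes */

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

	/* report every 5 epochs; stop at MSE 0.001 or after at most 1000 epochs */
	train(ann, "train.data", "test.data", "model", 1000, 5, 0.001f, num_neurons_hidden);

	fann_destroy(ann);
	return 0;
}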
Example #7
void testOneEpochParallelTrain(fann *ann, fann_train_data* train, unsigned int trainingAlgorithm, const std::string &header)
{
  printf("TEST: One Epoch Parallel %20s ", header.c_str());
  bool passed = true;

  fann *gpunn = fann_copy(ann);
  fann *cpunn = fann_copy(ann);
  gpunn->training_algorithm = (fann_train_enum)trainingAlgorithm;
  cpunn->training_algorithm = (fann_train_enum)trainingAlgorithm;

  gpuann_fann_parallel_train_on_data(gpunn, train, 1);

  fann_train_epoch(cpunn, train);

  passed &= isAlmostSameArrays(gpunn->weights, cpunn->weights, cpunn->total_connections, true, "WEIGHTS:");

  fann_destroy(gpunn);
  fann_destroy(cpunn);

  if(passed)
    printf("PASSED\n");
  else
    printf("FAILED\n");
}
Example #8
int main ( int argc, char **argv )
{
    srand(time(NULL));
    if ( argc<=1 )
    {
        //      printf ( "neuro num\r\n" );
        //     exit ( 0 );
    }

    if (argc>2)
    {
        //desired_error=atof(argv[2]);
        numn=atoi(argv[1]);
        l1n=atoi(argv[2]);
        if (argc>3)
            l2n=atoi(argv[3]);
        if (argc>4)
            l3n=atoi(argv[4]);
        if (argc>5)
            l4n=atoi(argv[5]);
        if (argc>6)
            l5n=atoi(argv[6]);
        if (argc>7)
            l6n=atoi(argv[7]);
    }

    signal ( 2, sig_term );



    srand ( time ( NULL ) );

    printf("loading training data...");

    train_data = fann_read_train_from_file ( "train.dat" );
    test_data = fann_read_train_from_file ( "test.dat" );

    weight_data=fann_merge_train_data(train_data,test_data);

    cln_weight_data=fann_duplicate_train_data(weight_data);
    cln_test_data=fann_duplicate_train_data(test_data);
    cln_train_data=fann_duplicate_train_data(train_data);

    //num_neurons_hidden = atoi ( argv[1] );



    srand(time(NULL));

    y=atoi(argv[2]);

    lay=atoi(argv[1]);
    ln=lay+2;
    if (lay==1)
        y2=train_data->num_output;
    best_perc=1;

    printf("\r\ndoing %ux%u [layers=%u,out=%u]",lay,y,ln, train_data->num_output);
    while (true)

    {
        neur1=1+(rand()%y);
        neur2=1+(rand()%y);
        conn_rate=0.5f+((rand()%50)*0.01f);
        printf("\r\n%2dx%-4d: ",neur1,neur2);
        //  printf("create network: layers=%d l1n=%d l2n=%d l3n=%d l4n=%d\ l5n=%d l6n=%dr\n",numn,l1n,l2n,l3n,l4n,l5n,l6n);
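        /* NOTE: four layer sizes are passed below, so this call is only consistent when ln == 4 (i.e. lay == 2) */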
        ann = fann_create_standard (//conn_rate,
                  ln,
                  train_data->num_input,
                  neur1,
                  neur2,
                  train_data->num_output );
        //fann_init_weights ( ann, train_data );
        printf(" [%p] ",ann);

        if ( ann==NULL )
        {
            printf ( "error" );
            exit ( 0 );
        }



        fann_set_activation_function_hidden(ann,FANN_SIGMOID);
        fann_set_activation_function_output(ann,FANN_SIGMOID);

        rebuild_functions(neur1);
        fann_set_training_algorithm ( ann, FANN_TRAIN_RPROP );
        fann_set_sarprop_temperature(ann,15000.0f);
        //fann_randomize_weights ( ann, -((rand()%10)*0.1f), ((rand()%10)*0.1f) );
        fann_init_weights(ann,train_data);
        got_inc=0;
        prev_epoch_mse=1;
        //
        epochs=0;

        unsigned last_best_perc_epoch=0;
        unsigned last_sync_epoch=0;
        unsigned last_ftest_secs=0;

        last_sync_epoch=0;
        last_best_perc_epoch=0;
        if (good_ann)
            fann_destroy(good_ann);

        good_ann=fann_copy(ann);
        unlink(histfile);
        for (u=0;u<1000;u++)
        {
            fflush(NULL);
            train_mse=fann_train_epoch(ann, train_data);

            if (jitter_train)
                apply_jjit(train_data,cln_train_data);


            if (time(NULL)-last_ftest_secs>=1)
            {
                //printf("\r\n%5u %9.6f %5.2f ",epochs,train_mse,test_perc);
                //printf(" %4.2f",test_perc);
                printf(".");


                last_ftest_secs=time(NULL);
            }
            ftest_data();
            plot(epochs,train_mse,test_mse);

            /*         if (epochs>10&&((int)test_perc==43||(int)test_perc==57))
                    {
                        printf(" [excluded %.2f] ",test_perc);
                        break;
                    }            else            {

                    } */
            //printf("excluded %f ",test_perc);

            static double prev_test_perc = 0; /* persists across epochs; used below to detect an unchanged test percentage */
            //   if (prev_epoch_mse==best_perc)
            //  printf("o");
            if ((int)test_perc>(int)train_perc&&epochs-last_stat_epoch>10)
            {
                fann_destroy(good_ann);
                good_ann=fann_copy(ann);

                if (test_perc!=prev_test_perc)
                    printf("%.2f [%f]",test_perc,train_mse);

                //printf(" sync[%4.2f]",test_perc);
                last_stat_epoch=epochs;
            }
            else 	if (epochs-last_sync_epoch>111500)
            {
                last_sync_epoch=epochs;
            }
            if (epochs>210&&test_perc>best_perc)
            {
                //	u--;
                //  fann_destroy(good_ann);
                //   good_ann=fann_copy(ann);
                printf(" [saved best %.0f] ",test_perc);
                last_stat_epoch=epochs;
                //		printf("%f",test_perc);
                //	fann_destroy(ann);
                //	ann=fann_copy(good_ann);
                fann_save(ann,"mutate-best.net");
                best_perc=test_perc;
                printf(" %6.2f [%f]",test_perc,train_mse);
                last_best_perc_epoch=epochs;

            }
            else     if (epochs>11100&&((int)test_perc<=63||(int)test_perc==(int)prev_test_perc))
            {
                //best_perc=test_perc;
                //		printf("x");
                //  printf(".");
                //printf("\r%6.8f",train_mse);
                //			printf("done\r\n");
                break;


            }
            static unsigned last_restore_epoch=0;
            if (epochs>100&&test_mse-train_mse>=0.25f&&epochs-last_restore_epoch>=120)
            {
                /* 	fann_set_learning_rate ( ann,0.31f+(rand()%90)*0.01f);
                	fann_set_learning_momentum(ann,(rand()%90)*0.01f);
                	printf(" [restored @ %u lr %.2f mm %.2f]",epochs,fann_get_learning_rate(ann),
                	fann_get_learning_momentum(ann));
                	fann_destroy(ann);
                	ann=fann_copy(good_ann);
                	last_stat_epoch=epochs;
                	last_restore_epoch=epochs; */





                double rdec,rinc;
                rdec=0.0101f+((rand()%100)*0.00001f);
                if (!rdec)
                    rdec=0.01f;
                rinc=1.0001f+((rand()%90)*0.00001f);
                if (!rinc)
                    rinc=1.1f;
                static double prev_test_epoch_mse;

                //		rinc+=diff_mse*0.000001f;
                //			fann_set_rprop_increase_factor(ann,rinc );
                //	fann_set_rprop_decrease_factor(ann, rdec);
            }
            else if (test_mse-train_mse<=0.1f)
            {
                fann_destroy(good_ann);
                good_ann=fann_copy(ann);
                //	printf("s");
            }
            else
            {
                fann_set_sarprop_temperature(ann,fann_get_sarprop_temperature(ann)-0.0001f);
            }
            static unsigned last_train_change_epoch=0;
            if (test_mse>=train_mse&&epochs-last_train_change_epoch>=100)
            {
                last_train_change_epoch=epochs;
                //fann_set_training_algorithm(ann,FANN_TRAIN_SARPROP);
                jitter_train=0;
            }
            else
            {
                //fann_set_training_algorithm(ann,FANN_TRAIN_RPROP);
                jitter_train=0;
            }

            got_inc=test_perc-prev_epoch_mse;
            prev_epoch_mse=test_perc;
            prev_test_perc=test_perc;
            epochs++;
            if (epochs-last_best_perc_epoch>511500)
            {
                printf(" failed");
                break;
            }
            if (epochs>2200&&(int)train_perc<40)
            {
                printf("skip 1\r\n");
                break;
            }

            if ((int)test_perc>=80)
            {
                printf("\r\ngot it %f\r\n",test_perc);
                fann_save(ann,"good.net");
                exit(0);
            }
            // printf("\n%6u ",epochs);
        }
        printf(" %6.2f inc: %.2f",test_perc,got_inc);
//            printf("%6.2f %6.2f",train_perc,test_perc);

        fann_destroy ( ann );

    }

    fann_destroy_train ( train_data );
    fann_destroy_train ( test_data );
    fann_destroy ( ann );

    return 0;
}
Example #9
int main(int argc, const char* argv[])
{
	const unsigned int max_epochs = 1000;
	unsigned int num_threads = 1;
	struct fann_train_data *data;
	struct fann *ann;
	long before;
	float error;
	unsigned int i;

	if(argc == 2)
		num_threads = atoi(argv[1]);

	data = fann_read_train_from_file("../../datasets/mushroom.train");
	ann = fann_create_standard(3, fann_num_input_train_data(data), 32, fann_num_output_train_data(data));

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID);

	before = GetTickCount();
	for(i = 1; i <= max_epochs; i++)
	{
		error = num_threads > 1 ? fann_train_epoch_irpropm_parallel(ann, data, num_threads) : fann_train_epoch(ann, data);
		printf("Epochs     %8d. Current error: %.10f\n", i, error);
	}
	printf("ticks %d", GetTickCount()-before);

	fann_destroy(ann);
	fann_destroy_train(data);

	return 0;
}
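GetTickCount() ties this benchmark to Windows. On POSIX systems an equivalent millisecond timer could be built on clock_gettime(); the helper below is a sketch, not part of the original example.

#include <time.h>

/* Millisecond timestamp from a monotonic clock; a possible stand-in for GetTickCount() on POSIX. */
static long ms_now(void)
{
	struct timespec ts;
	clock_gettime(CLOCK_MONOTONIC, &ts);
	return (long)(ts.tv_sec * 1000L + ts.tv_nsec / 1000000L);
}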
Example #10
int main (int argc, char * argv[]) {
  int i, epoch, k, num_bits_failing, num_correct;
  int max_epochs = 10000, exit_code = 0, batch_items = -1;
  int flag_cups = 0, flag_last = 0, flag_mse = 0, flag_verbose = 0,
    flag_bit_fail = 0, flag_ignore_limits = 0, flag_percent_correct = 0;
  int mse_reporting_period = 1, bit_fail_reporting_period = 1,
    percent_correct_reporting_period = 1;
  float bit_fail_limit = 0.05, mse_fail_limit = -1.0;
  double learning_rate = 0.7;
  char id[100] = "0";
  char * file_video_string = NULL;
  FILE * file_video = NULL;
  struct fann * ann = NULL;
  struct fann_train_data * data = NULL;
  fann_type * calc_out;
  enum fann_train_enum type_training = FANN_TRAIN_BATCH;

  char * file_nn = NULL, * file_train = NULL;
  int c;
  while (1) {
    static struct option long_options[] = {
      {"video-data",           required_argument, 0, 'b'},
      {"stat-cups",            no_argument,       0, 'c'},
      {"num-batch-items",      required_argument, 0, 'd'},
      {"max-epochs",           required_argument, 0, 'e'},
      {"bit-fail-limit",       required_argument, 0, 'f'},
      {"mse-fail-limit",       required_argument, 0, 'g'},
      {"help",                 no_argument,       0, 'h'},
      {"id",                   required_argument, 0, 'i'},
      {"stat-last",            no_argument,       0, 'l'},
      {"stat-mse",             optional_argument, 0, 'm'},
      {"nn-config",            required_argument, 0, 'n'},
      {"stat-bit-fail",        optional_argument, 0, 'o'},
      {"stat-percent-correct", optional_argument, 0, 'q'},
      {"learning-rate",        required_argument, 0, 'r'},
      {"train-file",           required_argument, 0, 't'},
      {"verbose",              no_argument,       0, 'v'},
      {"incremental",          optional_argument, 0, 'x'},
      {"ignore-limits",        no_argument,       0, 'z'}
    };
    int option_index = 0;
    c = getopt_long (argc, argv, "b:cd:e:f:g:hi:lm::n:o::q::r:t:vx::z",
                     long_options, &option_index);
    if (c == -1)
      break;
    switch (c) {
    case 'b': file_video_string = optarg; break;
    case 'c': flag_cups = 1; break;
    case 'd': batch_items = atoi(optarg); break;
    case 'e': max_epochs = atoi(optarg); break;
    case 'f': bit_fail_limit = atof(optarg); break;
    case 'g': mse_fail_limit = atof(optarg); break;
    case 'h': usage(); exit_code = 0; goto bail;
    case 'i': strcpy(id, optarg); break;
    case 'l': flag_last = 1; break;
    case 'm':
      if (optarg)
        mse_reporting_period = atoi(optarg);
      flag_mse = 1;
      break;
    case 'n': file_nn = optarg; break;
    case 'o':
      if (optarg)
        bit_fail_reporting_period = atoi(optarg);
      flag_bit_fail = 1;
      break;
    case 'q':
      if (optarg)
        percent_correct_reporting_period = atoi(optarg);
      flag_percent_correct = 1;
      break;
    case 'r': learning_rate = atof(optarg); break;
    case 't': file_train = optarg; break;
    case 'v': flag_verbose = 1; break;
    case 'x': type_training=(optarg)?atoi(optarg):FANN_TRAIN_INCREMENTAL; break;
    case 'z': flag_ignore_limits = 1; break;
    }
  };

  // Make sure there aren't any arguments left over
  if (optind != argc) {
    fprintf(stderr, "[ERROR] Bad argument\n\n");
    usage();
    exit_code = -1;
    goto bail;
  }

  // Make sure we have all required inputs
  if (file_nn == NULL || file_train == NULL) {
    fprintf(stderr, "[ERROR] Missing required input argument\n\n");
    usage();
    exit_code = -1;
    goto bail;
  }

  // The training type needs to make sense
  if (type_training > FANN_TRAIN_SARPROP) {
    fprintf(stderr, "[ERROR] Training type %d outside of enumerated range (max: %d)\n",
            type_training, FANN_TRAIN_SARPROP);
    exit_code = -1;
    goto bail;
  }

  ann = fann_create_from_file(file_nn);
  data = fann_read_train_from_file(file_train);
  if (batch_items != -1 && batch_items < data->num_data)
    data->num_data = batch_items;
  enum fann_activationfunc_enum af =
    fann_get_activation_function(ann, ann->last_layer - ann->first_layer -1, 0);

  ann->training_algorithm = type_training;
  ann->learning_rate = learning_rate;
  printf("[INFO] Using training type %d\n", type_training);

  if (file_video_string != NULL)
    file_video = fopen(file_video_string, "w");

  double mse;
  for (epoch = 0; epoch < max_epochs; epoch++) {
    fann_train_epoch(ann, data);
    num_bits_failing = 0;
    num_correct = 0;
    fann_reset_MSE(ann);
    for (i = 0; i < fann_length_train_data(data); i++) {
      calc_out = fann_test(ann, data->input[i], data->output[i]);
      if (flag_verbose) {
        printf("[INFO] ");
        for (k = 0; k < data->num_input; k++) {
          printf("%8.5f ", data->input[i][k]);
        }
      }
      int correct = 1;
      for (k = 0; k < data->num_output; k++) {
        if (flag_verbose)
          printf("%8.5f ", calc_out[k]);
        num_bits_failing +=
          fabs(calc_out[k] - data->output[i][k]) > bit_fail_limit;
        if (fabs(calc_out[k] - data->output[i][k]) > bit_fail_limit)
          correct = 0;
        if (file_video)
          fprintf(file_video, "%f ", calc_out[k]);
      }
      if (file_video)
        fprintf(file_video, "\n");
      num_correct += correct;
      if (flag_verbose) {
        if (i < fann_length_train_data(data) - 1)
          printf("\n");
      }
    }
    if (flag_verbose)
      printf("%5d\n\n", epoch);
    mse = fann_get_MSE(ann); /* MSE accumulated by the fann_test() calls above; also used for the mse_fail_limit check below */
    if (flag_mse  && (epoch % mse_reporting_period == 0)) {
      switch(af) {
      case FANN_LINEAR_PIECE_SYMMETRIC:
      case FANN_THRESHOLD_SYMMETRIC:
      case FANN_SIGMOID_SYMMETRIC:
      case FANN_SIGMOID_SYMMETRIC_STEPWISE:
      case FANN_ELLIOT_SYMMETRIC:
      case FANN_GAUSSIAN_SYMMETRIC:
      case FANN_SIN_SYMMETRIC:
      case FANN_COS_SYMMETRIC:
        mse *= 4.0;
      default:
        break;
      }
      printf("[STAT] epoch %d id %s mse %8.8f\n", epoch, id, mse);
    }
    if (flag_bit_fail && (epoch % bit_fail_reporting_period == 0))
      printf("[STAT] epoch %d id %s bfp %8.8f\n", epoch, id,
             1 - (double) num_bits_failing / data->num_output /
             fann_length_train_data(data));
    if (flag_percent_correct && (epoch % percent_correct_reporting_period == 0))
      printf("[STAT] epoch %d id %s perc %8.8f\n", epoch, id,
             (double) num_correct / fann_length_train_data(data));
    if (!flag_ignore_limits && (num_bits_failing == 0 || mse < mse_fail_limit))
      goto finish;
    // printf("%8.5f\n\n", fann_get_MSE(ann));
  }

 finish:
  if (flag_last)
    printf("[STAT] x 0 id %s epoch %d\n", id, epoch);
  if (flag_cups)
    printf("[STAT] x 0 id %s cups %d / ?\n", id,
           epoch * fann_get_total_connections(ann));

 bail:
  if (ann != NULL)
    fann_destroy(ann);
  if (data != NULL)
    fann_destroy_train(data);
  if (file_video != NULL)
    fclose(file_video);

  return exit_code;
}
Example #11
int main(int argc, const char* argv[])
{
	
	if (argc < 2) {
    printf("Usage: ./dinneuro filename\n");
    return -1;
	}
	
	//prepare the data sets
	if (csv2fann2(argv[1], 59, 50, 100, true)) { printf("Converted\n"); }
	
	//get the number of input and output parameters
	int *params;
	const char * filename;
	const char * normfilename;
	filename = "data.data";
	//filename = "scaling.data";
	normfilename = "normalized.train";
	params = getparams(filename);
	
	unsigned int num_threads = omp_get_max_threads(); /* omp_get_thread_num() would always return 0 outside a parallel region */
	float error;
	const unsigned int num_input = params[1];
	const unsigned int num_output = params[2];
	//printf("num_input=%d num_output=%d\n", num_input, num_output);
	const unsigned int num_layers = 4;
	//const unsigned int num_neurons_hidden = num_output;
	const unsigned int num_neurons_hidden = 5;
	const float desired_error = (const float) 0.0001;
	const unsigned int max_epochs = 5000;
	const unsigned int epochs_between_reports = 1000;
	struct fann_train_data * data = NULL;
	struct fann *ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_neurons_hidden, num_output);
	fann_set_activation_function_hidden(ann, FANN_LINEAR);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);
	//printf("test\n");
	data = fann_read_train_from_file(filename);
	
	printf("Readed train from %s\n", filename);
	
	fann_set_scaling_params(
		    ann,
			data,
			-1,	/* New input minimum */
			1,	/* New input maximum */
			-1,	/* New output minimum */
			1);	/* New output maximum */

	fann_scale_train( ann, data );
	printf("Scaled\n");
	
	//save the normalized training set to a file
	fann_save_train(data, normfilename);
	printf("Saved scaled file %s\n", normfilename);
	
	unsigned int i;
	printf("Start learning...\n");
	for(i = 1; i <= max_epochs; i++)
	{
		error = num_threads > 1 ? fann_train_epoch_irpropm_parallel(ann, data, num_threads) : fann_train_epoch(ann, data);
		//if the training error is at or below the target, exit the training loop
		//if (error <= desired_error) { printf ("Desired error detected. Finishing teaching.\n"); break; }
		//if the epoch counter is divisible by epochs_between_reports, log progress
		//if (i % epochs_between_reports == 0) { printf("Epochs     %8d. Current error: %.10f\n", i, error); }
		
	}
	printf("End learning.\n");
	printf("MSE = %f\n", fann_get_MSE(ann));

	//fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error);
	fann_destroy_train( data );
	fann_save(ann, "scaling.net");
	fann_destroy(ann);
	
	//validation: reload the saved network and test it
	printf("Testing...\n");
	fann_type *calc_out;
	//printf("fann_length_train_data=%d\n",fann_length_train_data(data));
	printf("Creating network.\n");
	ann = fann_create_from_file("scaling.net");
	if(!ann)
	{
		printf("Error creating ann --- ABORTING.\n");
		return 0;
	}
	
	//print the network parameters
	//fann_print_connections(ann);
	//fann_print_parameters(ann);
	
	printf("Testing network.\n");
	data = fann_read_train_from_file(filename);
	for(i = 0; i < fann_length_train_data(data); i++)
	{
		fann_reset_MSE(ann);
    	fann_scale_input( ann, data->input[i] );
		calc_out = fann_run( ann, data->input[i] );
		fann_descale_output( ann, calc_out );
		printf("Result %f original %f error %f or %.2f%%\n",
			calc_out[0], data->output[i][0],
			(float) fann_abs(calc_out[0] - data->output[i][0]), (100*(float) fann_abs(calc_out[0] - data->output[i][0]))/(float)calc_out[0]);
	}

	fann_destroy_train( data );
	fann_destroy(ann);

return 0;
}
Example #12
int main( int argc, char ** argv)
{
	float mse=1000;
	unsigned int num_train=R_NUM;
	unsigned int num_test=T_NUM;
	struct fann_train_data* data ;
	unsigned int i;
	const float desired_error = (const float) E_DES;
	const unsigned int epochs_between_reports = N_EPR;
	unsigned int bitf_limit=0;
	unsigned int bitf=bitf_limit+1;
	struct fann *ann;

#if MIMO_FANN
	printf("MIMO fann\n");
#else
	printf("Old fann\n");
#endif

#ifdef USE_XOR_DATA
	if (argc<2)
	{
		printf("Error: please supply a data file\n");
		return -1;
	}
	printf("Using %s\n", argv[1]);
	data=fann_read_train_from_file(argv[1]);
#else
	printf("Generating training data\n");
	data = fann_create_train(S_DIM, I_DIM, O_DIM);
	for ( i=0; i< S_DIM; i++)
	{
		f1(data, i); 
	}
#endif

	ann=setup_net(data);

#if VERBOSE
	fann_print_parameters(ann);
#endif

	for (i=0; mse>desired_error && i!=num_train && bitf>bitf_limit; i++)
	{
#if VERBOSE
		mse=train_epoch_debug(ann, data, i);
#else 
		mse=fann_train_epoch(ann, data);
#endif
		bitf=fann_get_bit_fail(ann);
		if ( !((i) % epochs_between_reports))
			printf("Epochs     %8d. Current error: %.10f. Bit fail: %u\n", i+(!i), mse, bitf);
			/*printf ("[ %7u ] MSE Error : %.10e ###################\n", i, mse);*/
	}
	printf("Epochs     %8d. Current error: %.10f. Bit fail: %u\n", i+(!i), mse, bitf);

	printf("Testing network. %f\n", fann_test_data(ann, data));
	
	gettimeofday(&tv_start,NULL);
	for (i=0; i!=num_test; i++)
 		fann_run_data(ann, data);
	gettimeofday(&tv_now,NULL);
	report("---",0);	
	

#if 1
	printf("Trying to save network\n");
#if MIMO_FANN 
	fann_save(ann, "saved_mimo.net");
	fann_destroy(ann);
	ann=fann_create_from_file("saved_mimo.net");
	fann_save(ann, "saved_mimo2.net");
	fann_destroy(ann);
#else
	fann_save(ann, "saved_old.net");
#endif
#endif

	return 0;
}
Example #13
float ViFann::fannTrainSingleCpu(fann *network, fann_train_data *data)
{
	return fann_train_epoch(network, data);
}
Example #14
FANN_EXTERNAL void FANN_API fann_train_on_data(struct fann *ann, struct fann_train_data *data,
											   unsigned int max_epochs,
											   unsigned int epochs_between_reports,
											   float desired_error)
{
	float error;
	unsigned int i;
	int desired_error_reached = -1;
	struct fpts_cl *fptscl;
	cl_int err;
	fptscl = fptsclalloc(ann);
	fptscwrite(ann, fptscl, data);
	fptspush(fptscl); //May be buggy.
#ifdef DEBUG
	printf("Training with %s\n", FANN_TRAIN_NAMES[ann->training_algorithm]);
#endif
	if(epochs_between_reports && ann->callback == NULL)
	{
		printf("Max epochs %8d. Desired error: %.10f.\n", max_epochs, desired_error);
	}
#ifdef TIMER
	double t_end, t_start = timer_get_time();
#endif

	for(i = 1; i <= max_epochs; i++)
	{
		/*
		 * train 
		 */
		error = fann_train_epoch(ann, data, fptscl);
		//desired_error_reached = fann_desired_error_reached(ann, desired_error);
		if (error < desired_error) desired_error_reached = 0;
		/*
		 * print current output 
		 */
		if(epochs_between_reports &&
		   (i % epochs_between_reports == 0 || i == max_epochs || i == 1 ||
			desired_error_reached == 0))
		{
			if(ann->callback == NULL)
			{
				printf("Epochs     %8d. Current error: %.10f. Bit fail %d.", i, error, ann->num_bit_fail);
#ifdef TIMER
				clFinish(fptscl->hardware.queue);//TESTING
				t_end = timer_get_time();
				printf (". Time %0.1lf\n", (t_end - t_start)*1000);
				t_start = t_end;
#else
				printf("\n");
#endif
			}
			else if(((*ann->callback)(ann, data, max_epochs, epochs_between_reports, 
									  desired_error, i)) == -1)
			{
				/*
				 * you can break the training by returning -1 
				 */
				break;
			}
		}

		if(desired_error_reached == 0)
			break;
	}
	fptscread(ann, fptscl);
}