/*
 * Train a 2-input / 1-output feed-forward network (one hidden layer of
 * 9 neurons) on "osyslec_train.data" with RPROP and a bit-fail stop
 * criterion, print per-sample results, then save the network in both
 * floating-point and fixed-point formats.
 *
 * Returns 0 on success, 1 if the network or the training data could
 * not be created/loaded (the original ignored both failure cases and
 * would have dereferenced NULL).
 */
int main()
{
	fann_type *calc_out;
	const unsigned int num_input = 2;
	const unsigned int num_output = 1;
	const unsigned int num_layers = 3;
	const unsigned int num_neurons_hidden = 9;
	const float desired_error = (const float) 0;	/* train until bit-fail stop triggers */
	const unsigned int max_epochs = 500000;
	const unsigned int epochs_between_reports = 1000;
	struct fann *ann;
	struct fann_train_data *data;

	unsigned int i = 0;
	unsigned int decimal_point;

	printf("Creating network.\n");
	ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output);
	if (ann == NULL)
	{
		printf("Error: could not create network.\n");
		return 1;
	}

	data = fann_read_train_from_file("osyslec_train.data");
	if (data == NULL)
	{
		/* Without training data there is nothing to train on. */
		printf("Error: could not read osyslec_train.data.\n");
		fann_destroy(ann);
		return 1;
	}

	fann_set_activation_steepness_hidden(ann, 1);
	fann_set_activation_steepness_output(ann, 1);

	fann_set_activation_function_hidden(ann, FANN_SIGMOID);
	fann_set_activation_function_output(ann, FANN_SIGMOID);

	/* Stop when every output is within the bit-fail limit of its target. */
	fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
	fann_set_bit_fail_limit(ann, 0.01f);

	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);

	/* Widrow-Nguyen initialization based on the training data ranges. */
	fann_init_weights(ann, data);

	printf("Training network.\n");
	fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error);

	printf("Testing network. %f\n", fann_test_data(ann, data));

	for(i = 0; i < fann_length_train_data(data); i++)
	{
		calc_out = fann_run(ann, data->input[i]);
		printf("GG test (%f,%f) -> %f, should be %f, difference=%f\n",
			   data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0],
			   fann_abs(calc_out[0] - data->output[i][0]));
	}

	printf("Saving network.\n");

	fann_save(ann, "osyslec_train_float.net");

	/* Fixed-point export: the returned decimal point must be used to
	 * convert the training data consistently. */
	decimal_point = fann_save_to_fixed(ann, "osyslec_train_fixed.net");
	fann_save_train_to_fixed(data, "osyslec_train_fixed.data", decimal_point);

	printf("Cleaning up.\n");
	fann_destroy_train(data);
	fann_destroy(ann);

	return 0;
}
Beispiel #2
0
// Trains ann_ on the given data set and reports the resulting error.
//
// Weight initialization: Widrow-Nguyen (fann_init_weights) when
// random_weight_limit is NULL, otherwise uniform random weights in
// [-*random_weight_limit, +*random_weight_limit].  The training data is
// shuffled before training.
//
// On return, *mse and *bit_fail are filled in via GetMseAndBitFail; the
// return value is whatever GetMseAndBitFail reports.
//
// NOTE(review): GetMseAndBitFail is called with &mse and &bit_fail,
// i.e. float** / std::size_t**.  Verify its signature really takes
// pointer-to-pointer; if it takes float* / std::size_t*, the & operators
// here are a bug and the caller's output parameters are never written.
bool Trainer::Train(const AnnData& data, const float* random_weight_limit,
                    std::size_t max_epochs, std::size_t epochs_between_reports,
                    float desired_error,
                    float* mse, std::size_t* bit_fail) {
  if (random_weight_limit == NULL)
    fann_init_weights(ann_, data.data());
  else
    fann_randomize_weights(ann_, -*random_weight_limit, *random_weight_limit);
  fann_shuffle_train_data(data.data());
  fann_train_on_data(ann_, data.data(), max_epochs, epochs_between_reports,
                     desired_error);
  return GetMseAndBitFail(ann_, &mse, &bit_fail);
}
Beispiel #3
0
// fann_init_weights           Initialize the weights using Widrow + Nguyen's algorithm.
//
// Scilab gateway:  ann_out = fann_init_weights(ann_in, data_train_in)
// Argument 1 is a fann structure, argument 2 a fann_train_data structure.
// Always returns 0 (Scilab gateway convention); errors are reported via
// Scierror.
int sci_fann_init_weights(char * fname)
{
  int res;
  struct fann * result_ann = NULL;
  struct fann_train_data * result_ann_train = NULL;

  /* The call is valid only with exactly 2 inputs and 1 output.  Reject
   * it if EITHER count is wrong — the original used &&, so the usage
   * error fired only when both were wrong at the same time. */
  if ((Rhs!=2)||(Lhs!=1))
    {
      Scierror(999,"%s usage: ann_out = %s(ann_in, data_train_in)", fname, fname);
      return 0;
    }

  // Get the ann
  res = detect_fannlist(1);
  if (res==-1) return 0;

  result_ann = createCFannStructFromScilabFannStruct(1,&res);
  if (res==-1) return 0;

  if (result_ann==NULL)
    {
      Scierror(999,"%s: Problem while creating the fann scilab structure\n",fname);
      return 0;
    }

  // Get the train data
  res = detect_fanntraindatalist(2);
  if (res==-1) return 0;

  result_ann_train = createCFannTrainDataStructFromScilabFannTrainDataStruct(2,&res);
  if (res==-1) return 0;

  /* The original never checked this pointer and would have passed NULL
   * straight into fann_init_weights. */
  if (result_ann_train==NULL)
    {
      Scierror(999,"%s: Problem while creating the fann train data structure\n",fname);
      return 0;
    }

  fann_init_weights(result_ann, result_ann_train);

  res = createScilabFannStructFromCFannStruct(result_ann, Rhs + 1);
  if (res==-1) return 0;

  LhsVar(1) = Rhs + 1;

  return 0;
}
Beispiel #4
0
/*! ann:init_weights(train)
 *# Initializes the weights using Widrow and Nguyen's algorithm based on the
 *# given training data {{train}}.
 *x ann:init_weights(train)
 *-
 */
static int ann_init_weights(lua_State *L)
{
	struct fann **ann;
	struct fann_train_data **train;

	if(lua_gettop(L) < 2)
		luaL_error(L, "insufficient parameters");

	ann = luaL_checkudata(L, 1, FANN_METATABLE);
	luaL_argcheck(L, ann != NULL, 1, "'neural net' expected");

	train = luaL_checkudata(L, 2, FANN_TRAIN_METATABLE);
	/* BUG FIX: the error was attributed to argument 1 ("bad argument #1")
	 * although the training data is argument 2. */
	luaL_argcheck(L, train != NULL, 2, "'training data' expected");

#ifdef FANN_VERBOSE
	printf("Initialising weights to training data...\n");
#endif

	fann_init_weights(*ann, *train);

	/* No values pushed back to Lua. */
	return 0;
}
Beispiel #5
0
/*
 * (Re)initializes the network weights according to the requested scheme.
 *
 * Random: uniform weights in [minimum, maximum]; the bounds are cached
 * in mWeightsMinimum / mWeightsMaximum.
 * WidrowNguyen: fann_init_weights derives value ranges from a training
 * set, so a throwaway 1-sample set is built whose two inputs span -1..1.
 *
 * Returns false when no network exists yet, true otherwise.
 */
bool ViFann::setWeights(const Weights &initialization, const qreal &minimum, const qreal &maximum)
{
	if(mNetwork == NULL) return false;
	mWeights = initialization;

	if(initialization == Random)
	{
		fann_randomize_weights(mNetwork, minimum, maximum);
		mWeightsMinimum = minimum;
		mWeightsMaximum = maximum;
	}
	else if(initialization == WidrowNguyen)
	{
		// Create fake training set so that FANN can determine the min and max values
		// NOTE(review): the single output value is never set, so it is read
		// uninitialized if FANN also inspects output ranges — confirm against
		// fann_init_weights, and confirm that mWeightsMinimum/Maximum are
		// intentionally left stale on this path.
		fann_train_data *data = fann_create_train(1, 2, 1);
		data->input[0][0] = 1;
		data->input[0][1] = -1;
		fann_init_weights(mNetwork, data);
		fann_destroy_train(data);
	}

	return true;
}
Beispiel #6
0
/*
 * Random-topology search: repeatedly builds a network with two randomly
 * sized hidden layers, trains it with RPROP, and keeps the best test-set
 * performance seen so far ("mutate-best.net").  Exits when a network
 * reaches >= 80% test performance ("good.net").
 *
 * Relies on file-scope globals shared with the helper routines
 * (train_data, test_data, ann, good_ann, test_perc, train_perc, epochs,
 * ftest_data, rebuild_functions, plot, apply_jjit, sig_term, ...).
 *
 * argv[1] = number of hidden layers, argv[2] = max neurons per layer
 * (plus optional per-layer overrides in argv[2..7]).
 */
int main ( int argc, char **argv )
{
    srand(time(NULL));
    if ( argc<=1 )
    {
        //      printf ( "neuro num\r\n" );
        //     exit ( 0 );
    }

    if (argc>2)
    {
        //desired_error=atof(argv[2]);
        numn=atoi(argv[1]);
        l1n=atoi(argv[2]);
        if (argc>3)
            l2n=atoi(argv[3]);
        if (argc>4)
            l3n=atoi(argv[4]);
        if (argc>5)
            l4n=atoi(argv[5]);
        if (argc>6)
            l5n=atoi(argv[6]);
        if (argc>7)
            l6n=atoi(argv[7]);
    }

    signal ( 2, sig_term );

    srand ( time ( NULL ) );

    printf("loading training data...");

    train_data = fann_read_train_from_file ( "train.dat" );
    test_data = fann_read_train_from_file ( "test.dat" );

    weight_data=fann_merge_train_data(train_data,test_data);

    cln_weight_data=fann_duplicate_train_data(weight_data);
    cln_test_data=fann_duplicate_train_data(test_data);
    cln_train_data=fann_duplicate_train_data(train_data);

    srand(time(NULL));

    y=atoi(argv[2]);

    lay=atoi(argv[1]);
    ln=lay+2;                       /* hidden layers + input + output */
    if (lay==1)
        y2=train_data->num_output;
    best_perc=1;

    printf("\r\ndoing %ux%u [layers=%u,out=%u]",lay,y,ln, train_data->num_output);
    while (true)
    {
        /* Pick a random candidate topology for this round. */
        neur1=1+(rand()%y);
        neur2=1+(rand()%y);
        conn_rate=0.5f+((rand()%50)*0.01f);
        printf("\r\n%2dx%-4d: ",neur1,neur2);
        ann = fann_create_standard (//conn_rate,
                  ln,
                  train_data->num_input,
                  neur1,
                  neur2,
                  train_data->num_output );
        printf(" [%p] ",ann);

        /* BUG FIX: the original tested "(int) ann == NULL", which
         * truncates the pointer on LP64 systems and is not a valid
         * NULL check. */
        if ( ann==NULL )
        {
            printf ( "error" );
            exit ( 0 );
        }

        fann_set_activation_function_hidden(ann,FANN_SIGMOID);
        fann_set_activation_function_output(ann,FANN_SIGMOID);

        rebuild_functions(neur1);
        fann_set_training_algorithm ( ann, FANN_TRAIN_RPROP );
        fann_set_sarprop_temperature(ann,15000.0f);
        fann_init_weights(ann,train_data);
        got_inc=0;
        prev_epoch_mse=1;
        epochs=0;

        unsigned last_best_perc_epoch=0;
        unsigned last_sync_epoch=0;
        unsigned last_ftest_secs=0;

        /* BUG FIX: prev_test_perc was declared INSIDE the epoch loop and
         * read before ever being written (undefined behaviour each
         * iteration).  Hoisted here so it carries the previous epoch's
         * test performance, which is what the comparisons below expect. */
        double prev_test_perc = -1.0;

        last_sync_epoch=0;
        last_best_perc_epoch=0;
        if (good_ann)
            fann_destroy(good_ann);

        good_ann=fann_copy(ann);
        unlink(histfile);
        for (u=0;u<1000;u++)
        {
            fflush(NULL);
            train_mse=fann_train_epoch(ann, train_data);

            if (jitter_train)
                apply_jjit(train_data,cln_train_data);

            /* Progress dot at most once per second. */
            if (time(NULL)-last_ftest_secs>=1)
            {
                printf(".");
                last_ftest_secs=time(NULL);
            }
            ftest_data();
            plot(epochs,train_mse,test_mse);

            if ((int)test_perc>(int)train_perc&&epochs-last_stat_epoch>10)
            {
                /* Test outperforms train: snapshot this network. */
                fann_destroy(good_ann);
                good_ann=fann_copy(ann);

                if (test_perc!=prev_test_perc)
                    printf("%.2f [%f]",test_perc,train_mse);

                last_stat_epoch=epochs;
            }
            else 	if (epochs-last_sync_epoch>111500)
            {
                last_sync_epoch=epochs;
            }
            if (epochs>210&&test_perc>best_perc)
            {
                /* New best test performance: persist it. */
                printf(" [saved best %.0f] ",test_perc);
                last_stat_epoch=epochs;
                fann_save(ann,"mutate-best.net");
                best_perc=test_perc;
                printf(" %6.2f [%f]",test_perc,train_mse);
                last_best_perc_epoch=epochs;
            }
            else     if (epochs>11100&&((int)test_perc<=63||(int)test_perc==(int)prev_test_perc))
            {
                /* Stagnated or stuck at a low plateau: abandon this
                 * topology and try another. */
                break;
            }
            static unsigned last_restore_epoch=0;
            if (epochs>100&&test_mse-train_mse>=0.25f&&epochs-last_restore_epoch>=120)
            {
                /* Overfitting branch.  The restore / RPROP-parameter
                 * jitter logic from the original is disabled; rdec and
                 * rinc are computed but deliberately unused. */
                double rdec,rinc;
                rdec=0.0101f+((rand()%100)*0.00001f);
                if (!rdec)
                    rdec=0.01f;
                rinc=1.0001f+((rand()%90)*0.00001f);
                if (!rinc)
                    rinc=1.1f;
                (void)rdec;
                (void)rinc;
            }
            else if (test_mse-train_mse<=0.1f)
            {
                /* Generalization gap is small: keep a copy as "good". */
                fann_destroy(good_ann);
                good_ann=fann_copy(ann);
            }
            else
            {
                fann_set_sarprop_temperature(ann,fann_get_sarprop_temperature(ann)-0.0001f);
            }
            static unsigned last_train_change_epoch=0;
            if (test_mse>=train_mse&&epochs-last_train_change_epoch>=100)
            {
                last_train_change_epoch=epochs;
                jitter_train=0;
            }
            else
            {
                jitter_train=0;
            }

            got_inc=test_perc-prev_epoch_mse;
            prev_epoch_mse=test_perc;
            prev_test_perc=test_perc;
            epochs++;
            if (epochs-last_best_perc_epoch>511500)
            {
                printf(" failed");
                break;
            }
            if (epochs>2200&&(int)train_perc<40)
            {
                printf("skip 1\r\n");
                break;
            }

            if ((int)test_perc>=80)
            {
                printf("\r\ngot it %f\r\n",test_perc);
                fann_save(ann,"good.net");
                exit(0);
            }
        }
        printf(" %6.2f inc: %.2f",test_perc,got_inc);

        fann_destroy ( ann );
    }

    fann_destroy_train ( train_data );
    fann_destroy_train ( test_data );
    fann_destroy ( ann );

    return 0;
}
Beispiel #7
0
/*
 * Train an image-spam classifier: 22500 inputs (one per pixel), one
 * output, two hidden layers of 50 neurons each.  Trains in three stages
 * with a decreasing learning rate and tightening target MSE, then saves
 * the network as "image_spam.net".
 *
 * Cleaned up relative to the original: scrape-mangled spacing removed,
 * and the unused locals (desired_error, max_epochs, learning_rate,
 * num_neurons, decimal_point) dropped.
 */
int main()
{
	fann_type *calc_out;

	const unsigned int num_input = 22500;
	const unsigned int num_output = 1;
	const unsigned int num_layers = 4;

	/* this value can be changed to tweak the network */
	const unsigned int num_neurons_hidden = 50;

	const unsigned int epochs_between_reports = 20;

	struct fann *ann;
	struct fann_train_data *data;
	unsigned int i = 0;

	/* CREATING NETWORK: input, two hidden layers, output */
	ann = fann_create_standard(num_layers, num_input,
							   num_neurons_hidden,
							   num_neurons_hidden, num_output);

	/* reading training data */
	data = fann_read_train_from_file("training.data");

	fann_set_activation_steepness_hidden(ann, 1);
	fann_set_activation_steepness_output(ann, 1);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

	fann_init_weights(ann, data);

	/* TRAINING NETWORK: three passes, learning rate .5 -> .2 -> .1 */
	fann_set_learning_rate(ann, .5);
	fann_train_on_data(ann, data, 5000, epochs_between_reports, .2);

	fann_set_learning_rate(ann, .2);
	fann_train_on_data(ann, data, 1000, epochs_between_reports, .15);

	fann_set_learning_rate(ann, .1);
	fann_train_on_data(ann, data, 5000, epochs_between_reports, .002);

	/* TESTING NETWORK */
	printf("Testing network. %f\n", fann_test_data(ann, data));

	for(i = 0; i < fann_length_train_data(data); i++)
	{
		/* Run every sample through the net; the per-sample printout was
		 * commented out in the original, so the output is discarded. */
		calc_out = fann_run(ann, data->input[i]);
	}
	(void)calc_out;

	/* SAVING NETWORK */
	fann_save(ann, "image_spam.net");

	/* CLEANING UP */
	fann_destroy_train(data);
	fann_destroy(ann);

	return 0;
}
/*
 * Reads an MLP classification experiment description from "test.xml"
 * (ezxml), builds and trains the network accordingly, prints per-sample
 * test results, and saves the net in float and fixed-point form.
 */
int main() {

	printf("Reading XML.. .. ..\n");
	ezxml_t f1 = ezxml_parse_file("test.xml"), classification, temp, algo, temp2;
	 
	classification = ezxml_child(f1, "classification");
	temp = ezxml_child(classification, "algorithm");
	algo = ezxml_child(temp, "MultiLayerPerceptron");

	const unsigned int num_input = atoi(ezxml_child(classification, "input")->txt);
	const unsigned int num_output = atoi(ezxml_child(classification, "output")->txt);
	const unsigned int num_layers = atoi(ezxml_child(classification, "numberOfLayers")->txt);
	const unsigned int num_neurons_hidden = atoi(ezxml_child(algo, "hiddenNeurons")->txt);
	const float desired_error = (const float) (atof(ezxml_child(algo, "desiredError")->txt));
	const unsigned int max_epochs = atoi(ezxml_child(algo, "maxEpochs")->txt);
	const unsigned int epochs_between_reports = atoi(ezxml_child(algo, "epochsBetweenReports")->txt);

	fann_type *calc_out;
	
	struct fann *ann;
	struct fann_train_data *data;

	unsigned int i = 0;
	unsigned int decimal_point;

	printf("Creating network.\n");
	ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output);

	data = fann_read_train_from_file(ezxml_child(classification, "datafile")->txt);

	/* NOTE(review): atoi truncates fractional steepness values (e.g.
	 * "0.5" parses as 0) — confirm the XML always holds integers here,
	 * or switch these two reads to atof. */
	fann_set_activation_steepness_hidden(ann, atoi(ezxml_child(algo, "hiddenActivationSteepness")->txt));
	fann_set_activation_steepness_output(ann, atoi(ezxml_child(algo, "outputActivationSteepness")->txt));

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);
	
	temp2 = ezxml_child(algo, "trainStopFuction");
	const char *stopFunc = temp2->txt;
	/* BUG FIX: the original compared the strings with ==, which tests
	 * pointer identity against a literal and is effectively always
	 * false, so FANN_STOPFUNC_BIT could never be selected. */
	if(strcmp(stopFunc, "FANN_STOPFUNC_BIT") == 0){
		fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
	} else {
		fann_set_train_stop_function(ann, FANN_STOPFUNC_MSE);
	}
	fann_set_bit_fail_limit(ann, 0.01f);

	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);

	fann_init_weights(ann, data);
	
	printf("Training network.\n");
	fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error);

	printf("Testing network. %f\n", fann_test_data(ann, data));

	for(i = 0; i < fann_length_train_data(data); i++)
	{
		calc_out = fann_run(ann, data->input[i]);
		printf("Test Results (%f,%f,%f) -> %f, should be %f, difference=%f\n",
			   data->input[i][0], data->input[i][1], data->input[i][2], calc_out[0], data->output[i][0],
			   fann_abs(calc_out[0] - data->output[i][0]));
	}

	printf("Saving network.\n");

	fann_save(ann, "xor_float.net");

	decimal_point = fann_save_to_fixed(ann, "xor_fixed.net");
	fann_save_train_to_fixed(data, "xor_fixed.data", decimal_point);

	printf("Cleaning up.\n");
	fann_destroy_train(data);
	fann_destroy(ann);

	ezxml_free(f1);

	return 0;
}
Beispiel #9
0
/*
arguments (all required):
 - data filename
 - topology, as number of neurons per layer separated by dashes
 - epochs (integer)
 - learning rate (0.0-1.0 float)
 - output filename
*/
int main(int argc, char **argv)
{
    /* All five arguments are mandatory; bail out early instead of
     * dereferencing missing argv entries (the original did no check). */
    if (argc < 6) {
        printf("Usage: %s <data file> <topology e.g. 2-8-1> <epochs> <learning rate> <output file>\n",
               argv[0]);
        return 1;
    }

    // Argument 1: data filename.
    const char *datafn = argv[1];

    // Argument 2: topology.  Parse at most MAX_LAYERS dash-separated
    // sizes; the original loop could overflow layer_sizes.
    unsigned int layer_sizes[MAX_LAYERS];
    unsigned int num_layers = 0;
    char *token = strtok(argv[2], "-");
    while (token != NULL && num_layers < MAX_LAYERS) {
        layer_sizes[num_layers] = atoi(token);
        ++num_layers;
        token = strtok(NULL, "-");
    }

    // Argument 3: epoch count.
    unsigned int max_epochs = atoi(argv[3]);

    // Argument 4: learning rate.
    float learning_rate = atof(argv[4]);

    // Argument 5: output filename.
    const char *outfn = argv[5];

    struct fann *ann;
	ann = fann_create_standard_array(num_layers, layer_sizes);

    // Misc parameters.
    fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);
	fann_set_activation_steepness_hidden(ann, 0.5);
	fann_set_activation_steepness_output(ann, 0.5);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID);
	fann_set_activation_function_output(ann, FANN_SIGMOID);
    //fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
    //fann_set_bit_fail_limit(ann, 0.01f);

    struct fann_train_data *data;
    data = fann_read_train_from_file(datafn);
	fann_init_weights(ann, data);
	
    fann_set_learning_rate(ann, learning_rate);
	fann_train_on_data(
        ann,
        data,
        max_epochs,
        10,  // epochs between reports
        DESIRED_ERROR
    );

	printf("Testing network. %f\n", fann_test_data(ann, data));

    fann_type *calc_out;
	for(unsigned int i = 0; i < fann_length_train_data(data); ++i)
	{
		calc_out = fann_run(ann, data->input[i]);
	}
	(void)calc_out;
	
	printf("RMSE = %f\n", sqrt(fann_get_MSE(ann)));

	fann_save(ann, outfn);

	fann_destroy_train(data);
	fann_destroy(ann);

	return 0;
}
Beispiel #10
0
/*
 * Train a 2-input distance-estimation network (3 layers, 8 hidden
 * neurons) on the training file named in argv[1], print per-sample
 * errors, and save the trained net to argv[2].
 */
int main(int argc, char **argv)
{
	if(argc < 3)
	{
		printf("Usage: train_net <input.train> <output.net>\n");
		exit(-1);
		
	}
	const unsigned int num_input = 2;
	const unsigned int num_output = 1;
	const unsigned int num_layers = 3;
	const unsigned int num_neurons_hidden = 8;
	const float desired_error = (const float) 0.000042;
	const unsigned int max_epochs = 500000;
	const unsigned int epochs_between_reports = 1000;

	struct fann *ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output);

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

 	fann_set_bit_fail_limit(ann, 0.01f);

	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);

	struct fann_train_data *data;
	data = fann_read_train_from_file(argv[1]);
	fann_init_weights(ann, data);

	printf("Training network on data from %s.\n", argv[1]);
	fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error);

	printf("Testing network. %f\n", fann_test_data(ann, data));

	/* Inputs/outputs are normalized; scale back to dBm / meters for the
	 * report (input*150-150 dBm, output*1000 meters). */
	double error, errorSum = 0;
	unsigned int i = 0, size = fann_length_train_data(data);
	fann_type *calc_out;
	for(i = 0; i < size; i++)
	{
		calc_out = fann_run(ann, data->input[i]);
		error = fann_abs(calc_out[0] - data->output[i][0]) * 1000;
		printf("Distance test (%d dBm,%f%%) -> %f meters, should be %f meters, difference=%f meters\n",
			   (int)(data->input[i][0] * 150 - 150), data->input[i][1], calc_out[0] * 1000, data->output[i][0] * 1000,
			   error);
		errorSum += error;
	}

	printf("Average Error: %f\n", errorSum / size);
	fann_save(ann, argv[2]);

	/* BUG FIX: the training data was leaked in the original — only the
	 * network was destroyed. */
	fann_destroy_train(data);
	fann_destroy(ann);

	return 0;
}
Beispiel #11
0
/*
 * Builds and configures the benchmark network for the given training
 * data.  The topology is always input -> H_DIM hidden -> output, but
 * the construction path is chosen at compile time:
 *  - MIMO_FANN, OPTIMIZE==0: plain standard network.
 *  - MIMO_FANN, OPTIMIZE==1: built from an explicit layer/neuron
 *    descriptor ("connected_any_any" layers with
 *    "scalar_rprop_sigmoid_symmetric" neurons).
 *  - MIMO_FANN, OPTIMIZE>=2: typed array constructor using the
 *    file-scope layer_type / neuron_type settings.
 *  - !MIMO_FANN: standard network, or sparse when SPARSE is defined.
 * Weight initialization (INIT_WEIGHTS) and the training algorithm
 * (USE_RPROP) are also macro-selected.
 *
 * Returns the newly created network; ownership passes to the caller.
 */
struct fann * setup_net(struct fann_train_data * data)
{
	struct fann *ann;
#if MIMO_FANN
#if OPTIMIZE == 0
	ann = fann_create_standard( 3, data->num_input, H_DIM, data->num_output);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);
#endif

#if OPTIMIZE == 1
	/* Descriptor-based construction: 2 layers after the input. */
	unsigned int i, j;
	struct fann_descr *descr=(struct fann_descr*) calloc(1, sizeof(struct fann_descr));
	fann_setup_descr(descr, 2, data->num_input);

	/* Layer 0: hidden layer, each neuron has H_DIM outputs. */
	i=0;

	fann_setup_layer_descr(
					&(descr->layers_descr[i]),
					"connected_any_any",
					1,
					NULL
					);

		for (j=0; j< descr->layers_descr[i].num_neurons; j++)
		{
						fann_setup_neuron_descr(
							descr->layers_descr[i].neurons_descr+j,
							H_DIM,
							"scalar_rprop_sigmoid_symmetric",
							NULL
							);
		}

	/* Layer 1: output layer, each neuron has num_output outputs. */
	i=1;

	fann_setup_layer_descr(
					&(descr->layers_descr[i]),
					"connected_any_any",
					1,
					NULL
					);

		for (j=0; j< descr->layers_descr[i].num_neurons; j++)
		{
						fann_setup_neuron_descr(
							descr->layers_descr[i].neurons_descr+j,
							data->num_output,
							"scalar_rprop_sigmoid_symmetric",
							NULL
							);
		}
	ann = fann_create_from_descr( descr );
#endif

#if OPTIMIZE >= 2
	{
		unsigned int layers[] = { data->num_input, H_DIM, data->num_output };
		/*char *type;
		asprintf(&type, "%s_%s_%s", vals(implementation), vals(algorithm), vals(activation));*/

		ann = fann_create_standard_array_typed(layer_type, neuron_type, 3,  layers);

	}
#endif
#else /*MIMO_FANN*/

#ifdef SPARSE
	ann = fann_create_sparse( SPARSE, 3, data->num_input, H_DIM, data->num_output);
#else
	ann = fann_create_standard( 3, data->num_input, H_DIM, data->num_output);
#endif
	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

#endif /*MIMO_FANN*/ 

	/* Common configuration, independent of the construction path. */
	fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
	fann_set_bit_fail_limit(ann, 0.01f);
	fann_set_activation_steepness_hidden(ann, 1);
	fann_set_activation_steepness_output(ann, 1);

#if INIT_WEIGHTS == 1
	fann_randomize_weights(ann,0,1);
#endif
#if INIT_WEIGHTS == 2
	/* Widrow-Nguyen initialization based on the training data. */
	fann_init_weights(ann, data);
#endif

#ifdef USE_RPROP
	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);
#else
	fann_set_training_algorithm(ann, FANN_TRAIN_BATCH);
#endif

	return ann;
}
Beispiel #12
0
/*
 * Trains the network on a set of labeled bitmap files and returns the
 * resulting test MSE as a string.
 *
 * InputData maps each file path to the expected recognition result.
 * Each bitmap is analyzed, accumulated into m_TrainData, converted to a
 * FANN training set, and used to initialize (Widrow-Nguyen) and train
 * m_pANN for m_nEpochsCount epochs.  The trained network is saved to
 * NETWORK_FILE_NAME and m_bIsNetworkTeached is set.
 *
 * Throws std::runtime_error if the training set cannot be built.
 */
const _tstring CNeuroNetwok::Teach(const std::vector< std::pair < _tstring, bool > >& InputData)
{
	// InputData holds file paths mapped to the result expected when the
	// file is recognized.  Load the data for each file.
	std::vector< std::pair < _tstring, bool > >::const_iterator it = InputData.begin();
	const std::vector< std::pair < _tstring, bool > >::const_iterator itEnd = InputData.end();
	for (; it != itEnd; ++it)
	{
		// File path
		const _tstring& sPath = it->first;

		// Expected result
		const bool bResult = it->second;

		// Image data
		std::list< float > BmpData;

		// Extract the data at 0 degrees rotation
		AnalizeBMP(sPath, BmpData);

		// Store the sample
		m_TrainData.push_back(std::pair< std::list< float >, bool > (BmpData, bResult));

		/*
		// Extract the data at 90 degrees rotation
		AnalizeBMP(sPath, BmpData, 90);

		// Store the sample
		m_TrainData.push_back(std::pair< std::list< float >, bool > (BmpData, bResult));

		// Extract the data at 180 degrees rotation
		AnalizeBMP(sPath, BmpData, 180);

		// Store the sample
		m_TrainData.push_back(std::pair< std::list< float >, bool > (BmpData, bResult));

		// Extract the data at 270 degrees rotation
		AnalizeBMP(sPath, BmpData, 270);

		// Store the sample
		m_TrainData.push_back(std::pair< std::list< float >, bool > (BmpData, bResult));*/
	}

	// The accumulated samples now have to be converted into the data
	// structure the fann library expects for training.
	boost::scoped_ptr< fann_train_data > pTrainData(MakeTrainData(m_TrainData));

	if (!pTrainData)
		throw std::runtime_error("Failed to make train data!");
	
#ifdef _DEBUG
	// For debugging
	fann_save_train(pTrainData.get(), "debug_data.dat");
#endif

	// Initialize the connection weights (Widrow-Nguyen)
	fann_init_weights(m_pANN, pTrainData.get());

	const float fDesiredError = (const float) 0;
	const unsigned int nEpochsBetweenReports = 10;

	// Train the network
	fann_train_on_data(m_pANN, pTrainData.get(), m_nEpochsCount, nEpochsBetweenReports, fDesiredError);	

	// Save the network
	fann_save(m_pANN, NETWORK_FILE_NAME);

	// Test the network and return the result
	m_bIsNetworkTeached = true;
	return boost::lexical_cast< _tstring > (fann_test_data(m_pANN, pTrainData.get()));
}
Beispiel #13
0
/*
 * Cascade-correlation training demo: grows a shortcut network on
 * train.dat, reports train/test MSE and bit-fail counts, and saves the
 * result as "cascaded.net".  Uses file-scope globals (train_data,
 * test_data, ann, steepness, activation, multi, max_neurons,
 * desired_error, histfile, cascade_callback, ...) shared with the rest
 * of the file.  The many commented-out scaling/steepness experiments of
 * the original have been removed for readability.
 */
int main(int argc, char **argv)
{
    unlink(histfile);
    srand(time(NULL));

    /* Load the data sets and keep a pristine copy of the training set. */
    train_data = fann_read_train_from_file("train.dat");
    test_data = fann_read_train_from_file("test.dat");
    cln_train_data = fann_duplicate_train_data(train_data);

    printf("Creating cascaded network.\n");
    ann = fann_create_shortcut(2,
                               fann_num_input_train_data(train_data),
                               fann_num_output_train_data(train_data));
    fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);
    fann_set_activation_function_hidden(ann, FANN_SIGMOID);
    fann_set_activation_function_output(ann, FANN_SIGMOID);
    fann_set_train_error_function(ann, FANN_ERRORFUNC_LINEAR);
    fann_set_callback(ann, cascade_callback);

    if (!multi)
    {
        /* Candidate configuration for single-group mode.  The steepness
         * values are prepared but not applied (the corresponding call is
         * disabled, as in the original); only the five candidate
         * activation functions are installed. */
        steepness[0] = 0.9;
        steepness[1] = 1.0;

        activation[0] = FANN_SIGMOID;
        activation[1] = FANN_LINEAR_PIECE;
        activation[2] = FANN_ELLIOT;
        activation[3] = FANN_COS;
        activation[4] = FANN_SIN;
        fann_set_cascade_activation_functions(ann, activation, 5);
    }

    /* Widrow-Nguyen initialization from the training data. */
    fann_init_weights(ann, train_data);

    printf("Training network.\n");
    fann_cascadetrain_on_data(ann, train_data, max_neurons, 1, desired_error);
    fann_print_connections(ann);

    /* Evaluate on both sets; fann_get_bit_fail reflects the preceding
     * fann_test_data call. */
    mse_train = fann_test_data(ann, train_data);
    bit_fail_train = fann_get_bit_fail(ann);
    mse_test = fann_test_data(ann, test_data);
    bit_fail_test = fann_get_bit_fail(ann);
    printf("\nTrain error: %.08f, Train bit-fail: %d, Test error: %.08f, Test bit-fail: %d\n\n",
           mse_train, bit_fail_train, mse_test, bit_fail_test);

    printf("Saving cascaded network.\n");
    fann_save(ann, "cascaded.net");

    fann_destroy_train(train_data);
    fann_destroy_train(test_data);
    fann_destroy(ann);
    return 0;

}