Example #1
int FANN_API cascade_callback
( struct fann *ann, struct fann_train_data *train,
  unsigned int max_epochs, unsigned int epochs_between_reports,
  float desired_error, unsigned int epochs )
{

    mse_train = fann_test_data ( ann, train_data );
    bit_fail_train = fann_get_bit_fail ( ann );
    mse_test = fann_test_data ( ann, test_data );
    bit_fail_test = fann_get_bit_fail ( ann );


    if (mse_test<min_mse_test)
    {
        fann_save ( ann, "cascaded-test.net" );
        min_mse_test=mse_test;
        lowest_test_mse_epoch=epochs;
    }

    if (mse_train<min_mse_train)
    {
        fann_save ( ann, "cascaded.net" );
        min_mse_train=mse_train;
    }

    plot((double)epochs,mse_train,mse_test,train_perc/100,test_perc/100);

    // if ( prev_mse < mse_test && last_bads++>=3 )
    // {

    // do
    // {
    // func_num=func_num+rand() %6;
    // activation[0] = ( enum fann_activationfunc_enum ) func_num;
    // fann_set_cascade_activation_functions ( ann, activation, 1 );
    // printf ( "\n   Over-fitting. new func %s", FANN_ACTIVATIONFUNC_NAMES[func_num] );
    // }
    // while ( fann_get_errno ( ( struct fann_error* ) ann ) == 12 );

    // last_bads=0;
    // func_num=0;
    // }
    // else if ( last_bads>=1 && prev_mse > mse_test )
    // last_bads--;

    // prev_mse = mse_test;
    ftest_data();
    printf
    ( "\n %5d %4d %.08f %5.2f%% (%.08f) | %.08f %5.2f%% (%.08f e=%d) | %-4d  %-4d %.2lf %s",
      epochs, ann->total_neurons, mse_train,train_perc, min_mse_train, mse_test, test_perc,min_mse_test, lowest_test_mse_epoch, bit_fail_train,
      bit_fail_test,
      ( ann->last_layer - 2 )->first_neuron->activation_steepness,
      FANN_ACTIVATIONFUNC_NAMES[ ( ann->last_layer -
                                   2 )->first_neuron->activation_function] );

    //  fann_save ( ann, "cascaded.net" );
    jitter_train(train, cln_train_data);
    return 0;

}
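Example #2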
int main()
{
	fann_type *calc_out;
	const unsigned int num_input = 2;
	const unsigned int num_output = 1;
	const unsigned int num_layers = 3;
	const unsigned int num_neurons_hidden = 9;
	const float desired_error = (const float) 0;
	const unsigned int max_epochs = 500000;
	const unsigned int epochs_between_reports = 1000;
	struct fann *ann;
	struct fann_train_data *data;

	unsigned int i = 0;
	unsigned int decimal_point;

	printf("Creating network.\n");
	ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output);

	data = fann_read_train_from_file("osyslec_train.data");

	fann_set_activation_steepness_hidden(ann, 1);
	fann_set_activation_steepness_output(ann, 1);

	fann_set_activation_function_hidden(ann, FANN_SIGMOID);
	fann_set_activation_function_output(ann, FANN_SIGMOID);

	fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
	fann_set_bit_fail_limit(ann, 0.01f);

	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);

	fann_init_weights(ann, data);
	
	printf("Training network.\n");
	fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error);

	printf("Testing network. %f\n", fann_test_data(ann, data));

	for(i = 0; i < fann_length_train_data(data); i++)
	{
		calc_out = fann_run(ann, data->input[i]);
		printf("GG test (%f,%f) -> %f, should be %f, difference=%f\n",
			   data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0],
			   fann_abs(calc_out[0] - data->output[i][0]));
	}

	printf("Saving network.\n");

	fann_save(ann, "osyslec_train_float.net");

	decimal_point = fann_save_to_fixed(ann, "osyslec_train_fixed.net");
	fann_save_train_to_fixed(data, "osyslec_train_fixed.data", decimal_point);

	printf("Cleaning up.\n");
	fann_destroy_train(data);
	fann_destroy(ann);

	return 0;
}
Example #3
int main()
{
	struct fann *ann;
	struct fann_train_data *train_data, *test_data;
	const float desired_error = (const float) 0.001;
	unsigned int max_neurons = 40;
	unsigned int neurons_between_reports = 1;

	printf("Reading data.\n");

	train_data = fann_read_train_from_file("../benchmarks/datasets/two-spiral.train");
	test_data = fann_read_train_from_file("../benchmarks/datasets/two-spiral.test");

	fann_scale_train_data(train_data, 0, 1);
	fann_scale_train_data(test_data, 0, 1);

	printf("Creating network.\n");

	ann = fann_create_shortcut(2, fann_num_input_train_data(train_data), fann_num_output_train_data(train_data));

	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_LINEAR_PIECE);
	fann_set_train_error_function(ann, FANN_ERRORFUNC_LINEAR);

	fann_print_parameters(ann);

	printf("Training network.\n");

	fann_cascadetrain_on_data(ann, train_data, max_neurons, neurons_between_reports, desired_error);

	fann_print_connections(ann);

	printf("\nTrain error: %f, Test error: %f\n\n", fann_test_data(ann, train_data),
		   fann_test_data(ann, test_data));

	printf("Saving network.\n");

	fann_save(ann, "two_spirali.net");

	printf("Cleaning up.\n");
	fann_destroy_train(train_data);
	fann_destroy_train(test_data);
	fann_destroy(ann);

	return 0;
}
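Example #4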
void NeuralNet::runNet(char* ptrDataFileName){
    struct fann_train_data *ptrDataTest = fann_read_train_from_file(ptrDataFileName);
    fann_reset_MSE(this->ptrNeuralNet);
    fann_test_data(this->ptrNeuralNet, ptrDataTest);
    printf("Mean Square Error: %f\n", fann_get_MSE(this->ptrNeuralNet));

    fann_type *calc_out;
    for(int i = 0; i < fann_length_train_data(ptrDataTest); i++){
        calc_out = fann_run(this->ptrNeuralNet, ptrDataTest->input[i]);
        cout << "Sample testing:  "<< calc_out[0] << " " << ptrDataTest->output[i][0] << " " << fann_abs(calc_out[0] - ptrDataTest->output[i][0]) << endl;
    }


    fann_destroy_train(ptrDataTest);
}
Example #5
int main()
{
	const unsigned int num_layers = 3;
	const unsigned int num_neurons_hidden = 96;
	const float desired_error = (const float) 0.001;
	struct fann *ann;
	struct fann_train_data *train_data, *test_data;

	float momentum;

	train_data = fann_read_train_from_file("../benchmarks/datasets/robot.train");
	test_data = fann_read_train_from_file("../benchmarks/datasets/robot.test");

	for ( momentum = 0.0; momentum < 0.7; momentum += 0.1 )
	{
		printf("============= momentum = %f =============\n", momentum);

		ann = fann_create_standard(num_layers,
						train_data->num_input, num_neurons_hidden, train_data->num_output);

		fann_set_training_algorithm(ann, FANN_TRAIN_INCREMENTAL);

		fann_set_learning_momentum(ann, momentum);

		fann_train_on_data(ann, train_data, 2000, 500, desired_error);

		printf("MSE error on train data: %f\n", fann_test_data(ann, train_data));
		printf("MSE error on test data : %f\n", fann_test_data(ann, test_data));

		fann_destroy(ann);
	}

	fann_destroy_train(train_data);
	fann_destroy_train(test_data);
	return 0;
}
Example #6
/*

Create the best possible .net file (trained network),
based on tests run during training,
for the number of hidden neurons chosen as input.

*/
void train(struct fann *ann, char* trainFile, char *testFile, char *netFile , unsigned int max_epochs, unsigned int epochs_between_reports, float desired_error, const unsigned int num_neurons_hidden) {
        
	struct fann_train_data *trainData, *testData;
	struct fann *annBest = fann_copy(ann);
	float error;
	unsigned int i;
	char buffer[1024];
	float testError = 1;
	float testErrorBest = 1;

	trainData = fann_read_train_from_file(trainFile);
	testData = fann_read_train_from_file(testFile);
	
	for(i = 1; i <= max_epochs; i++){

		fann_shuffle_train_data(trainData); // shuffle the training data
		error = fann_train_epoch(ann, trainData); // run one epoch; error is the training MSE

		// Every epochs_between_reports epochs (or once the desired error is reached)
		if(i % epochs_between_reports == 0 || error < desired_error){

			fann_test_data(ann, testData); // evaluate the network on the test data

			testError = fann_get_MSE(ann);
			if (testError < testErrorBest) {
				testErrorBest = testError;
				fann_destroy(annBest); // free the previous best copy before replacing it
				annBest = fann_copy(ann);
				printf("Epochs     %8d; trainError : %f; testError : %f;\n", i, error, testError);
				sprintf(buffer, "%s_%u_%d.net", netFile, num_neurons_hidden, i);
				fann_save(annBest, buffer);
			}
		}
		if(error < desired_error){
			break;
		}
	}
	sprintf(buffer,"%s_%u.net",netFile,num_neurons_hidden);
	fann_save(annBest, buffer);
	fann_destroy(annBest);
	fann_destroy_train(trainData);
	fann_destroy_train(testData);
}
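A minimal sketch of how this helper might be driven (assuming FANN 2.x; the topology, file names and limits below are illustrative placeholders, not taken from the original project):

#include "fann.h"

int main(void)
{
	const unsigned int num_neurons_hidden = 16;
	/* placeholder file names -- replace with the real training/test sets */
	char train_file[] = "my.train", test_file[] = "my.test", net_prefix[] = "mynet";

	/* 2 inputs, one hidden layer, 1 output -- adjust to match the data files */
	struct fann *ann = fann_create_standard(3, 2, num_neurons_hidden, 1);

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

	/* up to 500 epochs, test and report every 5 epochs, stop at a training MSE of 0.001 */
	train(ann, train_file, test_file, net_prefix, 500, 5, 0.001f, num_neurons_hidden);

	fann_destroy(ann);
	return 0;
}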
Example #7
/*! ann:test_data(train)
 *# Runs the network through the training data in {{train}} and
 *# returns the MSE.
 *x mse = ann:test_data(train)
 *-
 */
static int ann_test_data(lua_State *L)
{
	struct fann **ann;
	struct fann_train_data **train;
	fann_type answer;

	if(lua_gettop(L) < 2)
		luaL_error(L, "insufficient parameters");

	ann = luaL_checkudata(L, 1, FANN_METATABLE);
	luaL_argcheck(L, ann != NULL, 1, "'neural net' expected");

	train = luaL_checkudata(L, 2, FANN_TRAIN_METATABLE);
	luaL_argcheck(L, train != NULL, 2, "'training data' expected");

#ifdef FANN_VERBOSE
	printf("Testing network on training data...\n");
#endif

	answer = fann_test_data(*ann, *train);

	lua_pushnumber(L, answer);
	return 1;
}
Example #8
int ftest_data(void)
{
    //	sar_start_epoch=0;
    //  printf("\r\n\r\n--------------------------------------------------------------------------------");

    double val_2[10];
    fann_type *calc_out2;
    unsigned calc2;
    int curi=0;
    unsigned fails=0,success=0;
    double perc=0;
    double minv=9,maxv=-1;
    int i;
    int minat=0,maxat=0;

    test_mse=fann_test_data(ann,test_data);

    for (curi=0;curi<fann_length_train_data(train_data);curi++)
    {

        calc2=curi;//rand()%(fann_length_train_data(train_data)-1);
        //printf("\r\ntesting %u %u ",calc1,calc2);
        //fann_scale_input(ann, test_data->input[calc1]);
        //fann_scale_input(ann, train_data->input[calc2]);
        //	fann_scale_output(ann, test_data->input[calc1]);

        //fann_scale_input(ann, train_data->input[calc2]);
        calc_out2 = fann_run(ann, train_data->input[calc2]);
        //	fann_descale_output(ann,calc_out2);

        memcpy(&val_2,  calc_out2, sizeof(double)*3);





        minv=9;
        maxv=-1;
        for (i=0;i<train_data->num_output;i++)
        {
            if ((double)calc_out2[i]<minv)
            {
                minv=val_2[i];
                minat=i;
            }
            if ((double)calc_out2[i]>maxv)
            {
                maxv=val_2[i];
                maxat=i;
            }
        }

        int ok=0;
        ok=0;
        for (i=0;i<train_data->num_output;i++)
            if (train_data->output[calc2][i]==1&&maxat==i)
                ok=1;

        if (ok)success++;
        else
            fails++;

    }
    train_perc=((double)success/(double)fann_length_train_data(train_data))*100.0f;
    /*   printf(" fails %5u success %5u (%5.2f%%) ",
             fails,success,train_perc
            ); */

    fails=0;
    success=0;
    unsigned failed_classes[10];

    for (curi=0;curi<test_data->num_output;curi++)
        failed_classes[curi]=0;

    int nfunc=0;
    double train_thr_mse=0;


    nfunc=fann_get_activation_function(ann, 3, 0);
    fann_type stpns;
    stpns=fann_get_activation_steepness(ann,1,0);
    //	printf("\r\n%f",diff_mse*0.1f);
    //fann_set_activation_steepness_layer(ann, 0.3f, 1);
    //fann_set_activation_function_layer(ann,FANN_THRESHOLD_SYMMETRIC,3);




    for (curi=0;curi<fann_length_train_data(test_data);curi++)
    {

        calc2=curi;//rand()%(fann_length_train_data(train_data)-1);
        //printf("\r\ntesting %u %u ",calc1,calc2);
        //fann_scale_input(ann, test_data->input[calc1]);
        //fann_scale_input(ann, train_data->input[calc2]);
        //	fann_scale_output(ann, test_data->input[calc1]);

        //fann_scale_input(ann, train_data->input[calc2]);
        calc_out2 = fann_run(ann, test_data->input[calc2]);
        //	fann_descale_output(ann,calc_out2);

        memcpy(&val_2,  calc_out2, sizeof(double)*3);





        minv=9;
        maxv=-1;
        for (i=0;i<test_data->num_output;i++)
        {
            if (val_2[i]<minv)
            {
                minv=val_2[i];
                minat=i;
            }
            if (val_2[i]>maxv)
            {
                maxv=val_2[i];
                maxat=i;
            }
        }

        int ok=0;
        ok=0;
        for (i=0;i<test_data->num_output;i++)
        {
            if (test_data->output[calc2][i]==1&&maxat==i)
                ok=1;
            else if (test_data->output[calc2][i]==1&&maxat!=i)
                failed_classes[i]++;
        }

        if (ok)success++;
        else
            fails++;

    }
    test_perc=((double)success/(double)fann_length_train_data(test_data))*100.0f;
    /*   printf(" fails %5u success %5u (%5.2f%%) [fails: ",
             fails,success,test_perc
            );
      for (curi=0;curi<test_data->num_output;curi++)
          printf("%4u ",failed_classes[curi]);
      printf("] "); */
    // fann_set_activation_function_hidden ( ann,  rand()*0.81);
    // printf("\r\n rpropfact dec/inc r %.5f %.5f lr %.5f mom %.5f",fann_get_rprop_decrease_factor(ann),fann_get_rprop_increase_factor(ann), fann_get_learning_rate ( ann),
    //       fann_get_learning_momentum(ann));

    //	rebuild_functions();

    fann_set_activation_function_layer(ann,nfunc,3);
    fann_set_activation_steepness_layer(ann,stpns, 1);
    return 0;
}
Example #9
int main()
{
	struct fann *ann;
	struct fann_train_data *train_data, *test_data;
	const float desired_error = (const float)0.0;
	unsigned int max_neurons = 30;
	unsigned int neurons_between_reports = 1;
	unsigned int bit_fail_train, bit_fail_test;
	float mse_train, mse_test;
	unsigned int i = 0;
	fann_type *output;
	fann_type steepness;
	int multi = 0;
	enum fann_activationfunc_enum activation;
	enum fann_train_enum training_algorithm = FANN_TRAIN_RPROP;
	
	printf("Reading data.\n");
	 
	train_data = fann_read_train_from_file("../benchmarks/datasets/parity8.train");
	test_data = fann_read_train_from_file("../benchmarks/datasets/parity8.test");

	fann_scale_train_data(train_data, -1, 1);
	fann_scale_train_data(test_data, -1, 1);
	
	printf("Creating network.\n");
	
	ann = fann_create_shortcut(2, fann_num_input_train_data(train_data), fann_num_output_train_data(train_data));
		
	fann_set_training_algorithm(ann, training_algorithm);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_LINEAR);
	fann_set_train_error_function(ann, FANN_ERRORFUNC_LINEAR);
	
	if(!multi)
	{
		/*steepness = 0.5;*/
		steepness = 1;
		fann_set_cascade_activation_steepnesses(ann, &steepness, 1);
		/*activation = FANN_SIN_SYMMETRIC;*/
		activation = FANN_SIGMOID_SYMMETRIC;
		
		fann_set_cascade_activation_functions(ann, &activation, 1);		
		fann_set_cascade_num_candidate_groups(ann, 8);
	}	
		
	if(training_algorithm == FANN_TRAIN_QUICKPROP)
	{
		fann_set_learning_rate(ann, 0.35);
		fann_randomize_weights(ann, -2.0,2.0);
	}
	
	fann_set_bit_fail_limit(ann, 0.9);
	fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
	fann_print_parameters(ann);
		
	fann_save(ann, "cascade_train2.net");
	
	printf("Training network.\n");

	fann_cascadetrain_on_data(ann, train_data, max_neurons, neurons_between_reports, desired_error);
	
	fann_print_connections(ann);
	
	mse_train = fann_test_data(ann, train_data);
	bit_fail_train = fann_get_bit_fail(ann);
	mse_test = fann_test_data(ann, test_data);
	bit_fail_test = fann_get_bit_fail(ann);
	
	printf("\nTrain error: %f, Train bit-fail: %d, Test error: %f, Test bit-fail: %d\n\n", 
		   mse_train, bit_fail_train, mse_test, bit_fail_test);
	
	for(i = 0; i < train_data->num_data; i++)
	{
		output = fann_run(ann, train_data->input[i]);
		if((train_data->output[i][0] >= 0 && output[0] <= 0) ||
		   (train_data->output[i][0] <= 0 && output[0] >= 0))
		{
			printf("ERROR: %f does not match %f\n", train_data->output[i][0], output[0]);
		}
	}
	
	printf("Saving network.\n");
	
	fann_save(ann, "cascade_train.net");
	
	printf("Cleaning up.\n");
	fann_destroy_train(train_data);
	fann_destroy_train(test_data);
	fann_destroy(ann);
	
	return 0;
}
Example #10
int main()
{
	fann_type *calc_out;

	const unsigned int num_input = 22500;
	const unsigned int num_output = 1;

	//const unsigned int num_layers = 4;
	const unsigned int num_layers = 4;

	/* this value can be changed to tweak the network */
	const unsigned int num_neurons_hidden = 50;
	//const unsigned int num_neurons_hidden = 150;

	const float desired_error = (const float) 0.02;

	const unsigned int max_epochs = 15000;
	const unsigned int epochs_between_reports = 20;

	float learning_rate = .5;

	struct fann *ann;
	struct fann_train_data *data;

	int num_neurons = 0;
	unsigned int i = 0;
	unsigned int decimal_point;

	/* CREATING NETWORK */
	ann = fann_create_standard(num_layers, num_input,
							   num_neurons_hidden,
							   num_neurons_hidden, num_output);

	/* reading training data */
	data = fann_read_train_from_file("training.data");

	fann_set_activation_steepness_hidden(ann, 1);
	fann_set_activation_steepness_output(ann, 1);

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

	fann_init_weights(ann, data);

	/*
	   TRAINING NETWORK
	   run x epochs at learn rate .y
	*/

	//fann_set_learning_rate(ann, .7);
	//fann_train_on_data(ann, data, 200, epochs_between_reports, .4);
	//fann_train_on_data(ann, data, 500, epochs_between_reports, .002);

	fann_set_learning_rate(ann, .5);
	fann_train_on_data(ann, data, 5000, epochs_between_reports, .2);
	//fann_train_on_data(ann, data, 50, epochs_between_reports, .2);

	fann_set_learning_rate(ann, .2);
	fann_train_on_data(ann, data, 1000, epochs_between_reports, .15);
	//fann_train_on_data(ann, data, 100, epochs_between_reports, .15);

	fann_set_learning_rate(ann, .1);
	fann_train_on_data(ann, data, 5000, epochs_between_reports, .002);
	//fann_train_on_data(ann, data, 200, epochs_between_reports, .00002);

	/* TESTING NETWORK */
	printf("Testing network. %f\n", fann_test_data(ann, data));

	for(i = 0; i < fann_length_train_data(data); i++)
	{
		calc_out = fann_run(ann, data->input[i]);
		/*printf("%f, should be %f, difference=%f\n",
			   calc_out[0], data->output[i][0],
			   fann_abs(calc_out[0] - data->output[i][0])); */
	}

	/* SAVING NETWORK */
	fann_save(ann, "image_spam.net");

	/* CLEANING UP */
	fann_destroy_train(data);
	fann_destroy(ann);

	return 0;
}
Example #11
int main(int argc,char **argv)
{
    unlink(histfile);
    srand ( time ( NULL ) );
    // printf ( "Reading data.\n" );
    train_data = fann_read_train_from_file ( "train.dat" );
    test_data = fann_read_train_from_file ( "test.dat" );
//   signal ( 2, sig_term );

    //  fann_scale_train_data ( train_data, 0, 1.54 );
    // fann_scale_train_data ( test_data, 0, 1.54 );
    //cln_test_data=fann_duplicate_train_data(test_data);
    cln_train_data=fann_duplicate_train_data(train_data);


    printf ( "Creating cascaded network.\n" );
    ann =
        fann_create_shortcut ( 2, fann_num_input_train_data ( train_data ),
                               fann_num_output_train_data ( train_data ) );
    fann_set_training_algorithm ( ann, FANN_TRAIN_RPROP );
    fann_set_activation_function_hidden ( ann, FANN_SIGMOID );
    fann_set_activation_function_output ( ann, FANN_SIGMOID);
    fann_set_train_error_function ( ann, FANN_ERRORFUNC_LINEAR );

    //  if (fann_set_scaling_params(ann, train_data,-1.0f,1.0f,0.0f, 1.0f)==-1)
    //    printf("set scaling error: %s\n",fann_get_errno((struct fann_error*)ann));

    //    fann_scale_train_input(ann,train_data);
    // fann_scale_output_train_data(train_data,0.0f,1.0f);
//	   fann_scale_input_train_data(train_data, -1.0,1.0f);
    // fann_scale_output_train_data(test_data,-1.0f,1.0f);
    // fann_scale_input_train_data(test_data, -1.0,1.0f);
//fann_scale_train(ann,train_data);
    //  fann_scale_train(ann,weight_data);
    //  fann_scale_train(ann,test_data);
    /*
     * fann_set_cascade_output_change_fraction(ann, 0.1f);
     *  ;
     * fann_set_cascade_candidate_change_fraction(ann, 0.1f);
     *
     */


    //  fann_set_cascade_output_stagnation_epochs ( ann, 180 );

    //fann_set_cascade_weight_multiplier ( ann, ( fann_type ) 0.1f );


    fann_set_callback ( ann, cascade_callback );
    if ( !multi )
    {

        /*  */
        //  steepness[0] = 0.22;
        steepness[0] = 0.9;
        steepness[1] = 1.0;

        /*
         * steepness[1] = 0.55;
         *  ;
         * steepness[1] = 0.33;
         *  ;
         * steepness[3] = 0.11;
         *  ;
         * steepness[1] = 0.01;
         *
         */

        /*
         *  steepness = 0.5;
         *
         */
        // fann_set_cascade_activation_steepnesses ( ann, steepness, 2);

        /*
         * activation = FANN_SIN_SYMMETRIC;
         */

        /*
         * activation[0] = FANN_SIGMOID;
         *
         */
        activation[0] = FANN_SIGMOID;

        /*
         * activation[2] = FANN_ELLIOT_SYMMETRIC;
         *
         */
        activation[1] = FANN_LINEAR_PIECE;

        /*
         * activation[4] = FANN_GAUSSIAN_SYMMETRIC;
         *  ;
         * activation[5] = FANN_SIGMOID;
         *
         */
        activation[2] = FANN_ELLIOT;
        activation[3] = FANN_COS;
        /*
         *
         *
         */
        activation[4] = FANN_SIN;
        fann_set_cascade_activation_functions ( ann, activation, 5);
        /*   fann_set_cascade_num_candidate_groups ( ann,
                                                  fann_num_input_train_data
                                                  ( train_data ) ); */

    }
    else
    {

        /*
         * fann_set_cascade_activation_steepnesses(ann, &steepness, 0.75);
         *
         */
        // fann_set_cascade_num_candidate_groups ( ann, 1 );

    }

    /* TODO: weight mult > 0.01 */
    /*  if ( training_algorithm == FANN_TRAIN_QUICKPROP )
      {
          fann_set_learning_rate ( ann, 0.35f );


      }
      else
      {
          fann_set_learning_rate ( ann, 0.7f );

      }
      fann_set_bit_fail_limit ( ann, ( fann_type ) 0.9f );*/

    /*
     * fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
     *
     */

    //fann_scale_output_train_data(train_data,0.0f,1.0f);
    //fann_scale_input_train_data(train_data, -1.0f,1.0f);
//	fann_scale_output_train_data(test_data, 0.0f,1.0f);
    //fann_scale_input_train_data(test_data, -1.0f,1.0f);

    // fann_randomize_weights ( ann, -0.2f, 0.2f );
    fann_init_weights ( ann, train_data );



    printf ( "Training network.\n" );
    fann_cascadetrain_on_data ( ann, train_data, max_neurons,
                                1, desired_error );
    fann_print_connections ( ann );
    mse_train = fann_test_data ( ann, train_data );
    bit_fail_train = fann_get_bit_fail ( ann );
    mse_test = fann_test_data ( ann, test_data );
    bit_fail_test = fann_get_bit_fail ( ann );
    printf
    ( "\nTrain error: %.08f, Train bit-fail: %d, Test error: %.08f, Test bit-fail: %d\n\n",
      mse_train, bit_fail_train, mse_test, bit_fail_test );

    printf ( "Saving cascaded network.\n" );
    fann_save ( ann, "cascaded.net" );
    //  printf ( "Cleaning up.\n" );
    fann_destroy_train ( train_data );
    fann_destroy_train ( test_data );
    fann_destroy ( ann );
    return 0;

}
Example #12
int main() {

	printf("Reading XML.. .. ..\n");
	ezxml_t f1 = ezxml_parse_file("test.xml"), classification, temp, algo, temp2;
	 
	classification = ezxml_child(f1, "classification");
	temp = ezxml_child(classification, "algorithm");
	algo = ezxml_child(temp, "MultiLayerPerceptron");

	const unsigned int num_input = atoi(ezxml_child(classification, "input")->txt);
	const unsigned int num_output = atoi(ezxml_child(classification, "output")->txt);
	const unsigned int num_layers = atoi(ezxml_child(classification, "numberOfLayers")->txt);
	const unsigned int num_neurons_hidden = atoi(ezxml_child(algo, "hiddenNeurons")->txt);
	const float desired_error = (const float) (atof(ezxml_child(algo, "desiredError")->txt));
	const unsigned int max_epochs = atoi(ezxml_child(algo, "maxEpochs")->txt);
	const unsigned int epochs_between_reports = atoi(ezxml_child(algo, "epochsBetweenReports")->txt);

	fann_type *calc_out;
	
	struct fann *ann;
	struct fann_train_data *data;

	unsigned int i = 0;
	unsigned int decimal_point;

	printf("Creating network.\n");
	ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output);

	data = fann_read_train_from_file(ezxml_child(classification, "datafile")->txt);

	fann_set_activation_steepness_hidden(ann, atof(ezxml_child(algo, "hiddenActivationSteepness")->txt));
	fann_set_activation_steepness_output(ann, atof(ezxml_child(algo, "outputActivationSteepness")->txt));

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);
	
	temp2 = ezxml_child(algo, "trainStopFuction");
	const char *stopFunc = temp2->txt;
	if(strcmp(stopFunc, "FANN_STOPFUNC_BIT") == 0){
		fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
	} else {
		fann_set_train_stop_function(ann, FANN_STOPFUNC_MSE);
	}
	fann_set_bit_fail_limit(ann, 0.01f);

	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);

	fann_init_weights(ann, data);
	
	printf("Training network.\n");
	fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error);

	printf("Testing network. %f\n", fann_test_data(ann, data));

	for(i = 0; i < fann_length_train_data(data); i++)
	{
		calc_out = fann_run(ann, data->input[i]);
		printf("Test Results (%f,%f,%f) -> %f, should be %f, difference=%f\n",
			   data->input[i][0], data->input[i][1], data->input[i][2], calc_out[0], data->output[i][0],
			   fann_abs(calc_out[0] - data->output[i][0]));
	}

	printf("Saving network.\n");

	fann_save(ann, "xor_float.net");

	decimal_point = fann_save_to_fixed(ann, "xor_fixed.net");
	fann_save_train_to_fixed(data, "xor_fixed.data", decimal_point);

	printf("Cleaning up.\n");
	fann_destroy_train(data);
	fann_destroy(ann);

	ezxml_free(f1);

	return 0;
}
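For reference, a test.xml layout consistent with the ezxml_child calls above could look like the sketch below (kept as a C comment; only the element names come from the code, while the values and the outer root tag are illustrative):

/*
<config>
    <classification>
        <input>3</input>
        <output>1</output>
        <numberOfLayers>3</numberOfLayers>
        <datafile>train.data</datafile>
        <algorithm>
            <MultiLayerPerceptron>
                <hiddenNeurons>5</hiddenNeurons>
                <desiredError>0.001</desiredError>
                <maxEpochs>500000</maxEpochs>
                <epochsBetweenReports>1000</epochsBetweenReports>
                <hiddenActivationSteepness>1</hiddenActivationSteepness>
                <outputActivationSteepness>1</outputActivationSteepness>
                <trainStopFuction>FANN_STOPFUNC_BIT</trainStopFuction>
            </MultiLayerPerceptron>
        </algorithm>
    </classification>
</config>
*/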
Example #13
/*
arguments (all required):
 - data filename
 - topology, as number of neurons per layer separated by dashes
 - epochs (integer)
 - learning rate (0.0-1.0 float)
 - output filename
*/
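/* A hypothetical invocation matching the argument list above (program name,
   file names and numbers are illustrative only):
       ./train_net data.train 2-8-1 1000 0.7 out.net
   i.e. 2 inputs, one hidden layer of 8 neurons, 1 output, 1000 epochs,
   learning rate 0.7, trained network written to out.net.
   MAX_LAYERS and DESIRED_ERROR are assumed to be defined elsewhere in the file. */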
int main(int argc, char **argv)
{
    // Argument 1: data filename.
    const char *datafn = argv[1];

    // Argument 2: topology.
    unsigned int layer_sizes[MAX_LAYERS];
    unsigned int num_layers = 0;
    char *token = strtok(argv[2], "-");
    while (token != NULL) {
        layer_sizes[num_layers] = atoi(token);
        ++num_layers;
        token = strtok(NULL, "-");
    }

    // Argument 3: epoch count.
    unsigned int max_epochs = atoi(argv[3]);

    // Argument 4: learning rate.
    float learning_rate = atof(argv[4]);

    // Argument 5: output filename.
    const char *outfn = argv[5];

    struct fann *ann;
	ann = fann_create_standard_array(num_layers, layer_sizes);

    // Misc parameters.
    fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);
	fann_set_activation_steepness_hidden(ann, 0.5);
	fann_set_activation_steepness_output(ann, 0.5);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID);
	fann_set_activation_function_output(ann, FANN_SIGMOID);
    //fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
    //fann_set_bit_fail_limit(ann, 0.01f);

    struct fann_train_data *data;
    data = fann_read_train_from_file(datafn);
	fann_init_weights(ann, data);
	
    fann_set_learning_rate(ann, learning_rate);
	fann_train_on_data(
        ann,
        data,
        max_epochs,
        10,  // epochs between reports
        DESIRED_ERROR
    );

	printf("Testing network. %f\n", fann_test_data(ann, data));

    fann_type *calc_out;
	for(unsigned int i = 0; i < fann_length_train_data(data); ++i)
	{
		calc_out = fann_run(ann, data->input[i]);
	}
	
	printf("RMSE = %f\n", sqrt(fann_get_MSE(ann)));

	fann_save(ann, outfn);

	fann_destroy_train(data);
	fann_destroy(ann);

	return 0;
}
Example #14
bool Trainer::Test(const AnnData& data, float* mse, std::size_t* bit_fail) {
  float mse_tmp = fann_test_data(ann_, data.data());
  BOOST_ASSERT(fabs(mse_tmp - fann_get_MSE(ann_)) < 0.0001);
  return GetMseAndBitFail(ann_, mse, bit_fail);
}
Example #15
int main(int argc, char **argv)
{
	if(argc < 3)
	{
		printf("Usage: train_net <input.train> <output.net>\n");
		exit(-1);
		
	}
	const unsigned int num_input = 2;
	const unsigned int num_output = 1;
	const unsigned int num_layers = 3;
	const unsigned int num_neurons_hidden = 8;
	const float desired_error = (const float) 0.000042;
	const unsigned int max_epochs = 500000;
	const unsigned int epochs_between_reports = 1000;

	struct fann *ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output);

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

// 	fann_set_activation_steepness_hidden(ann, 1);
// 	fann_set_activation_steepness_output(ann, 1);

// 	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
// 	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

// 	fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
 	fann_set_bit_fail_limit(ann, 0.01f);

	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);


	
	//fann_train_on_file(ann, argv[1], max_epochs, epochs_between_reports, desired_error);

	struct fann_train_data *data;
	data = fann_read_train_from_file(argv[1]);
	fann_init_weights(ann, data);

	printf("Training network on data from %s.\n", argv[1]);
	fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error);

	printf("Testing network. %f\n", fann_test_data(ann, data));

	double error, errorSum = 0;
	unsigned int i = 0, size = fann_length_train_data(data);
	fann_type *calc_out;
	for(i = 0; i < size; i++)
	{
		calc_out = fann_run(ann, data->input[i]);
		error = fann_abs(calc_out[0] - data->output[i][0]) * 1000;
		printf("Distance test (%d dBm,%f%%) -> %f meters, should be %f meters, difference=%f meters\n",
			   (int)(data->input[i][0] * 150 - 150), data->input[i][1], calc_out[0] * 1000, data->output[i][0] * 1000,
			   error);
		errorSum += error;
	}

	printf("Average Error: %f\n", errorSum / size);
	fann_save(ann, argv[2]);

	fann_destroy(ann);

	return 0;
}
Example #16
const _tstring CNeuroNetwok::Teach(const std::vector< std::pair < _tstring, bool > >& InputData)
{
	// The input InputData maps file paths to the result that recognition should produce for each file
	// Load the data for each file
	std::vector< std::pair < _tstring, bool > >::const_iterator it = InputData.begin();
	const std::vector< std::pair < _tstring, bool > >::const_iterator itEnd = InputData.end();
	for (; it != itEnd; ++it)
	{
		// Path
		const _tstring& sPath = it->first;

		// Expected result
		const bool bResult = it->second;

		// Data
		std::list< float > BmpData;

		// Get the data for a 0-degree rotation
		AnalizeBMP(sPath, BmpData);

		// Add the data
		m_TrainData.push_back(std::pair< std::list< float >, bool > (BmpData, bResult));

		/*
		// Get the data for a 90-degree rotation
		AnalizeBMP(sPath, BmpData, 90);

		// Add the data
		m_TrainData.push_back(std::pair< std::list< float >, bool > (BmpData, bResult));

		// Get the data for a 180-degree rotation
		AnalizeBMP(sPath, BmpData, 180);

		// Add the data
		m_TrainData.push_back(std::pair< std::list< float >, bool > (BmpData, bResult));

		// Get the data for a 270-degree rotation
		AnalizeBMP(sPath, BmpData, 270);

		// Add the data
		m_TrainData.push_back(std::pair< std::list< float >, bool > (BmpData, bResult));*/
	}

	// We now have the data structure needed to train the network;
	// convert it to the form required by the fann library
	boost::scoped_ptr< fann_train_data > pTrainData(MakeTrainData(m_TrainData));

	if (!pTrainData)
		throw std::runtime_error("Failed to make train data!");
	
#ifdef _DEBUG
	// For debugging
	fann_save_train(pTrainData.get(), "debug_data.dat");
#endif

	// Initialize the neuron connection weights
	fann_init_weights(m_pANN, pTrainData.get());

	const float fDesiredError = (const float) 0;
	const unsigned int nEpochsBetweenReports = 10;

	// Train the network
	fann_train_on_data(m_pANN, pTrainData.get(), m_nEpochsCount, nEpochsBetweenReports, fDesiredError);	

	// Save the network
	fann_save(m_pANN, NETWORK_FILE_NAME);

	// Test the network and return the result
	m_bIsNetworkTeached = true;
	return boost::lexical_cast< _tstring > (fann_test_data(m_pANN, pTrainData.get()));
}
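As a rough illustration of what a MakeTrainData-style conversion has to produce (a generic sketch built on the stock FANN API, not this project's actual implementation), the training set can be allocated with fann_create_train and filled sample by sample:

#include "fann.h"

/* Hypothetical helper: build a fann_train_data set from num_data samples,
 * each with num_input features and a single boolean label. */
struct fann_train_data *make_train_data(const float *features, /* num_data * num_input values */
                                        const int *labels,     /* num_data labels (0 or 1) */
                                        unsigned int num_data,
                                        unsigned int num_input)
{
	unsigned int i, j;
	struct fann_train_data *data = fann_create_train(num_data, num_input, 1);

	for (i = 0; i < num_data; i++)
	{
		for (j = 0; j < num_input; j++)
			data->input[i][j] = features[i * num_input + j];
		data->output[i][0] = labels[i] ? 1.0f : 0.0f;
	}
	return data;
}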
Example #17
int main( int argc, char ** argv)
{
	float mse=1000;
	unsigned int num_train=R_NUM;
	unsigned int num_test=T_NUM;
	struct fann_train_data* data ;
	unsigned int i;
	const float desired_error = (const float) E_DES;
	const unsigned int epochs_between_reports = N_EPR;
	unsigned int bitf_limit=0;
	unsigned int bitf=bitf_limit+1;
	struct fann *ann;

#if MIMO_FANN
	printf("MIMO fann\n");
#else
	printf("Old fann\n");
#endif

#ifdef USE_XOR_DATA
	if (argc<2)
	{
		printf("Error: please supply a data file\n");
		return -1;
	}
	printf("Using %s\n", argv[1]);
	data=fann_read_train_from_file(argv[1]);
#else
	printf("Generating training data\n");
	data = fann_create_train(S_DIM, I_DIM, O_DIM);
	for ( i=0; i< S_DIM; i++)
	{
		f1(data, i); 
	}
#endif

	ann=setup_net(data);

#if VERBOSE
	fann_print_parameters(ann);
#endif

	for (i=0; mse>desired_error && i!=num_train && bitf>bitf_limit; i++)
	{
#if VERBOSE
		mse=train_epoch_debug(ann, data, i);
#else 
		mse=fann_train_epoch(ann, data);
#endif
		bitf=fann_get_bit_fail(ann);
		if ( !((i) % epochs_between_reports))
			printf("Epochs     %8d. Current error: %.10f. Bit fail: %u\n", i+(!i), mse, bitf);
			/*printf ("[ %7u ] MSE Error : %.10e ###################\n", i, mse);*/
	}
	printf("Epochs     %8d. Current error: %.10f. Bit fail: %u\n", i+(!i), mse, bitf);

	printf("Testing network. %f\n", fann_test_data(ann, data));
	
	gettimeofday(&tv_start,NULL);
	for (i=0; i!=num_test; i++)
 		fann_run_data(ann, data);
	gettimeofday(&tv_now,NULL);
	report("---",0);	
	

#if 1
	printf("Trying to save network\n");
#if MIMO_FANN 
	fann_save(ann, "saved_mimo.net");
	fann_destroy(ann);
	ann=fann_create_from_file("saved_mimo.net");
	fann_save(ann, "saved_mimo2.net");
	fann_destroy(ann);
#else
	fann_save(ann, "saved_old.net");
#endif
#endif

	return 0;
}