Example 1
int main() {
	printf("Neural Network training\n");

	const unsigned int num_input = 3;
    const unsigned int num_output = 4;
    const unsigned int num_layers = 4;
    const unsigned int num_neurons_hidden = 30;
    const float desired_error = (const float) 0.02;
    const unsigned int max_epochs = 500000;
    const unsigned int epochs_between_reports = 10;

    struct fann *ann = fann_create_standard(
        num_layers,
        num_input,
        num_neurons_hidden,
        num_neurons_hidden,
        // num_neurons_hidden,
        num_output);

    fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
    fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
    // fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
    fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

    struct fann_train_data *data = fann_read_train_from_file("../training.data");

    fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error);
    fann_destroy_train(data);

    fann_save(ann, "../neural.net");

    fann_destroy(ann);

	return 0;
}
Example 2
void CNeuroNetwok::Init(const int nNeirons, const int nResults, const int nEpochs)
{
	const unsigned int nLayersCount = 3;
	const unsigned int nHiddenNeironsCount = 3;
	m_nEpochsCount = nEpochs;
	
	// Create the neural network
	// The number of input neurons equals the number of input parameters
	// The number of output neurons equals the number of results.
	m_pANN = fann_create_standard(nLayersCount, nNeirons, nHiddenNeironsCount, nResults);

	if (!m_pANN)
		throw std::runtime_error("Failed to init fann!");

	// Perform the all-important initializations :)
	fann_set_activation_steepness_hidden(m_pANN, 1);
	fann_set_activation_steepness_output(m_pANN, 1);

	fann_set_activation_function_hidden(m_pANN, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(m_pANN, FANN_SIGMOID_SYMMETRIC);

	fann_set_train_stop_function(m_pANN, FANN_STOPFUNC_BIT);
	fann_set_bit_fail_limit(m_pANN, 0.01f);
	m_bIsInited = true;
}
Example 3
int 
main(int argc, char **argv)
{
	const unsigned int num_input = 360;
	const unsigned int num_output = 1;
	const unsigned int num_layers = 4;
	const unsigned int num_neurons_hidden = num_input / 2;
	const float desired_error = (const float) 0.001;
	const unsigned int max_epochs = 300;
	const unsigned int epochs_between_reports = 10;
	struct fann_train_data * data = NULL;
	struct fann *ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_neurons_hidden / 2, num_output);
	unsigned int i, j;

	if (argc != 2)
	{
		fprintf(stderr, "Use: %s trainingFile\n", argv[0]);
		exit(1);
	}

	fann_set_activation_function_hidden(ann, FANN_ELLIOT);
	fann_set_activation_function_output(ann, FANN_LINEAR);
	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);
	fann_set_learning_rate(ann, 0.1);
	fann_set_learning_momentum(ann, 0.6);
	data = fann_read_train_from_file(argv[1]);

	fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error);

	fann_destroy_train(data);	/* training data must be released with FANN's own destructor, not free() */
	fann_save(ann, ARQ_RNA);
	fann_destroy(ann);

	return 0;
}
Example 4
void MainWindow::onLearnMyocardial()
{
    QString trainFileName = QFileDialog::getOpenFileName(this, tr("Open training file"),
                                                    "", tr("Training file (*.data)"));

    if(trainFileName.isEmpty())
        return;

    if(mAnn != 0)
         fann_destroy(mAnn);

    const unsigned int num_input = 3;
    const unsigned int num_output = 1;
    const unsigned int num_layers = 3;
    const unsigned int num_neurons_hidden = 5;
    const float desired_error = 0.001f;
    const unsigned int max_epochs = 50000;
    const unsigned int epochs_between_reports = 0;

     mAnn= fann_create_standard(num_layers, num_input,
        num_neurons_hidden, num_output);

     fann_set_activation_function_hidden(mAnn, FANN_SIGMOID_SYMMETRIC);
     fann_set_activation_function_output(mAnn, FANN_SIGMOID_SYMMETRIC);

     QByteArray ba = trainFileName.toLocal8Bit();

    fann_train_on_file(mAnn, ba.data(), max_epochs,
        epochs_between_reports, desired_error);

    QMessageBox::information(this, "Training Complete", QString("Training error = ") + QString::number(fann_get_MSE(mAnn)));

    ui->tabWidget->setTabEnabled(1, true);
}
Example 5
int main() {
    //unsigned int layers[] = {17, 500, 400, 300, 200, 1};
    //unsigned int layerCount = sizeof(layers) / sizeof(unsigned int);
    const char* netLoadFile =
        "data/AAPL_ADBE_ATVI_COKE_EBAY_EXPE_NFLX_VA.300.250.200_100.net";
    const char* netSaveFile =
        "data/AAPL_ADBE_ATVI_COKE_EBAY_EXPE_NFLX_VA.300.250.200_200.net";

    const float desired_error = (const float) 0.10;
    const unsigned int max_epochs = 100;
    const unsigned int epochs_between_reports = 1;

    //struct fann *ann = fann_create_standard_array(layerCount, layers);
    struct fann *ann = fann_create_from_file(netLoadFile);

    fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
    fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

    printf("%s\n", netSaveFile);

    fann_train_on_file(ann,
                       "data/AAPL_ADBE_ATVI_COKE_EBAY_EXPE_NFLX_VA.train.fann", max_epochs,
                       epochs_between_reports, desired_error);

    fann_save(ann, netSaveFile);

    fann_destroy(ann);

    printf("%s\n", netSaveFile);

    return 0;
}
Example 6
int main(int argc , char **argv) {
	unsigned int num_data;
	unsigned int num_input;
	unsigned int num_output;
	const unsigned int num_layers=3 ; //input, hidden and output
	unsigned int num_neurons_hidden;
	const float desired_error = 0.0001;
	const unsigned int max_iterations = 1000;
	const unsigned int iterations_between_reports = 5;

	if (argc != 5) {
		printf("usage : train trainFile.txt testFile.txt ouputNetBasename nbrHiddenNeurons\n");
		return 0;
	}

	FILE *in= fopen(argv[1],"r");
	fscanf(in,"%u %u %u\n",&num_data,&num_input,&num_output);	/* counts are unsigned, so use %u */
	fclose(in);

	num_neurons_hidden = (unsigned int) atoi (argv[4]);

	struct fann *ann = fann_create_standard( num_layers, num_input, num_neurons_hidden, num_output);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC); 
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC); 

	train(ann, argv[1],argv[2],argv[3], max_iterations,iterations_between_reports, desired_error,num_neurons_hidden);

	fann_destroy(ann);

	return 0;
}
Example 7
int main(int argc, const char* argv[])
{
	const unsigned int max_epochs = 1000;
	unsigned int num_threads = 1;
	struct fann_train_data *data;
	struct fann *ann;
	long before;
	float error;
	unsigned int i;

	if(argc == 2)
		num_threads = atoi(argv[1]);

	data = fann_read_train_from_file("../../datasets/mushroom.train");
	ann = fann_create_standard(3, fann_num_input_train_data(data), 32, fann_num_output_train_data(data));

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID);

	before = GetTickCount();
	for(i = 1; i <= max_epochs; i++)
	{
		error = num_threads > 1 ? fann_train_epoch_irpropm_parallel(ann, data, num_threads) : fann_train_epoch(ann, data);
		printf("Epochs     %8d. Current error: %.10f\n", i, error);
	}
	printf("ticks %d", GetTickCount()-before);

	fann_destroy(ann);
	fann_destroy_train(data);

	return 0;
}
Example 8
int main( int argc, char** argv )
{
	const unsigned int num_input = 3;
	const unsigned int num_output = 1;
	const unsigned int num_layers = 4;
	const unsigned int num_neurons_hidden = 5;
	const float desired_error = (const float) 0.0001;
	const unsigned int max_epochs = 5000;
	const unsigned int epochs_between_reports = 1000;
	struct fann_train_data * data = NULL;
	struct fann *ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_neurons_hidden, num_output);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_LINEAR);
	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);
	data = fann_read_train_from_file("../../datasets/scaling.data");
	fann_set_scaling_params(
		    ann,
			data,
			-1,	/* New input minimum */
			1,	/* New input maximum */
			-1,	/* New output minimum */
			1);	/* New output maximum */

	fann_scale_train( ann, data );

	fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error);
	fann_destroy_train( data );
	fann_save(ann, "scaling.net");
	fann_destroy(ann);
	return 0;
}
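The program above trains on data rescaled with fann_set_scaling_params, so raw inputs have to be scaled (and outputs descaled) whenever the saved "scaling.net" is used for prediction. Below is a minimal sketch of that inference step, not part of the original example: it assumes the same three-input layout and relies on FANN's scaling helpers fann_scale_input and fann_descale_output (available in the floating-point FANN builds); the placeholder input values are made up for illustration.

#include <stdio.h>
#include "fann.h"

int main()
{
	/* Load the network (and its scaling parameters) saved by the training program above. */
	struct fann *ann = fann_create_from_file("scaling.net");
	fann_type input[3] = { 0, 0, 0 };	/* placeholder raw (unscaled) input values */
	fann_type *output;

	if (ann == NULL)
		return 1;

	/* Scale the raw input into the range the network was trained on,
	 * run the network, then map the output back to the original range. */
	fann_scale_input(ann, input);
	output = fann_run(ann, input);
	fann_descale_output(ann, output);

	printf("descaled output: %f\n", output[0]);

	fann_destroy(ann);
	return 0;
}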
Example 9
OcrNet::OcrNet(int data_set_size, int num_rows, int num_columns, int num_hidden, int num_output, int max_epochs, float error)
{
	const int num_layers = 3;

	DataSetSize = data_set_size;
	Rows = num_rows;
	Cols = num_columns;
	MaxEpochs = max_epochs;
	Error = error;
	Ann = new struct fann* [DataSetSize];

	for(int i=0; i < DataSetSize; i++)
	{
		Ann[i] = fann_create_standard(num_layers, Rows * Cols, num_hidden, num_output);
		fann_set_activation_function_hidden(Ann[i], FANN_SIGMOID_SYMMETRIC);
		fann_set_activation_function_output(Ann[i], FANN_SIGMOID_SYMMETRIC);
	}

	//	fann_set_activation_steepness_hidden(Ann, 1);
	//	fann_set_activation_steepness_output(Ann, 1);
	//	fann_set_learning_rate(Ann, 0.01);
	//	fann_set_learning_momentum(Ann, 0.7);
	//	fann_set_rprop_increase_factor(Ann, 5);
	//	fann_set_rprop_decrease_factor(Ann, 0.007);
	//	fann_set_rprop_delta_max(Ann, 17);
	//	fann_set_rprop_delta_zero(Ann, 0.01);
}
Example 10
void BasicBrain::makeNN() {
	assert(layout_.numLayers_ >= 3);
	assert(layout_.numLayers_ < 20);
	unsigned int* layerArray = new unsigned int[layout_.numLayers_];
	layerArray[0] = layout_.numInputs_;

	for(size_t i = 1; i < (layout_.numLayers_ - 1); i++) {
		layerArray[i] = layout_.neuronsPerHidden;
	}

	layerArray[layout_.numLayers_ - 1] = layout_.numOutputs;

	nn_ = fann_create_standard_array(layout_.numLayers_, layerArray);
    fann_set_activation_function_hidden(nn_, FANN_SIGMOID_SYMMETRIC);
    fann_set_activation_function_output(nn_, FANN_SIGMOID_SYMMETRIC);

    inputs_ = new fann_type[layout_.numInputs_];
    reset();
    delete[] layerArray;

#ifdef _CHECK_BRAIN_ALLOC
    size_t id = ++nnAllocCnt_;
    nnAllocs_[nn_] = id;
    std::cerr << "alloc: " << id << std::endl;
#endif
}
Example 11
int main()
{
    const unsigned int num_input = 2;
    const unsigned int num_output = 1;
    const unsigned int num_layers = 3;
    const unsigned int num_neurons_hidden = 3;
    const float desired_error = (const float) 0.001;
    const unsigned int max_epochs = 500000;
    const unsigned int epochs_between_reports = 1000;

    struct fann *ann = fann_create_standard(num_layers, num_input,
        num_neurons_hidden, num_output);

    fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
    fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

    fann_train_on_file(ann, "xor.data", max_epochs,
        epochs_between_reports, desired_error);

    fann_save(ann, "xor_float.net");

    fann_destroy(ann);

    return 0;
}
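As a follow-up (not part of the original example), a minimal sketch showing how the "xor_float.net" file saved above could be loaded and evaluated on a single test pattern; it uses only standard FANN calls (fann_create_from_file, fann_run, fann_destroy), mirroring the load-and-run pattern used elsewhere in these examples.

#include <stdio.h>
#include "fann.h"

int main()
{
	/* Load the network trained and saved by the example above. */
	struct fann *ann = fann_create_from_file("xor_float.net");
	fann_type input[2] = { -1, 1 };	/* one XOR test pattern */
	fann_type *calc_out;

	if (ann == NULL)
		return 1;

	calc_out = fann_run(ann, input);
	printf("xor test (%f,%f) -> %f\n", input[0], input[1], calc_out[0]);

	fann_destroy(ann);
	return 0;
}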
Example 12
int main()
{

    const unsigned int num_input = 1;
    const unsigned int num_output = 1;
    const unsigned int num_layers = 3;
    const unsigned int num_neurons_hidden = 10;
    const float desired_error = (const float) 0.0003;
    const unsigned int max_epochs = 50000;
    const unsigned int epochs_between_reports = 1000;
    struct fann_train_data 	*train_data;

    struct fann *ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output);

    fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
    fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);
	
	train_data=fann_read_train_from_file("seno.data");

	//HERE
	//fann_set_scaling_params(ann,train_data,-1.0,1.0,-1.0,1.0);
	//fann_scale_train(ann,train_data);
	//fann_scale_train_data(train_data,-1,1);	

    fann_train_on_data(ann, train_data, max_epochs, epochs_between_reports, desired_error);

    fann_save(ann, "mlp2.net");

    fann_destroy(ann);

    return 0;
}
Example 13
int main()
{
	fann_type *calc_out;
	const unsigned int num_input = 2;
	const unsigned int num_output = 1;
	const unsigned int num_layers = 3;
	const unsigned int num_neurons_hidden = 9;
	const float desired_error = (const float) 0;
	const unsigned int max_epochs = 500000;
	const unsigned int epochs_between_reports = 1000;
	struct fann *ann;
	struct fann_train_data *data;

	unsigned int i = 0;
	unsigned int decimal_point;

	printf("Creating network.\n");
	ann = fann_create_standard(num_layers, num_input, num_neurons_hidden, num_output);

	data = fann_read_train_from_file("osyslec_train.data");

	fann_set_activation_steepness_hidden(ann, 1);
	fann_set_activation_steepness_output(ann, 1);

	fann_set_activation_function_hidden(ann, FANN_SIGMOID);
	fann_set_activation_function_output(ann, FANN_SIGMOID);

	fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
	fann_set_bit_fail_limit(ann, 0.01f);

	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);

	fann_init_weights(ann, data);
	
	printf("Training network.\n");
	fann_train_on_data(ann, data, max_epochs, epochs_between_reports, desired_error);

	printf("Testing network. %f\n", fann_test_data(ann, data));

	for(i = 0; i < fann_length_train_data(data); i++)
	{
		calc_out = fann_run(ann, data->input[i]);
		printf("GG test (%f,%f) -> %f, should be %f, difference=%f\n",
			   data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0],
			   fann_abs(calc_out[0] - data->output[i][0]));
	}

	printf("Saving network.\n");

	fann_save(ann, "osyslec_train_float.net");

	decimal_point = fann_save_to_fixed(ann, "osyslec_train_fixed.net");
	fann_save_train_to_fixed(data, "osyslec_train_fixed.data", decimal_point);

	printf("Cleaning up.\n");
	fann_destroy_train(data);
	fann_destroy(ann);

	return 0;
}
Example 14
int main()
{
	const unsigned int num_input = 2;
	const unsigned int num_output = 1;
	const unsigned int num_layers = 3;
	const unsigned int num_neurons_hidden = 3;
	const float desired_error = (const float) 0.001;
	const unsigned int max_epochs = 500000;
	const unsigned int epochs_between_reports = 1000;
	unsigned int i;
	fann_type *calc_out;

	struct fann_train_data *data;

	struct fann *ann = fann_create_standard(num_layers,
								   num_input, num_neurons_hidden, num_output);

	data = fann_read_train_from_file("xor.data");

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

	fann_set_training_algorithm(ann, FANN_TRAIN_QUICKPROP);

	train_on_steepness_file(ann, "xor.data", max_epochs,
							epochs_between_reports, desired_error, (float) 1.0, (float) 0.1,
							(float) 20.0);

	fann_set_activation_function_hidden(ann, FANN_THRESHOLD_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_THRESHOLD_SYMMETRIC);

	for(i = 0; i != fann_length_train_data(data); i++)
	{
		calc_out = fann_run(ann, data->input[i]);
		printf("XOR test (%f, %f) -> %f, should be %f, difference=%f\n",
			   data->input[i][0], data->input[i][1], calc_out[0], data->output[i][0],
			   (float) fann_abs(calc_out[0] - data->output[i][0]));
	}


	fann_save(ann, "xor_float.net");

	fann_destroy(ann);
	fann_destroy_train(data);

	return 0;
}
Example 15
 void Classifier::createNew()
 {
     fann_destroy(neuralNetwork);
     neuralNetwork = fann_create_standard(NUM_LAYERS, NUM_INP, numHidden, NUM_OUTP);
     //fann_set_activation_function_hidden(neuralNetwork, FANN_SIGMOID_SYMMETRIC);
     //fann_set_activation_function_output(neuralNetwork, FANN_SIGMOID_SYMMETRIC);
     fann_set_activation_function_hidden(neuralNetwork, FANN_LINEAR_PIECE_SYMMETRIC);
     fann_set_activation_function_output(neuralNetwork, FANN_LINEAR_PIECE_SYMMETRIC);
     fann_set_training_algorithm(neuralNetwork,FANN_TRAIN_INCREMENTAL);
 }
Example 16
int main(int argc, char *argv[])
{
	unsigned int layers[3] = {38, 17, 9};
	struct fann *ann = fann_create_standard_array(3, layers);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_LINEAR);
	fann_train_on_file(ann, "scotland.data", 100000, 100, 0.00001);
	fann_save(ann, "scotland_test.net");
	fann_destroy(ann);
	return 0;
} 
Example 17
struct fann *random_network(){
	// create the network and set its activation functions
	struct fann *ann = fann_create_standard(num_layers,num_input,num_hidden_nodes,num_output);
	fann_set_activation_function_hidden(ann,FANN_SIGMOID);
	fann_set_activation_function_output(ann,FANN_SIGMOID);
	
	// randomize weights
	fann_randomize_weights(ann,-1,1);
	
	// return
	return ann;
}
Example 18
/***
 * @function rspamd_fann.create(nlayers, [layer1, ... layern])
 * Creates new neural network with `nlayers` that contains `layer1`...`layern`
 * neurons in each layer
 * @param {number} nlayers number of layers
 * @param {number} layerI number of neurons in each layer
 * @return {fann} fann object
 */
static gint
lua_fann_create (lua_State *L)
{
#ifndef WITH_FANN
	return 0;
#else
	struct fann *f, **pfann;
	guint nlayers, *layers, i;

	nlayers = luaL_checknumber (L, 1);

	if (nlayers > 0) {
		layers = g_malloc (nlayers * sizeof (layers[0]));

		if (lua_type (L, 2) == LUA_TNUMBER) {
			for (i = 0; i < nlayers; i ++) {
				layers[i] = luaL_checknumber (L, i + 2);
			}
		}
		else if (lua_type (L, 2) == LUA_TTABLE) {
			for (i = 0; i < nlayers; i ++) {
				lua_rawgeti (L, 2, i + 1);
				layers[i] = luaL_checknumber (L, -1);
				lua_pop (L, 1);
			}
		}

		f = fann_create_standard_array (nlayers, layers);

		if (f != NULL) {
			/* Configure the network only after a successful creation. */
			fann_set_activation_function_hidden (f, FANN_SIGMOID_SYMMETRIC);
			fann_set_activation_function_output (f, FANN_SIGMOID_SYMMETRIC);
			fann_set_training_algorithm (f, FANN_TRAIN_INCREMENTAL);
			fann_randomize_weights (f, 0, 1);

			pfann = lua_newuserdata (L, sizeof (gpointer));
			*pfann = f;
			rspamd_lua_setclass (L, "rspamd{fann}", -1);
		}
		else {
			lua_pushnil (L);
		}

		g_free (layers);
	}
	else {
		lua_pushnil (L);
	}

	return 1;
#endif
}
Example 19
int main()
{
	const unsigned int num_layers = 3;
	const unsigned int num_neurons_hidden = 32;
	const float desired_error = (const float) 0.0001;
	const unsigned int max_epochs = 300;
	const unsigned int epochs_between_reports = 10;
	struct fann *ann;
	struct fann_train_data *train_data, *test_data;

	unsigned int i = 0;

	printf("Creating network.\n");

	train_data = fann_read_train_from_file("../datasets/mushroom.train");

	ann = fann_create_standard(num_layers,
					  train_data->num_input, num_neurons_hidden, train_data->num_output);

	printf("Training network.\n");

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC_STEPWISE);
	fann_set_activation_function_output(ann, FANN_SIGMOID_STEPWISE);

	/*fann_set_training_algorithm(ann, FANN_TRAIN_INCREMENTAL); */

	fann_train_on_data(ann, train_data, max_epochs, epochs_between_reports, desired_error);

	printf("Testing network.\n");

	test_data = fann_read_train_from_file("../datasets/mushroom.test");

	fann_reset_MSE(ann);
	for(i = 0; i < fann_length_train_data(test_data); i++)
	{
		fann_test(ann, test_data->input[i], test_data->output[i]);
	}
	
	printf("MSE error on test data: %f\n", fann_get_MSE(ann));

	printf("Saving network.\n");

	fann_save(ann, "mushroom_float.net");

	printf("Cleaning up.\n");
	fann_destroy_train(train_data);
	fann_destroy_train(test_data);
	fann_destroy(ann);

	return 0;
}
Example 20
int main( void ) {
  fann_type *calc_out;
  fann_type input[ N ] = { 0 };	/* zero-initialise so fann_run reads defined values */
  int i;

  struct fann *ann = fann_create_standard( 3, N, N, 1 );
  fann_set_activation_function_hidden( ann, ACT );
  fann_set_activation_function_output( ann, FANN_LINEAR );

  for( i=0; i<1000; i++ )
    calc_out = fann_run(ann, input);
  
  fann_destroy(ann);
  return 0;
}
Example 21
int main(int argc, char* argv[]) {
    bool iflag = false, oflag = false, tflag = false, nflag = false, gflag = false;
    int flag;
    struct fann *nNetwork;
    char* inputFileName;
    char* dataFileName;
    char* outputFileName;

    srand(time(NULL));

    while((flag = getopt(argc,argv,"i:o:t:")) != -1) {
        switch(flag) {
            case 'i':
                inputFileName = optarg;
                iflag = true;
            break;
            case 'o':
                outputFileName = optarg;
                oflag = true;
            break;
            case 't':
                dataFileName = optarg;
                tflag = true;
            break;
        }
    }

    if(iflag) {
        nNetwork = fann_create_from_file(inputFileName);
    }
    else {
        nNetwork = fann_create_standard(3,NUM_IN_OUT,NUM_IN_OUT,NUM_IN_OUT);
    }

    fann_set_activation_function_hidden(nNetwork, FANN_SIGMOID_SYMMETRIC);
    fann_set_activation_function_output(nNetwork, FANN_SIGMOID_SYMMETRIC);

    if(tflag) {
        fann_train_on_file(nNetwork,dataFileName,MAX_ITERATIONS,50,ERROR);
    }

    if(oflag) {
        fann_save(nNetwork,outputFileName);
    }

    fann_destroy(nNetwork);
return 0;
}
Example 22
int main()
{
	struct fann *ann;
	struct fann_train_data *train_data, *test_data;
	const float desired_error = (const float) 0.001;
	unsigned int max_neurons = 40;
	unsigned int neurons_between_reports = 1;

	printf("Reading data.\n");

	train_data = fann_read_train_from_file("../benchmarks/datasets/two-spiral.train");
	test_data = fann_read_train_from_file("../benchmarks/datasets/two-spiral.test");

	fann_scale_train_data(train_data, 0, 1);
	fann_scale_train_data(test_data, 0, 1);

	printf("Creating network.\n");

	ann = fann_create_shortcut(2, fann_num_input_train_data(train_data), fann_num_output_train_data(train_data));

	fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_LINEAR_PIECE);
	fann_set_train_error_function(ann, FANN_ERRORFUNC_LINEAR);

	fann_print_parameters(ann);

	printf("Training network.\n");

	fann_cascadetrain_on_data(ann, train_data, max_neurons, neurons_between_reports, desired_error);

	fann_print_connections(ann);

	printf("\nTrain error: %f, Test error: %f\n\n", fann_test_data(ann, train_data),
		   fann_test_data(ann, test_data));

	printf("Saving network.\n");

	fann_save(ann, "two_spirali.net");

	printf("Cleaning up.\n");
	fann_destroy_train(train_data);
	fann_destroy_train(test_data);
	fann_destroy(ann);

	return 0;
}
Example 23
NeuralNet::NeuralNet(){
    cout << "Initializing neural network" << endl;
    this->numInputNeurons = 144;
    this->numOutputNeurons = 1;
    this->numLayers = 3;
    this->numHiddenNeurons = 140;
    this->ptrNeuralNet = fann_create_standard(numLayers, numInputNeurons, numHiddenNeurons, numOutputNeurons);
    fann_set_activation_steepness_hidden(this->ptrNeuralNet, 1);
    fann_set_activation_steepness_output(this->ptrNeuralNet, 1);
    //sigmoidal function
    fann_set_activation_function_hidden(this->ptrNeuralNet, FANN_SIGMOID_SYMMETRIC);
    fann_set_activation_function_output(this->ptrNeuralNet, FANN_SIGMOID_SYMMETRIC);

    fann_set_train_stop_function(this->ptrNeuralNet, FANN_STOPFUNC_BIT);
    fann_set_bit_fail_limit(this->ptrNeuralNet, 0.01f);

    fann_set_training_algorithm(this->ptrNeuralNet, FANN_TRAIN_RPROP);
}
Example 24
int main()
{
	//FANN TRAINING:
	const unsigned int num_input = 4;
	const unsigned int num_output = 3;
	const unsigned int num_layers = 3;
	const unsigned int num_neurons_hidden = 4;
	const float desired_error = (const float) 0.001;
	const unsigned int max_epochs = 200000;
	const unsigned int epochs_between_reports = 1000;

	struct fann *ann = fann_create_standard(num_layers, num_input,
	num_neurons_hidden, num_output);

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

	fann_train_on_file(ann, "irisDataSet.data", max_epochs,
	epochs_between_reports, desired_error);

	fann_save(ann, "irisFANN_float.net");

	fann_destroy(ann);

	//FANN TESTING
	fann_type *calc_out;
	fann_type input[4];

	struct fann *annR = fann_create_from_file("irisFANN_float.net");

	input[0] = 5.1;
	input[1] = 3.5;
	input[2] = 1.4;
	input[3] = 0.2;

	calc_out = fann_run(annR, input);

	std::cout << "Testing irisFANN, with data: " << input[0] << ", " << input[1] << ", " << input[2] << ", " << input[3]
		<< ", output: " << calc_out[0] << ", " << calc_out[1] << ", " << calc_out[2] << "\n";

	fann_destroy(annR);
	return 0;

}
Example 25
/*! ann:set_activation_function_hidden(function)
 *# Sets the activation function for the hidden layer neurons.
 *x ann:set_activation_function_hidden(fann.FANN_SIGMOID_SYMMETRIC)
 *-
 */
static int ann_set_activation_function_hidden(lua_State *L)
{
	struct fann **ann;
	int fun;

	ann = luaL_checkudata(L, 1, FANN_METATABLE);
	luaL_argcheck(L, ann != NULL, 1, "'neural net' expected");

	if(lua_gettop(L) < 2)
		luaL_error(L, "insufficient parameters");

	fun = lua_tointeger(L, 2);

#ifdef FANN_VERBOSE
	printf("Setting hidden activation function to %d\n", fun);
#endif

	fann_set_activation_function_hidden(*ann, fun);
	return 0;
}
Example 26
int main()
{
	const float desired_error = (const float) 0.0001;
	const unsigned int max_epochs = 350;
	const unsigned int epochs_between_reports = 25;
	struct fann *ann;
	struct fann_train_data *train_data;

	unsigned int i = 0;

	printf("Creating network.\n");

	train_data = fann_read_train_from_file("ann_training_data");
    // Using incremental training -> shuffle training data    
    fann_shuffle_train_data(train_data);

//	ann = fann_create_standard(num_layers,
//					  train_data->num_input, num_neurons_hidden, train_data->num_output);

    //ann = fann_create_standard(500, 2, 50, 50, 21);
    unsigned int layers[4] = {150, 70, 30, 22};
    ann = fann_create_standard_array(4, layers);
	printf("Training network.\n");

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC_STEPWISE);
	fann_set_activation_function_output(ann, FANN_LINEAR_PIECE); //FANN_SIGMOID_STEPWISE);

    fann_set_training_algorithm(ann, FANN_TRAIN_INCREMENTAL);

	fann_train_on_data(ann, train_data, max_epochs, epochs_between_reports, desired_error);

	printf("Saving network.\n");

	fann_save(ann, "mymoves_gestures.net");

	printf("Cleaning up.\n");
	fann_destroy_train(train_data);
	fann_destroy(ann);

	return 0;
}
Example 27
int main_sample()
{
	const unsigned int num_input = 2;
	const unsigned int num_output = 1;
	const unsigned int num_layers = 3;
	const unsigned int num_neurons_hidden = 3;
	const float desired_error = (const float) 0.001;
	const unsigned int max_epochs = 500000;
	const unsigned int epochs_between_reports = 1000;

	struct fann *ann = fann_create_standard(num_layers, num_input,
		num_neurons_hidden, num_output);

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

	fann_train_on_file(ann, "xor.data", max_epochs,
		epochs_between_reports, desired_error);

	fann_save(ann, "xor_float.net");

	fann_type *calc_out;
	fann_type input[2];

	//ann = fann_create_from_file("xor_float.net");

	input[0] = -1;
	input[1] = 1;
	calc_out = fann_run(ann, input);

	printf("xor test (%f,%f) -> %f\n", input[0], input[1], calc_out[0]);

	fann_destroy(ann);

	getchar();
	return 0;
}
Example 28
int main ( int argc, char **argv )
{
    srand(time(NULL));
    if ( argc<=1 )
    {
        //      printf ( "neuro num\r\n" );
        //     exit ( 0 );
    }

    if (argc>2)
    {
        //desired_error=atof(argv[2]);
        numn=atoi(argv[1]);
        l1n=atoi(argv[2]);
        if (argc>3)
            l2n=atoi(argv[3]);
        if (argc>4)
            l3n=atoi(argv[4]);
        if (argc>5)
            l4n=atoi(argv[5]);
        if (argc>6)
            l5n=atoi(argv[6]);
        if (argc>7)
            l6n=atoi(argv[7]);
    }

    signal ( 2, sig_term );



    srand ( time ( NULL ) );

    printf("loading training data...");

    train_data = fann_read_train_from_file ( "train.dat" );
    test_data = fann_read_train_from_file ( "test.dat" );

    weight_data=fann_merge_train_data(train_data,test_data);

    cln_weight_data=fann_duplicate_train_data(weight_data);
    cln_test_data=fann_duplicate_train_data(test_data);
    cln_train_data=fann_duplicate_train_data(train_data);

    //num_neurons_hidden = atoi ( argv[1] );



    srand(time(NULL));

    y=atoi(argv[2]);

    lay=atoi(argv[1]);
    ln=lay+2;
    if (lay==1)
        y2=train_data->num_output;
    best_perc=1;

    printf("\r\ndoing %ux%u [layers=%u,out=%u]",lay,y,ln, train_data->num_output);
    while (true)

    {
        neur1=1+(rand()%y);
        neur2=1+(rand()%y);
        conn_rate=0.5f+((rand()%50)*0.01f);
        printf("\r\n%2dx%-4d: ",neur1,neur2);
        //  printf("create network: layers=%d l1n=%d l2n=%d l3n=%d l4n=%d\ l5n=%d l6n=%dr\n",numn,l1n,l2n,l3n,l4n,l5n,l6n);
        ann = fann_create_standard (//conn_rate,
                  ln,
                  train_data->num_input,
                  neur1,
                  neur2,
                  train_data->num_output );
        //fann_init_weights ( ann, train_data );
        printf(" [%p] ",ann);

        if ( ann==NULL )
        {
            printf ( "error" );
            exit ( 0 );
        }



        fann_set_activation_function_hidden(ann,FANN_SIGMOID);
        fann_set_activation_function_output(ann,FANN_SIGMOID);

        rebuild_functions(neur1);
        fann_set_training_algorithm ( ann, FANN_TRAIN_RPROP );
        fann_set_sarprop_temperature(ann,15000.0f);
        //fann_randomize_weights ( ann, -((rand()%10)*0.1f), ((rand()%10)*0.1f) );
        fann_init_weights(ann,train_data);
        got_inc=0;
        prev_epoch_mse=1;
        //
        epochs=0;

        unsigned last_best_perc_epoch=0;
        unsigned last_sync_epoch=0;
        unsigned last_ftest_secs=0;

        last_sync_epoch=0;
        last_best_perc_epoch=0;
        if (good_ann)
            fann_destroy(good_ann);

        good_ann=fann_copy(ann);
        unlink(histfile);
        for (u=0;u<1000;u++)
        {
            fflush(NULL);
            train_mse=fann_train_epoch(ann, train_data);

            if (jitter_train)
                apply_jjit(train_data,cln_train_data);


            if (time(NULL)-last_ftest_secs>=1)
            {
                //printf("\r\n%5u %9.6f %5.2f ",epochs,train_mse,test_perc);
                //printf(" %4.2f",test_perc);
                printf(".");


                last_ftest_secs=time(NULL);
            }
            ftest_data();
            plot(epochs,train_mse,test_mse);

            /*         if (epochs>10&&((int)test_perc==43||(int)test_perc==57))
                    {
                        printf(" [excluded %.2f] ",test_perc);
                        break;
                    }            else            {

                    } */
            //printf("excluded %f ",test_perc);

            static double prev_test_perc = 0;	/* keep the previous value across epochs so the comparisons below are defined */
            //   if (prev_epoch_mse==best_perc)
            //  printf("o");
            if ((int)test_perc>(int)train_perc&&epochs-last_stat_epoch>10)
            {
                fann_destroy(good_ann);
                good_ann=fann_copy(ann);

                if (test_perc!=prev_test_perc)
                    printf("%.2f [%f]",test_perc,train_mse);

                //printf(" sync[%4.2f]",test_perc);
                last_stat_epoch=epochs;
            }
            else 	if (epochs-last_sync_epoch>111500)
            {
                last_sync_epoch=epochs;
            }
            if (epochs>210&&test_perc>best_perc)
            {
                //	u--;
                //  fann_destroy(good_ann);
                //   good_ann=fann_copy(ann);
                printf(" [saved best %.0f] ",test_perc);
                last_stat_epoch=epochs;
                //		printf("%f",test_perc);
                //	fann_destroy(ann);
                //	ann=fann_copy(good_ann);
                fann_save(ann,"mutate-best.net");
                best_perc=test_perc;
                printf(" %6.2f [%f]",test_perc,train_mse);
                last_best_perc_epoch=epochs;

            }
            else     if (epochs>11100&&((int)test_perc<=63||(int)test_perc==(int)prev_test_perc))
            {
                //best_perc=test_perc;
                //		printf("x");
                //  printf(".");
                //printf("\r%6.8f",train_mse);
                //			printf("done\r\n");
                break;


            }
            static unsigned last_restore_epoch=0;
            if (epochs>100&&test_mse-train_mse>=0.25f&&epochs-last_restore_epoch>=120)
            {
                /* 	fann_set_learning_rate ( ann,0.31f+(rand()%90)*0.01f);
                	fann_set_learning_momentum(ann,(rand()%90)*0.01f);
                	printf(" [restored @ %u lr %.2f mm %.2f]",epochs,fann_get_learning_rate(ann),
                	fann_get_learning_momentum(ann));
                	fann_destroy(ann);
                	ann=fann_copy(good_ann);
                	last_stat_epoch=epochs;
                	last_restore_epoch=epochs; */





                double rdec,rinc;
                rdec=0.0101f+((rand()%100)*0.00001f);
                if (!rdec)
                    rdec=0.01f;
                rinc=1.0001f+((rand()%90)*0.00001f);
                if (!rinc)
                    rinc=1.1f;
                static double prev_test_epoch_mse;

                //		rinc+=diff_mse*0.000001f;
                //			fann_set_rprop_increase_factor(ann,rinc );
                //	fann_set_rprop_decrease_factor(ann, rdec);
            }
            else if (test_mse-train_mse<=0.1f)
            {
                fann_destroy(good_ann);
                good_ann=fann_copy(ann);
                //	printf("s");
            }
            else
            {
                fann_set_sarprop_temperature(ann,fann_get_sarprop_temperature(ann)-0.0001f);
            }
            static unsigned last_train_change_epoch=0;
            if (test_mse>=train_mse&&epochs-last_train_change_epoch>=100)
            {
                last_train_change_epoch=epochs;
                //fann_set_training_algorithm(ann,FANN_TRAIN_SARPROP);
                jitter_train=0;
            }
            else
            {
                //fann_set_training_algorithm(ann,FANN_TRAIN_RPROP);
                jitter_train=0;
            }

            got_inc=test_perc-prev_epoch_mse;
            prev_epoch_mse=test_perc;
            prev_test_perc=test_perc;
            epochs++;
            if (epochs-last_best_perc_epoch>511500)
            {
                printf(" failed");
                break;
            }
            if (epochs>2200&&(int)train_perc<40)
            {
                printf("skip 1\r\n");
                break;
            }

            if ((int)test_perc>=80)
            {
                printf("\r\ngot it %f\r\n",test_perc);
                fann_save(ann,"good.net");
                exit(0);
            }
            // printf("\n%6u ",epochs);
        }
        printf(" %6.2f inc: %.2f",test_perc,got_inc);
//            printf("%6.2f %6.2f",train_perc,test_perc);

        fann_destroy ( ann );

    }

    fann_destroy_train ( train_data );
    fann_destroy_train ( test_data );
    fann_destroy ( ann );

    return 0;
}
Example 29
int main()
{
	struct fann *ann;
	struct fann_train_data *train_data, *test_data;
	const float desired_error = (const float)0.0;
	unsigned int max_neurons = 30;
	unsigned int neurons_between_reports = 1;
	unsigned int bit_fail_train, bit_fail_test;
	float mse_train, mse_test;
	unsigned int i = 0;
	fann_type *output;
	fann_type steepness;
	int multi = 0;
	enum fann_activationfunc_enum activation;
	enum fann_train_enum training_algorithm = FANN_TRAIN_RPROP;
	
	printf("Reading data.\n");
	 
	train_data = fann_read_train_from_file("../benchmarks/datasets/parity8.train");
	test_data = fann_read_train_from_file("../benchmarks/datasets/parity8.test");

	fann_scale_train_data(train_data, -1, 1);
	fann_scale_train_data(test_data, -1, 1);
	
	printf("Creating network.\n");
	
	ann = fann_create_shortcut(2, fann_num_input_train_data(train_data), fann_num_output_train_data(train_data));
		
	fann_set_training_algorithm(ann, training_algorithm);
	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_LINEAR);
	fann_set_train_error_function(ann, FANN_ERRORFUNC_LINEAR);
	
	if(!multi)
	{
		/*steepness = 0.5;*/
		steepness = 1;
		fann_set_cascade_activation_steepnesses(ann, &steepness, 1);
		/*activation = FANN_SIN_SYMMETRIC;*/
		activation = FANN_SIGMOID_SYMMETRIC;
		
		fann_set_cascade_activation_functions(ann, &activation, 1);		
		fann_set_cascade_num_candidate_groups(ann, 8);
	}	
		
	if(training_algorithm == FANN_TRAIN_QUICKPROP)
	{
		fann_set_learning_rate(ann, 0.35);
		fann_randomize_weights(ann, -2.0,2.0);
	}
	
	fann_set_bit_fail_limit(ann, 0.9);
	fann_set_train_stop_function(ann, FANN_STOPFUNC_BIT);
	fann_print_parameters(ann);
		
	fann_save(ann, "cascade_train2.net");
	
	printf("Training network.\n");

	fann_cascadetrain_on_data(ann, train_data, max_neurons, neurons_between_reports, desired_error);
	
	fann_print_connections(ann);
	
	mse_train = fann_test_data(ann, train_data);
	bit_fail_train = fann_get_bit_fail(ann);
	mse_test = fann_test_data(ann, test_data);
	bit_fail_test = fann_get_bit_fail(ann);
	
	printf("\nTrain error: %f, Train bit-fail: %d, Test error: %f, Test bit-fail: %d\n\n", 
		   mse_train, bit_fail_train, mse_test, bit_fail_test);
	
	for(i = 0; i < train_data->num_data; i++)
	{
		output = fann_run(ann, train_data->input[i]);
		if((train_data->output[i][0] >= 0 && output[0] <= 0) ||
		   (train_data->output[i][0] <= 0 && output[0] >= 0))
		{
			printf("ERROR: %f does not match %f\n", train_data->output[i][0], output[0]);
		}
	}
	
	printf("Saving network.\n");
	
	fann_save(ann, "cascade_train.net");
	
	printf("Cleaning up.\n");
	fann_destroy_train(train_data);
	fann_destroy_train(test_data);
	fann_destroy(ann);
	
	return 0;
}
Example 30
int main()
{
	fann_type *calc_out;
	const unsigned int num_input = 22500;
	const unsigned int num_output = 1;
	//const unsigned int num_layers = 4;
	const unsigned int num_layers = 4;
	/* this value can be changed to tweak the network */
	const unsigned int num_neurons_hidden = 50;
	//const unsigned int num_neurons_hidden = 150;
	const float desired_error = (const float) 0.02;
	const unsigned int max_epochs = 15000;
	const unsigned int epochs_between_reports = 20;
	float learning_rate = .5;
	struct fann *ann;
	struct fann_train_data *data;
	int num_neurons = 0;
	unsigned int i = 0;
	unsigned int decimal_point;

	/* CREATING NETWORK */
	ann = fann_create_standard(num_layers, num_input,
		num_neurons_hidden,
		num_neurons_hidden, num_output);

	/* reading training data */
	data = fann_read_train_from_file("training.data");

	fann_set_activation_steepness_hidden(ann, 1);
	fann_set_activation_steepness_output(ann, 1);

	fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
	fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);

	fann_init_weights(ann, data);

	/*
	 TRAINING NETWORK
	 run x epochs at learn rate .y
	*/
	//fann_set_learning_rate(ann, .7);
	//fann_train_on_data(ann, data, 200, epochs_between_reports, .4);
	//fann_train_on_data(ann, data, 500, epochs_between_reports, .002);

	fann_set_learning_rate(ann, .5);
	fann_train_on_data(ann, data, 5000, epochs_between_reports, .2);
	//fann_train_on_data(ann, data, 50, epochs_between_reports, .2);

	fann_set_learning_rate(ann, .2);
	fann_train_on_data(ann, data, 1000, epochs_between_reports, .15);
	//fann_train_on_data(ann, data, 100, epochs_between_reports, .15);

	fann_set_learning_rate(ann, .1);
	fann_train_on_data(ann, data, 5000, epochs_between_reports, .002);
	//fann_train_on_data(ann, data, 200, epochs_between_reports, .00002);

	/* TESTING NETWORK */
	printf("Testing network. %f\n", fann_test_data(ann, data));

	for(i = 0; i < fann_length_train_data(data); i++)
	{
		calc_out = fann_run(ann, data->input[i]);
		/*printf("%f, should be %f, difference=%f\n",
			calc_out[0], data->output[i][0],
			fann_abs(calc_out[0] - data->output[i][0])); */
	}

	/* SAVING NETWORK */
	fann_save(ann, "image_spam.net");

	/* CLEANING UP */
	fann_destroy_train(data);
	fann_destroy(ann);

	return 0;
}