Example #1
// Train the net with all the data
void NeuralNet::train_net(int num_data, int num_input, float **input, int num_output, float **output)
//void train_net(FANN::neural_net &net, unsigned int num_data, unsigned int num_input, fann_type **input, unsigned int num_output, fann_type **output)
{
    net = new FANN::neural_net();
    const float learning_rate = 0.07f;
    const unsigned int num_layers = 3;
    const unsigned int num_hidden = num_input / 2;
    unsigned int layers[3] = { (unsigned int)num_input, num_hidden, (unsigned int)num_output };
    net->create_standard_array(num_layers, layers);
    net->set_learning_rate(learning_rate);
    net->set_activation_steepness_hidden(0.1);
    net->set_activation_steepness_output(0.1);

    net->set_activation_function_hidden(FANN::SIGMOID_SYMMETRIC_STEPWISE);
    net->set_activation_function_output(FANN::SIGMOID_SYMMETRIC_STEPWISE);

    //net.set_training_algorithm(FANN::TRAIN_INCREMENTAL);
    // Set additional properties such as the training algorithm
    //net.set_training_algorithm(FANN::TRAIN_QUICKPROP);

    const float desired_error = 0.001f;
    const unsigned int max_iterations = 1000;
    const unsigned int iterations_between_reports = 1000;

    FANN::training_data data;

    data.set_train_data(num_data, num_input, input, num_output, output);

    // Initialize the weights from the training data, then train
    net->init_weights(data);

    // epochs_between_reports is 0 here, so no progress reports are printed;
    // pass iterations_between_reports instead to get periodic status output.
    net->train_on_data(data, max_iterations, 0, desired_error);

    data.save_train("ddumbassfile.save");
}
Example #2
// Train the net with all the data
void train_net(FANN::neural_net &net,std::string oppName, std::string day, std::string type, const int num_output)
{
  

    const float desired_error = 0.001f;
    const unsigned int max_iterations = 1000;
    const unsigned int iterations_between_reports = 1000;

    std::string trainFileName="input/"+type+"_Casino_Day-"+day+"_"+oppName+"_vs_mybotisamazing.txt";
    std::string valFileName="input/"+type+"_Casino_Day-"+day+"_mybotisamazing_vs_"+oppName+".txt";
    cout << endl << "Training network." << endl;

    FANN::training_data data;
    FANN::training_data vData;
    data.read_train_from_file(trainFileName);
    vData.read_train_from_file(valFileName);

    // Initialize and train the network with the data
    net.init_weights(data);

    cout << "Max Epochs " << setw(8) << max_iterations << ". "
        << "Desired Error: " << left << desired_error << right << endl;
    net.set_callback(print_callback, NULL);
    clock_t start = clock();
    net.train_on_data(data, max_iterations,
        iterations_between_reports, desired_error);
    clock_t end = clock();

    cout << "Runtime: " << double(end - start) / CLOCKS_PER_SEC << " s" << endl;
    cout << endl << "Testing network." << endl;
    
    std::string oFileName="output/"+type+"_"+oppName+"_day_"+day+".txt";
    std::string actionFile="input/action_Casino_Day-"+day+"_mybotisamazing_vs_"+oppName+".txt";
    validate_net(net, oFileName,actionFile, &vData, num_output);
    /////////////////////////////////////////////////////
    net.train_on_data(vData,max_iterations,iterations_between_reports,desired_error);
    std::string oFileName2="output/"+type+"2_"+oppName+"_day_"+day+".txt";
    std::string actionFile2="input/action_Casino_Day-"+day+"_mybotisamazing_vs_"+oppName+".txt";
    validate_net(net, oFileName2, actionFile2, &vData, num_output);

    // Save the network in floating point and fixed point
    std::string netFile="output/"+type+"_"+oppName+"_day_"+day+".net";
    net.save(netFile);
    //unsigned int decimal_point = net.save_to_fixed("training.net");
    //data.save_train_to_fixed("training_fixed.data", decimal_point);
	
}
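
Example #2 (and several of the later examples) registers a print_callback through set_callback() without defining it. The FANN C++ wrapper expects the callback signature shown below; the body is only a minimal sketch in the spirit of the stock FANN samples, not code taken from any of these projects:

int print_callback(FANN::neural_net &net, FANN::training_data &train,
    unsigned int max_epochs, unsigned int epochs_between_reports,
    float desired_error, unsigned int epochs, void *user_data)
{
    // Print the current epoch and MSE; returning a negative value would abort training.
    std::cout << "Epochs     " << std::setw(8) << epochs << ". "
              << "Current Error: " << std::left << net.get_MSE() << std::right << std::endl;
    return 0;
}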
Example #3
void Player_neural::train(FANN::training_data &data)
{
	net.set_learning_rate(LEARNING_RATE);
	for (unsigned int i = 0; i < NB_EPOCHS; ++i)
	{
		data.shuffle_train_data();
		net.train_epoch(data);
		net.set_learning_rate(net.get_learning_rate() * LEARNING_RATE_DECAY);
	}
	net.print_connections();
}
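
Example #3 depends on three constants that the snippet does not show. The values below are placeholders chosen only to make the fragment self-contained; they are not the original author's settings:

// Hypothetical values -- the original definitions are not part of the snippet.
static const float LEARNING_RATE = 0.7f;          // initial learning rate
static const float LEARNING_RATE_DECAY = 0.99f;   // multiplied into the rate after every epoch
static const unsigned int NB_EPOCHS = 500;        // number of passes over the training data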
Example #4
void train(Configuration *cfg)
{
	QString fileName(QDir::homePath() + "/" + QCoreApplication::applicationName() + ".ini");
	qDebug() << "using config file:" << fileName;
	QSettings settings(fileName, QSettings::IniFormat);
	const float learningRate = settings.value("learningRate", 0.8).toFloat();
	const unsigned int numLayers = settings.value("numLayers", 3).toInt();
	const unsigned int numInput = settings.value("numInput", 1024).toInt();
	const unsigned int numHidden = settings.value("numHidden", 32).toInt();
	const unsigned int numOutput = settings.value("numOutput", 1).toInt();
	const float desiredError = settings.value("desiredError", 0.0001f).toFloat();
	const unsigned int maxIterations = settings.value("maxIterations", 3000).toInt();
	const unsigned int iterationsBetweenReports = settings.value("iterationsBetweenReports", 100).toInt();

	FANN::neural_net net;
	net.create_standard(numLayers, numInput, numHidden, numOutput);
	net.set_learning_rate(learningRate);
	net.set_activation_steepness_hidden(0.5);
	net.set_activation_steepness_output(0.5);
	net.set_learning_momentum(0.6);
	net.set_activation_function_hidden(FANN::SIGMOID_SYMMETRIC);
	net.set_activation_function_output(FANN::SIGMOID_SYMMETRIC);
	net.set_training_algorithm(FANN::TRAIN_RPROP);
	net.print_parameters();

	FANN::training_data data;
	if (data.read_train_from_file(cfg->getDataSavePath().toStdString()))
	{
		qDebug() << "Wczytano dane";
		//inicjalizacja wag
		net.init_weights(data);
		data.shuffle_train_data();
		net.set_callback(printCallback, NULL);
		net.train_on_data(data, maxIterations,
			iterationsBetweenReports, desiredError);
		net.save(cfg->getNetSavePath().toStdString());
		qDebug() << "Nauczono i zapisano siec";
	}
}
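
Example #4 (like most of the snippets here) loads its patterns with read_train_from_file(). FANN's plain-text training format is a header line giving the number of patterns, inputs, and outputs, followed by alternating lines of input values and target values. The classic XOR set, for instance, is:

4 2 1
-1 -1
-1
-1 1
1
1 -1
1
1 1
-1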
Example #5
void Execution::initializeWeights()
{
    assert(net && "Invalid program state, net uninitialized");
    const int numInputs = net->get_num_input();
    if (trainingInputsSelection.isValid() == 0)
    {
        QMessageBox::warning(this, tr("Information"),
                             tr("Select input columns for Training in the Execution tab to initialize the weights"));
        return;
    }
    if (numInputs != trainingInputsSelection.numColumns())
    {
        QMessageBox::warning(this, tr("Information"),
                             tr("Selected input columns must match the net's inputs"));
        return;
    }

    double **inputPatterns = trainingInputsSelection.getData();
    FANN::training_data data;
    data.set_train_data(trainingInputsSelection.numRows(), numInputs, inputPatterns, 0, 0);
    net->init_weights(data);
}
Example #6
int main(int argc, char *argv[])
{
    // "r" mode: translate a single WAV file and exit (requires at least two arguments).
    if (argc >= 3 && argv[1][0] == 'r') {
        WAVFile inp(argv[2]);
        translate_wav(inp);
        return 0;
    }
    // Otherwise expect pairs of input/output WAV files.
    if (argc == 1 || argc % 2 != 1) {
        std::cout << "bad number of training examples\n";
        return -1;
    }

    int to_open = (argc - 1)/2;
    for (int i = 0; i < to_open; i++) {
        WAVFile inp(argv[2*i+1]);
        WAVFile out(argv[2*i+2]);
        add_training_sound(inp, out);
    }

    std::vector<float *> train_in(input_training.size());
    std::vector<float *> train_out(output_training.size());
    for (size_t i = 0; i < input_training.size(); i++) {
        train_in[i] = input_training[i];
        train_out[i] = output_training[i];
    }
    FANN::training_data training;
    training.set_train_data((unsigned int)input_training.size(), (samples_per_segment/2+1)*2,
            train_in.data(), (samples_per_segment/2+1)*2, train_out.data());
    FANN::neural_net net; 
    const unsigned int layers[] = {(samples_per_segment/2+1)*2, (samples_per_segment/2+1)*2, (samples_per_segment/2+1)*2};
    net.create_standard_array(3, (unsigned int*)layers);
    net.set_activation_function_output(FANN::LINEAR);
    //net.set_activation_function_hidden(FANN::LINEAR);
    net.set_learning_rate(1.2f);
    net.train_on_data(training, 50000, 1, 3.0f);

    net.save("net.net");
}
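
Example #6 saves the net straight after train_on_data(). If you want to see how far training actually converged before saving, the wrapper's test_data() returns the MSE over a data set; a short sketch, reusing the net and training objects from above:

// Sketch: report the mean squared error over the training set after training.
float mse = net.test_data(training);
std::cout << "final MSE: " << mse << std::endl;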
Example #7
void Execution::runTraining()
{
    assert(net && "Illegal program state, netuninitialized");
    if (isTrainingSelectionValid() == 0)
    {
        return;
    }
    emit preparingToTrain(this);

    double **inputPatterns = trainingInputsSelection.getData();
    double **referenceOutputs = trainingReferenceSelection.getData();
    const int numPatterns = trainingInputsSelection.numRows();

    FANN::training_data data;
    data.set_train_data(numPatterns, net->get_num_input(), inputPatterns,
                        net->get_num_output(), referenceOutputs);

    resetErrorButton->setDisabled(true);
    for (int i = 0; i < maxEpochs; ++i)
    {
        Selection& s = trainingErrorSelection;
        std::cout << i << std::endl;
        double mse = net->train_epoch(data);
        if (s.isValid() && (i < s.numRows()))
        {
            client->writeToCell(s.getSheet(), s.getStartR() + i, s.getStartC(), mse);
        }
        if (mse < desiredMSE)
        {
            QMessageBox::information(this, tr("Information"),
                                     tr("Net has reached the desired MSE of ") + QString::number(mse) +
                                     tr(" after ") + QString::number(i) + tr(" epochs."));
            break;
        }
    }
    resetErrorButton->setEnabled(true);
}
Example #8
void neuralNetworkTraining(std::string training_data_file)
{
    /*
     * Parameters for the create_standard method.
     *
     * num_layers         : The total number of layers, including the input and the output layer.
     * num_input_neurons  : The number of neurons in the input layer.
     * num_hidden_neurons : The number of neurons in the hidden layer.
     * num_output_neurons : The number of neurons in the output layer.
     */
    const unsigned int num_layers = 3;
    const unsigned int num_input_neurons = 8;
    const unsigned int num_hidden_neurons = 5;
    const unsigned int num_output_neurons = 1;

    /*
     * Parameters for the train_on_data method.
     *
     * desired_error          : The desired get_MSE or get_bit_fail, depending on which stop function is chosen by set_train_stop_function.
     * max_epochs             : The maximum number of epochs the training should continue.
     * epochs_between_reports : The number of epochs between printing a status report to stdout. A value of zero means no reports are printed.
     */
    const float desired_error = DESIRED_ERROR;
    const unsigned int max_epochs = MAX_EPOCHS;
    const unsigned int epochs_between_reports = EPOCHS_BETWEEN_REPORTS;

    FANN::neural_net net;
    // Create a standard fully connected backpropagation neural network.
    net.create_standard(num_layers, num_input_neurons, num_hidden_neurons, num_output_neurons);

    net.set_activation_function_hidden(FANN::SIGMOID_SYMMETRIC_STEPWISE); // Set the activation function for all of the hidden layers.
    net.set_activation_function_output(FANN::SIGMOID_SYMMETRIC_STEPWISE); // Set the activation function for the output layer.
    net.set_training_algorithm(FANN::TRAIN_RPROP);                        // Set the training algorithm.
    net.randomize_weights(-INIT_EPSILON, INIT_EPSILON);                   // Give each connection a random weight between -INIT_EPSILON and INIT_EPSILON.

    std::cout << std::endl << "Network Type                         :  ";
    switch (net.get_network_type())
    {
    case FANN::LAYER:
        std::cout << "LAYER" << std::endl;
        break;
    case FANN::SHORTCUT:
        std::cout << "SHORTCUT" << std::endl;
        break;
    default:
        std::cout << "UNKNOWN" << std::endl;
        break;
    }
    net.print_parameters();

    std::cout << std::endl << "Training Network." << std::endl;
    FANN::training_data data;
    if (data.read_train_from_file(training_data_file))
    {
        std::cout << "Max Epochs: " << std::setw(8) << max_epochs << ". " << "Desired Error: " << std::left << desired_error << std::right << std::endl;

        net.set_callback(printCallback, NULL);                                      // Sets the callback function for use during training.
        net.train_on_data(data, max_epochs, epochs_between_reports, desired_error); // Trains on an entire dataset, for a period of time.

        std::cout << "Saving Network." << std::endl;
        net.save("neural_network_controller_float.net");
        unsigned int decimal_point = net.save_to_fixed("neural_network_controller_fixed.net");
        data.save_train_to_fixed("neural_network_controller_fixed.data", decimal_point);
    }
}
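
Example #8 writes the trained controller out in both floating-point and fixed-point form. Reloading the floating-point file for inference is straightforward with create_from_file(); a minimal sketch, reusing the file name saved above with a dummy 8-value input:

// Sketch: reload the saved network and run it on a single input pattern.
FANN::neural_net loaded;
if (loaded.create_from_file("neural_network_controller_float.net"))
{
    fann_type input[8] = {0};               // matches num_input_neurons above; dummy values
    fann_type *output = loaded.run(input);  // pointer to the net's output layer values
    std::cout << "output: " << output[0] << std::endl;
}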
Example #9
void trainingThread::train()
{
    std::stringstream log;
    log << std::endl << " test started." << std::endl;

    const float learning_rate = netConfigPTR->learning_rate ;
    const unsigned int num_layers = netConfigPTR->num_layers;
    const unsigned int num_input = netConfigPTR->num_input;
    const unsigned int num_hidden = netConfigPTR->num_hidden;
    const unsigned int num_output = netConfigPTR->num_output;
    const float desired_error = netConfigPTR->desired_error;
    const unsigned int max_iterations = netConfigPTR->max_iterations;
    const unsigned int iterations_between_reports = netConfigPTR->iterations_between_reports;

    log << std::endl << "Creating network." << std::endl;

    FANN::neural_net net;
    if (netConfigPTR->leyersVector.size() > 1)
    {
        unsigned int vectorSize = netConfigPTR->leyersVector.size();
        unsigned int* leyers = new unsigned int[vectorSize+2];
        leyers[0] = num_input;
        for (unsigned int i = 0; i < vectorSize; ++i)
        {
            leyers[i+1] = netConfigPTR->leyersVector.at(i);
        }

        leyers[vectorSize + 1] = num_output;   // last element of the array holds the output layer size

        for ( unsigned int i = 0 ; i< vectorSize+2 ; ++i)
        {
            qDebug() << "vector size: "<< vectorSize+2<<" i:"<<i<< " leyers "<< leyers[i];
        }
        net.create_standard_array(vectorSize+2, leyers);
        //net.create_standard(vectorSize+2, leyers[0], leyers[2],leyers[3], leyers[1]);


        delete[] leyers;
    }
    else
    {
        net.create_standard(num_layers, num_input, num_hidden, num_output);
    }

    net.set_learning_rate(learning_rate);

    net.set_activation_steepness_hidden(1.0);
    net.set_activation_steepness_output(1.0);

    net.set_activation_function_hidden(FANN::SIGMOID_SYMMETRIC_STEPWISE);
    net.set_activation_function_output(FANN::SIGMOID_SYMMETRIC_STEPWISE);

    // Set additional properties such as the training algorithm

    net.set_training_algorithm(netConfigPTR->trainingAlgo);

    // Output network type and parameters
    log << std::endl << "Network Type                         :  ";
    switch (net.get_network_type())
    {
    case FANN::LAYER:
        log << "LAYER" << std::endl;
        break;
    case FANN::SHORTCUT:
        log << "SHORTCUT" << std::endl;
        break;
    default:
        log << "UNKNOWN" << std::endl;
        break;
    }
    //net.print_parameters();

    log << std::endl << "Training network." << std::endl;

    FANN::training_data data;
    if (data.read_train_from_file(netConfigPTR->trainingDataPatch))
    {
        // Initialize and train the network with the data
        net.init_weights(data);

        log << "Max Epochs " << std::setw(8) << max_iterations << ". "
            << "Desired Error: " << std::left << desired_error << std::right << std::endl;
        emit updateLog(QString::fromStdString(log.str()));
        log.str("");
        log.clear();
        net.set_callback(print_callback, nullptr);
        net.train_on_data(data, max_iterations,
                          iterations_between_reports, desired_error);

        log << std::endl << "Testing network." << std::endl;

        for (unsigned int i = 0; i < data.length_train_data(); ++i)
        {
            // Run the network on the test data
            fann_type *calc_out = net.run(data.get_input()[i]);

            log << "test (";
            for (unsigned int j = 0; j < num_input; ++j)
            {
                log  << std::showpos << data.get_input()[i][j] << ", ";
                //qDebug()<< "jestem w log<<";
            }
            log <<  ") -> " ;
            for(unsigned int k = 0 ; k < num_output ; ++k)
            {
                log << calc_out[k] <<", ";
            }
            log << ",\t should be ";
            for(unsigned int k = 0 ; k < num_output ; ++k)
            {
                log << data.get_output()[i][k] <<", ";
            }
            log << std::endl ;
        }

        log << std::endl << "Saving network." << std::endl;

        // Save the network in floating point and fixed point
        net.save(netConfigPTR->netFloat);
        unsigned int decimal_point = net.save_to_fixed(netConfigPTR->netFixed);
        std::string path = netConfigPTR->trainingDataPatch.substr(0,netConfigPTR->trainingDataPatch.size()-5);
        data.save_train_to_fixed(path +"_fixed.data", decimal_point);

        log << std::endl << "test completed." << std::endl;
        emit updateLog(QString::fromStdString(log.str()));
        emit updateProgressBar(100);
    }
}