Example No. 1
int main() {
    cv::Mat img = util::in();             // read the input image
    auto vec = mat_to_vector( img );      // flatten it into a float vector

    FANN::neural_net nn;
    nn.create_from_file( "neural.net" );  // load a previously trained network
    auto result = nn.run( vec.data() );   // result points at the output-layer values

    std::cout << "Dog index: " << result[0] << '\n';
    std::cout << "Human index: " << result[1] << '\n';
}
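
The helpers util::in() and mat_to_vector() come from the surrounding project and are not shown. A minimal sketch of what mat_to_vector could look like, assuming an 8-bit image and FANN built with float as fann_type (both assumptions, not taken from the original code):

#include <opencv2/core.hpp>
#include <vector>

// Hypothetical helper: flatten a cv::Mat into the float vector the network expects.
// Assumes 8-bit pixel data; values are scaled to [0, 1].
std::vector<float> mat_to_vector(const cv::Mat& img)
{
    cv::Mat f;
    img.convertTo(f, CV_32F, 1.0 / 255.0);   // convert to float and scale to [0, 1]
    if (!f.isContinuous())
        f = f.clone();                        // ensure a contiguous buffer
    return std::vector<float>(f.ptr<float>(0),
                              f.ptr<float>(0) + f.total() * f.channels());
}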
Example No. 2
void validate_net(FANN::neural_net &net, const std::string &oFileName,
                  const std::string &actionFileName,
                  FANN::training_data *vData, int num_output)
{
    std::ofstream outputFile(oFileName.c_str());
    std::ifstream finAction(actionFileName.c_str());
    float sumMSE = 0;

    for (unsigned int i = 0; i < vData->length_train_data(); ++i)
    {
        // Run the network on one validation sample and measure its error
        fann_type *input = vData->get_input()[i];
        fann_type *calc_out = net.run(input);
        float mse = calc_MSE(calc_out, vData->get_output()[i], num_output);
        sumMSE += mse;

        // The action file holds one line per sample
        std::string line;
        getline(finAction, line);

        outputFile << "***********" << mse << std::endl;

        // Inputs 19-22 are one-hot flags for the betting round
        if ((int)input[19] == 1) {
            outputFile << "PREFLOP" << std::endl;
        } else if ((int)input[20] == 1) {
            outputFile << "FLOP" << std::endl;
        } else if ((int)input[21] == 1) {
            outputFile << "TURN" << std::endl;
        } else if ((int)input[22] == 1) {
            outputFile << "RIVER" << std::endl;
        }
        outputFile << line << std::endl;

        // Network output followed by the expected output
        outputFile << "nn output" << std::endl;
        for (int k = 0; k < num_output; k++) {
            outputFile << calc_out[k] << " ";
        }
        outputFile << std::endl;
        for (int j = 0; j < num_output; j++) {
            outputFile << vData->get_output()[i][j] << " ";
        }
        outputFile << std::endl;
    }
    outputFile << "Average MSE: " << sumMSE / vData->length_train_data() << std::endl;
    outputFile.close();
}
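
calc_MSE is defined elsewhere in that project. A minimal sketch consistent with how it is called here, i.e. the mean squared error over the num_output values of one sample (an assumption about its behavior, not the original implementation):

// Mean squared error between the network output and the desired output
// for a single sample with num_output values.
float calc_MSE(fann_type *calc_out, fann_type *desired, int num_output)
{
    float sum = 0.0f;
    for (int k = 0; k < num_output; ++k) {
        float diff = calc_out[k] - desired[k];
        sum += diff * diff;
    }
    return sum / num_output;
}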
Example No. 3
void translate_wav(WAVFile input)
{
    double *in;
    fftw_complex *out;
    fftw_complex *in_back;
    double *out_back;
    fftw_plan plan;
    fftw_plan plan_back;

    int length = input.m_data_header.sub_chunk_2_size;
    // A forward r2c transform of size N produces N/2+1 complex bins
    in = (double*)fftw_malloc(sizeof(double)*samples_per_segment);
    out = (fftw_complex*)fftw_malloc(sizeof(fftw_complex)*(samples_per_segment/2+1));
    in_back = (fftw_complex*)fftw_malloc(sizeof(fftw_complex)*(samples_per_segment/2+1));
    out_back = (double*)fftw_malloc(sizeof(double)*(samples_per_segment+2));

    plan = fftw_plan_dft_r2c_1d(samples_per_segment, in, out, FFTW_ESTIMATE);
    plan_back = fftw_plan_dft_c2r_1d(samples_per_segment, in_back, out_back, FFTW_ESTIMATE);

    FANN::neural_net net;
    net.create_from_file("net.net");

    // Room for the whole signal, rounded up to a full segment (VLA: a compiler extension)
    short outbuffer[(((length/2)/samples_per_segment)+1)*samples_per_segment];

    int sample_number = 0;
    while (sample_number < length/2) {
        // Copy one segment of 16-bit PCM samples, scaled to [-1, 1]
        for (int i = 0; i < samples_per_segment; i++) {
            if (sample_number+i >= length/2) {
                break;
            }
            in[i] = (double)input.m_data.PCM16[sample_number+i]/((double)(65536/2));
        }

        fftw_execute(plan);

        // Feed the real parts of the spectrum to the network
        float *input_train = (float*)malloc(sizeof(float)*(samples_per_segment/2+1));
        for (int i = 0; i < samples_per_segment/2+1; i++) {
            input_train[i] = out[i][0];
        }

        // Use neural net to translate voice
        std::cout << "outputting data\n";
        float *out_net = net.run(input_train);
        free(input_train);

        // The network output holds the real parts first, then the imaginary parts
        for (int i = 0; i < samples_per_segment/2+1; i++) {
            in_back[i][0] = out_net[i];
            in_back[i][1] = out_net[i+samples_per_segment/2+1];
        }

        memset(out_back, 0, sizeof(double)*samples_per_segment);
        fftw_execute(plan_back);

        // FFTW's inverse transform is unnormalized, so divide by the segment size
        for (int i = 0; i < samples_per_segment; i++) {
            outbuffer[sample_number+i] = ((out_back[i])/samples_per_segment)*(65536.f/2.f);
        }

        sample_number += samples_per_segment;
    }

    fftw_destroy_plan(plan);
    fftw_destroy_plan(plan_back);
    fftw_free(in);
    fftw_free(out);
    fftw_free(in_back);
    fftw_free(out_back);

    WAVFile output_wav(outbuffer, length, 16);
    output_wav.writeToFile("output.wav");
}
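
Independent of the network, the FFTW pairing used above (r2c forward, c2r inverse of the same size) can be checked with a small round trip; since FFTW's inverse is unnormalized, the result must be divided by the transform size. A standalone sketch, not taken from the original source:

#include <fftw3.h>
#include <iostream>

int main()
{
    const int N = 8;
    double* in = (double*)fftw_malloc(sizeof(double) * N);
    fftw_complex* spec = (fftw_complex*)fftw_malloc(sizeof(fftw_complex) * (N/2 + 1));
    double* back = (double*)fftw_malloc(sizeof(double) * N);

    fftw_plan fwd = fftw_plan_dft_r2c_1d(N, in, spec, FFTW_ESTIMATE);
    fftw_plan inv = fftw_plan_dft_c2r_1d(N, spec, back, FFTW_ESTIMATE);

    for (int i = 0; i < N; i++) in[i] = i;   // dummy signal
    fftw_execute(fwd);                       // N real samples -> N/2+1 complex bins
    fftw_execute(inv);                       // back to N real samples, scaled by N

    for (int i = 0; i < N; i++)
        std::cout << in[i] << " " << back[i] / N << "\n";  // the two columns should match

    fftw_destroy_plan(fwd);
    fftw_destroy_plan(inv);
    fftw_free(in);
    fftw_free(spec);
    fftw_free(back);
}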
Example No. 4
void trainingThread::train()
{
    std::stringstream log;
    log << std::endl << "Test started." << std::endl;

    const float learning_rate = netConfigPTR->learning_rate ;
    const unsigned int num_layers = netConfigPTR->num_layers;
    const unsigned int num_input = netConfigPTR->num_input;
    const unsigned int num_hidden = netConfigPTR->num_hidden;
    const unsigned int num_output = netConfigPTR->num_output;
    const float desired_error = netConfigPTR->desired_error;
    const unsigned int max_iterations = netConfigPTR->max_iterations;
    const unsigned int iterations_between_reports = netConfigPTR->iterations_between_reports;

    log << std::endl << "Creating network." << std::endl;

    FANN::neural_net net;
    if (netConfigPTR->leyersVector.size() > 1)
    {
        // Build the layer-size array: input layer, the configured hidden layers, output layer
        unsigned int vectorSize = netConfigPTR->leyersVector.size();
        unsigned int* leyers = new unsigned int[vectorSize+2];
        leyers[0] = num_input;
        for (unsigned int i = 0; i < vectorSize; ++i)
        {
            leyers[i+1] = netConfigPTR->leyersVector.at(i);
        }
        leyers[vectorSize+1] = num_output;

        for (unsigned int i = 0; i < vectorSize+2; ++i)
        {
            qDebug() << "vector size: " << vectorSize+2 << " i: " << i << " leyers " << leyers[i];
        }
        net.create_standard_array(vectorSize+2, leyers);

        delete[] leyers;
    }
    else
    {
        net.create_standard(num_layers, num_input, num_hidden, num_output);
    }

    net.set_learning_rate(learning_rate);

    net.set_activation_steepness_hidden(1.0);
    net.set_activation_steepness_output(1.0);

    net.set_activation_function_hidden(FANN::SIGMOID_SYMMETRIC_STEPWISE);
    net.set_activation_function_output(FANN::SIGMOID_SYMMETRIC_STEPWISE);

    // Set additional properties such as the training algorithm

    net.set_training_algorithm(netConfigPTR->trainingAlgo);

    // Output network type and parameters
    log << std::endl << "Network Type                         :  ";
    switch (net.get_network_type())
    {
    case FANN::LAYER:
        log << "LAYER" << std::endl;
        break;
    case FANN::SHORTCUT:
        log << "SHORTCUT" << std::endl;
        break;
    default:
        log << "UNKNOWN" << std::endl;
        break;
    }
    //net.print_parameters();

    log << std::endl << "Training network." << std::endl;

    FANN::training_data data;
    if (data.read_train_from_file(netConfigPTR->trainingDataPatch))
    {
        // Initialize and train the network with the data
        net.init_weights(data);

        log << "Max Epochs " << std::setw(8) << max_iterations << ". "
            << "Desired Error: " << std::left << desired_error << std::right << std::endl;
         emit updateLog(QString::fromStdString(log.str()));
        log << "dupa";
        log.str("");
        log.clear();
        net.set_callback(print_callback, nullptr);
        net.train_on_data(data, max_iterations,
                          iterations_between_reports, desired_error);

        log << std::endl << "Testing network." << std::endl;

        for (unsigned int i = 0; i < data.length_train_data(); ++i)
        {
            // Run the network on the test data
            fann_type *calc_out = net.run(data.get_input()[i]);

            log << "test (";
            for (unsigned int j = 0; j < num_input; ++j)
            {
                log << std::showpos << data.get_input()[i][j] << ", ";
            }
            log <<  ") -> " ;
            for(unsigned int k = 0 ; k < num_output ; ++k)
            {
                log << calc_out[k] <<", ";
            }
            log << ",\t should be ";
            for(unsigned int k = 0 ; k < num_output ; ++k)
            {
                log << data.get_output()[i][k] <<", ";
            }
            log << std::endl ;
        }

        log << std::endl << "Saving network." << std::endl;

        // Save the network in floating point and fixed point
        net.save(netConfigPTR->netFloat);
        unsigned int decimal_point = net.save_to_fixed(netConfigPTR->netFixed);
        std::string path = netConfigPTR->trainingDataPatch.substr(0,netConfigPTR->trainingDataPatch.size()-5);
        data.save_train_to_fixed(path +"_fixed.data", decimal_point);

        log << std::endl << "test completed." << std::endl;
        emit updateLog(QString::fromStdString(log.str()));
        emit updateProgressBar(100);
    }
}
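
print_callback is not shown in this example. The FANN C++ wrapper expects the function passed to set_callback to match its callback signature; a minimal version, modeled on the progress callback used in FANN's own C++ sample code, could look like this:

// Reports training progress; returning a negative value stops training early.
int print_callback(FANN::neural_net &net, FANN::training_data &train,
                   unsigned int max_epochs, unsigned int epochs_between_reports,
                   float desired_error, unsigned int epochs, void *user_data)
{
    std::cout << "Epochs     " << std::setw(8) << epochs << ". "
              << "Current Error: " << std::left << net.get_MSE() << std::right << std::endl;
    return 0;
}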