Example #1
/* Builds a random graph with the Barabási–Albert preferential-attachment model:
 * start from a small connected seed of init_node_nb nodes, then grow the network
 * one node at a time, attaching each new node with incr_edge_nb edges that favour
 * nodes which already have many links. */
struct network BA_algorithm (int init_node_nb, int incr_edge_nb, int total){
    int i, k, done;
    struct node * nodeptr;
    struct network n;
    n = new_network(init_node_nb);
    /* Seed phase: create the initial nodes and chain them together. */
    for(i = 0; i < init_node_nb; i++){
        add_default_node(&n);
        if (i) add_edge(&n.nodes[i - 1], &n.nodes[i]);
    }
    /* Growth phase: every new node gets incr_edge_nb edges. */
    for(i = init_node_nb; i < total; i++){
        add_default_node(&n);
        for (k = 0; k < incr_edge_nb; k++){
            done = 0;
            do {
                /* Pick a candidate node uniformly at random. */
                nodeptr = &n.nodes[(int)(rand() / (double)RAND_MAX * (n.node_nb - 1))];
                /* Accept it with probability proportional to its degree
                 * (preferential attachment), rejecting self-loops. */
                if ((rand() / (double)RAND_MAX < nodeptr->adj_node_nb / (double) n.node_nb) && nodeptr->id != i) {
                    add_edge(&n.nodes[i], nodeptr);
                    done = 1;
                }
            } while (!done);
        }
    }
    return n;
}
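
A minimal usage sketch for the function above, assuming the surrounding module declares struct network, struct node, new_network, add_default_node and add_edge as used here; the free_network() call is purely hypothetical, since the excerpt does not show how the network's memory is released.

#include <stdio.h>
#include <stdlib.h>
#include <time.h>

int main(void) {
    srand((unsigned int)time(NULL));   /* BA_algorithm relies on rand() */

    /* Seed of 3 nodes, 2 edges per new node, 100 nodes in total. */
    struct network net = BA_algorithm(3, 2, 100);

    printf("Generated a scale-free network with %d nodes\n", net.node_nb);

    /* free_network(&net);  -- hypothetical cleanup helper, not shown in the excerpt */
    return 0;
}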
Example #2
// ------------------- MAIN ----------------- //
// ------------------------------------------ //
int main(){
    srand((unsigned int)time(NULL));
    
    /* Pre-allocate one input vector (DIMENSION_INPUT + 1, presumably to hold a
     * bias term) and one target vector per possible sample, then load the data. */
    Vector* training_data[MAX_INPUT_LENGHT];
    Vector* teaching_data[MAX_INPUT_LENGHT];
    for (int i = 0; i < MAX_INPUT_LENGHT; i++) {
        training_data[i] = new_vec(DIMENSION_INPUT+1);
        teaching_data[i] = new_vec(DIMENSION_OUTPUT);
    }
    size_t TRAINING_SET_SIZE = read_input(training_data, teaching_data);
    
    // input size, output size, number of hidden layers, then one size per hidden layer
    Network* network = new_network(DIMENSION_INPUT, DIMENSION_OUTPUT, 2, 4, 4);
//    print_network(network);
    
    /* Snapshot buffer for the best weight set seen so far: one vector per neuron
     * in every hidden layer, plus one extra block for the output layer. */
    Vector*** best_weights = malloc((network->hidden_layers_count+1) * sizeof(Vector**));
    for (size_t layer = 0; layer < network->hidden_layers_count; layer++) {
        best_weights[layer] = malloc(network->hidden_layers[layer]->size * sizeof(Vector*));
        for (size_t neuron_id = 0; neuron_id < network->hidden_layers[layer]->size; neuron_id++) {
            best_weights[layer][neuron_id] = new_vec(network->hidden_layers[layer]->neurons[neuron_id]->weights->length);
        }
    }
    best_weights[network->hidden_layers_count] = malloc(network->output_layer->size * sizeof(Vector*));
    for (size_t neuron_id = 0; neuron_id < network->output_layer->size; neuron_id++) {
        best_weights[network->hidden_layers_count][neuron_id] = new_vec(network->output_layer->neurons[neuron_id]->weights->length);
    }
    
    time_t time_at_beginning = time(0);
    
    double total_error_old = FLOAT_MAX;        /* FLOAT_MAX: project-defined sentinel */
    double total_error = 1.0;
    double minimum_error_achieved = FLOAT_MAX;
    double epsilon = 0.0001;
    size_t epoch_count = 0;
    
    /* Train for at most 30 seconds, or until the total error drops below epsilon. */
    while ((time(0) - time_at_beginning) < 30 && (total_error = error_total(network, training_data, teaching_data, TRAINING_SET_SIZE)) > epsilon) {
        /* Snapshot the weights whenever a new best error is reached. */
        if (minimum_error_achieved > total_error) {
            minimum_error_achieved = total_error;
            dump_weights(network, best_weights);
//            print_detailed_layer(network->hidden_layers[1]);
        }
        /* One epoch: backpropagate every training sample once. */
        for (size_t i = 0; i < TRAINING_SET_SIZE; i++) {
            train_network_with_backprop(network, training_data[i], teaching_data[i]);
        }
        
        /* Every 1000 epochs, perturb the weights if the error has stagnated. */
        if (epoch_count % 1000 == 0) {
//            printf("Epochs count: %ld\n",epoch_count);
            if (fabs(total_error - total_error_old) < 0.001) {
//                printf("Shaking Weights!\n");
                shake_weights(network);
            }
            total_error_old = total_error;
//            printf("Total error: %.15lf\n", total_error);
        }
        update_learning_rate(network, ++epoch_count);
        scramble_data(training_data, teaching_data, TRAINING_SET_SIZE);
    }
    
//    printf("Network training finished with a total error: %.15lf\n", total_error);
//    printf("Network training achieved a minimum total error: %.15lf\n", minimum_error_achieved);
//    print_detailed_layer(network->hidden_layers[1]);
    /* Restore the best weights seen during training, then evaluate. */
    load_weights(network, best_weights);
//    print_detailed_layer(network->input_layer);
//    print_detailed_layer(network->hidden_layers[0]);
//    print_detailed_layer(network->hidden_layers[1]);
//    print_detailed_layer(network->output_layer);
    test_network(network);
    
    /* Release the snapshot buffer: the per-neuron vectors, the per-layer arrays,
     * and the top-level array itself (the arrays were leaked in the original). */
    for (size_t layer = 0; layer < network->hidden_layers_count; layer++) {
        for (size_t neuron_id = 0; neuron_id < network->hidden_layers[layer]->size; neuron_id++) {
            delete_vec(best_weights[layer][neuron_id]);
        }
        free(best_weights[layer]);
    }
    for (size_t neuron_id = 0; neuron_id < network->output_layer->size; neuron_id++) {
        delete_vec(best_weights[network->hidden_layers_count][neuron_id]);
    }
    free(best_weights[network->hidden_layers_count]);
    free(best_weights);
    
    delete_network(network);
    
    for (int i = 0; i < MAX_INPUT_LENGHT; i++) {
        delete_vec(training_data[i]);
        delete_vec(teaching_data[i]);
    }
    
    
    return EXIT_SUCCESS;
}
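
The snapshot helpers dump_weights and load_weights are not shown in this excerpt. Below is a minimal sketch of what they could look like, assuming the Network/Layer/Neuron/Vector layout implied by the allocation code above (hidden_layers, output_layer, neurons[i]->weights) and a hypothetical copy_vec(dst, src) helper; it is an illustration of the snapshot pattern, not the project's actual implementation.

/* Hypothetical sketch: copy every neuron's weight vector into the snapshot buffer. */
void dump_weights(Network* net, Vector*** snapshot) {
    for (size_t layer = 0; layer < net->hidden_layers_count; layer++) {
        for (size_t n = 0; n < net->hidden_layers[layer]->size; n++) {
            copy_vec(snapshot[layer][n], net->hidden_layers[layer]->neurons[n]->weights);
        }
    }
    for (size_t n = 0; n < net->output_layer->size; n++) {
        copy_vec(snapshot[net->hidden_layers_count][n], net->output_layer->neurons[n]->weights);
    }
}

/* Hypothetical sketch: the inverse operation, restoring the snapshot into the network. */
void load_weights(Network* net, Vector*** snapshot) {
    for (size_t layer = 0; layer < net->hidden_layers_count; layer++) {
        for (size_t n = 0; n < net->hidden_layers[layer]->size; n++) {
            copy_vec(net->hidden_layers[layer]->neurons[n]->weights, snapshot[layer][n]);
        }
    }
    for (size_t n = 0; n < net->output_layer->size; n++) {
        copy_vec(net->output_layer->neurons[n]->weights, snapshot[net->hidden_layers_count][n]);
    }
}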