/*
 * Runs the trained network over test samples read from stdin (one sample
 * per line) until EOF and prints one prediction per sample.
 *
 * REGRESSION mode:     each line holds one value; prints the denormalised
 *                      network output with 7 decimal places.
 * CLASSIFICATION mode: each line holds "x,y"; prints "+1" when the raw
 *                      network output is >= 0, otherwise "-1".
 */
void test_network(Network* network){
    char* input_line = malloc(50*sizeof(char));
    Vector* testing_point_in = new_vec(DIMENSION_INPUT+1);
    /* Fix: the original allocated a Vector here with new_vec() and then
     * immediately lost it — compute_output_network() returns a fresh
     * vector each iteration, so the initial allocation was leaked. */
    Vector* testing_point_out = NULL;
    size_t test_set_size = 0;
    /* Fix: %49s bounds the read to the 50-byte buffer; the original
     * unbounded %s could overflow input_line. */
    while (scanf("%49s\n", input_line) != EOF) {
        test_set_size++;
#if REGRESSION
        sscanf(input_line, "%lf\n", &testing_point_in->scalars[0]);
        testing_point_in->scalars[0] = normalise_data(testing_point_in->scalars[0], MAX_VALUES_INPUT[0], MIN_VALUES_INPUT[0]);
        testing_point_in->scalars[DIMENSION_INPUT] = BIAS;
        testing_point_out = compute_output_network(network, testing_point_in);
        printf("%.7lf\n", denormalise_data(testing_point_out->scalars[0], MAX_VALUES_INPUT[1], MIN_VALUES_INPUT[1]));
#elif CLASSIFICATION
        sscanf(input_line, "%lf,%lf\n", &testing_point_in->scalars[0], &testing_point_in->scalars[1]);
        testing_point_in->scalars[DIMENSION_INPUT] = BIAS;
        testing_point_out = compute_output_network(network, testing_point_in);
        /* Binary decision on the sign of the raw network output. */
        if (testing_point_out->scalars[0] >= 0) {
            printf("+1\n");
        }else{
            printf("-1\n");
        }
#endif
        delete_vec(testing_point_out); /* free the per-sample output */
    }
    delete_vec(testing_point_in);
    free(input_line);
}
/*
 * Ad-hoc test driver for the generic vector container: fills a vector
 * with heap-allocated ints, appends one more element, removes the first
 * one and prints the result.
 *
 * Fix: the original also created a second vector 'q' that was never used
 * and only leaked memory — it has been removed.
 *
 * NOTE(review): the remaining allocations (y[i], z, the vector itself)
 * are not freed before exit; the ownership semantics of add_vec /
 * delete_vec are not visible here, so they are left to the OS to
 * reclaim — confirm against the vector implementation.
 */
int main() {
    srand(time(NULL));
    vector* v = new_vector(2, sizeof(int), compare_integer);
    int **y = malloc(sizeof(int*)*8);
    int predef[] = {1, 50, 12, 68, 3, 4, 78, 2};
    for (int i = 0; i < 8; i++) {
        y[i] = malloc(sizeof(int));
        *y[i] = predef[i];
    }
    int *z = malloc(sizeof(int));
    *z = 888;
    add_all_vec(v, (void**) y, 8);  /* bulk-insert the 8 predefined values */
    add_vec(v, z);                  /* append one more element */
    delete_vec(v, &y[0]);           /* remove the first inserted element */
    print_vector(v);
    return 0;
}
/*
 * Computes the accumulated error of the network over the whole training
 * set: for each sample, forward-propagates the input and adds the
 * error_function() value between the teaching target and the network's
 * first output scalar.
 *
 * Returns the sum of per-sample errors.
 */
double error_total(Network* network, Vector* training_data[], Vector* teaching_data[], size_t training_data_size){
    double err_accumulator = 0.0;
    /* Fix: size_t index — the original 'int i' was compared against the
     * size_t count (signed/unsigned mismatch). */
    for (size_t i = 0; i < training_data_size; i++) {
        Vector* network_output = compute_output_network(network, training_data[i]);
        err_accumulator += error_function(teaching_data[i]->scalars[0], network_output->scalars[0]);
        delete_vec(network_output); /* we own the per-sample output vector */
    }
    return err_accumulator;
}
/*
 * Feeds 'input' forward through the entire network (input layer, every
 * hidden layer in order, then the output layer) and returns a freshly
 * allocated vector of output-layer activations.
 *
 * Ownership: the caller must delete_vec() the returned vector; all
 * intermediate per-layer results are released here.
 */
Vector* compute_output_network(Network* network, Vector* input){
    Vector* current = compute_output_layer(network->input_layer, input);
    for (size_t i = 0; i < network->hidden_layers_count; i++) {
        Vector* next = compute_output_layer(network->hidden_layers[i], current);
        delete_vec(current); /* done with the previous layer's output */
        current = next;
    }
    Vector* result = compute_output_layer(network->output_layer, current);
    delete_vec(current);
    return result;
}
/*
 * Releases a layer and everything it owns: every non-NULL neuron, the
 * neuron pointer array, the deltas vector, and finally the layer struct
 * itself.
 */
void delete_layer(Layer* layer){
    for (size_t i = 0; i < layer->size; i++) {
        if (layer->neurons[i] == NULL) {
            continue; /* slot was never populated */
        }
        delete_neuron(layer->neurons[i]);
    }
    free(layer->neurons);
    delete_vec(layer->deltas);
    free(layer);
}
/*
 * Builds a look-at view matrix: an orientation matrix whose rows are the
 * camera's (right, up, -forward) basis, multiplied by a translation that
 * moves cam_pos to the origin.
 *
 * All temporary vectors/matrices are released; the caller owns the
 * returned matrix.
 */
Mat* create_look_at_mat(Vec* cam_pos, Vec* targ_pos, Vec* up) {
    Mat* translation = create_translation_mat(-cam_pos->x, -cam_pos->y, -cam_pos->z);

    /* Derive an orthonormal camera basis from the view direction. */
    Vec* to_target = vec_minus_vec(targ_pos, cam_pos);
    Vec* forward = normalize_vec(to_target);
    Vec* side = cross_vec(forward, up);
    Vec* right = normalize_vec(side);
    Vec* raw_up = cross_vec(right, forward);
    Vec* cam_up = normalize_vec(raw_up);

    /* Write the basis into the rotation part of an identity matrix. */
    Mat* orientation = identity_mat();
    orientation->m[0] = right->x;    orientation->m[4] = right->y;    orientation->m[8]  = right->z;
    orientation->m[1] = cam_up->x;   orientation->m[5] = cam_up->y;   orientation->m[9]  = cam_up->z;
    orientation->m[2] = -forward->x; orientation->m[6] = -forward->y; orientation->m[10] = -forward->z;

    Mat* view = mat_times_mat(orientation, translation);

    delete_mat(translation);
    delete_mat(orientation);
    delete_vec(to_target);
    delete_vec(forward);
    delete_vec(right);
    delete_vec(cam_up);
    delete_vec(side);
    delete_vec(raw_up);
    return view;
}
/*
 * Performs one online backpropagation step: forward-propagates a single
 * training sample, then updates the network parameters towards the
 * teaching value via update_parameters().
 */
void train_network_with_backprop(Network* network, Vector* training_point, Vector* teaching_point){
    Vector* prediction = compute_output_network(network, training_point);
    update_parameters(network, teaching_point, prediction);
    delete_vec(prediction); /* forward pass allocates; release it here */
}
/*
 * Reads training samples from stdin into the caller-allocated
 * training_data / teaching_data arrays, recording the observed min/max
 * per input dimension in MIN_VALUES_INPUT / MAX_VALUES_INPUT and
 * normalising the stored values with normalise_data().
 *
 * Reading stops at EOF, at a line matching TERMINATING_STR, or after
 * MAX_INPUT_LENGHT lines.  Returns the number of samples read.
 *
 * Fixes vs. original:
 *  - the '&not_normalised_...' sscanf arguments had been encoding-mangled
 *    into '¬_normalised_...' ("&not" collapsed to U+00AC), which does not
 *    compile; restored the address-of expressions;
 *  - scanf now uses %49s to bound the read to the 50-byte buffer;
 *  - loops over input_size use size_t to match its type.
 */
size_t read_input(Vector* training_data[], Vector* teaching_data[]){
    size_t input_size = 0;
    char* input_line = malloc(50*sizeof(char));
    Vector* not_normalised_training[MAX_INPUT_LENGHT];
    Vector* not_normalised_teaching[MAX_INPUT_LENGHT];
    for (int i = 0; i < MAX_INPUT_LENGHT; i++) {
        not_normalised_training[i] = new_vec(DIMENSION_INPUT+1); // +bias
        not_normalised_teaching[i] = new_vec(DIMENSION_OUTPUT);
    }
    for (int i = 0; i < MAX_INPUT_LENGHT; i++) {
        if (scanf("%49s\n", input_line) == EOF){
            break;
        }
        /* Stop on the terminating sentinel line. */
        if (!strncmp(input_line, TERMINATING_STR, (int)strlen(TERMINATING_STR)-1)) {
            break;
        }
#if REGRESSION
        sscanf(input_line, "%lf,%lf\n",
               &not_normalised_training[i]->scalars[0],
               &not_normalised_teaching[i]->scalars[0]);
        not_normalised_training[i]->scalars[1] = BIAS;
#elif CLASSIFICATION
        sscanf(input_line, "%lf,%lf,%lf\n",
               &not_normalised_training[i]->scalars[0],
               &not_normalised_training[i]->scalars[1],
               &not_normalised_teaching[i]->scalars[0]);
        not_normalised_training[i]->scalars[2] = BIAS;
#endif
        input_size++;
    }

    /* Range of the first input dimension. */
    double min = FLOAT_MAX;
    double max = FLOAT_MIN;
    for (size_t i = 0; i < input_size; i++) {
        if (not_normalised_training[i]->scalars[0] > max) {
            max = not_normalised_training[i]->scalars[0];
        }
        if (not_normalised_training[i]->scalars[0] < min) {
            min = not_normalised_training[i]->scalars[0];
        }
    }
    MAX_VALUES_INPUT[0] = max;
    MIN_VALUES_INPUT[0] = min;
#if REGRESSION
    /* Regression: second range is taken from the teaching targets. */
    min = FLOAT_MAX;
    max = FLOAT_MIN;
    for (size_t i = 0; i < input_size; i++) {
        if (not_normalised_teaching[i]->scalars[0] > max) {
            max = not_normalised_teaching[i]->scalars[0];
        }
        if (not_normalised_teaching[i]->scalars[0] < min) {
            min = not_normalised_teaching[i]->scalars[0];
        }
    }
    MAX_VALUES_INPUT[1] = max;
    MIN_VALUES_INPUT[1] = min;
#elif CLASSIFICATION
    /* Classification: second range is the second input dimension. */
    min = FLOAT_MAX;
    max = FLOAT_MIN;
    for (size_t i = 0; i < input_size; i++) {
        if (not_normalised_training[i]->scalars[1] > max) {
            max = not_normalised_training[i]->scalars[1];
        }
        if (not_normalised_training[i]->scalars[1] < min) {
            min = not_normalised_training[i]->scalars[1];
        }
    }
    MAX_VALUES_INPUT[1] = max;
    MIN_VALUES_INPUT[1] = min;
#endif
    /* Normalise the raw samples into the caller's arrays. */
    for (size_t i = 0; i < input_size; i++) {
#if REGRESSION
        training_data[i]->scalars[0] = normalise_data(not_normalised_training[i]->scalars[0], MAX_VALUES_INPUT[0], MIN_VALUES_INPUT[0]);
        training_data[i]->scalars[1] = BIAS;
        teaching_data[i]->scalars[0] = normalise_data(not_normalised_teaching[i]->scalars[0], MAX_VALUES_INPUT[1], MIN_VALUES_INPUT[1]);
#elif CLASSIFICATION
        training_data[i]->scalars[0] = normalise_data(not_normalised_training[i]->scalars[0], MAX_VALUES_INPUT[0], MIN_VALUES_INPUT[0]);
        training_data[i]->scalars[1] = normalise_data(not_normalised_training[i]->scalars[1], MAX_VALUES_INPUT[1], MIN_VALUES_INPUT[1]);
        training_data[i]->scalars[2] = BIAS;
        /* Class labels are passed through unnormalised. */
        teaching_data[i]->scalars[0] = not_normalised_teaching[i]->scalars[0];
#endif
    }
    for (int i = 0; i < MAX_INPUT_LENGHT; i++) {
        delete_vec(not_normalised_training[i]);
        delete_vec(not_normalised_teaching[i]);
    }
    free(input_line);
    return input_size;
}
// ------------------- MAIN ----------------- // // ------------------------------------------ // int main(){ srand((unsigned int)time(NULL)); Vector* training_data[MAX_INPUT_LENGHT]; Vector* teaching_data[MAX_INPUT_LENGHT]; for (int i = 0; i < MAX_INPUT_LENGHT; i++) { training_data[i] = new_vec(DIMENSION_INPUT+1); teaching_data[i] = new_vec(DIMENSION_OUTPUT); } size_t TRAINING_SET_SIZE = 0; TRAINING_SET_SIZE = read_input(training_data, teaching_data); // in_layer, out_layer, hid_layer_count, hid_layers Network* network = new_network(DIMENSION_INPUT, DIMENSION_OUTPUT, 2, 4, 4); // print_network(network); Vector*** best_weights = malloc((network->hidden_layers_count+1) * sizeof(Vector**)); for (size_t layer = 0; layer < network->hidden_layers_count; layer++) { best_weights[layer] = malloc(network->hidden_layers[layer]->size * sizeof(Vector*)); for (size_t neuron_id = 0; neuron_id < network->hidden_layers[layer]->size; neuron_id++) { best_weights[layer][neuron_id] = new_vec(network->hidden_layers[layer]->neurons[neuron_id]->weights->length); } } best_weights[network->hidden_layers_count] = malloc(network->output_layer->size * sizeof(Vector*)); for (size_t neuron_id = 0; neuron_id < network->output_layer->size; neuron_id++) { best_weights[network->hidden_layers_count][neuron_id] = new_vec(network->output_layer->neurons[neuron_id]->weights->length); } time_t time_at_beginning = time(0); double total_error_old = FLOAT_MAX; double total_error = 1.0; double minimum_error_achieved = FLOAT_MAX; double epsilon = 0.0001; size_t epoch_count = 0; while ((time(0) - time_at_beginning) < 30 && (total_error = error_total(network, training_data, teaching_data, TRAINING_SET_SIZE)) > epsilon) { if (minimum_error_achieved > total_error) { minimum_error_achieved = total_error; dump_weights(network, best_weights); // print_detailed_layer(network->hidden_layers[1]); } for (size_t i = 0; i < TRAINING_SET_SIZE; i++) { train_network_with_backprop(network, training_data[i], teaching_data[i]); 
} if (epoch_count % 1000 == 0) { // printf("Epochs count: %ld\n",epoch_count); if (fabs(total_error - total_error_old) < 0.001) { // printf("Shaking Weights!\n"); shake_weights(network); } total_error_old = total_error; // printf("Total error: %.15lf\n", total_error); } update_learning_rate(network, ++epoch_count); scramble_data(training_data, teaching_data, TRAINING_SET_SIZE); } // printf("Network training finished with a total error: %.15lf\n", total_error); // printf("Network training achieved a minimum total error: %.15lf\n", minimum_error_achieved); // print_detailed_layer(network->hidden_layers[1]); load_weights(network, best_weights); // print_detailed_layer(network->input_layer); // print_detailed_layer(network->hidden_layers[0]); // print_detailed_layer(network->hidden_layers[1]); // print_detailed_layer(network->output_layer); test_network(network); for (size_t layer = 0; layer < network->hidden_layers_count; layer++) { for (size_t neuron_id = 0; neuron_id < network->hidden_layers[layer]->size; neuron_id++) { delete_vec(best_weights[layer][neuron_id]); } } for (size_t neuron_id = 0; neuron_id < network->output_layer->size; neuron_id++) { delete_vec(best_weights[network->hidden_layers_count][neuron_id]); } delete_network(network); for (int i = 0; i < MAX_INPUT_LENGHT; i++) { delete_vec(training_data[i]); delete_vec(teaching_data[i]); } return EXIT_SUCCESS; }