// Verifies that a depth-1 alpha-beta search over every legal opening move on a
// 5x5 board picks a move that the game then accepts without throwing.
TEST(gogameab_basic_check, simple_ab_uniform) {
    uint8_t board_size = 5;
    int depth = 1;
    GoGame test_game(board_size);
    GoGameNN test_network(board_size, true);
    test_game.generate_moves(0);

    GoMove chosen_move(test_game.get_board());
    double chosen_value = -std::numeric_limits<double>::infinity();

    // Score each candidate move with alpha-beta pruning and keep the maximum.
    for (const GoMove &candidate : test_game.get_move_list()) {
        GoGame scratch_game(test_game);
        scratch_game.make_move(candidate, 0);
        double candidate_value = scalable_go_ab_prune(
            test_network, scratch_game, depth,
            -std::numeric_limits<double>::infinity(),
            std::numeric_limits<double>::infinity(), 1, false, 0);
        if (candidate_value > chosen_value) {
            chosen_value = candidate_value;
            chosen_move = candidate;
        }
    }

    EXPECT_NO_THROW(test_game.make_move(chosen_move, 0));
}
/*
 * Interactive console for the BP network: reads one-word commands from stdin
 * and dispatches to the read/train/test routines until "exit" or EOF.
 * Returns 0 on normal exit, 1 on malformed numeric test input.
 */
int main(int argc, char *argv[]) {
    char cmd[CMD_SIZE];
    char cmd_fmt[16];
    double test_in[IN];
    int i;

    /* Build a width-limited scan format (e.g. "%63s" when CMD_SIZE is 64)
     * so a long token cannot overflow cmd; plain "%s" is unbounded. */
    snprintf(cmd_fmt, sizeof cmd_fmt, "%%%ds", CMD_SIZE - 1);

    printf("********** Bpnetwork Console **********\n");
    while (TRUE) {
        /* Stop on EOF/read error instead of looping forever on a stale
         * command (the original never checked the scanf result). */
        if (scanf(cmd_fmt, cmd) != 1) {
            break;
        }
        if (!strcmp(cmd, "help")) {
            printf("read read neuron\n");
            printf("train train network\n");
            printf("test test network\n");
            printf("exit exit program\n");
        } else if (!strcmp(cmd, "read")) {
            read_neuron();
        } else if (!strcmp(cmd, "train")) {
            read_data();
            init_bpnetwork();
            ga_interface();
            train_network();
            write_neuron();
        } else if (!strcmp(cmd, "test")) {
            printf("input test data\n");
            for (i = 0; i < IN; i++) {
                /* A non-numeric token would otherwise make scanf spin. */
                if (scanf("%lf", test_in + i) != 1) {
                    printf("invalid test data\n");
                    return 1;
                }
            }
            test_network(test_in);
        } else if (!strcmp(cmd, "exit")) {
            break;
        }
    }
    return 0;
}
/* Daemon entry point: open the divert socket and the control socket, drop
 * root privileges, optionally probe the network, then service packets
 * forever. */
void tcpcryptd(void) {
    _state.s_divert = divert_open(_conf.cf_port, packet_handler);

    open_unix();
    drop_privs();

    printf("Running\n");

    /* Skip the probe when either the daemon or the network test is
     * disabled by configuration. */
    if (!(_conf.cf_disable || _conf.cf_disable_network_test))
        test_network();

    for (;;)
        do_cycle();
}
/* Run the lookup test suites (hosts, network, protocols, services) and
 * report the aggregate error count.  Returns nonzero when any test
 * recorded an error. */
static int
do_test (void)
{
  /* setdb ("db"); */
  test_hosts ();
  test_network ();
  test_protocols ();
  test_services ();

  if (error_count == 0)
    printf ("No visible errors occurred!\n");
  else
    printf ("\n %d errors occurred!\n", error_count);

  return error_count != 0;
}
int main(int argc, char **argv) { int i, j, result = 0; // process flags for -v verbose, -h help for (i = 1; i < argc; i++) { if (argv[i] && argv[i][0] == '-') { int len = strlen(argv[i]); for (j = 1; j < len; j++) { switch (argv[i][j]) { case 'h': printf("testnetwork.c: possible arguments " "-q quiet (suppress output), " "-h help\n"); return 1; break; case 'q': verbose = 0; break; default: break; } } } } result = test_network(); if (result) { printf("Test FAILED.\n"); return 1; } printf("Test PASSED.\n"); return 0; }
// ------------------- MAIN ----------------- // // ------------------------------------------ // int main(){ srand((unsigned int)time(NULL)); Vector* training_data[MAX_INPUT_LENGHT]; Vector* teaching_data[MAX_INPUT_LENGHT]; for (int i = 0; i < MAX_INPUT_LENGHT; i++) { training_data[i] = new_vec(DIMENSION_INPUT+1); teaching_data[i] = new_vec(DIMENSION_OUTPUT); } size_t TRAINING_SET_SIZE = 0; TRAINING_SET_SIZE = read_input(training_data, teaching_data); // in_layer, out_layer, hid_layer_count, hid_layers Network* network = new_network(DIMENSION_INPUT, DIMENSION_OUTPUT, 2, 4, 4); // print_network(network); Vector*** best_weights = malloc((network->hidden_layers_count+1) * sizeof(Vector**)); for (size_t layer = 0; layer < network->hidden_layers_count; layer++) { best_weights[layer] = malloc(network->hidden_layers[layer]->size * sizeof(Vector*)); for (size_t neuron_id = 0; neuron_id < network->hidden_layers[layer]->size; neuron_id++) { best_weights[layer][neuron_id] = new_vec(network->hidden_layers[layer]->neurons[neuron_id]->weights->length); } } best_weights[network->hidden_layers_count] = malloc(network->output_layer->size * sizeof(Vector*)); for (size_t neuron_id = 0; neuron_id < network->output_layer->size; neuron_id++) { best_weights[network->hidden_layers_count][neuron_id] = new_vec(network->output_layer->neurons[neuron_id]->weights->length); } time_t time_at_beginning = time(0); double total_error_old = FLOAT_MAX; double total_error = 1.0; double minimum_error_achieved = FLOAT_MAX; double epsilon = 0.0001; size_t epoch_count = 0; while ((time(0) - time_at_beginning) < 30 && (total_error = error_total(network, training_data, teaching_data, TRAINING_SET_SIZE)) > epsilon) { if (minimum_error_achieved > total_error) { minimum_error_achieved = total_error; dump_weights(network, best_weights); // print_detailed_layer(network->hidden_layers[1]); } for (size_t i = 0; i < TRAINING_SET_SIZE; i++) { train_network_with_backprop(network, training_data[i], teaching_data[i]); 
} if (epoch_count % 1000 == 0) { // printf("Epochs count: %ld\n",epoch_count); if (fabs(total_error - total_error_old) < 0.001) { // printf("Shaking Weights!\n"); shake_weights(network); } total_error_old = total_error; // printf("Total error: %.15lf\n", total_error); } update_learning_rate(network, ++epoch_count); scramble_data(training_data, teaching_data, TRAINING_SET_SIZE); } // printf("Network training finished with a total error: %.15lf\n", total_error); // printf("Network training achieved a minimum total error: %.15lf\n", minimum_error_achieved); // print_detailed_layer(network->hidden_layers[1]); load_weights(network, best_weights); // print_detailed_layer(network->input_layer); // print_detailed_layer(network->hidden_layers[0]); // print_detailed_layer(network->hidden_layers[1]); // print_detailed_layer(network->output_layer); test_network(network); for (size_t layer = 0; layer < network->hidden_layers_count; layer++) { for (size_t neuron_id = 0; neuron_id < network->hidden_layers[layer]->size; neuron_id++) { delete_vec(best_weights[layer][neuron_id]); } } for (size_t neuron_id = 0; neuron_id < network->output_layer->size; neuron_id++) { delete_vec(best_weights[network->hidden_layers_count][neuron_id]); } delete_network(network); for (int i = 0; i < MAX_INPUT_LENGHT; i++) { delete_vec(training_data[i]); delete_vec(teaching_data[i]); } return EXIT_SUCCESS; }
/* Callback that re-enables the daemon and re-runs the network probe.
 * The argument is unused, as its name says. */
static void retest_network(void *ignored) {
    (void)ignored;

    _conf.cf_disable = 0;
    test_network();
}