void reverse_step (int start_grid[][24], int stop_grid[][24], int vote_case[], double probability_case[]) { int i, j, n, idx; //int count_ambiguity[4] = {0, 0, 0, 0}; FLIP_STACK fs = {.size = 0}; for (i = 2; i < 22; ++i) { for (j = 2; j < 22; ++j) { idx = get_pattern_idx(stop_grid, i, j); start_grid[i][j] = vote_case[idx]; //if ((0.15 < probability_case[idx]) && (probability_case[idx] < 0.85)) ++count_ambiguity[0]; //if ((0.3 < probability_case[idx]) && (probability_case[idx] < 0.7)) ++count_ambiguity[1]; //if ((0.4 < probability_case[idx]) && (probability_case[idx] < 0.6)) ++count_ambiguity[2]; //if ((0.45 < probability_case[idx]) && (probability_case[idx] < 0.55)) ++count_ambiguity[3]; // put all the points with median probability into a stack if (fabs(0.5-probability_case[idx]) < 0.125) push_flip(&fs, i, j, fabs(0.5-probability_case[idx])); } } //printf("%d,%d,%d,%d,%d,\n", count_1s_grid(stop_grid), // count_ambiguity[0], count_ambiguity[1], count_ambiguity[2], count_ambiguity[3]); //printf("%d %d %f\n", imax, jmax, probability_max); quicksort_flips(fs.items, fs.size); // sort the stack FLIP_POINT ijp; double score_start, score_flip; for (n = fs.size; n > 0; --n) { score_start = difference_forward(start_grid, stop_grid); ijp = pop_flip(&fs); flip_point(ijp.i, ijp.j, start_grid); score_flip = difference_forward(start_grid, stop_grid); if (score_start <= score_flip) flip_point(ijp.i, ijp.j, start_grid); } } //////////////////////////////////////////////////////////////////////// int count_case_0[5][2097152], count_case_1[5][2097152]; // 2097152 = pow(2, 21) int vote_case[5][2097152]; double probability_case[5][2097152]; int main () { // utility parameters int n, i; clean_array((int*)count_case_0, 2097152*5); clean_array((int*)count_case_1, 2097152*5); // read the train.csv data FILE *train; train = fopen ("train.csv", "r"); while (fgetc(train) != '\n') ; // skip the head line int id, delta, initial_grid[24][24], start_grid[24][24], stop_grid[24][24]; 
clean_array((int*)initial_grid, 24*24); clean_array((int*)start_grid, 24*24); clean_array((int*)stop_grid, 24*24); // read all the train set and make statistical table for them for (n = 0; n < 50000; ++n) { read_train(train, &id, &delta, (int*)start_grid); for (i = 0; i < delta; ++i) { conway_step(start_grid, stop_grid); vote_step(start_grid, stop_grid, count_case_0[i], count_case_1[i]); copy_grid(start_grid, stop_grid); } } // vote for statistical table for (i = 0; i < 5; ++i) { for (n = 0; n < 2097152; ++n) { if (count_case_0[i][n] < count_case_1[i][n]) vote_case[i][n] = 1; else vote_case[i][n] = 0; if (count_case_0[i][n]+count_case_1[i][n] == 0) probability_case[i][n] = 0; // for the patterns happen really less frequently, we assume the original center is definitely 0. else probability_case[i][n] = (double)count_case_1[i][n] / (double)(count_case_0[i][n]+count_case_1[i][n]); } } //------------------------------------------------------------------ // check prediction accuracy ///* train = fopen ("train.csv", "r"); while (fgetc(train) != '\n') ; // skip the head line FILE *pred; pred = fopen ("pred.csv", "w"); fprintf(pred, "id,"); for (n = 1; n < 401; ++n) fprintf(pred, "start.%d,", n); for (n = 1; n < 400; ++n) fprintf(pred, "diff.%d,", n); fprintf(pred, "diff.400\n"); double difference_all = 0; for (n = 0; n < 50000; ++n) { read_train (train, &id, &delta, (int*)initial_grid); copy_grid(start_grid, initial_grid); for (i = 0; i < delta; ++i) { //for (i = 0; i < 1; ++i) { conway_step(start_grid, stop_grid); copy_grid(start_grid, stop_grid); } for (i = delta-1; i >= 0; --i) { //for (i = 0; i >= 0; --i) { reverse_step (start_grid, stop_grid, vote_case[i], probability_case[i]); copy_grid(stop_grid, start_grid); } write_pred(pred, id, initial_grid, start_grid); difference_all += difference_grids(initial_grid, start_grid); } printf("training set score: %f\n", difference_all/50000); fclose(pred); //*/ fclose(train); 
//------------------------------------------------------------------ // make submission file /* FILE *test; test = fopen ("test.csv", "r"); while (fgetc(train) != '\n') ; // skip the head line FILE *submission; submission = fopen ("submission.csv", "w"); fprintf(submission, "id,"); for (n = 1; n < 400; ++n) fprintf(submission, "start.%d,", n); fprintf(submission, "start.400\n"); for (n = 0; n < 50000; ++n) { read_test(test, &id, &delta, stop_grid); for (i = delta-1; i >= 0; --i) { reverse_step (start_grid, stop_grid, vote_case[i], probability_case[i]); copy_grid(stop_grid, start_grid); } write_submission(submission, id, start_grid); } fclose(test); fclose(submission); */ return 0; // goes the reverse check (see everything after vote_case in reverse_step). // however, the imporvement seems really tiny. // for the cutoff of points which need to be checked reversely, // I use all the predicted data (50000+50000) and got the following table. // threshold | score // 0 | 0.128226 // 0.05 | 0.127253 // 0.1 | 0.126986 // 0.12 | 0.126851 // 0.125 | 0.126803 <-- // 0.127 | 0.126826 // 0.13 | 0.126824 // 0.135 | 0.126889 // 0.14 | 0.126914 // 0.15 | 0.127000 // so I use 0.127 (should use 0.125) to predict the test set. // predicted set score: 0.12957 (-0.0003) // the improvement is even smaller than I thought, I think it should be around 0.0014. // :-( // probably the imporvement of the training set, is because we use the training set // itself to calculate the scores, which gives bias. //--------------------- // then use 0.125, and set the probably to be 0.5 if the pattern never appear. // predicted set score: 0.13034 (+0.0004) -- even go worse~~ //--------------------- // if ruling out the unfrequent patterns, it doesn't seem help. // 0.125 | 0.127049 (rule out < 2) // 0.125 | 0.127095 (rule out < 4) }
void reverse_step (int start_grid[][24], int stop_grid[][24], int vote_case[]) { int i, j, n, idx; FLIP_STACK fs = {.size = 0}; for (i = 2; i < 22; ++i) { for (j = 2; j < 22; ++j) { idx = get_pattern_idx(stop_grid, i, j); start_grid[i][j] = vote_case[idx]%2; if (vote_case[idx] >= 2) push_flip(&fs, i, j); } } FLIP_POINT ijp; double score_start, score_flip; for (n = fs.size; n > 0; --n) { score_start = difference_forward(start_grid, stop_grid); ijp = pop_flip(&fs); flip_point(ijp.i, ijp.j, start_grid); score_flip = difference_forward(start_grid, stop_grid); if (score_start <= score_flip) flip_point(ijp.i, ijp.j, start_grid); } } //////////////////////////////////////////////////////////////////////// unsigned int count_case_0[5][2097152], count_case_1[5][2097152]; // 2097152 = pow(2, 21) int vote_case[5][2097152]; //double probability_case[5][2097152]; int main () { // utility parameters int n, i; clean_array((int*)count_case_0, 2097152*5); clean_array((int*)count_case_1, 2097152*5); // read the train.csv data FILE *train; train = fopen ("train.csv", "r"); while (fgetc(train) != '\n') ; // skip the head line int id, delta, initial_grid[24][24], start_grid[24][24], stop_grid[24][24]; clean_array((int*)initial_grid, 24*24); clean_array((int*)start_grid, 24*24); clean_array((int*)stop_grid, 24*24); // read all the train set and make statistical table for them for (n = 0; n < 50000; ++n) { read_train(train, &id, &delta, (int*)start_grid); for (i = 0; i < delta; ++i) { conway_step(start_grid, stop_grid); vote_step(start_grid, stop_grid, count_case_0[i], count_case_1[i]); copy_grid(start_grid, stop_grid); } } // vote for statistical table for (i = 0; i < 5; ++i) { for (n = 0; n < 2097152; ++n) { vote_case[i][n] = vote_pattern(n, count_case_0[i][n], count_case_1[i][n], i); //if (count_case_0[i][n]+count_case_1[i][n] == 0) probability_case[i][n] = 0; //else probability_case[i][n] = (double)count_case_1[i][n] / (double)(count_case_0[i][n]+count_case_1[i][n]); } } 
//------------------------------------------------------------------ // check prediction accuracy /* train = fopen ("train.csv", "r"); while (fgetc(train) != '\n') ; // skip the head line FILE *pred; pred = fopen ("pred.csv", "w"); fprintf(pred, "id,"); for (n = 1; n < 401; ++n) fprintf(pred, "start.%d,", n); for (n = 1; n < 400; ++n) fprintf(pred, "diff.%d,", n); fprintf(pred, "diff.400\n"); double difference_all = 0; for (n = 0; n < 50000; ++n) { read_train (train, &id, &delta, (int*)initial_grid); copy_grid(start_grid, initial_grid); for (i = 0; i < delta; ++i) { conway_step(start_grid, stop_grid); copy_grid(start_grid, stop_grid); } for (i = delta-1; i >= 0; --i) { reverse_step (start_grid, stop_grid, vote_case[i]); copy_grid(stop_grid, start_grid); } write_pred(pred, id, initial_grid, start_grid); difference_all += difference_grids(initial_grid, start_grid); } printf("training set score: %f\n", difference_all/50000); fclose(pred); */ fclose(train); //------------------------------------------------------------------ // make submission file ///* FILE *test; test = fopen ("test.csv", "r"); while (fgetc(train) != '\n') ; // skip the head line FILE *submission; submission = fopen ("submission.csv", "w"); fprintf(submission, "id,"); for (n = 1; n < 400; ++n) fprintf(submission, "start.%d,", n); fprintf(submission, "start.400\n"); for (n = 0; n < 50000; ++n) { read_test(test, &id, &delta, stop_grid); for (i = delta-1; i >= 0; --i) { reverse_step (start_grid, stop_grid, vote_case[i]); copy_grid(stop_grid, start_grid); } write_submission(submission, id, start_grid); } fclose(test); fclose(submission); //*/ return 0; // training set score: 0.124948 (-0.00054) // real score: 0.12685 (-0.00002) // actual improvement is really tiny compare to the training set one. }
/*
 * Flood-fill pathfinder: seeds a queue with every end cell (ar[i] == 255)
 * and expands outward via checkfield(), filling grid->path with values.
 *
 * If checkonly is non-zero, returns the number of start positions whose
 * path value is still <= 0 (i.e. blocked from reaching an end); otherwise
 * copies the computed path field into g->path and returns 0.
 *
 * Cell codes seen in this function: 0 = open, 1/2 = obstacles ("###"/"XXX"
 * in the old debug dump), 254 = start ("SSS"), 255 = end ("EEE").
 */
int pPathfind(struct game *g, int checkonly) {
    int i = 0, x, y;
    struct queue_t *queue;
    struct grid_t *grid;

    printf("Copying grid\n");
    grid = copy_grid(g);        /* private working copy; freed on every return path */
    printf("Allocating queue\n");
    queue = alloc_queue(grid);

    /* (commented-out debug dump of the grid removed; cell codes above) */

    /* Seed the queue with every end cell. */
    for(i=0; i < (signed int)grid->size; i++) {
        if (grid->ar[i] == 255) {
            push(queue, grid, i, 1);
        }
    }

    /* Expand while the cell at the queue head is still marked, advancing the
       head by hand and stopping once it runs past the grid.
       NOTE(review): HEAD() and the marked[]/push() semantics come from
       elsewhere in the project; if the grid contains no end cell the queue is
       empty and HEAD(queue) at head 0 may read unpushed data -- verify
       against alloc_queue(). */
    while (grid->marked[HEAD(queue).id] != 0) {
        checkfield(queue, grid);
        queue->head++;
        if ( queue->head >= (signed int)grid->size ) break;
    }

    if ( checkonly ) {
        /* Count entrances that never received a positive path value. */
        int ret = 0;
        for(i=0;i<g->startN;i++) {
            if ( grid->path[g->start[i][0]+(g->start[i][1]*G_WIDTH)] > 0 ) {
                continue;   /* this start position can reach an end */
            }
            ret++;          /* blocked start position */
        }
        free_queue(queue);
        free_grid(grid);
        return ret;
    }

    /* Copy the flat path field back into the game's 2-D path array. */
    i = 0;
    for(y=0; y < G_HEIGHT; y++) {
        for(x=0; x < G_WIDTH; x++) {
            g->path[y][x] = grid->path[i];
            i++;
        }
    }
    free_queue(queue);
    free_grid(grid);
    return 0;
}
int main () { // utility parameters int n, i; clean_array((int*)count_case_0, 2097152*5); clean_array((int*)count_case_1, 2097152*5); // read the train.csv data FILE *train; train = fopen ("train.csv", "r"); while (fgetc(train) != '\n') ; // skip the head line int id, delta, initial_grid[24][24], start_grid[24][24], stop_grid[24][24]; clean_array((int*)initial_grid, 24*24); clean_array((int*)start_grid, 24*24); clean_array((int*)stop_grid, 24*24); // read all the train set and make statistical table for them for (n = 0; n < 50000; ++n) { read_train(train, &id, &delta, (int*)start_grid); for (i = 0; i < delta; ++i) { conway_step(start_grid, stop_grid); vote_step(start_grid, stop_grid, count_case_0[i], count_case_1[i]); copy_grid(start_grid, stop_grid); } } // vote for statistical table for (i = 0; i < 5; ++i) { for (n = 0; n < 2097152; ++n) vote_case[i][n] = vote_pattern(n, count_case_0[i][n], count_case_1[i][n], i); } //------------------------------------------------------------------ // check prediction accuracy ///* train = fopen ("train.csv", "r"); while (fgetc(train) != '\n') ; // skip the head line FILE *pred; pred = fopen ("pred.csv", "w"); fprintf(pred, "id,"); for (n = 1; n < 401; ++n) fprintf(pred, "start.%d,", n); for (n = 1; n < 400; ++n) fprintf(pred, "diff.%d,", n); fprintf(pred, "diff.400\n"); double difference_all = 0; for (n = 0; n < 50000; ++n) { read_train (train, &id, &delta, (int*)initial_grid); copy_grid(start_grid, initial_grid); for (i = 0; i < delta; ++i) { conway_step(start_grid, stop_grid); copy_grid(start_grid, stop_grid); } for (i = delta-1; i >= 0; --i) { reverse_step (start_grid, stop_grid, vote_case[i]); copy_grid(stop_grid, start_grid); } write_pred(pred, id, initial_grid, start_grid); difference_all += difference_grid(initial_grid, start_grid); } printf("training set score: %f\n", difference_all/50000); fclose(pred); //*/ fclose(train); //------------------------------------------------------------------ // make submission file 
///* FILE *test; test = fopen ("test.csv", "r"); while (fgetc(train) != '\n') ; // skip the head line FILE *submission; submission = fopen ("submission.csv", "w"); fprintf(submission, "id,"); for (n = 1; n < 400; ++n) fprintf(submission, "start.%d,", n); fprintf(submission, "start.400\n"); for (n = 0; n < 50000; ++n) { read_test(test, &id, &delta, stop_grid); for (i = delta-1; i >= 0; --i) { reverse_step (start_grid, stop_grid, vote_case[i]); copy_grid(stop_grid, start_grid); } write_submission(submission, id, start_grid); } fclose(test); fclose(submission); //*/ return 0; // training set score: 0.12549 // real score: 0.12687 // I also tried set up vote_case table cutoff based on different delta number. // it may helps (the entire table parameter need a long time to be fixed), // but a naive setup of the final step (5->6) to 0.5 makes everything goes worse. }
/*
 * Entry point for the fluid-simulation demo.
 * Parses device options from argv, opens a GLUT window, prints usage help,
 * initialises the simulation state (and OpenCL when USE_OPENCL is set),
 * then hands control to glutMainLoop().
 */
int main ( int argc, char ** argv )
{
    // Parse command line options: any argument containing "cpu"/"gpu"
    // selects the compute device; "interop" enables OpenCL/OpenGL sharing.
    int use_gpu = 1;
    int use_interop = 0;
    for(int i = 0; i < argc && argv; i++)
    {
        if(!argv[i]) continue;
        if(strstr(argv[i], "cpu")) use_gpu = 0;
        else if(strstr(argv[i], "gpu")) use_gpu = 1;
        else if(strstr(argv[i], "interop")) use_interop = 1;
    }
    printf("Parameter detect %s device (%s)\n",use_gpu==1?"GPU":"CPU",use_interop==1?"Share OpenGL":"Not Sharing OpenGL");
    OPENCL_SHARE_WITH_OPENGL = use_interop;
    //testCG();

    // Window setup (win_x/win_y and the sim parameters below are globals
    // defined elsewhere in the project).
    win_x = 512;
    win_y = 512;
    glutInit ( &argc, argv );
    open_glut_window ();
    //test_opencl_opengl_interop();

    dt = 0.1f;        // simulation time step
    force = 10.0f;    // scale of mouse-drag velocity injection
    source = 10.0f;   // density added per mouse click

    // Interactive controls summary printed for the user.
    printf ( "\n\nHow to use this demo:\n\n" );
    printf ( "\t Add densities with the left mouse button\n" );
    printf ( "\t Add velocities with the left mouse button and dragging the mouse\n" );
    printf ( "\t Toggle density/velocity display with the 'v' key\n" );
    printf ( "\t Clear the simulation by pressing the 'x' key\n" );
    printf ( "\t switch poisson solvers from jacobi to conjugate gradient by pressing the 'c' key\n" );
    printf ( "\t switch advection scheme from RK2 to MacCormack by pressing the 'm' key\n" );
    printf ( "\t toggle vorticity confinement by pressing the 'o' key\n" );
    printf ( "\t Quit by pressing the 'q' key\n" );

    // Default display/solver toggles (flipped at runtime by the keys above).
    dvel = 0;
    step = 0;
    maccormack = 0;
    vorticity = 0;
    useCG = 0;

    if ( !allocate_data () ) exit ( 1 );
    clear_data ();
    //setupMatrix(g_laplacian_matrix);
    // (removed: commented-out experiment that seeded a rotational velocity
    //  field over the interior faces via FOR_EACH_FACE)

#if RUN_TIMINGS
    // Benchmark mode: run the timing suite and exit without the UI loop.
    runTimings(use_gpu);
    exit(0);
#endif

    // Seed the simulation: copy the velocity fields and inject an initial
    // density blob at cell (16,3,0).
    copy_grid(g_u_prev, g_u);
    copy_grid(g_v_prev, g_v);
    g_dens_prev[IX(16,3,0)] = 10.0f;
    //g_u_prev[IX(16,3,0)] = 10.0f;
    /* (removed: commented-out initial divergence-free projection of the
       seeded velocity field -- calculate_divergence / pressure_solve /
       pressure_apply / SWAP / check_divergence) */
    //print_platforms_devices();
    // run_opencl_test(use_gpu);
    // run_tests();

#if USE_OPENCL
    // Set up the OpenCL context, kernels and buffers, and push the initial
    // state to the device before entering the main loop.
    init_opencl(use_gpu);
    load_cl_kernels(&clData);
    allocate_cl_buffers(&clData);
    transfer_buffers_to_gpu();
    flush_cl_queue();
#endif

    glutMainLoop ();

#if USE_OPENCL
    // NOTE(review): classic glutMainLoop() never returns, so this cleanup
    // is likely unreachable -- confirm against the GLUT variant in use.
    cleanup_cl(&clData);
#endif
    exit ( 0 );
}