void display_open(void) { switch(DisplayMode) { case DisplayReport: report_open(); break; case DisplayTXT: txt_open(); break; case DisplayXML: xml_open(); break; case DisplayJSON: json_open(); break; case DisplayCSV: csv_open(); break; case DisplayCurses: mtr_curses_open(); #ifdef IPINFO if (ipinfo_no >= 0) asn_open(); #endif break; case DisplaySplit: split_open(); break; case DisplayGTK: gtk_open(); break; } }
void display_open(void) { switch(DisplayMode) { case DisplayReport: report_open(); break; case DisplayTXT: txt_open(); break; case DisplayXML: xml_open(); break; case DisplayCSV: csv_open(); break; case DisplayCurses: mtr_curses_open(); break; case DisplaySplit: /* BL */ split_open(); break; case DisplayGTK: gtk_open(); break; } }
int main(int argc, char **argv) { const int maxThreads = omp_get_max_threads(); if (argc < 4) { fprintf(stderr, "Usage: bench <csv file> <input size> <num threads> [<num threads> ...]\n"); return -1; } FILE *const csvFile = csv_open(argv[1]); if (csvFile == NULL) { return -1; } const int len = safe_strtol(argv[2]); if (len < 1) { fprintf(stderr, "Input size must be positive\n"); return -1; } TYPE *nrs = random_array(len, time(NULL)); if (nrs == NULL) { return -1; } for (int i = 3; i < argc; i++) { int threads = safe_strtol(argv[i]); if (threads < 1) { threads = maxThreads; } omp_set_num_threads(threads); printf("%s. omp_get_max_threads() == %d\n", algorithm_name, threads); /* Bench the parallel implementation. */ double start = omp_get_wtime(); if (prefix_sums(nrs, len, NULL) != 0) { return -1; } double par_time = omp_get_wtime() - start; printf("elements: %d; par time: %f\n\n", len, par_time); fprintf(csvFile, "%s,%d,%d,%f\n", algorithm_name, threads, len, par_time); } free(nrs); csv_close(csvFile); return 0; }
/*
** This function is the implementation of both the xConnect and xCreate
** methods of the CSV virtual table.
**
** argv[0] -> module name
** argv[1] -> database name
** argv[2] -> table name
** argv[3] -> csv file name
** argv[4] -> custom delimiter
** argv[5] -> optional: use header row for column names
**
** On success, *ppVtab receives the new virtual table and SQLITE_OK is
** returned; on failure, *pzErr receives an sqlite3_mprintf'd message and
** SQLITE_ERROR/SQLITE_NOMEM is returned.
**
** TODO
** File encoding problem
** Column/Cell type (in declaration and in result)
*/
static int csvInit(
  sqlite3 *db,              /* Database connection */
  void *pAux,               /* Unused */
  int argc, const char *const*argv,  /* Parameters to CREATE TABLE statement */
  sqlite3_vtab **ppVtab,    /* OUT: New virtual table */
  char **pzErr,             /* OUT: Error message, if any */
  int isCreate              /* True for xCreate, false for xConnect */
){
  int rc = SQLITE_OK;
  int i;
  CSV *pCSV;
  char *zSql;
  char cDelim = ',';        /* Default col delimiter */
  int bUseHeaderRow = 0;    /* Default to not use zRow headers */
  size_t nDb;               /* Length of string argv[1] */
  size_t nName;             /* Length of string argv[2] */
  size_t nFile;             /* Length of string argv[3] */
  CSVCursor csvCsr;         /* Used for calling csvNext */
  const char *aErrMsg[] = {
    0,                              /* 0 */
    "No CSV file specified",        /* 1 */
    "Error opening CSV file: '%s'", /* 2 */
    "No columns found",             /* 3 */
    "No column name found",         /* 4 */
    "Out of memory",                /* 5 */
  };

  UNUSED_PARAMETER(pAux);
  UNUSED_PARAMETER(isCreate);

  /* At minimum, module/db/table names plus a file name are required. */
  if( argc < 4 ){
    *pzErr = sqlite3_mprintf("%s", aErrMsg[1]);
    return SQLITE_ERROR;
  }

  /* allocate space for the virtual table object: the CSV struct and the
  ** three NUL-terminated strings (db, table, file) are carved out of a
  ** single sqlite3_malloc'd buffer (hence the +3 for terminators). */
  nDb = strlen(argv[1]);
  nName = strlen(argv[2]);
  nFile = strlen(argv[3]);
  pCSV = (CSV *)sqlite3_malloc( (int)(sizeof(CSV)+nDb+nName+nFile+3) );
  if( !pCSV ){
    /* out of memory */
    *pzErr = sqlite3_mprintf("%s", aErrMsg[5]);
    return SQLITE_NOMEM;
  }

  /* intialize virtual table object; the string pointers are aimed at the
  ** tail of the same allocation, directly after the CSV struct. */
  memset(pCSV, 0, sizeof(CSV)+nDb+nName+nFile+3);
  pCSV->db = db;
  pCSV->nBusy = 1;
  pCSV->base.pModule = &csvModule;
  pCSV->cDelim = cDelim;
  pCSV->zDb = (char *)&pCSV[1];
  pCSV->zName = &pCSV->zDb[nDb+1];
  pCSV->zFile = &pCSV->zName[nName+1];
  memcpy(pCSV->zDb, argv[1], nDb);
  memcpy(pCSV->zName, argv[2], nName);

  /* pull out name of csv file (remove quotes)
  ** NOTE(review): assumes a leading quote implies a matching trailing one;
  ** nFile-2 would underflow for a bare "'" argument -- confirm upstream
  ** tokenizing guarantees balanced quotes. */
  if( argv[3][0] == '\'' ){
    memcpy( pCSV->zFile, argv[3]+1, nFile-2 );
    pCSV->zFile[nFile-2] = '\0';
  }else{
    memcpy( pCSV->zFile, argv[3], nFile );
  }

  /* if a custom delimiter specified, pull it out (first char, skipping an
  ** optional opening quote) */
  if( argc > 4 ){
    if( argv[4][0] == '\'' ){
      pCSV->cDelim = argv[4][1];
    }else{
      pCSV->cDelim = argv[4][0];
    }
  }

  /* should the header zRow be used */
  if( argc > 5 ){
    if( !strcmp(argv[5], "USE_HEADER_ROW") ){
      bUseHeaderRow = -1;
    }
  }

  /* open the source csv file */
  pCSV->f = csv_open( pCSV );
  if( !pCSV->f ){
    *pzErr = sqlite3_mprintf(aErrMsg[2], pCSV->zFile);
    csvRelease( pCSV );
    return SQLITE_ERROR;
  }

  /* Read first zRow to obtain column names/number */
  csvCsr.base.pVtab = (sqlite3_vtab *)pCSV;
  rc = csvNext( (sqlite3_vtab_cursor *)&csvCsr );
  if( (SQLITE_OK!=rc) || (pCSV->nCol<=0) ){
    *pzErr = sqlite3_mprintf("%s", aErrMsg[3]);
    csvRelease( pCSV );
    return SQLITE_ERROR;
  }
  if( bUseHeaderRow ){
    /* Header row consumed as column names: data rows start after it. */
    pCSV->offsetFirstRow = csv_tell( pCSV );
  }

  /* Create the underlying relational database schema. If
  ** that is successful, call sqlite3_declare_vtab() to configure
  ** the csv table schema. The SQL text is built incrementally; a NULL
  ** zSql at any point signals allocation failure and stops the loop. */
  zSql = sqlite3_mprintf("CREATE TABLE x(");
  for(i=0; zSql && i<pCSV->nCol; i++){
    const char *zTail = (i+1<pCSV->nCol) ? ", " : ");";
    char *zTmp = zSql;
    if( bUseHeaderRow ){
      const char *zCol = pCSV->aCols[i];
      if( !zCol ){
        *pzErr = sqlite3_mprintf("%s", aErrMsg[4]);
        sqlite3_free(zSql);
        csvRelease( pCSV );
        return SQLITE_ERROR;
      }
      zSql = sqlite3_mprintf("%s\"%s\"%s", zTmp, zCol, zTail);
      // FIXME Column type (INT/REAL/TEXT)
    }else{
      /* No header row: synthesize col1, col2, ... names. */
      zSql = sqlite3_mprintf("%scol%d%s", zTmp, i+1, zTail);
      // FIXME Column type (INT/REAL/TEXT)
    }
    sqlite3_free(zTmp);
  }
  if( !zSql ){
    *pzErr = sqlite3_mprintf("%s", aErrMsg[5]);
    csvRelease( pCSV );
    return SQLITE_NOMEM;
  }

  rc = sqlite3_declare_vtab( db, zSql );
  sqlite3_free(zSql);
  if( SQLITE_OK != rc ){
    *pzErr = sqlite3_mprintf("%s", sqlite3_errmsg(db));
    csvRelease( pCSV );
    return SQLITE_ERROR;
  }

  *ppVtab = (sqlite3_vtab *)pCSV;
  *pzErr = NULL;
  return SQLITE_OK;
}
int main(int argc, char **argv) { int ret = 0; MPI_Init(&argc, &argv); if (argc < 4) { fprintf(stderr, "Usage: bench <csv file> <input size> <input upper bound>\n"); ret = -1; goto out; } const int size = safe_strtol(argv[2]); if (size < 1) { fprintf(stderr, "Input size must be greater than 0\n"); ret = -1; goto out; } const int upper_bound = safe_strtol(argv[3]); if (size < 1) { fprintf(stderr, "Input upper bound must be greater than 0\n"); ret = -1; goto out; } int processes; MPI_Comm_size(MPI_COMM_WORLD, &processes); int rank; MPI_Comm_rank(MPI_COMM_WORLD, &rank); /* We need to generate the same array in each process. The master process * determines a seed and broadcasts it to all others. */ int seed = time(NULL); MPI_Bcast(&seed, 1, MPI_INT, MASTER, MPI_COMM_WORLD); TYPE *a = random_array(size, upper_bound, seed); DEBUG("%s. MPI_Comm_size %d, MPI_Comm_rank %d, seed %d\n", algorithm_name, processes, rank, seed); /* Everything is set up, start sorting and time how long it takes. */ MPI_Barrier(MPI_COMM_WORLD); double start = MPI_Wtime(); TYPE *c = bucket_sort(a, size, upper_bound); double end = MPI_Wtime(); double localElapsed = end - start; double totalElapsed; MPI_Reduce(&localElapsed, &totalElapsed, 1, MPI_DOUBLE, MPI_MAX, MASTER, MPI_COMM_WORLD); free(a); free(c); /* Only the master process (rank 0) outputs information. */ if (rank == MASTER) { printf("processes: %d, elements: %d; upper bound: %d; time: %f\n", processes, size, upper_bound, totalElapsed); /* Persist this run in our csv file. */ FILE *const csvFile = csv_open(argv[1]); if (csvFile == NULL) { return -1; } fprintf(csvFile, "%s,%d,%d,%f\n", algorithm_name, processes, size, totalElapsed); csv_close(csvFile); } out: MPI_Finalize(); return ret; }
/* Load point/polygon annotations from a generic CSV file and install them
 * into the global shape list (g_shapes / num_shapes) for display over the
 * image described by `meta`. Lat/lon columns are projected to line/sample
 * via meta_get_lineSamp(); when `auto_close_polys` is non-zero, each shape
 * gets one extra point duplicating its first, closing the polygon.
 *
 * The file is read twice: pass 1 only counts valid lines (to size the
 * g_shapes array), pass 2 actually builds the Shape objects. Any previously
 * loaded shapes are freed first. Silently returns (after a status/warning
 * message) if the metadata has no geolocation, the file is missing, or
 * csv_open() rejects it. */
static void add_generic_csv(meta_parameters *meta, const char *csv_file,
                            int auto_close_polys)
{
  int num_meta_cols, num_data_cols;
  csv_meta_column_t *meta_column_info;
  csv_data_column_t *data_column_info;

  if (!meta_supports_meta_get_latLon(meta)) {
    asfPrintStatus("No geolocation info - can't add CSV: %s\n", csv_file);
    return;
  }

  asfPrintStatus("Adding: %s\n", csv_file);

  if (!fileExists(csv_file)) {
    asfPrintWarning("File not found: %s\n", csv_file);
    return;
  }

  FILE *ifp = csv_open(csv_file, &num_meta_cols, &meta_column_info,
                       &num_data_cols, &data_column_info);

  // csv_open() returns NULL if the file can't be processed
  if (!ifp)
    return;

  // this is just for debugging
  //csv_info(num_meta_cols, meta_column_info, num_data_cols, data_column_info);

  // PASS 1: count valid data lines so we know how many shapes to allocate.
  // start line counter at 1 (header line is not part of this loop)
  int i,line_num=1;
  int num = 0;

  char line[1024];
  while (fgets(line, 1023, ifp)) {
    ++line_num;

    char **column_data;
    double *lats, *lons;
    int ok = csv_line_parse(line, line_num, num_meta_cols, meta_column_info,
                            num_data_cols, data_column_info,
                            &column_data, &lats, &lons);

    // csv_line_parse() will return FALSE when the line is invalid
    if (!ok)
      continue;

    ++num;
    csv_free(num_meta_cols, column_data, lats, lons);
  }
  FCLOSE(ifp);
  FREE(meta_column_info);
  meta_column_info = NULL;
  FREE(data_column_info);
  data_column_info = NULL;

  // Use line_num-1 so we do not count the header line
  asfPrintStatus("File had %d line%s, %d valid line%s.\n",
                 line_num-1, line_num-1==1?"":"s", num, num==1?"":"s");

  // free pre-existing loaded shapes
  if (g_shapes)
    free_shapes();

  // set up global array of shapes
  num_shapes = num;
  g_shapes = MALLOC(sizeof(Shape*)*num_shapes);
  for (i=0; i<num_shapes; ++i)
    g_shapes[i] = NULL;

  // PASS 2: now open file again, this time we will actually process!
  ifp = csv_open(csv_file, &num_meta_cols, &meta_column_info,
                 &num_data_cols, &data_column_info);
  // the file opened fine moments ago in pass 1, so this should not fail
  assert(ifp);

  line_num = 1;
  num = 0;

  while (fgets(line, 1023, ifp)) {
    ++line_num;

    char **column_data;
    double *lats, *lons;
    int ok = csv_line_parse(line, line_num, num_meta_cols, meta_column_info,
                            num_data_cols, data_column_info,
                            &column_data, &lats, &lons);

    // csv_line_parse() will return FALSE when the line is invalid
    if (!ok)
      continue;

    // dealing with metadata: copy each metadata cell into the shape
    Shape *s = MALLOC(sizeof(Shape));
    s->num_meta_cols = num_meta_cols;
    s->meta_cols = meta_column_info;  // shared, not copied -- see note below
    s->meta_info = MALLOC(sizeof(char*)*num_meta_cols);
    for (i=0; i<num_meta_cols; ++i)
      s->meta_info[i] = STRDUP(column_data[i]);

    // dealing with data: one point per data column, plus an optional
    // closing point duplicating the first
    s->num_points = num_data_cols;
    if (auto_close_polys)
      s->num_points++;
    s->lines = MALLOC(sizeof(double)*s->num_points);
    s->samps = MALLOC(sizeof(double)*s->num_points);
    for (i=0; i<num_data_cols; ++i) {
      // NB: this `line` shadows the outer char line[1024] buffer
      double line, samp;
      meta_get_lineSamp(meta, lats[i], lons[i], 0, &line, &samp);
      s->lines[i] = line;
      s->samps[i] = samp;
    }
    if (auto_close_polys) {
      s->lines[num_data_cols] = s->lines[0];
      s->samps[num_data_cols] = s->samps[0];
    }

    // display info: cycle through colors by valid-row index
    s->color_code = num%27 + 10;
    s->marker_code = 1; // square

    g_shapes[num] = s;
    ++num;

    csv_free(num_meta_cols, column_data, lats, lons);
  }
  FCLOSE(ifp);
  FREE(data_column_info);
  // do not free meta_column_info -- pointed to by g_shape now
}