Code Example #1
int main() {
  create_dataset(0, 16, 16, 16);
  create_dataset(1, 64, 64, 64);
  create_dataset(2, 64, 128, 64);
  create_dataset(3, 112, 48, 16);
  create_dataset(4, 84, 84, 84);
  create_dataset(5, 80, 99, 128);
  create_dataset(6, 67, 53, 64);
  create_dataset(7, 29, 117, 85);
  create_dataset(8, 191, 19, 241);
  return 0;
}
Code Example #2
bool prepare_query(const char* entry) {
  position = 0;
  auto parsed_entry = parseEntry(entry);
  std::vector<std::vector<bool>> queryset;
  queryset.push_back(parsed_entry);
  if (querypoint != nullptr)
      delete[] querypoint;
  if (numres != nullptr) {
      delete[] numres[0]; // allocated with new UINT32[B + 1], so use delete[]
      delete[] numres;
  }
  if (results != nullptr) {
      delete[] results[0];
      delete[] results;
  }
  results = new UINT32*[1];
  numres = new UINT32*[1];
  numres[0] = new UINT32[B + 1];
  querypoint = create_dataset(queryset);
  if (r > 0) {
      UINT8* new_query = new UINT8[B/8];
      reorder(new_query, querypoint, 1, B, order);
      delete[] querypoint;
      querypoint = new_query;
  }
  return true;
}
Code Example #3
File: HDF5DataWriter.cpp  Project: csiki/MOOSE
/**
   Traverse the path of an object in the HDF5 file, checking for the
   existence of groups along the path and creating them if required. */
hid_t HDF5DataWriter::get_dataset(string path)
{
    if (filehandle_ < 0){
        return -1;
    }
    herr_t status = H5Eset_auto2(H5E_DEFAULT, NULL, NULL);
    // Create the groups corresponding to this path. We do not handle
    // the case of a Table object containing other Table objects;
    // that is an unusual possibility.
    vector<string> path_tokens;
    tokenize(path, "/", path_tokens);
    if (path_tokens.empty()){
        // Guard against an empty path: path_tokens.size()-1 below would underflow.
        return -1;
    }
    hid_t prev_id = filehandle_;
    hid_t id = -1;
    for ( unsigned int ii = 0; ii < path_tokens.size()-1; ++ii ){
        // check if object exists
        htri_t exists = H5Lexists(prev_id, path_tokens[ii].c_str(), H5P_DEFAULT);
        if (exists > 0){
            // try to open existing group
            id = H5Gopen2(prev_id, path_tokens[ii].c_str(), H5P_DEFAULT);
        } else if (exists == 0) {
            // If that fails, try to create a group
            id = H5Gcreate2(prev_id, path_tokens[ii].c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        } 
        if ((exists < 0) || (id < 0)){
            // Failed to open/create a group, print the
            // offending path (for debugging; the error is
            // perhaps at the level of hdf5 or file system).
            cerr << "Error: failed to open/create group: ";
            for (unsigned int jj = 0; jj <= ii; ++jj){
                cerr << "/" << path_tokens[jj];
            }
            cerr << endl;
            prev_id = -1;            
        }
        if (prev_id >= 0  && prev_id != filehandle_){
            // Successfully opened/created new group, close the old group
            status = H5Gclose(prev_id);
            assert( status >= 0 );
        }
        prev_id = id;
    }
    string name = path_tokens[path_tokens.size()-1];
    htri_t exists = H5Lexists(prev_id, name.c_str(), H5P_DEFAULT);
    hid_t dataset_id = -1;
    if (exists > 0){
        dataset_id = H5Dopen2(prev_id, name.c_str(), H5P_DEFAULT);
    } else if (exists == 0){
        dataset_id = create_dataset(prev_id, name);
    } else {
        cerr << "Error: H5Lexists returned " << exists << " for path \"" << path << "\"" << endl;
    }
    return dataset_id;
}
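For context, given an HDF5DataWriter whose HDF5 file is already open, a call like the sketch below (the object name and the path are hypothetical, not taken from the MOOSE sources) opens or creates the groups data and data/run1 before opening or creating the dataset Vm:

// Hypothetical usage sketch: intermediate groups are opened or created on demand.
hid_t dset = writer.get_dataset("/data/run1/Vm");
if (dset < 0) {
    // get_dataset() has already reported the offending path on cerr
}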
Code Example #4
File: create5.c  Project: blippy/hdf5-fuse
int main(int argc, char *argv[])
{
	openlog("hdf5-fuse", LOG_CONS | LOG_PID | LOG_NDELAY, LOG_LOCAL1);
	syslog(LOG_DEBUG, "Programm create5 starting");
	int status = EXIT_SUCCESS;
	static struct option longopts[] = {
		//{ "buffy",	     no_argument,	     NULL,	     'b' },
		//{ "fluoride",   required_argument,	     NULL,	     'f' },
		//{ "daggerset",  no_argument,	     &daggerset,     1 },
		{"help", no_argument, NULL, 'h'},
		{ NULL,	     0,			     NULL,	     0 }
	};

	int ch;
	while ((ch = getopt_long(argc, argv, "h", longopts, NULL)) != -1) {
		switch (ch) {
		case 'h':
			printf("create5 FILE DATASET\n");
			goto cleanup;
		case '?':
			break; // getopt_long already printed an error message
		default:
			fprintf(stderr, "Didn't understand option '%c'. Exiting\n", ch);
			status = EXIT_FAILURE;
			goto cleanup;
		}
	}

	if (optind + 2 != argc) {
		fprintf(stderr, "Incorrect number of arguments. Exiting\n");
		status = EXIT_FAILURE;
		goto cleanup;
	}

	char *h5fname = argv[optind];
	char *dataset_name = argv[optind+1];

	hid_t rid = H5Fopen(h5fname, H5F_ACC_RDWR, H5P_DEFAULT);
	if (rid < 0) {
		fprintf(stderr, "Failed to open HDF5 file '%s'\n", h5fname);
		status = EXIT_FAILURE;
		goto cleanup;
	}
	// create_dataset() is assumed to follow HDF5's negative-on-error convention.
	if (create_dataset(rid, dataset_name) < 0)
		status = EXIT_FAILURE;
	if (H5Fclose(rid) < 0)
		status = EXIT_FAILURE;

	syslog(LOG_DEBUG, "File: %s, dataset %s\n", h5fname, dataset_name);

cleanup:
	//if(dset) free(dset);
	syslog(LOG_DEBUG, "Programme exiting with status %d", status);
	closelog();
	return status;
}
Code Example #5
File: SST.cpp  Project: codeaudit/Dali
    vector<SentimentBatch<R>> SentimentBatch<R>::create_dataset(
            const vector<SST::AnnotatedParseTree::shared_tree>& trees,
            const Vocab& vocab,
            size_t minibatch_size,
            bool add_start_symbol) {

        utils::tokenized_uint_labeled_dataset dataset;
        for (auto& tree : trees) {
            dataset.emplace_back(tree->to_labeled_pair());
            for (auto& child : tree->general_children) {
                if (((int)child->label) > 4)
                    utils::exit_with_message("Error: One of the tree's children has a label other than 0-4");
                dataset.emplace_back(child->to_labeled_pair());
            }
        }
        return create_dataset(dataset, vocab, minibatch_size, add_start_symbol);
    }
Code Example #6
void end_train(void) {
  size_t b = pointset[0].size();
  B = b;
  size_t n = pointset.size();
  dataset = create_dataset(pointset);
  pointset.clear();
  pointset.shrink_to_fit();
  if (r > 0) {
    r = n / r; // reuse r as the subsample size: n/r points are used for ordering
    int* order = new int[B];
    greedyorder(order, dataset, r, B, chunks);
    UINT8* new_dataset = new UINT8[n * B/8];
    reorder(new_dataset, dataset, n, B, order);
    delete[] order;
    delete[] dataset;
    dataset = new_dataset;
  }
  ds = new mihasher(b, chunks);
  ds->populate(dataset, n, b/8);
  stats = new qstat[1];
}
Code Example #7
hid_t set_up_rnw() {
    // Set up driver to use ram instead of on-disk files.
    hid_t fapl;
    fapl = set_core();

    create_file(fapl, FILE_NAME_RNW);
    hid_t file_id;
    file_id = open_file(fapl, FILE_NAME_RNW);

    size_t dim0, dim1;
    for (dim0 = DIM0_START; dim0 <= DIM0_LIM; dim0 *= 2) {
        for (dim1 = DIM1_START; dim1 <= DIM1_LIM; dim1 *= 2) {
            // build the dataset name from the current dimensions
            char name[32];
            snprintf(name, sizeof name, "dataset_%zux%zu", dim0, dim1);

            create_dataset(file_id, dim0, dim1, name);

            /* Initialise with values */
            hid_t dataset_id;
            dataset_id = open_dataset(file_id, name);

            double matrix[dim0][dim1];
            size_t i, j;

            // fill matrix with some elements
            for (i = 0; i < dim0; i++) {
                for (j = 0; j < dim1; j++) {
                    matrix[i][j] = i * dim1 + j;
                }
            }
            write_matrix(dataset_id, matrix);

            close_dataset(dataset_id);
        }
    }

    /* Done initializing values */
    close_file(file_id);
    return fapl;
}
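The helper set_core() used above is not shown in this example. A minimal sketch of such a helper, assuming it relies on HDF5's in-memory "core" virtual file driver (the increment size below is an arbitrary choice), might look like this:

// Sketch of a set_core()-style helper (assumption, not from the original project):
// build a file-access property list that uses the in-memory "core" driver
// with no on-disk backing store.
hid_t set_core(void) {
    hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
    // 64 MiB allocation increment; backing_store = 0 keeps the file in RAM only.
    H5Pset_fapl_core(fapl, 64 * 1024 * 1024, 0);
    return fapl;
}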
Code Example #8
File: chunk.c  Project: ArielleBassanelli/gempak
/*-------------------------------------------------------------------------
 * Function:	main
 *
 * Purpose:	See file prologue.
 *
 * Return:	Success:
 *
 *		Failure:
 *
 * Programmer:	Robb Matzke
 *              Thursday, May 14, 1998
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
int
main (void)
{
    size_t	io_size;
    double	effic, io_percent;
    FILE	*f, *d;
    size_t	cache_size;
    double	w0;

    /*
     * Create a global file access property list.
     */
    fapl_g = H5Pcreate (H5P_FILE_ACCESS);
    H5Pget_cache (fapl_g, NULL, NULL, NULL, &w0);

    /* Create the file */
    create_dataset ();
    f = fopen ("x-gnuplot", "w");

    printf("Test      %8s %8s %8s\n", "CacheSz", "ChunkSz",  "Effic");
    printf("--------- -------- -------- --------\n");

#if 1
    /*
     * Test row-major reading of the dataset with various sizes of request
     * windows.
     */
    if (RM_CACHE_STRT==RM_CACHE_END) {
	fprintf (f, "set yrange [0:1.2]\n");
	fprintf (f, "set ytics 0, 0.1, 1\n");
	fprintf (f, "set xlabel \"%s\"\n",
		 "Request size as a fraction of chunk size");
	fprintf (f, "set ylabel \"Efficiency\"\n");
	fprintf (f, "set title \"Cache %d chunks, w0=%g, "
		 "Size=(total=%d, chunk=%d)\"\n",
		 RM_CACHE_STRT, w0, DS_SIZE*CH_SIZE, CH_SIZE);
    } else {
	fprintf (f, "set autoscale\n");
	fprintf (f, "set hidden3d\n");
    }

    fprintf (f, "set terminal postscript\nset output \"x-rowmaj-rd.ps\"\n");
    fprintf (f, "%s \"x-rowmaj-rd.dat\" title \"RowMaj-Read\" with %s\n",
	     RM_CACHE_STRT==RM_CACHE_END?"plot":"splot",
	     LINESPOINTS);
    fprintf (f, "set terminal x11\nreplot\n");
    d = fopen ("x-rowmaj-rd.dat", "w");
    for (cache_size=RM_CACHE_STRT;
	 cache_size<=RM_CACHE_END;
	 cache_size+=RM_CACHE_DELT) {
	for (io_percent=RM_START; io_percent<=RM_END; io_percent+=RM_DELTA) {
	    io_size = MAX (1, (size_t)(CH_SIZE*io_percent));
	    printf ("Rowmaj-rd %8d %8.2f", (int)cache_size, io_percent);
	    fflush (stdout);
	    effic = test_rowmaj (READ, cache_size, io_size);
	    printf (" %8.2f\n", effic);
	    if (RM_CACHE_STRT==RM_CACHE_END) {
		fprintf (d, "%g %g\n", io_percent, effic);
	    } else {
		fprintf (d, "%g\n", effic);
	    }
	}
	fprintf (d, "\n");
    }
    fclose (d);
    fprintf (f, "pause -1\n");
#endif

#if 1
    /*
     * Test row-major writing of the dataset with various sizes of request
     * windows.
     */
    if (RM_CACHE_STRT==RM_CACHE_END) {
	fprintf (f, "set yrange [0:1.2]\n");
	fprintf (f, "set ytics 0, 0.1, 1\n");
	fprintf (f, "set xlabel \"%s\"\n",
		 "Request size as a fraction of chunk size");
	fprintf (f, "set ylabel \"Efficiency\"\n");
	fprintf (f, "set title \"Cache %d chunks,w0=%g, "
		 "Size=(total=%d, chunk=%d)\"\n",
		 RM_CACHE_STRT, w0, DS_SIZE*CH_SIZE, CH_SIZE);
    } else {
	fprintf (f, "set autoscale\n");
	fprintf (f, "set hidden3d\n");
    }

    fprintf (f, "set terminal postscript\nset output \"x-rowmaj-wr.ps\"\n");
    fprintf (f, "%s \"x-rowmaj-wr.dat\" title \"RowMaj-Write\" with %s\n",
	     RM_CACHE_STRT==RM_CACHE_END?"plot":"splot",
	     LINESPOINTS);
    fprintf (f, "set terminal x11\nreplot\n");
    d = fopen ("x-rowmaj-wr.dat", "w");
    for (cache_size=RM_CACHE_STRT;
	 cache_size<=RM_CACHE_END;
	 cache_size+=RM_CACHE_DELT) {
	for (io_percent=RM_START; io_percent<=RM_END; io_percent+=RM_DELTA) {
	    io_size = MAX (1, (size_t)(CH_SIZE*io_percent));
	    printf ("Rowmaj-wr %8d %8.2f", (int)cache_size, io_percent);
	    fflush (stdout);
	    effic = test_rowmaj (WRITE, cache_size, io_size);
	    printf (" %8.2f\n", effic);
	    if (RM_CACHE_STRT==RM_CACHE_END) {
		fprintf (d, "%g %g\n", io_percent, effic);
	    } else {
		fprintf (d, "%g\n", effic);
	    }
	}
	fprintf (d, "\n");
    }
    fclose (d);
    fprintf (f, "pause -1\n");
#endif

#if 1
    /*
     * Test diagonal read
     */
    if (DIAG_CACHE_STRT==DIAG_CACHE_END) {
	fprintf (f, "set yrange [0:1.2]\n");
	fprintf (f, "set ytics 0, 0.1, 1\n");
	fprintf (f, "set xlabel \"%s\"\n",
		 "Request size as a fraction of chunk size");
	fprintf (f, "set ylabel \"Efficiency\"\n");
	fprintf (f, "set title \"Cache %d chunks,w0=%g, "
		 "Size=(total=%d, chunk=%d)\"\n",
		 DIAG_CACHE_STRT, w0, DS_SIZE*CH_SIZE, CH_SIZE);
    } else {
	fprintf (f, "set autoscale\n");
	fprintf (f, "set hidden3d\n");
    }
    fprintf (f, "set terminal postscript\nset output \"x-diag-rd.ps\"\n");
    fprintf (f, "%s \"x-diag-rd.dat\" title \"Diag-Read\" with %s\n",
	     DIAG_CACHE_STRT==DIAG_CACHE_END?"plot":"splot", LINESPOINTS);
    fprintf (f, "set terminal x11\nreplot\n");
    d = fopen ("x-diag-rd.dat", "w");
    for (cache_size=DIAG_CACHE_STRT;
	 cache_size<=DIAG_CACHE_END;
	 cache_size+=DIAG_CACHE_DELT) {
	for (io_percent=DIAG_START;
	     io_percent<=DIAG_END;
	     io_percent+=DIAG_DELTA) {
	    io_size = MAX (1, (size_t)(CH_SIZE*io_percent));
	    printf ("Diag-rd   %8d %8.2f", (int)cache_size, io_percent);
	    fflush (stdout);
	    effic = test_diag (READ, cache_size, io_size, MAX (1, io_size/2));
	    printf (" %8.2f\n", effic);
	    if (DIAG_CACHE_STRT==DIAG_CACHE_END) {
		fprintf (d, "%g %g\n", io_percent, effic);
	    } else {
		fprintf (d, "%g\n", effic);
	    }
	}
	fprintf (d, "\n");
    }
    fclose (d);
    fprintf (f, "pause -1\n");
#endif

#if 1
    /*
     * Test diagonal write
     */
    if (DIAG_CACHE_STRT==DIAG_CACHE_END) {
	fprintf (f, "set yrange [0:1.2]\n");
	fprintf (f, "set ytics 0, 0.1, 1\n");
	fprintf (f, "set xlabel \"%s\"\n",
		 "Request size as a fraction of chunk size");
	fprintf (f, "set ylabel \"Efficiency\"\n");
	fprintf (f, "set title \"Cache %d chunks, w0=%g, "
		 "Size=(total=%d, chunk=%d)\"\n",
		 DIAG_CACHE_STRT, w0, DS_SIZE*CH_SIZE, CH_SIZE);
    } else {
	fprintf (f, "set autoscale\n");
	fprintf (f, "set hidden3d\n");
    }
    fprintf (f, "set terminal postscript\nset output \"x-diag-wr.ps\"\n");
    fprintf (f, "%s \"x-diag-wr.dat\" title \"Diag-Write\" with %s\n",
	     DIAG_CACHE_STRT==DIAG_CACHE_END?"plot":"splot", LINESPOINTS);
    fprintf (f, "set terminal x11\nreplot\n");
    d = fopen ("x-diag-wr.dat", "w");
    for (cache_size=DIAG_CACHE_STRT;
	 cache_size<=DIAG_CACHE_END;
	 cache_size+=DIAG_CACHE_DELT) {
	for (io_percent=DIAG_START;
	     io_percent<=DIAG_END;
	     io_percent+=DIAG_DELTA) {
	    io_size = MAX (1, (size_t)(CH_SIZE*io_percent));
	    printf ("Diag-wr   %8d %8.2f", (int)cache_size, io_percent);
	    fflush (stdout);
	    effic = test_diag (WRITE, cache_size, io_size, MAX (1, io_size/2));
	    printf (" %8.2f\n", effic);
	    if (DIAG_CACHE_STRT==DIAG_CACHE_END) {
		fprintf (d, "%g %g\n", io_percent, effic);
	    } else {
		fprintf (d, "%g\n", effic);
	    }
	}
	fprintf (d, "\n");
    }
    fclose (d);
    fprintf (f, "pause -1\n");
#endif


    H5Pclose (fapl_g);
    fclose (f);
    return 0;
}
Code Example #9
File: test_octree.c  Project: mentekid/octree
int main(int argc, char** argv){

    // Time counting variables
    struct timeval startwtime, endwtime;
    double hashAvg=0, mortonAvg=0, sortingAvg=0, rearrangeAvg=0;
    extern int num_threads;


    if (argc != 7) { // Check if the command line arguments are correct
        printf("Usage: %s N dist pop rep L numThreads\n where\n N:number of points\n dist: distribution code (0-cube, 1-Plummer)\n pop: population threshold\n rep: repetitions\n L: maximum tree height.\n numThreads: number of threads to run this code with\n", argv[0]);
        return (1);
    }

    // Input command line arguments
    int N = atoi(argv[1]); // Number of points
    int dist = atoi(argv[2]); // Distribution identifier
    int population_threshold = atoi(argv[3]); // population threshold
    int repeat = atoi(argv[4]); // number of independent runs
    int maxlev = atoi(argv[5]); // maximum tree height
    num_threads = atoi(argv[6]); //number of threads

    printf("Running for %d particles with maximum height: %d\n", N, maxlev);

    float *X = (float *) malloc(N*DIM*sizeof(float));
    float *Y = (float *) malloc(N*DIM*sizeof(float));

    unsigned int *hash_codes = (unsigned int *) malloc(DIM*N*sizeof(unsigned int));
    unsigned long int *morton_codes = (unsigned long int *) malloc(N*sizeof(unsigned long int));
    unsigned long int *sorted_morton_codes = (unsigned long int *) malloc(N*sizeof(unsigned long int));
    unsigned int *permutation_vector = (unsigned int *) malloc(N*sizeof(unsigned int));
    unsigned int *index = (unsigned int *) malloc(N*sizeof(unsigned int));
    unsigned int *level_record = (unsigned int *) calloc(N,sizeof(unsigned int)); // records each leaf of the tree and its level

    // initialize the index
    int i = 0;
    for(i=0; i<N; i++){
        index[i] = i;
    }

    /* Generate a 3-dimensional data distribution */
    create_dataset(X, N, dist);

    /* Find the boundaries of the space */
    float max[DIM], min[DIM];
    find_max(max, X, N);
    find_min(min, X, N);

    int nbins = (1 << maxlev); // maximum number of boxes at the leaf level

    int it = 0;
    // Independent runs
    for(it = 0; it<repeat; it++){

        gettimeofday (&startwtime, NULL);

        compute_hash_codes(hash_codes, X, N, nbins, min, max); // compute the hash codes

        gettimeofday (&endwtime, NULL);

        double hash_time = (double)((endwtime.tv_usec - startwtime.tv_usec)
                /1.0e6 + endwtime.tv_sec - startwtime.tv_sec);
        hashAvg+=hash_time;

        printf("Time to compute the hash codes: %f\n", hash_time);


        gettimeofday (&startwtime, NULL);

        morton_encoding(morton_codes, hash_codes, N, maxlev); // computes the Morton codes of the particles

        gettimeofday (&endwtime, NULL);


        double morton_encoding_time = (double)((endwtime.tv_usec - startwtime.tv_usec)
                /1.0e6 + endwtime.tv_sec - startwtime.tv_sec);
        mortonAvg+=morton_encoding_time;

        printf("Time to compute the morton encoding: %f\n", morton_encoding_time);


        gettimeofday (&startwtime, NULL);

        // Truncated msd radix sort
        truncated_radix_sort(morton_codes, sorted_morton_codes,
                permutation_vector,
                index, level_record, N,
                population_threshold, 3*(maxlev-1), 0, num_threads);

        gettimeofday (&endwtime, NULL);


        double sort_time = (double)((endwtime.tv_usec - startwtime.tv_usec)
                /1.0e6 + endwtime.tv_sec - startwtime.tv_sec);
        sortingAvg+=sort_time;
        printf("Time for the truncated radix sort: %f\n", sort_time);

        gettimeofday (&startwtime, NULL);

        // Data rearrangement
        data_rearrangement(Y, X, permutation_vector, N);

        gettimeofday (&endwtime, NULL);


        double rearrange_time = (double)((endwtime.tv_usec - startwtime.tv_usec)
                /1.0e6 + endwtime.tv_sec - startwtime.tv_sec);
        rearrangeAvg+=rearrange_time;

        printf("Time to rearrange the particles in memory: %f\n", rearrange_time);

        /* The following code is for verification */
        // Check if every point is assigned to one leaf of the tree
        int pass = check_index(permutation_vector, N);
        printf("%d of %d on INDEX TEST\n", pass, N);
        /*
        if(pass){
            printf("Index test PASS\n");
        }
        else{
            printf("Index test FAIL\n");
        }
*/
        // Check if all particles that are in the same box have the same encoding.
        pass = check_codes(Y, sorted_morton_codes,
                level_record, N, maxlev);
        printf("%d of %d on ENCODING TEST\n", pass, N);
        /*
        if(pass){
            printf("Encoding test PASS\n");
        }
        else{
            printf("Encoding test FAIL\n");
        }
*/
    }
//compute and print average time for each stage
    hashAvg=hashAvg/repeat;
    printf("Average time for hashing: %f\n", hashAvg);

    mortonAvg=mortonAvg/repeat;
    printf("Average time for encoding: %f\n", mortonAvg);

    sortingAvg=sortingAvg/repeat;
    printf("Average time for sorting: %f\n", sortingAvg);

    rearrangeAvg=rearrangeAvg/repeat;
    printf("Average time for rearranging: %f\n", rearrangeAvg);

    /* clear memory */
    free(X);
    free(Y);
    free(hash_codes);
    free(morton_codes);
    free(sorted_morton_codes);
    free(permutation_vector);
    free(index);
    free(level_record);

    return 0;
}
Code Example #10
File: boost_multi_array.hpp  Project: KaiSzuttor/h5xx
inline typename boost::enable_if<is_multi_array<T>, dataset>::type
create_dataset(h5xxObject const& object, std::string const& name, T const& value)
{
    return create_dataset(object, name, value, h5xx::policy::storage::contiguous());
}
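As a usage sketch (the file name, open mode, dataset name, and extents below are arbitrary assumptions, not taken from the h5xx sources), this overload lets a Boost.MultiArray be written without naming a storage policy explicitly:

#include <algorithm>
#include <boost/multi_array.hpp>
#include <h5xx/h5xx.hpp>

int main()
{
    // Assumed file name and open mode; adjust to the surrounding application.
    h5xx::file file("example.h5", h5xx::file::trunc);

    boost::multi_array<double, 2> values(boost::extents[4][4]);
    std::fill(values.data(), values.data() + values.num_elements(), 1.0);

    // Delegates to the policy-taking overload using contiguous storage.
    h5xx::create_dataset(file, "values", values);
    return 0;
}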