Example #1
//--------------------------------------------------------------------------
// Function:	DSetCreatPropList::setShuffle
///\brief	Sets up use of the shuffle filter
///
///\exception	H5::PropListIException
///\par Description
///		Please refer to the Reference Manual of \c H5Pset_shuffle for
///		details.
/// http://www.hdfgroup.org/HDF5/doc/RM/RM_H5P.html#Property-SetShuffle
// Programmer	Binh-Minh Ribler - 2000
//--------------------------------------------------------------------------
void DSetCreatPropList::setShuffle() const
{
   herr_t ret_value = H5Pset_shuffle(id);
   if( ret_value < 0 )
   {
      throw PropListIException("DSetCreatPropList::setShuffle",
                "H5Pset_shuffle failed");
   }
}
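For context, a minimal usage sketch of this wrapper through the HDF5 C++ API (the file name, dataset name, and sizes below are illustrative assumptions, not taken from the examples). The shuffle filter only takes effect on chunked datasets, so setChunk() is called before setShuffle() and setDeflate().

#include "H5Cpp.h"
using namespace H5;

int main()
{
    H5File file("shuffle_demo.h5", H5F_ACC_TRUNC);   // hypothetical file name

    hsize_t dims[2]  = {100, 200};
    hsize_t chunk[2] = {20, 20};
    DataSpace space(2, dims);

    DSetCreatPropList dcpl;
    dcpl.setChunk(2, chunk);    // shuffle/deflate require a chunked layout
    dcpl.setShuffle();          // wraps H5Pset_shuffle, as shown above
    dcpl.setDeflate(6);         // gzip level 6

    DataSet dset = file.createDataSet("data", PredType::NATIVE_INT, space, dcpl);
    return 0;
}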
Example #2
 void DCDataSet::setCompression()
 throw (DCException)
 {
     if (this->compression && getPhysicalSize().getScalarSize() != 0)
     {
         // shuffling reorders bytes for better compression
         // set gzip compression level (1=lowest - 9=highest)
         if (H5Pset_shuffle(this->dsetProperties) < 0 ||
                 H5Pset_deflate(this->dsetProperties, 1) < 0)
             throw DCException(getExceptionString("setCompression: Failed to set compression"));
     }
 }
Example #3
void fh5_prepare_write_(int *ndims,int *dims,int *hdferr)
{

extern hid_t fileid, dsetid, dspcid, mspcid, propid;
int i,j;
herr_t herr;

hsize_t dimsc[7]  = {1,1,1,1,1,1,1};
hsize_t maxdims[7]  = {1,1,1,1,1,1,1};
hsize_t chunk_size[7]  = {0,0,0,0,0,0,0};

for (i = 0; i < *ndims; i++)
  {  
    /* reverse dimensions for Fortran ordering */
    j = *ndims-i-1;
    dimsc[i] = dims[j];
    chunk_size[i] = dims[j];  
    maxdims[i] = dims[j];
  }

/*  Create the data space for the dataset. */
mspcid = H5Screate_simple(*ndims, dimsc, maxdims);
//printf("fh5_prepw - create 1: %d\n",mspcid);

/* Create properties for gzip compression.*/
propid = H5Pcreate(H5P_DATASET_CREATE);
//printf("fh5_prepw - propid: %d\n",propid);

herr = H5Pset_chunk  (propid, *ndims, chunk_size);
if (herr >= 0) herr = H5Pset_shuffle(propid);
if (herr >= 0) herr = H5Pset_deflate(propid, 5);

*hdferr = herr;
//printf("fh5_prepw - compress: %d\n",mspcid);

return;
}
Example #4
/*****************************************************************************
  This function generates attributes, groups, and datasets of many types. 

  Parameters:
            fname:      file name.
            ngrps:      number of top-level groups.
            ndsets:     number of datasets.
            nattrs:     number of attributes.
            nrows:      number of rows in the large compound dataset.
            dim0:       size of the datasets (first dimension).
            chunk:      chunk size (single number).
            vlen:       max vlen size.
            compressed: use gzip compression.
            latest:     use the latest file format.
			
  Return:  Non-negative on success/Negative on failure
  
  Programmer:  Peter Cao <*****@*****.**>, Jan. 2013
 ****************************************************************************/
herr_t create_perf_test_file(const char *fname, int ngrps, int ndsets, 
       int nattrs, hsize_t nrows, hsize_t dim0, hsize_t chunk, int vlen, 
	   int compressed, int latest)
{
    int         i, j, k;
    hid_t       fid, sid_null, sid_scalar, sid_1d, sid_2d, did, aid, sid_2, sid_large, 
	            fapl=H5P_DEFAULT, dcpl=H5P_DEFAULT, gid1, gid2, cmp_tid, tid_str, 
				tid_enum, tid_array_f, tid_vlen_i, tid_vlen_s;
    char        name[32], tmp_name1[32], tmp_name2[32], tmp_name3[32];
    hsize_t     dims[1]={dim0}, dims2d[2]={dim0, (dim0/4+1)}, dims_array[1]={FIXED_LEN}, 
	            dim1[1]={2};
    char        *enum_names[4] = {"SOLID", "LIQUID", "GAS", "PLASMA"};
    test_comp_t *buf_comp=NULL, *buf_comp_large=NULL;
    int         *buf_int=NULL;
    float       (*buf_float_a)[FIXED_LEN]=NULL;
    double      **buf_double2d=NULL;
    hvl_t       *buf_vlen_i=NULL;
	char        (*buf_str)[FIXED_LEN];
	char        **buf_vlen_s=NULL;
	hobj_ref_t  buf_ref[2];
	hdset_reg_ref_t buf_reg_ref[2];
    size_t      offset, len;
    herr_t      status;
    char        *names[NTYPES] = { "int", "ulong", "float", "double", "fixed string", 
	            "enum", "fixed float array", "vlen int array", "vlen strings"};
    hid_t       types[NTYPES] = { H5T_NATIVE_INT, H5T_NATIVE_UINT64, H5T_NATIVE_FLOAT, 
                H5T_NATIVE_DOUBLE, tid_str, tid_enum, tid_array_f, tid_vlen_i, tid_vlen_s};
	hsize_t     coords[4][2] = { {0,  1}, {3, 5}, {1,  0}, {2,  4}}, start=0, stride=1, count=1;
	
	if (nrows < NROWS) nrows = NROWS; 
    if (ngrps<NGROUPS) ngrps=NGROUPS;
	if (ndsets<NDSETS) ndsets=NDSETS;
	if (nattrs<NATTRS) nattrs=NATTRS;
	if (dim0<DIM0) dim0=DIM0;
    if (chunk>dim0) chunk=dim0/4;
    if (chunk<1) chunk = 1;
	if (vlen<1) vlen = MAXVLEN;

    /* create fixed string datatype */                                   
    types[4] = tid_str =  H5Tcopy (H5T_C_S1);
    H5Tset_size (tid_str, FIXED_LEN);

    /* create enum datatype */
    types[5] = tid_enum = H5Tenum_create(H5T_NATIVE_INT);
    for (i = (int) SOLID; i <= (int) PLASMA; i++) {
        phase_t val = (phase_t) i;
        status = H5Tenum_insert (tid_enum, enum_names[i], &val);
    }

    /* create float array datatype */
    types[6] = tid_array_f = H5Tarray_create (H5T_NATIVE_FLOAT, 1, dims_array);
 
    /* create variable length integer datatypes */
    types[7] = tid_vlen_i = H5Tvlen_create (H5T_NATIVE_INT);
    	
    /* create variable length string datatype */
    types[8] = tid_vlen_s =  H5Tcopy (H5T_C_S1);
    H5Tset_size (tid_vlen_s, H5T_VARIABLE);
                   
    /* create compound datatypes */    
    cmp_tid = H5Tcreate (H5T_COMPOUND, sizeof (test_comp_t));
    offset = 0;
    for (i=0; i<NTYPES-2; i++) {
        H5Tinsert(cmp_tid, names[i], offset, types[i]);
        offset += H5Tget_size(types[i]);
    }

	H5Tinsert(cmp_tid, names[7], offset, types[7]);
	offset += sizeof (hvl_t);
	H5Tinsert(cmp_tid, names[8], offset, types[8]);

	/* create dataspace */
    sid_1d = H5Screate_simple (1, dims, NULL);
    sid_2d = H5Screate_simple (2, dims2d, NULL);
    sid_2 = H5Screate_simple  (1, dim1, NULL);
	sid_large = H5Screate_simple  (1, &nrows, NULL);
    sid_null = H5Screate (H5S_NULL);	
	sid_scalar = H5Screate (H5S_SCALAR);
	
	/* create fid access property */
	fapl = H5Pcreate (H5P_FILE_ACCESS);
    H5Pset_libver_bounds (fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);

	/* create dataset creation property */
    dcpl = H5Pcreate (H5P_DATASET_CREATE);

	/* set dataset chunk */
    if (chunk>0) {
        H5Pset_chunk (dcpl, 1, &chunk);
    }

	/* set dataset compression */
    if (compressed) {
        if (chunk<=0) {
            chunk = dim0/10+1;
            H5Pset_chunk (dcpl, 1, &chunk);
        }
        H5Pset_shuffle (dcpl);
        H5Pset_deflate (dcpl, 6);
    }	

	/* allocate buffers */
    buf_comp   = (test_comp_t *)calloc(dim0, sizeof(test_comp_t));
    buf_comp_large   = (test_comp_t *)calloc(nrows, sizeof(test_comp_t));
    buf_int    = (int *)calloc(dim0, sizeof(int));
    buf_float_a  = malloc(dim0*sizeof(*buf_float_a));
	buf_vlen_i = (hvl_t *)calloc(dim0, sizeof (hvl_t));
    buf_vlen_s = (char **)calloc(dim0, sizeof(char *));
	buf_str    =  malloc(dim0*sizeof (*buf_str));

	/* allocate array of double pointers */
	buf_double2d  = (double **)calloc(dims2d[0],sizeof(double *));
	/* allocate a contiguous chunk of memory for the data */
	buf_double2d[0] = (double *)calloc( dims2d[0]*dims2d[1],sizeof(double) );
	/* point each row pointer into the contiguous chunk */
	for (i=1; i <dims2d[0]; i++) buf_double2d[i] = buf_double2d[0]+i*dims2d[1];

	/* fill buffer values */
	len = 1;
    for (i=0; i<dims[0]; i++) {
        buf_comp[i].i = buf_int[i] = i-2147483648;
        buf_comp[i].l = 0xffffffffffffffff-i;
        buf_comp[i].f = 1.0/(i+1.0);
        buf_comp[i].d = 987654321.0*i+1.0/(i+1.0);
        buf_comp[i].e = (phase_t) (i % (int) (PLASMA + 1));
		
		for (j=0; j<FIXED_LEN; j++) {
		    buf_comp[i].f_array[j] = buf_float_a[i][j] = i*100+j;
			buf_str[i][j] = 'a' + (i%26);
		}
		buf_str[i][FIXED_LEN-1] = 0;
        strcpy(buf_comp[i].s, buf_str[i]);
		
		len = (1-cos(i/8.0))/2*vlen+1;
		if (!i) len = vlen;
		buf_vlen_i[i].len = len;
		buf_vlen_i[i].p = (int *)calloc(len, sizeof(int));
		for (j=0; j<len; j++) ((int*)(buf_vlen_i[i].p))[j] = i*100+j;
		buf_comp[i].i_vlen = buf_vlen_i[i];
		
		buf_vlen_s[i] = (char *)calloc(len, sizeof(char));
		for (j=0; j<len-1; j++)
		    buf_vlen_s[i][j] = j%26+'A';
		buf_comp[i].s_vlen = buf_vlen_s[i];
		
		for (j=0; j<dims2d[1]; j++)
		    buf_double2d[i][j] = i+j/10000.0;
    }

    for (i=0; i<nrows; i++) {
        buf_comp_large[i].i = i-2147483648;
        buf_comp_large[i].l = 0xffffffffffffffff-i;
        buf_comp_large[i].f = 1.0/(i+1.0);
        buf_comp_large[i].d = 987654321.0*i+1.0/(i+1.0);
        buf_comp_large[i].e = (phase_t) (i % (int) (PLASMA + 1));
        for (j=0; j<FIXED_LEN-1; j++) {
            buf_comp_large[i].f_array[j] = i*100+j;
            buf_comp_large[i].s[j] = 'a' + (i%26);
        }
		len = i%vlen+1;
        buf_comp_large[i].i_vlen.len = len;
        buf_comp_large[i].i_vlen.p = (int *)calloc(len, sizeof(int));
        for (j=0; j<len; j++) ((int*)(buf_comp_large[i].i_vlen.p))[j] = i*100+j;
        buf_comp_large[i].s_vlen = (char *)calloc(i+2, sizeof(char));
        for (j=0; j<i+1; j++) (buf_comp_large[i].s_vlen)[j] = j%26+'A';
    }
	
	/* create file */
    if (latest)
        fid = H5Fcreate (fname, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    else
        fid = H5Fcreate (fname, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
	add_attrs(fid, 0);

	sprintf(name, "a cmp ds of %d rows", nrows);
	did = H5Dcreate (fid, name, cmp_tid, sid_large, H5P_DEFAULT, dcpl, H5P_DEFAULT);
	H5Dwrite (did, cmp_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_comp_large);
	add_attrs(did, 0); 
	H5Dclose(did);

	/* add attributes */
    gid1 = H5Gcreate (fid, "attributes", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
	if (nattrs<1) nattrs = 1;
	i=0;
	while (i<nattrs) i += add_attrs(gid1, i);
	H5Gclose(gid1);
		
	/* add many sub groups to a group*/
    gid1 = H5Gcreate (fid, "groups", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
	add_attrs(gid1, 0);
    for (i=0; i<ngrps; i++) {
	    /* create sub groups */
        sprintf(name, "g%02d", i);
        gid2 = H5Gcreate (gid1, name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
		if (i<10) add_attrs(gid2, 0);
		H5Gclose(gid2);
	}
	H5Gclose(gid1);

	/* add many datasets to a group */
	gid1 = H5Gcreate (fid, "datasets", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
	add_attrs(gid1, 0);
    for (j=0; j<ndsets; j+=12) {
		/* 1 add a null dataset */
		sprintf(name, "%05d null dataset", j);
        did = H5Dcreate (gid1, name, H5T_STD_I32LE, sid_null, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
		if (!j) add_attrs(did, j); 
        H5Dclose(did);	

		/* 2 add scalar int point */
	    sprintf(name, "%05d scalar int point", j);
        did = H5Dcreate (gid1, name, H5T_NATIVE_INT, sid_scalar, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite (did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &j);		
		if (!j) add_attrs(did, j); 
		H5Dclose(did);		
		
		/* 3 scalar vlen string */
	    sprintf(name, "%05d scalar vlen string", j);
        did = H5Dcreate (gid1, name, tid_vlen_s, sid_scalar, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite (did, tid_vlen_s, H5S_ALL, H5S_ALL, H5P_DEFAULT, &buf_vlen_s[0]);		
		if (!j) add_attrs(did, j); 
		H5Dclose(did);			
	
	    /* 4 add fixed-length float array */
		sprintf(name, "%05d fixed-length float array", j);
		did = H5Dcreate (gid1, name, tid_array_f, sid_1d, H5P_DEFAULT, dcpl, H5P_DEFAULT);
		H5Dwrite (did, tid_array_f, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_float_a);
		if (!j) add_attrs(did, j); 
		H5Dclose(did);
	
		/* 5 add fixed-length strings */
		sprintf(name, "%05d fixed-length strings", j);
		did = H5Dcreate (gid1, name, tid_str, sid_1d, H5P_DEFAULT, dcpl, H5P_DEFAULT);
		H5Dwrite (did, tid_str, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_str);
		if (!j) add_attrs(did, j); 
		H5Dclose(did);
		
		/* 6 add compound data */
	    sprintf(name, "%05d compund data", j);
	    did = H5Dcreate (gid1, name, cmp_tid, sid_1d, H5P_DEFAULT, dcpl, H5P_DEFAULT);
	    H5Dwrite (did, cmp_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_comp);
		if (!j) add_attrs(did, j); 
		H5Dclose(did);

		/* 7 add 2D double */
	    sprintf(name, "%05d 2D double", j);
		strcpy (tmp_name1, name);
	    did = H5Dcreate (gid1, name, H5T_NATIVE_DOUBLE, sid_2d, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
	    H5Dwrite (did, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_double2d[0]);
		if (!j) add_attrs(did, j); 
		H5Dclose(did);
		
		/* 8 add 1D int array */
	    sprintf(name, "%05d 1D int array", j);
        did = H5Dcreate (gid1, name, H5T_NATIVE_INT, sid_1d, H5P_DEFAULT, dcpl, H5P_DEFAULT);
        H5Dwrite (did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_int);		
		if (!j) add_attrs(did, j); 
		H5Dclose(did);
		
		/* 9 add vlen int array */
	    sprintf(name, "%05d vlen int array", j);
		strcpy (tmp_name2, name);
        did = H5Dcreate (gid1, name, tid_vlen_i, sid_1d, H5P_DEFAULT, dcpl, H5P_DEFAULT);
        H5Dwrite (did, tid_vlen_i, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_vlen_i);		
		if (!j) add_attrs(did, j); 
		H5Dclose(did);	

		/* 10 add vlen strings */
	    sprintf(name, "%05d vlen strings", j);
		strcpy (tmp_name3, name);
        did = H5Dcreate (gid1, name, tid_vlen_s, sid_1d, H5P_DEFAULT, dcpl, H5P_DEFAULT);
        H5Dwrite (did, tid_vlen_s, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_vlen_s);		
		if (!j) add_attrs(did, j); 
		H5Dclose(did);	
		
		/* 11 add object refs */
		H5Rcreate(&buf_ref[0],gid1, ".", H5R_OBJECT, -1); 
		H5Rcreate(&buf_ref[1],gid1, tmp_name3, H5R_OBJECT, -1); 
	    sprintf(name, "%05d obj refs", j);
        did = H5Dcreate (gid1, name, H5T_STD_REF_OBJ, sid_2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite (did, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_ref);		
		if (!j) add_attrs(did, j); 
		H5Dclose(did);

		/* 12 add region refs */
		H5Sselect_elements (sid_2d, H5S_SELECT_SET, 4, coords[0]);
		H5Rcreate(&buf_reg_ref[0],gid1, tmp_name1, H5R_DATASET_REGION, sid_2d); 
		H5Sselect_none(sid_2d);
		count = dims[0]/2+1;
		H5Sselect_hyperslab (sid_1d, H5S_SELECT_SET, &start, &stride, &count,NULL);
		H5Rcreate(&buf_reg_ref[1],gid1, tmp_name2, H5R_DATASET_REGION, sid_1d); 
		H5Sselect_none(sid_1d);
	    sprintf(name, "%05d region refs", j);
        did = H5Dcreate (gid1, name, H5T_STD_REF_DSETREG, sid_2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite (did, H5T_STD_REF_DSETREG, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_reg_ref);		
		if (!j) add_attrs(did, j); 
		H5Dclose(did);
	}
	H5Gclose(gid1);			
	
    H5Tclose (tid_array_f);
    H5Tclose (tid_vlen_i);
    H5Tclose (tid_vlen_s);
    H5Tclose (tid_enum);
    H5Tclose (tid_str);
    H5Tclose (cmp_tid);
    H5Pclose (dcpl);
    H5Pclose (fapl);
    H5Sclose (sid_1d);
    H5Sclose (sid_2d);
    H5Sclose (sid_2);
    H5Sclose (sid_large);
    H5Sclose (sid_null);
    H5Sclose (sid_scalar);
    H5Fclose (fid);

    for (i=0; i<dims[0]; i++) {
		if (buf_vlen_i[i].p) free(buf_vlen_i[i].p);
		if (buf_vlen_s[i]) free(buf_vlen_s[i]);
	}

    for (i=0; i<nrows; i++) {
	    if (buf_comp_large[i].i_vlen.p)  free(buf_comp_large[i].i_vlen.p);
		if (buf_comp_large[i].s_vlen) free(buf_comp_large[i].s_vlen);
	}
	
    free (buf_comp);
    free (buf_comp_large);
    free (buf_int);
    free (buf_float_a);
    free (buf_double2d[0]);
    free (buf_double2d);
	free (buf_str);
    free(buf_vlen_i);
    free(buf_vlen_s);

    return 0;
}
Example #5
int apply_filters(const char* name,    /* object name from traverse list */
                  int rank,            /* rank of dataset */
                  hsize_t *dims,       /* dimensions of dataset */
                  size_t msize,        /* size of type */
                  hid_t dcpl_id,       /* dataset creation property list */
                  pack_opt_t *options, /* repack options */
                  int *has_filter)     /* (OUT) object NAME has a filter */


{
    int          nfilters;       /* number of filters in DCPL */
    hsize_t      chsize[64];     /* chunk size in elements */
    H5D_layout_t layout;
    int          i;
    pack_info_t  obj;

    *has_filter = 0;

    if (rank==0) /* scalar dataset, do not apply */
        return 0;

   /*-------------------------------------------------------------------------
    * initialize the assignment object
    *-------------------------------------------------------------------------
    */
    init_packobject(&obj);

   /*-------------------------------------------------------------------------
    * find options
    *-------------------------------------------------------------------------
    */
    if (aux_assign_obj(name,options,&obj)==0)
        return 0;

    /* get information about input filters */
    if ((nfilters = H5Pget_nfilters(dcpl_id))<0)
        return -1;

   /*-------------------------------------------------------------------------
    * check if we have filters in the pipeline
    * we want to replace them with the input filters
    * only remove if we are inserting new ones
    *-------------------------------------------------------------------------
    */
    if (nfilters && obj.nfilters )
    {
        *has_filter = 1;
        if (H5Premove_filter(dcpl_id,H5Z_FILTER_ALL)<0)
            return -1;
    }

   /*-------------------------------------------------------------------------
    * check if there is an existing chunk definition;
    * read it only if a layout was not requested
    *-------------------------------------------------------------------------
    */
    if (obj.layout == -1 )
    {
        if ((layout = H5Pget_layout(dcpl_id))<0)
            return -1;

        if (layout == H5D_CHUNKED)
        {
            if ((rank = H5Pget_chunk(dcpl_id,NELMTS(chsize),chsize/*out*/))<0)
                return -1;
            obj.layout = H5D_CHUNKED;
            obj.chunk.rank = rank;
            for ( i = 0; i < rank; i++)
                obj.chunk.chunk_lengths[i] = chsize[i];
        }
    }

    /*-------------------------------------------------------------------------
    * the type of filter and additional parameter
    * type can be one of the filters
    * H5Z_FILTER_NONE        0 , uncompress if compressed
    * H5Z_FILTER_DEFLATE     1 , deflation like gzip
    * H5Z_FILTER_SHUFFLE     2 , shuffle the data
    * H5Z_FILTER_FLETCHER32  3 , fletcher32 checksum of EDC
    * H5Z_FILTER_SZIP        4 , szip compression
    * H5Z_FILTER_NBIT        5 , nbit compression
    * H5Z_FILTER_SCALEOFFSET 6 , scaleoffset compression
    *-------------------------------------------------------------------------
    */

    if (obj.nfilters)
    {

   /*-------------------------------------------------------------------------
    * filters require a chunked layout; if we do not have one, define a default
    *-------------------------------------------------------------------------
    */
        if (obj.layout==-1)
        {

            /* stripmine info */
            hsize_t sm_size[H5S_MAX_RANK]; /*stripmine size */
            hsize_t sm_nbytes;             /*bytes per stripmine */

            obj.chunk.rank = rank;

            /*
            * determine the strip mine size. The strip mine is
            * a hyperslab whose size is manageable.
            */



            sm_nbytes = msize;
            for ( i = rank; i > 0; --i)
            {
                hsize_t size = H5TOOLS_BUFSIZE / sm_nbytes;
                if ( size == 0) /* datum size > H5TOOLS_BUFSIZE */
                    size = 1;
                sm_size[i - 1] = MIN(dims[i - 1], size);
                sm_nbytes *= sm_size[i - 1];
                assert(sm_nbytes > 0);

            }

            for ( i = 0; i < rank; i++)
            {
                obj.chunk.chunk_lengths[i] = sm_size[i];
            }

        }

        for ( i=0; i<obj.nfilters; i++)
        {
            switch (obj.filter[i].filtn)
            {
            default:
                break;

           /*-------------------------------------------------------------------------
            * H5Z_FILTER_DEFLATE       1 , deflation like gzip
            *-------------------------------------------------------------------------
            */
            case H5Z_FILTER_DEFLATE:
                {
                    unsigned     aggression;     /* the deflate level */

                    aggression = obj.filter[i].cd_values[0];
                    /* set up for deflated data */
                    if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                        return -1;
                    if(H5Pset_deflate(dcpl_id,aggression)<0)
                        return -1;
                }
                break;

           /*-------------------------------------------------------------------------
            * H5Z_FILTER_SZIP       4 , szip compression
            *-------------------------------------------------------------------------
            */
            case H5Z_FILTER_SZIP:
                {
                    unsigned  options_mask;
                    unsigned  pixels_per_block;

                    options_mask     = obj.filter[i].cd_values[0];
                    pixels_per_block = obj.filter[i].cd_values[1];

                    /* set up for szip data */
                    if(H5Pset_chunk(dcpl_id,obj.chunk.rank,obj.chunk.chunk_lengths)<0)
                        return -1;
                    if (H5Pset_szip(dcpl_id,options_mask,pixels_per_block)<0)
                        return -1;

                }
                break;

           /*-------------------------------------------------------------------------
            * H5Z_FILTER_SHUFFLE    2 , shuffle the data
            *-------------------------------------------------------------------------
            */
            case H5Z_FILTER_SHUFFLE:
                if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                    return -1;
                if (H5Pset_shuffle(dcpl_id)<0)
                    return -1;
                break;

           /*-------------------------------------------------------------------------
            * H5Z_FILTER_FLETCHER32 3 , fletcher32 checksum of EDC
            *-------------------------------------------------------------------------
            */
            case H5Z_FILTER_FLETCHER32:
                if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                    return -1;
                if (H5Pset_fletcher32(dcpl_id)<0)
                    return -1;
                break;
           /*----------- -------------------------------------------------------------
            * H5Z_FILTER_NBIT , NBIT compression
            *-------------------------------------------------------------------------
            */
            case H5Z_FILTER_NBIT:
                if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                    return -1;
                if (H5Pset_nbit(dcpl_id)<0)
                    return -1;
                break;
            /*----------- -------------------------------------------------------------
             * H5Z_FILTER_SCALEOFFSET , scale+offset compression
             *-------------------------------------------------------------------------
             */

            case H5Z_FILTER_SCALEOFFSET:
                {
                    H5Z_SO_scale_type_t scale_type;
                    int                 scale_factor;

                    scale_type   = (H5Z_SO_scale_type_t)obj.filter[i].cd_values[0];
                    scale_factor = obj.filter[i].cd_values[1];

                    if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                        return -1;
                    if (H5Pset_scaleoffset(dcpl_id,scale_type,scale_factor)<0)
                        return -1;
                }
                break;
            } /* switch */
        }/*i*/

    }
    /*obj.nfilters*/

    /*-------------------------------------------------------------------------
    * layout
    *-------------------------------------------------------------------------
    */

    if (obj.layout>=0)
    {
        /* a layout was defined */
        if (H5Pset_layout(dcpl_id, obj.layout)<0)
            return -1;

        if (H5D_CHUNKED == obj.layout)
        {
            if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                return -1;
        }
        else if (H5D_COMPACT == obj.layout)
        {
            if (H5Pset_alloc_time(dcpl_id, H5D_ALLOC_TIME_EARLY)<0)
                return -1;
        }
        /* remove filters for the H5D_CONTIGUOUS case */
        else if (H5D_CONTIGUOUS == obj.layout)
        {
            if (H5Premove_filter(dcpl_id,H5Z_FILTER_ALL)<0)
                return -1;
        }

    }

 return 0;
}
Example #6
/*-------------------------------------------------------------------------
 * Function:    create_shuffle_dsets_float
 *
 * Purpose:     Create a dataset of FLOAT datatype with shuffle filter
 *
 * Return:      Success:        0
 *              Failure:        -1
 *
 * Programmer:  Raymond Lu
 *              29 March 2011
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
int
create_shuffle_dsets_float(hid_t fid, hid_t fsid, hid_t msid)
{
    hid_t       dataset         = -1;   /* dataset handles */
    hid_t       dcpl            = -1;
    float       data[NX][NY];           /* data to write */
    float       fillvalue = -2.2f;
    hsize_t     chunk[RANK] = {CHUNK0, CHUNK1};
    int         i, j;

    /*
     * Data and output buffer initialization.
     */
    for (j = 0; j < NX; j++) {
        for (i = 0; i < NY; i++)
            data[j][i] = ((float)(i + j + 1))/3;
    }

    /*
     * Create the dataset creation property list, add the shuffle filter,
     * set the chunk size, and set the fill value.
     */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR
    if(H5Pset_shuffle (dcpl) < 0)
        TEST_ERROR
    if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
        TEST_ERROR
    if(H5Pset_fill_value(dcpl, H5T_NATIVE_FLOAT, &fillvalue) < 0)
        TEST_ERROR

    /*
     * Create a new dataset within the file using the defined dataspace, a
     * little-endian datatype, and the dataset creation property list above.
     */
    if((dataset = H5Dcreate2(fid, DATASETNAME20, H5T_IEEE_F32LE, fsid,
            H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
        TEST_ERROR

    /*
     * Write the data to the dataset using default transfer properties.
     */
    if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
        TEST_ERROR

    /* Close dataset */
    if(H5Dclose(dataset) < 0)
        TEST_ERROR

    /* Now create a dataset with a big-endian type */
    if((dataset = H5Dcreate2(fid, DATASETNAME21, H5T_IEEE_F32BE, fsid,
            H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
        TEST_ERROR
    if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
        TEST_ERROR
    if(H5Dclose(dataset) < 0)
        TEST_ERROR

    /*
     * Close/release resources.
     */
    if(H5Pclose(dcpl) < 0)
        TEST_ERROR

    return 0;

error:
    H5E_BEGIN_TRY {
        H5Pclose(dcpl);
        H5Dclose(dataset);
    } H5E_END_TRY;

    return -1;
}
Example #7
herr_t H5ARRAYmake( hid_t loc_id,
		    const char *dset_name,
		    const char *obversion,
		    const int rank,
		    const hsize_t *dims,
		    int   extdim,
		    hid_t type_id,
		    hsize_t *dims_chunk,
		    void  *fill_data,
		    int   compress,
		    char  *complib,
		    int   shuffle,
		    int   fletcher32,
		    const void *data)
{

 hid_t   dataset_id, space_id;
 hsize_t *maxdims = NULL;
 hid_t   plist_id = 0;
 unsigned int cd_values[6];
 int     chunked = 0;
 int     i;

 /* Check whether the array has to be chunked or not */
 if (dims_chunk) {
   chunked = 1;
 }

 if(chunked) {
   maxdims = malloc(rank*sizeof(hsize_t));
   if(!maxdims) return -1;

   for(i=0;i<rank;i++) {
     if (i == extdim) {
       maxdims[i] = H5S_UNLIMITED;
     }
     else {
       maxdims[i] = dims[i] < dims_chunk[i] ? dims_chunk[i] : dims[i];
     }
   }
 }

 /* Create the data space for the dataset. */
 if ( (space_id = H5Screate_simple( rank, dims, maxdims )) < 0 )
   return -1;

 if (chunked) {
   /* Modify dataset creation properties, i.e. enable chunking  */
   plist_id = H5Pcreate (H5P_DATASET_CREATE);
   if ( H5Pset_chunk ( plist_id, rank, dims_chunk ) < 0 )
     return -1;

   /* Set the fill value using a struct as the data type. */
   if (fill_data) {
       if ( H5Pset_fill_value( plist_id, type_id, fill_data ) < 0 )
	 return -1;
   }
   else {
     if ( H5Pset_fill_time(plist_id, H5D_FILL_TIME_ALLOC) < 0 )
       return -1;
   }

   /*
      Dataset creation property list is modified to use filters
   */

   /* Fletcher must be first */
   if (fletcher32) {
     if ( H5Pset_fletcher32( plist_id) < 0 )
       return -1;
   }
   /* Then shuffle (not if blosc is activated) */
   if ((shuffle) && (strcmp(complib, "blosc") != 0)) {
     if ( H5Pset_shuffle( plist_id) < 0 )
       return -1;
   }
   /* Finally compression */
   if (compress) {
     cd_values[0] = compress;
     cd_values[1] = (int)(atof(obversion) * 10);
     if (extdim <0)
       cd_values[2] = CArray;
     else
       cd_values[2] = EArray;

     /* The default compressor in HDF5 (zlib) */
     if (strcmp(complib, "zlib") == 0) {
       if ( H5Pset_deflate( plist_id, compress) < 0 )
	 return -1;
     }
     /* The Blosc compressor does accept parameters */
     else if (strcmp(complib, "blosc") == 0) {
       cd_values[4] = compress;
       cd_values[5] = shuffle;
       if ( H5Pset_filter( plist_id, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 6, cd_values) < 0 )
	 return -1;
     }
     /* The LZO compressor does accept parameters */
     else if (strcmp(complib, "lzo") == 0) {
       if ( H5Pset_filter( plist_id, FILTER_LZO, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
	 return -1;
     }
     /* The bzip2 compressor does accept parameters */
     else if (strcmp(complib, "bzip2") == 0) {
       if ( H5Pset_filter( plist_id, FILTER_BZIP2, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
	 return -1;
     }
     else {
       /* Compression library not supported */
       fprintf(stderr, "Compression library not supported\n");
       return -1;
     }
   }

   /* Create the (chunked) dataset */
   if ((dataset_id = H5Dcreate(loc_id, dset_name, type_id,
			       space_id, plist_id )) < 0 )
     goto out;
 }
 else {  			/* Not chunked case */
   /* Create the dataset. */
   if ((dataset_id = H5Dcreate(loc_id, dset_name, type_id,
			       space_id, H5P_DEFAULT )) < 0 )
     goto out;
 }

 /* Write the dataset only if there is data to write */

 if (data)
 {
   if ( H5Dwrite( dataset_id, type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, data ) < 0 )
   goto out;
 }

 /* Terminate access to the data space. */
 if ( H5Sclose( space_id ) < 0 )
  return -1;

 /* End access to the property list */
 if (plist_id)
   if ( H5Pclose( plist_id ) < 0 )
     goto out;

 /* Release resources */
 if (maxdims)
   free(maxdims);

 return dataset_id;

out:
 H5Dclose( dataset_id );
 H5Sclose( space_id );
 if (maxdims)
   free(maxdims);
 if (dims_chunk)
   free(dims_chunk);
 return -1;

}
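For reference, a sketch of how H5ARRAYmake might be called to create a zlib-compressed, shuffled array. This is a hedged example: the dataset name, sizes, and the "2.3" obversion string are illustrative assumptions, and the helper itself (together with the FILTER_* constants) comes from the PyTables support layer. Note that on success the helper returns the open dataset id rather than a plain status code.

#include <stdio.h>
#include "hdf5.h"

/* assumes the H5ARRAYmake() prototype above is visible */
int write_compressed_array(hid_t loc_id)
{
    static float data[100][50];                  /* illustrative payload */
    hsize_t dims[2]       = {100, 50};
    hsize_t dims_chunk[2] = {25, 50};
    hid_t   dset_id;

    /* extdim = -1: no extendable dimension; compression level 6 with zlib + shuffle */
    dset_id = H5ARRAYmake(loc_id, "array", "2.3", 2, dims, -1,
                          H5T_NATIVE_FLOAT, dims_chunk, NULL /* fill_data */,
                          6, "zlib", 1 /* shuffle */, 0 /* fletcher32 */, data);
    if (dset_id < 0) {
        fprintf(stderr, "H5ARRAYmake failed\n");
        return -1;
    }
    H5Dclose(dset_id);                           /* the helper returns the dataset id */
    return 0;
}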
Example #8
/*+++++++++++++++++++++++++
.IDENTifer   PYTABLE_make_array
.PURPOSE     create extensible HDF5 dataset
.INPUT/OUTPUT
  call as    stat = PYTABLE_make_array( locID, dset_name, title, rank, dims,
			                extdim, typeID, dims_chunk, fill_data,
			                compress, shuffle, fletcher32, buff );
     input:
            hid_t locID           :  HDF5 identifier of file or group
	    char *dset_name       :  name of dataset
	    char *title           :  title of the dataset (stored in the TITLE attribute)
	    int rank              :  number of dimensions
	    hsize_t *dims         :  size of each dimension
	    int extdim            :  index of the extendable dimension
	    hid_t typeID          :  data type (HDF5 identifier)
	    hsize_t *dims_chunk   :  chunk sizes
	    void *fill_data       :  fill value for data
	    unsigned int compress :  compression level (zero for no compression)
	    bool shuffle          :  shuffle data for better compression
	    bool fletcher32       :  add a Fletcher32 checksum filter
	    void *buffer          :  buffer with data to write (or NULL)
	    
.RETURNS     A negative value is returned on failure. 
.COMMENTS    none
-------------------------*/
herr_t PYTABLE_make_array( hid_t locID, const char *dset_name, 
			   const char *title, const int rank, 
			   const hsize_t *dims, int extdim, hid_t typeID,
			   const hsize_t *dims_chunk, void *fill_data,
			   unsigned int compress, bool shuffle, 
			   bool fletcher32, const void *buffer )
{
     register int ni;

     hid_t   dataID = -1, spaceID = -1;
     herr_t  stat;

/* check if the array has to be chunked or not */
     if ( dims_chunk != NULL ) {
	  hid_t   plistID;

	  hsize_t *maxdims = (hsize_t *) malloc( rank * sizeof(hsize_t) );
	  if ( maxdims == NULL )
	       NADC_GOTO_ERROR( NADC_ERR_ALLOC, "maxdims" );

	  for ( ni = 0; ni < rank; ni++ ) {
	       if ( ni == extdim )
		    maxdims[ni] = H5S_UNLIMITED;
	       else
		    maxdims[ni] = 
			 dims[ni] < dims_chunk[ni] ? dims_chunk[ni] : dims[ni];
	  }
	  spaceID = H5Screate_simple( rank, dims, maxdims );
	  free( maxdims );
	  if ( spaceID < 0 ) NADC_GOTO_ERROR( NADC_ERR_HDF_SPACE, "" );

	  /* Modify dataset creation properties, i.e. enable chunking  */
	  plistID = H5Pcreate( H5P_DATASET_CREATE );
	  if ( H5Pset_chunk( plistID, rank, dims_chunk ) < 0 ) goto done;

          /* set the fill value using a struct as the data type */
	  if ( fill_data != NULL 
	       && H5Pset_fill_value( plistID, typeID, fill_data ) < 0 )
	       goto done;

          /* dataset creation property list is modified to use filters */
          /* fletcher must be first */
	  if ( fletcher32 ) {
	       if ( H5Pset_fletcher32( plistID ) < 0 ) goto done;
	  }
          /* then shuffle */
	  if ( shuffle ) {
	       if ( H5Pset_shuffle( plistID ) < 0 ) goto done;
	  }
          /* finally compression */
	  if ( compress > 0 ) {
	       if ( H5Pset_deflate( plistID, compress ) < 0 ) goto done;
	  }
          /* create the (chunked) dataset */
	  dataID = H5Dcreate( locID, dset_name, typeID, spaceID, 
			      H5P_DEFAULT, plistID, H5P_DEFAULT );
	  if ( dataID < 0 ) 
	       NADC_GOTO_ERROR( NADC_ERR_HDF_DATA, dset_name );

          /* end access to the property list */
	  if ( H5Pclose( plistID ) < 0 ) goto done;
     } else {
	  spaceID = H5Screate_simple( rank, dims, NULL );
	  if ( spaceID < 0 ) return -1;

          /* create the dataset (not chunked) */
	  dataID = H5Dcreate( locID, dset_name, typeID, spaceID, 
			      H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT );
	  if ( dataID < 0 ) 
	       NADC_GOTO_ERROR( NADC_ERR_HDF_DATA, dset_name );
     }
/*
 * write the data (only if a buffer was provided)
 */
     if ( buffer != NULL ) {
	  stat = H5Dwrite( dataID, typeID, H5S_ALL, H5S_ALL, H5P_DEFAULT, buffer );
	  if ( stat < 0 ) NADC_GOTO_ERROR( NADC_ERR_HDF_WR, "" );
     }

     (void) H5Dclose( dataID );
     (void) H5Sclose( spaceID );
/*
 * Set the conforming array attributes
 *
 * attach the CLASS attribute
 */
     (void) H5LTset_attribute_string( locID, dset_name, "CLASS", 
				      PY_ARRAY_CLASS );

/* attach the EXTDIM attribute in case of enlargeable arrays */
     (void) H5LTset_attribute_int( locID, dset_name, "EXTDIM", &extdim, 1 );

/* attach the FLAVOR attribute */
     (void) H5LTset_attribute_string( locID, dset_name, "FLAVOR", 
				      PY_ARRAY_FLAVOR );
/* attach the VERSION attribute */
     (void) H5LTset_attribute_string( locID, dset_name, "VERSION", 
				      PY_ARRAY_VERSION );

/* attach the TITLE attribute */
     (void) H5LTset_attribute_string( locID, dset_name, "TITLE", title );

     return 0;
 done:
     if ( dataID > 0 ) (void) H5Dclose( dataID );
     if ( spaceID > 0 ) (void) H5Sclose( spaceID );
     return -1;
}
Example #9
AccessTraceWriter::AccessTraceWriter(g_string _fname, uint32_t numChildren) : fname(_fname) {
    // Create record structure
    hid_t accType = H5Tenum_create(H5T_NATIVE_USHORT);
    uint16_t val;
    H5Tenum_insert(accType, "GETS", (val=GETS,&val));
    H5Tenum_insert(accType, "GETX", (val=GETX,&val));
    H5Tenum_insert(accType, "PUTS", (val=PUTS,&val));
    H5Tenum_insert(accType, "PUTX", (val=PUTX,&val));

    size_t offset = 0;
    size_t size = H5Tget_size(H5T_NATIVE_ULONG)*2 + H5Tget_size(H5T_NATIVE_UINT) + H5Tget_size(H5T_NATIVE_USHORT) + H5Tget_size(accType);
    hid_t recType = H5Tcreate(H5T_COMPOUND, size);
    auto insertType = [&](const char* name, hid_t type) {
        H5Tinsert(recType, name, offset, type);
        offset += H5Tget_size(type);
    };

    insertType("lineAddr", H5T_NATIVE_ULONG);
    insertType("cycle", H5T_NATIVE_ULONG);
    insertType("lat", H5T_NATIVE_UINT);
    insertType("childId", H5T_NATIVE_USHORT);
    insertType("accType", accType);

    hid_t fid = H5Fcreate(fname.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    if (fid == H5I_INVALID_HID) panic("Could not create HDF5 file %s", fname.c_str());
    
    // HACK: We want to use the SHUF filter... create the raw dataset instead of the packet table
    // hid_t table = H5PTcreate_fl(fid, "accs", recType, PT_CHUNKSIZE, 9);
    // if (table == H5I_INVALID_HID) panic("Could not create HDF5 packet table");
    hsize_t dims[1] = {0};
    hsize_t dims_chunk[1] = {PT_CHUNKSIZE};
    hsize_t maxdims[1] = {H5S_UNLIMITED};
    hid_t space_id = H5Screate_simple(1, dims, maxdims);

    hid_t plist_id = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(plist_id, 1, dims_chunk);
    H5Pset_shuffle(plist_id);
    H5Pset_deflate(plist_id, 9);

    hid_t table = H5Dcreate2(fid, "accs", recType, space_id, H5P_DEFAULT, plist_id, H5P_DEFAULT);
    if (table == H5I_INVALID_HID) panic("Could not create HDF5 dataset");
    H5Dclose(table);

    // info("%ld %ld %ld %ld", sizeof(PackedAccessRecord), size, offset, H5Tget_size(recType));
    assert(offset == size);
    assert(size == sizeof(PackedAccessRecord));

    hid_t ncAttr = H5Acreate2(fid, "numChildren", H5T_NATIVE_UINT, H5Screate(H5S_SCALAR), H5P_DEFAULT, H5P_DEFAULT);
    H5Awrite(ncAttr, H5T_NATIVE_UINT, &numChildren);
    H5Aclose(ncAttr);

    hid_t fAttr = H5Acreate2(fid, "finished", H5T_NATIVE_UINT, H5Screate(H5S_SCALAR), H5P_DEFAULT, H5P_DEFAULT);
    uint32_t finished = 0;
    H5Awrite(fAttr, H5T_NATIVE_UINT, &finished);
    H5Aclose(fAttr);

    H5Fclose(fid);

    // Initialize buffer
    buf = gm_calloc<PackedAccessRecord>(PT_CHUNKSIZE);
    cur = 0;
    max = PT_CHUNKSIZE;
    assert((uint32_t)(((char*) &buf[1]) - ((char*) &buf[0])) == sizeof(PackedAccessRecord));
}
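For context, a sketch of how records could later be appended to the chunked, shuffled, deflated "accs" dataset created above. This is a hypothetical helper, not the class's actual flush path; `memType` is assumed to be the same compound type built in the constructor.

static void appendRecords(const char* fname, hid_t memType, const void* recs, hsize_t num) {
    hid_t fid  = H5Fopen(fname, H5F_ACC_RDWR, H5P_DEFAULT);
    hid_t dset = H5Dopen2(fid, "accs", H5P_DEFAULT);

    hid_t fspace = H5Dget_space(dset);
    hsize_t oldSize, maxSize;
    H5Sget_simple_extent_dims(fspace, &oldSize, &maxSize);   // current number of rows
    H5Sclose(fspace);

    hsize_t newSize = oldSize + num;
    H5Dset_extent(dset, &newSize);                           // grow the unlimited dimension

    fspace = H5Dget_space(dset);
    H5Sselect_hyperslab(fspace, H5S_SELECT_SET, &oldSize, nullptr, &num, nullptr);
    hid_t mspace = H5Screate_simple(1, &num, nullptr);
    H5Dwrite(dset, memType, mspace, fspace, H5P_DEFAULT, recs);

    H5Sclose(mspace);
    H5Sclose(fspace);
    H5Dclose(dset);
    H5Fclose(fid);
}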
Example #10
herr_t H5VLARRAYmake( hid_t loc_id,
                      const char *dset_name,
                      const char *obversion,
                      const int rank,
                      const hsize_t *dims,
                      hid_t type_id,
                      hsize_t chunk_size,
                      void  *fill_data,
                      int   compress,
                      char  *complib,
                      int   shuffle,
                      int   fletcher32,
                      const void *data)
{

    hvl_t   vldata;
    hid_t   dataset_id, space_id, datatype, tid1;
    hsize_t dataset_dims[1];
    hsize_t maxdims[1] = { H5S_UNLIMITED };
    hsize_t dims_chunk[1];
    hid_t   plist_id;
    unsigned int cd_values[6];

    if (data)
        /* if data, one row will be filled initially */
        dataset_dims[0] = 1;
    else
        /* no data, so no rows in the dataset initially */
        dataset_dims[0] = 0;

    dims_chunk[0] = chunk_size;

    /* Fill the vldata structure with the data to write */
    /* This is currently not used */
    vldata.p = (void *)data;
    vldata.len = 1;        /* Only one array type to save */

    /* Create a VL datatype */
    if (rank == 0) {
        datatype = H5Tvlen_create(type_id);
    }
    else {
        tid1 = H5Tarray_create(type_id, rank, dims);
        datatype = H5Tvlen_create(tid1);
        H5Tclose( tid1 );   /* Release resources */
    }

    /* The dataspace */
    space_id = H5Screate_simple( 1, dataset_dims, maxdims );

    /* Modify dataset creation properties, i.e. enable chunking  */
    plist_id = H5Pcreate (H5P_DATASET_CREATE);
    if ( H5Pset_chunk ( plist_id, 1, dims_chunk ) < 0 )
        return -1;

    /*
       Dataset creation property list is modified to use filters
    */

    /* Fletcher must be first */
    if (fletcher32) {
        if ( H5Pset_fletcher32( plist_id) < 0 )
            return -1;
    }
    /* Then shuffle (not needed for Blosc, which shuffles internally) */
    if (shuffle && (strcmp(complib, "blosc") != 0)) {
        if ( H5Pset_shuffle( plist_id) < 0 )
            return -1;
    }
    /* Finally compression */
    if (compress) {
        cd_values[0] = compress;
        cd_values[1] = (int)(atof(obversion) * 10);
        cd_values[2] = VLArray;
        /* The default compressor in HDF5 (zlib) */
        if (strcmp(complib, "zlib") == 0) {
            if ( H5Pset_deflate( plist_id, compress) < 0 )
                return -1;
        }
        /* The Blosc compressor does accept parameters */
        else if (strcmp(complib, "blosc") == 0) {
            cd_values[4] = compress;
            cd_values[5] = shuffle;
            if ( H5Pset_filter( plist_id, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 6, cd_values) < 0 )
                return -1;
        }
        /* The LZO compressor does accept parameters */
        else if (strcmp(complib, "lzo") == 0) {
            if ( H5Pset_filter( plist_id, FILTER_LZO, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
                return -1;
        }
        /* The bzip2 compressor does accept parameters */
        else if (strcmp(complib, "bzip2") == 0) {
            if ( H5Pset_filter( plist_id, FILTER_BZIP2, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
                return -1;
        }
        else {
            /* Compression library not supported */
            fprintf(stderr, "Compression library not supported\n");
            return -1;
        }
    }

    /* Create the dataset. */
    if ((dataset_id = H5Dcreate(loc_id, dset_name, datatype, space_id,
                                H5P_DEFAULT, plist_id, H5P_DEFAULT )) < 0 )
        goto out;

    /* Write the dataset only if there is data to write */
    if (data)
        if ( H5Dwrite( dataset_id, datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, &vldata ) < 0 )
            goto out;

    /* Terminate access to the data space. */
    if ( H5Sclose( space_id ) < 0 )
        return -1;

    /* Release the datatype in the case that it is not an atomic type */
    if ( H5Tclose( datatype ) < 0 )
        return -1;

    /* End access to the property list */
    if ( H5Pclose( plist_id ) < 0 )
        goto out;

    return dataset_id;

out:

    return -1;

}
Example #11
herr_t H5TBOmake_table( const char *table_title,
                        hid_t loc_id,
                        const char *dset_name,
                        char *version,
                        const char *class_,
                        hid_t type_id,
                        hsize_t nrecords,
                        hsize_t chunk_size,
                        void  *fill_data,
                        int compress,
                        char *complib,
                        int shuffle,
                        int fletcher32,
                        const void *data )
{

 hid_t   dataset_id;
 hid_t   space_id;
 hid_t   plist_id;
 hsize_t dims[1];
 hsize_t dims_chunk[1];
 hsize_t maxdims[1] = { H5S_UNLIMITED };
 unsigned int cd_values[7];
 int     blosc_compcode;
 char    *blosc_compname = NULL;

 dims[0]       = nrecords;
 dims_chunk[0] = chunk_size;

 /* Create a simple data space with unlimited size */
 if ( (space_id = H5Screate_simple( 1, dims, maxdims )) < 0 )
  return -1;

 /* Modify dataset creation properties, i.e. enable chunking  */
 plist_id = H5Pcreate (H5P_DATASET_CREATE);
 if ( H5Pset_chunk ( plist_id, 1, dims_chunk ) < 0 )
  return -1;

 /* Set the fill value using a struct as the data type. */
 if ( fill_data)
   {
     if ( H5Pset_fill_value( plist_id, type_id, fill_data ) < 0 )
       return -1;
   }
 else {
   if ( H5Pset_fill_time(plist_id, H5D_FILL_TIME_ALLOC) < 0 )
     return -1;
 }

 /*
  Dataset creation property list is modified to use filters
  */

 /* Fletcher must be first */
 if (fletcher32) {
   if ( H5Pset_fletcher32( plist_id) < 0 )
     return -1;
 }
 /* Then shuffle (not needed for Blosc, which shuffles internally) */
 if ((shuffle && compress) && (strncmp(complib, "blosc", 5) != 0)) {
   if ( H5Pset_shuffle( plist_id) < 0 )
     return -1;
 }
 /* Finally compression */
 if ( compress )
 {
   cd_values[0] = compress;
   cd_values[1] = (int)(atof(version) * 10);
   cd_values[2] = Table;
   /* The default compressor in HDF5 (zlib) */
   if (strcmp(complib, "zlib") == 0) {
     if ( H5Pset_deflate( plist_id, compress) < 0 )
       return -1;
   }
   /* The Blosc compressor does accept parameters */
   else if (strcmp(complib, "blosc") == 0) {
     cd_values[4] = compress;
     cd_values[5] = shuffle;
     if ( H5Pset_filter( plist_id, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 6, cd_values) < 0 )
       return -1;
   }
   /* The Blosc compressor can use other compressors */
   else if (strncmp(complib, "blosc:", 6) == 0) {
     cd_values[4] = compress;
     cd_values[5] = shuffle;
     blosc_compname = complib + 6;
     blosc_compcode = blosc_compname_to_compcode(blosc_compname);
     cd_values[6] = blosc_compcode;
     if ( H5Pset_filter( plist_id, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 7, cd_values) < 0 )
       return -1;
   }
   /* The LZO compressor does accept parameters */
   else if (strcmp(complib, "lzo") == 0) {
     if ( H5Pset_filter( plist_id, FILTER_LZO, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
       return -1;
   }
   /* The bzip2 compressor does accept parameters */
   else if (strcmp(complib, "bzip2") == 0) {
     if ( H5Pset_filter( plist_id, FILTER_BZIP2, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
       return -1;
   }
   else {
     /* Compression library not supported */
     return -1;
   }

 }

 /* Create the dataset. */
 if ( (dataset_id = H5Dcreate( loc_id, dset_name, type_id, space_id,
                               H5P_DEFAULT, plist_id, H5P_DEFAULT )) < 0 )
  goto out;

 /* Only write if there is something to write */
 if ( data )
 {

 /* Write data to the dataset. */
 if ( H5Dwrite( dataset_id, type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, data ) < 0 )
  goto out;

 }

 /* Terminate access to the data space. */
 if ( H5Sclose( space_id ) < 0 )
  goto out;

 /* End access to the property list */
 if ( H5Pclose( plist_id ) < 0 )
  goto out;

 /* Return the object unique ID for future references */
 return dataset_id;

/* error zone, gracefully close */
out:
 H5E_BEGIN_TRY {
  H5Dclose(dataset_id);
  H5Sclose(space_id);
  H5Pclose(plist_id);
 } H5E_END_TRY;
 return -1;

}