static ndio_hdf5_t set_deflate(ndio_hdf5_t self)
{
    hid_t out;
    /* level 0 is the lowest setting: no compression, fastest */
    HTRY(H5Pset_deflate(out = dataset_creation_properties(self), 0));
    return self;
Error:
    return 0;
}
void ls2_hdf5_write_locbased(const char *filename, const vector2 *anchors,
                             const size_t no_anchors, float **results,
                             const uint16_t width, const uint16_t height)
{
    hid_t file_id, grp, dataset, dataspace, plist_id;
    hsize_t dims[2];
    hsize_t chunk_dims[2] = { width, height };

    file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    grp = H5Gcreate(file_id, "/Result", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    ls2_hdf_write_anchors(file_id, anchors, no_anchors);

    dims[0] = height;
    dims[1] = width;
    for (ls2_output_variant k = 0; k < NUM_VARIANTS; k++) {
        if (results[k] == NULL)
            continue;
        char name[256];
        dataspace = H5Screate_simple(2, dims, NULL);
        plist_id = H5Pcreate(H5P_DATASET_CREATE);
        H5Pset_chunk(plist_id, 2, chunk_dims);
        H5Pset_deflate(plist_id, 9);
        snprintf(name, 256, "/Result/%s", ls2_hdf5_variant_name(k));
        dataset = H5Dcreate(file_id, name, H5T_NATIVE_FLOAT, dataspace,
                            H5P_DEFAULT, plist_id, H5P_DEFAULT);
        H5Dwrite(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                 results[k]);
        H5Sclose(dataspace);
        H5Dclose(dataset);
        H5Pclose(plist_id);   /* release the per-dataset creation property list */
    }
    H5Gclose(grp);
    H5Fclose(file_id);
}
extern "C" bool write_h5(char* fname, dtype* data_array){ #ifdef _HDF5_H hid_t out_type_id = H5T_NATIVE_FLOAT; hid_t file_id = H5Fcreate(fname, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); hid_t dataspace_id, dataset_id; int ndims = 2; hsize_t dimsH[2]; dimsH[0] = 1679; dimsH[1] = 1475; hid_t gid2 = H5Gcreate1(file_id,"data",0); dataspace_id = H5Screate_simple(ndims, dimsH, NULL); bool compress = false; hid_t dcpl = H5Pcreate (H5P_DATASET_CREATE); hsize_t chunk[2] = {64, 64}; if (compress) { H5Pset_deflate (dcpl, 9); H5Pset_chunk (dcpl, 2, chunk); } dataset_id = H5Dcreate(file_id, "/data/data", out_type_id, dataspace_id, H5P_DEFAULT, dcpl, H5P_DEFAULT); H5Dwrite(dataset_id, out_type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, data_array); H5Dclose(dataset_id); H5Sclose(dataspace_id); H5Gclose(gid2); H5Fclose(file_id); return true; #endif return false; }
/*-------------------------------------------------------------------------
 * Function: gent_compressed
 *
 * Purpose: Generate a compressed dataset in LOC_ID
 *
 *-------------------------------------------------------------------------
 */
static void gent_compressed(hid_t loc_id)
{
    hid_t   sid, did, pid;
    hsize_t dims[1] = {6};
    hsize_t chunk_dims[1] = {2};
    int     buf[6] = {1, 2, 3, 4, 5, 6};

    /* create dataspace */
    sid = H5Screate_simple(1, dims, NULL);

    /* create property list for chunking */
    pid = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(pid, 1, chunk_dims);

    /* set the deflate filter */
#if defined (H5_HAVE_FILTER_DEFLATE)
    H5Pset_deflate(pid, 1);
#endif

    /* create dataset */
    did = H5Dcreate2(loc_id, DATASET_COMPRESSED, H5T_NATIVE_INT, sid,
                     H5P_DEFAULT, pid, H5P_DEFAULT);

    /* write */
    H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

    /* close */
    H5Sclose(sid);
    H5Dclose(did);
    H5Pclose(pid);
}
//--------------------------------------------------------------------------
// Function:    DSetCreatPropList::setDeflate
///\brief       Sets compression method and compression level
///\param       level - IN: Compression level, should be in the range [0..9], inclusive
///\exception   H5::PropListIException
///\par Description
///             This function sets the compression method for this property
///             list to \c H5D_COMPRESS_DEFLATE and the compression level to
///             \a level.  Lower compression levels are faster but result in
///             less compression.
// Programmer   Binh-Minh Ribler - 2000
//--------------------------------------------------------------------------
void DSetCreatPropList::setDeflate(int level) const
{
    herr_t ret_value = H5Pset_deflate(id, level);
    if (ret_value < 0) {
        throw PropListIException("DSetCreatPropList::setDeflate",
                                 "H5Pset_deflate failed");
    }
}
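For context, a minimal usage sketch of this wrapper against the HDF5 C++ API (H5Cpp.h): it chunks a small 2-D dataset and applies deflate level 6 before creating the dataset. The file and dataset names are placeholders, not taken from the source above.

#include "H5Cpp.h"

int main()
{
    // A chunked layout is required before the deflate filter takes effect.
    hsize_t dims[2]  = {1000, 20};
    hsize_t chunk[2] = {100, 20};

    H5::DSetCreatPropList dcpl;
    dcpl.setChunk(2, chunk);
    dcpl.setDeflate(6);   // throws PropListIException on failure

    H5::H5File file("example.h5", H5F_ACC_TRUNC);
    H5::DataSpace space(2, dims);
    H5::DataSet dset = file.createDataSet("compressed",
                                          H5::PredType::NATIVE_INT,
                                          space, dcpl);
    return 0;
}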
/*-------------------------------------------------------------------------
 * Function:    main
 *
 * Purpose:
 *
 * Return:      Success:
 *
 *              Failure:
 *
 * Programmer:  Quincey Koziol
 *              Thursday, November 14, 2002
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
int main(void)
{
    hid_t   file, space, dset, dcpl;
    hsize_t dims[SPACE_RANK] = {SPACE_DIM1, SPACE_DIM2};
    hsize_t chunk_dims[SPACE_RANK] = {CHUNK_DIM1, CHUNK_DIM2};
    size_t  i, j;       /* Local index variables */

    /* Initialize the data */
    /* (Try for something easily compressible) */
    for (i = 0; i < SPACE_DIM1; i++)
        for (j = 0; j < SPACE_DIM2; j++)
            data[i][j] = (int)(j % 5);

    /* Create the file */
    file = H5Fcreate(TESTFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    if (file < 0)
        printf("file<0!\n");

    /* Create the dataspace */
    space = H5Screate_simple(SPACE_RANK, dims, NULL);
    if (space < 0)
        printf("space<0!\n");

    /* Create the dataset creation property list */
    dcpl = H5Pcreate(H5P_DATASET_CREATE);
    if (dcpl < 0)
        printf("dcpl<0!\n");

    /* Set up for deflated data */
    if (H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
        printf("H5Pset_chunk() failed!\n");
    if (H5Pset_deflate(dcpl, 9) < 0)
        printf("H5Pset_deflate() failed!\n");

    /* Create the compressed dataset */
    dset = H5Dcreate2(file, "Dataset1", H5T_NATIVE_INT, space, H5P_DEFAULT,
                      dcpl, H5P_DEFAULT);
    if (dset < 0)
        printf("dset<0!\n");

    /* Write the data to the dataset */
    if (H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data) < 0)
        printf("H5Dwrite() failed!\n");

    /* Close everything */
    if (H5Pclose(dcpl) < 0)  printf("H5Pclose() failed!\n");
    if (H5Dclose(dset) < 0)  printf("H5Dclose() failed!\n");
    if (H5Sclose(space) < 0) printf("H5Sclose() failed!\n");
    if (H5Fclose(file) < 0)  printf("H5Fclose() failed!\n");

    return 0;
}
void HDF5Output::open(const std::string& filename)
{
    file = H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    sid = H5Tcreate(H5T_COMPOUND, sizeof(OutputRow));
    H5Tinsert(sid, "D",   HOFFSET(OutputRow, D),   H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "z",   HOFFSET(OutputRow, z),   H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "SN",  HOFFSET(OutputRow, SN),  H5T_NATIVE_UINT64);
    H5Tinsert(sid, "ID",  HOFFSET(OutputRow, ID),  H5T_NATIVE_INT32);
    H5Tinsert(sid, "E",   HOFFSET(OutputRow, E),   H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "X",   HOFFSET(OutputRow, X),   H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Y",   HOFFSET(OutputRow, Y),   H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Z",   HOFFSET(OutputRow, Z),   H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Px",  HOFFSET(OutputRow, Px),  H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Py",  HOFFSET(OutputRow, Py),  H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Pz",  HOFFSET(OutputRow, Pz),  H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "SN0", HOFFSET(OutputRow, SN0), H5T_NATIVE_UINT64);
    H5Tinsert(sid, "ID0", HOFFSET(OutputRow, ID0), H5T_NATIVE_INT32);
    H5Tinsert(sid, "E0",  HOFFSET(OutputRow, E0),  H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "X0",  HOFFSET(OutputRow, X0),  H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Y0",  HOFFSET(OutputRow, Y0),  H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Z0",  HOFFSET(OutputRow, Z0),  H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "P0x", HOFFSET(OutputRow, P0x), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "P0y", HOFFSET(OutputRow, P0y), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "P0z", HOFFSET(OutputRow, P0z), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "SN1", HOFFSET(OutputRow, SN1), H5T_NATIVE_UINT64);
    H5Tinsert(sid, "ID1", HOFFSET(OutputRow, ID1), H5T_NATIVE_INT32);
    H5Tinsert(sid, "E1",  HOFFSET(OutputRow, E1),  H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "X1",  HOFFSET(OutputRow, X1),  H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Y1",  HOFFSET(OutputRow, Y1),  H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Z1",  HOFFSET(OutputRow, Z1),  H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "P1x", HOFFSET(OutputRow, P1x), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "P1y", HOFFSET(OutputRow, P1y), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "P1z", HOFFSET(OutputRow, P1z), H5T_NATIVE_DOUBLE);

    // chunked, deflate-compressed dataset creation property list
    hid_t plist = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_layout(plist, H5D_CHUNKED);
    hsize_t chunk_dims[RANK] = {BUFFER_SIZE};
    H5Pset_chunk(plist, RANK, chunk_dims);
    H5Pset_deflate(plist, 5);

    hsize_t dims[RANK] = {0};
    hsize_t max_dims[RANK] = {H5S_UNLIMITED};
    dataspace = H5Screate_simple(RANK, dims, max_dims);

    dset = H5Dcreate2(file, "CRPROPA3", sid, dataspace, H5P_DEFAULT, plist,
                      H5P_DEFAULT);

    H5Pclose(plist);
    buffer.reserve(BUFFER_SIZE);
}
void DCDataSet::setCompression() throw (DCException)
{
    if (this->compression && getPhysicalSize().getScalarSize() != 0) {
        // shuffling reorders bytes for better compression
        // set gzip compression level (1 = lowest, 9 = highest)
        if (H5Pset_shuffle(this->dsetProperties) < 0 ||
            H5Pset_deflate(this->dsetProperties, 1) < 0)
            throw DCException(getExceptionString(
                    "setCompression: Failed to set compression"));
    }
}
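The shuffle filter pays off only in combination with a compressor: it groups the k-th bytes of all elements together, so runs of similar bytes sit next to each other by the time deflate sees them. A minimal sketch of the same shuffle-then-deflate pipeline on a plain dcpl, assuming the caller supplies the chunk shape; the function name is illustrative:

#include "hdf5.h"

/* Build a dcpl that shuffles bytes, then deflates; the order in which
 * filters are added to the property list is the order HDF5 applies
 * them on write. */
static hid_t make_shuffle_deflate_dcpl(int rank, const hsize_t *chunk_dims)
{
    hid_t dcpl = H5Pcreate(H5P_DATASET_CREATE);
    if (dcpl < 0)
        return -1;
    if (H5Pset_chunk(dcpl, rank, chunk_dims) < 0 ||
        H5Pset_shuffle(dcpl) < 0 ||        /* byte reordering first */
        H5Pset_deflate(dcpl, 6) < 0) {     /* then gzip, level 6 */
        H5Pclose(dcpl);
        return -1;
    }
    return dcpl;
}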
herr_t data_to_h5(const std::vector<T>& str_vec, hid_t group_id,
                  const std::string& dataset_name, bool release_type,
                  uint compression_level = 6)
{
    herr_t status;
    hsize_t dims[1] = {str_vec.size()};

    // create the property list which allows for compression
    hid_t prop_id = H5Pcreate(H5P_DATASET_CREATE);
    // chunk size is the same as the vector size
    status = H5Pset_chunk(prop_id, 1, dims);
    assert(status >= 0);
    status = H5Pset_deflate(prop_id, compression_level);
    assert(status >= 0);

    // create the data type
    hid_t datatype_id = get_datatype_id(str_vec);

    // create the dataspace
    hid_t dataspace_id = H5Screate_simple(1, dims, NULL);

    // create the dataset
    hid_t dataset_id = H5Dcreate(group_id, dataset_name.c_str(), datatype_id,
                                 dataspace_id, H5P_DEFAULT, prop_id, H5P_DEFAULT);

    // get the ptrs from the string and write out
    auto ptr = vec_to_ptr(str_vec);
    status = H5Dwrite(dataset_id, datatype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, ptr);
    assert(status >= 0);

    status = H5Pclose(prop_id);
    assert(status >= 0);
    status = H5Dclose(dataset_id);
    assert(status >= 0);
    status = H5Sclose(dataspace_id);
    assert(status >= 0);

    if (release_type) {
        status = H5Tclose(datatype_id);
        assert(status >= 0);
        delete [] ptr;
    }

    return status;
}
/**
 * Create the dataset.
 * @throw ios::failure
 *
 * @param [in] DatasetName      - Dataset name
 * @param [in] DimensionSizes   - Dataset dimension sizes
 * @param [in] ChunkSizes       - 3D chunk size
 * @param [in] CompressionLevel - Compression level
 * @return Dataset_id
 */
hid_t THDF5_File::CreateFloatDataset(const char            *DatasetName,
                                     const TDimensionSizes &DimensionSizes,
                                     const TDimensionSizes &ChunkSizes,
                                     const int              CompressionLevel)
{
    const int RANK = 3;
    hsize_t Dims [RANK] = {DimensionSizes.Z, DimensionSizes.Y, DimensionSizes.X};
    hsize_t Chunk[RANK] = {ChunkSizes.Z, ChunkSizes.Y, ChunkSizes.X};
    hid_t   Property_list;
    herr_t  Status;

    hid_t dataspace_id = H5Screate_simple(RANK, Dims, NULL);

    // set chunk size
    Property_list = H5Pcreate(H5P_DATASET_CREATE);
    Status = H5Pset_chunk(Property_list, RANK, Chunk);
    if (Status < 0) {
        char ErrorMessage[256];
        sprintf(ErrorMessage, HDF5_ERR_FMT_DatasetNotOpened,
                FileName.c_str(), DatasetName);
        throw ios::failure(ErrorMessage);
    }

    // set compression level
    Status = H5Pset_deflate(Property_list, CompressionLevel);
    if (Status < 0) {
        char ErrorMessage[256];
        sprintf(ErrorMessage, HDF5_ERR_FMT_CouldNotSetCompression,
                FileName.c_str(), DatasetName, CompressionLevel);
        throw ios::failure(ErrorMessage);
    }

    // create dataset
    hid_t HDF5_dataset_id = H5Dcreate(HDF5_FileId, DatasetName, H5T_NATIVE_FLOAT,
                                      dataspace_id, H5P_DEFAULT, Property_list,
                                      H5P_DEFAULT);
    if (HDF5_dataset_id == H5I_INVALID_HID) {
        char ErrorMessage[256];
        sprintf(ErrorMessage, HDF5_ERR_FMT_DatasetNotOpened,
                FileName.c_str(), DatasetName);
        throw ios::failure(ErrorMessage);
    }

    H5Pclose(Property_list);
    H5Sclose(dataspace_id);   // the dataset keeps its own copy of the dataspace

    return HDF5_dataset_id;
}// end of CreateFloatDataset
/** Create a new 1D dataset. Make it extensible. */
hid_t HDF5DataWriter::create_dataset(hid_t parent_id, string name)
{
    herr_t status;
    hsize_t dims[1] = {0};
    hsize_t maxdims[] = {H5S_UNLIMITED};
    hsize_t chunk_dims[] = {chunkSize_};

    hid_t chunk_params = H5Pcreate(H5P_DATASET_CREATE);
    status = H5Pset_chunk(chunk_params, 1, chunk_dims);
    assert(status >= 0);

    if (compressor_ == "zlib") {
        status = H5Pset_deflate(chunk_params, compression_);
    } else if (compressor_ == "szip") {
        // this needs more study
        unsigned sz_opt_mask = H5_SZIP_NN_OPTION_MASK;
        status = H5Pset_szip(chunk_params, sz_opt_mask,
                             HDF5WriterBase::CHUNK_SIZE);
    }

    hid_t dataspace = H5Screate_simple(1, dims, maxdims);
    hid_t dataset_id = H5Dcreate2(parent_id, name.c_str(), H5T_NATIVE_DOUBLE,
                                  dataspace, H5P_DEFAULT, chunk_params,
                                  H5P_DEFAULT);
    H5Pclose(chunk_params);   // release temporaries; the dataset holds copies
    H5Sclose(dataspace);
    return dataset_id;
}
//-*****************************************************************************
//-*****************************************************************************
// GZIP COMPRESSION FOR DATASETS
//-*****************************************************************************
//-*****************************************************************************
hid_t DsetGzipCreatePlist( const Dimensions &dims, int level )
{
    herr_t status;
    hid_t ID = H5Pcreate( H5P_DATASET_CREATE );
    ABCA_ASSERT( ID >= 0, "DsetGzipCreatePlist: H5Pcreate failed" );

    // Chunking.
    HDimensions hdims( dims );
    status = H5Pset_chunk( ID, hdims.rank(), hdims.rootPtr() );
    ABCA_ASSERT( status >= 0,
                 "DsetGzipCreatePlist: H5Pset_chunk() failed" );

    // Clamp the level to gzip's valid range [0, 9].
    level = level < 0 ? 0 : level > 9 ? 9 : level;
    status = H5Pset_deflate( ID, ( unsigned int )level );
    ABCA_ASSERT( status >= 0,
                 "DsetGzipCreatePlist: H5Pset_deflate() failed" );

    return ID;
}
void fh5_prepare_write_(int *ndims, int *dims, int *hdferr)
{
    extern hid_t fileid, dsetid, dspcid, mspcid, propid;
    int i, j;
    herr_t herr;
    hsize_t dimsc[7]      = {1, 1, 1, 1, 1, 1, 1};
    hsize_t maxdims[7]    = {1, 1, 1, 1, 1, 1, 1};
    hsize_t chunk_size[7] = {0, 0, 0, 0, 0, 0, 0};

    /* reverse dimensions for Fortran */
    for (i = 0; i < *ndims; i++) {
        j = *ndims - i - 1;
        dimsc[i] = dims[j];
        chunk_size[i] = dims[j];
        maxdims[i] = dims[j];
    }

    /* Create the data space for the dataset. */
    mspcid = H5Screate_simple(*ndims, dimsc, maxdims);
    //printf("fh5_prepw - create 1: %d\n",mspcid);

    /* Create properties for gzip compression. */
    propid = H5Pcreate(H5P_DATASET_CREATE);
    //printf("fh5_prepw - propid: %d\n",propid);
    herr = H5Pset_chunk(propid, *ndims, chunk_size);
    herr = H5Pset_shuffle(propid);
    herr = H5Pset_deflate(propid, 5);

    *hdferr = herr;
    //printf("fh5_prepw - compress: %d\n",mspcid);
    return;
}
/*-------------------------------------------------------------------------
 * Function:    test_skip_compress_write2
 *
 * Purpose:     Test skipping compression filter when there are three filters
 *              for the dataset
 *
 * Return:      Success: 0
 *              Failure: 1
 *
 * Programmer:  Raymond Lu
 *              30 November 2012
 *
 *-------------------------------------------------------------------------
 */
static int test_skip_compress_write2(hid_t file)
{
    hid_t    dataspace = -1, dataset = -1;
    hid_t    mem_space = -1;
    hid_t    cparms = -1, dxpl = -1;
    hsize_t  dims[2] = {NX, NY};
    hsize_t  maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
    hsize_t  chunk_dims[2] = {CHUNK_NX, CHUNK_NY};
    herr_t   status;
    int      i, j, n;

    unsigned filter_mask = 0;
    int      origin_direct_buf[CHUNK_NX][CHUNK_NY];
    int      direct_buf[CHUNK_NX][CHUNK_NY];
    int      check_chunk[CHUNK_NX][CHUNK_NY];
    hsize_t  offset[2] = {0, 0};
    size_t   buf_size = CHUNK_NX * CHUNK_NY * sizeof(int);
    int      aggression = 9;    /* Compression aggression setting */

    hsize_t  start[2];   /* Start of hyperslab */
    hsize_t  stride[2];  /* Stride of hyperslab */
    hsize_t  count[2];   /* Block count */
    hsize_t  block[2];   /* Block sizes */

    TESTING("skipping compression filters but keep two other filters");

    /*
     * Create the data space with unlimited dimensions.
     */
    if((dataspace = H5Screate_simple(RANK, dims, maxdims)) < 0)
        goto error;
    if((mem_space = H5Screate_simple(RANK, chunk_dims, NULL)) < 0)
        goto error;

    /*
     * Modify dataset creation properties, i.e. enable chunking and compression.
     * The order of filters is bogus 1 + deflate + bogus 2.
     */
    if((cparms = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        goto error;
    if((status = H5Pset_chunk(cparms, RANK, chunk_dims)) < 0)
        goto error;

    /* Register and enable first bogus filter */
    if(H5Zregister(H5Z_BOGUS1) < 0)
        goto error;
    if(H5Pset_filter(cparms, H5Z_FILTER_BOGUS1, 0, (size_t)0, NULL) < 0)
        goto error;

    /* Enable compression filter */
    if((status = H5Pset_deflate(cparms, (unsigned)aggression)) < 0)
        goto error;

    /* Register and enable second bogus filter */
    if(H5Zregister(H5Z_BOGUS2) < 0)
        goto error;
    if(H5Pset_filter(cparms, H5Z_FILTER_BOGUS2, 0, (size_t)0, NULL) < 0)
        goto error;

    /*
     * Create a new dataset within the file using cparms creation properties.
     */
    if((dataset = H5Dcreate2(file, DATASETNAME3, H5T_NATIVE_INT, dataspace,
                             H5P_DEFAULT, cparms, H5P_DEFAULT)) < 0)
        goto error;

    if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0)
        goto error;

    /* Initialize data for one chunk.  Apply the operations of the two bogus
     * filters to the chunk. */
    for(i = n = 0; i < CHUNK_NX; i++)
        for(j = 0; j < CHUNK_NY; j++) {
            origin_direct_buf[i][j] = n++;
            direct_buf[i][j] = (origin_direct_buf[i][j] + ADD_ON) * FACTOR;
        }

    /* Write the uncompressed chunk data to the dataset using the direct
     * writing function, indicating that the compression filter is skipped
     * while the other two bogus filters are kept. */
    offset[0] = CHUNK_NX;
    offset[1] = CHUNK_NY;

    /* compression filter is the middle one to be skipped */
    filter_mask = 0x00000002;

    if((status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size,
                                 direct_buf)) < 0)
        goto error;

    if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0)
        goto error;
    if(H5Dclose(dataset) < 0)
        goto error;
    if((dataset = H5Dopen2(file, DATASETNAME3, H5P_DEFAULT)) < 0)
        goto error;

    /*
     * Select hyperslab for one chunk in the file
     */
    start[0]  = CHUNK_NX; start[1]  = CHUNK_NY;
    stride[0] = 1;        stride[1] = 1;
    count[0]  = 1;        count[1]  = 1;
    block[0]  = CHUNK_NX; block[1]  = CHUNK_NY;
    if((status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, stride,
                                     count, block)) < 0)
        goto error;

    /* Read the chunk back */
    if((status = H5Dread(dataset, H5T_NATIVE_INT, mem_space, dataspace,
                         H5P_DEFAULT, check_chunk)) < 0)
        goto error;

    /* Check that the values read are the same as the values written */
    for(i = 0; i < CHUNK_NX; i++) {
        for(j = 0; j < CHUNK_NY; j++) {
            if(origin_direct_buf[i][j] != check_chunk[i][j]) {
                printf("    1. Read different values than written.");
                printf("    At index %d,%d\n", i, j);
                printf("    origin_direct_buf=%d, check_chunk=%d\n",
                       origin_direct_buf[i][j], check_chunk[i][j]);
                goto error;
            }
        }
    }

    /*
     * Close/release resources.
     */
    H5Dclose(dataset);
    H5Sclose(mem_space);
    H5Sclose(dataspace);
    H5Pclose(cparms);
    H5Pclose(dxpl);

    PASSED();
    return 0;

error:
    H5E_BEGIN_TRY {
        H5Dclose(dataset);
        H5Sclose(mem_space);
        H5Sclose(dataspace);
        H5Pclose(cparms);
        H5Pclose(dxpl);
    } H5E_END_TRY;

    return 1;
}
static int test_direct_chunk_write(hid_t file)
{
    hid_t    dataspace = -1, dataset = -1;
    hid_t    mem_space = -1;
    hid_t    cparms = -1, dxpl = -1;
    hsize_t  dims[2] = {NX, NY};
    hsize_t  maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
    hsize_t  chunk_dims[2] = {CHUNK_NX, CHUNK_NY};
    herr_t   status;
    int      ret;
    int      data[NX][NY];
    int      i, j, n;

    unsigned filter_mask = 0;
    int      direct_buf[CHUNK_NX][CHUNK_NY];
    int      check_chunk[CHUNK_NX][CHUNK_NY];
    hsize_t  offset[2] = {0, 0};
    size_t   buf_size = CHUNK_NX * CHUNK_NY * sizeof(int);

    const Bytef *z_src = (const Bytef*)(direct_buf);
    Bytef       *z_dst;            /* destination buffer */
    uLongf       z_dst_nbytes = (uLongf)DEFLATE_SIZE_ADJUST(buf_size);
    uLong        z_src_nbytes = (uLong)buf_size;
    int          aggression = 9;   /* Compression aggression setting */
    void        *outbuf = NULL;    /* Pointer to new buffer */

    hsize_t  start[2];   /* Start of hyperslab */
    hsize_t  stride[2];  /* Stride of hyperslab */
    hsize_t  count[2];   /* Block count */
    hsize_t  block[2];   /* Block sizes */

    TESTING("basic functionality of H5DOwrite_chunk");

    /*
     * Create the data space with unlimited dimensions.
     */
    if((dataspace = H5Screate_simple(RANK, dims, maxdims)) < 0)
        goto error;
    if((mem_space = H5Screate_simple(RANK, chunk_dims, NULL)) < 0)
        goto error;

    /*
     * Modify dataset creation properties, i.e. enable chunking and compression
     */
    if((cparms = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        goto error;
    if((status = H5Pset_chunk(cparms, RANK, chunk_dims)) < 0)
        goto error;
    if((status = H5Pset_deflate(cparms, (unsigned)aggression)) < 0)
        goto error;

    /*
     * Create a new dataset within the file using cparms creation properties.
     */
    if((dataset = H5Dcreate2(file, DATASETNAME1, H5T_NATIVE_INT, dataspace,
                             H5P_DEFAULT, cparms, H5P_DEFAULT)) < 0)
        goto error;

    /* Initialize the dataset */
    for(i = n = 0; i < NX; i++)
        for(j = 0; j < NY; j++)
            data[i][j] = n++;

    if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0)
        goto error;

    /*
     * Write the data for the dataset.  It should stay in the chunk cache.
     * It will be evicted from the cache by the H5DOwrite_chunk calls.
     */
    if((status = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
                          dxpl, data)) < 0)
        goto error;

    /* Initialize data for one chunk */
    for(i = n = 0; i < CHUNK_NX; i++)
        for(j = 0; j < CHUNK_NY; j++)
            direct_buf[i][j] = n++;

    /* Allocate output (compressed) buffer */
    outbuf = HDmalloc(z_dst_nbytes);
    z_dst = (Bytef *)outbuf;

    /* Perform compression from the source to the destination buffer */
    ret = compress2(z_dst, &z_dst_nbytes, z_src, z_src_nbytes, aggression);

    /* Check for various zlib errors */
    if(Z_BUF_ERROR == ret) {
        fprintf(stderr, "overflow");
        goto error;
    } else if(Z_MEM_ERROR == ret) {
        fprintf(stderr, "deflate memory error");
        goto error;
    } else if(Z_OK != ret) {
        fprintf(stderr, "other deflate error");
        goto error;
    }

    /* Write the compressed chunk data repeatedly to cover all the chunks in
     * the dataset, using the direct writing function. */
    for(i = 0; i < NX/CHUNK_NX; i++) {
        for(j = 0; j < NY/CHUNK_NY; j++) {
            status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset,
                                     z_dst_nbytes, outbuf);
            offset[1] += CHUNK_NY;
        }
        offset[0] += CHUNK_NX;
        offset[1] = 0;
    }

    if(outbuf)
        HDfree(outbuf);

    if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0)
        goto error;
    if(H5Dclose(dataset) < 0)
        goto error;
    if((dataset = H5Dopen2(file, DATASETNAME1, H5P_DEFAULT)) < 0)
        goto error;

    /*
     * Select hyperslab for one chunk in the file
     */
    start[0]  = CHUNK_NX; start[1]  = CHUNK_NY;
    stride[0] = 1;        stride[1] = 1;
    count[0]  = 1;        count[1]  = 1;
    block[0]  = CHUNK_NX; block[1]  = CHUNK_NY;
    if((status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, stride,
                                     count, block)) < 0)
        goto error;

    /* Read the chunk back */
    if((status = H5Dread(dataset, H5T_NATIVE_INT, mem_space, dataspace,
                         H5P_DEFAULT, check_chunk)) < 0)
        goto error;

    /* Check that the values read are the same as the values written */
    for(i = 0; i < CHUNK_NX; i++) {
        for(j = 0; j < CHUNK_NY; j++) {
            if(direct_buf[i][j] != check_chunk[i][j]) {
                printf("    1. Read different values than written.");
                printf("    At index %d,%d\n", i, j);
                printf("    direct_buf=%d, check_chunk=%d\n",
                       direct_buf[i][j], check_chunk[i][j]);
                goto error;
            }
        }
    }

    /* Reinitialize different data for one chunk */
    for(i = 0; i < CHUNK_NX; i++)
        for(j = 0; j < CHUNK_NY; j++)
            direct_buf[i][j] = i + j;

    /* Allocate output (compressed) buffer */
    outbuf = HDmalloc(z_dst_nbytes);
    z_dst = (Bytef *)outbuf;

    /* Perform compression from the source to the destination buffer */
    ret = compress2(z_dst, &z_dst_nbytes, z_src, z_src_nbytes, aggression);

    /* Check for various zlib errors */
    if(Z_BUF_ERROR == ret) {
        fprintf(stderr, "overflow");
        goto error;
    } else if(Z_MEM_ERROR == ret) {
        fprintf(stderr, "deflate memory error");
        goto error;
    } else if(Z_OK != ret) {
        fprintf(stderr, "other deflate error");
        goto error;
    }

    /* Rewrite the compressed chunk data repeatedly to cover all the chunks
     * in the dataset, using the direct writing function. */
    offset[0] = offset[1] = 0;
    for(i = 0; i < NX/CHUNK_NX; i++) {
        for(j = 0; j < NY/CHUNK_NY; j++) {
            status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset,
                                     z_dst_nbytes, outbuf);
            offset[1] += CHUNK_NY;
        }
        offset[0] += CHUNK_NX;
        offset[1] = 0;
    }

    if(outbuf)
        HDfree(outbuf);

    if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0)
        goto error;
    if(H5Dclose(dataset) < 0)
        goto error;
    if((dataset = H5Dopen2(file, DATASETNAME1, H5P_DEFAULT)) < 0)
        goto error;

    /* Read the chunk back */
    if((status = H5Dread(dataset, H5T_NATIVE_INT, mem_space, dataspace,
                         H5P_DEFAULT, check_chunk)) < 0)
        goto error;

    /* Check that the values read are the same as the values written */
    for(i = 0; i < CHUNK_NX; i++) {
        for(j = 0; j < CHUNK_NY; j++) {
            if(direct_buf[i][j] != check_chunk[i][j]) {
                printf("    2. Read different values than written.");
                printf("    At index %d,%d\n", i, j);
                printf("    direct_buf=%d, check_chunk=%d\n",
                       direct_buf[i][j], check_chunk[i][j]);
                goto error;
            }
        }
    }

    /*
     * Close/release resources.
     */
    H5Dclose(dataset);
    H5Sclose(mem_space);
    H5Sclose(dataspace);
    H5Pclose(cparms);
    H5Pclose(dxpl);

    PASSED();
    return 0;

error:
    H5E_BEGIN_TRY {
        H5Dclose(dataset);
        H5Sclose(mem_space);
        H5Sclose(dataspace);
        H5Pclose(cparms);
        H5Pclose(dxpl);
    } H5E_END_TRY;

    if(outbuf)
        HDfree(outbuf);

    return 1;
}
void HpData::writeH5(hid_t &file_id, string group_name)
{
    herr_t status;
    hid_t group_id = H5CreateOrOpenGroup(file_id, group_name);
    hid_t dataset_id;
    hid_t dataspace_id;
    vector<unsigned int> coord_buf(2, 0);

    // Write the region origin & dimensions
    hsize_t region_dims[1];

    // origin
    coord_buf[0] = origin_.first;
    coord_buf[1] = origin_.second;
    region_dims[0] = 2;
    dataspace_id = H5Screate_simple(1, region_dims, NULL);
    dataset_id = H5Dcreate2(group_id, "region_origin", H5T_NATIVE_UINT,
                            dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, &coord_buf[0]);
    status = H5Dclose(dataset_id);
    status = H5Sclose(dataspace_id);

    // dim
    coord_buf[0] = dim_.first;
    coord_buf[1] = dim_.second;
    region_dims[0] = 2;
    dataspace_id = H5Screate_simple(1, region_dims, NULL);
    dataset_id = H5Dcreate2(group_id, "region_dim", H5T_NATIVE_UINT,
                            dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, &coord_buf[0]);
    status = H5Dclose(dataset_id);
    status = H5Sclose(dataspace_id);

    unsigned int n_row = 0;
    unsigned int n_col = 0;
    if (hp_count_.begin() != hp_count_.end()) {
        n_row = hp_count_.begin()->second.size();
        n_col = (n_row > 0) ? hp_count_.begin()->second[0].size() : 0;
    }

    // hp_data_dim
    coord_buf[0] = n_row;
    coord_buf[1] = n_col;
    region_dims[0] = 2;
    dataspace_id = H5Screate_simple(1, region_dims, NULL);
    dataset_id = H5Dcreate2(group_id, "hp_data_dim", H5T_NATIVE_UINT,
                            dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, &coord_buf[0]);
    status = H5Dclose(dataset_id);
    status = H5Sclose(dataspace_id);

    // now write the per-nuc error tables
    vector<uint64_t> hp_table_buf;
    map<char, vector<vector<uint64_t> > >::iterator it;
    for (it = hp_count_.begin(); it != hp_count_.end(); ++it) {
        string nuc;
        stringstream ss;
        ss << it->first;
        ss >> nuc;
        LoadHpDataBuffer(n_col, n_row, hp_table_buf, it->second);

        // Create compressed dataset
        hsize_t dims[2];
        dims[0] = n_row;
        dims[1] = n_col;
        dataspace_id = H5Screate_simple(2, dims, NULL);
        hid_t plist_id = H5Pcreate(H5P_DATASET_CREATE);
        hsize_t cdims[2];
        cdims[0] = min(n_row, (unsigned int)20);
        cdims[1] = min(n_col, (unsigned int)20);
        status = H5Pset_chunk(plist_id, 2, cdims);
        status = H5Pset_deflate(plist_id, 9);
        dataset_id = H5Dcreate2(group_id, nuc.c_str(), H5T_NATIVE_UINT_LEAST64,
                                dataspace_id, H5P_DEFAULT, plist_id, H5P_DEFAULT);
        status = H5Dwrite(dataset_id, H5T_NATIVE_UINT_LEAST64, H5S_ALL, H5S_ALL,
                          H5P_DEFAULT, &hp_table_buf[0]);
        status = H5Dclose(dataset_id);
        status = H5Pclose(plist_id);
        status = H5Sclose(dataspace_id);
    }

    status = H5Gclose(group_id);
}
hid_t H5PTcreate_fl(hid_t loc_id, const char *dset_name, hid_t dtype_id,
                    hsize_t chunk_size, int compression)
{
    htbl_t *table = NULL;
    hid_t dset_id = H5I_BADID;
    hid_t space_id = H5I_BADID;
    hid_t plist_id = H5I_BADID;
    hsize_t dims[1];
    hsize_t dims_chunk[1];
    hsize_t maxdims[1];
    hid_t ret_value;

    /* Register the packet table ID type if this is the first table created */
    if(H5PT_ptable_id_type < 0)
        if((H5PT_ptable_id_type = H5Iregister_type((size_t)H5PT_HASH_TABLE_SIZE,
                                                   0, (H5I_free_t)free)) < 0)
            goto out;

    /* Get memory for the table identifier */
    table = (htbl_t *)malloc(sizeof(htbl_t));

    /* Create a simple data space with unlimited size */
    dims[0] = 0;
    dims_chunk[0] = chunk_size;
    maxdims[0] = H5S_UNLIMITED;
    if((space_id = H5Screate_simple(1, dims, maxdims)) < 0)
        goto out;

    /* Modify dataset creation properties to enable chunking */
    plist_id = H5Pcreate(H5P_DATASET_CREATE);
    if(H5Pset_chunk(plist_id, 1, dims_chunk) < 0)
        goto out;
    if(compression >= 0 && compression <= 9)
        if(H5Pset_deflate(plist_id, (unsigned)compression) < 0)
            goto out;

    /* Create the dataset. */
    if((dset_id = H5Dcreate2(loc_id, dset_name, dtype_id, space_id,
                             H5P_DEFAULT, plist_id, H5P_DEFAULT)) < 0)
        goto out;

    /* Terminate access to the data space. */
    if(H5Sclose(space_id) < 0)
        goto out;

    /* End access to the property list */
    if(H5Pclose(plist_id) < 0)
        goto out;

    /* Create the table identifier */
    table->dset_id = dset_id;
    if((table->type_id = H5Tcopy(dtype_id)) < 0)
        goto out;
    H5PT_create_index(table);
    table->size = 0;

    /* Get an ID for this table */
    ret_value = H5Iregister(H5PT_ptable_id_type, table);
    if(ret_value != H5I_INVALID_HID)
        H5PT_ptable_count++;
    else
        H5PT_close(table);

    return ret_value;

out:
    H5E_BEGIN_TRY
    H5Sclose(space_id);
    H5Pclose(plist_id);
    H5Dclose(dset_id);
    if(table)
        free(table);
    H5E_END_TRY
    return H5I_INVALID_HID;
}
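A short usage sketch for the function above, assuming the HDF5 high-level headers (hdf5_hl.h) and an illustrative record struct; H5PTappend and H5PTclose are the standard packet-table companion calls:

#include "hdf5.h"
#include "hdf5_hl.h"

typedef struct { double t; int id; } record_t;

int write_packets(hid_t file_id, const record_t *recs, size_t n)
{
    /* Build a compound type matching record_t. */
    hid_t rec_tid = H5Tcreate(H5T_COMPOUND, sizeof(record_t));
    H5Tinsert(rec_tid, "t",  HOFFSET(record_t, t),  H5T_NATIVE_DOUBLE);
    H5Tinsert(rec_tid, "id", HOFFSET(record_t, id), H5T_NATIVE_INT);

    /* Chunked, deflate-compressed (level 5) packet table. */
    hid_t pt = H5PTcreate_fl(file_id, "packets", rec_tid, (hsize_t)512, 5);
    if (pt == H5I_INVALID_HID) {
        H5Tclose(rec_tid);
        return -1;
    }

    H5PTappend(pt, n, recs);   /* append n records at the end */

    H5PTclose(pt);
    H5Tclose(rec_tid);
    return 0;
}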
void RegionalSummary::writeH5(hid_t &file_id, string group_name)
{
    herr_t status;
    hid_t group_id = H5CreateOrOpenGroup(file_id, group_name);
    hid_t dataset_id;
    hid_t dataspace_id;
    hsize_t h5_dims[1];

    // region_origin
    vector<unsigned int> coord_buf(2, 0);
    coord_buf[0] = origin_.first;
    coord_buf[1] = origin_.second;
    h5_dims[0] = 2;
    dataspace_id = H5Screate_simple(1, h5_dims, NULL);
    dataset_id = H5Dcreate2(group_id, "region_origin", H5T_NATIVE_UINT,
                            dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, &coord_buf[0]);
    status = H5Dclose(dataset_id);
    status = H5Sclose(dataspace_id);

    // region_dim
    coord_buf[0] = dim_.first;
    coord_buf[1] = dim_.second;
    h5_dims[0] = 2;
    dataspace_id = H5Screate_simple(1, h5_dims, NULL);
    dataset_id = H5Dcreate2(group_id, "region_dim", H5T_NATIVE_UINT,
                            dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, &coord_buf[0]);
    status = H5Dclose(dataset_id);
    status = H5Sclose(dataspace_id);

    // n_err_
    h5_dims[0] = 1;
    dataspace_id = H5Screate_simple(1, h5_dims, NULL);
    dataset_id = H5Dcreate2(group_id, "n_err", H5T_NATIVE_UINT_LEAST64,
                            dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT_LEAST64, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, &n_err_);
    status = H5Dclose(dataset_id);
    status = H5Sclose(dataspace_id);

    // n_aligned_
    h5_dims[0] = 1;
    dataspace_id = H5Screate_simple(1, h5_dims, NULL);
    dataset_id = H5Dcreate2(group_id, "n_aligned", H5T_NATIVE_UINT_LEAST64,
                            dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT_LEAST64, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, &n_aligned_);
    status = H5Dclose(dataset_id);
    status = H5Sclose(dataspace_id);

    // data_dim
    AssertDims();
    coord_buf[0] = n_flow_;
    coord_buf[1] = max_hp_;
    h5_dims[0] = 2;
    dataspace_id = H5Screate_simple(1, h5_dims, NULL);
    dataset_id = H5Dcreate2(group_id, "data_dim", H5T_NATIVE_UINT,
                            dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, &coord_buf[0]);
    status = H5Dclose(dataset_id);
    status = H5Sclose(dataspace_id);

    // hp_count_ and hp_err_
    hsize_t dims[2];
    dims[0] = n_flow_;
    dims[1] = max_hp_;
    vector<uint64_t> data_buf;
    hsize_t cdims[2];
    hid_t plist_id;

    // hp_count_
    LoadDataBuffer(max_hp_, n_flow_, data_buf, hp_count_);
    dataspace_id = H5Screate_simple(2, dims, NULL);
    plist_id = H5Pcreate(H5P_DATASET_CREATE);
    cdims[0] = min(n_flow_, (unsigned int)20);
    cdims[1] = min(max_hp_, (unsigned int)20);
    status = H5Pset_chunk(plist_id, 2, cdims);
    status = H5Pset_deflate(plist_id, 9);
    dataset_id = H5Dcreate2(group_id, "hp_count", H5T_NATIVE_UINT_LEAST64,
                            dataspace_id, H5P_DEFAULT, plist_id, H5P_DEFAULT);
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT_LEAST64, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, &data_buf[0]);
    status = H5Dclose(dataset_id);
    status = H5Pclose(plist_id);
    status = H5Sclose(dataspace_id);

    // hp_err_
    LoadDataBuffer(max_hp_, n_flow_, data_buf, hp_err_);
    dataspace_id = H5Screate_simple(2, dims, NULL);
    plist_id = H5Pcreate(H5P_DATASET_CREATE);
    cdims[0] = min(n_flow_, (unsigned int)20);
    cdims[1] = min(max_hp_, (unsigned int)20);
    status = H5Pset_chunk(plist_id, 2, cdims);
    status = H5Pset_deflate(plist_id, 9);
    dataset_id = H5Dcreate2(group_id, "hp_err", H5T_NATIVE_UINT_LEAST64,
                            dataspace_id, H5P_DEFAULT, plist_id, H5P_DEFAULT);
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT_LEAST64, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, &data_buf[0]);
    status = H5Dclose(dataset_id);
    status = H5Pclose(plist_id);
    status = H5Sclose(dataspace_id);

    status = H5Gclose(group_id);
}
int main(void)
{
    hid_t   file;
    hid_t   grp;
    hid_t   dataset, dataspace;
    hid_t   plist;
    herr_t  status;
    hsize_t dims[2];
    hsize_t cdims[2];
    int     idx_f, idx_g;

    /*
     * Create a file.
     */
    file = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    /*
     * Create a group in the file.
     */
    grp = H5Gcreate(file, "/Data", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    /*
     * Create dataset "Compressed_Data" in the group using absolute name.
     * The dataset creation property list is modified to use GZIP compression
     * with the compression effort set to 6.
     * Note that compression can be used only when the dataset is chunked.
     */
    dims[0] = 1000;
    dims[1] = 20;
    cdims[0] = 20;
    cdims[1] = 20;
    dataspace = H5Screate_simple(RANK, dims, NULL);
    plist = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(plist, 2, cdims);
    H5Pset_deflate(plist, 6);
    dataset = H5Dcreate(file, "/Data/Compressed_Data", H5T_NATIVE_INT,
                        dataspace, H5P_DEFAULT, plist, H5P_DEFAULT);

    /*
     * Close the first dataset.
     */
    H5Sclose(dataspace);
    H5Dclose(dataset);

    /*
     * Create the second dataset.
     */
    dims[0] = 500;
    dims[1] = 20;
    dataspace = H5Screate_simple(RANK, dims, NULL);
    dataset = H5Dcreate(file, "/Data/Float_Data", H5T_NATIVE_FLOAT,
                        dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    /*
     * Close the second dataset and file.
     */
    H5Sclose(dataspace);
    H5Dclose(dataset);
    H5Pclose(plist);
    H5Gclose(grp);
    H5Fclose(file);

    /*
     * Now reopen the file and group in the file.
     */
    file = H5Fopen(H5FILE_NAME, H5F_ACC_RDWR, H5P_DEFAULT);
    grp = H5Gopen(file, "Data", H5P_DEFAULT);

    /*
     * Access "Compressed_Data" dataset in the group.
     */
    dataset = H5Dopen(grp, "Compressed_Data", H5P_DEFAULT);
    if (dataset < 0)
        printf(" Dataset 'Compressed_Data' is not found. \n");
    printf("\"/Data/Compressed_Data\" dataset is open \n");

    /*
     * Close the dataset.
     */
    status = H5Dclose(dataset);

    /*
     * Create hard link to the Data group.
     */
    status = H5Lcreate_hard(file, "Data", H5L_SAME_LOC, "Data_new",
                            H5P_DEFAULT, H5P_DEFAULT);

    /*
     * We can access the "Compressed_Data" dataset using the created
     * hard link "Data_new".
     */
    dataset = H5Dopen(file, "/Data_new/Compressed_Data", H5P_DEFAULT);
    if (dataset < 0)
        printf(" Dataset is not found. \n");
    printf("\"/Data_new/Compressed_Data\" dataset is open \n");

    /*
     * Close the dataset.
     */
    status = H5Dclose(dataset);

    /*
     * Use iterator to see the names of the objects in the root group.
     */
    idx_f = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, NULL, file_info, NULL);

    /*
     * Unlink name "Data" and use iterator to see the names
     * of the objects in the file root directory.
     */
    if (H5Ldelete(file, "Data", H5P_DEFAULT) < 0)
        printf(" H5Ldelete failed \n");
    else
        printf("\"Data\" is unlinked \n");

    idx_f = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, NULL, file_info, NULL);

    /*
     * Use iterator to see the names of the objects in the group /Data_new.
     */
    idx_g = H5Literate_by_name(grp, "/Data_new", H5_INDEX_NAME, H5_ITER_INC,
                               NULL, group_info, NULL, H5P_DEFAULT);

    /*
     * Close the file.
     */
    H5Gclose(grp);
    H5Fclose(file);

    return 0;
}
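Since deflate is an optional filter, a given build of the library may lack it (the H5_HAVE_FILTER_DEFLATE guard in an earlier snippet exists for the same reason). A small hedged availability check using the standard H5Zfilter_avail/H5Zget_filter_info calls, before relying on gzip compression:

#include "hdf5.h"

/* Returns 1 if the deflate (gzip) filter can both encode and decode
 * in the linked HDF5 library, 0 otherwise. */
static int deflate_usable(void)
{
    unsigned int config = 0;
    if (H5Zfilter_avail(H5Z_FILTER_DEFLATE) <= 0)
        return 0;
    if (H5Zget_filter_info(H5Z_FILTER_DEFLATE, &config) < 0)
        return 0;
    return (config & H5Z_FILTER_CONFIG_ENCODE_ENABLED) &&
           (config & H5Z_FILTER_CONFIG_DECODE_ENABLED);
}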
void ErrorData::writeH5(hid_t &file_id, string group_name)
{
    herr_t status;
    hid_t group_id = H5CreateOrOpenGroup(file_id, group_name);
    hid_t dataset_id;
    hid_t dataspace_id;
    vector<unsigned int> buf32;

    // Write the region origin & dimensions
    buf32.resize(2);
    hsize_t region_dims[1];

    // origin
    buf32[0] = region_origin_.first;
    buf32[1] = region_origin_.second;
    region_dims[0] = 2;
    dataspace_id = H5Screate_simple(1, region_dims, NULL);
    dataset_id = H5Dcreate2(group_id, "region_origin", H5T_NATIVE_UINT,
                            dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, &buf32[0]);
    status = H5Dclose(dataset_id);
    status = H5Sclose(dataspace_id);

    // dim
    buf32[0] = region_dim_.first;
    buf32[1] = region_dim_.second;
    region_dims[0] = 2;
    dataspace_id = H5Screate_simple(1, region_dims, NULL);
    dataset_id = H5Dcreate2(group_id, "region_dim", H5T_NATIVE_UINT,
                            dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, &buf32[0]);
    status = H5Dclose(dataset_id);
    status = H5Sclose(dataspace_id);

    // error_data_dim
    unsigned int n_row = 5;
    unsigned int n_col = ins_.Size();
    buf32[0] = n_row;
    buf32[1] = n_col;
    region_dims[0] = 2;
    dataspace_id = H5Screate_simple(1, region_dims, NULL);
    dataset_id = H5Dcreate2(group_id, "error_data_dim", H5T_NATIVE_UINT,
                            dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, &buf32[0]);
    status = H5Dclose(dataset_id);
    status = H5Sclose(dataspace_id);

    // make buffer for Nx5 matrix of error data to write
    vector<uint64_t> buf64;
    LoadErrorDataBuffer(n_col, n_row, buf64);

    // Create compressed dataset
    hsize_t dims[2];
    dims[0] = n_row;
    dims[1] = n_col;
    dataspace_id = H5Screate_simple(2, dims, NULL);
    hid_t plist_id = H5Pcreate(H5P_DATASET_CREATE);
    hsize_t cdims[2];
    cdims[0] = min(n_row, (unsigned int)20);
    cdims[1] = min(n_col, (unsigned int)100);
    status = H5Pset_chunk(plist_id, 2, cdims);
    status = H5Pset_deflate(plist_id, 9);
    dataset_id = H5Dcreate2(group_id, "error_data", H5T_NATIVE_UINT_LEAST64,
                            dataspace_id, H5P_DEFAULT, plist_id, H5P_DEFAULT);
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT_LEAST64, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, &buf64[0]);
    status = H5Dclose(dataset_id);
    status = H5Pclose(plist_id);
    status = H5Sclose(dataspace_id);

    status = H5Gclose(group_id);
}
/*****************************************************************************
  This function generates attributes, groups, and datasets of many types.

  Parameters:
            fname:      file name.
            ngrps:      number of top level groups.
            ndsets:     number of datasets.
            nattrs:     number of attributes.
            nrows:      number of rows in a dataset.
            chunk:      chunk size (single number).
            vlen:       max vlen size.
            compressed: use gzip compression.
            latest:     use the latest file format.

  Return:  Non-negative on success/Negative on failure

  Programmer:  Peter Cao <*****@*****.**>, Jan. 2013
 ****************************************************************************/
herr_t create_perf_test_file(const char *fname, int ngrps, int ndsets,
                             int nattrs, hsize_t nrows, hsize_t dim0,
                             hsize_t chunk, int vlen, int compressed, int latest)
{
    int      i, j, k;
    hid_t    fid, sid_null, sid_scalar, sid_1d, sid_2d, did, aid, sid_2,
             sid_large, fapl = H5P_DEFAULT, dcpl = H5P_DEFAULT, gid1, gid2,
             cmp_tid, tid_str, tid_enum, tid_array_f, tid_vlen_i, tid_vlen_s;
    char     name[32], tmp_name1[32], tmp_name2[32], tmp_name3[32];
    hsize_t  dims[1] = {dim0}, dims2d[2] = {dim0, (dim0/4+1)},
             dims_array[1] = {FIXED_LEN}, dim1[1] = {2};
    char    *enum_names[4] = {"SOLID", "LIQUID", "GAS", "PLASMA"};
    test_comp_t *buf_comp = NULL, *buf_comp_large = NULL;
    int     *buf_int = NULL;
    float   (*buf_float_a)[FIXED_LEN] = NULL;
    double **buf_double2d = NULL;
    hvl_t   *buf_vlen_i = NULL;
    char    (*buf_str)[FIXED_LEN];
    char   **buf_vlen_s = NULL;
    hobj_ref_t buf_ref[2];
    hdset_reg_ref_t buf_reg_ref[2];
    size_t   offset, len;
    herr_t   status;
    char    *names[NTYPES] = { "int", "ulong", "float", "double",
                "fixed string", "enum", "fixed float array",
                "vlen int array", "vlen strings"};
    hid_t    types[NTYPES] = { H5T_NATIVE_INT, H5T_NATIVE_UINT64,
                H5T_NATIVE_FLOAT, H5T_NATIVE_DOUBLE, tid_str, tid_enum,
                tid_array_f, tid_vlen_i, tid_vlen_s};
    hsize_t  coords[4][2] = { {0, 1}, {3, 5}, {1, 0}, {2, 4} },
             start = 0, stride = 1, count = 1;

    if (nrows < NROWS) nrows = NROWS;
    if (ngrps < NGROUPS) ngrps = NGROUPS;
    if (ndsets < NDSETS) ndsets = NDSETS;
    if (nattrs < NATTRS) nattrs = NATTRS;
    if (dim0 < DIM0) dim0 = DIM0;
    if (chunk > dim0) chunk = dim0 / 4;
    if (chunk < 1) chunk = 1;
    if (vlen < 1) vlen = MAXVLEN;

    /* create fixed string datatype */
    types[4] = tid_str = H5Tcopy(H5T_C_S1);
    H5Tset_size(tid_str, FIXED_LEN);

    /* create enum datatype */
    types[5] = tid_enum = H5Tenum_create(H5T_NATIVE_INT);
    for (i = (int)SOLID; i <= (int)PLASMA; i++) {
        phase_t val = (phase_t)i;
        status = H5Tenum_insert(tid_enum, enum_names[i], &val);
    }

    /* create float array datatype */
    types[6] = tid_array_f = H5Tarray_create(H5T_NATIVE_FLOAT, 1, dims_array);

    /* create variable length integer datatype */
    types[7] = tid_vlen_i = H5Tvlen_create(H5T_NATIVE_INT);

    /* create variable length string datatype */
    types[8] = tid_vlen_s = H5Tcopy(H5T_C_S1);
    H5Tset_size(tid_vlen_s, H5T_VARIABLE);

    /* create compound datatype */
    cmp_tid = H5Tcreate(H5T_COMPOUND, sizeof(test_comp_t));
    offset = 0;
    for (i = 0; i < NTYPES - 2; i++) {
        H5Tinsert(cmp_tid, names[i], offset, types[i]);
        offset += H5Tget_size(types[i]);
    }
    H5Tinsert(cmp_tid, names[7], offset, types[7]);
    offset += sizeof(hvl_t);
    H5Tinsert(cmp_tid, names[8], offset, types[8]);

    /* create dataspaces */
    sid_1d = H5Screate_simple(1, dims, NULL);
    sid_2d = H5Screate_simple(2, dims2d, NULL);
    sid_2 = H5Screate_simple(1, dim1, NULL);
    sid_large = H5Screate_simple(1, &nrows, NULL);
    sid_null = H5Screate(H5S_NULL);
    sid_scalar = H5Screate(H5S_SCALAR);

    /* create file access property */
    fapl = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);

    /* create dataset creation property */
    dcpl = H5Pcreate(H5P_DATASET_CREATE);

    /* set dataset chunk */
    if (chunk > 0) {
        H5Pset_chunk(dcpl, 1, &chunk);
    }

    /* set dataset compression */
    if (compressed) {
        if (chunk <= 0) {
            chunk = dim0 / 10 + 1;
            H5Pset_chunk(dcpl, 1, &chunk);
        }
        H5Pset_shuffle(dcpl);
        H5Pset_deflate(dcpl, 6);
    }

    /* allocate buffers */
    buf_comp = (test_comp_t *)calloc(dim0, sizeof(test_comp_t));
    buf_comp_large = (test_comp_t *)calloc(nrows, sizeof(test_comp_t));
    buf_int = (int *)calloc(dim0, sizeof(int));
    buf_float_a = malloc(dim0 * sizeof(*buf_float_a));
    buf_vlen_i = (hvl_t *)calloc(dim0, sizeof(hvl_t));
    buf_vlen_s = (char **)calloc(dim0, sizeof(char *));
    buf_str = malloc(dim0 * sizeof(*buf_str));

    /* allocate array of double pointers */
    buf_double2d = (double **)calloc(dims2d[0], sizeof(double *));
    /* allocate a contiguous chunk of memory for the data */
    buf_double2d[0] = (double *)calloc(dims2d[0] * dims2d[1], sizeof(double));
    /* assign memory addresses to the pointer array */
    for (i = 1; i < dims2d[0]; i++)
        buf_double2d[i] = buf_double2d[0] + i * dims2d[1];

    /* fill buffer values */
    len = 1;
    for (i = 0; i < dims[0]; i++) {
        buf_comp[i].i = buf_int[i] = i - 2147483648;
        buf_comp[i].l = 0xffffffffffffffff - i;
        buf_comp[i].f = 1.0 / (i + 1.0);
        buf_comp[i].d = 987654321.0 * i + 1.0 / (i + 1.0);
        buf_comp[i].e = (phase_t)(i % (int)(PLASMA + 1));

        for (j = 0; j < FIXED_LEN; j++) {
            buf_comp[i].f_array[j] = buf_float_a[i][j] = i * 100 + j;
            buf_str[i][j] = 'a' + (i % 26);
        }
        buf_str[i][FIXED_LEN - 1] = 0;
        strcpy(buf_comp[i].s, buf_str[i]);

        len = (1 - cos(i / 8.0)) / 2 * vlen + 1;
        if (!i)
            len = vlen;
        buf_vlen_i[i].len = len;
        buf_vlen_i[i].p = (int *)calloc(len, sizeof(int));
        for (j = 0; j < len; j++)
            ((int *)(buf_vlen_i[i].p))[j] = i * 100 + j;
        buf_comp[i].i_vlen = buf_vlen_i[i];

        buf_vlen_s[i] = (char *)calloc(len, sizeof(char));
        for (j = 0; j < len - 1; j++)
            buf_vlen_s[i][j] = j % 26 + 'A';
        buf_comp[i].s_vlen = buf_vlen_s[i];

        for (j = 0; j < dims2d[1]; j++)
            buf_double2d[i][j] = i + j / 10000.0;
    }

    for (i = 0; i < nrows; i++) {
        buf_comp_large[i].i = i - 2147483648;
        buf_comp_large[i].l = 0xffffffffffffffff - i;
        buf_comp_large[i].f = 1.0 / (i + 1.0);
        buf_comp_large[i].d = 987654321.0 * i + 1.0 / (i + 1.0);
        buf_comp_large[i].e = (phase_t)(i % (int)(PLASMA + 1));
        for (j = 0; j < FIXED_LEN - 1; j++) {
            buf_comp_large[i].f_array[j] = i * 100 + j;
            buf_comp_large[i].s[j] = 'a' + (i % 26);
        }
        len = i % vlen + 1;
        buf_comp_large[i].i_vlen.len = len;
        buf_comp_large[i].i_vlen.p = (int *)calloc(len, sizeof(int));
        for (j = 0; j < len; j++)
            ((int *)(buf_comp_large[i].i_vlen.p))[j] = i * 100 + j;
        buf_comp_large[i].s_vlen = (char *)calloc(i + 2, sizeof(char));
        for (j = 0; j < i + 1; j++)
            (buf_comp_large[i].s_vlen)[j] = j % 26 + 'A';
    }

    /* create file */
    if (latest)
        fid = H5Fcreate(fname, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    else
        fid = H5Fcreate(fname, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    add_attrs(fid, 0);

    sprintf(name, "a cmp ds of %d rows", (int)nrows);
    did = H5Dcreate(fid, name, cmp_tid, sid_large, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    H5Dwrite(did, cmp_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_comp_large);
    add_attrs(did, 0);
    H5Dclose(did);

    /* add attributes */
    gid1 = H5Gcreate(fid, "attributes", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    if (nattrs < 1)
        nattrs = 1;
    i = 0;
    while (i < nattrs)
        i += add_attrs(gid1, i);
    H5Gclose(gid1);

    /* add many sub groups to a group */
    gid1 = H5Gcreate(fid, "groups", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    add_attrs(gid1, 0);
    for (i = 0; i < ngrps; i++) {
        /* create sub groups */
        sprintf(name, "g%02d", i);
        gid2 = H5Gcreate(gid1, name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        if (i < 10)
            add_attrs(gid2, 0);
        H5Gclose(gid2);
    }
    H5Gclose(gid1);

    /* add many datasets to a group */
    gid1 = H5Gcreate(fid, "datasets", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    add_attrs(gid1, 0);
    for (j = 0; j < ndsets; j += 12) {
        /* 1 add a null dataset */
        sprintf(name, "%05d null dataset", j);
        did = H5Dcreate(gid1, name, H5T_STD_I32LE, sid_null,
                        H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        if (!j) add_attrs(did, j);
        H5Dclose(did);

        /* 2 add scalar int point */
        sprintf(name, "%05d scalar int point", j);
        did = H5Dcreate(gid1, name, H5T_NATIVE_INT, sid_scalar,
                        H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &j);
        if (!j) add_attrs(did, j);
        H5Dclose(did);

        /* 3 scalar vlen string */
        sprintf(name, "%05d scalar vlen string", j);
        did = H5Dcreate(gid1, name, tid_vlen_s, sid_scalar,
                        H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite(did, tid_vlen_s, H5S_ALL, H5S_ALL, H5P_DEFAULT, &buf_vlen_s[0]);
        if (!j) add_attrs(did, j);
        H5Dclose(did);

        /* 4 add fixed-length float array */
        sprintf(name, "%05d fixed-length float array", j);
        did = H5Dcreate(gid1, name, tid_array_f, sid_1d,
                        H5P_DEFAULT, dcpl, H5P_DEFAULT);
        H5Dwrite(did, tid_array_f, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_float_a);
        if (!j) add_attrs(did, j);
        H5Dclose(did);

        /* 5 add fixed-length strings */
        sprintf(name, "%05d fixed-length strings", j);
        did = H5Dcreate(gid1, name, tid_str, sid_1d,
                        H5P_DEFAULT, dcpl, H5P_DEFAULT);
        H5Dwrite(did, tid_str, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_str);
        if (!j) add_attrs(did, j);
        H5Dclose(did);

        /* 6 add compound data */
        sprintf(name, "%05d compound data", j);
        did = H5Dcreate(gid1, name, cmp_tid, sid_1d,
                        H5P_DEFAULT, dcpl, H5P_DEFAULT);
        H5Dwrite(did, cmp_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_comp);
        if (!j) add_attrs(did, j);
        H5Dclose(did);

        /* 7 add 2D double */
        sprintf(name, "%05d 2D double", j);
        strcpy(tmp_name1, name);
        did = H5Dcreate(gid1, name, H5T_NATIVE_DOUBLE, sid_2d,
                        H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite(did, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                 buf_double2d[0]);
        if (!j) add_attrs(did, j);
        H5Dclose(did);

        /* 8 add 1D int array */
        sprintf(name, "%05d 1D int array", j);
        did = H5Dcreate(gid1, name, H5T_NATIVE_INT, sid_1d,
                        H5P_DEFAULT, dcpl, H5P_DEFAULT);
        H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_int);
        if (!j) add_attrs(did, j);
        H5Dclose(did);

        /* 9 add vlen int array */
        sprintf(name, "%05d vlen int array", j);
        strcpy(tmp_name2, name);
        did = H5Dcreate(gid1, name, tid_vlen_i, sid_1d,
                        H5P_DEFAULT, dcpl, H5P_DEFAULT);
        H5Dwrite(did, tid_vlen_i, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_vlen_i);
        if (!j) add_attrs(did, j);
        H5Dclose(did);

        /* 10 add vlen strings */
        sprintf(name, "%05d vlen strings", j);
        strcpy(tmp_name3, name);
        did = H5Dcreate(gid1, name, tid_vlen_s, sid_1d,
                        H5P_DEFAULT, dcpl, H5P_DEFAULT);
        H5Dwrite(did, tid_vlen_s, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_vlen_s);
        if (!j) add_attrs(did, j);
        H5Dclose(did);

        /* 11 add object refs */
        H5Rcreate(&buf_ref[0], gid1, ".", H5R_OBJECT, -1);
        H5Rcreate(&buf_ref[1], gid1, tmp_name3, H5R_OBJECT, -1);
        sprintf(name, "%05d obj refs", j);
        did = H5Dcreate(gid1, name, H5T_STD_REF_OBJ, sid_2,
                        H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite(did, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf_ref);
        if (!j) add_attrs(did, j);
        H5Dclose(did);

        /* 12 add region refs */
        H5Sselect_elements(sid_2d, H5S_SELECT_SET, 4, coords[0]);
        H5Rcreate(&buf_reg_ref[0], gid1, tmp_name1, H5R_DATASET_REGION, sid_2d);
        H5Sselect_none(sid_2d);
        count = dims[0] / 2 + 1;
        H5Sselect_hyperslab(sid_1d, H5S_SELECT_SET, &start, &stride, &count, NULL);
        H5Rcreate(&buf_reg_ref[1], gid1, tmp_name2, H5R_DATASET_REGION, sid_1d);
        H5Sselect_none(sid_1d);
        sprintf(name, "%05d region refs", j);
        did = H5Dcreate(gid1, name, H5T_STD_REF_DSETREG, sid_2,
                        H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite(did, H5T_STD_REF_DSETREG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                 buf_reg_ref);
        if (!j) add_attrs(did, j);
        H5Dclose(did);
    }
    H5Gclose(gid1);

    H5Tclose(tid_array_f);
    H5Tclose(tid_vlen_i);
    H5Tclose(tid_vlen_s);
    H5Tclose(tid_enum);
    H5Tclose(tid_str);
    H5Tclose(cmp_tid);
    H5Pclose(dcpl);
    H5Pclose(fapl);
    H5Sclose(sid_1d);
    H5Sclose(sid_2d);
    H5Sclose(sid_2);
    H5Sclose(sid_large);
    H5Sclose(sid_null);
    H5Sclose(sid_scalar);
    H5Fclose(fid);

    for (i = 0; i < dims[0]; i++) {
        if (buf_vlen_i[i].p) free(buf_vlen_i[i].p);
        if (buf_vlen_s[i]) free(buf_vlen_s[i]);
    }
    for (i = 0; i < nrows; i++) {
        if (buf_comp_large[i].i_vlen.p) free(buf_comp_large[i].i_vlen.p);
        if (buf_comp_large[i].s_vlen) free(buf_comp_large[i].s_vlen);
    }
    free(buf_comp);
    free(buf_comp_large);
    free(buf_int);
    free(buf_float_a);
    free(buf_double2d[0]);
    free(buf_double2d);
    free(buf_str);
    free(buf_vlen_i);
    free(buf_vlen_s);

    return 0;
}
/*+++++++++++++++++++++++++
.IDENTifer   PYTABLE_make_array
.PURPOSE     create extensible HDF5 dataset
.INPUT/OUTPUT
  call as    stat = PYTABLE_make_array( locID, dset_name, title, rank, dims,
                                        extdim, typeID, dims_chunk, fill_data,
                                        compress, shuffle, fletcher32, buff );
     input:
            hid_t locID           : HDF5 identifier of file or group
            char *dset_name       : name of dataset
            char *title           : title attribute of the dataset
            int rank              : number of dimensions
            hsize_t *dims         : size of each dimension
            int extdim            : index of the extendable dimension
            hid_t typeID          : data type (HDF5 identifier)
            hsize_t *dims_chunk   : chunk sizes
            void *fill_data       : fill value for data
            unsigned int compress : compression level (zero for no compression)
            bool shuffle          : shuffle data for better compression
            bool fletcher32       : enable Fletcher32 checksum filter
            void *buffer          : buffer with data to write (or NULL)

.RETURNS     A negative value is returned on failure.
.COMMENTS    none
-------------------------*/
herr_t PYTABLE_make_array( hid_t locID, const char *dset_name,
                           const char *title, const int rank,
                           const hsize_t *dims, int extdim, hid_t typeID,
                           const hsize_t *dims_chunk, void *fill_data,
                           unsigned int compress, bool shuffle,
                           bool fletcher32, const void *buffer )
{
    register int ni;

    hid_t  dataID = -1, spaceID = -1;
    herr_t stat;

    /* check if the array has to be chunked or not */
    if ( dims_chunk != NULL ) {
        hid_t plistID;

        hsize_t *maxdims = (hsize_t *) malloc( rank * sizeof(hsize_t) );
        if ( maxdims == NULL )
            NADC_GOTO_ERROR( NADC_ERR_ALLOC, "maxdims" );

        for ( ni = 0; ni < rank; ni++ ) {
            if ( ni == extdim )
                maxdims[ni] = H5S_UNLIMITED;
            else
                maxdims[ni] =
                    dims[ni] < dims_chunk[ni] ? dims_chunk[ni] : dims[ni];
        }
        spaceID = H5Screate_simple( rank, dims, maxdims );
        free( maxdims );
        if ( spaceID < 0 )
            NADC_GOTO_ERROR( NADC_ERR_HDF_SPACE, "" );

        /* Modify dataset creation properties, i.e. enable chunking */
        plistID = H5Pcreate( H5P_DATASET_CREATE );
        if ( H5Pset_chunk( plistID, rank, dims_chunk ) < 0 )
            goto done;

        /* set the fill value using a struct as the data type */
        if ( fill_data != NULL
             && H5Pset_fill_value( plistID, typeID, fill_data ) < 0 )
            goto done;

        /* dataset creation property list is modified to use filters;
         * fletcher must be first */
        if ( fletcher32 ) {
            if ( H5Pset_fletcher32( plistID ) < 0 )
                goto done;
        }
        /* then shuffle */
        if ( shuffle ) {
            if ( H5Pset_shuffle( plistID ) < 0 )
                goto done;
        }
        /* finally compression */
        if ( compress > 0 ) {
            if ( H5Pset_deflate( plistID, compress ) < 0 )
                goto done;
        }

        /* create the (chunked) dataset */
        dataID = H5Dcreate( locID, dset_name, typeID, spaceID,
                            H5P_DEFAULT, plistID, H5P_DEFAULT );
        if ( dataID < 0 )
            NADC_GOTO_ERROR( NADC_ERR_HDF_DATA, dset_name );

        /* end access to the property list */
        if ( H5Pclose( plistID ) < 0 )
            goto done;
    } else {
        spaceID = H5Screate_simple( rank, dims, NULL );
        if ( spaceID < 0 )
            return -1;

        /* create the dataset (not chunked) */
        dataID = H5Dcreate( locID, dset_name, typeID, spaceID,
                            H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT );
        if ( dataID < 0 )
            NADC_GOTO_ERROR( NADC_ERR_HDF_DATA, dset_name );
    }

    /*
     * write the data
     */
    stat = H5Dwrite( dataID, typeID, H5S_ALL, H5S_ALL, H5P_DEFAULT, buffer );
    if ( stat < 0 )
        NADC_GOTO_ERROR( NADC_ERR_HDF_WR, "" );

    (void) H5Dclose( dataID );
    (void) H5Sclose( spaceID );

    /*
     * Set the conforming array attributes
     *
     * attach the CLASS attribute
     */
    (void) H5LTset_attribute_string( locID, dset_name, "CLASS",
                                     PY_ARRAY_CLASS );

    /* attach the EXTDIM attribute in case of enlargeable arrays */
    (void) H5LTset_attribute_int( locID, dset_name, "EXTDIM", &extdim, 1 );

    /* attach the FLAVOR attribute */
    (void) H5LTset_attribute_string( locID, dset_name, "FLAVOR",
                                     PY_ARRAY_FLAVOR );

    /* attach the VERSION attribute */
    (void) H5LTset_attribute_string( locID, dset_name, "VERSION",
                                     PY_ARRAY_VERSION );

    /* attach the TITLE attribute */
    (void) H5LTset_attribute_string( locID, dset_name, "TITLE", title );

    return 0;

done:
    if ( dataID > 0 ) (void) H5Dclose( dataID );
    if ( spaceID > 0 ) (void) H5Sclose( spaceID );
    return -1;
}
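A call sketch for PYTABLE_make_array as declared above; locID and buffer are assumed to be supplied by the caller, and the dataset name, sizes, and fill value are illustrative:

/* Illustrative wrapper: create a gzip-compressed (level 6), shuffled 1-D
 * float array of 1000 elements, extensible along dimension 0 and chunked
 * in blocks of 256. */
static herr_t write_example_array(hid_t locID, const float *buffer)
{
    hsize_t dims[1]       = {1000};
    hsize_t dims_chunk[1] = {256};
    float   fill          = 0.0f;

    return PYTABLE_make_array(locID, "my_array", "example array",
                              1, dims, 0, H5T_NATIVE_FLOAT, dims_chunk,
                              &fill, 6 /* compress */, true /* shuffle */,
                              false /* fletcher32 */, buffer);
}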
AccessTraceWriter::AccessTraceWriter(g_string _fname, uint32_t numChildren) : fname(_fname) {
    // Create record structure
    hid_t accType = H5Tenum_create(H5T_NATIVE_USHORT);
    uint16_t val;
    H5Tenum_insert(accType, "GETS", (val=GETS,&val));
    H5Tenum_insert(accType, "GETX", (val=GETX,&val));
    H5Tenum_insert(accType, "PUTS", (val=PUTS,&val));
    H5Tenum_insert(accType, "PUTX", (val=PUTX,&val));

    size_t offset = 0;
    size_t size = H5Tget_size(H5T_NATIVE_ULONG)*2 + H5Tget_size(H5T_NATIVE_UINT)
                  + H5Tget_size(H5T_NATIVE_USHORT) + H5Tget_size(accType);
    hid_t recType = H5Tcreate(H5T_COMPOUND, size);
    auto insertType = [&](const char* name, hid_t type) {
        H5Tinsert(recType, name, offset, type);
        offset += H5Tget_size(type);
    };

    insertType("lineAddr", H5T_NATIVE_ULONG);
    insertType("cycle", H5T_NATIVE_ULONG);
    insertType("lat", H5T_NATIVE_UINT);
    insertType("childId", H5T_NATIVE_USHORT);
    insertType("accType", accType);

    hid_t fid = H5Fcreate(fname.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    if (fid == H5I_INVALID_HID) panic("Could not create HDF5 file %s", fname.c_str());

    // HACK: We want to use the SHUF filter... create the raw dataset instead of the packet table
    // hid_t table = H5PTcreate_fl(fid, "accs", recType, PT_CHUNKSIZE, 9);
    // if (table == H5I_INVALID_HID) panic("Could not create HDF5 packet table");
    hsize_t dims[1] = {0};
    hsize_t dims_chunk[1] = {PT_CHUNKSIZE};
    hsize_t maxdims[1] = {H5S_UNLIMITED};
    hid_t space_id = H5Screate_simple(1, dims, maxdims);

    hid_t plist_id = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(plist_id, 1, dims_chunk);
    H5Pset_shuffle(plist_id);
    H5Pset_deflate(plist_id, 9);

    hid_t table = H5Dcreate2(fid, "accs", recType, space_id, H5P_DEFAULT,
                             plist_id, H5P_DEFAULT);
    if (table == H5I_INVALID_HID) panic("Could not create HDF5 dataset");
    H5Dclose(table);

    // info("%ld %ld %ld %ld", sizeof(PackedAccessRecord), size, offset, H5Tget_size(recType));
    assert(offset == size);
    assert(size == sizeof(PackedAccessRecord));

    hid_t ncAttr = H5Acreate2(fid, "numChildren", H5T_NATIVE_UINT,
                              H5Screate(H5S_SCALAR), H5P_DEFAULT, H5P_DEFAULT);
    H5Awrite(ncAttr, H5T_NATIVE_UINT, &numChildren);
    H5Aclose(ncAttr);

    hid_t fAttr = H5Acreate2(fid, "finished", H5T_NATIVE_UINT,
                             H5Screate(H5S_SCALAR), H5P_DEFAULT, H5P_DEFAULT);
    uint32_t finished = 0;
    H5Awrite(fAttr, H5T_NATIVE_UINT, &finished);
    H5Aclose(fAttr);

    H5Fclose(fid);

    // Initialize buffer
    buf = gm_calloc<PackedAccessRecord>(PT_CHUNKSIZE);
    cur = 0;
    max = PT_CHUNKSIZE;
    assert((uint32_t)(((char*) &buf[1]) - ((char*) &buf[0])) == sizeof(PackedAccessRecord));
}
int apply_filters(const char* name,    /* object name from traverse list */
                  int rank,            /* rank of dataset */
                  hsize_t *dims,       /* dimensions of dataset */
                  size_t msize,        /* size of type */
                  hid_t dcpl_id,       /* dataset creation property list */
                  pack_opt_t *options, /* repack options */
                  int *has_filter)     /* (OUT) object NAME has a filter */
{
    int          nfilters;   /* number of filters in DCPL */
    hsize_t      chsize[64]; /* chunk size in elements */
    H5D_layout_t layout;
    int          i;
    pack_info_t  obj;

    *has_filter = 0;

    if (rank == 0) /* scalar dataset, do not apply */
        return 0;

    /*-------------------------------------------------------------------------
     * initialize the assignment object
     *-------------------------------------------------------------------------
     */
    init_packobject(&obj);

    /*-------------------------------------------------------------------------
     * find options
     *-------------------------------------------------------------------------
     */
    if (aux_assign_obj(name, options, &obj) == 0)
        return 0;

    /* get information about input filters */
    if ((nfilters = H5Pget_nfilters(dcpl_id)) < 0)
        return -1;

    /*-------------------------------------------------------------------------
     * check if we have filters in the pipeline
     * we want to replace them with the input filters
     * only remove if we are inserting new ones
     *-------------------------------------------------------------------------
     */
    if (nfilters && obj.nfilters) {
        *has_filter = 1;
        if (H5Premove_filter(dcpl_id, H5Z_FILTER_ALL) < 0)
            return -1;
    }

    /*-------------------------------------------------------------------------
     * check if there is an existing chunk
     * read it only if there is not a requested layout
     *-------------------------------------------------------------------------
     */
    if (obj.layout == -1) {
        if ((layout = H5Pget_layout(dcpl_id)) < 0)
            return -1;

        if (layout == H5D_CHUNKED) {
            if ((rank = H5Pget_chunk(dcpl_id, NELMTS(chsize), chsize/*out*/)) < 0)
                return -1;
            obj.layout = H5D_CHUNKED;
            obj.chunk.rank = rank;
            for (i = 0; i < rank; i++)
                obj.chunk.chunk_lengths[i] = chsize[i];
        }
    }

    /*-------------------------------------------------------------------------
     * the type of filter and additional parameter
     * type can be one of the filters
     * H5Z_FILTER_NONE        0, uncompress if compressed
     * H5Z_FILTER_DEFLATE     1, deflation like gzip
     * H5Z_FILTER_SHUFFLE     2, shuffle the data
     * H5Z_FILTER_FLETCHER32  3, fletcher32 checksum of EDC
     * H5Z_FILTER_SZIP        4, szip compression
     * H5Z_FILTER_NBIT        5, nbit compression
     * H5Z_FILTER_SCALEOFFSET 6, scaleoffset compression
     *-------------------------------------------------------------------------
     */
    if (obj.nfilters) {
        /*-------------------------------------------------------------------------
         * filters require CHUNK layout; if we do not have one define a default
         *-------------------------------------------------------------------------
         */
        if (obj.layout == -1) {
            /* stripmine info */
            hsize_t sm_size[H5S_MAX_RANK]; /* stripmine size */
            hsize_t sm_nbytes;             /* bytes per stripmine */

            obj.chunk.rank = rank;

            /*
             * determine the strip mine size. The strip mine is
             * a hyperslab whose size is manageable.
             */
            sm_nbytes = msize;
            for (i = rank; i > 0; --i) {
                hsize_t size = H5TOOLS_BUFSIZE / sm_nbytes;
                if (size == 0) /* datum size > H5TOOLS_BUFSIZE */
                    size = 1;
                sm_size[i - 1] = MIN(dims[i - 1], size);
                sm_nbytes *= sm_size[i - 1];
                assert(sm_nbytes > 0);
            }

            for (i = 0; i < rank; i++) {
                obj.chunk.chunk_lengths[i] = sm_size[i];
            }
        }

        for (i = 0; i < obj.nfilters; i++) {
            switch (obj.filter[i].filtn) {
                default:
                    break;

                /*-------------------------------------------------------------------------
                 * H5Z_FILTER_DEFLATE 1, deflation like gzip
                 *-------------------------------------------------------------------------
                 */
                case H5Z_FILTER_DEFLATE:
                {
                    unsigned aggression; /* the deflate level */

                    aggression = obj.filter[i].cd_values[0];
                    /* set up for deflated data */
                    if (H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths) < 0)
                        return -1;
                    if (H5Pset_deflate(dcpl_id, aggression) < 0)
                        return -1;
                }
                    break;

                /*-------------------------------------------------------------------------
                 * H5Z_FILTER_SZIP 4, szip compression
                 *-------------------------------------------------------------------------
                 */
                case H5Z_FILTER_SZIP:
                {
                    unsigned options_mask;
                    unsigned pixels_per_block;

                    options_mask = obj.filter[i].cd_values[0];
                    pixels_per_block = obj.filter[i].cd_values[1];
                    /* set up for szip data */
                    if (H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths) < 0)
                        return -1;
                    if (H5Pset_szip(dcpl_id, options_mask, pixels_per_block) < 0)
                        return -1;
                }
                    break;

                /*-------------------------------------------------------------------------
                 * H5Z_FILTER_SHUFFLE 2, shuffle the data
                 *-------------------------------------------------------------------------
                 */
                case H5Z_FILTER_SHUFFLE:
                    if (H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths) < 0)
                        return -1;
                    if (H5Pset_shuffle(dcpl_id) < 0)
                        return -1;
                    break;

                /*-------------------------------------------------------------------------
                 * H5Z_FILTER_FLETCHER32 3, fletcher32 checksum of EDC
                 *-------------------------------------------------------------------------
                 */
                case H5Z_FILTER_FLETCHER32:
                    if (H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths) < 0)
                        return -1;
                    if (H5Pset_fletcher32(dcpl_id) < 0)
                        return -1;
                    break;

                /*-------------------------------------------------------------------------
                 * H5Z_FILTER_NBIT 5, nbit compression
                 *-------------------------------------------------------------------------
                 */
                case H5Z_FILTER_NBIT:
                    if (H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths) < 0)
                        return -1;
                    if (H5Pset_nbit(dcpl_id) < 0)
                        return -1;
                    break;

                /*-------------------------------------------------------------------------
                 * H5Z_FILTER_SCALEOFFSET 6, scale+offset compression
                 *-------------------------------------------------------------------------
                 */
                case H5Z_FILTER_SCALEOFFSET:
                {
                    H5Z_SO_scale_type_t scale_type;
                    int scale_factor;

                    scale_type = (H5Z_SO_scale_type_t)obj.filter[i].cd_values[0];
                    scale_factor = obj.filter[i].cd_values[1];

                    if (H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths) < 0)
                        return -1;
                    if (H5Pset_scaleoffset(dcpl_id, scale_type, scale_factor) < 0)
                        return -1;
                }
                    break;
            } /* switch */
        } /* i */
    } /* obj.nfilters */

    /*-------------------------------------------------------------------------
     * layout
     *-------------------------------------------------------------------------
     */
    if (obj.layout >= 0) {
        /* a layout was defined */
        if (H5Pset_layout(dcpl_id, obj.layout) < 0)
            return -1;

        if (H5D_CHUNKED == obj.layout) {
            if (H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths) < 0)
                return -1;
        }
        else if (H5D_COMPACT == obj.layout) {
            if (H5Pset_alloc_time(dcpl_id, H5D_ALLOC_TIME_EARLY) < 0)
                return -1;
        }
        /* remove filters for the H5D_CONTIGUOUS case */
        else if (H5D_CONTIGUOUS == obj.layout) {
            if (H5Premove_filter(dcpl_id, H5Z_FILTER_ALL) < 0)
                return -1;
        }
    }

    return 0;
}
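/*
 * apply_filters() only counts the input filters via H5Pget_nfilters() before
 * replacing them. To see *which* filters a DCPL already carries (e.g. to report
 * them before repacking), each pipeline entry can be inspected with
 * H5Pget_filter2(). A minimal sketch using only the standard C API:
 */
#include "hdf5.h"
#include <stdio.h>

static void print_filters(hid_t dcpl_id)
{
    int nfilters = H5Pget_nfilters(dcpl_id);
    for (int i = 0; i < nfilters; i++) {
        unsigned     flags;
        size_t       cd_nelmts = 8;
        unsigned     cd_values[8];
        char         fname[64];
        unsigned     filter_config;
        H5Z_filter_t filtn = H5Pget_filter2(dcpl_id, (unsigned)i, &flags,
                                            &cd_nelmts, cd_values,
                                            sizeof(fname), fname, &filter_config);
        if (filtn == H5Z_FILTER_DEFLATE && cd_nelmts > 0)
            printf("deflate, level %u\n", cd_values[0]);  /* level is cd_values[0] */
        else
            printf("filter %d (%s)\n", (int)filtn, fname);
    }
}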
/*-------------------------------------------------------------------------
 * Function:  create_deflate_dsets_float
 *
 * Purpose:   Create a dataset of FLOAT datatype with deflate filter
 *
 * Return:    Success:  0
 *            Failure:  -1
 *
 * Programmer:  Raymond Lu
 *              29 March 2011
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
int create_deflate_dsets_float(hid_t fid, hid_t fsid, hid_t msid)
{
#ifdef H5_HAVE_FILTER_DEFLATE
    hid_t   dataset = -1; /* dataset handles */
    hid_t   dcpl = -1;
    float   data[NX][NY]; /* data to write */
    float   fillvalue = -2.2f;
    hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
    int     i, j;

    /*
     * Data and output buffer initialization.
     */
    for (j = 0; j < NX; j++) {
        for (i = 0; i < NY; i++)
            data[j][i] = ((float)(i + j + 1))/3;
    }

    /*
     * Create the dataset creation property list, add the deflate filter,
     * set the chunk size, and set the fill value.
     */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR
    if(H5Pset_deflate(dcpl, 6) < 0)
        TEST_ERROR
    if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
        TEST_ERROR
    if(H5Pset_fill_value(dcpl, H5T_NATIVE_FLOAT, &fillvalue) < 0)
        TEST_ERROR

    /*
     * Create a new dataset within the file using the defined dataspace,
     * little-endian datatype, and the creation properties above.
     */
    if((dataset = H5Dcreate2(fid, DATASETNAME16, H5T_IEEE_F32LE, fsid,
                             H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
        TEST_ERROR

    /*
     * Write the data to the dataset using default transfer properties.
     */
    if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
        TEST_ERROR

    /* Close dataset */
    if(H5Dclose(dataset) < 0)
        TEST_ERROR

    /* Now create a dataset with a big-endian type */
    if((dataset = H5Dcreate2(fid, DATASETNAME17, H5T_IEEE_F32BE, fsid,
                             H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
        TEST_ERROR
    if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
        TEST_ERROR
    if(H5Dclose(dataset) < 0)
        TEST_ERROR

    /*
     * Close/release resources.
     */
    if(H5Pclose(dcpl) < 0)
        TEST_ERROR

#else /* H5_HAVE_FILTER_DEFLATE */
    const char *not_supported = "Deflate filter is not enabled. Can't create the dataset.";

    puts(not_supported);
#endif /* H5_HAVE_FILTER_DEFLATE */

    return 0;

#ifdef H5_HAVE_FILTER_DEFLATE
error:
    H5E_BEGIN_TRY {
        H5Pclose(dcpl);
        H5Dclose(dataset);
    } H5E_END_TRY;

    return -1;
#endif /* H5_HAVE_FILTER_DEFLATE */
}
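/*
 * Reading a deflate-compressed dataset back needs no special handling: the
 * filter pipeline is applied transparently inside H5Dread, provided the
 * deflate filter is available in the library. A minimal sketch (DATASETNAME16
 * and the NX/NY extents are taken from the test above):
 */
static int read_deflate_dset(hid_t fid)
{
    float data[NX][NY];
    hid_t dset = H5Dopen2(fid, DATASETNAME16, H5P_DEFAULT);
    if (dset < 0)
        return -1;
    /* decompression happens inside H5Dread */
    if (H5Dread(dset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data) < 0) {
        H5Dclose(dset);
        return -1;
    }
    return H5Dclose(dset) < 0 ? -1 : 0;
}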
/*-------------------------------------------------------------------------
 * Function:  test_filters_for_datasets
 *
 * Purpose:   Tests creating datasets and writing data with dynamically
 *            loaded filters
 *
 * Return:    Success:  0
 *            Failure:  -1
 *
 * Programmer:  Raymond Lu
 *              14 March 2013
 *
 *-------------------------------------------------------------------------
 */
static herr_t test_filters_for_datasets(hid_t file)
{
    hid_t         dc;  /* Dataset creation property list ID */
    const hsize_t chunk_size[2] = {FILTER_CHUNK_DIM1, FILTER_CHUNK_DIM2};  /* Chunk dimensions */
    unsigned int  compress_level = 9;

    /*----------------------------------------------------------
     * STEP 1: Test deflation by itself.
     *----------------------------------------------------------
     */
#ifdef H5_HAVE_FILTER_DEFLATE
    puts("Testing deflate filter");
    if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
    if(H5Pset_deflate(dc, 6) < 0) goto error;

    if(test_filter_internal(file, DSET_DEFLATE_NAME, dc) < 0) goto error;

    /* Clean up objects used for this test */
    if(H5Pclose(dc) < 0) goto error;
#else /* H5_HAVE_FILTER_DEFLATE */
    TESTING("deflate filter");
    SKIPPED();
    puts("    Deflate filter not enabled");
#endif /* H5_HAVE_FILTER_DEFLATE */

    /*----------------------------------------------------------
     * STEP 2: Test DYNLIB1 by itself.
     *----------------------------------------------------------
     */
    puts("Testing DYNLIB1 filter");
    if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
    if(H5Pset_filter(dc, H5Z_FILTER_DYNLIB1, H5Z_FLAG_MANDATORY, (size_t)1, &compress_level) < 0) goto error;

    if(test_filter_internal(file, DSET_DYNLIB1_NAME, dc) < 0) goto error;

    /* Clean up objects used for this test */
    if(H5Pclose(dc) < 0) goto error;

    /* Unregister the dynamic filter DYNLIB1 for testing purposes. The next time
     * this test runs with the new file format, the library's H5PL code has to
     * search the table of loaded plugin libraries for this filter. */
    if(H5Zunregister(H5Z_FILTER_DYNLIB1) < 0) goto error;

    /*----------------------------------------------------------
     * STEP 3: Test DYNLIB2 by itself.
     *----------------------------------------------------------
     */
    puts("Testing DYNLIB2 filter");
    if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
    if(H5Pset_filter(dc, H5Z_FILTER_DYNLIB2, H5Z_FLAG_MANDATORY, 0, NULL) < 0) goto error;

    if(test_filter_internal(file, DSET_DYNLIB2_NAME, dc) < 0) goto error;

    /* Clean up objects used for this test */
    if(H5Pclose(dc) < 0) goto error;

    /* Unregister the dynamic filter DYNLIB2 for testing purposes. The next time
     * this test runs with the new file format, the library's H5PL code has to
     * search the table of loaded plugin libraries for this filter. */
    if(H5Zunregister(H5Z_FILTER_DYNLIB2) < 0) goto error;

    return 0;

error:
    return -1;
}
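/*
 * The compile-time H5_HAVE_FILTER_DEFLATE guard above can be complemented by a
 * runtime check: H5Zfilter_avail() reports whether a filter is registered, and
 * H5Zget_filter_info() whether it can encode (write) as well as decode. A
 * minimal sketch:
 */
#include "hdf5.h"

static int deflate_encoder_available(void)
{
    unsigned int filter_info = 0;
    if (H5Zfilter_avail(H5Z_FILTER_DEFLATE) <= 0)
        return 0; /* filter not registered at all */
    if (H5Zget_filter_info(H5Z_FILTER_DEFLATE, &filter_info) < 0)
        return 0;
    return (filter_info & H5Z_FILTER_CONFIG_ENCODE_ENABLED) != 0;
}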
/*-------------------------------------------------------------------------
 * Function:  test_invalid_parameters
 *
 * Purpose:   Test invalid parameters for H5DOwrite_chunk
 *
 * Return:    Success:  0
 *            Failure:  1
 *
 * Programmer:  Raymond Lu
 *              30 November 2012
 *
 *-------------------------------------------------------------------------
 */
static int test_invalid_parameters(hid_t file)
{
    hid_t    dataspace = -1, dataset = -1;
    hid_t    mem_space = -1;
    hid_t    cparms = -1, dxpl = -1;
    hsize_t  dims[2] = {NX, NY};
    hsize_t  chunk_dims[2] = {CHUNK_NX, CHUNK_NY};
    herr_t   status;
    int      i, j, n;

    unsigned filter_mask = 0;
    int      direct_buf[CHUNK_NX][CHUNK_NY];
    hsize_t  offset[2] = {0, 0};
    size_t   buf_size = CHUNK_NX*CHUNK_NY*sizeof(int);
    int      aggression = 9; /* Compression aggression setting */

    TESTING("invalid parameters for H5DOwrite_chunk");

    /*
     * Create the data space with unlimited dimensions.
     */
    if((dataspace = H5Screate_simple(RANK, dims, NULL)) < 0)
        goto error;

    if((mem_space = H5Screate_simple(RANK, chunk_dims, NULL)) < 0)
        goto error;

    /*
     * Modify dataset creation properties
     */
    if((cparms = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        goto error;

    /*
     * Create a new contiguous dataset to verify that H5DOwrite_chunk doesn't work
     */
    if((dataset = H5Dcreate2(file, DATASETNAME5, H5T_NATIVE_INT, dataspace, H5P_DEFAULT,
                             cparms, H5P_DEFAULT)) < 0)
        goto error;

    if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0)
        goto error;

    /* Initialize data for one chunk */
    for(i = n = 0; i < CHUNK_NX; i++)
        for(j = 0; j < CHUNK_NY; j++) {
            direct_buf[i][j] = n++;
        }

    /* Try to write the chunk data to the contiguous dataset. It should fail. */
    offset[0] = CHUNK_NX;
    offset[1] = CHUNK_NY;
    H5E_BEGIN_TRY {
        if((status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, direct_buf)) != FAIL)
            goto error;
    } H5E_END_TRY;

    if(H5Dclose(dataset) < 0)
        goto error;

    /* Create a chunked dataset with a compression filter */
    if((status = H5Pset_chunk(cparms, RANK, chunk_dims)) < 0)
        goto error;

    if((status = H5Pset_deflate(cparms, (unsigned)aggression)) < 0)
        goto error;

    /*
     * Create a new dataset within the file using the cparms creation properties.
     */
    if((dataset = H5Dcreate2(file, DATASETNAME6, H5T_NATIVE_INT, dataspace, H5P_DEFAULT,
                             cparms, H5P_DEFAULT)) < 0)
        goto error;

    /* Check invalid dataset ID */
    H5E_BEGIN_TRY {
        if((status = H5DOwrite_chunk((hid_t)-1, dxpl, filter_mask, offset, buf_size, direct_buf)) != FAIL)
            goto error;
    } H5E_END_TRY;

    /* Check invalid DXPL ID */
    H5E_BEGIN_TRY {
        if((status = H5DOwrite_chunk(dataset, (hid_t)-1, filter_mask, offset, buf_size, direct_buf)) != FAIL)
            goto error;
    } H5E_END_TRY;

    /* Check invalid OFFSET */
    H5E_BEGIN_TRY {
        if((status = H5DOwrite_chunk(dataset, dxpl, filter_mask, NULL, buf_size, direct_buf)) != FAIL)
            goto error;
    } H5E_END_TRY;

    /* Check when OFFSET is out of dataset range */
    offset[0] = NX + 1;
    offset[1] = NY;
    H5E_BEGIN_TRY {
        if((status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, direct_buf)) != FAIL)
            goto error;
    } H5E_END_TRY;

    /* Check when OFFSET is not on a chunk boundary */
    offset[0] = CHUNK_NX;
    offset[1] = CHUNK_NY + 1;
    H5E_BEGIN_TRY {
        if((status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, direct_buf)) != FAIL)
            goto error;
    } H5E_END_TRY;

    /* Check invalid buffer size */
    offset[0] = CHUNK_NX;
    offset[1] = CHUNK_NY;
    buf_size = 0;
    H5E_BEGIN_TRY {
        if((status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, direct_buf)) != FAIL)
            goto error;
    } H5E_END_TRY;

    /* Check invalid data buffer */
    buf_size = CHUNK_NX*CHUNK_NY*sizeof(int);
    H5E_BEGIN_TRY {
        if((status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, NULL)) != FAIL)
            goto error;
    } H5E_END_TRY;

    if(H5Dclose(dataset) < 0)
        goto error;

    /*
     * Close/release resources.
     */
    H5Sclose(mem_space);
    H5Sclose(dataspace);
    H5Pclose(cparms);
    H5Pclose(dxpl);

    PASSED();
    return 0;

error:
    H5E_BEGIN_TRY {
        H5Dclose(dataset);
        H5Sclose(mem_space);
        H5Sclose(dataspace);
        H5Pclose(cparms);
        H5Pclose(dxpl);
    } H5E_END_TRY;

    return 1;
}
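/*
 * For contrast with the failure cases above, a valid H5DOwrite_chunk() call
 * hands the library a chunk the caller has already compressed, so the deflate
 * filter in the pipeline is skipped on write. A rough sketch using zlib
 * directly (H5DO* lives in the high-level library, hdf5_hl; newer releases
 * expose the same operation as H5Dwrite_chunk). The helper name is
 * illustrative, not from the source:
 */
#include "hdf5.h"
#include "hdf5_hl.h"
#include <zlib.h>
#include <stdlib.h>

static int write_precompressed_chunk(hid_t dset, hid_t dxpl)
{
    int      chunk[CHUNK_NX][CHUNK_NY] = {{0}};  /* chunk-sized source data */
    uLongf   zlen = compressBound(sizeof(chunk));
    Bytef   *zbuf = (Bytef *)malloc(zlen);
    hsize_t  offset[2] = {0, 0};                 /* must lie on a chunk boundary */
    unsigned filter_mask = 0;                    /* 0 = chunk passed through all filters */
    int      ret = -1;

    if (zbuf && compress2(zbuf, &zlen, (const Bytef *)chunk, sizeof(chunk), 9) == Z_OK)
        if (H5DOwrite_chunk(dset, dxpl, filter_mask, offset, (size_t)zlen, zbuf) >= 0)
            ret = 0;
    free(zbuf);
    return ret;
}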
hid_t H5ARRAYmake(hid_t loc_id,
                  const char *dset_name,
                  const char *obversion,
                  const int rank,
                  const hsize_t *dims,
                  int extdim,
                  hid_t type_id,
                  hsize_t *dims_chunk,
                  void *fill_data,
                  int compress,
                  char *complib,
                  int shuffle,
                  int fletcher32,
                  const void *data)
{
    hid_t        dataset_id, space_id;
    hsize_t     *maxdims = NULL;
    hid_t        plist_id = 0;
    unsigned int cd_values[6];
    int          chunked = 0;
    int          i;

    /* Check whether the array has to be chunked or not */
    if (dims_chunk) {
        chunked = 1;
    }

    if (chunked) {
        maxdims = malloc(rank*sizeof(hsize_t));
        if (!maxdims)
            return -1;

        for (i = 0; i < rank; i++) {
            if (i == extdim) {
                maxdims[i] = H5S_UNLIMITED;
            }
            else {
                maxdims[i] = dims[i] < dims_chunk[i] ? dims_chunk[i] : dims[i];
            }
        }
    }

    /* Create the data space for the dataset. */
    if ((space_id = H5Screate_simple(rank, dims, maxdims)) < 0)
        return -1;

    if (chunked) {
        /* Modify dataset creation properties, i.e. enable chunking */
        plist_id = H5Pcreate(H5P_DATASET_CREATE);
        if (H5Pset_chunk(plist_id, rank, dims_chunk) < 0)
            return -1;

        /* Set the fill value using a struct as the data type. */
        if (fill_data) {
            if (H5Pset_fill_value(plist_id, type_id, fill_data) < 0)
                return -1;
        }
        else {
            if (H5Pset_fill_time(plist_id, H5D_FILL_TIME_ALLOC) < 0)
                return -1;
        }

        /* The creation property list is modified to use filters.
           Fletcher32 must come first in the pipeline. */
        if (fletcher32) {
            if (H5Pset_fletcher32(plist_id) < 0)
                return -1;
        }

        /* Then shuffle (but not if Blosc is activated) */
        if ((shuffle) && (strcmp(complib, "blosc") != 0)) {
            if (H5Pset_shuffle(plist_id) < 0)
                return -1;
        }

        /* Finally, compression */
        if (compress) {
            cd_values[0] = compress;
            cd_values[1] = (int)(atof(obversion) * 10);
            if (extdim < 0)
                cd_values[2] = CArray;
            else
                cd_values[2] = EArray;

            /* The default compressor in HDF5 (zlib) */
            if (strcmp(complib, "zlib") == 0) {
                if (H5Pset_deflate(plist_id, compress) < 0)
                    return -1;
            }
            /* The Blosc compressor does accept parameters */
            else if (strcmp(complib, "blosc") == 0) {
                cd_values[4] = compress;
                cd_values[5] = shuffle;
                if (H5Pset_filter(plist_id, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 6, cd_values) < 0)
                    return -1;
            }
            /* The LZO compressor does accept parameters */
            else if (strcmp(complib, "lzo") == 0) {
                if (H5Pset_filter(plist_id, FILTER_LZO, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0)
                    return -1;
            }
            /* The bzip2 compressor does accept parameters */
            else if (strcmp(complib, "bzip2") == 0) {
                if (H5Pset_filter(plist_id, FILTER_BZIP2, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0)
                    return -1;
            }
            else {
                /* Compression library not supported */
                fprintf(stderr, "Compression library not supported\n");
                return -1;
            }
        }

        /* Create the (chunked) dataset */
        if ((dataset_id = H5Dcreate2(loc_id, dset_name, type_id, space_id,
                                     H5P_DEFAULT, plist_id, H5P_DEFAULT)) < 0)
            goto out;
    }
    else { /* Not chunked case */
        /* Create the dataset. */
        if ((dataset_id = H5Dcreate2(loc_id, dset_name, type_id, space_id,
                                     H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
            goto out;
    }

    /* Write the dataset only if there is data to write */
    if (data) {
        if (H5Dwrite(dataset_id, type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, data) < 0)
            goto out;
    }

    /* Terminate access to the data space. */
    if (H5Sclose(space_id) < 0)
        return -1;

    /* End access to the property list */
    if (plist_id)
        if (H5Pclose(plist_id) < 0)
            goto out;

    /* Release resources */
    if (maxdims)
        free(maxdims);

    return dataset_id;

out:
    H5Dclose(dataset_id);
    H5Sclose(space_id);
    if (maxdims)
        free(maxdims);
    if (dims_chunk)
        free(dims_chunk);
    return -1;
}
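/*
 * The ordering in H5ARRAYmake is deliberate: filters run in the order they are
 * added to the property list, so the Fletcher32 checksum is inserted first
 * (checksumming the uncompressed bytes), shuffle second, and the compressor
 * last. A minimal stand-alone sketch of the same pipeline with plain HDF5
 * calls; the chunk shape here is an example, not taken from the source:
 */
#include "hdf5.h"

static hid_t make_checksummed_deflate_dcpl(void)
{
    hsize_t chunk[2] = {64, 64}; /* example chunk shape */
    hid_t   dcpl = H5Pcreate(H5P_DATASET_CREATE);

    if (dcpl < 0)
        return -1;
    if (H5Pset_chunk(dcpl, 2, chunk) < 0 ||
        H5Pset_fletcher32(dcpl) < 0 ||   /* checksum first */
        H5Pset_shuffle(dcpl) < 0 ||      /* then byte shuffle */
        H5Pset_deflate(dcpl, 6) < 0) {   /* compress last */
        H5Pclose(dcpl);
        return -1;
    }
    return dcpl;
}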
int main(void)
{
    hid_t src_sid    = -1; /* source dataset's dataspace ID */
    hid_t src_dcplid = -1; /* source dataset property list ID */

    hid_t vds_sid    = -1; /* VDS dataspace ID */
    hid_t vds_dcplid = -1; /* VDS dataset property list ID */

    hid_t fid  = -1;       /* HDF5 file ID */
    hid_t did  = -1;       /* dataset ID */
    hid_t msid = -1;       /* memory dataspace ID */
    hid_t fsid = -1;       /* file dataspace ID */

    /* Hyperslab arrays */
    hsize_t start[RANK] = {0, 0, 0};
    hsize_t count[RANK] = {H5S_UNLIMITED, 1, 1};

    int *buffer = NULL;    /* data buffer */
    int  value = -1;       /* value written to datasets */

    hsize_t n = 0;         /* number of elements in a plane */

    int i;                 /* iterator */
    int j;                 /* iterator */
    int k;                 /* iterator */

    /************************************
     * Create source files and datasets *
     ************************************/

    /* Create source dataspace ID */
    if((src_sid = H5Screate_simple(RANK, UC_4_SOURCE_DIMS, UC_4_SOURCE_MAX_DIMS)) < 0)
        UC_ERROR
    if(H5Sselect_hyperslab(src_sid, H5S_SELECT_SET, start, NULL, UC_4_SOURCE_MAX_DIMS, NULL) < 0)
        UC_ERROR

    /* Create source files and datasets */
    for(i = 0; i < UC_4_N_SOURCES; i++) {

        /* source dataset dcpl */
        if((src_dcplid = H5Pcreate(H5P_DATASET_CREATE)) < 0)
            UC_ERROR
        if(H5Pset_chunk(src_dcplid, RANK, UC_4_PLANE) < 0)
            UC_ERROR
        if(H5Pset_fill_value(src_dcplid, UC_4_SOURCE_DATATYPE, &UC_4_FILL_VALUES[i]) < 0)
            UC_ERROR
        if(H5Pset_deflate(src_dcplid, COMPRESSION_LEVEL) < 0)
            UC_ERROR

        /* Create source file and dataset */
        if((fid = H5Fcreate(UC_4_FILE_NAMES[i], H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0)
            UC_ERROR
        if((did = H5Dcreate2(fid, UC_4_SOURCE_DSET_NAME, UC_4_SOURCE_DATATYPE, src_sid,
                             H5P_DEFAULT, src_dcplid, H5P_DEFAULT)) < 0)
            UC_ERROR

        /* Set the dataset's extent */
        if(H5Dset_extent(did, UC_4_SOURCE_MAX_DIMS) < 0)
            UC_ERROR

        /* Create a data buffer that represents a plane */
        n = UC_4_PLANE[1] * UC_4_PLANE[2];
        if(NULL == (buffer = (int *)malloc(n * sizeof(int))))
            UC_ERROR

        /* Create the memory dataspace */
        if((msid = H5Screate_simple(RANK, UC_4_PLANE, NULL)) < 0)
            UC_ERROR

        /* Get the file dataspace */
        if((fsid = H5Dget_space(did)) < 0)
            UC_ERROR

        /* Write planes to the dataset */
        for(j = 0; j < UC_4_SRC_PLANES; j++) {
            value = ((i + 1) * 10) + j;
            for(k = 0; k < (int)n; k++)
                buffer[k] = value;

            start[0] = j;
            start[1] = 0;
            start[2] = 0;
            if(H5Sselect_hyperslab(fsid, H5S_SELECT_SET, start, NULL, UC_4_PLANE, NULL) < 0)
                UC_ERROR
            if(H5Dwrite(did, H5T_NATIVE_INT, msid, fsid, H5P_DEFAULT, buffer) < 0)
                UC_ERROR
        } /* end for */

        /* close */
        if(H5Sclose(msid) < 0)
            UC_ERROR
        if(H5Sclose(fsid) < 0)
            UC_ERROR
        if(H5Pclose(src_dcplid) < 0)
            UC_ERROR
        if(H5Dclose(did) < 0)
            UC_ERROR
        if(H5Fclose(fid) < 0)
            UC_ERROR
        free(buffer);
    } /* end for */

    /*******************
     * Create VDS file *
     *******************/

    /* Create file */
    if((fid = H5Fcreate(UC_4_VDS_FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        UC_ERROR

    /* Create VDS dcpl */
    if((vds_dcplid = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        UC_ERROR
    if(H5Pset_fill_value(vds_dcplid, UC_4_VDS_DATATYPE, &UC_4_VDS_FILL_VALUE) < 0)
        UC_ERROR

    /* Create VDS dataspace */
    if((vds_sid = H5Screate_simple(RANK, UC_4_VDS_DIMS, UC_4_VDS_MAX_DIMS)) < 0)
        UC_ERROR
    start[0] = 0;
    start[1] = 0;
    start[2] = 0;
    if(H5Sselect_hyperslab(vds_sid, H5S_SELECT_SET, start, UC_4_SOURCE_MAX_DIMS,
                           count, UC_4_SOURCE_MAX_DIMS) < 0)
        UC_ERROR

    /* Add the VDS mapping. The mapped file name uses a printf-like
     * naming scheme that automatically maps new files.
     */
    if(H5Pset_virtual(vds_dcplid, vds_sid, UC_4_MAPPING_FILE_NAME,
                      UC_4_SOURCE_DSET_PATH, src_sid) < 0)
        UC_ERROR

    /* Create dataset */
    if((did = H5Dcreate2(fid, UC_4_VDS_DSET_NAME, UC_4_VDS_DATATYPE, vds_sid,
                         H5P_DEFAULT, vds_dcplid, H5P_DEFAULT)) < 0)
        UC_ERROR

    /* close */
    if(H5Sclose(src_sid) < 0)
        UC_ERROR
    if(H5Pclose(vds_dcplid) < 0)
        UC_ERROR
    if(H5Sclose(vds_sid) < 0)
        UC_ERROR
    if(H5Dclose(did) < 0)
        UC_ERROR
    if(H5Fclose(fid) < 0)
        UC_ERROR

    return EXIT_SUCCESS;

error:
    H5E_BEGIN_TRY {
        if(src_sid >= 0)
            (void)H5Sclose(src_sid);
        if(src_dcplid >= 0)
            (void)H5Pclose(src_dcplid);
        if(vds_sid >= 0)
            (void)H5Sclose(vds_sid);
        if(vds_dcplid >= 0)
            (void)H5Pclose(vds_dcplid);
        if(fid >= 0)
            (void)H5Fclose(fid);
        if(did >= 0)
            (void)H5Dclose(did);
        if(msid >= 0)
            (void)H5Sclose(msid);
        if(fsid >= 0)
            (void)H5Sclose(fsid);
        if(buffer != NULL)
            free(buffer);
    } H5E_END_TRY

    return EXIT_FAILURE;
} /* end main() */
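/*
 * Once the VDS exists, the mapping stored in its creation property list can be
 * read back, e.g. to verify the printf-style source file name. A minimal
 * sketch (requires HDF5 1.10+, where the VDS API lives):
 */
#include "hdf5.h"
#include <stdio.h>

static void print_vds_mappings(hid_t vds_did)
{
    hid_t  dcpl = H5Dget_create_plist(vds_did);
    size_t mapping_count = 0;

    if (dcpl >= 0 && H5Pget_virtual_count(dcpl, &mapping_count) >= 0) {
        for (size_t m = 0; m < mapping_count; m++) {
            char fname[256];
            if (H5Pget_virtual_filename(dcpl, m, fname, sizeof(fname)) >= 0)
                printf("mapping %zu -> %s\n", m, fname);
        }
    }
    if (dcpl >= 0)
        H5Pclose(dcpl);
}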
/*-------------------------------------------------------------------------
 * Function:  test_dataset_write_with_filters
 *
 * Purpose:   Tests creating datasets and writing data with dynamically
 *            loaded filters
 *
 * Return:    SUCCEED/FAIL
 *
 *-------------------------------------------------------------------------
 */
static herr_t test_dataset_write_with_filters(hid_t fid)
{
    hid_t        dcpl_id = -1;     /* Dataset creation property list ID */
    unsigned int compress_level;   /* Deflate compression level */
    unsigned int filter1_data;     /* Data used by filter 1 */
    unsigned int libver_values[4]; /* Used w/ the filter that makes HDF5 calls */

    /*----------------------------------------------------------
     * STEP 1: Test deflation by itself.
     *----------------------------------------------------------
     */
    HDputs("Testing dataset writes with deflate filter");
#ifdef H5_HAVE_FILTER_DEFLATE
    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR;
    if (H5Pset_chunk(dcpl_id, 2, chunk_sizes_g) < 0)
        TEST_ERROR;
    compress_level = 6;
    if (H5Pset_deflate(dcpl_id, compress_level) < 0)
        TEST_ERROR;

    /* Ensure the filter works */
    if (ensure_filter_works(fid, DSET_DEFLATE_NAME, dcpl_id) < 0)
        TEST_ERROR;

    /* Clean up objects used for this test */
    if (H5Pclose(dcpl_id) < 0)
        TEST_ERROR;
#else /* H5_HAVE_FILTER_DEFLATE */
    SKIPPED();
    HDputs("    Deflate filter not enabled");
#endif /* H5_HAVE_FILTER_DEFLATE */

    /*----------------------------------------------------------
     * STEP 2: Test filter plugin 1 by itself.
     *----------------------------------------------------------
     */
    HDputs("Testing dataset writes with filter plugin 1");
    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR;
    if (H5Pset_chunk(dcpl_id, 2, chunk_sizes_g) < 0)
        TEST_ERROR;

    /* Set up the filter, passing in the amount the filter will add and subtract
     * from each data element. Note that this value has an arbitrary max of 9.
     */
    filter1_data = 9;
    if (H5Pset_filter(dcpl_id, FILTER1_ID, H5Z_FLAG_MANDATORY, (size_t)1, &filter1_data) < 0)
        TEST_ERROR;

    /* Ensure the filter works */
    if (ensure_filter_works(fid, DSET_FILTER1_NAME, dcpl_id) < 0)
        TEST_ERROR;

    /* Clean up objects used for this test */
    if (H5Pclose(dcpl_id) < 0)
        TEST_ERROR;

    /* Unregister the dynamic filter for testing purposes. The next time this test
     * runs with the new file format, the library's H5PL code has to search the
     * table of loaded plugin libraries for this filter.
     */
    if (H5Zunregister(FILTER1_ID) < 0)
        TEST_ERROR;

    /*----------------------------------------------------------
     * STEP 3: Test filter plugin 2 by itself.
     *----------------------------------------------------------
     */
    HDputs("Testing dataset writes with filter plugin 2");
    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR;
    if (H5Pset_chunk(dcpl_id, 2, chunk_sizes_g) < 0)
        TEST_ERROR;
    if (H5Pset_filter(dcpl_id, FILTER2_ID, H5Z_FLAG_MANDATORY, 0, NULL) < 0)
        TEST_ERROR;

    /* Ensure the filter works */
    if (ensure_filter_works(fid, DSET_FILTER2_NAME, dcpl_id) < 0)
        TEST_ERROR;

    /* Clean up objects used for this test */
    if (H5Pclose(dcpl_id) < 0)
        TEST_ERROR;

    /* Unregister the dynamic filter for testing purposes. The next time this test
     * runs with the new file format, the library's H5PL code has to search the
     * table of loaded plugin libraries for this filter.
     */
    if (H5Zunregister(FILTER2_ID) < 0)
        TEST_ERROR;

    /*----------------------------------------------------------
     * STEP 4: Test filter plugin 3 by itself.
     *         (This filter plugin makes HDF5 API calls)
     *----------------------------------------------------------
     */
    HDputs("Testing dataset writes with filter plugin 3");
    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR;
    if (H5Pset_chunk(dcpl_id, 2, chunk_sizes_g) < 0)
        TEST_ERROR;

    /* Set the add/subtract value for the filter */
    libver_values[0] = 9;

    /* Get the library bounds and add to the filter data */
    if (H5get_libversion(&libver_values[1], &libver_values[2], &libver_values[3]) < 0)
        TEST_ERROR;
    if (H5Pset_filter(dcpl_id, FILTER3_ID, H5Z_FLAG_MANDATORY, (size_t)4, libver_values) < 0)
        TEST_ERROR;

    /* Ensure the filter works */
    if (ensure_filter_works(fid, DSET_FILTER3_NAME, dcpl_id) < 0)
        TEST_ERROR;

    /* Clean up objects used for this test */
    if (H5Pclose(dcpl_id) < 0)
        TEST_ERROR;

    /* Unregister the dynamic filter for testing purposes. The next time this test
     * runs with the new file format, the library's H5PL code has to search the
     * table of loaded plugin libraries for this filter.
     */
    if (H5Zunregister(FILTER3_ID) < 0)
        TEST_ERROR;

    return SUCCEED;

error:
    /* Clean up objects used for this test */
    H5E_BEGIN_TRY {
        H5Pclose(dcpl_id);
    } H5E_END_TRY

    return FAIL;
} /* end test_dataset_write_with_filters() */
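/*
 * Dynamically loaded filters such as FILTER1_ID..FILTER3_ID above are located
 * through the plugin search path, usually taken from the HDF5_PLUGIN_PATH
 * environment variable. The path can also be adjusted programmatically;
 * H5PLprepend is available in HDF5 1.10.1 and later. A minimal sketch with a
 * hypothetical plugin directory:
 */
#include "hdf5.h"

static herr_t add_plugin_dir(void)
{
    /* "/usr/local/hdf5/plugins" is an example path, not from the source */
    return H5PLprepend("/usr/local/hdf5/plugins");
}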