void write_hdf(const std::string &filename, double* &data, int &m_rows, int &m_cols){ hid_t file_id, dataset_id, space_id, property_id; herr_t status; hsize_t dims[2] = {m_rows,m_cols}; //Create a new file using the default properties. file_id = H5Fcreate (filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); //Create dataspace. Setting maximum size to NULL sets the maximum //size to be the current size. space_id = H5Screate_simple (2, dims, NULL); //Create the dataset creation property list, set the layout to compact. property_id = H5Pcreate (H5P_DATASET_CREATE); status = H5Pset_layout (property_id, H5D_CONTIGUOUS); // Create the dataset. dataset_id = H5Dcreate (file_id, "x", H5T_NATIVE_DOUBLE, space_id, H5P_DEFAULT, property_id, H5P_DEFAULT); //Write the data to the dataset. status = H5Dwrite (dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, data); status = H5Sclose(space_id); status = H5Dclose(dataset_id); status = H5Fclose(file_id); status = H5Pclose(property_id); }
/**
 * Create an extensible 1-D dataset of doubles (initially empty, unlimited
 * maximum extent) named `name` in the sampler's output file.
 *
 * @param name  name of the dataset to create in this->_outfile
 * @return the new dataset handle on the rank-0 subprocess; -1 (an invalid
 *         hid_t) on every other rank, which must not touch the file.
 */
hid_t InfiniteDimensionalMCMCSampler::_create_scalar_dataset(const std::string & name)
{
  // Only subprocess with rank 0 manipulates the output file
  if ((this->m_env).subRank() == 0) {
    // Create a 1D dataspace. Unlimited size. Initially set to 0. We will
    // extend it later
    const int ndims = 1;
    hsize_t dims[ndims] = {0}; // dataset dimensions at creation
    hsize_t maxdims[ndims] = {H5S_UNLIMITED};
    hid_t file_space = H5Screate_simple(ndims, dims, maxdims);

    // Create dataset creation property list. Unlimited datasets must be
    // chunked. Choosing the chunk size is an issue, here we set it to 1
    hsize_t chunk_dims[ndims] = {1};
    hid_t plist = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_layout(plist, H5D_CHUNKED);
    H5Pset_chunk(plist, ndims, chunk_dims);

    // Create the dataset
    hid_t dset = H5Dcreate(this->_outfile, name.c_str(), H5T_NATIVE_DOUBLE,
                           file_space, H5P_DEFAULT, plist, H5P_DEFAULT);

    // We don't need the property list anymore. We also don't need the file
    // dataspace anymore because we'll extend it later, making this one
    // invalid anyway.
    H5Pclose(plist);
    H5Sclose(file_space);

    return dset;
  }

  // Bug fix: the original fell off the end of a non-void function on
  // non-rank-0 processes, which is undefined behaviour. Return an invalid
  // handle instead; callers on those ranks must not use it.
  return -1;
}
void write(const std::string &filename, std::vector<int> &data) { // HDF5 handles hid_t file_id, dataset_id, space_id, property_id; herr_t status; hsize_t dims[1] = {data.size()}; //Create a new file using the default properties. file_id = H5Fcreate (filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); //Create dataspace. Setting maximum size to NULL sets the maximum //size to be the current size. space_id = H5Screate_simple (1, dims, NULL); //Create the dataset creation property list, set the layout to compact. property_id = H5Pcreate (H5P_DATASET_CREATE); status = H5Pset_layout (property_id, H5D_COMPACT); // Create the dataset. dataset_id = H5Dcreate (file_id, "DATASET", H5T_STD_I32LE, space_id, H5P_DEFAULT, property_id, H5P_DEFAULT); //Write the data to the dataset. status = H5Dwrite (dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &data[0]); status = H5Sclose(space_id); status = H5Dclose(dataset_id); status = H5Fclose(file_id); status = H5Pclose(property_id); }
/**
 * Open or create the HDF5 checkpoint file holding the fault state for one
 * of the two alternating checkpoint slots.
 *
 * When loaded() is true the existing file is opened and the attribute /
 * dataset handles are re-acquired; otherwise a fresh file is created
 * (collectively under MPI) with one dataset per fault variable.
 *
 * @param odd       index of the double-buffered checkpoint slot
 *                  (presumably 0 or 1 — confirm against callers)
 * @param filename  path of the checkpoint file
 * @return handle of the opened/created HDF5 file
 */
hid_t seissol::checkpoint::h5::Fault::initFile(int odd, const char* filename)
{
	hid_t h5file;

	if (loaded()) {
		// Open the file
		h5file = open(filename, false);
		checkH5Err(h5file);

		// Fault writer
		m_h5timestepFault[odd] = H5Aopen(h5file, "timestep_fault", H5P_DEFAULT);
		checkH5Err(m_h5timestepFault[odd]);

		// Data
		for (unsigned int i = 0; i < NUM_VARIABLES; i++) {
			m_h5data[odd][i] = H5Dopen(h5file, VAR_NAMES[i], H5P_DEFAULT);
			checkH5Err(m_h5data[odd][i]);
		}
	} else {
		// Create the file
		hid_t h5plist = H5Pcreate(H5P_FILE_ACCESS);
		checkH5Err(h5plist);
		checkH5Err(H5Pset_libver_bounds(h5plist, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST));
#ifdef USE_MPI
		// Parallel file access on the checkpoint communicator
		checkH5Err(H5Pset_fapl_mpio(h5plist, comm(), MPI_INFO_NULL));
#endif // USE_MPI

		h5file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, h5plist);
		checkH5Err(h5file);
		checkH5Err(H5Pclose(h5plist));

		// Create scalar dataspace for attributes
		hid_t h5spaceScalar = H5Screate(H5S_SCALAR);
		checkH5Err(h5spaceScalar);

		// Fault writer: scalar int attribute, initialized to time step 0
		m_h5timestepFault[odd] = H5Acreate(h5file, "timestep_fault", H5T_STD_I32LE,
				h5spaceScalar, H5P_DEFAULT, H5P_DEFAULT);
		checkH5Err(m_h5timestepFault[odd]);
		int t = 0;
		checkH5Err(H5Awrite(m_h5timestepFault[odd], H5T_NATIVE_INT, &t));

		checkH5Err(H5Sclose(h5spaceScalar));

		// Data: one contiguous float64 dataset per variable; early allocation
		// so the space exists before the (collective) writes
		for (unsigned int i = 0; i < NUM_VARIABLES; i++) {
			h5plist = H5Pcreate(H5P_DATASET_CREATE);
			checkH5Err(h5plist);
			checkH5Err(H5Pset_layout(h5plist, H5D_CONTIGUOUS));
			checkH5Err(H5Pset_alloc_time(h5plist, H5D_ALLOC_TIME_EARLY));

			m_h5data[odd][i] = H5Dcreate(h5file, VAR_NAMES[i], H5T_IEEE_F64LE,
					m_h5fSpaceData, H5P_DEFAULT, h5plist, H5P_DEFAULT);
			checkH5Err(m_h5data[odd][i]);
			checkH5Err(H5Pclose(h5plist));
		}
	}

	return h5file;
}
//-------------------------------------------------------------------------- // Function: DSetCreatPropList::setLayout ///\brief Sets the type of storage used store the raw data for a dataset. ///\param layout - IN: Type of storage layout for raw data ///\exception H5::PropListIException ///\par Description /// For information on valid layout types, please refer to /// http://www.hdfgroup.org/HDF5/doc/RM/RM_H5P.html#Property-SetLayout // Programmer Binh-Minh Ribler - 2000 //-------------------------------------------------------------------------- void DSetCreatPropList::setLayout(H5D_layout_t layout) const { herr_t ret_value = H5Pset_layout( id, layout ); if( ret_value < 0 ) { throw PropListIException("DSetCreatPropList::setLayout", "H5Pset_layout failed"); } }
void HDF5Output::open(const std::string& filename) { file = H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); sid = H5Tcreate(H5T_COMPOUND, sizeof(OutputRow)); H5Tinsert(sid, "D", HOFFSET(OutputRow, D), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "z", HOFFSET(OutputRow, z), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "SN", HOFFSET(OutputRow, SN), H5T_NATIVE_UINT64); H5Tinsert(sid, "ID", HOFFSET(OutputRow, ID), H5T_NATIVE_INT32); H5Tinsert(sid, "E", HOFFSET(OutputRow, E), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "X", HOFFSET(OutputRow, X), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "Y", HOFFSET(OutputRow, Y), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "Z", HOFFSET(OutputRow, Z), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "Px", HOFFSET(OutputRow, Px), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "Py", HOFFSET(OutputRow, Py), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "Pz", HOFFSET(OutputRow, Pz), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "SN0", HOFFSET(OutputRow, SN0), H5T_NATIVE_UINT64); H5Tinsert(sid, "ID0", HOFFSET(OutputRow, ID0), H5T_NATIVE_INT32); H5Tinsert(sid, "E0", HOFFSET(OutputRow, E0), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "X0", HOFFSET(OutputRow, X0), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "Y0", HOFFSET(OutputRow, Y0), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "Z0", HOFFSET(OutputRow, Z0), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "P0x", HOFFSET(OutputRow, P0x), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "P0y", HOFFSET(OutputRow, P0y), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "P0z", HOFFSET(OutputRow, P0z), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "SN1", HOFFSET(OutputRow, SN1), H5T_NATIVE_UINT64); H5Tinsert(sid, "ID1", HOFFSET(OutputRow, ID1), H5T_NATIVE_INT32); H5Tinsert(sid, "E1", HOFFSET(OutputRow, E1), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "X1", HOFFSET(OutputRow, X1), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "Y1", HOFFSET(OutputRow, Y1), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "Z1", HOFFSET(OutputRow, Z1), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "P1x", HOFFSET(OutputRow, P1x), H5T_NATIVE_DOUBLE); H5Tinsert(sid, "P1y", HOFFSET(OutputRow, P1y), 
H5T_NATIVE_DOUBLE); H5Tinsert(sid, "P1z", HOFFSET(OutputRow, P1z), H5T_NATIVE_DOUBLE); // chunked prop hid_t plist = H5Pcreate(H5P_DATASET_CREATE); H5Pset_layout(plist, H5D_CHUNKED); hsize_t chunk_dims[RANK] = {BUFFER_SIZE}; H5Pset_chunk(plist, RANK, chunk_dims); H5Pset_deflate(plist, 5); hsize_t dims[RANK] = {0}; hsize_t max_dims[RANK] = {H5S_UNLIMITED}; dataspace = H5Screate_simple(RANK, dims, max_dims); dset = H5Dcreate2(file, "CRPROPA3", sid, dataspace, H5P_DEFAULT, plist, H5P_DEFAULT); H5Pclose(plist); buffer.reserve(BUFFER_SIZE); }
void write_file(const std::string & filename, double * image, int r, int c) { hid_t file_id, dataset_id, space_id, property_id; herr_t status; hsize_t dims[2] = {r, c}; file_id = H5Fcreate (filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); space_id = H5Screate_simple (2, dims, NULL); property_id = H5Pcreate (H5P_DATASET_CREATE); status = H5Pset_layout (property_id, H5D_CONTIGUOUS); dataset_id = H5Dcreate (file_id, "DATASET", H5T_STD_I32LE, space_id, H5P_DEFAULT, property_id, H5P_DEFAULT); status = H5Dwrite (dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, image); status = H5Sclose(space_id); status = H5Dclose(dataset_id); status = H5Fclose(file_id); status = H5Pclose(property_id); }
/*-------------------------------------------------------------------------
 * Function: gent_compact
 *
 * Purpose: Generate a compact dataset in LOC_ID
 *
 *-------------------------------------------------------------------------
 */
static void
gent_compact(hid_t loc_id)
{
    hsize_t dset_dims[1] = {6};
    int     values[6]    = {1, 2, 3, 4, 5, 6};
    hid_t   space_id, dset_id, dcpl_id;

    /* dataspace for six integers */
    space_id = H5Screate_simple(1, dset_dims, NULL);

    /* creation property list requesting the compact layout */
    dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_layout(dcpl_id, H5D_COMPACT);

    /* create the dataset and fill it */
    did_create:
    dset_id = H5Dcreate2(loc_id, DATASET_COMPACT, H5T_NATIVE_INT, space_id,
                         H5P_DEFAULT, dcpl_id, H5P_DEFAULT);
    H5Dwrite(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, values);

    /* release all handles */
    H5Sclose(space_id);
    H5Dclose(dset_id);
    H5Pclose(dcpl_id);
    (void)0;
    goto done; done:;
}
/**
 * Open or create the HDF5 checkpoint file holding the wavefield state for
 * one of the two alternating checkpoint slots.
 *
 * When loaded() is true the existing file is opened and the attribute /
 * dataset handles are re-acquired; otherwise a fresh file is created
 * (collectively under MPI) with "time", "partitions" and
 * "timestep_wavefield" attributes plus a "values" dataset.
 *
 * @param odd       index of the double-buffered checkpoint slot
 *                  (presumably 0 or 1 — confirm against callers)
 * @param filename  path of the checkpoint file
 * @return handle of the opened/created HDF5 file
 */
hid_t seissol::checkpoint::h5::Wavefield::initFile(int odd, const char* filename)
{
	hid_t h5file;

	if (loaded()) {
		// Open the old file
		h5file = open(filename, false);
		checkH5Err(h5file);

		// Time
		m_h5time[odd] = H5Aopen(h5file, "time", H5P_DEFAULT);
		checkH5Err(m_h5time[odd]);

		// Wavefield writer
		m_h5timestepWavefield[odd] = H5Aopen(h5file, "timestep_wavefield", H5P_DEFAULT);
		checkH5Err(m_h5timestepWavefield[odd]);

		// Data
		m_h5data[odd] = H5Dopen(h5file, "values", H5P_DEFAULT);
		checkH5Err(m_h5data[odd]);
	} else {
		// Create the file
		hid_t h5plist = H5Pcreate(H5P_FILE_ACCESS);
		checkH5Err(h5plist);
		checkH5Err(H5Pset_libver_bounds(h5plist, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST));
		// Optional file-level alignment, taken from the environment
		hsize_t align = utils::Env::get<hsize_t>("SEISSOL_CHECKPOINT_ALIGNMENT", 0);
		if (align > 0)
			checkH5Err(H5Pset_alignment(h5plist, 1, align));
#ifdef USE_MPI
		// Parallel file access on the global communicator
		MPIInfo info;
		checkH5Err(H5Pset_fapl_mpio(h5plist, seissol::MPI::mpi.comm(), info.get()));
#endif // USE_MPI

		h5file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, h5plist);
		checkH5Err(h5file);
		checkH5Err(H5Pclose(h5plist));

		// Create scalar dataspace for attributes
		hid_t h5spaceScalar = H5Screate(H5S_SCALAR);
		checkH5Err(h5spaceScalar);

		// Time (not written here; presumably filled in by the writer — confirm)
		m_h5time[odd] = H5Acreate(h5file, "time", H5T_IEEE_F64LE, h5spaceScalar,
				H5P_DEFAULT, H5P_DEFAULT);
		checkH5Err(m_h5time[odd]);

		// Partitions: written once and closed immediately
		hid_t h5partitions = H5Acreate(h5file, "partitions", H5T_STD_I32LE, h5spaceScalar,
				H5P_DEFAULT, H5P_DEFAULT);
		checkH5Err(h5partitions);
		int p = partitions();
		checkH5Err(H5Awrite(h5partitions, H5T_NATIVE_INT, &p));
		checkH5Err(H5Aclose(h5partitions));

		// Wavefield writer: scalar int attribute, initialized to time step 0
		m_h5timestepWavefield[odd] = H5Acreate(h5file, "timestep_wavefield",
				H5T_STD_I32LE, h5spaceScalar, H5P_DEFAULT, H5P_DEFAULT);
		checkH5Err(m_h5timestepWavefield[odd]);
		int t = 0;
		checkH5Err(H5Awrite(m_h5timestepWavefield[odd], H5T_NATIVE_INT, &t));

		checkH5Err(H5Sclose(h5spaceScalar));

		// Variable: contiguous float64 dataset, allocated early so the space
		// exists before the (collective) writes
		h5plist = H5Pcreate(H5P_DATASET_CREATE);
		checkH5Err(h5plist);
		checkH5Err(H5Pset_layout(h5plist, H5D_CONTIGUOUS));
		checkH5Err(H5Pset_alloc_time(h5plist, H5D_ALLOC_TIME_EARLY));

		m_h5data[odd] = H5Dcreate(h5file, "values", H5T_IEEE_F64LE, m_h5fSpaceData,
				H5P_DEFAULT, h5plist, H5P_DEFAULT);
		checkH5Err(m_h5data[odd]);
		checkH5Err(H5Pclose(h5plist));
	}

	return h5file;
}
/*-------------------------------------------------------------------------
 * Function: apply_filters
 *
 * Purpose: Apply the requested repack filters/layout to the dataset
 *  creation property list DCPL_ID for the object NAME. Reads the existing
 *  chunking when no layout was requested, invents a strip-mine-sized
 *  default chunking when filters require one, then applies the requested
 *  filters and layout.
 *
 * Return: 0 on success (including "nothing to do"), -1 on failure
 *-------------------------------------------------------------------------
 */
int apply_filters(const char* name,    /* object name from traverse list */
                  int rank,            /* rank of dataset */
                  hsize_t *dims,       /* dimensions of dataset */
                  size_t msize,        /* size of type */
                  hid_t dcpl_id,       /* dataset creation property list */
                  pack_opt_t *options, /* repack options */
                  int *has_filter)     /* (OUT) object NAME has a filter */
{
    int          nfilters;   /* number of filters in DCPL */
    hsize_t      chsize[64]; /* chunk size in elements */
    H5D_layout_t layout;
    int          i;
    pack_info_t  obj;

    *has_filter = 0;

    if (rank==0) /* scalar dataset, do not apply */
        return 0;

    /*-------------------------------------------------------------------------
     * initialize the assigment object
     *-------------------------------------------------------------------------
     */
    init_packobject(&obj);

    /*-------------------------------------------------------------------------
     * find options
     *-------------------------------------------------------------------------
     */
    if (aux_assign_obj(name,options,&obj)==0)
        return 0;

    /* get information about input filters */
    if ((nfilters = H5Pget_nfilters(dcpl_id))<0)
        return -1;

    /*-------------------------------------------------------------------------
     * check if we have filters in the pipeline
     * we want to replace them with the input filters
     * only remove if we are inserting new ones
     *-------------------------------------------------------------------------
     */
    if (nfilters && obj.nfilters ) {
        *has_filter = 1;
        if (H5Premove_filter(dcpl_id,H5Z_FILTER_ALL)<0)
            return -1;
    }

    /*-------------------------------------------------------------------------
     * check if there is an existent chunk
     * read it only if there is not a requested layout
     *-------------------------------------------------------------------------
     */
    if (obj.layout == -1 ) {
        if ((layout = H5Pget_layout(dcpl_id))<0)
            return -1;

        if (layout == H5D_CHUNKED) {
            /* NOTE: rank is reused here to hold the chunk rank */
            if ((rank = H5Pget_chunk(dcpl_id,NELMTS(chsize),chsize/*out*/))<0)
                return -1;
            obj.layout = H5D_CHUNKED;
            obj.chunk.rank = rank;
            for ( i = 0; i < rank; i++)
                obj.chunk.chunk_lengths[i] = chsize[i];
        }
    }

    /*-------------------------------------------------------------------------
     * the type of filter and additional parameter
     * type can be one of the filters
     * H5Z_FILTER_NONE        0 , uncompress if compressed
     * H5Z_FILTER_DEFLATE     1 , deflation like gzip
     * H5Z_FILTER_SHUFFLE     2 , shuffle the data
     * H5Z_FILTER_FLETCHER32  3 , fletcher32 checksum of EDC
     * H5Z_FILTER_SZIP        4 , szip compression
     * H5Z_FILTER_NBIT        5 , nbit compression
     * H5Z_FILTER_SCALEOFFSET 6 , scaleoffset compression
     *-------------------------------------------------------------------------
     */

    if (obj.nfilters) {
        /*-------------------------------------------------------------------------
         * filters require CHUNK layout; if we do not have one define a default
         *-------------------------------------------------------------------------
         */
        if (obj.layout==-1) {
            /* stripmine info */
            hsize_t sm_size[H5S_MAX_RANK]; /*stripmine size */
            hsize_t sm_nbytes;             /*bytes per stripmine */

            obj.chunk.rank = rank;

            /*
             * determine the strip mine size. The strip mine is
             * a hyperslab whose size is manageable.
             */
            sm_nbytes = msize;
            for ( i = rank; i > 0; --i) {
                hsize_t size = H5TOOLS_BUFSIZE / sm_nbytes;
                if ( size == 0) /* datum size > H5TOOLS_BUFSIZE */
                    size = 1;
                sm_size[i - 1] = MIN(dims[i - 1], size);
                sm_nbytes *= sm_size[i - 1];
                assert(sm_nbytes > 0);
            }

            for ( i = 0; i < rank; i++) {
                obj.chunk.chunk_lengths[i] = sm_size[i];
            }
        }

        for ( i=0; i<obj.nfilters; i++) {
            switch (obj.filter[i].filtn) {
                default:
                    break;

                /*-------------------------------------------------------------------------
                 * H5Z_FILTER_DEFLATE 1 , deflation like gzip
                 *-------------------------------------------------------------------------
                 */
                case H5Z_FILTER_DEFLATE:
                {
                    unsigned aggression; /* the deflate level */

                    aggression = obj.filter[i].cd_values[0];
                    /* set up for deflated data */
                    if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                        return -1;
                    if(H5Pset_deflate(dcpl_id,aggression)<0)
                        return -1;
                }
                    break;

                /*-------------------------------------------------------------------------
                 * H5Z_FILTER_SZIP 4 , szip compression
                 *-------------------------------------------------------------------------
                 */
                case H5Z_FILTER_SZIP:
                {
                    unsigned options_mask;
                    unsigned pixels_per_block;

                    options_mask = obj.filter[i].cd_values[0];
                    pixels_per_block = obj.filter[i].cd_values[1];
                    /* set up for szip data */
                    if(H5Pset_chunk(dcpl_id,obj.chunk.rank,obj.chunk.chunk_lengths)<0)
                        return -1;
                    if (H5Pset_szip(dcpl_id,options_mask,pixels_per_block)<0)
                        return -1;
                }
                    break;

                /*-------------------------------------------------------------------------
                 * H5Z_FILTER_SHUFFLE 2 , shuffle the data
                 *-------------------------------------------------------------------------
                 */
                case H5Z_FILTER_SHUFFLE:
                    if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                        return -1;
                    if (H5Pset_shuffle(dcpl_id)<0)
                        return -1;
                    break;

                /*-------------------------------------------------------------------------
                 * H5Z_FILTER_FLETCHER32 3 , fletcher32 checksum of EDC
                 *-------------------------------------------------------------------------
                 */
                case H5Z_FILTER_FLETCHER32:
                    if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                        return -1;
                    if (H5Pset_fletcher32(dcpl_id)<0)
                        return -1;
                    break;

                /*-------------------------------------------------------------------------
                 * H5Z_FILTER_NBIT , NBIT compression
                 *-------------------------------------------------------------------------
                 */
                case H5Z_FILTER_NBIT:
                    if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                        return -1;
                    if (H5Pset_nbit(dcpl_id)<0)
                        return -1;
                    break;

                /*-------------------------------------------------------------------------
                 * H5Z_FILTER_SCALEOFFSET , scale+offset compression
                 *-------------------------------------------------------------------------
                 */
                case H5Z_FILTER_SCALEOFFSET:
                {
                    H5Z_SO_scale_type_t scale_type;
                    int scale_factor;

                    scale_type = (H5Z_SO_scale_type_t)obj.filter[i].cd_values[0];
                    scale_factor = obj.filter[i].cd_values[1];

                    if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                        return -1;
                    if (H5Pset_scaleoffset(dcpl_id,scale_type,scale_factor)<0)
                        return -1;
                }
                    break;
            } /* switch */
        }/*i*/
    } /*obj.nfilters*/

    /*-------------------------------------------------------------------------
     * layout
     *-------------------------------------------------------------------------
     */

    if (obj.layout>=0) {
        /* a layout was defined */
        if (H5Pset_layout(dcpl_id, obj.layout)<0)
            return -1;

        if (H5D_CHUNKED == obj.layout) {
            if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                return -1;
        }
        else if (H5D_COMPACT == obj.layout) {
            if (H5Pset_alloc_time(dcpl_id, H5D_ALLOC_TIME_EARLY)<0)
                return -1;
        }
        /* remove filters for the H5D_CONTIGUOUS case */
        else if (H5D_CONTIGUOUS == obj.layout) {
            if (H5Premove_filter(dcpl_id,H5Z_FILTER_ALL)<0)
                return -1;
        }
    }

    return 0;
}
/*
 * Exercises HDF5 <-> netCDF-4 interoperability: writes plain HDF5 files
 * (datasets, dimension scales and, when built with szip, a compressed
 * dataset) and re-opens them through the netCDF-4 API to check the
 * expected mapping of groups, dimensions and variables.
 */
int main(int argc, char **argv)
{
    printf("\n*** Testing HDF5/NetCDF-4 interoperability...\n");
    printf("*** testing HDF5 compatibility...");
    {
#define GRPA_NAME "grpa"
#define VAR_NAME "vara"
#define NDIMS 2
        int nrowCur = 7; /* current size */
        int ncolCur = 3;
        int nrowMax = nrowCur + 0; /* maximum size */
        int ncolMax = ncolCur + 0;
        hid_t xdimId;
        hid_t ydimId;
        hsize_t xscaleDims[1];
        hsize_t yscaleDims[1];
        hid_t xdimSpaceId, spaceId;
        hid_t fileId;
        hid_t fapl;
        hsize_t curDims[2];
        hsize_t maxDims[2];
        hid_t dataTypeId, dsPropertyId, grpaId, grpaPropId, dsId;
        hid_t ydimSpaceId;
        const char * dimNameBase = "This is a netCDF dimension but not a netCDF variable.";
        char dimNameBuf[1000];
        char *varaName = "/grpa/vara";
        short amat[nrowCur][ncolCur];
        int ii, jj;

        xscaleDims[0] = nrowCur;
        yscaleDims[0] = ncolCur;
        if ((xdimSpaceId = H5Screate_simple(1, xscaleDims, NULL)) < 0) ERR;

        /* With the SEMI close degree, the HDF5 file close will fail if
         * anything is left open. */
        if ((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR;
        if (H5Pset_fclose_degree(fapl, H5F_CLOSE_SEMI)) ERR;

        /* Create file */
        if((fileId = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, H5Pcreate(H5P_FILE_CREATE),
                               fapl)) < 0) ERR;
        if (H5Pclose(fapl) < 0) ERR;

        /* Create data space */
        curDims[0] = nrowCur;
        curDims[1] = ncolCur;
        maxDims[0] = nrowMax;
        maxDims[1] = ncolMax;
        if ((spaceId = H5Screate_simple(2, curDims, maxDims)) < 0) ERR;

        if ((dataTypeId = H5Tcopy(H5T_NATIVE_SHORT)) < 0) ERR;

        if ((dsPropertyId = H5Pcreate(H5P_DATASET_CREATE)) < 0) ERR;

        if ((grpaPropId = H5Pcreate(H5P_GROUP_CREATE)) < 0) ERR;
        if ((grpaId = H5Gcreate2(fileId, GRPA_NAME, H5P_DEFAULT,
                                 grpaPropId, H5P_DEFAULT)) < 0) ERR;
        if (H5Pclose(grpaPropId) < 0) ERR;

        /* Create vara dataset */
        if ((dsId = H5Dcreate2(fileId, varaName, dataTypeId, spaceId,
                               H5P_DEFAULT, dsPropertyId, H5P_DEFAULT)) < 0) ERR;

        if (H5Pclose(dsPropertyId) < 0) ERR;
        if (H5Tclose(dataTypeId) < 0) ERR;

        if ((ydimSpaceId = H5Screate_simple(1, yscaleDims, NULL)) < 0) ERR;

        /* Create xdim dimension dataset */
        if ((xdimId = H5Dcreate2(fileId, "/xdim", H5T_IEEE_F32BE, xdimSpaceId,
                                 H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) ERR;

        if (H5Sclose(xdimSpaceId) < 0) ERR;

        /* Create ydim dimension dataset */
        if ((ydimId = H5Dcreate2(fileId, "/ydim", H5T_IEEE_F32BE, ydimSpaceId,
                                 H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) ERR;

        if (H5Sclose(ydimSpaceId) < 0) ERR;

        /* Create xdim scale */
        sprintf(dimNameBuf, "%s%10d", dimNameBase, nrowCur);
        if (H5DSset_scale(xdimId, dimNameBuf) < 0) ERR;

        /* Create ydim scale */
        sprintf(dimNameBuf, "%s%10d", dimNameBase, ncolCur);
        if (H5DSset_scale(ydimId, dimNameBuf) < 0) ERR;

        /* Attach dimension scales to the dataset */
        if (H5DSattach_scale(dsId, xdimId, 0) < 0) ERR;
        if (H5DSattach_scale(dsId, ydimId, 1) < 0) ERR;

        /* Close stuff. */
        if (H5Dclose(xdimId) < 0) ERR;
        if (H5Dclose(ydimId) < 0) ERR;
        if (H5Dclose(dsId) < 0) ERR;
        if (H5Gclose(grpaId) < 0) ERR;
        if (H5Sclose(spaceId) < 0) ERR;
        if (H5Fclose(fileId) < 0) ERR;

        /* Create some data */
        for (ii = 0; ii < nrowCur; ii++)
            for (jj = 0; jj < ncolCur; jj++)
                amat[ii][jj] = 100 * ii + jj;

        /* Re-open file */
        if ((fileId = H5Fopen(FILE_NAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) ERR;
        if ((grpaId = H5Gopen2(fileId, GRPA_NAME, H5P_DEFAULT)) < 0) ERR;
        if ((dsId = H5Dopen2(grpaId, varaName, H5P_DEFAULT)) < 0) ERR;

        /* Write dataset */
        if (H5Dwrite(dsId, H5T_NATIVE_SHORT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                     amat) < 0) ERR;

        /* Write dimension values for both xdim, ydim */
        {
            short xydimMat[ nrowCur >= ncolCur ? nrowCur : ncolCur];
            for (ii = 0; ii < nrowCur; ii++)
                xydimMat[ii] = 0; /*#### 100 * ii; */

            /* Write xdim */
            if ((xdimId = H5Dopen2(fileId, "/xdim", H5P_DEFAULT)) < 0) ERR;
            if (H5Dwrite(xdimId, H5T_NATIVE_SHORT, H5S_ALL, H5S_ALL,
                         H5P_DEFAULT, xydimMat) < 0) ERR;
            if (H5Dclose(xdimId) < 0) ERR;

            /* Write ydim */
            if ((ydimId = H5Dopen2(fileId, "/ydim", H5P_DEFAULT)) < 0) ERR;
            if (H5Dwrite(ydimId, H5T_NATIVE_SHORT, H5S_ALL, H5S_ALL,
                         H5P_DEFAULT, xydimMat) < 0) ERR;
            if (H5Dclose(ydimId) < 0) ERR;
        }

        if (H5Dclose(dsId) < 0) ERR;
        if (H5Gclose(grpaId) < 0) ERR;
        if (H5Fclose(fileId) < 0) ERR;

        /* Re-read the file through the netCDF-4 API and verify the mapping */
        {
            int ncid, grpid, nvars, ngatts, ndims, unlimdimid, ngrps;
            char name_in[NC_MAX_NAME + 1];
            nc_type xtype_in;
            int ndims_in, natts_in, dimid_in[NDIMS];

            /* nc_set_log_level(5);*/
            if (nc_open(FILE_NAME, NC_NOWRITE, &ncid)) ERR;
            if (nc_inq(ncid, &ndims, &nvars, &ngatts, &unlimdimid)) ERR;
            if (ndims != 2 || nvars != 0 || ngatts != 0 || unlimdimid != -1) ERR;
            if (nc_inq_grps(ncid, &ngrps, &grpid)) ERR;
            if (ngrps != 1) ERR;
            if (nc_inq(grpid, &ndims, &nvars, &ngatts, &unlimdimid)) ERR;
            if (ndims != 0 || nvars != 1 || ngatts != 0 || unlimdimid != -1) ERR;

            if (nc_inq_var(grpid, 0, name_in, &xtype_in, &ndims_in, dimid_in,
                           &natts_in)) ERR;
            if (strcmp(name_in, VAR_NAME) || xtype_in != NC_SHORT ||
                ndims_in != NDIMS || dimid_in[0] != 0 || dimid_in[1] != 1 ||
                natts_in != 0) ERR;

            if (nc_close(ncid)) ERR;
        }
    }
    SUMMARIZE_ERR;
#ifdef USE_SZIP
    printf("*** testing HDF5 compatibility with szip...");
    {
#define DEFLATE_LEVEL 9
#define MAX_NAME 100
#define NUM_CD_ELEM 10
/* HDF5 defines this... */
#define DEFLATE_NAME "deflate"
#define DIM1_LEN 3000
#define GRP_NAME "George_Washington"
#define BATTLE_RECORD "Battle_Record"
        hid_t fileid, grpid, spaceid, datasetid;
        int data_out[DIM1_LEN], data_in[DIM1_LEN];
        hsize_t dims[1] = {DIM1_LEN};
        hid_t propid;
        char name_in[MAX_NAME + 1];
        int ncid, ndims_in, nvars_in, ngatts_in, unlimdimid_in, ngrps_in;
        int nc_grpid;
        int dimid_in[1], natts_in;
        nc_type xtype_in;
        int i;

        for (i = 0; i < DIM1_LEN; i++)
            data_out[i] = i;

        /* Open file and create group. */
        /* NOTE: H5Gcreate/H5Dcreate below use the HDF5 1.6 call signatures;
         * they rely on the versioned-API mapping of the build. */
        if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT,
                                H5P_DEFAULT)) < 0) ERR;
        if ((grpid = H5Gcreate(fileid, GRP_NAME, 0)) < 0) ERR;

        /* Write an array of bools, with szip compression. */
        if ((propid = H5Pcreate(H5P_DATASET_CREATE)) < 0) ERR;
        if (H5Pset_layout(propid, H5D_CHUNKED)) ERR;
        if (H5Pset_chunk(propid, 1, dims)) ERR;
        if (H5Pset_szip(propid, H5_SZIP_EC_OPTION_MASK, 32)) ERR;
        if ((spaceid = H5Screate_simple(1, dims, dims)) < 0) ERR;
        if ((datasetid = H5Dcreate(grpid, BATTLE_RECORD, H5T_NATIVE_INT,
                                   spaceid, propid)) < 0) ERR;
        if (H5Dwrite(datasetid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                     data_out) < 0) ERR;
        if (H5Dclose(datasetid) < 0 ||
            H5Pclose(propid) < 0 ||
            H5Sclose(spaceid) < 0 ||
            H5Gclose(grpid) < 0 ||
            H5Fclose(fileid) < 0) ERR;

        /* Open the file with netCDF and check it. */
        if (nc_open(FILE_NAME, NC_NOWRITE, &ncid)) ERR;
        if (nc_inq(ncid, &ndims_in, &nvars_in, &ngatts_in, &unlimdimid_in)) ERR;
        if (ndims_in != 0 || nvars_in != 0 || ngatts_in != 0 ||
            unlimdimid_in != -1) ERR;
        if (nc_inq_grps(ncid, &ngrps_in, &nc_grpid)) ERR;
        if (ngrps_in != 1) ERR;
        if (nc_inq(nc_grpid, &ndims_in, &nvars_in, &ngatts_in,
                   &unlimdimid_in)) ERR;
        if (ndims_in != 1 || nvars_in != 1 || ngatts_in != 0 ||
            unlimdimid_in != -1) ERR;

        /* Check the variable. */
        if (nc_inq_var(nc_grpid, 0, name_in, &xtype_in, &ndims_in, dimid_in,
                       &natts_in)) ERR;
        if (strcmp(name_in, BATTLE_RECORD) || xtype_in != NC_INT ||
            ndims_in != 1 || dimid_in[0] != 0 || natts_in != 0) ERR;

        /* Check the data. */
        if (nc_get_var(nc_grpid, 0, data_in)) ERR;
        for (i = 0; i < DIM1_LEN; i++)
            if (data_in[i] != data_out[i]) ERR;

        if (nc_close(ncid)) ERR;
    }
    SUMMARIZE_ERR;
#endif /* USE_SZIP */
    FINAL_RESULTS;
}
/**
 * Open (and, if needed, extend) or create a dataset named `name` under
 * `loc`, then write `data` into it.
 *
 * @param loc         parent object (file or group)
 * @param name        dataset name
 * @param type        memory datatype of `data`
 * @param targettype  on-disk datatype used when the dataset is created
 * @param srcspace    memory dataspace of `data`
 * @param targetspace file dataspace; -1 means "use srcspace / H5S_ALL"
 * @param data        buffer to write
 * @param chunked     create with a chunked layout (chunk = full extent)
 * @return the dataset handle (caller closes it)
 * @throws H5Exception on any HDF5 failure
 */
hid_t H5Dataset::create(H5Object & loc, const std::string & name, const hid_t type, const hid_t targettype, const hid_t srcspace, const hid_t targetspace, void * data, const bool chunked)
{
    herr_t err;
    hid_t dataset;

    if (H5Lexists(loc.getH5Id(), name.c_str(), H5P_DEFAULT) > 0)
    {
        // The dataset already exists: open it and, when a target space is
        // given, grow its extent to match.
        dataset = H5Oopen(loc.getH5Id(), name.c_str(), H5P_DEFAULT);
        if (dataset < 0)
        {
            throw H5Exception(__LINE__, __FILE__, _("Cannot open the dataset: %s"), name.c_str());
        }

        if (targetspace > 0)
        {
            hid_t space = H5Dget_space(dataset);
            if (space < 0)
            {
                throw H5Exception(__LINE__, __FILE__, _("Cannot get the dataspace associated with dataset named %s."), name.c_str());
            }

            hsize_t * dims = 0;
            hsize_t * ddims = 0;
            hsize_t * maxdims = 0;
            hsize_t * dmaxdims = 0;

            try
            {
                int ndims = H5Sget_simple_extent_ndims(space);
                if (ndims < 0)
                {
                    H5Sclose(space); // fix: don't leak the dataspace on error
                    throw H5Exception(__LINE__, __FILE__, _("Invalid source space"));
                }
                int dndims = H5Sget_simple_extent_ndims(targetspace);
                if (dndims < 0)
                {
                    H5Sclose(space); // fix: don't leak the dataspace on error
                    throw H5Exception(__LINE__, __FILE__, _("Invalid target space"));
                }

                // Bug fix: the original re-declared dims/ddims/maxdims/dmaxdims
                // here with `hsize_t *`, shadowing the outer pointers. On an
                // exception the catch block then deleted the outer nulls and
                // the real allocations leaked. Assign to the outer pointers
                // so the catch block frees the right memory.
                dims = new hsize_t[ndims];
                ddims = new hsize_t[dndims];
                maxdims = new hsize_t[ndims];
                dmaxdims = new hsize_t[dndims];

                H5Sget_simple_extent_dims(space, dims, maxdims);
                H5Sget_simple_extent_dims(targetspace, ddims, dmaxdims);
                H5Sclose(space);

                if (ndims != dndims)
                {
                    throw H5Exception(__LINE__, __FILE__, _("Wrong dimensions."));
                }

                for (int i = 0; i < ndims; i++)
                {
                    // The maximum extent is fixed at creation time.
                    if (maxdims[i] != dmaxdims[i])
                    {
                        throw H5Exception(__LINE__, __FILE__, _("Cannot modify maximum dimensions."));
                    }

                    // Grow the dataset once as soon as any dimension needs it.
                    if (ddims[i] > dims[i])
                    {
                        err = H5Dset_extent(dataset, ddims);
                        if (err < 0)
                        {
                            throw H5Exception(__LINE__, __FILE__, _("Cannot modify dimension %d."), i);
                        }
                        break;
                    }
                }

                delete[] dims;
                delete[] ddims;
                delete[] maxdims;
                delete[] dmaxdims;
            }
            catch (const H5Exception & /*e*/)
            {
                delete[] dims;
                delete[] ddims;
                delete[] maxdims;
                delete[] dmaxdims;
                throw;
            }
        }
    }
    else
    {
        if (chunked)
        {
            // Chunked creation: use the full target extent as the chunk size.
            int ndims = H5Sget_simple_extent_ndims(targetspace);
            if (ndims < 0)
            {
                throw H5Exception(__LINE__, __FILE__, _("Invalid target space"));
            }
            hsize_t * dims = new hsize_t[ndims];
            H5Sget_simple_extent_dims(targetspace, dims, 0);

            hid_t dcpl = H5Pcreate(H5P_DATASET_CREATE);
            H5Pset_layout(dcpl, H5D_CHUNKED);
            err = H5Pset_chunk(dcpl, ndims, dims);
            delete[] dims;

            if (err < 0)
            {
                H5Pclose(dcpl);
                throw H5Exception(__LINE__, __FILE__, _("Cannot set the chunk dimensions: %s"), name.c_str());
            }

            dataset = H5Dcreate(loc.getH5Id(), name.c_str(), targettype, targetspace == -1 ? srcspace : targetspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
            H5Pclose(dcpl);
        }
        else
        {
            dataset = H5Dcreate(loc.getH5Id(), name.c_str(), targettype, targetspace == -1 ? srcspace : targetspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        }

        if (dataset < 0)
        {
            throw H5Exception(__LINE__, __FILE__, _("Cannot create the dataset: %s"), name.c_str());
        }
    }

    err = H5Dwrite(dataset, type, srcspace, targetspace == -1 ? H5S_ALL : targetspace, H5P_DEFAULT, data);
    if (err < 0)
    {
        throw H5Exception(__LINE__, __FILE__, _("Cannot write data in the dataset."));
    }

    return dataset;
}
/*
 * To exercise the coding for the re-read of the object header for SWMR access.
 * When the object header is read in H5O_load() of H5Ocache.c, the library initially reads
 * 512 bytes for decoding, then reads the remaining bytes later if the object header is
 * greater than 512 bytes. For SWMR access, the read should be done all at one time.
 *
 * Strategy: a 1024-int compact dataset stores its raw data inside the
 * object header, guaranteeing a header larger than the 512-byte
 * speculative read; re-opening it under SWMR then forces the full read.
 */
static herr_t
test_ohdr_swmr(void)
{
    hid_t fid = -1;             /* File ID */
    hid_t fapl = -1;            /* File access property list */
    hid_t did = -1;             /* Dataset ID */
    hid_t sid = -1;             /* Dataspace ID */
    hid_t plist = -1;           /* Dataset creation property list */
    size_t compact_size = 1024; /* The size of compact dataset */
    int wbuf[1024];             /* Buffer for writing */
    hsize_t dims[1];            /* Dimension sizes */
    unsigned int n = 0, u;      /* Local index variables */
    H5O_info_t obj_info;        /* Information for the object */

    TESTING("exercise the coding for the re-read of the object header for SWMR access");

    /* File access property list */
    if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0)
        FAIL_STACK_ERROR

    /* Set to use latest library format */
    if(H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0)
        FAIL_STACK_ERROR

    /* Initialize data */
    for(u = 0; u < compact_size; u++)
        wbuf[u] = n++;

    /* Create the file with the latest format (ensure version 2 object header for SWMR) */
    if((fid = H5Fcreate(FILE_OHDR_SWMR, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
        TEST_ERROR

    /* Create a small data space for compact dataset */
    dims[0] = (hsize_t)compact_size;
    if((sid = H5Screate_simple(1, dims, NULL)) < 0)
        FAIL_STACK_ERROR

    /* Create property list for compact dataset creation */
    if((plist = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        FAIL_STACK_ERROR

    /* Set the layout for the compact dataset */
    if(H5Pset_layout(plist, H5D_COMPACT) < 0)
        FAIL_STACK_ERROR

    /* Create a compact dataset */
    if((did = H5Dcreate2(fid, DSET_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, plist, H5P_DEFAULT)) < 0)
        FAIL_STACK_ERROR

    /* Write to the compact dataset */
    if(H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf) < 0)
        FAIL_STACK_ERROR

    /* Close the dataset */
    if(H5Dclose(did) < 0)
        FAIL_STACK_ERROR

    /* Close the file */
    if(H5Fclose(fid) < 0)
        FAIL_STACK_ERROR

    /* Open the file for SWMR write and latest format */
    if((fid = H5Fopen(FILE_OHDR_SWMR, H5F_ACC_RDWR|H5F_ACC_SWMR_WRITE, fapl)) < 0)
        FAIL_STACK_ERROR

    /* Open the compact dataset */
    if((did = H5Dopen2(fid, DSET_NAME, H5P_DEFAULT)) < 0)
        FAIL_STACK_ERROR

    /* Get the object information */
    if(H5Oget_info(did, &obj_info) < 0)
        FAIL_STACK_ERROR

    /* The size of object header should be greater than the speculative read size of 512 */
    /* This will exercise the coding for the re-read of the object header for SWMR access */
    if(obj_info.hdr.space.total < 512)
        TEST_ERROR;

    /* Close the dataset */
    if(H5Dclose(did) < 0)
        FAIL_STACK_ERROR

    /* Close the file */
    if(H5Fclose(fid) < 0)
        FAIL_STACK_ERROR

    /* Close the dataspace */
    if(H5Sclose(sid) < 0)
        FAIL_STACK_ERROR

    /* Close the dataset creation property list */
    if(H5Pclose(plist) < 0)
        FAIL_STACK_ERROR

    /* Close the file access property list */
    if(H5Pclose(fapl) < 0)
        FAIL_STACK_ERROR

    /* Remove the test file */
    if(HDremove(FILE_OHDR_SWMR) < 0)
        FAIL_STACK_ERROR

    PASSED();

    return 0;

error:
    /* Best-effort cleanup with HDF5 error reporting suppressed */
    H5E_BEGIN_TRY {
        H5Fclose(fid);
        H5Dclose(did);
        H5Sclose(sid);
        H5Pclose(plist);
        H5Pclose(fapl);
        HDremove(FILE_OHDR_SWMR);
    } H5E_END_TRY;

    return -1;
} /* test_ohdr_swmr() */
int main(int argc, char **argv) { struct rlimit rlim; getrlimit(RLIMIT_STACK, &rlim); rlim.rlim_cur = 1024 * 1024 * 1024; setrlimit(RLIMIT_STACK, &rlim); hid_t file_id, dataset_id, dataspace_id, status, property_id; hsize_t dims[2]; if(argc < 4) { usage(stderr, argv[0]); exit(1); } file_id = H5Fopen(argv[1], H5F_ACC_RDONLY, H5P_DEFAULT); dataset_id = H5Dopen2(file_id, "x", H5P_DEFAULT); dataspace_id = H5Dget_space(dataset_id); status = H5Sget_simple_extent_dims(dataspace_id, dims, NULL); hsize_t a_rows = dims[0], a_cols = dims[1]; double a[a_rows][a_cols]; status = H5Dread(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, a); status = H5Sclose(dataspace_id); status = H5Dclose(dataset_id); status = H5Fclose(file_id); file_id = H5Fopen(argv[2], H5F_ACC_RDONLY, H5P_DEFAULT); dataset_id = H5Dopen2(file_id, "x", H5P_DEFAULT); dataspace_id = H5Dget_space(dataset_id); status = H5Sget_simple_extent_dims(dataspace_id, dims, NULL); hsize_t b_rows = dims[0], b_cols = dims[1]; double b[b_rows][b_cols]; status = H5Dread(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, b); status = H5Sclose(dataspace_id); status = H5Dclose(dataset_id); status = H5Fclose(file_id); if(a_cols != b_rows) { fprintf(stderr, "Error: matrix dimension mismatch.\n"); exit(1); } size_t m = a_rows; size_t n = a_cols; size_t p = b_cols; double c[m][p]; matrix_multiply_block(&a[0][0], &b[0][0], &c[0][0], m, n, p, BLOCKSIZE); // Write out the output dims[0] = m; dims[1] = p; file_id = H5Fcreate(argv[3], H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); status = H5Screate_simple(2, dims, NULL); property_id = H5Pcreate(H5P_DATASET_CREATE); status = H5Pset_layout(property_id, H5D_CONTIGUOUS); dataset_id = H5Dcreate(file_id, "x", H5T_NATIVE_DOUBLE, dataspace_id, H5P_DEFAULT, property_id, H5P_DEFAULT); status = H5Dwrite(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &c[0][0]); status = H5Sclose(dataspace_id); status = H5Dclose(dataset_id); status = H5Fclose(file_id); status = 
H5Pclose(property_id); }
/****************************************************************
**
**  test_compact_vlstring(): Test code for storing variable-length
**      (VL) strings in compact datasets.  Writes four VL strings,
**      reads them back, and compares lengths and contents.
**
****************************************************************/
static void
test_compact_vlstring(void)
{
    const char *wdata[SPACE1_DIM1] = {"one", "two", "three", "four"}; /* Data to write */
    char *rdata[SPACE1_DIM1];   /* Information read in (library-allocated VL strings) */
    hid_t fid1;                 /* HDF5 File ID */
    hid_t dataset;              /* Dataset ID */
    hid_t sid1;                 /* Dataspace ID */
    hid_t tid1;                 /* Datatype ID */
    hid_t plist;                /* Dataset creation property list */
    hsize_t dims1[] = {SPACE1_DIM1};
    unsigned i;                 /* counting variable */
    herr_t ret;                 /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing VL Strings in compact dataset\n"));

    /* Create file */
    fid1 = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(fid1, FAIL, "H5Fcreate");

    /* Create dataspace for datasets */
    sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
    CHECK(sid1, FAIL, "H5Screate_simple");

    /* Create a VL string datatype: a C string with variable size */
    tid1 = H5Tcopy (H5T_C_S1);
    CHECK(tid1, FAIL, "H5Tcopy");
    ret = H5Tset_size (tid1,H5T_VARIABLE);
    CHECK(ret, FAIL, "H5Tset_size");

    /* Request compact layout (raw data stored in the object header) */
    plist = H5Pcreate(H5P_DATASET_CREATE);
    CHECK(plist, FAIL, "H5Pcreate");
    ret = H5Pset_layout(plist, H5D_COMPACT);
    CHECK(ret, FAIL, "H5Pset_layout");

    /* Create a dataset */
    dataset = H5Dcreate2(fid1, "Dataset5", tid1, sid1, H5P_DEFAULT, plist, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dcreate2");

    /* Write dataset to disk */
    ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
    CHECK(ret, FAIL, "H5Dwrite");

    /* Read dataset from disk; the library allocates the string buffers */
    ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
    CHECK(ret, FAIL, "H5Dread");

    /* Compare data read in against what was written */
    for(i = 0; i < SPACE1_DIM1; i++) {
        if(HDstrlen(wdata[i]) != HDstrlen(rdata[i])) {
            TestErrPrintf("VL data length don't match!, strlen(wdata[%d])=%d, strlen(rdata[%d])=%d\n",(int)i,(int)strlen(wdata[i]),(int)i,(int)strlen(rdata[i]));
            continue;
        } /* end if */
        if(HDstrcmp(wdata[i], rdata[i]) != 0) {
            TestErrPrintf("VL data values don't match!, wdata[%d]=%s, rdata[%d]=%s\n",(int)i,wdata[i],(int)i,rdata[i]);
            continue;
        } /* end if */
    } /* end for */

    /* Reclaim the read VL data (frees the library-allocated strings) */
    ret = H5Dvlen_reclaim(tid1, sid1, H5P_DEFAULT, rdata);
    CHECK(ret, FAIL, "H5Dvlen_reclaim");

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close datatype */
    ret = H5Tclose(tid1);
    CHECK(ret, FAIL, "H5Tclose");

    /* Close disk dataspace */
    ret = H5Sclose(sid1);
    CHECK(ret, FAIL, "H5Sclose");

    /* Close dataset create property list */
    ret = H5Pclose(plist);
    CHECK(ret, FAIL, "H5Pclose");

    /* Close file */
    ret = H5Fclose(fid1);
    CHECK(ret, FAIL, "H5Fclose");
} /*test_compact_vlstrings*/
int main() { printf("\n*** Checking HDF5 dimscales detach.\n"); printf("*** Creating a file with two vars with one dimension scale..."); { #if 0 hid_t cparmsid; #endif hid_t fileid, grpid, spaceid, var1_id, var2_id, dimscaleid; hid_t fcpl_id, fapl_id, create_propid, access_propid; hsize_t dims[NDIMS] = {DIM_LEN}; char dimscale_wo_var[STR_LEN]; float data = 42; /* Create file. */ if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR; if (H5Pset_fclose_degree(fapl_id, H5F_CLOSE_STRONG)) ERR; if (H5Pset_cache(fapl_id, 0, CHUNK_CACHE_NELEMS, CHUNK_CACHE_SIZE, CHUNK_CACHE_PREEMPTION) < 0) ERR; if (H5Pset_libver_bounds(fapl_id, H5F_LIBVER_18, H5F_LIBVER_18) < 0) ERR; if ((fcpl_id = H5Pcreate(H5P_FILE_CREATE)) < 0) ERR; if (H5Pset_link_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED)) < 0) ERR; if (H5Pset_attr_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED)) < 0) ERR; if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, fcpl_id, fapl_id)) < 0) ERR; if (H5Pclose(fapl_id) < 0) ERR; if (H5Pclose(fcpl_id) < 0) ERR; if ((grpid = H5Gopen2(fileid, "/", H5P_DEFAULT)) < 0) ERR; /* Create dimension scale. */ if ((create_propid = H5Pcreate(H5P_DATASET_CREATE)) < 0) ERR; if (H5Pset_attr_creation_order(create_propid, H5P_CRT_ORDER_TRACKED| H5P_CRT_ORDER_INDEXED) < 0) ERR; if ((spaceid = H5Screate_simple(1, dims, dims)) < 0) ERR; if ((dimscaleid = H5Dcreate1(grpid, DIMSCALE_NAME, H5T_IEEE_F32BE, spaceid, create_propid)) < 0) ERR; if (H5Sclose(spaceid) < 0) ERR; if (H5Pclose(create_propid) < 0) ERR; sprintf(dimscale_wo_var, "%s%10d", DIM_WITHOUT_VARIABLE, DIM_LEN); if (H5DSset_scale(dimscaleid, dimscale_wo_var) < 0) ERR; /* Create a variable that uses this dimension scale. 
*/ if ((access_propid = H5Pcreate(H5P_DATASET_ACCESS)) < 0) ERR; if (H5Pset_chunk_cache(access_propid, CHUNK_CACHE_NELEMS, CHUNK_CACHE_SIZE, CHUNK_CACHE_PREEMPTION) < 0) ERR; if ((create_propid = H5Pcreate(H5P_DATASET_CREATE)) < 0) ERR; if (H5Pset_fill_value(create_propid, H5T_NATIVE_FLOAT, &data) < 0) ERR; if (H5Pset_layout(create_propid, H5D_CONTIGUOUS) < 0) ERR; if (H5Pset_attr_creation_order(create_propid, H5P_CRT_ORDER_TRACKED| H5P_CRT_ORDER_INDEXED) < 0) ERR; if ((spaceid = H5Screate_simple(NDIMS, dims, dims)) < 0) ERR; if ((var1_id = H5Dcreate2(grpid, VAR1_NAME, H5T_NATIVE_FLOAT, spaceid, H5P_DEFAULT, create_propid, access_propid)) < 0) ERR; if (H5Pclose(create_propid) < 0) ERR; if (H5Pclose(access_propid) < 0) ERR; if (H5Sclose(spaceid) < 0) ERR; if (H5DSattach_scale(var1_id, dimscaleid, 0) < 0) ERR; /* Create another variable that uses this dimension scale. */ if ((access_propid = H5Pcreate(H5P_DATASET_ACCESS)) < 0) ERR; if (H5Pset_chunk_cache(access_propid, CHUNK_CACHE_NELEMS, CHUNK_CACHE_SIZE, CHUNK_CACHE_PREEMPTION) < 0) ERR; if ((create_propid = H5Pcreate(H5P_DATASET_CREATE)) < 0) ERR; if (H5Pset_fill_value(create_propid, H5T_NATIVE_FLOAT, &data) < 0) ERR; if (H5Pset_layout(create_propid, H5D_CONTIGUOUS) < 0) ERR; if (H5Pset_attr_creation_order(create_propid, H5P_CRT_ORDER_TRACKED| H5P_CRT_ORDER_INDEXED) < 0) ERR; if ((spaceid = H5Screate_simple(NDIMS, dims, dims)) < 0) ERR; if ((var2_id = H5Dcreate2(grpid, VAR2_NAME, H5T_NATIVE_FLOAT, spaceid, H5P_DEFAULT, create_propid, access_propid)) < 0) ERR; if (H5Pclose(create_propid) < 0) ERR; if (H5Pclose(access_propid) < 0) ERR; if (H5Sclose(spaceid) < 0) ERR; if (H5DSattach_scale(var2_id, dimscaleid, 0) < 0) ERR; /* Now detach the scales and remove the dimscale. This doesn't * work if I reverse the order of the statements. */ if (H5DSdetach_scale(var2_id, dimscaleid, 0) < 0) ERR; if (H5DSdetach_scale(var1_id, dimscaleid, 0) < 0) ERR; /* Fold up our tents. 
*/ if (H5Dclose(var1_id) < 0) ERR; if (H5Dclose(dimscaleid) < 0) ERR; if (H5Gclose(grpid) < 0) ERR; if (H5Fclose(fileid) < 0) ERR; /* /\* Now read the file and check it. *\/ */ /* { */ /* hid_t fileid, spaceid = 0, datasetid = 0; */ /* hsize_t num_obj, i; */ /* int obj_class; */ /* char obj_name[STR_LEN + 1]; */ /* char dimscale_name[STR_LEN+1]; */ /* htri_t is_scale; */ /* char label[STR_LEN+1]; */ /* int num_scales; */ /* hsize_t dims[1], maxdims[1]; */ /* H5G_stat_t statbuf; */ /* HDF5_OBJID_T dimscale_obj, vars_dimscale_obj; */ /* struct nc_hdf5_link_info link_info; */ /* hsize_t idx = 0; */ /* /\* Open the file. *\/ */ /* if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) ERR; */ /* if ((grpid = H5Gopen2(fileid, "/", H5P_DEFAULT)) < 0) ERR; */ /* /\* Loop through objects in the root group. *\/ */ /* if (H5Gget_num_objs(fileid, &num_obj) < 0) ERR; */ /* for (i = 0; i < num_obj; i++) */ /* { */ /* if (H5Literate(grpid, H5_INDEX_CRT_ORDER, H5_ITER_INC, */ /* &idx, visit_link, (void *)&link_info) < 0) ERR; */ /* printf("Encountered: HDF5 object link_info.name %s\n", link_info.name); */ /* /\* Deal with object based on its obj_class. *\/ */ /* switch(link_info.obj_type) */ /* { */ /* case H5I_GROUP: */ /* break; */ /* case H5I_DATASET: */ /* /\* Open the dataset. *\/ */ /* if ((datasetid = H5Dopen1(fileid, link_info.name)) < 0) ERR; */ /* if ((spaceid = H5Dget_space(datasetid)) < 0) ERR; */ /* if (H5Sget_simple_extent_dims(spaceid, dims, maxdims) < 0) ERR; */ /* if (maxdims[0] != DIM_LEN) ERR; */ /* if (H5Sclose(spaceid) < 0) ERR; */ /* /\* Is this a dimscale? *\/ */ /* if ((is_scale = H5DSis_scale(datasetid)) < 0) ERR; */ /* if (is_scale && strcmp(link_info.name, DIMSCALE_NAME)) ERR; */ /* if (is_scale) */ /* { */ /* /\* A dimscale comes with a NAME attribute, in */ /* * addition to its real name. 
*\/ */ /* if (H5DSget_scale_name(datasetid, dimscale_name, STR_LEN) < 0) ERR; */ /* if (strcmp(dimscale_name, dimscale_wo_var)) ERR; */ /* /\* fileno and objno uniquely identify an object and a */ /* * HDF5 file. *\/ */ /* if (H5Gget_objinfo(datasetid, ".", 1, &statbuf) < 0) ERR; */ /* dimscale_obj.fileno[0] = statbuf.fileno[0]; */ /* dimscale_obj.objno[0] = statbuf.objno[0]; */ /* dimscale_obj.fileno[1] = statbuf.fileno[1]; */ /* dimscale_obj.objno[1] = statbuf.objno[1]; */ /* /\*printf("scale statbuf.fileno = %d statbuf.objno = %d\n", */ /* statbuf.fileno, statbuf.objno);*\/ */ /* } */ /* else */ /* { */ /* /\* Here's how to get the number of scales attached */ /* * to the dataset's dimension 0. *\/ */ /* if ((num_scales = H5DSget_num_scales(datasetid, 0)) < 0) ERR; */ /* if (num_scales != 1) ERR; */ /* /\* Go through all dimscales for this var and learn about them. *\/ */ /* if (H5DSiterate_scales(datasetid, 0, NULL, alien_visitor, */ /* &vars_dimscale_obj) < 0) ERR; */ /* /\*printf("vars_dimscale_obj.fileno = %d vars_dimscale_obj.objno = %d\n", */ /* vars_dimscale_obj.fileno, vars_dimscale_obj.objno);*\/ */ /* /\* if (vars_dimscale_obj.fileno[0] != dimscale_obj.fileno[0] || *\/ */ /* /\* vars_dimscale_obj.objno[0] != dimscale_obj.objno[0] || *\/ */ /* /\* vars_dimscale_obj.fileno[1] != dimscale_obj.fileno[1] || *\/ */ /* /\* vars_dimscale_obj.objno[1] != dimscale_obj.objno[1]) ERR; *\/ */ /* /\* There's also a label for dimension 0. *\/ */ /* if (H5DSget_label(datasetid, 0, label, STR_LEN) < 0) ERR; */ /* /\*printf("found non-scale dataset %s, label %s\n", link_info.name, label);*\/ */ /* } */ /* if (H5Dclose(datasetid) < 0) ERR; */ /* break; */ /* case H5I_DATATYPE: */ /* break; */ /* default: */ /* printf("Unknown object!"); */ /* ERR; */ /* } */ /* } */ /* /\* Close up the shop. *\/ */ /* if (H5Fclose(fileid) < 0) ERR; */ /* }*/ } SUMMARIZE_ERR; FINAL_RESULTS; }
bool Hdf5Dataset::saveMap(const VecVecDouble &pose_reach, const VecVecDouble &spheres, const VecDouble &ri, const double resolution) { if(!checkPath(this->path_)) { createPath(this->path_); } const char *filepath = this->path_.c_str(); const char *name = this->filename_.c_str(); char fullpath[100]; strcpy(fullpath, filepath); strcat(fullpath, name); ROS_INFO("Saving map %s", this->filename_.c_str()); this->file_ = H5Fcreate(fullpath, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); this->group_poses_ = H5Gcreate(this->file_, "/Poses", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); this->group_spheres_ = H5Gcreate(this->file_, "/Spheres", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); ROS_INFO("Saving poses in reachability map"); const hsize_t ndims = 2; const hsize_t ncols = 10; int posSize =pose_reach.size(); int chunk_size; int PY = 10; if (posSize % 2) { chunk_size = (posSize / 2) + 1; } else { chunk_size = (posSize / 2); } // Create Dataspace hsize_t dims[ndims] = {0, ncols}; // Starting with an empty buffer hsize_t max_dims[ndims] = {H5S_UNLIMITED, ncols}; // Creating dataspace hid_t file_space = H5Screate_simple(ndims, dims, max_dims); // Create Dataset Property list hid_t plist = H5Pcreate(H5P_DATASET_CREATE); H5Pset_layout(plist, H5D_CHUNKED); hsize_t chunk_dims[ndims] = {chunk_size, ncols}; H5Pset_chunk(plist, ndims, chunk_dims); // Create the datset this->poses_dataset_ = H5Dcreate(this->group_poses_, "poses_dataset", H5T_NATIVE_FLOAT, file_space, H5P_DEFAULT, plist, H5P_DEFAULT); // Closing resources H5Pclose(plist); H5Sclose(file_space); // Creating the first buffer hsize_t nlines = chunk_size; float *buffer = new float[nlines * ncols]; float **dset1_data = new float *[nlines]; for (hsize_t i = 0; i < nlines; ++i) { dset1_data[i] = &buffer[i * ncols]; } // Data for the first chunk for (int i = 0; i < chunk_size; i++) { for (int j = 0; j < PY; j++) { dset1_data[i][j] = pose_reach[i][j]; } } // Memory dataspace indicating size of the buffer dims[0] = chunk_size; dims[1] = 
ncols; hid_t mem_space = H5Screate_simple(ndims, dims, NULL); // Extending dataset dims[0] = chunk_size; dims[1] = ncols; H5Dset_extent(this->poses_dataset_, dims); // Selecting hyperslab on the dataset file_space = H5Dget_space(this->poses_dataset_); hsize_t start[2] = {0, 0}; hsize_t count[2] = {chunk_size, ncols}; H5Sselect_hyperslab(file_space, H5S_SELECT_SET, start, NULL, count, NULL); // Writing buffer to the dataset H5Dwrite(this->poses_dataset_, H5T_NATIVE_FLOAT, mem_space, file_space, H5P_DEFAULT, buffer); // Closing file dataspace H5Sclose(file_space); // Data for the Second chunk for (int i = chunk_size; i < posSize; i++) { for (int j = 0; j < PY; j++) { dset1_data[i - chunk_size][j] = pose_reach[i][j]; } } // Resizing new memory dataspace indicating new size of the buffer dims[0] = posSize - chunk_size; dims[1] = ncols; H5Sset_extent_simple(mem_space, ndims, dims, NULL); // Extend dataset dims[0] = posSize; dims[1] = ncols; H5Dset_extent(this->poses_dataset_, dims); // Selecting hyperslab file_space = H5Dget_space(this->poses_dataset_); start[0] = chunk_size; start[1] = 0; count[0] = posSize - chunk_size; count[1] = ncols; H5Sselect_hyperslab(file_space, H5S_SELECT_SET, start, NULL, count, NULL); // Writing buffer to dataset H5Dwrite(this->poses_dataset_, H5T_NATIVE_FLOAT, mem_space, file_space, H5P_DEFAULT, buffer); // Closing all the resources delete[] dset1_data; delete[] buffer; // Creating Sphere dataset ROS_INFO("Saving spheres in Reachability map"); hid_t sphere_dataspace; const int SX = spheres.size(); const int SY = 4; hsize_t dims2[2]; // dataset dimensions dims2[0] = SX; dims2[1] = SY; double dset2_data[SX][SY]; for(int i=0;i<spheres.size();++i) { for(int j=0;j<spheres[i].size();++j) { dset2_data[i][j] = spheres[i][j]; } for (int j = 3; j < SY; j++) { dset2_data[i][j] = ri[i]; } } sphere_dataspace = H5Screate_simple(2, dims2, NULL); this->sphere_dataset_ = H5Dcreate2(this->group_spheres_, "sphere_dataset", H5T_NATIVE_DOUBLE, sphere_dataspace, 
H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); H5Dwrite(this->sphere_dataset_, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset2_data); // Creating attribute hsize_t attr_dims; float attr_data[1]; attr_data[0] = resolution; attr_dims = 1; sphere_dataspace = H5Screate_simple(1, &attr_dims, NULL); this->attr_ = H5Acreate2(this->sphere_dataset_, "Resolution", H5T_NATIVE_FLOAT, sphere_dataspace, H5P_DEFAULT, H5P_DEFAULT); H5Awrite(this->attr_, H5T_NATIVE_FLOAT, attr_data); //H5Aclose(this->attr_); // Closing all H5Sclose(sphere_dataspace); H5Sclose(file_space); H5Sclose(mem_space); close(); }
/*
 * Write the single-precision "unknowns" array for this process into the
 * shared dataset named by record_label, placing this process's blocks at
 * global_offset within the total_blocks extent.  Fortran-callable
 * (trailing-underscore name, all scalar arguments passed by pointer).
 */
void h5_write_unknowns_sp_(hid_t* file_identifier,
                           int* index,          /* index of var to write */
                           int* nvar,           /* total number of variables */
                           int* nxb,            /* # of zones to store in x */
                           int* nyb,            /* # of zones to store in y */
                           int* nzb,            /* # of zones to store in z */
                           int* nguard,         /* # of guardcells in pass */
                           int* maximum_blocks, /* maximum num of blocks */
                           float* unknowns,     /* [mblk][NZB][NYB][NXB][nvar] */
                           char record_label[5],/* add char-null termination */
                           int* local_blocks,
                           int* total_blocks,
                           int* global_offset)
{
  hid_t dataspace, dataset, memspace, dxfer_template, dataset_plist;
  herr_t status;

  int rank;
  hsize_t dimens_4d[4], dimens_5d[5];

  hsize_t start_4d[4];
  hsize_t stride_4d[4], count_4d[4];

#ifdef CHUNK
  hsize_t dimens_chunk[4];
#endif

  char record_label_new[5];

  int ierr;

  /* the variable names are 4 characters long -- copy this into
     record_label_new, the 5th character is for the \0 termination */
  strncpy(record_label_new, record_label,4);
  *(record_label_new + 4) = '\0';

  /* set the dimensions of the dataset: all blocks across all processes,
     interior zones only (no guardcells) */
  rank = 4;
  dimens_4d[0] = *total_blocks;
  dimens_4d[1] = *nzb;
  dimens_4d[2] = *nyb;
  dimens_4d[3] = *nxb;

  dataspace = H5Screate_simple(rank, dimens_4d, NULL);

#ifdef DEBUG_IO
  printf("UNKNOWNS: dataspace = %d\n", (int) dataspace);
#endif

  dataset_plist = H5Pcreate(H5P_DATASET_CREATE);

#ifdef CHUNK
  /* set the layout to chunked */
  ierr = H5Pset_layout(dataset_plist, H5D_CHUNKED);

  /* create a chunk containing 10 blocks worth of data */
  dimens_chunk[0] = 10;
  dimens_chunk[1] = *nzb;
  dimens_chunk[2] = *nyb;
  dimens_chunk[3] = *nxb;
  ierr = H5Pset_chunk(dataset_plist, 4, dimens_chunk);
#endif

  /* HDF5 1.6-style H5Dcreate (5 arguments) */
  dataset = H5Dcreate(*file_identifier, record_label_new,
                      H5T_NATIVE_FLOAT, dataspace, dataset_plist);

#ifdef DEBUG_IO
  printf("UNKNOWNS: dataset = %d\n", (int) dataset);
#endif

  /* create the hyperslab -- this will differ on the different processors:
     each process writes local_blocks blocks starting at its global_offset */
  start_4d[0] = (hsize_t) (*global_offset);
  start_4d[1] = 0;
  start_4d[2] = 0;
  start_4d[3] = 0;

  stride_4d[0] = 1;
  stride_4d[1] = 1;
  stride_4d[2] = 1;
  stride_4d[3] = 1;

  count_4d[0] = (hsize_t) (*local_blocks);
  count_4d[1] = *nzb;
  count_4d[2] = *nyb;
  count_4d[3] = *nxb;

  status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET,
                               start_4d, stride_4d, count_4d, NULL);

#ifdef DEBUG_IO
  printf("UNKNOWNS: hyperslab selection = %d\n", (int) status);
#endif

  /* create the memory space -- we can get away with a simple memory
     space for the unknowns, since we are passing a contiguous block of
     memory now, and the block count is the last index in FORTRAN.
     NOTE(review): the memory space includes guardcells (k3d/k2d are
     presumably 0/1 direction flags defined elsewhere) and spans nvar,
     while the file selection above covers only interior zones of one
     variable; no hyperslab is selected on memspace here, so the element
     counts appear to match only when nguard==0 and nvar==1 -- TODO
     confirm against the callers. */
  rank = 5;
  dimens_5d[0] = *local_blocks;
  dimens_5d[1] = *nzb+(*nguard)*2*k3d;
  dimens_5d[2] = *nyb+(*nguard)*2*k2d;
  dimens_5d[3] = *nxb+(*nguard)*2;
  dimens_5d[4] = *nvar;

  memspace = H5Screate_simple(rank, dimens_5d, NULL);

#ifdef DEBUG_IO
  printf("UNKNOWNS: memspace = %d\n", (int) memspace);
#endif

  /* setting the transfer template (tuned dataset-transfer property list) */
  flash_tune_plist(&dxfer_template);

#ifdef DEBUG_IO
  printf("UNKNOWNS: dxfer_template = %d\n", (int) dxfer_template);
#endif

  /* write the data */
  status = H5Dwrite(dataset, H5T_NATIVE_FLOAT, memspace, dataspace,
                    dxfer_template, unknowns);

#ifdef DEBUG_IO
  printf("UNKNOWNS: wrote unknowns, status = %d\n", (int) status);
#endif

  H5Pclose(dxfer_template);
  H5Sclose(memspace);
  H5Sclose(dataspace);
  H5Dclose(dataset);
}
int dump_hdf_file(const float *data, int docompression) { hid_t file_id, dataset_id, propid; hid_t file_spaceid, mem_spaceid, access_plistid, xfer_plistid; hsize_t dims[NDIMS] = {X_LEN, Y_LEN, Z_LEN}; hsize_t start[NDIMS] = {0, 0, 0}; hsize_t count[NDIMS] = {1, 1, Z_LEN}; /* create file */ file_id = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); /* create property for dataset */ propid = H5Pcreate(H5P_DATASET_CREATE); if (docompression) { if (H5Pset_layout(propid, H5D_CHUNKED) < 0) ERR; if (H5Pset_chunk(propid, NDIMS, dims) < 0) ERR; /* values[0]=9; */ /* status = H5Pset_filter(propid, H5Z_FILTER_DEFLATE,0,1,&values[0]); */ /* printf("deflat estatus is: %i\n",status); */ /* sets defalte level */ if (H5Pset_deflate(propid, 1)) ERR; } if ((file_spaceid = H5Screate_simple(NDIMS, dims, dims)) < 0) ERR; /* Set up the cache. */ if ((access_plistid = H5Pcreate(H5P_DATASET_ACCESS)) < 0) ERR; if (H5Pset_chunk_cache(access_plistid, CHUNK_CACHE_NELEMS, CHUNK_CACHE_SIZE, CHUNK_CACHE_PREEMPTION) < 0) ERR; /* Create the dataset. */ if ((dataset_id = H5Dcreate2(file_id, "dset", H5T_NATIVE_FLOAT, file_spaceid, H5P_DEFAULT, propid, access_plistid)) < 0) ERR; /* if ((file_spaceid = H5Dget_space(dataset_id)) < 0) ERR;*/ if ((mem_spaceid = H5Screate_simple(NDIMS, count, NULL)) < 0) ERR; if ((xfer_plistid = H5Pcreate(H5P_DATASET_XFER)) < 0) ERR; /* Write the dataset. */ for (start[0] = 0; start[0] < X_LEN; start[0]++) for (start[1] = 0; start[1] < Y_LEN; start[1]++) { if (H5Sselect_hyperslab(file_spaceid, H5S_SELECT_SET, start, NULL, count, NULL) < 0) ERR_RET; if (H5Dwrite(dataset_id, H5T_NATIVE_FLOAT, mem_spaceid, file_spaceid, xfer_plistid, data) < 0) ERR_RET; } /* Close property lists. */ if (H5Pclose(propid) < 0) ERR; if (H5Pclose(access_plistid) < 0) ERR; if (H5Pclose(xfer_plistid) < 0) ERR; /* Close spaces. */ if (H5Sclose(file_spaceid) < 0) ERR; if (H5Sclose(mem_spaceid) < 0) ERR; /* End access to the dataset and release resources used by it. 
*/ if (H5Dclose(dataset_id) < 0) ERR; /* close file */ if (H5Fclose(file_id) < 0) ERR; return 0; }
/* Example of using PHDF5 to create, write, and read compact dataset. * * Changes: Updated function to use a dynamically calculated size, * instead of the old SIZE #define. This should allow it * to function with an arbitrary number of processors. * * JRM - 8/11/04 */ void compact_dataset(void) { int i, j, mpi_size, mpi_rank, size, err_num=0; hbool_t use_gpfs = FALSE; hid_t iof, plist, dcpl, dxpl, dataset, filespace; hsize_t file_dims [DIM]; double * outme; double * inme; char dname[]="dataset"; herr_t ret; const char *filename; size = get_size(); for ( i = 0; i < DIM; i++ ) { file_dims[i] = size; } MPI_Comm_rank (MPI_COMM_WORLD, &mpi_rank); MPI_Comm_size (MPI_COMM_WORLD, &mpi_size); outme = HDmalloc((size_t)(size * size * sizeof(double))); VRFY((outme != NULL), "HDmalloc succeeded for outme"); inme = HDmalloc((size_t)(size * size * sizeof(double))); VRFY((outme != NULL), "HDmalloc succeeded for inme"); filename = GetTestParameters(); VRFY((mpi_size <= size), "mpi_size <= size"); plist = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type, use_gpfs); iof = H5Fcreate (filename, H5F_ACC_TRUNC, H5P_DEFAULT, plist); /* Define data space */ filespace = H5Screate_simple (DIM, file_dims, NULL); /* Create a compact dataset */ dcpl = H5Pcreate(H5P_DATASET_CREATE); VRFY((dcpl>=0), "dataset creation property list succeeded"); ret=H5Pset_layout(dcpl, H5D_COMPACT); VRFY((dcpl >= 0), "set property list for compact dataset"); ret=H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY); VRFY((ret >= 0), "set space allocation time for compact dataset"); dataset = H5Dcreate (iof, dname, H5T_NATIVE_DOUBLE, filespace, dcpl); VRFY((dataset >= 0), "H5Dcreate succeeded"); /* set up the collective transfer properties list */ dxpl = H5Pcreate (H5P_DATASET_XFER); VRFY((dxpl >= 0), ""); ret=H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE); VRFY((ret >= 0), "H5Pcreate xfer succeeded"); /* Recalculate data to write. Each process writes the same data. 
*/ for (i = 0; i < size; i++) for (j = 0; j < size; j++) outme[(i * size) + j] = (i+j)*1000; ret=H5Dwrite (dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, dxpl, outme); VRFY((ret >= 0), "H5Dwrite succeeded"); H5Pclose (dcpl); H5Pclose (plist); H5Dclose (dataset); H5Sclose (filespace); H5Fclose (iof); /* Open the file and dataset, read and compare the data. */ plist = create_faccess_plist(MPI_COMM_WORLD, MPI_INFO_NULL, facc_type, use_gpfs); iof = H5Fopen(filename, H5F_ACC_RDONLY, plist); VRFY((iof >= 0), "H5Fopen succeeded"); /* set up the collective transfer properties list */ dxpl = H5Pcreate (H5P_DATASET_XFER); VRFY((dxpl >= 0), ""); ret=H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE); VRFY((ret >= 0), "H5Pcreate xfer succeeded"); dataset = H5Dopen(iof, dname); VRFY((dataset >= 0), "H5Dcreate succeeded"); ret = H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, dxpl, inme); VRFY((ret >= 0), "H5Dread succeeded"); /* Verify data value */ for (i = 0; i < size; i++) for (j = 0; j < size; j++) if ( inme[(i * size) + j] != outme[(i * size) + j]) if(err_num++ < MAX_ERR_REPORT || VERBOSE_MED) printf("Dataset Verify failed at [%d][%d]: expect %f, got %f\n", i, j, outme[(i * size) + j], inme[(i * size) + j]); H5Pclose(plist); H5Pclose(dxpl); H5Dclose(dataset); H5Fclose(iof); HDfree(inme); HDfree(outme); }