int AMRreader:: readAMRmesh() { //if( blkxs_!=NULL ) return 0; hid_t file_id = H5Fopen( filename_.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT ); if( file_id<0 ) { debug1 << "Failed to open AMR file: " << filename_ << " when read in mesh.\n"; return -1; } hid_t gid = H5Gopen1( file_id, amr_grpname ); if( gid<0 ) { debug1 << "Failed to open AMR group in " << filename_ << " when read in mesh.\n"; return -2; } blkxs_ = new float[3*nblks_]; blkdx_ = new float[3*nblks_]; if( blkxs_==NULL || blkdx_==NULL ) { debug1 << "Failed to allocate blkxs_ or blkddx_ for " << filename_ << ".\n"; return -3; } hid_t xsid = H5Dopen1( gid, amr_crdname ); if( xsid<0 ) { debug1 << "Failed to open block coordinates in " << filename_ << ".\n"; return -4; } herr_t herr = H5Dread( xsid, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, blkxs_ ); H5Dclose(xsid); if( herr<0 ) { debug1 << "Failed to read block coordinates in " << filename_ << ".\n"; return -5; } hid_t dxid = H5Dopen1( gid, amr_stpname ); if( dxid<0 ) { debug1 << "Failed to open block steps in " << filename_ << ".\n"; return -6; } herr = H5Dread( dxid, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, blkdx_ ); H5Dclose(dxid); if( herr<0 ) { debug1 << "Failed to read block steps in " << filename_ << ".\n"; return -7; } H5Gclose( gid ); H5Fclose( file_id ); return 0; }
/** Select a different resolution from a multi-resolution image.
 * \ingroup mi2VPrp
 *
 * Validates \a depth against the file's available resolution levels,
 * regenerates the thumbnail chain if a non-zero depth is selected, and
 * re-opens the image (and, for MI_CLASS_REAL volumes, image-max /
 * image-min) datasets at the requested depth.
 *
 * Returns MI_NOERROR on success, MI_ERROR on any failure.
 *
 * Fix: the original never closed grp_id — not on the error returns and
 * not on success — leaking one HDF5 group handle per call. The group is
 * now closed on every path (the datasets opened relative to it remain
 * valid after the group is closed).
 */
int miselect_resolution(mihandle_t volume, int depth)
{
  hid_t grp_id;
  char path[MI2_MAX_PATH];

  if ( volume->hdf_id < 0 || depth > MI2_MAX_RESOLUTION_GROUP || depth < 0) {
    return (MI_ERROR);
  }
  grp_id = H5Gopen1(volume->hdf_id, MI_ROOT_PATH "/image");
  if (grp_id < 0) {
    return (MI_ERROR);
  }

  /* Check given depth with the available depth in file. Make sure the
     selected resolution does exist. */
  if (depth > volume->create_props->depth) {
    H5Gclose(grp_id);  /* was leaked */
    return (MI_ERROR);
  }
  else if (depth != 0) {
    if (minc_update_thumbnail(volume, grp_id, 0, depth) < 0) {
      H5Gclose(grp_id);  /* was leaked */
      return (MI_ERROR);
    }
  }

  volume->selected_resolution = depth;

  /* Swap the open image dataset for the one at the selected depth. */
  if (volume->image_id >= 0) {
    H5Dclose(volume->image_id);
  }
  sprintf(path, "%d/image", depth);
  volume->image_id = H5Dopen1(grp_id, path);

  /* Real-valued volumes also carry per-slice min/max datasets. */
  if (volume->volume_class == MI_CLASS_REAL) {
    if (volume->imax_id >= 0) {
      H5Dclose(volume->imax_id);
    }
    sprintf(path, "%d/image-max", depth);
    volume->imax_id = H5Dopen1(grp_id, path);

    if (volume->imin_id >= 0) {
      H5Dclose(volume->imin_id);
    }
    sprintf(path, "%d/image-min", depth);
    volume->imin_id = H5Dopen1(grp_id, path);
  }

  H5Gclose(grp_id);  /* was leaked on success, too */
  return (MI_NOERROR);
}
/** Return the byte size of the voxel datatype.
 *
 * Opens the full-resolution "image" dataset, queries its HDF5 datatype,
 * and stores the type's size in bytes into *voxel_size.
 *
 * Returns MI_NOERROR on success, MI_ERROR on any failure.
 *
 * Fix: the original leaked grp_id (and, on the type query failure,
 * dset_id as well) on the error returns; every path now releases the
 * handles it opened.
 */
int miget_data_type_size ( mihandle_t volume, misize_t *voxel_size )
{
  hid_t grp_id;
  hid_t dset_id;
  hid_t type_id;
  hid_t file_id = volume->hdf_id;

  grp_id = midescend_path ( file_id, MI_FULLIMAGE_PATH );
  if ( grp_id < 0 ) {
    return ( MI_ERROR );
  }

  dset_id = H5Dopen1 ( grp_id, "image" );
  if ( dset_id < 0 ) {
    H5Gclose ( grp_id );  /* was leaked */
    return ( MI_ERROR );
  }

  type_id = H5Dget_type ( dset_id );
  if ( type_id < 0 ) {
    H5Dclose ( dset_id );  /* was leaked */
    H5Gclose ( grp_id );   /* was leaked */
    return ( MI_ERROR );
  }

  *voxel_size = H5Tget_size ( type_id );

  H5Tclose ( type_id );
  H5Dclose ( dset_id );
  H5Gclose ( grp_id );
  return ( MI_NOERROR );
}
/* Read an entire dataset into `data`.
 *
 * Opens `file` read-write, descends into `group`, opens `dset`, and
 * reads the whole dataset (all of file and memory space) as HDF5 type
 * `type` into the caller-supplied buffer. All handles are released
 * before returning. No error checking is performed; `data` must be
 * large enough for the full dataset.
 */
void readSimple( char * file , char * group , char * dset , void * data , hid_t type ){

   hid_t fid = H5Fopen( file , H5F_ACC_RDWR , H5P_DEFAULT );
   hid_t gid = H5Gopen1( fid , group );
   hid_t did = H5Dopen1( gid , dset );

   H5Dread( did , type , H5S_ALL , H5S_ALL , H5P_DEFAULT , data );

   /* Release in reverse order of acquisition. */
   H5Dclose( did );
   H5Gclose( gid );
   H5Fclose( fid );
}
/// Check whether a dataset named `name` exists in `h5file`.
///
/// The check is performed by attempting to open the dataset via the
/// HDF5 C API on the file's location id: success means it exists (the
/// handle is closed again immediately); failure means it does not.
/// H5Dopen1 is used on HDF5 >= 1.8, where plain H5Dopen is a
/// version-mapped macro; older releases only provide the two-argument
/// H5Dopen.
bool hdfutil::HasDataSet (const H5::H5File & h5file, const std::string & name) {
    const hid_t loc_id = h5file.getLocId();
#if H5_VERS_MINOR >= 8
    const hid_t ds_id = H5Dopen1( loc_id, name.c_str());
#else
    const hid_t ds_id = H5Dopen( loc_id, name.c_str());
#endif
    if (ds_id < 0)
        return false;
    H5Dclose(ds_id);
    return true;
}
/* Query the extent of a dataset.
 *
 * Opens `dset` inside `group` of HDF5 file `file` and writes the
 * dataset's dimension lengths into the caller-supplied `dims` array
 * (which must have room for the dataset's rank). All handles are
 * released before returning. No error checking is performed.
 */
void getH5dims( char * file , char * group , char * dset , hsize_t * dims ){

   hid_t fid = H5Fopen( file , H5F_ACC_RDWR , H5P_DEFAULT );
   hid_t gid = H5Gopen1( fid , group );
   hid_t did = H5Dopen1( gid , dset );
   hid_t spc = H5Dget_space( did );

   /* Current extents only; max extents are not requested. */
   H5Sget_simple_extent_dims( spc , dims , NULL);

   H5Sclose( spc );
   H5Dclose( did );
   H5Gclose( gid );
   H5Fclose( fid );
}
int AMRreader:: GetInterfaceVariable( int vid, void* dat ) { std::string vname; hid_t mtype; switch(vid) { case(i_coor): vname = intf_coor_name; mtype=H5T_NATIVE_FLOAT; break; case(i_velo): vname = intf_velo_name; mtype=H5T_NATIVE_FLOAT; break; case(i_pres): vname = intf_pres_name; mtype=H5T_NATIVE_FLOAT; break; case(i_segt): vname = intf_segt_name; mtype=H5T_NATIVE_INT; break; default: debug1 << "Unknown variable id " << vid << " .\n"; return -1; } hid_t file_id = H5Fopen( filename_.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT ); if( file_id<0 ) { debug1 << "Failed to open AMR file: " << filename_ << " when read in mesh.\n"; return -2; } hid_t gid = H5Gopen1( file_id, intf_grp_name ); if( gid<0 ) { debug1 << "Failed to open interface group in " << filename_ << " when read in mesh.\n"; return -3; } hid_t dtid = H5Dopen1( gid, vname.c_str() ); if( dtid<0 ) { debug1 << "Failed to open interface variable " << vname << " in " << filename_ << ".\n"; return -4; } herr_t herr = H5Dread( dtid, mtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, dat ); H5Dclose(dtid); if( herr<0 ) { debug1 << "Failed to read interface variable " << vname << " in " << filename_ << ".\n"; return -5; } H5Gclose( gid ); H5Fclose( file_id ); return 0; }
/**
 * Read the 5x5 integer dataset "dataset1" from the HDF5 file at
 * `filename` and print its contents to stdout.
 *
 * Compiled to a no-op unless HDF5_AVAILABLE==1.
 *
 * Fixes: the original never closed the dataset or the file (handle
 * leak), did not check that the open calls succeeded, and printed the
 * matrix even when the read failed (uninitialized data).
 */
void UPCdata::readHDF5file(char* filename){
#if HDF5_AVAILABLE==1
  int matrix[5][5];
  hid_t file, dataset;  // File and dataset identifiers
  herr_t status;

  file = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
  if (file < 0)
    return;  // fix: don't proceed with an invalid file id

  dataset = H5Dopen1(file,"dataset1");
  if (dataset < 0) {
    H5Fclose(file);  // fix: close the file on error
    return;
  }

  status = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, matrix);
  if (status >= 0) {  // fix: only print data that was actually read
    for (int i=0; i < 5; i++) {
      for(int j=0; j < 5; j++)
        printf("%3d \n", matrix[i][j]);
      printf("\n");
    }
  }

  H5Dclose(dataset);  // fix: was leaked
  H5Fclose(file);     // fix: was leaked
#endif
}
int AMRreader:: readAMRdata() { // if( datbuf_!=NULL ) return 0; hid_t file_id = H5Fopen( filename_.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT ); if( file_id<0 ) { debug1 << "Failed to open AMR file: " << filename_ << " when read in mesh.\n"; return -1; } hid_t gid = H5Gopen1( file_id, amr_grpname ); if( gid<0 ) { debug1 << "Failed to open AMR group in " << filename_ << " when read in mesh.\n"; return -2; } datbuf_ = new float[ 5*blksz_*nblks_]; if( datbuf_==NULL ) { debug1 << "Failed to allocate datbuf_ for " << filename_ << ".\n"; return -3; } hid_t datid = H5Dopen1( gid, amr_datname ); if( datid<0 ) { debug1 << "Failed to open block data in " << filename_ << ".\n"; return -4; } herr_t herr = H5Dread( datid, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, datbuf_ ); H5Dclose(datid); if( herr<0 ) { debug1 << "Failed to read block data in " << filename_ << ".\n"; return -5; } H5Gclose( gid ); H5Fclose( file_id ); return 0; }
/* Read a rectangular patch of a dataset into `data`.
 *
 * Opens `dset` inside `group` of HDF5 file `file` and reads the
 * `dim`-dimensional region of size `loc_size` starting at `start`
 * (within a dataset of global size `glo_size`) into the contiguous
 * caller-supplied buffer. A hyperslab selection on the file space,
 * paired with a memory space of the local size, restricts the read to
 * the patch. All handles are released before returning. No error
 * checking is performed.
 */
void readPatch( char * file , char * group , char * dset , void * data , hid_t type , int dim , int * start , int * loc_size , int * glo_size){

   hid_t fid = H5Fopen( file , H5F_ACC_RDWR , H5P_DEFAULT );
   hid_t gid = H5Gopen1( fid , group );
   hid_t did = H5Dopen1( gid , dset );

   /* Per-dimension descriptions of the memory buffer and the file
    * region: unit stride, unit block, count = local size. */
   hsize_t mem_dims[dim];
   hsize_t file_dims[dim];
   hsize_t offset[dim];
   hsize_t stride[dim];
   hsize_t count[dim];
   hsize_t block[dim];

   int d;
   for( d=0 ; d<dim ; ++d ){
      mem_dims[d]  = loc_size[d];
      file_dims[d] = glo_size[d];
      offset[d]    = start[d];
      stride[d]    = 1;
      count[d]     = loc_size[d];
      block[d]     = 1;
   }

   hid_t mspace = H5Screate_simple(dim,mem_dims,NULL);
   hid_t fspace = H5Screate_simple(dim,file_dims,NULL);
   H5Sselect_hyperslab( fspace , H5S_SELECT_SET , offset , stride , count , block );

   H5Dread( did , type , mspace , fspace , H5P_DEFAULT , data );

   H5Sclose( mspace );
   H5Sclose( fspace );
   H5Dclose( did );
   H5Gclose( gid );
   H5Fclose( fid );
}
int main() { printf("\n*** Checking HDF5 dimscales some more.\n"); printf("*** Creating a file with one var with one dimension scale..."); { hid_t fileid, spaceid, datasetid, dimscaleid, cparmsid; hsize_t dims[NDIMS] = {DIM1_LEN}, maxdims[NDIMS] = {H5S_UNLIMITED}; /* Create file. */ if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) ERR; /* Create the space that will be used both for the dimscale and * the 1D dataset that will attach it. */ if ((spaceid = H5Screate_simple(NDIMS, dims, maxdims)) < 0) ERR; /* Modify dataset creation properties, i.e. enable chunking. */ dims[0] = 1; if ((cparmsid = H5Pcreate(H5P_DATASET_CREATE)) < 0) ERR; if (H5Pset_chunk(cparmsid, NDIMS, dims) < 0) ERR; /* Create our dimension scale, as an unlimited dataset. */ if ((dimscaleid = H5Dcreate(fileid, DIMSCALE_NAME, H5T_NATIVE_INT, spaceid, cparmsid)) < 0) ERR; if (H5DSset_scale(dimscaleid, NAME_ATTRIBUTE) < 0) ERR; /* Create a variable which uses it. */ if ((datasetid = H5Dcreate(fileid, VAR1_NAME, H5T_NATIVE_INT, spaceid, cparmsid)) < 0) ERR; if (H5DSattach_scale(datasetid, dimscaleid, 0) < 0) ERR; if (H5DSset_label(datasetid, 0, DIMSCALE_LABEL) < 0) ERR; /* Fold up our tents. */ if (H5Dclose(dimscaleid) < 0 || H5Dclose(datasetid) < 0 || H5Sclose(spaceid) < 0 || H5Fclose(fileid) < 0) ERR; } SUMMARIZE_ERR; printf("*** Checking that one var, one dimscale file can be read..."); { hid_t fileid, spaceid = 0, datasetid = 0; hsize_t num_obj, i; int obj_class; char obj_name[NC_MAX_NAME + 1]; char dimscale_name[NC_MAX_NAME+1]; htri_t is_scale; char label[NC_MAX_NAME+1]; int num_scales; hsize_t dims[1], maxdims[1]; H5G_stat_t statbuf; HDF5_OBJID_T dimscale_obj, vars_dimscale_obj; /* Open the file. */ if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) ERR; /* Loop through objects in the root group. */ if (H5Gget_num_objs(fileid, &num_obj) < 0) ERR; for (i=0; i<num_obj; i++) { /* Get the type (i.e. group, dataset, etc.), and the name of * the object. 
*/ if ((obj_class = H5Gget_objtype_by_idx(fileid, i)) < 0) ERR; if (H5Gget_objname_by_idx(fileid, i, obj_name, NC_MAX_NAME) < 0) ERR; /*printf("\nEncountered: HDF5 object obj_class %d obj_name %s\n", obj_class, obj_name);*/ /* Deal with object based on its obj_class. */ switch(obj_class) { case H5G_GROUP: break; case H5G_DATASET: /* Open the dataset. */ if ((datasetid = H5Dopen1(fileid, obj_name)) < 0) ERR; /* This should be an unlimited dataset. */ if ((spaceid = H5Dget_space(datasetid)) < 0) ERR; if (H5Sget_simple_extent_dims(spaceid, dims, maxdims) < 0) ERR; if (maxdims[0] != H5S_UNLIMITED) ERR; /* Is this a dimscale? */ if ((is_scale = H5DSis_scale(datasetid)) < 0) ERR; if (is_scale && strcmp(obj_name, DIMSCALE_NAME)) ERR; if (is_scale) { /* A dimscale comes with a NAME attribute, in * addition to its real name. */ if (H5DSget_scale_name(datasetid, dimscale_name, NC_MAX_NAME) < 0) ERR; if (strcmp(dimscale_name, NAME_ATTRIBUTE)) ERR; /* fileno and objno uniquely identify an object and a * HDF5 file. */ if (H5Gget_objinfo(datasetid, ".", 1, &statbuf) < 0) ERR; dimscale_obj.fileno[0] = statbuf.fileno[0]; dimscale_obj.objno[0] = statbuf.objno[0]; dimscale_obj.fileno[1] = statbuf.fileno[1]; dimscale_obj.objno[1] = statbuf.objno[1]; /*printf("statbuf.fileno = %d statbuf.objno = %d\n", statbuf.fileno, statbuf.objno);*/ } else { /* Here's how to get the number of scales attached * to the dataset's dimension 0. */ if ((num_scales = H5DSget_num_scales(datasetid, 0)) < 0) ERR; if (num_scales != 1) ERR; /* Go through all dimscales for this var and learn about them. 
*/ if (H5DSiterate_scales(datasetid, 0, NULL, alien_visitor, &vars_dimscale_obj) < 0) ERR; /*printf("vars_dimscale_obj.fileno = %d vars_dimscale_obj.objno = %d\n", vars_dimscale_obj.fileno, vars_dimscale_obj.objno);*/ if (vars_dimscale_obj.fileno[0] != dimscale_obj.fileno[0] || vars_dimscale_obj.objno[0] != dimscale_obj.objno[0] || vars_dimscale_obj.fileno[1] != dimscale_obj.fileno[1] || vars_dimscale_obj.objno[1] != dimscale_obj.objno[1]) ERR; /* There's also a label for dimension 0. */ if (H5DSget_label(datasetid, 0, label, NC_MAX_NAME) < 0) ERR; /*printf("found non-scale dataset %s, label %s\n", obj_name, label);*/ } if (H5Dclose(datasetid) < 0) ERR; break; case H5G_TYPE: break; case H5G_LINK: break; default: printf("Unknown object class %d!", obj_class); } } /* Close up the shop. */ if (H5Sclose(spaceid) < 0 || H5Fclose(fileid) < 0) ERR; } SUMMARIZE_ERR; printf("*** Creating a file with one var with two dimension scales..."); { #define LAT_LEN 3 #define LON_LEN 2 #define DIMS_2 2 #define LAT_NAME "lat" #define LON_NAME "lon" #define PRES_NAME "pres" hid_t fileid, lat_spaceid, lon_spaceid, pres_spaceid; hid_t pres_datasetid, lat_dimscaleid, lon_dimscaleid; hsize_t dims[DIMS_2]; /* Create file. */ if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) ERR; /* Create the spaces that will be used for the dimscales. */ dims[0] = LAT_LEN; if ((lat_spaceid = H5Screate_simple(1, dims, dims)) < 0) ERR; dims[0] = LON_LEN; if ((lon_spaceid = H5Screate_simple(1, dims, dims)) < 0) ERR; /* Create the space for the dataset. */ dims[0] = LAT_LEN; dims[1] = LON_LEN; if ((pres_spaceid = H5Screate_simple(DIMS_2, dims, dims)) < 0) ERR; /* Create our dimension scales. 
*/ if ((lat_dimscaleid = H5Dcreate(fileid, LAT_NAME, H5T_NATIVE_INT, lat_spaceid, H5P_DEFAULT)) < 0) ERR; if (H5DSset_scale(lat_dimscaleid, NULL) < 0) ERR; if ((lon_dimscaleid = H5Dcreate(fileid, LON_NAME, H5T_NATIVE_INT, lon_spaceid, H5P_DEFAULT)) < 0) ERR; if (H5DSset_scale(lon_dimscaleid, NULL) < 0) ERR; /* Create a variable which uses these two dimscales. */ if ((pres_datasetid = H5Dcreate(fileid, PRES_NAME, H5T_NATIVE_FLOAT, pres_spaceid, H5P_DEFAULT)) < 0) ERR; if (H5DSattach_scale(pres_datasetid, lat_dimscaleid, 0) < 0) ERR; if (H5DSattach_scale(pres_datasetid, lon_dimscaleid, 1) < 0) ERR; /* Fold up our tents. */ if (H5Dclose(lat_dimscaleid) < 0 || H5Dclose(lon_dimscaleid) < 0 || H5Dclose(pres_datasetid) < 0 || H5Sclose(lat_spaceid) < 0 || H5Sclose(lon_spaceid) < 0 || H5Sclose(pres_spaceid) < 0 || H5Fclose(fileid) < 0) ERR; } SUMMARIZE_ERR; printf("*** Checking that one var, two dimscales file can be read..."); { #define NDIMS2 2 hid_t fileid, spaceid = 0, datasetid = 0; hsize_t num_obj, i; int obj_class; char obj_name[NC_MAX_NAME + 1]; htri_t is_scale; int num_scales; hsize_t dims[NDIMS2], maxdims[NDIMS2]; H5G_stat_t statbuf; HDF5_OBJID_T dimscale_obj[2], vars_dimscale_obj[2]; int dimscale_cnt = 0; int d, ndims; /* Open the file. */ if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) ERR; /* Loop through objects in the root group. */ if (H5Gget_num_objs(fileid, &num_obj) < 0) ERR; for (i=0; i<num_obj; i++) { /* Get the type (i.e. group, dataset, etc.), and the name of * the object. */ if ((obj_class = H5Gget_objtype_by_idx(fileid, i)) < 0) ERR; if (H5Gget_objname_by_idx(fileid, i, obj_name, NC_MAX_NAME) < 0) ERR; /* printf("\nEncountered: HDF5 object obj_class %d obj_name %s\n", */ /* obj_class, obj_name); */ /* Deal with object based on its obj_class. */ switch(obj_class) { case H5G_GROUP: break; case H5G_DATASET: /* Open the dataset. */ if ((datasetid = H5Dopen1(fileid, obj_name)) < 0) ERR; /* Get space info. 
*/ if ((spaceid = H5Dget_space(datasetid)) < 0) ERR; if (H5Sget_simple_extent_dims(spaceid, dims, maxdims) < 0) ERR; if ((ndims = H5Sget_simple_extent_ndims(spaceid)) < 0) ERR; if (ndims > NDIMS2) ERR; /* Is this a dimscale? */ if ((is_scale = H5DSis_scale(datasetid)) < 0) ERR; if (is_scale) { /* fileno and objno uniquely identify an object and a * HDF5 file. */ if (H5Gget_objinfo(datasetid, ".", 1, &statbuf) < 0) ERR; dimscale_obj[dimscale_cnt].fileno[0] = statbuf.fileno[0]; dimscale_obj[dimscale_cnt].objno[0] = statbuf.objno[0]; dimscale_obj[dimscale_cnt].fileno[1] = statbuf.fileno[1]; dimscale_obj[dimscale_cnt].objno[1] = statbuf.objno[1]; /* printf("dimscale_obj[%d].fileno = %d dimscale_obj[%d].objno = %d\n", */ /* dimscale_cnt, dimscale_obj[dimscale_cnt].fileno, dimscale_cnt, */ /* dimscale_obj[dimscale_cnt].objno); */ dimscale_cnt++; } else { /* Here's how to get the number of scales attached * to the dataset's dimension 0 and 1. */ if ((num_scales = H5DSget_num_scales(datasetid, 0)) < 0) ERR; if (num_scales != 1) ERR; if ((num_scales = H5DSget_num_scales(datasetid, 1)) < 0) ERR; if (num_scales != 1) ERR; /* Go through all dimscales for this var and learn about them. */ for (d = 0; d < ndims; d++) { if (H5DSiterate_scales(datasetid, d, NULL, alien_visitor2, &(vars_dimscale_obj[d])) < 0) ERR; /* Verify that the object ids passed from the * alien_visitor2 function match the ones we found * for the lat and lon datasets. */ if (vars_dimscale_obj[d].fileno[0] != dimscale_obj[d].fileno[0] || vars_dimscale_obj[d].objno[0] != dimscale_obj[d].objno[0]) ERR; if (vars_dimscale_obj[d].fileno[1] != dimscale_obj[d].fileno[1] || vars_dimscale_obj[d].objno[1] != dimscale_obj[d].objno[1]) ERR; } } if (H5Dclose(datasetid) < 0) ERR; if (H5Sclose(spaceid) < 0) ERR; break; case H5G_TYPE: break; case H5G_LINK: break; default: printf("Unknown object class %d!", obj_class); } } /* Close up the shop. 
*/ if (H5Fclose(fileid) < 0) ERR; } SUMMARIZE_ERR; printf("*** Creating a file with one var with two unlimited dimension scales..."); { #define U1_LEN 3 #define U2_LEN 2 #define DIMS2 2 #define U1_NAME "u1" #define U2_NAME "u2" #define VNAME "v1" hid_t fapl_id, fcpl_id, grpid, plistid, plistid2; hid_t fileid, lat_spaceid, lon_spaceid, pres_spaceid; hid_t pres_datasetid, lat_dimscaleid, lon_dimscaleid; hsize_t dims[DIMS2], maxdims[DIMS2], chunksize[DIMS2] = {10, 10}; hid_t spaceid = 0, datasetid = 0; hsize_t num_obj, i; int obj_class; char obj_name[NC_MAX_NAME + 1]; htri_t is_scale; int num_scales; H5G_stat_t statbuf; HDF5_OBJID_T dimscale_obj[2], vars_dimscale_obj[2]; int dimscale_cnt = 0; int d, ndims; /* Create file access and create property lists. */ if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR; if ((fcpl_id = H5Pcreate(H5P_FILE_CREATE)) < 0) ERR; /* Set latest_format in access propertly list. This ensures that * the latest, greatest, HDF5 versions are used in the file. */ if (H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) ERR; /* Set H5P_CRT_ORDER_TRACKED in the creation property list. This * turns on HDF5 creation ordering in the file. */ if (H5Pset_link_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED)) < 0) ERR; if (H5Pset_attr_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED)) < 0) ERR; /* Create file. */ if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, fcpl_id, fapl_id)) < 0) ERR; /* Open the root group. */ if ((grpid = H5Gopen2(fileid, "/", H5P_DEFAULT)) < 0) ERR; /* Create the spaces that will be used for the dimscales. */ dims[0] = 0; maxdims[0] = H5S_UNLIMITED; if ((lat_spaceid = H5Screate_simple(1, dims, maxdims)) < 0) ERR; if ((lon_spaceid = H5Screate_simple(1, dims, maxdims)) < 0) ERR; /* Create the space for the dataset. 
*/ dims[0] = 0; dims[1] = 0; maxdims[0] = H5S_UNLIMITED; maxdims[1] = H5S_UNLIMITED; if ((pres_spaceid = H5Screate_simple(DIMS2, dims, maxdims)) < 0) ERR; /* Set up the dataset creation property list for the two dimensions. */ if ((plistid = H5Pcreate(H5P_DATASET_CREATE)) < 0) ERR; if (H5Pset_chunk(plistid, 1, chunksize) < 0) ERR; if (H5Pset_attr_creation_order(plistid, H5P_CRT_ORDER_TRACKED| H5P_CRT_ORDER_INDEXED) < 0) ERR; /* Create our dimension scales. */ if ((lat_dimscaleid = H5Dcreate(grpid, U1_NAME, H5T_NATIVE_INT, lat_spaceid, plistid)) < 0) ERR; if (H5DSset_scale(lat_dimscaleid, NULL) < 0) ERR; if ((lon_dimscaleid = H5Dcreate(grpid, U2_NAME, H5T_NATIVE_INT, lon_spaceid, plistid)) < 0) ERR; if (H5DSset_scale(lon_dimscaleid, NULL) < 0) ERR; /* Set up the dataset creation property list for the variable. */ if ((plistid2 = H5Pcreate(H5P_DATASET_CREATE)) < 0) ERR; if (H5Pset_chunk(plistid2, DIMS2, chunksize) < 0) ERR; if (H5Pset_attr_creation_order(plistid2, H5P_CRT_ORDER_TRACKED| H5P_CRT_ORDER_INDEXED) < 0) ERR; /* Create a variable which uses these two dimscales. */ if ((pres_datasetid = H5Dcreate(grpid, VNAME, H5T_NATIVE_DOUBLE, pres_spaceid, plistid2)) < 0) ERR; if (H5DSattach_scale(pres_datasetid, lat_dimscaleid, 0) < 0) ERR; if (H5DSattach_scale(pres_datasetid, lon_dimscaleid, 1) < 0) ERR; /* Close down the show. */ if (H5Pclose(fapl_id) < 0 || H5Pclose(fcpl_id) < 0 || H5Dclose(lat_dimscaleid) < 0 || H5Dclose(lon_dimscaleid) < 0 || H5Dclose(pres_datasetid) < 0 || H5Sclose(lat_spaceid) < 0 || H5Sclose(lon_spaceid) < 0 || H5Sclose(pres_spaceid) < 0 || H5Pclose(plistid) < 0 || H5Pclose(plistid2) < 0 || H5Gclose(grpid) < 0 || H5Fclose(fileid) < 0) ERR; /* Open the file. */ if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) ERR; if ((grpid = H5Gopen2(fileid, "/", H5P_DEFAULT)) < 0) ERR; /* Loop through objects in the root group. */ if (H5Gget_num_objs(grpid, &num_obj) < 0) ERR; for (i = 0; i < num_obj; i++) { /*Get the type (i.e. 
group, dataset, etc.), and the name of the object. */ if ((obj_class = H5Gget_objtype_by_idx(grpid, i)) < 0) ERR; if (H5Gget_objname_by_idx(grpid, i, obj_name, NC_MAX_NAME) < 0) ERR; /* Deal with object based on its obj_class. */ switch(obj_class) { case H5G_GROUP: break; case H5G_DATASET: /* Open the dataset. */ if ((datasetid = H5Dopen1(grpid, obj_name)) < 0) ERR; /* Get space info. */ if ((spaceid = H5Dget_space(datasetid)) < 0) ERR; if (H5Sget_simple_extent_dims(spaceid, dims, maxdims) < 0) ERR; if ((ndims = H5Sget_simple_extent_ndims(spaceid)) < 0) ERR; /* Is this a dimscale? */ if ((is_scale = H5DSis_scale(datasetid)) < 0) ERR; if (is_scale) { /* fileno and objno uniquely identify an object and a * HDF5 file. */ if (H5Gget_objinfo(datasetid, ".", 1, &statbuf) < 0) ERR; dimscale_obj[dimscale_cnt].fileno[0] = statbuf.fileno[0]; dimscale_obj[dimscale_cnt].objno[0] = statbuf.objno[0]; dimscale_obj[dimscale_cnt].fileno[1] = statbuf.fileno[1]; dimscale_obj[dimscale_cnt].objno[1] = statbuf.objno[1]; dimscale_cnt++; } else { /* Here's how to get the number of scales attached * to the dataset's dimension 0 and 1. */ if ((num_scales = H5DSget_num_scales(datasetid, 0)) < 0) ERR; if (num_scales != 1) ERR; if ((num_scales = H5DSget_num_scales(datasetid, 1)) < 0) ERR; if (num_scales != 1) ERR; /* Go through all dimscales for this var and learn about them. */ for (d = 0; d < ndims; d++) { if (H5DSiterate_scales(datasetid, d, NULL, alien_visitor2, &(vars_dimscale_obj[d])) < 0) ERR; /* Verify that the object ids passed from the * alien_visitor2 function match the ones we found * for the lat and lon datasets. 
*/ if (vars_dimscale_obj[d].fileno[0] != dimscale_obj[d].fileno[0] || vars_dimscale_obj[d].objno[0] != dimscale_obj[d].objno[0]) ERR; if (vars_dimscale_obj[d].fileno[1] != dimscale_obj[d].fileno[1] || vars_dimscale_obj[d].objno[1] != dimscale_obj[d].objno[1]) ERR; } } if (H5Dclose(datasetid) < 0) ERR; break; case H5G_TYPE: break; case H5G_LINK: break; default: printf("Unknown object class %d!", obj_class); } } /* Check the dimension lengths. */ { hid_t spaceid1; hsize_t h5dimlen[DIMS2], h5dimlenmax[DIMS2]; int dataset_ndims; /* Check U1. */ if ((datasetid = H5Dopen1(grpid, U1_NAME)) < 0) ERR; if ((spaceid1 = H5Dget_space(datasetid)) < 0) ERR; if ((dataset_ndims = H5Sget_simple_extent_dims(spaceid1, h5dimlen, h5dimlenmax)) < 0) ERR; if (dataset_ndims != 1 || h5dimlen[0] != 0 || h5dimlenmax[0] != H5S_UNLIMITED) ERR; if (H5Dclose(datasetid) || H5Sclose(spaceid1)) ERR; /* Check U2. */ if ((datasetid = H5Dopen1(grpid, U2_NAME)) < 0) ERR; if ((spaceid1 = H5Dget_space(datasetid)) < 0) ERR; if ((dataset_ndims = H5Sget_simple_extent_dims(spaceid1, h5dimlen, h5dimlenmax)) < 0) ERR; if (dataset_ndims != 1 || h5dimlen[0] != 0 || h5dimlenmax[0] != H5S_UNLIMITED) ERR; if (H5Dclose(datasetid) || H5Sclose(spaceid1)) ERR; /* Check V1. */ if ((datasetid = H5Dopen1(grpid, VNAME)) < 0) ERR; if ((spaceid1 = H5Dget_space(datasetid)) < 0) ERR; if ((dataset_ndims = H5Sget_simple_extent_dims(spaceid1, h5dimlen, h5dimlenmax)) < 0) ERR; if (dataset_ndims != 2 || h5dimlen[0] != 0 || h5dimlen[1] != 0 || h5dimlenmax[0] != H5S_UNLIMITED || h5dimlenmax[1] != H5S_UNLIMITED) ERR; /* All done. */ if (H5Dclose(datasetid) || H5Sclose(spaceid1)) ERR; } /* Write two hyperslabs. */ { #define NUM_VALS 3 hid_t file_spaceid, mem_spaceid; hsize_t h5dimlen[DIMS2], h5dimlenmax[DIMS2], xtend_size[DIMS2] = {1, NUM_VALS}; hsize_t start[DIMS2] = {0, 0}; hsize_t count[DIMS2] = {1, NUM_VALS}; double value[NUM_VALS]; int dataset_ndims; int i; /* Set up phony data. 
*/ for (i = 0; i < NUM_VALS; i++) value[i] = (float)i; /* Open the dataset, check its dimlens. */ if ((datasetid = H5Dopen1(grpid, VNAME)) < 0) ERR; if ((file_spaceid = H5Dget_space(datasetid)) < 0) ERR; if ((dataset_ndims = H5Sget_simple_extent_dims(file_spaceid, h5dimlen, h5dimlenmax)) < 0) ERR; if (dataset_ndims != 2 || h5dimlen[0] != 0 || h5dimlen[1] != 0 || h5dimlenmax[0] != H5S_UNLIMITED || h5dimlenmax[1] != H5S_UNLIMITED) ERR; /* Extend the size of the dataset. */ if (H5Dextend(datasetid, xtend_size) < 0) ERR; if ((file_spaceid = H5Dget_space(datasetid)) < 0) ERR; /* Check the size. */ if ((dataset_ndims = H5Sget_simple_extent_dims(file_spaceid, h5dimlen, h5dimlenmax)) < 0) ERR; if (dataset_ndims != 2 || h5dimlen[0] != 1 || h5dimlen[1] != NUM_VALS || h5dimlenmax[0] != H5S_UNLIMITED || h5dimlenmax[1] != H5S_UNLIMITED) ERR; /* Set up the file and memory spaces. */ if (H5Sselect_hyperslab(file_spaceid, H5S_SELECT_SET, start, NULL, count, NULL) < 0) ERR; if ((mem_spaceid = H5Screate_simple(DIMS2, count, NULL)) < 0) ERR; /* Write a slice of data. */ if (H5Dwrite(datasetid, H5T_NATIVE_DOUBLE, mem_spaceid, file_spaceid, H5P_DEFAULT, value) < 0) /* Check the size. */ if ((file_spaceid = H5Dget_space(datasetid)) < 0) ERR; if ((dataset_ndims = H5Sget_simple_extent_dims(file_spaceid, h5dimlen, h5dimlenmax)) < 0) ERR; if (dataset_ndims != 2 || h5dimlen[0] != 1 || h5dimlen[1] != NUM_VALS || h5dimlenmax[0] != H5S_UNLIMITED || h5dimlenmax[1] != H5S_UNLIMITED) ERR; /* Extend the size of the dataset for the second slice. */ xtend_size[0]++; if (H5Dextend(datasetid, xtend_size) < 0) ERR; if ((file_spaceid = H5Dget_space(datasetid)) < 0) ERR; /* Set up the file and memory spaces for a second slice. */ start[0]++; if (H5Sselect_hyperslab(file_spaceid, H5S_SELECT_SET, start, NULL, count, NULL) < 0) ERR; if ((mem_spaceid = H5Screate_simple(DIMS2, count, NULL)) < 0) ERR; /* Write a second slice of data. 
*/ if (H5Dwrite(datasetid, H5T_NATIVE_DOUBLE, mem_spaceid, file_spaceid, H5P_DEFAULT, value) < 0) /* Check the size again. */ if ((file_spaceid = H5Dget_space(datasetid)) < 0) ERR; if ((dataset_ndims = H5Sget_simple_extent_dims(file_spaceid, h5dimlen, h5dimlenmax)) < 0) ERR; if (dataset_ndims != 2 || h5dimlen[0] != 2 || h5dimlen[1] != NUM_VALS || h5dimlenmax[0] != H5S_UNLIMITED || h5dimlenmax[1] != H5S_UNLIMITED) ERR; /* All done. */ if (H5Dclose(datasetid) || H5Sclose(mem_spaceid) || H5Sclose(file_spaceid)) ERR; } /* Close up the shop. */ if (H5Sclose(spaceid)) ERR; if (H5Gclose(grpid) < 0 || H5Fclose(fileid) < 0) ERR; } SUMMARIZE_ERR; printf("*** Checking dimension scales with attached dimension scales..."); { #define LAT_LEN 3 #define LON_LEN 2 #define TIME_LEN 5 #define LEN_LEN 10 #define DIMS_3 3 #define NUM_DIMSCALES1 4 #define LAT_NAME "lat" #define LON_NAME "lon" #define PRES_NAME1 "z_pres" #define TIME_NAME "time" #define LEN_NAME "u_len" hid_t fileid, lat_spaceid, lon_spaceid, time_spaceid, pres_spaceid, len_spaceid; hid_t pres_datasetid, lat_dimscaleid, lon_dimscaleid, time_dimscaleid, len_dimscaleid; hid_t fapl_id, fcpl_id; hsize_t dims[DIMS_3]; hid_t spaceid = 0, datasetid = 0; hsize_t num_obj, i; int obj_class; char obj_name[NC_MAX_NAME + 1]; htri_t is_scale; int num_scales; hsize_t maxdims[DIMS_3]; H5G_stat_t statbuf; HDF5_OBJID_T dimscale_obj[NUM_DIMSCALES1], vars_dimscale_obj[NUM_DIMSCALES1]; int dimscale_cnt = 0; int d, ndims; /* Create file access and create property lists. */ if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR; if ((fcpl_id = H5Pcreate(H5P_FILE_CREATE)) < 0) ERR; /* Set latest_format in access propertly list. This ensures that * the latest, greatest, HDF5 versions are used in the file. */ if (H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) ERR; /* Set H5P_CRT_ORDER_TRACKED in the creation property list. This * turns on HDF5 creation ordering in the file. 
*/ if (H5Pset_link_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED)) < 0) ERR; if (H5Pset_attr_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED)) < 0) ERR; /* Create file. */ if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, fcpl_id, fapl_id)) < 0) ERR; /* Create the spaces that will be used for the dimscales. */ dims[0] = LAT_LEN; if ((lat_spaceid = H5Screate_simple(1, dims, dims)) < 0) ERR; dims[0] = LON_LEN; if ((lon_spaceid = H5Screate_simple(1, dims, dims)) < 0) ERR; dims[0] = TIME_LEN; if ((time_spaceid = H5Screate_simple(1, dims, dims)) < 0) ERR; dims[0] = LEN_LEN; if ((len_spaceid = H5Screate_simple(1, dims, dims)) < 0) ERR; /* Create the space for the dataset. */ dims[0] = LAT_LEN; dims[1] = LON_LEN; dims[2] = TIME_LEN; if ((pres_spaceid = H5Screate_simple(DIMS_3, dims, dims)) < 0) ERR; /* Create our dimension scales. */ if ((lat_dimscaleid = H5Dcreate1(fileid, LAT_NAME, H5T_NATIVE_INT, lat_spaceid, H5P_DEFAULT)) < 0) ERR; if (H5DSset_scale(lat_dimscaleid, NULL) < 0) ERR; if ((lon_dimscaleid = H5Dcreate1(fileid, LON_NAME, H5T_NATIVE_INT, lon_spaceid, H5P_DEFAULT)) < 0) ERR; if (H5DSset_scale(lon_dimscaleid, NULL) < 0) ERR; if ((time_dimscaleid = H5Dcreate1(fileid, TIME_NAME, H5T_NATIVE_INT, time_spaceid, H5P_DEFAULT)) < 0) ERR; if (H5DSset_scale(time_dimscaleid, NULL) < 0) ERR; if ((len_dimscaleid = H5Dcreate1(fileid, LEN_NAME, H5T_NATIVE_INT, len_spaceid, H5P_DEFAULT)) < 0) ERR; if (H5DSset_scale(len_dimscaleid, NULL) < 0) ERR; /* Create a variable which uses these three dimscales. */ if ((pres_datasetid = H5Dcreate1(fileid, PRES_NAME1, H5T_NATIVE_FLOAT, pres_spaceid, H5P_DEFAULT)) < 0) ERR; if (H5DSattach_scale(pres_datasetid, lat_dimscaleid, 0) < 0) ERR; if (H5DSattach_scale(pres_datasetid, lon_dimscaleid, 1) < 0) ERR; if (H5DSattach_scale(pres_datasetid, time_dimscaleid, 2) < 0) ERR; /* Attach a dimscale to a dimscale. Unfortunately, HDF5 does not * allow this. Woe is me. 
*/ /*if (H5DSattach_scale(time_dimscaleid, len_dimscaleid, 0) < 0) ERR;*/ /* Fold up our tents. */ if (H5Dclose(lat_dimscaleid) < 0 || H5Dclose(lon_dimscaleid) < 0 || H5Dclose(time_dimscaleid) < 0 || H5Dclose(len_dimscaleid) < 0 || H5Dclose(pres_datasetid) < 0 || H5Sclose(lat_spaceid) < 0 || H5Sclose(lon_spaceid) < 0 || H5Sclose(time_spaceid) < 0 || H5Sclose(pres_spaceid) < 0 || H5Sclose(len_spaceid) < 0 || H5Pclose(fapl_id) < 0 || H5Pclose(fcpl_id) < 0 || H5Fclose(fileid) < 0) ERR; /* Open the file. */ if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) ERR; /* Loop through objects in the root group. */ if (H5Gget_num_objs(fileid, &num_obj) < 0) ERR; for (i=0; i<num_obj; i++) { /* Get the type (i.e. group, dataset, etc.), and the name of * the object. */ if ((obj_class = H5Gget_objtype_by_idx(fileid, i)) < 0) ERR; if (H5Gget_objname_by_idx(fileid, i, obj_name, NC_MAX_NAME) < 0) ERR; /* printf("\nEncountered: HDF5 object obj_class %d obj_name %s\n", */ /* obj_class, obj_name); */ /* Deal with object based on its obj_class. */ switch(obj_class) { case H5G_GROUP: break; case H5G_DATASET: /* Open the dataset. */ if ((datasetid = H5Dopen1(fileid, obj_name)) < 0) ERR; /* Get space info. */ if ((spaceid = H5Dget_space(datasetid)) < 0) ERR; if (H5Sget_simple_extent_dims(spaceid, dims, maxdims) < 0) ERR; if ((ndims = H5Sget_simple_extent_ndims(spaceid)) < 0) ERR; /* Is this a dimscale? */ if ((is_scale = H5DSis_scale(datasetid)) < 0) ERR; if (is_scale) { /* fileno and objno uniquely identify an object and a * HDF5 file. 
*/ if (H5Gget_objinfo(datasetid, ".", 1, &statbuf) < 0) ERR; dimscale_obj[dimscale_cnt].fileno[0] = statbuf.fileno[0]; dimscale_obj[dimscale_cnt].objno[0] = statbuf.objno[0]; dimscale_obj[dimscale_cnt].fileno[1] = statbuf.fileno[1]; dimscale_obj[dimscale_cnt].objno[1] = statbuf.objno[1]; /* printf("dimscale_obj[%d].fileno = %d dimscale_obj[%d].objno = %d\n", */ /* dimscale_cnt, dimscale_obj[dimscale_cnt].fileno, dimscale_cnt, */ /* dimscale_obj[dimscale_cnt].objno); */ dimscale_cnt++; } else { /* Here's how to get the number of scales attached * to the dataset's dimension 0 and 1. */ if ((num_scales = H5DSget_num_scales(datasetid, 0)) < 0) ERR; if (num_scales != 1) ERR; if ((num_scales = H5DSget_num_scales(datasetid, 1)) < 0) ERR; if (num_scales != 1) ERR; /* Go through all dimscales for this var and learn about them. */ for (d = 0; d < ndims; d++) { if (H5DSiterate_scales(datasetid, d, NULL, alien_visitor2, &(vars_dimscale_obj[d])) < 0) ERR; /* Verify that the object ids passed from the * alien_visitor2 function match the ones we found * for the lat and lon datasets. */ if (vars_dimscale_obj[d].fileno[0] != dimscale_obj[d].fileno[0] || vars_dimscale_obj[d].objno[0] != dimscale_obj[d].objno[0]) ERR; if (vars_dimscale_obj[d].fileno[1] != dimscale_obj[d].fileno[1] || vars_dimscale_obj[d].objno[1] != dimscale_obj[d].objno[1]) ERR; } } if (H5Dclose(datasetid) < 0) ERR; if (H5Sclose(spaceid) < 0) ERR; break; case H5G_TYPE: break; case H5G_LINK: break; default: printf("Unknown object class %d!", obj_class); } } /* Close up the shop. 
*/ if (H5Fclose(fileid) < 0) ERR; } SUMMARIZE_ERR; printf("*** Checking cration ordering of datasets which are also dimension scales..."); { #define LAT_LEN 3 #define LON_LEN 2 #define TIME_LEN 5 #define LEN_LEN 10 #define DIMS_3 3 #define NUM_DIMSCALES2 4 #define LAT_NAME "lat" #define LON_NAME "lon" #define PRES_NAME1 "z_pres" #define TIME_NAME "time" #define LEN_NAME "u_len" hid_t fileid, lat_spaceid, lon_spaceid, time_spaceid, pres_spaceid, len_spaceid; hid_t pres_datasetid, lat_dimscaleid, lon_dimscaleid, time_dimscaleid, len_dimscaleid; hid_t fapl_id, fcpl_id; hsize_t dims[DIMS_3]; hid_t spaceid = 0, datasetid = 0; hsize_t num_obj, i; int obj_class; char obj_name[NC_MAX_NAME + 1]; htri_t is_scale; int num_scales; hsize_t maxdims[DIMS_3]; H5G_stat_t statbuf; HDF5_OBJID_T dimscale_obj[NUM_DIMSCALES2], vars_dimscale_obj[NUM_DIMSCALES2]; int dimscale_cnt = 0; int d, ndims; /* Create file access and create property lists. */ if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR; if ((fcpl_id = H5Pcreate(H5P_FILE_CREATE)) < 0) ERR; /* Set latest_format in access propertly list. This ensures that * the latest, greatest, HDF5 versions are used in the file. */ if (H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) ERR; /* Set H5P_CRT_ORDER_TRACKED in the creation property list. This * turns on HDF5 creation ordering in the file. */ if (H5Pset_link_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED)) < 0) ERR; if (H5Pset_attr_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED)) < 0) ERR; /* Create file. */ if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, fcpl_id, fapl_id)) < 0) ERR; /* Create the spaces that will be used for the dimscales. 
*/ dims[0] = LAT_LEN; if ((lat_spaceid = H5Screate_simple(1, dims, dims)) < 0) ERR; dims[0] = LON_LEN; if ((lon_spaceid = H5Screate_simple(1, dims, dims)) < 0) ERR; dims[0] = TIME_LEN; if ((time_spaceid = H5Screate_simple(1, dims, dims)) < 0) ERR; dims[0] = LEN_LEN; if ((len_spaceid = H5Screate_simple(1, dims, dims)) < 0) ERR; /* Create the space for the dataset. */ dims[0] = LAT_LEN; dims[1] = LON_LEN; dims[2] = TIME_LEN; if ((pres_spaceid = H5Screate_simple(DIMS_3, dims, dims)) < 0) ERR; /* Create our dimension scales. */ if ((lat_dimscaleid = H5Dcreate1(fileid, LAT_NAME, H5T_NATIVE_INT, lat_spaceid, H5P_DEFAULT)) < 0) ERR; if (H5DSset_scale(lat_dimscaleid, NULL) < 0) ERR; if ((lon_dimscaleid = H5Dcreate1(fileid, LON_NAME, H5T_NATIVE_INT, lon_spaceid, H5P_DEFAULT)) < 0) ERR; if (H5DSset_scale(lon_dimscaleid, NULL) < 0) ERR; if ((time_dimscaleid = H5Dcreate1(fileid, TIME_NAME, H5T_NATIVE_INT, time_spaceid, H5P_DEFAULT)) < 0) ERR; if (H5DSset_scale(time_dimscaleid, NULL) < 0) ERR; if ((len_dimscaleid = H5Dcreate1(fileid, LEN_NAME, H5T_NATIVE_INT, len_spaceid, H5P_DEFAULT)) < 0) ERR; if (H5DSset_scale(len_dimscaleid, NULL) < 0) ERR; /* Create a variable which uses these three dimscales. */ if ((pres_datasetid = H5Dcreate1(fileid, PRES_NAME1, H5T_NATIVE_FLOAT, pres_spaceid, H5P_DEFAULT)) < 0) ERR; if (H5DSattach_scale(pres_datasetid, lat_dimscaleid, 0) < 0) ERR; if (H5DSattach_scale(pres_datasetid, lon_dimscaleid, 1) < 0) ERR; if (H5DSattach_scale(pres_datasetid, time_dimscaleid, 2) < 0) ERR; /* Attach a dimscale to a dimscale. Unfortunately, HDF5 does not * allow this. Woe is me. */ /*if (H5DSattach_scale(time_dimscaleid, len_dimscaleid, 0) < 0) ERR;*/ /* Fold up our tents. 
*/ if (H5Dclose(lat_dimscaleid) < 0 || H5Dclose(lon_dimscaleid) < 0 || H5Dclose(time_dimscaleid) < 0 || H5Dclose(len_dimscaleid) < 0 || H5Dclose(pres_datasetid) < 0 || H5Sclose(lat_spaceid) < 0 || H5Sclose(lon_spaceid) < 0 || H5Sclose(time_spaceid) < 0 || H5Sclose(pres_spaceid) < 0 || H5Sclose(len_spaceid) < 0 || H5Pclose(fapl_id) < 0 || H5Pclose(fcpl_id) < 0 || H5Fclose(fileid) < 0) ERR; /* Open the file. */ if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) ERR; /* Loop through objects in the root group. */ if (H5Gget_num_objs(fileid, &num_obj) < 0) ERR; for (i=0; i<num_obj; i++) { /* Get the type (i.e. group, dataset, etc.), and the name of * the object. */ if ((obj_class = H5Gget_objtype_by_idx(fileid, i)) < 0) ERR; if (H5Gget_objname_by_idx(fileid, i, obj_name, NC_MAX_NAME) < 0) ERR; /* printf("\nEncountered: HDF5 object obj_class %d obj_name %s\n", */ /* obj_class, obj_name); */ /* Deal with object based on its obj_class. */ switch(obj_class) { case H5G_GROUP: break; case H5G_DATASET: /* Open the dataset. */ if ((datasetid = H5Dopen1(fileid, obj_name)) < 0) ERR; /* Get space info. */ if ((spaceid = H5Dget_space(datasetid)) < 0) ERR; if (H5Sget_simple_extent_dims(spaceid, dims, maxdims) < 0) ERR; if ((ndims = H5Sget_simple_extent_ndims(spaceid)) < 0) ERR; /* Is this a dimscale? */ if ((is_scale = H5DSis_scale(datasetid)) < 0) ERR; if (is_scale) { /* fileno and objno uniquely identify an object and a * HDF5 file. 
*/ if (H5Gget_objinfo(datasetid, ".", 1, &statbuf) < 0) ERR; dimscale_obj[dimscale_cnt].fileno[0] = statbuf.fileno[0]; dimscale_obj[dimscale_cnt].objno[0] = statbuf.objno[0]; dimscale_obj[dimscale_cnt].fileno[1] = statbuf.fileno[1]; dimscale_obj[dimscale_cnt].objno[1] = statbuf.objno[1]; /* printf("dimscale_obj[%d].fileno = %d dimscale_obj[%d].objno = %d\n", */ /* dimscale_cnt, dimscale_obj[dimscale_cnt].fileno, dimscale_cnt, */ /* dimscale_obj[dimscale_cnt].objno); */ dimscale_cnt++; } else { /* Here's how to get the number of scales attached * to the dataset's dimension 0 and 1. */ if ((num_scales = H5DSget_num_scales(datasetid, 0)) < 0) ERR; if (num_scales != 1) ERR; if ((num_scales = H5DSget_num_scales(datasetid, 1)) < 0) ERR; if (num_scales != 1) ERR; /* Go through all dimscales for this var and learn about them. */ for (d = 0; d < ndims; d++) { if (H5DSiterate_scales(datasetid, d, NULL, alien_visitor2, &(vars_dimscale_obj[d])) < 0) ERR; /* Verify that the object ids passed from the * alien_visitor2 function match the ones we found * for the lat and lon datasets. */ if (vars_dimscale_obj[d].fileno[0] != dimscale_obj[d].fileno[0] || vars_dimscale_obj[d].objno[0] != dimscale_obj[d].objno[0]) ERR; if (vars_dimscale_obj[d].fileno[1] != dimscale_obj[d].fileno[1] || vars_dimscale_obj[d].objno[1] != dimscale_obj[d].objno[1]) ERR; } } if (H5Dclose(datasetid) < 0) ERR; if (H5Sclose(spaceid) < 0) ERR; break; case H5G_TYPE: break; case H5G_LINK: break; default: printf("Unknown object class %d!", obj_class); } } /* Close up the shop. */ if (H5Fclose(fileid) < 0) ERR; } SUMMARIZE_ERR; FINAL_RESULTS; }
/** Read/write a hyperslab of data, performing dimension remapping
 * and data rescaling as needed.
 *
 * \param opcode MIRW_OP_READ or MIRW_OP_WRITE.
 * \param volume Open volume handle; supplies the dimension permutation,
 *        valid range and (optionally) the per-slice image-max/image-min
 *        scaling datasets.
 * \param buffer_data_type Element type of \a buffer.
 * \param start Hyperslab origin, in the caller's (apparent) dimension order.
 * \param count Hyperslab extent, in the caller's (apparent) dimension order.
 * \param buffer Caller's data buffer (destination on read, source on write).
 * \return Non-negative on success, MI_ERROR on failure.
 *
 * FIX(review): the original leaked dset_id/fspc_id/buffer_type_id/mspc_id by
 * returning directly when H5Dget_space(volume->imax_id) failed, never closed
 * image_min_fspc_id at all, and leaked scaling_mspc_id/image_max_fspc_id on
 * every error path inside the slice-scaling branch.  Those three handles are
 * now function-scoped and released in the common cleanup block.
 */
static int mirw_hyperslab_icv(int opcode,
                              mihandle_t volume,
                              mitype_t buffer_data_type,
                              const misize_t start[],
                              const misize_t count[],
                              void *buffer)
{
  hid_t dset_id = -1;
  hid_t mspc_id = -1;
  hid_t fspc_id = -1;
  hid_t buffer_type_id = -1;
  /* Hoisted from the slice-scaling branch so cleanup can release them. */
  hid_t image_max_fspc_id = -1;
  hid_t image_min_fspc_id = -1;
  hid_t scaling_mspc_id = -1;
  int result = MI_ERROR;
  hsize_t hdf_start[MI2_MAX_VAR_DIMS];
  hsize_t hdf_count[MI2_MAX_VAR_DIMS];
  int dir[MI2_MAX_VAR_DIMS]; /* Direction vector in file order */
  hsize_t ndims;
  int slice_ndims;
  int n_different = 0;
  double volume_valid_min, volume_valid_max;
  misize_t buffer_size;
  void *temp_buffer = NULL;
  size_t icount[MI2_MAX_VAR_DIMS];
  int idir[MI2_MAX_VAR_DIMS];
  int imap[MI2_MAX_VAR_DIMS];
  double *image_slice_max_buffer = NULL;
  double *image_slice_min_buffer = NULL;
  int scaling_needed = 0;
  char path[MI2_MAX_PATH];
  hsize_t image_slice_start[MI2_MAX_VAR_DIMS];
  hsize_t image_slice_count[MI2_MAX_VAR_DIMS];
  hsize_t image_slice_length = 0;
  hsize_t total_number_of_slices = 0;
  hsize_t i;
  int j;

  /* Disallow write operations to anything but the highest resolution. */
  if (opcode == MIRW_OP_WRITE && volume->selected_resolution != 0) {
    return MI_LOG_ERROR(MI2_MSG_GENERIC,"Trying to write to a volume thumbnail");
  }

  sprintf(path, MI_ROOT_PATH "/image/%d/image", volume->selected_resolution);
  /*printf("Using:%s\n",path);*/

  /* Open the dataset with the specified path */
  MI_CHECK_HDF_CALL(dset_id = H5Dopen1(volume->hdf_id, path),"H5Dopen1");
  if (dset_id < 0) {
    return (MI_ERROR);
  }

  MI_CHECK_HDF_CALL(fspc_id = H5Dget_space(dset_id),"H5Dget_space");
  if (fspc_id < 0) {
    goto cleanup;
  }

  buffer_type_id = mitype_to_hdftype(buffer_data_type, TRUE);
  if(buffer_type_id<0) {
    goto cleanup;
  }

  ndims = volume->number_of_dims;
  if (ndims == 0) {
    /* A scalar volume is possible but extremely unlikely, not to
     * mention useless!
     */
    mspc_id = H5Screate(H5S_SCALAR);
    hdf_count[0]=1;
  } else {
    n_different = mitranslate_hyperslab_origin(volume, start, count,
                                               hdf_start, hdf_count, dir);
    mspc_id = H5Screate_simple(ndims, hdf_count, NULL);
    if (mspc_id < 0) {
      fprintf(stderr,"H5Screate_simple: Fail %s:%d\n",__FILE__,__LINE__);
      goto cleanup;
    }
  }

  miget_hyperslab_size_hdf(buffer_type_id, ndims, hdf_count, &buffer_size);

  MI_CHECK_HDF_CALL(result = H5Sselect_hyperslab(fspc_id, H5S_SELECT_SET,
                                                 hdf_start, NULL, hdf_count,
                                                 NULL),"H5Sselect_hyperslab");
  if (result < 0) {
    goto cleanup;
  }

  if((result=miget_volume_valid_range( volume, &volume_valid_max, &volume_valid_min))<0) {
    goto cleanup;
  }

#ifdef _DEBUG
  printf("mirw_hyperslab_icv:Volume:%lx valid_max:%f valid_min:%f scaling:%d n_different:%d\n",(long int)(volume),volume_valid_max,volume_valid_min,volume->has_slice_scaling,n_different);
#endif

  if(volume->has_slice_scaling) {
    total_number_of_slices=1;
    image_slice_length=1;
    scaling_needed=1;

    image_max_fspc_id=H5Dget_space(volume->imax_id);
    image_min_fspc_id=H5Dget_space(volume->imin_id);
    if ( image_max_fspc_id < 0 || image_min_fspc_id < 0 ) {
      /* image-max / image-min dataspace is not available. */
      result = MI_ERROR;
      goto cleanup;
    }

    slice_ndims = H5Sget_simple_extent_ndims ( image_max_fspc_id );
    if(slice_ndims<0) {
      fprintf(stderr,"H5Sget_simple_extent_ndims: Fail %s:%d\n",__FILE__,__LINE__);
      goto cleanup;
    }
    if ( (hsize_t)slice_ndims > ndims ) {
      /*Can this really happen?*/
      slice_ndims = ndims;
    }

    /* The leading slice_ndims dimensions index the per-slice scale factors;
     * the trailing dimensions form the slice itself. */
    for ( j = 0; j < slice_ndims; j++ ) {
      image_slice_count[j] = hdf_count[j];
      image_slice_start[j] = hdf_start[j];
      if(hdf_count[j]>1) /*avoid zero sized dimensions?*/
        total_number_of_slices*=hdf_count[j];
    }
    for (i = slice_ndims; i < ndims; i++ ) {
      if(hdf_count[i]>1) /*avoid zero sized dimensions?*/
        image_slice_length*=hdf_count[i];
      image_slice_count[i] = 0;
      image_slice_start[i] = 0;
    }

    image_slice_max_buffer=malloc(total_number_of_slices*sizeof(double));
    if(!image_slice_max_buffer) {
      result=MI_ERROR;
      MI_LOG_ERROR(MI2_MSG_OUTOFMEM,total_number_of_slices*sizeof(double));
      goto cleanup;
    }
    image_slice_min_buffer=malloc(total_number_of_slices*sizeof(double));
    if(!image_slice_min_buffer) {
      result=MI_ERROR;
      MI_LOG_ERROR(MI2_MSG_OUTOFMEM,total_number_of_slices*sizeof(double));
      goto cleanup;
    }

    scaling_mspc_id = H5Screate_simple(slice_ndims, image_slice_count, NULL);

    if( (result=H5Sselect_hyperslab(image_max_fspc_id, H5S_SELECT_SET, image_slice_start, NULL, image_slice_count, NULL))>=0 ) {
      if((result=H5Dread(volume->imax_id, H5T_NATIVE_DOUBLE, scaling_mspc_id, image_max_fspc_id, H5P_DEFAULT,image_slice_max_buffer))<0) {
        MI_LOG_ERROR(MI2_MSG_HDF5,"H5Dread");
        goto cleanup;
      }
    } else {
      MI_LOG_ERROR(MI2_MSG_HDF5,"H5Sselect_hyperslab");
      goto cleanup;
    }

    if((result=H5Sselect_hyperslab(image_min_fspc_id, H5S_SELECT_SET, image_slice_start, NULL, image_slice_count, NULL))>=0 ) {
      if((result=H5Dread(volume->imin_id, H5T_NATIVE_DOUBLE, scaling_mspc_id, image_min_fspc_id, H5P_DEFAULT,image_slice_min_buffer))<0) {
        MI_LOG_ERROR(MI2_MSG_HDF5,"H5Dread");
        goto cleanup;
      }
    } else {
      MI_LOG_ERROR(MI2_MSG_HDF5,"H5Sselect_hyperslab");
      goto cleanup;
    }
  } else {
    /* No per-slice scaling: a single global range applies to the volume. */
    slice_ndims=0;
    total_number_of_slices=1;
    image_slice_max_buffer=malloc(sizeof(double));
    image_slice_min_buffer=malloc(sizeof(double));
    if(!image_slice_max_buffer || !image_slice_min_buffer) {
      result=MI_ERROR;
      MI_LOG_ERROR(MI2_MSG_OUTOFMEM,sizeof(double));
      goto cleanup;
    }
    miget_volume_range(volume,image_slice_max_buffer,image_slice_min_buffer);
    image_slice_length=1;
    /*it produces unity scaling*/
    scaling_needed=(*image_slice_max_buffer!=volume_valid_max) ||
                   (*image_slice_min_buffer!=volume_valid_min);
    for (i = 0; i < ndims; i++) {
      image_slice_length *= hdf_count[i];
    }
#ifdef _DEBUG
    printf("mirw_hyperslab_icv:Real max:%f min:%f\n",*image_slice_max_buffer,*image_slice_min_buffer);
#endif
  }

  /* A hack to disable interslice scaling when it is not needed according
   * to MINC1 specs: floating-point volumes are stored unscaled. */
  if( volume->volume_type==MI_TYPE_FLOAT ||
      volume->volume_type==MI_TYPE_DOUBLE ||
      volume->volume_type==MI_TYPE_FCOMPLEX ||
      volume->volume_type==MI_TYPE_DCOMPLEX ) {
    scaling_needed=0;
  }

#ifdef _DEBUG
  printf("mirw_hyperslab_icv:Slice_ndim:%d total_number_of_slices:%d image_slice_length:%d scaling_needed:%d\n",slice_ndims,total_number_of_slices,image_slice_length,scaling_needed);
#endif

  if (opcode == MIRW_OP_READ) {
    MI_CHECK_HDF_CALL(result = H5Dread(dset_id, buffer_type_id, mspc_id,
                                       fspc_id, H5P_DEFAULT, buffer),"H5Dread");
    if(result<0) {
      goto cleanup;
    }

    if(scaling_needed) {
      /* Map file values back to real values, slice by slice. */
      switch(buffer_data_type) {
      case MI_TYPE_FLOAT:
#ifdef _DEBUG
        printf("Descaling float\n");
#endif
        APPLY_DESCALING(float,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
        break;
      case MI_TYPE_DOUBLE:
#ifdef _DEBUG
        printf("Descaling double\n");
#endif
        APPLY_DESCALING(double,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
        break;
      case MI_TYPE_INT:
#ifdef _DEBUG
        printf("Descaling int\n");
#endif
        APPLY_DESCALING(int,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
        break;
      case MI_TYPE_UINT:
#ifdef _DEBUG
        printf("Descaling uint\n");
#endif
        APPLY_DESCALING(unsigned int,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
        break;
      case MI_TYPE_SHORT:
#ifdef _DEBUG
        printf("Descaling short\n");
#endif
        APPLY_DESCALING(short,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
        break;
      case MI_TYPE_USHORT:
#ifdef _DEBUG
        printf("Descaling ushort\n");
#endif
        APPLY_DESCALING(unsigned short,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
        break;
      case MI_TYPE_BYTE:
#ifdef _DEBUG
        printf("Descaling byte\n");
#endif
        APPLY_DESCALING(char,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
        break;
      case MI_TYPE_UBYTE:
#ifdef _DEBUG
        printf("Descaling ubyte\n");
#endif
        APPLY_DESCALING(unsigned char,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
        break;
      default:
        /*TODO: report unsupported conversion*/
        result=MI_ERROR;
        goto cleanup;
      }
    } else {
#ifdef _DEBUG
      printf("Descaling not needed!\n");
#endif
    }

    if (n_different != 0 ) {
      /* Reorder from file dimension order to the caller's apparent order. */
      for (i = 0; i < ndims; i++) {
        icount[i] = count[i];
      }
      restructure_array(ndims, buffer, icount, H5Tget_size(buffer_type_id),volume->dim_indices, dir);
      /*TODO: check if we managed to restructure the array*/
      result=0;
    }
  } else { /*opcode != MIRW_OP_READ*/
    volume->is_dirty = TRUE; /* Mark as modified. */

    if (n_different != 0 ) {
      /* Invert before calling */
      for (i = 0; i < ndims; i++) {
        icount[volume->dim_indices[i]] = count[i];
        idir[volume->dim_indices[i]] = dir[i]; /* this one was correct the original way*/
        imap[volume->dim_indices[i]] = i;
      }
    }

    if(scaling_needed || n_different != 0) {
      /*create temporary copy, to be destroyed*/
      temp_buffer=malloc(buffer_size);
      if(!temp_buffer) {
        MI_LOG_ERROR(MI2_MSG_OUTOFMEM,buffer_size);
        result=MI_ERROR; /*TODO: error code?*/
        goto cleanup;
      }
      memcpy(temp_buffer,buffer,buffer_size);

      if (n_different != 0 )
        restructure_array(ndims, temp_buffer, icount, H5Tget_size(buffer_type_id), imap, idir);

      if(scaling_needed) {
        /* Map real values to file values, slice by slice. */
        switch(buffer_data_type) {
        case MI_TYPE_FLOAT:
#ifdef _DEBUG
          printf("scaling float\n");
#endif
          APPLY_SCALING(float,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        case MI_TYPE_DOUBLE:
#ifdef _DEBUG
          printf("scaling double\n");
#endif
          APPLY_SCALING(double,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        case MI_TYPE_INT:
#ifdef _DEBUG
          printf("scaling int\n");
#endif
          APPLY_SCALING(int,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        case MI_TYPE_UINT:
#ifdef _DEBUG
          printf("scaling unsigned int\n");
#endif
          APPLY_SCALING(unsigned int,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        case MI_TYPE_SHORT:
#ifdef _DEBUG
          printf("scaling short\n");
#endif
          APPLY_SCALING(short,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        case MI_TYPE_USHORT:
#ifdef _DEBUG
          printf("scaling unsigned short\n");
#endif
          APPLY_SCALING(unsigned short,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        case MI_TYPE_BYTE:
#ifdef _DEBUG
          printf("scaling char\n");
#endif
          APPLY_SCALING(char,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        case MI_TYPE_UBYTE:
#ifdef _DEBUG
          printf("scaling unsigned char\n");
#endif
          APPLY_SCALING(unsigned char,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        default:
          /*TODO: report unsupported conversion*/
          result=MI_ERROR;
          goto cleanup;
        }
      }

      MI_CHECK_HDF_CALL(result = H5Dwrite(dset_id, buffer_type_id, mspc_id, fspc_id, H5P_DEFAULT, temp_buffer),"H5Dwrite");
    } else {
      MI_CHECK_HDF_CALL(result = H5Dwrite(dset_id, buffer_type_id, mspc_id, fspc_id, H5P_DEFAULT, buffer),"H5Dwrite");
    }
    if(result<0) {
      goto cleanup;
    }
  }

cleanup:
  /* Release every HDF5 handle and heap buffer acquired above; each is
   * initialized to -1/NULL so unconditional checks are safe. */
  if (scaling_mspc_id >= 0) {
    H5Sclose(scaling_mspc_id);
  }
  if (image_max_fspc_id >= 0) {
    H5Sclose(image_max_fspc_id);
  }
  if (image_min_fspc_id >= 0) {
    H5Sclose(image_min_fspc_id);
  }
  if (buffer_type_id >= 0) {
    H5Tclose(buffer_type_id);
  }
  if (mspc_id >= 0) {
    H5Sclose(mspc_id);
  }
  if (fspc_id >= 0) {
    H5Sclose(fspc_id);
  }
  if ( dset_id >=0 ) {
    H5Dclose(dset_id);
  }
  if(temp_buffer!=NULL) {
    free(temp_buffer);
  }
  if(image_slice_min_buffer!=NULL) {
    free(image_slice_min_buffer);
  }
  if(image_slice_max_buffer!=NULL) {
    free(image_slice_max_buffer);
  }
  return (result);
}
/** Read/write a hyperslab of data, performing dimension remapping * and data rescaling as needed. Data in the range (min-max) will map to the appropriate full range of buffer_data_type */ static int mirw_hyperslab_normalized(int opcode, mihandle_t volume, mitype_t buffer_data_type, const misize_t start[], const misize_t count[], double data_min, double data_max, void *buffer) { hid_t dset_id = -1; hid_t mspc_id = -1; hid_t fspc_id = -1; hid_t volume_type_id = -1; hid_t buffer_type_id = -1; int result = MI_ERROR; hsize_t hdf_start[MI2_MAX_VAR_DIMS]; hsize_t hdf_count[MI2_MAX_VAR_DIMS]; int dir[MI2_MAX_VAR_DIMS]; /* Direction vector in file order */ hsize_t ndims; int slice_ndims; int n_different = 0; double volume_valid_min, volume_valid_max; misize_t buffer_size; misize_t input_buffer_size; double *temp_buffer=NULL; size_t icount[MI2_MAX_VAR_DIMS]; int idir[MI2_MAX_VAR_DIMS]; int imap[MI2_MAX_VAR_DIMS]; double *image_slice_max_buffer=NULL; double *image_slice_min_buffer=NULL; char path[MI2_MAX_PATH]; hsize_t image_slice_start[MI2_MAX_VAR_DIMS]; hsize_t image_slice_count[MI2_MAX_VAR_DIMS]; hsize_t image_slice_length=0; hsize_t total_number_of_slices=0; hsize_t i; int j; /* Disallow write operations to anything but the highest resolution. 
*/ if (opcode == MIRW_OP_WRITE && volume->selected_resolution != 0) { /*TODO: report error that we are not dealing with the rihgt image here*/ return (MI_ERROR); } sprintf(path, MI_ROOT_PATH "/image/%d/image", volume->selected_resolution); /* Open the dataset with the specified path */ MI_CHECK_HDF_CALL(dset_id = H5Dopen1(volume->hdf_id, path),"H5Dopen1"); if (dset_id < 0) { return (MI_ERROR); } MI_CHECK_HDF_CALL(fspc_id = H5Dget_space(dset_id),"H5Dget_space"); if (fspc_id < 0) { /*TODO: report can't get dataset*/ goto cleanup; } buffer_type_id = mitype_to_hdftype(buffer_data_type,TRUE); if(buffer_type_id<0) { goto cleanup; } MI_CHECK_HDF_CALL(volume_type_id = H5Tcopy ( H5T_NATIVE_DOUBLE ),"H5Tcopy"); if(volume_type_id<0) { fprintf(stderr,"H5Tcopy: Fail %s:%d\n",__FILE__,__LINE__); goto cleanup; } ndims = volume->number_of_dims; if (ndims == 0) { /* A scalar volume is possible but extremely unlikely, not to * mention useless! */ mspc_id = H5Screate(H5S_SCALAR); } else { n_different = mitranslate_hyperslab_origin(volume,start,count, hdf_start,hdf_count,dir); MI_CHECK_HDF_CALL(mspc_id = H5Screate_simple(ndims, hdf_count, NULL),"H5Screate_simple"); if (mspc_id < 0) { goto cleanup; } } miget_hyperslab_size_hdf(volume_type_id,ndims,hdf_count,&buffer_size); miget_hyperslab_size_hdf(buffer_type_id,ndims,hdf_count,&input_buffer_size); MI_CHECK_HDF_CALL(result = H5Sselect_hyperslab(fspc_id, H5S_SELECT_SET, hdf_start, NULL, hdf_count, NULL),"H5Sselect_hyperslab"); if (result < 0) { goto cleanup; } miget_volume_valid_range( volume, &volume_valid_max, &volume_valid_min); #ifdef _DEBUG printf("mirw_hyperslab_normalized:Volume:%x valid_max:%f valid_min:%f scaling:%d\n",volume,volume_valid_max,volume_valid_min,volume->has_slice_scaling); #endif if(volume->has_slice_scaling && !(volume->volume_type==MI_TYPE_FLOAT || volume->volume_type==MI_TYPE_DOUBLE || volume->volume_type==MI_TYPE_FCOMPLEX || volume->volume_type==MI_TYPE_DCOMPLEX) ) { hid_t image_max_fspc_id; hid_t 
image_min_fspc_id; hid_t scaling_mspc_id; total_number_of_slices=1; image_slice_length=1; MI_CHECK_HDF_CALL(image_max_fspc_id=H5Dget_space(volume->imax_id),"H5Dget_space"); MI_CHECK_HDF_CALL(image_min_fspc_id=H5Dget_space(volume->imin_id),"H5Dget_space"); if ( image_max_fspc_id < 0 || image_min_fspc_id<0 ) { result=MI_ERROR; goto cleanup; } MI_CHECK_HDF_CALL(slice_ndims = H5Sget_simple_extent_ndims ( image_max_fspc_id ),"H5Sget_simple_extent_ndims"); if(slice_ndims<0) { goto cleanup; } if ( (hsize_t)slice_ndims > ndims ) { /*Can this really happen?*/ slice_ndims = ndims; } for ( j = 0; j < slice_ndims; j++ ) { image_slice_count[j] = hdf_count[j]; image_slice_start[j] = hdf_start[j]; if(hdf_count[j]>1) /*avoid zero sized dimensions?*/ total_number_of_slices*=hdf_count[j]; } for (i = slice_ndims; i < ndims; i++ ) { if(hdf_count[i]>1) /*avoid zero sized dimensions?*/ image_slice_length*=hdf_count[i]; image_slice_count[i] = 0; image_slice_start[i] = 0; } image_slice_max_buffer=malloc(total_number_of_slices*sizeof(double)); image_slice_min_buffer=malloc(total_number_of_slices*sizeof(double)); /*TODO check for allocation failure ?*/ MI_CHECK_HDF_CALL(scaling_mspc_id = H5Screate_simple(slice_ndims, image_slice_count, NULL),"H5Screate_simple"); if( (result=H5Sselect_hyperslab(image_max_fspc_id, H5S_SELECT_SET, image_slice_start, NULL, image_slice_count, NULL))>=0 ) { if( ( result=H5Dread(volume->imax_id, H5T_NATIVE_DOUBLE, scaling_mspc_id, image_max_fspc_id, H5P_DEFAULT,image_slice_max_buffer))<0) { MI_LOG_ERROR(MI2_MSG_HDF5,"H5Dread"); goto cleanup; } } else { MI_LOG_ERROR(MI2_MSG_HDF5,"H5Sselect_hyperslab"); goto cleanup; } if( (result=H5Sselect_hyperslab(image_min_fspc_id, H5S_SELECT_SET, image_slice_start, NULL, image_slice_count, NULL))>=0 ) { if( (result=H5Dread(volume->imin_id, H5T_NATIVE_DOUBLE, scaling_mspc_id, image_min_fspc_id, H5P_DEFAULT,image_slice_min_buffer))<0) { MI_LOG_ERROR(MI2_MSG_HDF5,"H5Dread"); goto cleanup; } } else { 
MI_LOG_ERROR(MI2_MSG_HDF5,"H5Sselect_hyperslab"); goto cleanup; } H5Sclose(scaling_mspc_id); H5Sclose(image_max_fspc_id); } else { slice_ndims=0; total_number_of_slices=1; image_slice_max_buffer=malloc(sizeof(double)); image_slice_min_buffer=malloc(sizeof(double)); miget_volume_range( volume,image_slice_max_buffer,image_slice_min_buffer ); image_slice_length=1; for (i = 0; i < ndims; i++) { image_slice_length *= hdf_count[i]; } #ifdef _DEBUG printf("mirw_hyperslab_normalized:Real max:%f min:%f\n",*image_slice_max_buffer,*image_slice_min_buffer); #endif } #ifdef _DEBUG printf("mirw_hyperslab_normalized:Slice_ndim:%d total_number_of_slices:%d image_slice_length:%d\n",slice_ndims,total_number_of_slices,image_slice_length); printf("mirw_hyperslab_normalized:data min:%f data max:%f buffer_data_type:%d\n",data_min,data_max,buffer_data_type); #endif /*Allocate temporary Buffer*/ temp_buffer=(double*)malloc(buffer_size); if(!temp_buffer) { MI_LOG_ERROR(MI2_MSG_OUTOFMEM,buffer_size); result=MI_ERROR; goto cleanup; } if (opcode == MIRW_OP_READ) { MI_CHECK_HDF_CALL(result = H5Dread(dset_id, volume_type_id, mspc_id, fspc_id, H5P_DEFAULT, temp_buffer),"H5Dread"); if(result<0) { goto cleanup; } /*WARNING: floating point types will be normalized between 0.0 and 1.0*/ switch(buffer_data_type) { case MI_TYPE_FLOAT: APPLY_DESCALING_NORM(float,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,0.0f,1.0f); break; case MI_TYPE_DOUBLE: APPLY_DESCALING_NORM(double,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,0.0,1.0); break; case MI_TYPE_INT: APPLY_DESCALING_NORM(int,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,INT_MIN,INT_MAX); break; case MI_TYPE_UINT: 
APPLY_DESCALING_NORM(unsigned int,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,0,UINT_MAX); break; case MI_TYPE_SHORT: APPLY_DESCALING_NORM(short,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,SHRT_MIN,SHRT_MAX); break; case MI_TYPE_USHORT: APPLY_DESCALING_NORM(unsigned short,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,0,USHRT_MAX); break; case MI_TYPE_BYTE: APPLY_DESCALING_NORM(char,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,SCHAR_MIN,SCHAR_MAX); break; case MI_TYPE_UBYTE: APPLY_DESCALING_NORM(unsigned char,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,0,UCHAR_MAX); break; default: /*TODO: report unsupported conversion*/ result=MI_ERROR; goto cleanup; } if (n_different != 0 ) { for (i = 0; i < ndims; i++) { icount[i] = count[i]; } restructure_array(ndims, buffer, icount, H5Tget_size(buffer_type_id),volume->dim_indices, dir); /*TODO: check if we managed to restructure the array*/ result=0; } } else { /*opcode != MIRW_OP_READ*/
void UPCdata::readHDFfile(char* filename, int count){
  // count: 11040 for segmentation and 14640 for classification
#if HDF5_AVAILABLE==2
  // based on http://www.hdfgroup.org/HDF5/doc/cpplus_RM/readdata_8cpp-example.html
  // Reads the packed table "Dataset" from the HDF5 file and converts each
  // record into a Sample appended to this->samples.

  samples.resize(0);

  const int NFIELDS = 5;

  // In-memory record layout matching one table row.
  typedef struct _samplestruct{
    int concentrations[3];
    int samplenr;
    int set;
    int compoundnr;
    float data[1020]; // fixed spectrum length; dynamic allocation would need a different layout
  } samplestruct;

  // NOTE(review): NRECORDS is assumed to be a file-level constant/macro giving
  // the row count (11040 or 14640) -- confirm it matches the file being read.
  samplestruct *sampledata = (samplestruct*)malloc(NRECORDS*sizeof(samplestruct));
  if (sampledata == NULL) {
    // Memory could not be allocated, the program should handle the error here as appropriate.
    cout << "memory could not be allocated!";
    return;
  }

  size_t dst_size = sizeof(samplestruct);

  // BUG FIX: dst_offset and dst_sizes must be parallel arrays describing the
  // fields in the same order; the original listed the sizes in a different
  // order (data first) than the offsets, corrupting every decoded record.
  size_t dst_offset[NFIELDS] = { HOFFSET(samplestruct, concentrations),
                                 HOFFSET(samplestruct, samplenr),
                                 HOFFSET(samplestruct, set),
                                 HOFFSET(samplestruct, compoundnr),
                                 HOFFSET(samplestruct, data) };
  size_t dst_sizes[NFIELDS] = { sizeof(sampledata[0].concentrations),
                                sizeof(sampledata[0].samplenr),
                                sizeof(sampledata[0].set),
                                sizeof(sampledata[0].compoundnr),
                                sizeof(sampledata[0].data) };

  hid_t file_id;
  herr_t status;

  // BUG FIX: H5Tarray_create takes element counts, not byte counts; the
  // original passed 1020*sizeof(float).
  const hsize_t datadim[] = {1020};
  const hsize_t concdim[] = {3};
  hid_t floatarray = H5Tarray_create(H5T_NATIVE_FLOAT, 1, datadim);
  // BUG FIX: concentrations is int[3] in the struct, so the array type must be
  // built from H5T_NATIVE_INT (was H5T_NATIVE_FLOAT).
  hid_t concarray = H5Tarray_create(H5T_NATIVE_INT, 1, concdim);

  // Field types in the same order as dst_offset (kept for table-creation use).
  hid_t field_type[NFIELDS];
  field_type[0] = concarray;
  field_type[1] = H5T_NATIVE_INT;
  field_type[2] = H5T_NATIVE_INT;
  field_type[3] = H5T_NATIVE_INT;
  field_type[4] = floatarray;
  (void)field_type; // not needed for reading, silences unused warning

  // opening file
  file_id=H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
  if ( file_id < 0 ){
    std::cout << "UPCdata::readHDFfile(): Error opening " << filename <<": " << file_id << std::endl;
    H5Tclose(floatarray);
    H5Tclose(concarray);
    free(sampledata);  // BUG FIX: original leaked the record buffer here
    return;
  }

  hid_t dataset_id=H5Dopen1(file_id, "Dataset");
  hsize_t size = H5Dget_storage_size(dataset_id);
  (void)size; // informational only

  status=H5TBread_table(file_id, "Dataset", dst_size, dst_offset, dst_sizes, sampledata);
  (void)status; // NOTE(review): consider checking status<0 and aborting the conversion

  cout << "converting data format" << endl;
  for(int i=0;i<NRECORDS;i++){
    Sample dummysample;
    dummysample.concentrations[0]=sampledata[i].concentrations[0];
    dummysample.concentrations[1]=sampledata[i].concentrations[1];
    dummysample.concentrations[2]=sampledata[i].concentrations[2];
    dummysample.samplenr=sampledata[i].samplenr;
    dummysample.set=(Sample::SetType)sampledata[i].set;
    dummysample.compoundnr=sampledata[i].compoundnr;
    dummysample.data.resize(1020);
    for(int j=0;j<1020;j++)
      dummysample.data[j]=sampledata[i].data[j];
    samples.push_back(dummysample);
  }

  // BUG FIX: close the dataset and datatypes, free the record buffer
  // (original leaked all of them; its commented-out free() named the type).
  if (dataset_id >= 0)
    H5Dclose(dataset_id);
  H5Tclose(floatarray);
  H5Tclose(concarray);
  free(sampledata);

  // close the file
  H5Fclose( file_id );
#endif
}
/** Read/write a hyperslab of data. This is the simplified function
 * which performs no value conversion. It is much more efficient than
 * mirw_hyperslab_icv()
 *
 * \param opcode MIRW_OP_READ or MIRW_OP_WRITE.
 * \param volume Open volume handle.
 * \param midatatype Element type of \a buffer, or MI_TYPE_UNKNOWN to use
 *        the volume's native type.
 * \param start Hyperslab origin in the caller's (apparent) dimension order.
 * \param count Hyperslab extent in the caller's (apparent) dimension order.
 * \param buffer Caller's data buffer (destination on read, source on write).
 * \return Non-negative on success, MI_ERROR on failure.
 *
 * FIX(review): the original called H5Dget_space() twice in a row, leaking
 * the first dataspace id; it also never checked the type_id it obtained.
 */
static int mirw_hyperslab_raw(int opcode,
                              mihandle_t volume,
                              mitype_t midatatype,
                              const misize_t start[],
                              const misize_t count[],
                              void *buffer)
{
  hid_t dset_id = -1;
  hid_t mspc_id = -1;
  hid_t fspc_id = -1;
  hid_t type_id = -1;
  int result = MI_ERROR;
  hsize_t hdf_start[MI2_MAX_VAR_DIMS];
  hsize_t hdf_count[MI2_MAX_VAR_DIMS];
  int dir[MI2_MAX_VAR_DIMS]; /* Direction vector in file order */
  int ndims;
  int n_different = 0;
  misize_t buffer_size;
  void *temp_buffer=NULL;
  char path[MI2_MAX_PATH];
  size_t icount[MI2_MAX_VAR_DIMS];

  /* Disallow write operations to anything but the highest resolution. */
  if (opcode == MIRW_OP_WRITE && volume->selected_resolution != 0) {
    return MI_LOG_ERROR(MI2_MSG_GENERIC,"Trying to write to a volume thumbnail");
  }

  sprintf(path, MI_ROOT_PATH "/image/%d/image", volume->selected_resolution);
  /*printf("Using:%s\n",path);*/

  /* Open the dataset with the specified path */
  MI_CHECK_HDF_CALL(dset_id = H5Dopen1(volume->hdf_id, path),"H5Dopen1");
  if (dset_id < 0) {
    return (MI_ERROR);
  }

  /* FIX: obtain the file dataspace exactly once (was duplicated, leaking
   * one dataspace id per call). */
  MI_CHECK_HDF_CALL(fspc_id = H5Dget_space(dset_id),"H5Dget_space");
  if (fspc_id < 0) {
    goto cleanup;
  }

  if (midatatype == MI_TYPE_UNKNOWN) {
    type_id = H5Tcopy(volume->mtype_id);
  } else {
    type_id = mitype_to_hdftype(midatatype, TRUE);
  }
  if (type_id < 0) {
    /* FIX: previously unchecked; a bad type would surface later in H5Dread. */
    goto cleanup;
  }

  ndims = volume->number_of_dims;

  if (ndims == 0) {
    /* A scalar volume is possible but extremely unlikely, not to
     * mention useless!
     */
    mspc_id = H5Screate(H5S_SCALAR);
  } else {
    n_different = mitranslate_hyperslab_origin(volume, start, count,
                                               hdf_start, hdf_count, dir);
    MI_CHECK_HDF_CALL(mspc_id = H5Screate_simple(ndims, hdf_count, NULL),"H5Screate_simple");
    if (mspc_id < 0) {
      goto cleanup;
    }
  }

  MI_CHECK_HDF_CALL(result = H5Sselect_hyperslab(fspc_id, H5S_SELECT_SET,
                                                 hdf_start, NULL, hdf_count,
                                                 NULL),"H5Sselect_hyperslab");
  if (result < 0) {
    goto cleanup;
  }

  miget_hyperslab_size_hdf(type_id,ndims,hdf_count,&buffer_size);

  if (opcode == MIRW_OP_READ) {
    MI_CHECK_HDF_CALL(result = H5Dread(dset_id, type_id, mspc_id, fspc_id,
                                       H5P_DEFAULT,buffer),"H5Dread");
    /* Restructure the array after reading the data in file orientation. */
    if (n_different != 0) {
      int i;
      for (i = 0; i < ndims; i++) {
        icount[i] = count[i];
      }
      restructure_array(ndims, buffer, icount, H5Tget_size(type_id),
                        volume->dim_indices, dir);
    }
  } else {
    volume->is_dirty = TRUE; /* Mark as modified. */

    /* Restructure array before writing to file. */
    if (n_different != 0) {
      int idir[MI2_MAX_VAR_DIMS];
      int imap[MI2_MAX_VAR_DIMS];
      int i;

      /* Invert before calling */
      for (i = 0; i < ndims; i++) {
        icount[volume->dim_indices[i]] = count[i];
        idir[volume->dim_indices[i]] = dir[i]; /* this one was correct the original way */
        imap[volume->dim_indices[i]] = i;
      }

      /*Use temporary array to preserve input data*/
      temp_buffer=malloc(buffer_size);
      if(temp_buffer==NULL) {
        MI_LOG_ERROR(MI2_MSG_OUTOFMEM,buffer_size);
        result=MI_ERROR;
        goto cleanup;
      }
      memcpy(temp_buffer,buffer,buffer_size);

      restructure_array(ndims, temp_buffer, icount, H5Tget_size(type_id),
                        imap, idir);
      MI_CHECK_HDF_CALL(result = H5Dwrite(dset_id, type_id, mspc_id, fspc_id,
                                          H5P_DEFAULT, temp_buffer),"H5Dwrite");
    } else {
      MI_CHECK_HDF_CALL(result = H5Dwrite(dset_id, type_id, mspc_id, fspc_id,
                                          H5P_DEFAULT, buffer),"H5Dwrite");
    }
  }

cleanup:
  if (type_id >= 0) {
    H5Tclose(type_id);
  }
  if (mspc_id >= 0) {
    H5Sclose(mspc_id);
  }
  if (fspc_id >= 0) {
    H5Sclose(fspc_id);
  }
  if ( dset_id >=0 ) {
    H5Dclose(dset_id);
  }
  if ( temp_buffer!= NULL) {
    free( temp_buffer );
  }
  return (result);
}
//***************************************************************************** // CONSTRUCTOR: ossimHdfGridModel(filename) // // Constructs model from geometry file // //***************************************************************************** ossimHdfGridModel::ossimHdfGridModel(const ossimFilename& file, const ossimDrect& imageRect, ossimString latGridIndexOrName, ossimString lonGridIndexOrName, const ossimIpt& gridSpacing) : ossimCoarseGridModel(), m_isHdf4(true) { theLatGrid.setDomainType(ossimDblGrid::SAWTOOTH_90); theLonGrid.setDomainType(ossimDblGrid::WRAP_180); if (latGridIndexOrName.contains("/Latitude") == false && lonGridIndexOrName.contains("/Longitude") == false)//hdf4 { ossim_int32 latGridIndex = ossimString::toInt(latGridIndexOrName); ossim_int32 lonGridIndex = ossimString::toInt(lonGridIndexOrName); int32 sd_id = SDstart(file.c_str(), DFACC_READ); if (sd_id > 0) { int32 sds_id = SDselect(sd_id, latGridIndex); if (sds_id > 0) { setGridNodes(theLatGrid, sds_id, gridSpacing); } SDendaccess (sds_id); sds_id = SDselect(sd_id, lonGridIndex); if (sds_id > 0) { setGridNodes(theLonGrid, sds_id, gridSpacing); } SDendaccess (sds_id); } SDend(sd_id); } else //hdf5 { m_isHdf4 = false; ossim_int32 file_id = H5Fopen(file.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT); // Depreciated, need to fix... (drb) // ossim_int32 dataset_id = H5Dopen(file_id, latGridIndexOrName); ossim_int32 dataset_id = H5Dopen1(file_id, latGridIndexOrName); if (dataset_id >= 0) { setGridNodes(theLatGrid, dataset_id, gridSpacing); } H5Dclose(dataset_id); // Depreciated, need to fix... 
(drb) // dataset_id = H5Dopen(file_id, lonGridIndexOrName); dataset_id = H5Dopen1(file_id, lonGridIndexOrName); if (dataset_id > 0) { setGridNodes(theLatGrid, dataset_id, gridSpacing); } H5Dclose(dataset_id); H5Fclose(file_id); } // Filter this HDF data as it is often very noisy: double filter_kernel[81]; double weight = 1.0/81.0; for (int i=0; i<81; i++) filter_kernel[i] = weight; theLatGrid.filter(9,9, filter_kernel); theLonGrid.filter(9,9, filter_kernel); theLatGrid.enableExtrapolation(); theLonGrid.enableExtrapolation(); theHeightEnabledFlag = false; initializeModelParams(imageRect); //debugDump(); //### }
/*
 *  Read the 1-D energy-scale calibration for the run-integrated energy
 *  spectrum from an HDF5 file (/energySpectrum/runIntegratedEnergyScale)
 *  and copy it into global->espectrumScale.
 *
 *  On any problem (missing file, wrong layout, open failure) a warning is
 *  printed and the default (zeroed) energy scale is left in place.  All
 *  HDF5 handles opened here are closed on every path — the original
 *  leaked the file, group, dataset and dataspace on each early return,
 *  and indexed dims[1] of a dims[ndims==1] array (out of bounds).
 */
void readSpectrumEnergyScale(cGlobal *global, char *filename) {

	char      groupname[1024];
	char      fieldname[1024];
	hid_t     file_id;
	hid_t     datagroup_id;
	hid_t     dataset_id;
	hid_t     dataspace_id;
	int       ndims;

	sprintf(groupname, "energySpectrum");
	sprintf(fieldname, "runIntegratedEnergyScale");

	// Check if an energy scale calibration file has been specified
	if ( strcmp(filename,"") == 0 ) {
		printf("spectrum energy scale calibration file path was not specified\n");
		printf("spectra will be output with default (0) energy scale\n");
		return;
	}

	// Check whether the file exists
	FILE* fp = fopen(filename, "r");
	if (fp) {	// file exists
		fclose(fp);
	}
	else {		// file doesn't exist
		printf("specified energy scale calibration file does not exist: %s\n",filename);
		printf("spectra will be output with default (0) energy scale\n");
		return;
	}

	printf("Reading energy spectrum scale calibration file:\n");
	printf("\t%s\n",filename);

	// Open the file
	file_id = H5Fopen(filename,H5F_ACC_RDONLY,H5P_DEFAULT);
	if(file_id < 0){
		printf("ERROR: Could not open file %s\n",filename);
		printf("spectra will be output with default (0) energy scale\n");
		return;
	}

	// Open the group, checking the handle before use
	datagroup_id = H5Gopen1(file_id, groupname);
	if(datagroup_id < 0){
		printf("ERROR: Could not open group %s in %s\n",groupname,filename);
		printf("spectra will be output with default (0) energy scale\n");
		H5Fclose(file_id);
		return;
	}

	// Open the dataset, checking the handle before use
	dataset_id = H5Dopen1(datagroup_id, fieldname);
	if(dataset_id < 0){
		printf("ERROR: Could not open dataset %s in %s\n",fieldname,filename);
		printf("spectra will be output with default (0) energy scale\n");
		H5Gclose(datagroup_id);
		H5Fclose(file_id);
		return;
	}
	dataspace_id = H5Dget_space(dataset_id);

	// The calibration must be a 1-D array of exactly espectrumLength values.
	ndims = H5Sget_simple_extent_ndims(dataspace_id);
	hsize_t dims[1] = {0};
	if(ndims == 1)
		H5Sget_simple_extent_dims(dataspace_id, dims, NULL);
	if(ndims != 1 || dims[0] != (hsize_t)global->espectrumLength) {
		printf("the specified file does not have the correct dimensions for energy scale calibration, ndims=%i\n",ndims);
		printf("spectra will be output with default (0) energy scale\n");
		H5Sclose(dataspace_id);
		H5Dclose(dataset_id);
		H5Gclose(datagroup_id);
		H5Fclose(file_id);
		return;
	}

	// Read as native double: HDF5 converts from whatever numeric type is
	// stored on disk (the original read with the raw file datatype into a
	// double buffer, which is only correct for 64-bit IEEE files).
	double* energyscale = (double *) calloc(global->espectrumLength, sizeof(double));
	H5Dread(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, energyscale);
	for(int i=0; i<global->espectrumLength; i++) {
		global->espectrumScale[i] = energyscale[i];
	}
	free(energyscale);

	// Close and cleanup
	H5Sclose(dataspace_id);
	H5Dclose(dataset_id);
	H5Gclose(datagroup_id);
	H5Fclose(file_id);

	printf("energy spectrum scale calibration file read successful:\n");
	return;
}
int AMRreader:: readAMRadditionData() { hid_t file_id = H5Fopen( filename_.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT ); if( file_id<0 ) { debug1 << "Failed to open AMR file: " << filename_ << " when read in mesh.\n"; return -1; } hid_t gid = H5Gopen1( file_id, amr_grpname ); if( gid<0 ) { debug1 << "Failed to open AMR group in " << filename_ << " when read in mesh.\n"; return -2; } prebuf_ = new float[ blksz_*nblks_ ]; sndbuf_ = new float[ blksz_*nblks_ ]; tmpbuf_ = new float[ blksz_*nblks_ ]; if( prebuf_==NULL || sndbuf_==NULL || tmpbuf_==NULL ) { debug1 << "Failed to allocate additional datbuf for " << filename_ << ".\n"; return -3; } hid_t datid = H5Dopen1( gid, amr_prename ); if( datid<0 ) { debug1 << "Failed to open " << amr_prename << " data in " << filename_ << ".\n"; return -4; } herr_t herr = H5Dread( datid, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, prebuf_ ); H5Dclose(datid); if( herr<0 ) { debug1 << "Failed to read " << amr_prename << " in " << filename_ << ".\n"; return -5; } datid = H5Dopen1( gid, amr_sndname ); if( datid<0 ) { debug1 << "Failed to open " << amr_sndname << " data in " << filename_ << ".\n"; return -4; } herr = H5Dread( datid, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, sndbuf_ ); H5Dclose(datid); if( herr<0 ) { debug1 << "Failed to read " << amr_sndname << " in " << filename_ << ".\n"; return -5; } datid = H5Dopen1( gid, amr_tmpname ); if( datid<0 ) { debug1 << "Failed to open " << amr_tmpname << " data in " << filename_ << ".\n"; return -4; } herr = H5Dread( datid, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, tmpbuf_ ); H5Dclose(datid); if( herr<0 ) { debug1 << "Failed to read " << amr_tmpname << " in " << filename_ << ".\n"; return -5; } H5Gclose( gid ); H5Fclose( file_id ); return 0; }
void cData2d::readHDF5(char* filename, char* fieldname){ // Open the file hid_t file_id; file_id = H5Fopen(filename,H5F_ACC_RDONLY,H5P_DEFAULT); if(file_id < 0){ printf("ERROR: Could not open file %s\n",filename); return; } // Open the dataset hid_t dataset_id; hid_t dataspace_id; dataset_id = H5Dopen1(file_id, fieldname); dataspace_id = H5Dget_space(dataset_id); // Test if 2D data int ndims; ndims = H5Sget_simple_extent_ndims(dataspace_id); if(ndims != 2) { printf("2dData::readHDF5: Not 2D data set, ndims=%i\n",ndims); exit(0); } // Get dimensions of data set (nx, ny, nn) hsize_t dims[ndims]; H5Sget_simple_extent_dims(dataspace_id,dims,NULL); ny = dims[0]; nx = dims[1]; nn = 1; for(int i = 0;i<ndims;i++) nn *= dims[i]; // Create space for the new data free(data); data = NULL; data = (tData2d *) calloc(nn, sizeof(tData2d)); // Read in data after setting up a temporary buffer of the appropriate variable type // Somehow this works best when split out accordint to different data types // Fix into general form later hid_t datatype_id; H5T_class_t dataclass; size_t size; datatype_id = H5Dget_type(dataset_id); dataclass = H5Tget_class(datatype_id); size = H5Tget_size(datatype_id); if(dataclass == H5T_FLOAT){ if (size == sizeof(float)) { float* buffer = (float *) calloc(nn, sizeof(float)); H5Dread(dataset_id, datatype_id, H5S_ALL,H5S_ALL, H5P_DEFAULT, buffer); for(long i=0; i<nn; i++) data[i] = buffer[i]; free(buffer); } else if (size == sizeof(double)) { double* buffer = (double *) calloc(nn, sizeof(double)); H5Dread(dataset_id, datatype_id, H5S_ALL,H5S_ALL, H5P_DEFAULT, buffer); for(long i=0; i<nn; i++) data[i] = buffer[i]; free(buffer); } else { printf("2dData::readHDF5: unknown floating point type, size=%i\n",(int) size); return; } } else if(dataclass == H5T_INTEGER){ if (size == sizeof(char)) { char* buffer = (char*) calloc(nn, sizeof(char)); H5Dread(dataset_id, datatype_id, H5S_ALL,H5S_ALL, H5P_DEFAULT, buffer); for(long i=0; i<nn; i++) data[i] = buffer[i]; 
free(buffer); } else if (size == sizeof(short)) { short* buffer = (short*) calloc(nn, sizeof(short)); H5Dread(dataset_id, datatype_id, H5S_ALL,H5S_ALL, H5P_DEFAULT, buffer); for(long i=0; i<nn; i++) data[i] = buffer[i]; free(buffer); } else if (size == sizeof(int)) { int* buffer = (int *) calloc(nn, sizeof(int)); H5Dread(dataset_id, datatype_id, H5S_ALL,H5S_ALL, H5P_DEFAULT, buffer); for(long i=0; i<nn; i++) data[i] = buffer[i]; free(buffer); } else if (size == sizeof(long)) { long* buffer = (long *) calloc(nn, sizeof(long)); H5Dread(dataset_id, datatype_id, H5S_ALL,H5S_ALL, H5P_DEFAULT, buffer); for(long i=0; i<nn; i++) data[i] = buffer[i]; free(buffer); } else { printf("2dData::readHDF5: unknown integer type, size=%lu\n",size); exit(1); } } else { printf("2dData::readHDF5: unknown HDF5 data type\n"); return; } // Close and cleanup H5Dclose(dataset_id); // Cleanup stale IDs hid_t ids[256]; int n_ids = H5Fget_obj_ids(file_id, H5F_OBJ_ALL, 256, ids); for (long i=0; i<n_ids; i++ ) { hid_t id; H5I_type_t type; id = ids[i]; type = H5Iget_type(id); if ( type == H5I_GROUP ) H5Gclose(id); if ( type == H5I_DATASET ) H5Dclose(id); if ( type == H5I_DATASPACE ) H5Sclose(id); //if ( type == H5I_DATATYPE ) // H5Dclose(id); } H5Fclose(file_id); }
/* Test driver for HDF5 group operations.  Each section creates a file,
 * exercises one group-related feature, reopens the file to verify, and
 * reports via the project's ERR / SUMMARIZE_ERR / FINAL_RESULTS macros
 * (defined elsewhere; presumably they record and print failures —
 * TODO confirm in the test harness header). */
int main()
{
   printf("\n*** Checking HDF5 group functions.\n");
   printf("*** Checking out root group...");
   {
      hid_t fileid, grpid, access_plistid;

      /* Open the root group of a new file. */
      if ((access_plistid = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR;
      /* H5F_CLOSE_SEMI: file close fails if objects are still open. */
      if (H5Pset_fclose_degree(access_plistid, H5F_CLOSE_SEMI)) ERR;
      if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT,
                              access_plistid)) < 0) ERR;
      if ((grpid = H5Gopen(fileid, "/")) < 0) ERR;
      if (H5Gclose(grpid) < 0 || H5Fclose(fileid) < 0) ERR;

      /* Reopen file and root group. */
      if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDWR, access_plistid)) < 0) ERR;
      if ((grpid = H5Gopen(fileid, "/")) < 0) ERR;
      if (H5Gclose(grpid) < 0 || H5Fclose(fileid) < 0) ERR;
   }
   SUMMARIZE_ERR;

   printf("*** Checking out H5Gmove...");
   {
      hid_t fileid, grpid;
      hid_t datasetid, spaceid;

      /* Create file with one dataset (scalar int in the root group). */
      if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT,
                              H5P_DEFAULT)) < 0) ERR;
      if ((grpid = H5Gopen(fileid, "/")) < 0) ERR;
      if ((spaceid = H5Screate(H5S_SCALAR)) < 0) ERR;
      if ((datasetid = H5Dcreate(grpid, DATASET_NAME, H5T_NATIVE_INT,
                                 spaceid, H5P_DEFAULT)) < 0) ERR;
      if (H5Dclose(datasetid) < 0 ||
          H5Sclose(spaceid) < 0 ||
          H5Gclose(grpid) < 0 ||
          H5Fclose(fileid) < 0) ERR;

      /* Reopen file and check, then rename dataset.  The dataset must be
       * openable under the old name before the move and under the new
       * name after it. */
      if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) ERR;
      if ((grpid = H5Gopen(fileid, "/")) < 0) ERR;
      if ((datasetid = H5Dopen1(grpid, DATASET_NAME)) < 0) ERR;
      if (H5Dclose(datasetid) < 0) ERR;
      if (H5Gmove(grpid, DATASET_NAME, NEW_NAME) < 0) ERR;
      if ((datasetid = H5Dopen1(grpid, NEW_NAME)) < 0) ERR;
      if (H5Dclose(datasetid) < 0 ||
          H5Gclose(grpid) < 0 ||
          H5Fclose(fileid) < 0) ERR;
   }
   SUMMARIZE_ERR;

   printf("*** Checking out sub-groups...");
   {
      hid_t fileid, grpid, subgrpid;

      /* Create file with some nested groups. */
      if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT,
                              H5P_DEFAULT)) < 0) ERR;
      if ((grpid = H5Gcreate(fileid, GRP_NAME, 0)) < 0) ERR;
      if ((subgrpid = H5Gcreate(grpid, SUB_GRP_NAME, 0)) < 0) ERR;
      if (H5Gclose(subgrpid) < 0 ||
          H5Gclose(grpid) < 0 ||
          H5Fclose(fileid) < 0) ERR;

      /* Reopen file and discover groups. */
      if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) ERR;
      if ((grpid = H5Gopen(fileid, GRP_NAME)) < 0) ERR;
      if ((subgrpid = H5Gopen(grpid, SUB_GRP_NAME)) < 0) ERR;
      if (H5Gclose(subgrpid) < 0 ||
          H5Gclose(grpid) < 0 ||
          H5Fclose(fileid) < 0) ERR;
   }
   SUMMARIZE_ERR;

   printf("*** Checking out UTF8 named sub-group...");
   {
      hid_t fileid, grpid, subgrpid;

      /* Create file with nested group whose name is a UTF-8 string
       * (norm_utf8 is defined elsewhere in this test file). */
      if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT,
                              H5P_DEFAULT)) < 0) ERR;
      if ((grpid = H5Gcreate(fileid, (char *)norm_utf8, 0)) < 0) ERR;
      if ((subgrpid = H5Gcreate(grpid, SUB_GRP_NAME, 0)) < 0) ERR;
      if (H5Gclose(subgrpid) < 0 ||
          H5Gclose(grpid) < 0 ||
          H5Fclose(fileid) < 0) ERR;

      /* Reopen file and discover groups. */
      if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) ERR;
      if ((grpid = H5Gopen(fileid, (char *)norm_utf8)) < 0) ERR;
      if ((subgrpid = H5Gopen(grpid, SUB_GRP_NAME)) < 0) ERR;
      if (H5Gclose(subgrpid) < 0 ||
          H5Gclose(grpid) < 0 ||
          H5Fclose(fileid) < 0) ERR;
   }
   SUMMARIZE_ERR;

   printf("*** Checking out UTF8 named sub-group with group creation ordering...");
   {
      hid_t fileid, grpid, subgrpid;
      hid_t fapl_id, fcpl_id, gcpl_id;

      /* Create file with nested group, using the 1.8+ file format
       * (latest libver bounds) and creation-order tracking/indexing on
       * both the file and the group. */
      if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR;
      if (H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST,
                               H5F_LIBVER_LATEST) < 0) ERR;
      if ((fcpl_id = H5Pcreate(H5P_FILE_CREATE)) < 0) ERR;
      if (H5Pset_link_creation_order(fcpl_id,
                                     H5P_CRT_ORDER_TRACKED|H5P_CRT_ORDER_INDEXED) < 0) ERR;
      if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, fcpl_id,
                              fapl_id)) < 0) ERR;
      if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0) ERR;
      if (H5Pset_link_creation_order(gcpl_id,
                                     H5P_CRT_ORDER_TRACKED|H5P_CRT_ORDER_INDEXED) < 0) ERR;
      /* Anonymous group + explicit H5Olink gives the group its UTF-8
       * name after creation. */
      if ((grpid = H5Gcreate_anon(fileid, gcpl_id, H5P_DEFAULT)) < 0) ERR;
      if ((H5Olink(grpid, fileid, (char *)norm_utf8, H5P_DEFAULT,
                   H5P_DEFAULT)) < 0) ERR;
      if ((subgrpid = H5Gcreate(grpid, SUB_GRP_NAME, 0)) < 0) ERR;
      if (H5Gclose(subgrpid) < 0 ||
          H5Gclose(grpid) < 0 ||
          H5Fclose(fileid) < 0) ERR;

      /* Reopen file and discover groups. */
      if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) ERR;
      if ((grpid = H5Gopen(fileid, (char *)norm_utf8)) < 0) ERR;
      if ((subgrpid = H5Gopen(grpid, SUB_GRP_NAME)) < 0) ERR;
      if (H5Gclose(subgrpid) < 0 ||
          H5Gclose(grpid) < 0 ||
          H5Fclose(fileid) < 0) ERR;
   }
   SUMMARIZE_ERR;
   FINAL_RESULTS;
}
// Read an n-dimensional HDF5 dataset into *outArray (element type dtype).
//
// Fills dims[0..2] with up to three extents in reversed axis order
// (NOTE(review): the reversal was marked "NOT SURE" by the original
// author — confirm against callers).  Reallocates *outArray to hold all
// elements and converts from the on-disk numeric type, selected by type
// class and element size.  Returns true on success, false on any error.
//
// Fixes over the original: `delete` on a new[] array (UB) is now
// delete[]; dimsl is no longer leaked; handles are closed on every early
// return; the unused local `rrr` is gone; loop indices are size_t to
// match nn.
bool ReadHDF5file(char* filename, char* fieldname, dtype** outArray, int* dims)
{
#ifdef _HDF5_H
	// Open the file
	hid_t file_id;
	file_id = H5Fopen(filename,H5F_ACC_RDONLY,H5P_DEFAULT);
	if(file_id < 0){
		printf("ERROR: Could not open file %s\n",filename);
		return false;
	}

	// Open the dataset
	hid_t dataset_id;
	hid_t dataspace_id;
	dataset_id = H5Dopen1(file_id, fieldname);
	if(dataset_id < 0){
		printf("ERROR: Could not open the data field %s\n",fieldname);
		H5Fclose(file_id);
		return false;
	}
	dataspace_id = H5Dget_space(dataset_id);

	// Get dimensions of the data set
	int ndims;
	ndims = H5Sget_simple_extent_ndims(dataspace_id);
	hsize_t* dimsl = new hsize_t[ndims];
	H5Sget_simple_extent_dims(dataspace_id,dimsl,NULL);
	for(int i = 0;(i<ndims&&i<3);i++)
		dims[i] = dimsl[ndims-1-i];   // reversed axis order — confirm (was "NOT SURE")
	size_t nn = 1;
	for(int i = 0;i<ndims;i++)
		nn *= dimsl[i];
	delete[] dimsl;                       // was leaked

	// Create space for the new data
	dtype* data = *outArray;
	if (data!=NULL)
		delete[] data;                // was plain delete on a new[] array
	*outArray = new dtype[nn];
	data = *outArray;

	hid_t datatype_id;
	H5T_class_t dataclass;
	size_t size;
	datatype_id = H5Dget_type(dataset_id);
	dataclass = H5Tget_class(datatype_id);
	size = H5Tget_size(datatype_id);

	if(dataclass == H5T_FLOAT){
		if (size == sizeof(float)) {
			float* buffer = (float *) calloc(nn, sizeof(float));
			H5Dread(dataset_id, datatype_id, H5S_ALL,H5S_ALL, H5P_DEFAULT, buffer);
			for(size_t i=0; i<nn; i++)
				data[i] = buffer[i];
			free(buffer);
		}
		else if (size == sizeof(double)) {
			double* buffer = (double *) calloc(nn, sizeof(double));
			H5Dread(dataset_id, datatype_id, H5S_ALL,H5S_ALL, H5P_DEFAULT, buffer);
			for(size_t i=0; i<nn; i++)
				data[i] = buffer[i];
			free(buffer);
		}
		else {
			printf("2dData::readHDF5: unknown floating point type, size=%i\n",(int) size);
			H5Tclose(datatype_id);
			H5Sclose(dataspace_id);
			H5Dclose(dataset_id);
			H5Fclose(file_id);
			return false;
		}
	}
	else if(dataclass == H5T_INTEGER){
		if (size == sizeof(short)) {
			short* buffer = (short*) calloc(nn, sizeof(short));
			H5Dread(dataset_id, datatype_id, H5S_ALL,H5S_ALL, H5P_DEFAULT, buffer);
			for(size_t i=0; i<nn; i++)
				data[i] = buffer[i];
			free(buffer);
		}
		else if (size == sizeof(int)) {
			int* buffer = (int *) calloc(nn, sizeof(int));
			H5Dread(dataset_id, datatype_id, H5S_ALL,H5S_ALL, H5P_DEFAULT, buffer);
			for(size_t i=0; i<nn; i++)
				data[i] = buffer[i];
			free(buffer);
		}
		else if (size == sizeof(long)) {
			long* buffer = (long *) calloc(nn, sizeof(long));
			H5Dread(dataset_id, datatype_id, H5S_ALL,H5S_ALL, H5P_DEFAULT, buffer);
			for(size_t i=0; i<nn; i++)
				data[i] = buffer[i];
			free(buffer);
		}
		else {
			printf("2dData::readHDF5: unknown integer type, size=%i\n",(int) size);
			H5Tclose(datatype_id);
			H5Sclose(dataspace_id);
			H5Dclose(dataset_id);
			H5Fclose(file_id);
			return false;
		}
	}
	else {
		printf("2dData::readHDF5: unknown HDF5 data type\n");
		H5Tclose(datatype_id);
		H5Sclose(dataspace_id);
		H5Dclose(dataset_id);
		H5Fclose(file_id);
		return false;
	}

	// Close and cleanup
	H5Tclose(datatype_id);
	H5Sclose(dataspace_id);
	H5Dclose(dataset_id);

	// Cleanup any stale IDs still attached to the file before closing it
	hid_t ids[256];
	int n_ids = H5Fget_obj_ids(file_id, H5F_OBJ_ALL, 256, ids);
	for (long i=0; i<n_ids; i++ ) {
		hid_t id;
		H5I_type_t type;
		id = ids[i];
		type = H5Iget_type(id);
		if ( type == H5I_GROUP )
			H5Gclose(id);
		if ( type == H5I_DATASET )
			H5Dclose(id);
		if ( type == H5I_DATASPACE )
			H5Sclose(id);
	}
	H5Fclose(file_id);
	return true;
#endif
	return false;
}
/* Restore the solver state from the checkpoint file "data_t=<restart_flag>.h5":
 * reads the six complex velocity fields (u,v,w and their incremental
 * counterparts iu,iv,iw), unpacks them into the global U/IU arrays, then
 * recomputes the expansion coefficients and incremental boundary data.
 * Prints the recomputed incremental coefficients as a diagnostic. */
void restart(int restart_flag)
{
    extern int qpts, dimR, dimQ, Nx, Nz;
    extern mcomplex ****U, ****C;
    extern mcomplex ****IU, ****IC;
    int x, y, z, i;
    hid_t file_id1, dataset_u, dataset_v, dataset_w;    /* file identifier */
    hid_t dataset_iu, dataset_iv, dataset_iw;
    hid_t complex_id;
    herr_t ret;  /* NOTE(review): return codes collected but never checked */
    char filename[50];

    /* define compound datatype for the complex number */
    typedef struct {
        double re;              /*real part */
        double im;              /*imaginary part */
    } complex_t;

    /* HDF5 compound type matching complex_t's in-memory layout.
       NOTE(review): never released with H5Tclose — confirm intentional. */
    complex_id = H5Tcreate(H5T_COMPOUND, sizeof(complex_t));
    H5Tinsert(complex_id, "real", HOFFSET(complex_t, re), H5T_NATIVE_DOUBLE);
    H5Tinsert(complex_id, "imaginary", HOFFSET(complex_t, im),
              H5T_NATIVE_DOUBLE);

    /* define some temporal matrix to store the data to the hdf file.
       NOTE(review): these are C99 VLAs — six stack arrays of
       qpts*Nz*(Nx/2) complex_t each; may overflow the stack for large
       grids — TODO confirm limits. */
    complex_t Matrix1[qpts][Nz][Nx / 2];
    complex_t Matrix2[qpts][Nz][Nx / 2];
    complex_t Matrix3[qpts][Nz][Nx / 2];
    complex_t IMatrix1[qpts][Nz][Nx / 2];
    complex_t IMatrix2[qpts][Nz][Nx / 2];
    complex_t IMatrix3[qpts][Nz][Nx / 2];

    // find the restart file
    sprintf(filename, "data_t=%d.h5", restart_flag);

    // open the file and dataset
    /* NOTE(review): file_id1/dataset ids are not checked for < 0; a
       missing checkpoint proceeds with invalid handles — verify. */
    file_id1 = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
    dataset_u = H5Dopen1(file_id1, "/data_u");
    dataset_v = H5Dopen1(file_id1, "/data_v");
    dataset_w = H5Dopen1(file_id1, "/data_w");
    dataset_iu = H5Dopen1(file_id1, "/data_iu");
    dataset_iv = H5Dopen1(file_id1, "/data_iv");
    dataset_iw = H5Dopen1(file_id1, "/data_iw");

    /* Read all six fields into the temporary matrices. */
    ret = H5Dread(dataset_u, complex_id, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                  Matrix1);
    ret = H5Dread(dataset_v, complex_id, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                  Matrix2);
    ret = H5Dread(dataset_w, complex_id, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                  Matrix3);
    ret = H5Dread(dataset_iu, complex_id, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                  IMatrix1);
    ret = H5Dread(dataset_iv, complex_id, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                  IMatrix2);
    ret = H5Dread(dataset_iw, complex_id, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                  IMatrix3);

    ret = H5Dclose(dataset_u);
    ret = H5Dclose(dataset_v);
    ret = H5Dclose(dataset_w);
    ret = H5Dclose(dataset_iu);
    ret = H5Dclose(dataset_iv);
    ret = H5Dclose(dataset_iw);
    ret = H5Fclose(file_id1);

    /* Unpack file layout [y][z][x] into the in-memory layout
       U[z][component][y][x] (XEL/YEL/ZEL = u/v/w components). */
    for (y = 0; y < qpts; y++) {
        for (z = 0; z < Nz; ++z) {
            for (x = 0; x < Nx / 2; ++x) {
                Re(U[z][XEL][y][x]) = Matrix1[y][z][x].re;
                Im(U[z][XEL][y][x]) = Matrix1[y][z][x].im;
                Re(U[z][YEL][y][x]) = Matrix2[y][z][x].re;
                Im(U[z][YEL][y][x]) = Matrix2[y][z][x].im;
                Re(U[z][ZEL][y][x]) = Matrix3[y][z][x].re;
                Im(U[z][ZEL][y][x]) = Matrix3[y][z][x].im;

                Re(IU[z][XEL][y][x]) = IMatrix1[y][z][x].re;
                Im(IU[z][XEL][y][x]) = IMatrix1[y][z][x].im;
                Re(IU[z][YEL][y][x]) = IMatrix2[y][z][x].re;
                Im(IU[z][YEL][y][x]) = IMatrix2[y][z][x].im;
                Re(IU[z][ZEL][y][x]) = IMatrix3[y][z][x].re;
                Im(IU[z][ZEL][y][x]) = IMatrix3[y][z][x].im;
            }
        }
    }

    /* compute inital coefficients in expansion given the value of ux hat,
       uy hat, uz hat. */
    initAlphaBeta();

    /* compute the boundary condition for previous stage or time step,
       result is stored in Uxb and Uzb */
    if (increBoundary() != NO_ERR) {
        printf("increBoundary failure\n");
    }

    /* compute initial coefficients for incremental state solver */
    incre_initAlphaBeta();

    printf(" restart computed coefficients!\n");
    /* Diagnostic dump of the recomputed incremental coefficients. */
    for (i = 0; i < dimQ; ++i) {
        for (z = 0; z < Nz; ++z) {
            for (x = 0; x < Nx / 2; ++x) {
                printf(" IC[%d][ALPHA][%d][%d]=%f+%fi\n", z, i, x,
                       Re(IC[z][ALPHA][i][x]), Im(IC[z][ALPHA][i][x]));
            }
        }
    }
    for (i = 0; i < dimR; ++i) {
        for (z = 0; z < Nz; ++z) {
            for (x = 0; x < Nx / 2; ++x) {
                printf(" IC[%d][BETA][%d][%d]=%f+%fi\n", z, i, x,
                       Re(IC[z][BETA][i][x]), Im(IC[z][BETA][i][x]));
            }
        }
    }
    printf(" finish restart computing coefficients!\n\n");
}
// expose restart2 to python void read_solution(char * filename, mcomplex * C_ptr) { extern int qpts, dimR, dimQ, Nx, Nz; extern mcomplex ****U, ****C; extern mcomplex ****IU, ****IC; int x, y, z; hid_t file_id1, dataset_a, dataset_b; /* file identifier */ hid_t dataset_ia, dataset_ib; hid_t complex_id; /* define compound datatype for the complex number */ typedef struct { double re; /*real part */ double im; /*imaginary part */ } complex_t; complex_id = H5Tcreate(H5T_COMPOUND, sizeof(complex_t)); H5Tinsert(complex_id, "real", HOFFSET(complex_t, re), H5T_NATIVE_DOUBLE); H5Tinsert(complex_id, "imaginary", HOFFSET(complex_t, im), H5T_NATIVE_DOUBLE); /* define some temporal matrix to store the data to the hdf file */ complex_t Matrix1[dimR][Nz][Nx / 2]; complex_t Matrix2[dimR][Nz][Nx / 2]; complex_t IMatrix1[dimR][Nz][Nx / 2]; complex_t IMatrix2[dimR][Nz][Nx / 2]; // open the file and dataset file_id1 = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT); dataset_a = H5Dopen1(file_id1, "/data_alpha"); dataset_b = H5Dopen1(file_id1, "/data_beta"); dataset_ia = H5Dopen1(file_id1, "/data_ialpha"); dataset_ib = H5Dopen1(file_id1, "/data_ibeta"); assert(EXIT_SUCCESS == H5Dread(dataset_a, complex_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, Matrix1)); assert(EXIT_SUCCESS == H5Dread(dataset_b, complex_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, Matrix2)); assert(EXIT_SUCCESS == H5Dread(dataset_ia, complex_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, IMatrix1)); assert(EXIT_SUCCESS == H5Dread(dataset_ib, complex_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, IMatrix2)); assert(EXIT_SUCCESS == H5Dclose(dataset_a)); assert(EXIT_SUCCESS == H5Dclose(dataset_b)); assert(EXIT_SUCCESS == H5Dclose(dataset_ia)); assert(EXIT_SUCCESS == H5Dclose(dataset_ib)); assert(EXIT_SUCCESS == H5Fclose(file_id1)); for (y = 0; y < dimR; y++) { for (z = 0; z < Nz; ++z) { for (x = 0; x < Nx / 2; ++x) { Re(C[z][ALPHA][y][x]) = Matrix1[y][z][x].re; Im(C[z][ALPHA][y][x]) = Matrix1[y][z][x].im; Re(C[z][BETA][y][x]) = Matrix2[y][z][x].re; 
Im(C[z][BETA][y][x]) = Matrix2[y][z][x].im; Re(IC[z][ALPHA][y][x]) = IMatrix1[y][z][x].re; Im(IC[z][ALPHA][y][x]) = IMatrix1[y][z][x].im; Re(IC[z][BETA][y][x]) = IMatrix2[y][z][x].re; Im(IC[z][BETA][y][x]) = IMatrix2[y][z][x].im; } } } /* compute ux hat, uy hat, uz hat given alpha, beta,. */ initAlphaBeta2(); /* compute the boundary condition for previous stage or time step, result is stored in Uxb and Uzb */ if (increBoundary() != NO_ERR) { printf("increBoundary failure\n"); } /* compute iux, iuy, iuz given i-alpha, i-beta */ incre_initAlphaBeta2(); memset(U[Nz / 2][0][0], 0, 5 * qpts * (Nx / 2) * sizeof(mcomplex)); memset(IU[Nz / 2][0][0], 0, 5 * qpts * (Nx / 2) * sizeof(mcomplex)); memmove(C_ptr, C[0][0][0], Nz * 2 * dimR * (Nx / 2) * sizeof(mcomplex)); }