//--------------------------------------------------------------------------
// Function:    ArrayType overloaded constructor
///\brief       Creates a new array data type based on the specified
///             \a base_type.
///\param       base_type - IN: Existing datatype
///\param       ndims     - IN: Rank of the array, [0..H5S_MAX_RANK]
///\param       dims      - IN: Size of each array dimension
///\exception   H5::DataTypeIException
// Programmer   Binh-Minh Ribler - May 2004
//--------------------------------------------------------------------------
ArrayType::ArrayType(const DataType& base_type, int ndims, const hsize_t* dims) : DataType()
{
    // Call C API to create an array data type
    hid_t new_type_id = H5Tarray_create2(base_type.getId(), ndims, dims);
    if (new_type_id < 0)
        throw DataTypeIException("ArrayType constructor", "H5Tarray_create2 failed");

    // Set the id and rank for this object
    id = new_type_id;
    rank = ndims;

    // Allocate space then set the dimensions as provided by caller
    dimensions = new hsize_t[rank];
    for (int i = 0; i < rank; i++)
        dimensions[i] = dims[i];
}
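A minimal usage sketch of this constructor, assuming the HDF5 C++ API headers ("H5Cpp.h") and `H5::PredType::NATIVE_INT`; the function name and dimensions are illustrative:

#include "H5Cpp.h"

// Sketch: build a 2x3 array-of-int datatype via the constructor above.
void make_array_type_example()
{
    hsize_t adims[2] = {2, 3};
    H5::ArrayType arr_type(H5::PredType::NATIVE_INT, 2, adims);
    // arr_type can now be used wherever a DataType is expected,
    // e.g. as a compound member type or a dataset's element type.
    arr_type.close();
}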
/*-------------------------------------------------------------------------
 * Function:    test_data_conv
 *
 * Purpose:     Test data conversion
 *
 * Return:      Success:    0
 *              Failure:    1
 *
 * Programmer:  Raymond Lu
 *              30 November 2012
 *
 *-------------------------------------------------------------------------
 */
static int
test_data_conv(hid_t file)
{
    typedef struct {
        int a, b, c[4], d, e;
    } src_type_t;
    typedef struct {
        int a, c[4], e;
    } dst_type_t;

    hid_t       dataspace = -1, dataset = -1;
    hid_t       mem_space = -1;
    hid_t       cparms = -1, dxpl = -1;
    hsize_t     dims[2] = {NX, NY};
    hsize_t     maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
    hsize_t     chunk_dims[2] = {CHUNK_NX, CHUNK_NY};
    herr_t      status;
    int         i, j, n;
    const hsize_t four = 4;
    hid_t       st = -1, dt = -1;
    hid_t       array_dt;

    unsigned    filter_mask = 0;
    src_type_t  direct_buf[CHUNK_NX][CHUNK_NY];
    dst_type_t  check_chunk[CHUNK_NX][CHUNK_NY];
    hsize_t     offset[2] = {0, 0};
    size_t      buf_size = CHUNK_NX * CHUNK_NY * sizeof(src_type_t);

    hsize_t     start[2];  /* Start of hyperslab */
    hsize_t     stride[2]; /* Stride of hyperslab */
    hsize_t     count[2];  /* Block count */
    hsize_t     block[2];  /* Block sizes */

    TESTING("data conversion for H5DOwrite_chunk");

    /*
     * Create the data space with unlimited dimensions.
     */
    if((dataspace = H5Screate_simple(RANK, dims, maxdims)) < 0)
        goto error;

    if((mem_space = H5Screate_simple(RANK, chunk_dims, NULL)) < 0)
        goto error;

    /*
     * Modify dataset creation properties, i.e. enable chunking
     */
    if((cparms = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        goto error;

    if((status = H5Pset_chunk(cparms, RANK, chunk_dims)) < 0)
        goto error;

    /* Build hdf5 datatypes */
    array_dt = H5Tarray_create2(H5T_NATIVE_INT, 1, &four);
    if((st = H5Tcreate(H5T_COMPOUND, sizeof(src_type_t))) < 0 ||
            H5Tinsert(st, "a", HOFFSET(src_type_t, a), H5T_NATIVE_INT) < 0 ||
            H5Tinsert(st, "b", HOFFSET(src_type_t, b), H5T_NATIVE_INT) < 0 ||
            H5Tinsert(st, "c", HOFFSET(src_type_t, c), array_dt) < 0 ||
            H5Tinsert(st, "d", HOFFSET(src_type_t, d), H5T_NATIVE_INT) < 0 ||
            H5Tinsert(st, "e", HOFFSET(src_type_t, e), H5T_NATIVE_INT) < 0)
        goto error;

    if(H5Tclose(array_dt) < 0)
        goto error;

    array_dt = H5Tarray_create2(H5T_NATIVE_INT, 1, &four);
    if((dt = H5Tcreate(H5T_COMPOUND, sizeof(dst_type_t))) < 0 ||
            H5Tinsert(dt, "a", HOFFSET(dst_type_t, a), H5T_NATIVE_INT) < 0 ||
            H5Tinsert(dt, "c", HOFFSET(dst_type_t, c), array_dt) < 0 ||
            H5Tinsert(dt, "e", HOFFSET(dst_type_t, e), H5T_NATIVE_INT) < 0)
        goto error;

    if(H5Tclose(array_dt) < 0)
        goto error;

    /*
     * Create a new dataset within the file using cparms
     * creation properties.
     */
    if((dataset = H5Dcreate2(file, DATASETNAME4, st, dataspace, H5P_DEFAULT,
            cparms, H5P_DEFAULT)) < 0)
        goto error;

    if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0)
        goto error;

    /* Initialize data for one chunk */
    for(i = n = 0; i < CHUNK_NX; i++) {
        for(j = 0; j < CHUNK_NY; j++) {
            (direct_buf[i][j]).a    = i * j + 0;
            (direct_buf[i][j]).b    = i * j + 1;
            (direct_buf[i][j]).c[0] = i * j + 2;
            (direct_buf[i][j]).c[1] = i * j + 3;
            (direct_buf[i][j]).c[2] = i * j + 4;
            (direct_buf[i][j]).c[3] = i * j + 5;
            (direct_buf[i][j]).d    = i * j + 6;
            (direct_buf[i][j]).e    = i * j + 7;
        }
    }

    /* Write the chunk data to the dataset, using the direct writing function.
     * There should be no data conversion involved.
     */
    offset[0] = CHUNK_NX;
    offset[1] = CHUNK_NY;

    if((status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, direct_buf)) < 0)
        goto error;

    if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0)
        goto error;

    if(H5Dclose(dataset) < 0)
        goto error;

    if((dataset = H5Dopen2(file, DATASETNAME4, H5P_DEFAULT)) < 0)
        goto error;

    /*
     * Select hyperslab for the chunk just written in the file
     */
    start[0]  = CHUNK_NX;
    start[1]  = CHUNK_NY;
    stride[0] = 1;
    stride[1] = 1;
    count[0]  = 1;
    count[1]  = 1;
    block[0]  = CHUNK_NX;
    block[1]  = CHUNK_NY;
    if((status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, stride, count, block)) < 0)
        goto error;

    /* Read the chunk back. Data should be converted */
    if((status = H5Dread(dataset, dt, mem_space, dataspace, H5P_DEFAULT, check_chunk)) < 0)
        goto error;

    /* Check that the values read are the same as the values written */
    for(i = 0; i < CHUNK_NX; i++) {
        for(j = 0; j < CHUNK_NY; j++) {
            if ((direct_buf[i][j]).a    != (check_chunk[i][j]).a    ||
                    (direct_buf[i][j]).c[0] != (check_chunk[i][j]).c[0] ||
                    (direct_buf[i][j]).c[1] != (check_chunk[i][j]).c[1] ||
                    (direct_buf[i][j]).c[2] != (check_chunk[i][j]).c[2] ||
                    (direct_buf[i][j]).c[3] != (check_chunk[i][j]).c[3] ||
                    (direct_buf[i][j]).e    != (check_chunk[i][j]).e) {
                printf("    1. Read different values than written.");
                printf("    At index %d,%d\n", i, j);
                printf("    src={a=%d, b=%d, c=[%d,%d,%d,%d], d=%d, e=%d}\n",
                       (direct_buf[i][j]).a, (direct_buf[i][j]).b,
                       (direct_buf[i][j]).c[0], (direct_buf[i][j]).c[1],
                       (direct_buf[i][j]).c[2], (direct_buf[i][j]).c[3],
                       (direct_buf[i][j]).d, (direct_buf[i][j]).e);
                printf("    dst={a=%d, c=[%d,%d,%d,%d], e=%d}\n",
                       (check_chunk[i][j]).a,
                       (check_chunk[i][j]).c[0], (check_chunk[i][j]).c[1],
                       (check_chunk[i][j]).c[2], (check_chunk[i][j]).c[3],
                       (check_chunk[i][j]).e);
                goto error;
            }
        }
    }

    /*
     * Close/release resources.
     */
    H5Dclose(dataset);
    H5Sclose(mem_space);
    H5Sclose(dataspace);
    H5Pclose(cparms);
    H5Pclose(dxpl);
    H5Tclose(st);
    H5Tclose(dt);

    PASSED();
    return 0;

error:
    H5E_BEGIN_TRY {
        H5Dclose(dataset);
        H5Sclose(mem_space);
        H5Sclose(dataspace);
        H5Pclose(cparms);
        H5Pclose(dxpl);
        H5Tclose(st);
        H5Tclose(dt);
    } H5E_END_TRY;

    return 1;
}
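For reference, the direct-chunk-write call pattern the test exercises, reduced to its essentials. This is a sketch assuming the high-level H5DO API from hdf5_hl; the function name and 2-D layout are illustrative:

#include "hdf5.h"
#include "hdf5_hl.h"

/* Sketch: write one raw chunk at the chunk-aligned offset {0, 0}.
 * "dset" must be a chunked dataset whose chunk size matches buf_size. */
static herr_t write_first_chunk(hid_t dset, const void *buf, size_t buf_size)
{
    hsize_t offset[2] = {0, 0};  /* must be aligned to chunk boundaries */
    unsigned filter_mask = 0;    /* 0 = all pipeline filters applied    */
    return H5DOwrite_chunk(dset, H5P_DEFAULT, filter_mask, offset, buf_size, buf);
}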
int main(void)
{
    hid_t fil, spc, set;
    hid_t cs6, cmp, fix;
    hid_t cmp1, cmp2, cmp3;
    hid_t plist;
    hid_t array_dt;
    hsize_t dim[2];
    hsize_t cdim[4];
    char string5[5];
    float fok[2] = {1234., 2341.};
    float fnok[2] = {5678., 6785.};
    float *fptr;
    char *data;
    char *mname;
    int result = 0;

    printf("%-70s", "Testing alignment in compound datatypes");

    strcpy(string5, "Hi!");
    HDunlink(fname);
    fil = H5Fcreate(fname, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    if (fil < 0) {
        puts("*FAILED*");
        return 1;
    }

    H5E_BEGIN_TRY {
        H5Ldelete(fil, setname, H5P_DEFAULT);
    } H5E_END_TRY;

    cs6 = H5Tcopy(H5T_C_S1);
    H5Tset_size(cs6, sizeof(string5));
    H5Tset_strpad(cs6, H5T_STR_NULLPAD);

    cmp = H5Tcreate(H5T_COMPOUND, sizeof(fok) + sizeof(string5) + sizeof(fnok));
    H5Tinsert(cmp, "Awkward length", 0, cs6);

    cdim[0] = sizeof(fok) / sizeof(float);
    array_dt = H5Tarray_create2(H5T_NATIVE_FLOAT, 1, cdim);
    H5Tinsert(cmp, "Ok", sizeof(string5), array_dt);
    H5Tclose(array_dt);

    cdim[0] = sizeof(fnok) / sizeof(float);
    array_dt = H5Tarray_create2(H5T_NATIVE_FLOAT, 1, cdim);
    H5Tinsert(cmp, "Not Ok", sizeof(fok) + sizeof(string5), array_dt);
    H5Tclose(array_dt);

    fix = h5tools_get_native_type(cmp);

    cmp1 = H5Tcreate(H5T_COMPOUND, sizeof(fok));
    cdim[0] = sizeof(fok) / sizeof(float);
    array_dt = H5Tarray_create2(H5T_NATIVE_FLOAT, 1, cdim);
    H5Tinsert(cmp1, "Ok", 0, array_dt);
    H5Tclose(array_dt);

    cmp2 = H5Tcreate(H5T_COMPOUND, sizeof(string5));
    H5Tinsert(cmp2, "Awkward length", 0, cs6);

    cmp3 = H5Tcreate(H5T_COMPOUND, sizeof(fnok));
    cdim[0] = sizeof(fnok) / sizeof(float);
    array_dt = H5Tarray_create2(H5T_NATIVE_FLOAT, 1, cdim);
    H5Tinsert(cmp3, "Not Ok", 0, array_dt);
    H5Tclose(array_dt);

    plist = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_preserve(plist, 1);

    /*
     * Create a small dataset, and write data into it. We write each field
     * in turn so that we avoid alignment issues at this point.
     */
    dim[0] = 1;
    spc = H5Screate_simple(1, dim, NULL);
    set = H5Dcreate2(fil, setname, cmp, spc, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    H5Dwrite(set, cmp1, spc, H5S_ALL, plist, fok);
    H5Dwrite(set, cmp2, spc, H5S_ALL, plist, string5);
    H5Dwrite(set, cmp3, spc, H5S_ALL, plist, fnok);

    H5Dclose(set);

    /* Now open the set, and read it back in */
    data = malloc(H5Tget_size(fix));
    if(!data) {
        perror("malloc() failed");
        abort();
    }

    set = H5Dopen2(fil, setname, H5P_DEFAULT);
    H5Dread(set, fix, spc, H5S_ALL, H5P_DEFAULT, data);

    fptr = (float *)(data + H5Tget_member_offset(fix, 1));

    if(fok[0] != fptr[0] || fok[1] != fptr[1] ||
            fnok[0] != fptr[2] || fnok[1] != fptr[3]) {
        result = 1;
        printf("%14s (%2d) %6s = %s\n",
            mname = H5Tget_member_name(fix, 0),
            (int)H5Tget_member_offset(fix, 0),
            string5,
            (char *)(data + H5Tget_member_offset(fix, 0)));
        free(mname);

        fptr = (float *)(data + H5Tget_member_offset(fix, 1));
        printf("Data comparison:\n"
            "%14s (%2d) %6f = %f\n"
            "                    %6f = %f\n",
            mname = H5Tget_member_name(fix, 1),
            (int)H5Tget_member_offset(fix, 1),
            fok[0], fptr[0],
            fok[1], fptr[1]);
        free(mname);

        fptr = (float *)(data + H5Tget_member_offset(fix, 2));
        printf("%14s (%2d) %6f = %f\n"
            "                    %6f = %6f\n",
            mname = H5Tget_member_name(fix, 2),
            (int)H5Tget_member_offset(fix, 2),
            fnok[0], fptr[0],
            fnok[1], fptr[1]);
        free(mname);

        fptr = (float *)(data + H5Tget_member_offset(fix, 1));
        printf("\n"
            "Short circuit\n"
            "                    %6f = %f\n"
            "                    %6f = %f\n"
            "                    %6f = %f\n"
            "                    %6f = %f\n",
            fok[0],  fptr[0],
            fok[1],  fptr[1],
            fnok[0], fptr[2],
            fnok[1], fptr[3]);
        puts("*FAILED*");
    } else {
        puts(" PASSED");
    }

    free(data);
    H5Sclose(spc);
    H5Tclose(cmp);
    H5Tclose(cmp1);
    H5Tclose(cmp2);
    H5Tclose(cmp3);
    H5Pclose(plist);
    H5Fclose(fil);
    HDunlink(fname);

    fflush(stdout);
    return result;
}
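The per-field writes above rely on H5Pset_preserve so that writing one compound member does not clobber the others on disk. A minimal sketch of that pattern in isolation (function and parameter names are illustrative; the dataset is assumed to already have a compound file type with an "Ok" member):

#include "hdf5.h"

/* Sketch: write only the "Ok" field of a compound dataset by describing
 * a one-member compound type in memory. */
static herr_t write_ok_field(hid_t dset, const float ok[2])
{
    hsize_t two = 2;
    hid_t array_dt = H5Tarray_create2(H5T_NATIVE_FLOAT, 1, &two);
    hid_t mem_dt = H5Tcreate(H5T_COMPOUND, 2 * sizeof(float));
    hid_t dxpl = H5Pcreate(H5P_DATASET_XFER);
    herr_t ret;

    H5Tinsert(mem_dt, "Ok", 0, array_dt);
    H5Pset_preserve(dxpl, 1); /* keep the other fields intact on disk */
    ret = H5Dwrite(dset, mem_dt, H5S_ALL, H5S_ALL, dxpl, ok);

    H5Pclose(dxpl);
    H5Tclose(mem_dt);
    H5Tclose(array_dt);
    return ret;
}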
void pyne::Material::_load_comp_protocol1(hid_t db, std::string datapath, int row)
{
    std::string nucpath;
    hid_t data_set = H5Dopen2(db, datapath.c_str(), H5P_DEFAULT);

    hsize_t data_offset[1] = {static_cast<hsize_t>(row)};
    if (row < 0) {
        // Handle negative row indices
        hid_t data_space = H5Dget_space(data_set);
        hsize_t data_dims[1];
        H5Sget_simple_extent_dims(data_space, data_dims, NULL);
        data_offset[0] += data_dims[0];
    };

    // Grab the nucpath
    hid_t nuc_attr = H5Aopen(data_set, "nucpath", H5P_DEFAULT);
    H5A_info_t nuc_info;
    H5Aget_info(nuc_attr, &nuc_info);
    hsize_t nuc_attr_len = nuc_info.data_size;
    hid_t str_attr = H5Tcopy(H5T_C_S1);
    H5Tset_size(str_attr, nuc_attr_len);
    char * nucpathbuf = new char [nuc_attr_len];
    H5Aread(nuc_attr, str_attr, nucpathbuf);
    nucpath = std::string(nucpathbuf, nuc_attr_len);
    delete[] nucpathbuf;
    H5Aclose(nuc_attr);

    // Grab the nuclides
    std::vector<int> nuclides = h5wrap::h5_array_to_cpp_vector_1d<int>(db, nucpath, H5T_NATIVE_INT);
    int nuc_size = nuclides.size();
    hsize_t nuc_dims[1] = {static_cast<hsize_t>(nuc_size)};

    // Get the data hyperslab
    hid_t data_hyperslab = H5Dget_space(data_set);
    hsize_t data_count[1] = {1};
    H5Sselect_hyperslab(data_hyperslab, H5S_SELECT_SET, data_offset, NULL, data_count, NULL);

    // Get memory space for writing
    hid_t mem_space = H5Screate_simple(1, data_count, NULL);

    // Get material type
    size_t material_struct_size = sizeof(pyne::material_struct) + sizeof(double) * nuc_size;
    hid_t desc = H5Tcreate(H5T_COMPOUND, material_struct_size);
    hid_t comp_values_array_type = H5Tarray_create2(H5T_NATIVE_DOUBLE, 1, nuc_dims);

    // make the data table type
    H5Tinsert(desc, "mass", HOFFSET(pyne::material_struct, mass), H5T_NATIVE_DOUBLE);
    H5Tinsert(desc, "density", HOFFSET(pyne::material_struct, density), H5T_NATIVE_DOUBLE);
    H5Tinsert(desc, "atoms_per_molecule", HOFFSET(pyne::material_struct, atoms_per_mol),
              H5T_NATIVE_DOUBLE);
    H5Tinsert(desc, "comp", HOFFSET(pyne::material_struct, comp), comp_values_array_type);

    // make the data array; only material_struct_size bytes are needed, but
    // allocating that many whole structs over-allocates and keeps it simple
    material_struct * mat_data = new material_struct [material_struct_size];

    // Finally, get data and put it on this instance
    H5Dread(data_set, desc, mem_space, data_hyperslab, H5P_DEFAULT, mat_data);

    mass = (*mat_data).mass;
    density = (*mat_data).density;
    atoms_per_molecule = (*mat_data).atoms_per_mol;
    for (int i = 0; i < nuc_size; i++)
        comp[nuclides[i]] = (double) (*mat_data).comp[i];

    delete[] mat_data;
    H5Tclose(str_attr);

    //
    // Get metadata from associated dataset, if available
    //
    std::string attrpath = datapath + "_metadata";
    bool attrpath_exists = h5wrap::path_exists(db, attrpath);
    if (!attrpath_exists)
        return;

    hid_t attrtype, metadataet, metadatalab, attrmemspace;
    hvl_t attrdata [1];
    attrtype = H5Tvlen_create(H5T_NATIVE_CHAR);

    // Get the metadata from the file
    metadataet = H5Dopen2(db, attrpath.c_str(), H5P_DEFAULT);
    metadatalab = H5Dget_space(metadataet);
    H5Sselect_hyperslab(metadatalab, H5S_SELECT_SET, data_offset, NULL, data_count, NULL);
    attrmemspace = H5Screate_simple(1, data_count, NULL);
    H5Dread(metadataet, attrtype, attrmemspace, metadatalab, H5P_DEFAULT, attrdata);

    // convert to in-memory JSON
    Json::Reader reader;
    reader.parse((char *) attrdata[0].p, (char *) attrdata[0].p + attrdata[0].len,
                 metadata, false);

    // close attr data objects; close the dataspace that was actually opened
    // (the original closed an uninitialized handle instead of metadatalab)
    H5Fflush(db, H5F_SCOPE_GLOBAL);
    H5Dclose(metadataet);
    H5Sclose(metadatalab);
    H5Sclose(attrmemspace);
    H5Tclose(attrtype);

    // Close out the HDF5 file
    H5Fclose(db);
};
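A hedged sketch of how this loader is typically driven. It assumes a pyne build where Material::from_hdf5 dispatches to _load_comp_protocol1 for protocol-1 files; the header path, file name, data path, and row are all illustrative:

#include "material.h"  // pyne Material header (path may differ per install)

// Sketch: load row 0 of a material table stored under /material.
pyne::Material load_example()
{
    pyne::Material mat;
    mat.from_hdf5("materials.h5", "/material", 0, 1);  // (file, datapath, row, protocol)
    return mat;
}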
void pyne::Material::write_hdf5(std::string filename, std::string datapath,
                                std::string nucpath, float row, int chunksize)
{
    int row_num = (int) row;

    // Turn off annoying HDF5 errors
    H5Eset_auto2(H5E_DEFAULT, NULL, NULL);

    // Set file access properties so it closes cleanly
    hid_t fapl;
    fapl = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fclose_degree(fapl, H5F_CLOSE_STRONG);

    // Create new/open datafile.
    hid_t db;
    if (pyne::file_exists(filename)) {
        bool ish5 = H5Fis_hdf5(filename.c_str());
        if (!ish5)
            throw h5wrap::FileNotHDF5(filename);
        db = H5Fopen(filename.c_str(), H5F_ACC_RDWR, fapl);
    } else
        db = H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, fapl);

    //
    // Read in nuclist if available, write it out if not
    //
    bool nucpath_exists = h5wrap::path_exists(db, nucpath);
    std::vector<int> nuclides;
    int nuc_size;
    hsize_t nuc_dims[1];

    if (nucpath_exists) {
        nuclides = h5wrap::h5_array_to_cpp_vector_1d<int>(db, nucpath, H5T_NATIVE_INT);
        nuc_size = nuclides.size();
        nuc_dims[0] = nuc_size;
    } else {
        nuclides = std::vector<int>();
        for (pyne::comp_iter i = comp.begin(); i != comp.end(); i++)
            nuclides.push_back(i->first);
        nuc_size = nuclides.size();
        nuc_dims[0] = nuc_size;

        // Create the nuclide list if it doesn't exist; nuclides already holds
        // the values contiguously, so write it directly instead of copying
        // into a (non-standard) variable-length array.
        hid_t nuc_space = H5Screate_simple(1, nuc_dims, NULL);
        hid_t nuc_set = H5Dcreate2(db, nucpath.c_str(), H5T_NATIVE_INT, nuc_space,
                                   H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite(nuc_set, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &nuclides[0]);
        H5Fflush(db, H5F_SCOPE_GLOBAL);
    };

    //
    // Write out the data itself to the file
    //
    hid_t data_set, data_space, data_hyperslab;
    int data_rank = 1;
    hsize_t data_dims[1] = {1};
    hsize_t data_max_dims[1] = {H5S_UNLIMITED};
    hsize_t data_offset[1] = {0};

    size_t material_struct_size = sizeof(pyne::material_struct) + sizeof(double) * nuc_size;
    hid_t desc = H5Tcreate(H5T_COMPOUND, material_struct_size);
    hid_t comp_values_array_type = H5Tarray_create2(H5T_NATIVE_DOUBLE, 1, nuc_dims);

    // make the data table type
    H5Tinsert(desc, "mass", HOFFSET(pyne::material_struct, mass), H5T_NATIVE_DOUBLE);
    H5Tinsert(desc, "density", HOFFSET(pyne::material_struct, density), H5T_NATIVE_DOUBLE);
    H5Tinsert(desc, "atoms_per_molecule", HOFFSET(pyne::material_struct, atoms_per_mol),
              H5T_NATIVE_DOUBLE);
    H5Tinsert(desc, "comp", HOFFSET(pyne::material_struct, comp), comp_values_array_type);

    // make the data array; only material_struct_size bytes are needed, but
    // allocating that many whole structs over-allocates and keeps it simple
    material_struct * mat_data = new material_struct[material_struct_size];
    (*mat_data).mass = mass;
    (*mat_data).density = density;
    (*mat_data).atoms_per_mol = atoms_per_molecule;
    for (int n = 0; n != nuc_size; n++) {
        if (0 < comp.count(nuclides[n]))
            (*mat_data).comp[n] = comp[nuclides[n]];
        else
            (*mat_data).comp[n] = 0.0;
    };

    // get / make the data set
    bool datapath_exists = h5wrap::path_exists(db, datapath);
    if (datapath_exists) {
        data_set = H5Dopen2(db, datapath.c_str(), H5P_DEFAULT);
        data_space = H5Dget_space(data_set);
        data_rank = H5Sget_simple_extent_dims(data_space, data_dims, data_max_dims);

        // Determine the row number.
        if (std::signbit(row))
            row_num = data_dims[0] + row;  // careful, row is negative

        if (data_dims[0] <= row_num) {
            // row == -0: extend the data set so that we can append, or
            // row_num is larger than the current dimension: resize to accommodate.
            data_dims[0] = row_num + 1;
            H5Dset_extent(data_set, data_dims);
        }

        data_offset[0] = row_num;
    } else {
        // Get full space
        data_space = H5Screate_simple(1, data_dims, data_max_dims);

        // Make data set properties to enable chunking
        hid_t data_set_params = H5Pcreate(H5P_DATASET_CREATE);
        hsize_t chunk_dims[1] = {static_cast<hsize_t>(chunksize)};
        H5Pset_chunk(data_set_params, 1, chunk_dims);
        H5Pset_deflate(data_set_params, 1);

        material_struct * data_fill_value = new material_struct[material_struct_size];
        (*data_fill_value).mass = -1.0;
        (*data_fill_value).density = -1.0;
        (*data_fill_value).atoms_per_mol = -1.0;
        for (int n = 0; n != nuc_size; n++)
            (*data_fill_value).comp[n] = 0.0;
        // Pass the fill-value buffer itself, not the address of the pointer.
        H5Pset_fill_value(data_set_params, desc, data_fill_value);

        // Create the data set
        data_set = H5Dcreate2(db, datapath.c_str(), desc, data_space, H5P_DEFAULT,
                              data_set_params, H5P_DEFAULT);
        H5Dset_extent(data_set, data_dims);

        // Add attribute pointing to nuc path
        hid_t nuc_attr_type = H5Tcopy(H5T_C_S1);
        H5Tset_size(nuc_attr_type, nucpath.length());
        hid_t nuc_attr_space = H5Screate(H5S_SCALAR);
        hid_t nuc_attr = H5Acreate2(data_set, "nucpath", nuc_attr_type, nuc_attr_space,
                                    H5P_DEFAULT, H5P_DEFAULT);
        H5Awrite(nuc_attr, nuc_attr_type, nucpath.c_str());
        H5Fflush(db, H5F_SCOPE_GLOBAL);

        // Remember to de-allocate and release the creation-time handles
        delete[] data_fill_value;
        H5Aclose(nuc_attr);
        H5Sclose(nuc_attr_space);
        H5Tclose(nuc_attr_type);
        H5Pclose(data_set_params);
    };

    // Get the data hyperslab
    data_hyperslab = H5Dget_space(data_set);
    hsize_t data_count[1] = {1};
    H5Sselect_hyperslab(data_hyperslab, H5S_SELECT_SET, data_offset, NULL, data_count, NULL);

    // Get a memory space for writing
    hid_t mem_space = H5Screate_simple(1, data_count, data_max_dims);

    // Write the row...
    H5Dwrite(data_set, desc, mem_space, data_hyperslab, H5P_DEFAULT, mat_data);

    // Close out the Dataset
    H5Fflush(db, H5F_SCOPE_GLOBAL);
    H5Dclose(data_set);
    H5Sclose(data_space);
    H5Tclose(desc);

    //
    // Write out the metadata to the file
    //
    std::string attrpath = datapath + "_metadata";
    hid_t metadatapace, attrtype, metadataet, metadatalab, attrmemspace;
    int attrrank;

    attrtype = H5Tvlen_create(H5T_NATIVE_CHAR);

    // get / make the data set
    bool attrpath_exists = h5wrap::path_exists(db, attrpath);
    if (attrpath_exists) {
        metadataet = H5Dopen2(db, attrpath.c_str(), H5P_DEFAULT);
        metadatapace = H5Dget_space(metadataet);
        attrrank = H5Sget_simple_extent_dims(metadatapace, data_dims, data_max_dims);

        if (data_dims[0] <= row_num) {
            // row == -0: extend the data set so that we can append, or
            // row_num is larger than the current dimension: resize to accommodate.
            data_dims[0] = row_num + 1;
            H5Dset_extent(metadataet, data_dims);
        }

        data_offset[0] = row_num;
    } else {
        hid_t metadataetparams;
        hsize_t attrchunkdims [1];

        // Make data set properties to enable chunking
        metadataetparams = H5Pcreate(H5P_DATASET_CREATE);
        attrchunkdims[0] = chunksize;
        H5Pset_chunk(metadataetparams, 1, attrchunkdims);
        H5Pset_deflate(metadataetparams, 1);

        hvl_t attrfillvalue [1];
        attrfillvalue[0].len = 3;
        attrfillvalue[0].p = (char *) "{}\n";
        H5Pset_fill_value(metadataetparams, attrtype, attrfillvalue);

        // make dataset
        metadatapace = H5Screate_simple(1, data_dims, data_max_dims);
        metadataet = H5Dcreate2(db, attrpath.c_str(), attrtype, metadatapace,
                                H5P_DEFAULT, metadataetparams, H5P_DEFAULT);
        H5Dset_extent(metadataet, data_dims);
    };

    // set the attr string
    hvl_t attrdata [1];
    Json::FastWriter writer;
    std::string metadatatr = writer.write(metadata);
    attrdata[0].p = (char *) metadatatr.c_str();
    attrdata[0].len = metadatatr.length();

    // write the attr
    metadatalab = H5Dget_space(metadataet);
    H5Sselect_hyperslab(metadatalab, H5S_SELECT_SET, data_offset, NULL, data_count, NULL);
    attrmemspace = H5Screate_simple(1, data_count, data_max_dims);
    H5Dwrite(metadataet, attrtype, attrmemspace, metadatalab, H5P_DEFAULT, attrdata);

    // close attr data objects
    H5Fflush(db, H5F_SCOPE_GLOBAL);
    H5Dclose(metadataet);
    H5Sclose(metadatapace);
    H5Tclose(attrtype);

    // Close out the HDF5 file
    H5Fclose(db);

    // Remember the milk!
    // ...by which I mean to deallocate
    delete[] mat_data;
};
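A hedged round-trip sketch tying the writer above to the protocol-1 loader. It assumes the pyne public API (write_hdf5 and from_hdf5) with the argument order shown in these methods; the file name, paths, nuclide ids, and chunk size are illustrative:

#include "material.h"  // pyne Material header (path may differ per install)

// Sketch: write a material row, then read it back.
int main()
{
    pyne::Material mat;
    mat.comp[922350000] = 0.05;  // U-235 in pyne's nuclide-id form (illustrative)
    mat.comp[922380000] = 0.95;  // U-238
    mat.mass = 1.0;

    // row = -0.0 selects the append behavior described in write_hdf5 above.
    mat.write_hdf5("materials.h5", "/material", "/nucid", -0.0, 100);

    pyne::Material back;
    back.from_hdf5("materials.h5", "/material", -1, 1);  // last row, protocol 1
    return 0;
}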