void Attributes::write(H5::H5File f, const cpath & dataset_root) {
  for (uint i = 0; i < attrs.size(); i++)
    attrs[i].write(f, dataset_root);
  f.flush(H5F_SCOPE_GLOBAL);
}
void pyne::Material::write_hdf5(std::string filename, std::string datapath,
                                std::string nucpath, float row, int chunksize) {
  // Turn off annoying HDF5 errors
  H5::Exception::dontPrint();

  // Create new/open datafile.
  H5::H5File db;
  if (pyne::file_exists(filename)) {
    bool isH5 = H5::H5File::isHdf5(filename);
    if (!isH5)
      throw h5wrap::FileNotHDF5(filename);
    db = H5::H5File(filename, H5F_ACC_RDWR);
  } else
    db = H5::H5File(filename, H5F_ACC_TRUNC);

  //
  // Read in the nuclide list if available, write it out if not
  //
  bool nucpath_exists = h5wrap::path_exists(&db, nucpath);
  std::vector<int> nuclides;
  int nuc_size;
  hsize_t nuc_dims[1];

  if (nucpath_exists) {
    nuclides = h5wrap::h5_array_to_cpp_vector_1d<int>(&db, nucpath,
                                                      H5::PredType::NATIVE_INT);
    nuc_size = nuclides.size();
    nuc_dims[0] = nuc_size;
  } else {
    nuclides = std::vector<int>();
    for (pyne::comp_iter i = comp.begin(); i != comp.end(); i++)
      nuclides.push_back(i->first);
    nuc_size = nuclides.size();
    nuc_dims[0] = nuc_size;

    // Create the nuclide list dataset, since it doesn't exist yet.
    H5::DataSpace nuc_space(1, nuc_dims);
    H5::DataSet nuc_set = db.createDataSet(nucpath, H5::PredType::NATIVE_INT,
                                           nuc_space);
    nuc_set.write(nuclides.data(), H5::PredType::NATIVE_INT);
    db.flush(H5F_SCOPE_GLOBAL);
  }

  //
  // Write out to the file
  //
  H5::DataSet data_set;
  H5::DataSpace data_space, data_hyperslab;
  int data_rank = 1;
  hsize_t data_dims[1] = {1};
  hsize_t data_max_dims[1] = {H5S_UNLIMITED};
  hsize_t data_offset[1] = {0};

  size_t material_struct_size = sizeof(pyne::material_struct) +
                                sizeof(double) * nuc_size;
  H5::CompType data_desc(material_struct_size);
  H5::ArrayType comp_values_array_type(H5::PredType::NATIVE_DOUBLE, 1, nuc_dims);

  // make the data table type
  data_desc.insertMember("name", HOFFSET(pyne::material_struct, name),
                         H5::StrType(0, 20));
  data_desc.insertMember("mass", HOFFSET(pyne::material_struct, mass),
                         H5::PredType::NATIVE_DOUBLE);
  data_desc.insertMember("atoms_per_mol",
                         HOFFSET(pyne::material_struct, atoms_per_mol),
                         H5::PredType::NATIVE_DOUBLE);
  data_desc.insertMember("comp", HOFFSET(pyne::material_struct, comp),
                         comp_values_array_type);

  // make the data array; we have to over-allocate to hold the
  // variable-length comp array that trails the struct
  material_struct * mat_data = (material_struct *) malloc(material_struct_size);
  int name_len = name.length();
  for (int i = 0; i < 20; i++) {
    if (i < name_len)
      mat_data->name[i] = name[i];
    else
      mat_data->name[i] = '\0';
  }
  mat_data->mass = mass;
  mat_data->atoms_per_mol = atoms_per_mol;
  for (int n = 0; n != nuc_size; n++) {
    if (0 < comp.count(nuclides[n]))
      mat_data->comp[n] = comp[nuclides[n]];
    else
      mat_data->comp[n] = 0.0;
  }

  // get / make the data set
  bool datapath_exists = h5wrap::path_exists(&db, datapath);
  if (datapath_exists) {
    data_set = db.openDataSet(datapath);
    data_space = data_set.getSpace();
    data_rank = data_space.getSimpleExtentDims(data_dims, data_max_dims);

    // Determine the row to write to.
    int row_num = (int) row;
    if (std::signbit(row))
      row_num = data_dims[0] + row;  // careful, row is negative

    if (data_dims[0] <= row_num) {
      // row == -0, so extend the data set so that we can append, or
      // row_num is larger than the current dimension: resize to accommodate.
      data_dims[0] = row_num + 1;
      data_set.extend(data_dims);
    } else if (row_num < 0)
      // row was negative and out of range
      throw h5wrap::HDF5BoundsError();

    data_offset[0] = row_num;
  } else {
    // Get full space
    data_space = H5::DataSpace(1, data_dims, data_max_dims);

    // Make data set properties to enable chunking
    H5::DSetCreatPropList data_set_params;
    hsize_t chunk_dims[1] = {static_cast<hsize_t>(chunksize)};
    data_set_params.setChunk(1, chunk_dims);

    material_struct * data_fill_value =
        (material_struct *) malloc(material_struct_size);
    for (int i = 0; i < 20; i++)
      data_fill_value->name[i] = '\0';
    data_fill_value->mass = -1.0;
    data_fill_value->atoms_per_mol = -1.0;
    for (int n = 0; n != nuc_size; n++)
      data_fill_value->comp[n] = 0.0;
    data_set_params.setFillValue(data_desc, data_fill_value);

    // Create the data set
    data_set = db.createDataSet(datapath, data_desc, data_space,
                                data_set_params);
    data_set.extend(data_dims);

    // Add attribute pointing to nuc path
    H5::StrType nuc_attr_type(0, nucpath.length());
    H5::DataSpace nuc_attr_space(H5S_SCALAR);
    H5::Attribute nuc_attr = data_set.createAttribute("nucpath", nuc_attr_type,
                                                      nuc_attr_space);
    nuc_attr.write(nuc_attr_type, nucpath);

    // Remember to de-allocate
    free(data_fill_value);
  }

  // Get the data hyperslab
  data_hyperslab = data_set.getSpace();
  hsize_t data_count[1] = {1};
  data_hyperslab.selectHyperslab(H5S_SELECT_SET, data_count, data_offset);

  // Get a memory space for writing
  H5::DataSpace mem_space(1, data_count, data_max_dims);

  // Write the row...
  data_set.write(mat_data, data_desc, mem_space, data_hyperslab);

  // Close out the HDF5 file
  db.close();

  // Remember the milk!
  // ...by which I mean to deallocate
  free(mat_data);
}
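// A minimal usage sketch, not part of the original source; the file name,
// dataset paths, and composition values below are hypothetical. A negative
// row counts from the end of the data set, and row = -0.0 takes the
// std::signbit branch above, so the material is appended as a new last row;
// chunksize sets the HDF5 chunk length used when the data set is first
// created.
//
//   pyne::comp_map cm;
//   cm[922350000] = 0.05;  // U-235, in pyne's nuc_id form
//   cm[922380000] = 0.95;  // U-238
//   pyne::Material mat(cm, 1.0);
//   mat.write_hdf5("mats.h5", "/material", "/nucid", -0.0, 100);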