H5::DataSet CompartmentReportHDF5::_openDataset( const H5::H5File& file,
                                                 const uint32_t cellID )
{
    // Report data lives under /a<cellID>/<reportName>/<dataDatasetName>.
    std::stringstream cellName;
    cellName << "a" << cellID;
    const std::string datasetName = "/" + cellName.str() + "/" + _reportName +
                                    "/" + dataDatasetName;

    H5::DataSet dataset;
    // Suppress HDF5's automatic error printing while probing for the dataset.
    H5E_BEGIN_TRY
        dataset = file.openDataSet( datasetName );
    H5E_END_TRY
    if( !dataset.getId() )
    {
        LBTHROW( std::runtime_error( "ReportReaderHDF5: "
                                     "Dataset " + datasetName + " not found "
                                     "in file: " + file.getFileName( )));
    }

    if( dataset.getSpace().getSimpleExtentNdims() != 2 )
    {
        LBTHROW( std::runtime_error( "Compartment_Report_HDF5_File_Reader: "
                                     "Error, " + datasetName +
                                     " is not a 2-dimensional array" ));
    }

    return dataset;
}
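// Aside (not part of the original source): the H5E_BEGIN_TRY/H5E_END_TRY
// macros above silence the C-level error stack while probing. A minimal
// sketch of the equivalent C++-API pattern, assuming a hypothetical file
// "report.h5" and dataset "/a1/report/data":
#include <H5Cpp.h>
#include <iostream>

int main()
{
    H5::Exception::dontPrint(); // keep HDF5 from auto-printing errors
    H5::H5File file( "report.h5", H5F_ACC_RDONLY );
    try
    {
        // H5File::openDataSet throws H5::FileIException if the path is absent.
        H5::DataSet dataset = file.openDataSet( "/a1/report/data" );
        std::cout << "rank: " << dataset.getSpace().getSimpleExtentNdims()
                  << std::endl;
    }
    catch( const H5::FileIException& )
    {
        std::cerr << "dataset not found" << std::endl;
    }
    return 0;
}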
arma::Mat<uint16_t> readLUT(const std::string& path)
{
    H5::H5File file (path.c_str(), H5F_ACC_RDONLY);
    H5::DataSet ds = file.openDataSet("LUT");

    H5::DataSpace filespace = ds.getSpace();
    int ndims = filespace.getSimpleExtentNdims();
    assert(ndims == 2);
    hsize_t dims[2] = {1, 1};
    filespace.getSimpleExtentDims(dims);

    H5::DataSpace memspace (ndims, dims);

    // NOTE: Armadillo stores data in column-major order, while HDF5 uses
    // row-major ordering. Reading the raw buffer therefore yields the
    // transpose of the logical matrix, so the matrix is created with
    // swapped dimensions here and transposed in place below.
    arma::Mat<uint16_t> res (dims[1], dims[0]);
    ds.read(res.memptr(), H5::PredType::NATIVE_UINT16, memspace, filespace);

    filespace.close();
    memspace.close();
    ds.close();
    file.close();

    arma::inplace_trans(res);
    return res;
}
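// Illustration (hypothetical, not from the original source) of the layout
// note above: a row-major buffer, as HDF5 delivers it, is loaded into an
// Armadillo matrix with swapped dimensions and then transposed in place to
// recover the logical layout.
#include <armadillo>
#include <cstdint>

int main()
{
    // Row-major 2x3 data as HDF5 would return it: rows {1,2,3} and {4,5,6}.
    const uint16_t raw[6] = { 1, 2, 3, 4, 5, 6 };

    // Interpreted column-major with swapped dims (3x2), column 0 is {1,2,3}:
    // the buffer holds the transpose of the logical matrix.
    arma::Mat<uint16_t> m( raw, 3, 2 );

    // Transposing in place recovers the logical 2x3 matrix.
    arma::inplace_trans( m );
    m.print( "2x3:" ); // prints rows 1 2 3 / 4 5 6
    return 0;
}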
template <typename _Tp>
bool readDataset1D(const H5::H5File &file, const std::string &name,
                   std::vector<_Tp> &data)
{
    H5::DataSet dataset = file.openDataSet(name);
    H5::DataSpace dataspace = dataset.getSpace();

    // Query the rank before fetching the extents so that the single-element
    // dims_out buffer cannot be overrun by a higher-rank dataset.
    int rank = dataspace.getSimpleExtentNdims();

    int _type;
    bool read = getNodeType(dataset, _type);
    read &= (_type == StorageNode::SEQ);
    read &= (rank == 1);
    if (!read)
        return read;

    hsize_t dims_out[1];
    dataspace.getSimpleExtentDims(dims_out, NULL);
    data.resize(dims_out[0]);
    dataset.read(data.data(), dataset.getDataType());
    return true;
}
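// Hypothetical usage of readDataset1D() above; the file name "features.h5"
// and dataset path "labels" are assumptions for illustration.
#include <H5Cpp.h>
#include <iostream>
#include <vector>

void example()
{
    H5::H5File file("features.h5", H5F_ACC_RDONLY);
    std::vector<int> labels;
    if (readDataset1D(file, "labels", labels))
        std::cout << "read " << labels.size() << " labels" << std::endl;
}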
std::vector<double> readEloss(const std::string& path)
{
    H5::H5File file (path.c_str(), H5F_ACC_RDONLY);
    H5::DataSet ds = file.openDataSet("eloss");

    H5::DataSpace filespace = ds.getSpace();
    int ndims = filespace.getSimpleExtentNdims();
    assert(ndims == 1);
    hsize_t dim;
    filespace.getSimpleExtentDims(&dim);

    H5::DataSpace memspace (ndims, &dim);
    std::vector<double> res (dim);
    ds.read(res.data(), H5::PredType::NATIVE_DOUBLE, memspace, filespace);

    filespace.close();
    memspace.close();
    ds.close();
    file.close();
    return res;
}
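// Companion sketch (an assumption, not part of the original source): writing
// a 1-D "eloss" dataset that readEloss() above can read back.
#include <H5Cpp.h>
#include <string>
#include <vector>

void writeEloss(const std::string& path, const std::vector<double>& data)
{
    H5::H5File file (path.c_str(), H5F_ACC_TRUNC);
    hsize_t dim = data.size();
    H5::DataSpace space (1, &dim);
    H5::DataSet ds = file.createDataSet("eloss", H5::PredType::NATIVE_DOUBLE,
                                        space);
    ds.write(data.data(), H5::PredType::NATIVE_DOUBLE);
}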
void StateSet::init_from_datafile(std::string filename)
{
    // open other file read-only
    H5::H5File otherfile;
    otherfile.openFile(filename, H5F_ACC_RDONLY);
    H5::Group otherroot = otherfile.openGroup("/");

    // check that grid properties match
    int othersx, othersy, otherN;
    double otherdx;
    otherroot.openAttribute("num_states").read(H5::PredType::NATIVE_INT, &otherN);
    otherroot.openAttribute("grid_sizex").read(H5::PredType::NATIVE_INT, &othersx);
    // note: this must read grid_sizey, not grid_sizex a second time
    otherroot.openAttribute("grid_sizey").read(H5::PredType::NATIVE_INT, &othersy);
    otherroot.openAttribute("grid_delta").read(H5::PredType::NATIVE_DOUBLE, &otherdx);

    if (static_cast<int>(N) != otherN)
        throw GeneralError("Cannot copy state data from datafile: value for num_states does not match.");
    if (static_cast<int>(datalayout.sizex) != othersx)
        throw GeneralError("Cannot copy state data from datafile: value for grid_sizex does not match.");
    if (static_cast<int>(datalayout.sizey) != othersy)
        throw GeneralError("Cannot copy state data from datafile: value for grid_sizey does not match.");
    if (datalayout.dx != otherdx)
        throw GeneralError("Cannot copy state data from datafile: value for grid_delta does not match.");

    // copy data
    H5::DataSet other_states_data = otherfile.openDataSet("/states");
    other_states_data.read(state_array->get_dataptr(), other_states_data.getArrayType());
}
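// Helper sketch (hypothetical, not in the original source) factoring out the
// scalar-attribute read pattern used repeatedly in init_from_datafile() above.
#include <H5Cpp.h>

int read_int_attribute(const H5::Group& group, const char* name)
{
    int value = 0;
    group.openAttribute(name).read(H5::PredType::NATIVE_INT, &value);
    return value;
}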
void pyne::Material::write_hdf5(std::string filename, std::string datapath,
                                std::string nucpath, float row, int chunksize)
{
  // Turn off annoying HDF5 errors
  H5::Exception::dontPrint();

  // Create new/open datafile.
  H5::H5File db;
  if (pyne::file_exists(filename))
  {
    bool isH5 = H5::H5File::isHdf5(filename);
    if (!isH5)
      throw h5wrap::FileNotHDF5(filename);
    db = H5::H5File(filename, H5F_ACC_RDWR);
  }
  else
    db = H5::H5File(filename, H5F_ACC_TRUNC);

  //
  // Read in nuclist if available, write it out if not
  //
  bool nucpath_exists = h5wrap::path_exists(&db, nucpath);
  std::vector<int> nuclides;
  int nuc_size;
  hsize_t nuc_dims[1];

  if (nucpath_exists)
  {
    nuclides = h5wrap::h5_array_to_cpp_vector_1d<int>(&db, nucpath,
                                                      H5::PredType::NATIVE_INT);
    nuc_size = nuclides.size();
    nuc_dims[0] = nuc_size;
  }
  else
  {
    nuclides = std::vector<int>();
    for (pyne::comp_iter i = comp.begin(); i != comp.end(); i++)
      nuclides.push_back(i->first);
    nuc_size = nuclides.size();
    nuc_dims[0] = nuc_size;

    // Create the nuclide list if it doesn't exist; the vector's contiguous
    // storage can be written directly, no temporary array needed.
    H5::DataSpace nuc_space(1, nuc_dims);
    H5::DataSet nuc_set = db.createDataSet(nucpath, H5::PredType::NATIVE_INT,
                                           nuc_space);
    nuc_set.write(nuclides.data(), H5::PredType::NATIVE_INT);
    db.flush(H5F_SCOPE_GLOBAL);
  };

  //
  // Write out to the file
  //
  H5::DataSet data_set;
  H5::DataSpace data_space, data_hyperslab;
  int data_rank = 1;
  hsize_t data_dims[1] = {1};
  hsize_t data_max_dims[1] = {H5S_UNLIMITED};
  hsize_t data_offset[1] = {0};

  // The comp array is variable-length, so the struct is over-allocated.
  size_t material_struct_size = sizeof(pyne::material_struct) +
                                sizeof(double) * (nuc_size);
  H5::CompType data_desc(material_struct_size);
  H5::ArrayType comp_values_array_type(H5::PredType::NATIVE_DOUBLE, 1,
                                       nuc_dims);

  // make the data table type
  data_desc.insertMember("name", HOFFSET(pyne::material_struct, name),
                         H5::StrType(0, 20));
  data_desc.insertMember("mass", HOFFSET(pyne::material_struct, mass),
                         H5::PredType::NATIVE_DOUBLE);
  data_desc.insertMember("atoms_per_mol",
                         HOFFSET(pyne::material_struct, atoms_per_mol),
                         H5::PredType::NATIVE_DOUBLE);
  data_desc.insertMember("comp", HOFFSET(pyne::material_struct, comp),
                         comp_values_array_type);

  // make the data array, have to over-allocate
  material_struct* mat_data = (material_struct*) malloc(material_struct_size);
  int name_len = name.length();
  for (int i = 0; i < 20; i++)
  {
    if (i < name_len)
      mat_data->name[i] = name[i];
    else
      mat_data->name[i] = '\0';  // pad with NUL bytes, not the NULL macro
  };
  mat_data->mass = mass;
  mat_data->atoms_per_mol = atoms_per_mol;
  for (int n = 0; n != nuc_size; n++)
  {
    if (0 < comp.count(nuclides[n]))
      mat_data->comp[n] = comp[nuclides[n]];
    else
      mat_data->comp[n] = 0.0;
  };

  // get / make the data set
  bool datapath_exists = h5wrap::path_exists(&db, datapath);
  if (datapath_exists)
  {
    data_set = db.openDataSet(datapath);
    data_space = data_set.getSpace();
    data_rank = data_space.getSimpleExtentDims(data_dims, data_max_dims);

    // Determine the row to write. A negative row counts back from the end.
    int row_num = (int) row;
    if (std::signbit(row))
      row_num = data_dims[0] + row;  // careful, row is negative

    // Bounds-check the signed row index; data_dims[0] is an unsigned
    // hsize_t and can never itself be negative.
    if (row_num < 0)
      throw h5wrap::HDF5BoundsError();

    if (data_dims[0] <= (hsize_t) row_num)
    {
      // row == -0 extends the data set so that we can append, or row_num is
      // larger than the current dimension, so resize to accommodate.
      data_dims[0] = row_num + 1;
      data_set.extend(data_dims);
    }

    data_offset[0] = row_num;
  }
  else
  {
    // Get full space
    data_space = H5::DataSpace(1, data_dims, data_max_dims);

    // Make data set properties to enable chunking
    H5::DSetCreatPropList data_set_params;
    hsize_t chunk_dims[1] = {static_cast<hsize_t>(chunksize)};
    data_set_params.setChunk(1, chunk_dims);

    material_struct* data_fill_value =
        (material_struct*) malloc(material_struct_size);
    for (int i = 0; i < 20; i++)
      data_fill_value->name[i] = '\0';
    data_fill_value->mass = -1.0;
    data_fill_value->atoms_per_mol = -1.0;
    for (int n = 0; n != nuc_size; n++)
      data_fill_value->comp[n] = 0.0;
    // Pass the fill-value buffer itself, not the address of the pointer.
    data_set_params.setFillValue(data_desc, data_fill_value);

    // Create the data set
    data_set = db.createDataSet(datapath, data_desc, data_space,
                                data_set_params);
    data_set.extend(data_dims);

    // Add attribute pointing to nuc path
    H5::StrType nuc_attr_type(0, nucpath.length());
    H5::DataSpace nuc_attr_space(H5S_SCALAR);
    H5::Attribute nuc_attr = data_set.createAttribute("nucpath", nuc_attr_type,
                                                      nuc_attr_space);
    nuc_attr.write(nuc_attr_type, nucpath);

    // Remember to de-allocate
    free(data_fill_value);
  };

  // Get the data hyperslab
  data_hyperslab = data_set.getSpace();
  hsize_t data_count[1] = {1};
  data_hyperslab.selectHyperslab(H5S_SELECT_SET, data_count, data_offset);

  // Get a memory space for writing
  H5::DataSpace mem_space(1, data_count, data_max_dims);

  // Write the row...
  data_set.write(mat_data, data_desc, mem_space, data_hyperslab);

  // Close out the HDF5 file
  db.close();

  // Remember the milk!
  // ...by which I mean to deallocate
  free(mat_data);
};
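// Hypothetical usage of Material::write_hdf5() above (the pyne header name,
// paths, and default-constructed Material are assumptions for illustration).
// Passing row = -0.0f takes the std::signbit() branch with
// row_num == data_dims[0], i.e. the record is appended as a new row.
#include "material.h"

int main()
{
  pyne::Material mat;
  mat.write_hdf5("materials.h5", "/material", "/nucid", -0.0f, 100);
  return 0;
}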