int Channel::read_state_from_hdf5(H5::H5File & H5StateFile, const string & rootStr){
    clear_data();

    // read waveform data (the memory type must match what write_state_to_hdf5 stores, i.e. NATIVE_FLOAT)
    waveform_ = h5array2vector<float>(&H5StateFile, rootStr + "/waveformLib", H5::PredType::NATIVE_FLOAT);

    // load state information
    H5::Group tmpGroup = H5StateFile.openGroup(rootStr);
    offset_ = h5element2element<float>("offset", &tmpGroup, H5::PredType::NATIVE_FLOAT);
    scale_ = h5element2element<float>("scale", &tmpGroup, H5::PredType::NATIVE_FLOAT);
    enabled_ = h5element2element<bool>("enabled", &tmpGroup, H5::PredType::NATIVE_UINT);
    trigDelay_ = h5element2element<int>("trigDelay", &tmpGroup, H5::PredType::NATIVE_INT);

    // Load the link list data.
    // First figure out how many banks there are from the attribute.
    tmpGroup = H5StateFile.openGroup(rootStr + "/linkListData");
    USHORT numBanks;
    numBanks = h5element2element<USHORT>("numBanks", &tmpGroup, H5::PredType::NATIVE_UINT16);
    tmpGroup.close();

    // Now loop over the number of banks found and read each bank.
    std::ostringstream tmpStream;
    for (USHORT bankct = 0; bankct < numBanks; bankct++) {
        LLBank bank;
        // Reset the stream and rebuild the full path; str(rootStr) followed by <<
        // would overwrite rootStr instead of appending to it.
        tmpStream.str("");
        tmpStream << rootStr << "/linkListData/bank" << bankct + 1;
        FILE_LOG(logDEBUG) << "Reading State Bank: " << bankct + 1 << " from hdf5";
        bank.read_state_from_hdf5(H5StateFile, tmpStream.str());
        // banks_.push_back(bank);
    }
    return 0;
}
arma::Mat<uint16_t> readLUT(const std::string& path) {
    H5::H5File file(path.c_str(), H5F_ACC_RDONLY);
    H5::DataSet ds = file.openDataSet("LUT");

    H5::DataSpace filespace = ds.getSpace();
    int ndims = filespace.getSimpleExtentNdims();
    assert(ndims == 2);
    hsize_t dims[2] = {1, 1};
    filespace.getSimpleExtentDims(dims);

    H5::DataSpace memspace(ndims, dims);
    arma::Mat<uint16_t> res(dims[0], dims[1]);
    ds.read(res.memptr(), H5::PredType::NATIVE_UINT16, memspace, filespace);

    filespace.close();
    memspace.close();
    ds.close();
    file.close();

    // NOTE: Armadillo stores data in column-major order, while HDF5 uses
    // row-major ordering. Above, we read the data directly from HDF5 into
    // the arma matrix, so it was implicitly transposed. The call to
    // arma::inplace_trans() below fixes this.
    arma::inplace_trans(res);
    return res;
}
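// --- Added example (not part of the original sources) ---
// Minimal usage sketch for readLUT above. The file name "lut.h5" and the
// expectation of a 2-D uint16 dataset named "LUT" are illustrative
// assumptions, not taken from the original code.
#include <armadillo>
#include <iostream>

int main() {
    arma::Mat<uint16_t> lut = readLUT("lut.h5");
    // Dimensions of the returned matrix.
    std::cout << "LUT is " << lut.n_rows << " x " << lut.n_cols << std::endl;
    return 0;
}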
void get_input_pixels(std::string fname, std::vector<std::string> &pix_name) {
    H5::H5File *file = H5Utils::openFile(fname, H5Utils::READ);

    file->iterateElems("/photometry/", NULL, fetch_pixel_name, reinterpret_cast<void*>(&pix_name));

    delete file;
}
H5::DataSet CompartmentReportHDF5::_openDataset( const H5::H5File& file, const uint32_t cellID )
{
    std::stringstream cellName;
    cellName << "a" << cellID;
    const std::string datasetName = "/" + cellName.str() + "/" + _reportName + "/" + dataDatasetName;
    H5::DataSet dataset;
    H5E_BEGIN_TRY
        dataset = file.openDataSet( datasetName );
    H5E_END_TRY
    if( !dataset.getId() )
    {
        LBTHROW( std::runtime_error( "ReportReaderHDF5: "
                                     "Dataset " + datasetName + " not found "
                                     "in file: " + file.getFileName( )));
    }

    if( dataset.getSpace().getSimpleExtentNdims() != 2 )
    {
        LBTHROW( std::runtime_error("Compartment_Report_HDF5_File_Reader: "
                                    "Error, not 2 dimensional array on " + datasetName));
    }

    return dataset;
}
void Attribute::write(H5::H5File f, const cpath & dataset_root)
{
    //FIXME we should have a path?
    cpath fullpath = dataset_root / name;
    cpath grouppath = fullpath.parent_path();

    if (_link.size()) {
        if (!h5_obj_exists(f, grouppath))
            h5_create_path_groups(f, grouppath.c_str());

        H5::Group g = f.openGroup(grouppath.generic_string().c_str());

        if (h5_obj_exists(f, fullpath))
            g.unlink(name.filename().generic_string().c_str());

        g.link(H5G_LINK_SOFT, (dataset_root/_link).generic_string().c_str(), name.filename().generic_string().c_str());
    }
    else if (_m.total() == 0) {
        //FIXME remove this (legacy) case
        hsize_t *dim = new hsize_t[size.size()+1];
        for(uint i=0;i<size.size();i++)
            dim[i] = size[i];
        H5::DataSpace space(size.size(), dim);
        H5::Attribute attr;
        H5::Group g;
        delete[] dim;

        if (!h5_obj_exists(f, grouppath))
            h5_create_path_groups(f, grouppath);

        g = f.openGroup(grouppath.generic_string().c_str());

        uint min, max;
        H5Pget_attr_phase_change(H5Gget_create_plist(g.getId()), &max, &min);

        if (min || max)
            printf("WARNING: could not set dense storage on group, may not be able to write large attributes\n");

        //FIXME relative to what?
        if (H5Aexists(g.getId(), name.filename().generic_string().c_str()))
            g.removeAttr(name.filename().generic_string().c_str());

        attr = g.createAttribute(name.filename().generic_string().c_str(), toH5DataType(type), space);

        attr.write(toH5NativeDataType(type), data);
    }
    else
        Mat_H5AttrWrite(_m, f, fullpath);
}
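// --- Added example (not part of the original sources) ---
// Attribute::write above warns when a group's attribute-storage phase-change
// thresholds are non-zero. This is a sketch, using the HDF5 C API, of how a
// group can be created with dense attribute storage from the start so that
// large attributes are not rejected; the function name and the omission of
// error checking are illustrative assumptions.
#include <hdf5.h>

hid_t create_group_with_dense_attrs(hid_t file_id, const char *group_name) {
    hid_t gcpl = H5Pcreate(H5P_GROUP_CREATE);
    // max_compact = 0, min_dense = 0: attributes go straight to dense storage.
    H5Pset_attr_phase_change(gcpl, 0, 0);
    hid_t group_id = H5Gcreate2(file_id, group_name, H5P_DEFAULT, gcpl, H5P_DEFAULT);
    H5Pclose(gcpl);
    return group_id;
}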
void Attributes::write(H5::H5File f, const cpath & dataset_root)
{
    for(uint i=0;i<attrs.size();i++)
        attrs[i].write(f, dataset_root);

    f.flush(H5F_SCOPE_GLOBAL);
}
void Bundle2::storeParameters(H5::H5File& file) const {
    H5::Group root = file.openGroup("/");

    H5::DataSpace scalar;

    H5::Attribute attr = root.createAttribute("version", H5::PredType::STD_U32LE, scalar);
    attr.write(H5::PredType::NATIVE_UINT, &version_);
    attr.close();

    unsigned char r2 = parameters_.reduce2 ? 1 : 0;
    attr = root.createAttribute("reduce2", H5::PredType::STD_U8LE, scalar);
    attr.write(H5::PredType::NATIVE_UCHAR, &r2);
    attr.close();

    attr = root.createAttribute("xROI", H5::PredType::STD_U32LE, scalar);
    attr.write(H5::PredType::NATIVE_UINT, &parameters_.xROI);
    attr.close();

    attr = root.createAttribute("yROI", H5::PredType::STD_U32LE, scalar);
    attr.write(H5::PredType::NATIVE_UINT, &parameters_.yROI);
    attr.close();

    scalar.close();
    root.close();
}
void Attributes::open(H5::H5File &f, const cpath &path)
{
    attrs.resize(0);

    H5::Group group = f.openGroup(path.generic_string().c_str());

    attributes_append_group(*this, group, path, path);
}
bool hdfutil::HasDataSet (const H5::H5File & h5file, const std::string & name) {
    hid_t loc_id = h5file.getLocId();
#if H5_VERS_MINOR >= 8
    hid_t dataset_id = H5Dopen1(loc_id, name.c_str());
#else
    hid_t dataset_id = H5Dopen (loc_id, name.c_str());
#endif
    if (dataset_id < 0)
        return false;
    H5Dclose(dataset_id);
    return true;
}
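// --- Added example (not part of the original sources) ---
// Alternative existence check, sketched for HDF5 >= 1.8: H5Lexists tests the
// link without opening the object, so it avoids the version-dependent
// H5Dopen/H5Dopen1 branch above. Note it only confirms that a link exists at
// the path, not that the target is a dataset.
#include <hdf5.h>
#include <H5Cpp.h>
#include <string>

bool HasLink(const H5::H5File & h5file, const std::string & name) {
    htri_t exists = H5Lexists(h5file.getLocId(), name.c_str(), H5P_DEFAULT);
    return exists > 0;
}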
void pyne::Material::from_hdf5(std::string filename, std::string datapath, int row, int protocol) {
    // Turn off annoying HDF5 errors
    H5::Exception::dontPrint();

    // Check that the file is there
    if (!pyne::file_exists(filename))
        throw pyne::FileNotFound(filename);

    // Check to see if the file is in HDF5 format.
    bool isH5 = H5::H5File::isHdf5(filename);
    if (!isH5)
        throw h5wrap::FileNotHDF5(filename);

    // Open the database
    H5::H5File db(filename, H5F_ACC_RDONLY);

    bool datapath_exists = h5wrap::path_exists(&db, datapath);
    if (!datapath_exists)
        throw h5wrap::PathNotFound(filename, datapath);

    // Clear current content
    comp.clear();

    // Load via various protocols
    if (protocol == 0)
        _load_comp_protocol0(&db, datapath, row);
    else if (protocol == 1)
        _load_comp_protocol1(&db, datapath, row);
    else
        throw pyne::MaterialProtocolError();

    // Close the database
    db.close();

    // Renormalize the composition, just to be safe.
    norm_comp();
}
bool Bundle2::checkGeometry_(H5::H5File& file) const {
    bool found = false;

    H5::Group root = file.openGroup("/");
    const hsize_t maxObjs = root.getNumObjs();
    for(hsize_t obj = 0; obj < maxObjs; ++obj) {
        string objName = root.getObjnameByIdx(obj);
        if(objName == string("Geometry"))
            found = true;
    }

    root.close();
    return found;
}
std::vector<double> readEloss(const std::string& path) {
    H5::H5File file(path.c_str(), H5F_ACC_RDONLY);
    H5::DataSet ds = file.openDataSet("eloss");

    H5::DataSpace filespace = ds.getSpace();
    int ndims = filespace.getSimpleExtentNdims();
    assert(ndims == 1);
    hsize_t dim;
    filespace.getSimpleExtentDims(&dim);

    H5::DataSpace memspace(ndims, &dim);
    std::vector<double> res(dim);
    ds.read(res.data(), H5::PredType::NATIVE_DOUBLE, memspace, filespace);

    filespace.close();
    memspace.close();
    ds.close();
    file.close();

    return res;
}
void StateSet::init_from_datafile(std::string filename) {
    // open other file read-only
    H5::H5File otherfile;
    otherfile.openFile(filename, H5F_ACC_RDONLY);
    H5::Group otherroot = otherfile.openGroup("/");

    // check that grid properties match
    int othersx, othersy, otherN;
    double otherdx;
    otherroot.openAttribute("num_states").read(H5::PredType::NATIVE_INT, &otherN);
    otherroot.openAttribute("grid_sizex").read(H5::PredType::NATIVE_INT, &othersx);
    otherroot.openAttribute("grid_sizey").read(H5::PredType::NATIVE_INT, &othersy);
    otherroot.openAttribute("grid_delta").read(H5::PredType::NATIVE_DOUBLE, &otherdx);

    if (static_cast<int>(N) != otherN)
        throw GeneralError("Cannot copy state data from datafile: value for num_states does not match.");
    if (static_cast<int>(datalayout.sizex) != othersx)
        throw GeneralError("Cannot copy state data from datafile: value for grid_sizex does not match.");
    if (static_cast<int>(datalayout.sizey) != othersy)
        throw GeneralError("Cannot copy state data from datafile: value for grid_sizey does not match.");
    if (datalayout.dx != otherdx)
        throw GeneralError("Cannot copy state data from datafile: value for grid_delta does not match.");

    // copy data
    H5::DataSet other_states_data = otherfile.openDataSet("/states");
    other_states_data.read(state_array->get_dataptr(), other_states_data.getArrayType());
}
template <typename _Tp>
bool readDataset1D(const H5::H5File &file, const std::string &name, std::vector<_Tp> &data)
{
    H5::DataSet dataset = file.openDataSet(name);
    H5::DataSpace dataspace = dataset.getSpace();
    hsize_t dims_out[1];
    int rank = dataspace.getSimpleExtentDims(dims_out, NULL);

    int _type;
    bool read = getNodeType(dataset, _type);
    read &= (_type == StorageNode::SEQ);
    read &= (rank == 1);

    if (!read)
        return read;

    data.resize(dims_out[0]);
    dataset.read(data.data(), dataset.getDataType());
    return true;
}
void Bundle2::loadParameters(H5::H5File& file) {
    H5::Group root = file.openGroup("/");

    // Checking version
    unsigned int fileVersion;
    H5::Attribute attr = root.openAttribute("version");
    attr.read(H5::PredType::NATIVE_UINT, &fileVersion);
    attr.close();

    if(fileVersion != version_)
        throw std::runtime_error("Incompatible bundle version!");

    // Reading number of cameras
    hsize_t count;
    H5::Group frame0Group = root.openGroup("POI/Frame 0000");
    attr = frame0Group.openAttribute("count");
    attr.read(H5::PredType::NATIVE_HSIZE, &count);
    attr.close();
    frame0Group.close();
    numCameras_ = count;

    // Reading parameters
    unsigned char r2;
    attr = root.openAttribute("reduce2");
    attr.read(H5::PredType::NATIVE_UCHAR, &r2);
    attr.close();
    parameters_.reduce2 = (r2 == 1);

    attr = root.openAttribute("xROI");
    attr.read(H5::PredType::NATIVE_UINT, &parameters_.xROI);
    attr.close();

    attr = root.openAttribute("yROI");
    attr.read(H5::PredType::NATIVE_UINT, &parameters_.yROI);
    attr.close();

    root.close();
}
int Channel::write_state_to_hdf5(H5::H5File & H5StateFile, const string & rootStr){
    // write waveform data
    FILE_LOG(logDEBUG) << "Writing Waveform: " << rootStr + "/waveformLib";
    vector2h5array<float>(waveform_, &H5StateFile, rootStr + "/waveformLib", rootStr + "/waveformLib", H5::PredType::NATIVE_FLOAT);

    // add channel state information to root group
    H5::Group tmpGroup = H5StateFile.openGroup(rootStr);
    element2h5attribute<float>("offset", offset_, &tmpGroup, H5::PredType::NATIVE_FLOAT);
    element2h5attribute<float>("scale", scale_, &tmpGroup, H5::PredType::NATIVE_FLOAT);
    element2h5attribute<bool>("enabled", enabled_, &tmpGroup, H5::PredType::NATIVE_UINT);
    element2h5attribute<int>("trigDelay", trigDelay_, &tmpGroup, H5::PredType::NATIVE_INT);
    tmpGroup.close();

    // Save the link list data:
    // save number of banks to rootStr + "/linkListData" attribute "numBanks"
    // USHORT numBanks;
    // numBanks = banks_.size(); // get number of banks from channel
    //
    // // set attribute
    // FILE_LOG(logDEBUG) << "Creating Group: " << rootStr + "/linkListData";
    // tmpGroup = H5StateFile.createGroup(rootStr + "/linkListData");
    // element2h5attribute<USHORT>("numBanks", numBanks, &tmpGroup, H5::PredType::NATIVE_UINT16);
    // tmpGroup.close();
    //
    // std::ostringstream tmpStream;
    // // Now loop over the number of banks found and add the bank
    // for (USHORT bankct=0; bankct<numBanks; bankct++) {
    //     tmpStream.str("");
    //     tmpStream << rootStr << "/linkListData/bank" << bankct+1;
    //     FILE_LOG(logDEBUG) << "Writing State Bank: " << bankct+1 << " to hdf5";
    //     banks_[bankct].write_state_to_hdf5(H5StateFile, tmpStream.str());
    // }
    return 0;
}
void pyne::Material::write_hdf5(std::string filename, std::string datapath, std::string nucpath, float row, int chunksize) {
    // Turn off annoying HDF5 errors
    H5::Exception::dontPrint();

    // Create new/open datafile.
    H5::H5File db;
    if (pyne::file_exists(filename)) {
        bool isH5 = H5::H5File::isHdf5(filename);
        if (!isH5)
            throw h5wrap::FileNotHDF5(filename);
        db = H5::H5File(filename, H5F_ACC_RDWR);
    }
    else
        db = H5::H5File(filename, H5F_ACC_TRUNC);

    //
    // Read in nuclist if available, write it out if not
    //
    bool nucpath_exists = h5wrap::path_exists(&db, nucpath);
    std::vector<int> nuclides;
    int nuc_size;
    hsize_t nuc_dims[1];

    if (nucpath_exists) {
        nuclides = h5wrap::h5_array_to_cpp_vector_1d<int>(&db, nucpath, H5::PredType::NATIVE_INT);
        nuc_size = nuclides.size();
        nuc_dims[0] = nuc_size;
    }
    else {
        nuclides = std::vector<int>();
        for (pyne::comp_iter i = comp.begin(); i != comp.end(); i++)
            nuclides.push_back(i->first);
        nuc_size = nuclides.size();

        // Create the data if it doesn't exist
        int nuc_data[nuc_size];
        for (int n = 0; n != nuc_size; n++)
            nuc_data[n] = nuclides[n];
        nuc_dims[0] = nuc_size;
        H5::DataSpace nuc_space(1, nuc_dims);
        H5::DataSet nuc_set = db.createDataSet(nucpath, H5::PredType::NATIVE_INT, nuc_space);
        nuc_set.write(nuc_data, H5::PredType::NATIVE_INT);
        db.flush(H5F_SCOPE_GLOBAL);
    }

    //
    // Write out to the file
    //
    H5::DataSet data_set;
    H5::DataSpace data_space, data_hyperslab;
    int data_rank = 1;
    hsize_t data_dims[1] = {1};
    hsize_t data_max_dims[1] = {H5S_UNLIMITED};
    hsize_t data_offset[1] = {0};

    size_t material_struct_size = sizeof(pyne::material_struct) + sizeof(double)*(nuc_size);
    H5::CompType data_desc(material_struct_size);
    H5::ArrayType comp_values_array_type(H5::PredType::NATIVE_DOUBLE, 1, nuc_dims);

    // make the data table type
    data_desc.insertMember("name", HOFFSET(pyne::material_struct, name), H5::StrType(0, 20));
    data_desc.insertMember("mass", HOFFSET(pyne::material_struct, mass), H5::PredType::NATIVE_DOUBLE);
    data_desc.insertMember("atoms_per_mol", HOFFSET(pyne::material_struct, atoms_per_mol), H5::PredType::NATIVE_DOUBLE);
    data_desc.insertMember("comp", HOFFSET(pyne::material_struct, comp), comp_values_array_type);

    // make the data array, have to over-allocate
    material_struct * mat_data = (material_struct *) malloc(material_struct_size);
    int name_len = name.length();
    for (int i=0; i < 20; i++) {
        if (i < name_len)
            (*mat_data).name[i] = name[i];
        else
            (*mat_data).name[i] = '\0';
    }
    (*mat_data).mass = mass;
    (*mat_data).atoms_per_mol = atoms_per_mol;
    for (int n = 0; n != nuc_size; n++) {
        if (0 < comp.count(nuclides[n]))
            (*mat_data).comp[n] = comp[nuclides[n]];
        else
            (*mat_data).comp[n] = 0.0;
    }

    // get / make the data set
    bool datapath_exists = h5wrap::path_exists(&db, datapath);
    if (datapath_exists) {
        data_set = db.openDataSet(datapath);
        data_space = data_set.getSpace();
        data_rank = data_space.getSimpleExtentDims(data_dims, data_max_dims);

        // Determine the row index.
        int row_num = (int) row;

        if (std::signbit(row))
            row_num = data_dims[0] + row;  // careful, row is negative

        if (data_dims[0] <= row_num) {
            // row == -0: extend the data set so that we can append, or
            // row_num is larger than the current dimension: resize to accommodate.
            data_dims[0] = row_num + 1;
            data_set.extend(data_dims);
        }
        else if (row_num < 0)
            throw h5wrap::HDF5BoundsError();

        data_offset[0] = row_num;
    }
    else {
        // Get full space
        data_space = H5::DataSpace(1, data_dims, data_max_dims);

        // Make data set properties to enable chunking
        H5::DSetCreatPropList data_set_params;
        hsize_t chunk_dims[1] = {chunksize};
        data_set_params.setChunk(1, chunk_dims);

        material_struct * data_fill_value = (material_struct *) malloc(material_struct_size);
        for (int i=0; i < 20; i++)
            (*data_fill_value).name[i] = '\0';
        (*data_fill_value).mass = -1.0;
        (*data_fill_value).atoms_per_mol = -1.0;
        for (int n = 0; n != nuc_size; n++)
            (*data_fill_value).comp[n] = 0.0;
        // setFillValue expects a pointer to the fill value itself.
        data_set_params.setFillValue(data_desc, data_fill_value);

        // Create the data set
        data_set = db.createDataSet(datapath, data_desc, data_space, data_set_params);
        data_set.extend(data_dims);

        // Add attribute pointing to nuc path
        H5::StrType nuc_attr_type(0, nucpath.length());
        H5::DataSpace nuc_attr_space(H5S_SCALAR);
        H5::Attribute nuc_attr = data_set.createAttribute("nucpath", nuc_attr_type, nuc_attr_space);
        nuc_attr.write(nuc_attr_type, nucpath);

        // Remember to de-allocate
        free(data_fill_value);
    }

    // Get the data hyperslab
    data_hyperslab = data_set.getSpace();
    hsize_t data_count[1] = {1};
    data_hyperslab.selectHyperslab(H5S_SELECT_SET, data_count, data_offset);

    // Get a memory space for writing
    H5::DataSpace mem_space(1, data_count, data_max_dims);

    // Write the row...
    data_set.write(mat_data, data_desc, mem_space, data_hyperslab);

    // Close out the HDF5 file
    db.close();

    // Remember the milk!
    // ...by which I mean to deallocate
    free(mat_data);
}
std::string read_string_attr(H5::H5File &f, const char *parent_group_str, const char *name)
{
    H5::Group group = f.openGroup(parent_group_str);

    return read_string_attr(f, group, name);
}
void Bundle2::loadGeometry_(H5::H5File& file) {
    H5::Group geometryGroup = file.openGroup("/Geometry");

    // Loading poses
    H5::DataSet posesDataSet = geometryGroup.openDataSet("Poses");
    double* posesData = (double*)malloc(frames_.size()*12*sizeof(double));
    posesDataSet.read((void*)posesData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
    posesDataSet.close();

    size_t i = 0;
    for(deque<Frame*>::iterator it = frames_.begin(); it != frames_.end(); ++it) {
        Pose* pose = new Pose;
        pose->sett(core::RealPoint3D<double>(posesData[i*12], posesData[i*12 + 1], posesData[i*12 + 2]));

        core::Matrix<double> R(3, 3);
        R[0][0] = posesData[i*12 + 3];
        R[1][0] = posesData[i*12 + 4];
        R[2][0] = posesData[i*12 + 5];
        R[0][1] = posesData[i*12 + 6];
        R[1][1] = posesData[i*12 + 7];
        R[2][1] = posesData[i*12 + 8];
        R[0][2] = posesData[i*12 + 9];
        R[1][2] = posesData[i*12 + 10];
        R[2][2] = posesData[i*12 + 11];
        pose->setR(R);
        pose->calcEulerAngles();
        pose->setorientationSynchronWithAngles(true);
        pose->setderivationsSynchronWithAngles(false);

        (*it)->setpose(pose);
        ++i;
    }

    free((void*)posesData);

    // Loading points
    H5::DataSet pointsDataSet = geometryGroup.openDataSet("Points");
    double* pointsData = (double*)malloc(tracks_.size()*3*sizeof(double));
    pointsDataSet.read((void*)pointsData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
    pointsDataSet.close();

    i = 0;
    for(deque<Track*>::iterator it = tracks_.begin(); it != tracks_.end(); it++) {
        Point* point = new Point(core::RealPoint3D<double>(pointsData[i*3], pointsData[i*3 + 1], pointsData[i*3 + 2]));
        (*it)->setpoint(point);
        ++i;
    }

    free((void*)pointsData);

    // Loading inlier information
    H5::DataSet inliersDataSet = geometryGroup.openDataSet("Inliers");
    hvl_t* inliersData = (hvl_t*)malloc(frames_.size()*sizeof(hvl_t));

    H5::VarLenType memType(&H5::PredType::NATIVE_UCHAR);
    inliersDataSet.read((void*)inliersData, memType, H5::DataSpace::ALL, H5::DataSpace::ALL);
    memType.close();
    inliersDataSet.close();

    i = 0;
    for(deque<Frame*>::iterator it = frames_.begin(); it != frames_.end(); it++) {
        unsigned char* inl = (unsigned char*)(inliersData[i].p);

        size_t k = 0;
        for(size_t j = 0; j < (*it)->size(); ++j) {
            View& v = (**it)[j];
            for(unsigned int cam = 0; cam < v.numCameras(); ++cam) {
                if(v.inCamera(cam)) {
                    Ray ray;
                    if(inl[k])
                        ray.setinlier(true);
                    else
                        ray.setinlier(false);
                    v.addRay(cam, ray);
                    ++k;
                }
            }
        }

        ++i;
    }

    for(size_t j = 0; j < frames_.size(); ++j)
        free(inliersData[j].p);
    free((void*)inliersData);

    // Loading curves if they exist
    bool curvesFound = false;
    const hsize_t maxObjs = geometryGroup.getNumObjs();
    for(hsize_t obj = 0; obj < maxObjs; ++obj) {
        string objName = geometryGroup.getObjnameByIdx(obj);
        if(objName == string("Curves"))
            curvesFound = true;
    }

    if(curvesFound) {
        H5::DataSet curvesDataSet = geometryGroup.openDataSet("Curves");

        hsize_t curvesDim[1];
        H5::DataSpace curvesDS = curvesDataSet.getSpace();
        curvesDS.getSimpleExtentDims(curvesDim);
        curvesDS.close();

        hvl_t* curvesData = (hvl_t*)malloc(curvesDim[0]*sizeof(hvl_t));
        H5::VarLenType memType(&H5::PredType::NATIVE_HSIZE);
        curvesDataSet.read((void*)curvesData, memType, H5::DataSpace::ALL, H5::DataSpace::ALL);
        memType.close();
        curvesDataSet.close();

        for(size_t c = 0; c < curvesDim[0]; ++c) {
            const size_t cur_c = addCurve();
            for(size_t p = 0; p < curvesData[c].len; ++p) {
                curves_[cur_c].addPoint(((size_t*)(curvesData[c].p))[p]);
            }
        }

        for(size_t i = 0; i < curvesDim[0]; ++i)
            free(curvesData[i].p);
        free((void*)curvesData);
    }

    geometryGroup.close();
}
/// Takes the "/" group at the top of the file.
group(H5::H5File f) : _g(f.openGroup("/")) {}  // cannot fail, right?
group(hid_t id_, bool is_group) {
    if (is_group) {
        _g.setId(id_);
    } else {
        H5::H5File f;
        f.setId(id_);
        *this = group(f);
    }
}
Bundle2::Bundle2(const boost::filesystem::path& fileName, bool loadGeometry):
    version_(BUNDLE_VERSION), poiFirstFrame_(0) {
    // Opening file
    H5::H5File bundleFile;
    bundleFile.openFile(fileName.string(), H5F_ACC_RDONLY);

    loadParameters(bundleFile);

    // Loading POI
    H5::Group poiGroup = bundleFile.openGroup("/POI");

    hsize_t count;
    H5::Attribute attr = poiGroup.openAttribute("count");
    attr.read(H5::PredType::NATIVE_HSIZE, &count);
    attr.close();

    for(size_t frame = 0; frame < count; ++frame) {
        cout.flush();

        const std::string frameGroupName = boost::str(boost::format("Frame %1$04d") % frame);
        H5::Group frameGroup = poiGroup.openGroup(frameGroupName);

        addPOIFrame();
        for(size_t camera = 0; camera < numCameras_; ++camera)
            poi_[poi_.size() - 1][camera].load(frameGroup, camera);

        frameGroup.close();
    }

    poiGroup.close();

    // Loading frames
    H5::Group bundleGroup = bundleFile.openGroup("/Bundle");
    H5::Group framesGroup = bundleGroup.openGroup("Frames");

    attr = framesGroup.openAttribute("count");
    attr.read(H5::PredType::NATIVE_HSIZE, &count);
    attr.close();

    for(size_t frame = 0; frame < count; ++frame) {
        Frame* f = new Frame(framesGroup, frame, numCameras_);
        frames_.push_back(f);
    }

    framesGroup.close();

    // Loading tracks
    H5::DataSet tracksDataset = bundleGroup.openDataSet("Tracks");

    hsize_t tracksDim[2];
    H5::DataSpace tracksDS = tracksDataset.getSpace();
    tracksDS.getSimpleExtentDims(tracksDim);
    tracksDS.close();

    for(size_t i = 0; i < tracksDim[0]; ++i) {
        size_t j = addTrack();
        tracks_[j]->load(tracksDataset, frames_, i);
    }

    tracksDataset.close();
    bundleGroup.close();

    if(loadGeometry && checkGeometry_(bundleFile))
        loadGeometry_(bundleFile);

    bundleFile.close();
}
bool TStellarData::save(const std::string& fname, const std::string& group, const std::string &dset, int compression) {
    if((compression < 0) || (compression > 9)) {
        std::cerr << "! Invalid gzip compression level: " << compression << std::endl;
        return false;
    }

    hsize_t nstars = star.size();
    if(nstars == 0) {
        std::cerr << "! No stars to write." << std::endl;
        return false;
    }

    H5::Exception::dontPrint();

    H5::H5File *file = H5Utils::openFile(fname);
    if(file == NULL) { return false; }

    H5::Group *gp = H5Utils::openGroup(file, group);
    if(gp == NULL) {
        delete file;
        return false;
    }

    /*
     *  Photometry
     */

    // Datatype: the "nDet" member holds per-band detection counts, so its
    // array type is built on an unsigned 32-bit base type.
    hsize_t nbands = NBANDS;
    H5::ArrayType f4arr(H5::PredType::NATIVE_FLOAT, 1, &nbands);
    H5::ArrayType u4arr(H5::PredType::NATIVE_UINT32, 1, &nbands);
    H5::CompType dtype(sizeof(TFileData));
    dtype.insertMember("obj_id", HOFFSET(TFileData, obj_id), H5::PredType::NATIVE_UINT64);
    dtype.insertMember("l", HOFFSET(TFileData, l), H5::PredType::NATIVE_DOUBLE);
    dtype.insertMember("b", HOFFSET(TFileData, b), H5::PredType::NATIVE_DOUBLE);
    dtype.insertMember("mag", HOFFSET(TFileData, mag), f4arr);
    dtype.insertMember("err", HOFFSET(TFileData, err), f4arr);
    dtype.insertMember("maglimit", HOFFSET(TFileData, maglimit), f4arr);
    dtype.insertMember("nDet", HOFFSET(TFileData, N_det), u4arr);
    dtype.insertMember("EBV", HOFFSET(TFileData, EBV), H5::PredType::NATIVE_FLOAT);

    // Dataspace
    hsize_t dim = nstars;
    H5::DataSpace dspace(1, &dim);

    // Property List
    H5::DSetCreatPropList plist;
    plist.setChunk(1, &nstars);
    plist.setDeflate(compression);

    // Dataset
    H5::DataSet dataset = gp->createDataSet(dset, dtype, dspace, plist);

    // Write dataset
    TFileData* data = new TFileData[nstars];
    for(size_t i=0; i<nstars; i++) {
        data[i].obj_id = star[i].obj_id;
        data[i].l = star[i].l;
        data[i].b = star[i].b;
        for(size_t k=0; k<NBANDS; k++) {
            data[i].mag[k] = star[i].m[k];
            data[i].err[k] = star[i].err[k];
            data[i].maglimit[k] = star[i].maglimit[k];
        }
        data[i].EBV = star[i].EBV;
    }
    dataset.write(data, dtype);

    /*
     *  Attributes
     */

    dim = 1;
    H5::DataSpace att_dspace(1, &dim);

    H5::PredType att_dtype = H5::PredType::NATIVE_UINT64;
    H5::Attribute att_healpix_index = dataset.createAttribute("healpix_index", att_dtype, att_dspace);
    att_healpix_index.write(att_dtype, &healpix_index);

    att_dtype = H5::PredType::NATIVE_UINT32;
    H5::Attribute att_nside = dataset.createAttribute("nside", att_dtype, att_dspace);
    att_nside.write(att_dtype, &nside);

    att_dtype = H5::PredType::NATIVE_UCHAR;
    H5::Attribute att_nested = dataset.createAttribute("nested", att_dtype, att_dspace);
    att_nested.write(att_dtype, &nested);

    att_dtype = H5::PredType::NATIVE_DOUBLE;
    H5::Attribute att_l = dataset.createAttribute("l", att_dtype, att_dspace);
    att_l.write(att_dtype, &l);

    att_dtype = H5::PredType::NATIVE_DOUBLE;
    H5::Attribute att_b = dataset.createAttribute("b", att_dtype, att_dspace);
    att_b.write(att_dtype, &b);

    att_dtype = H5::PredType::NATIVE_DOUBLE;
    H5::Attribute att_EBV = dataset.createAttribute("EBV", att_dtype, att_dspace);
    att_EBV.write(att_dtype, &EBV);

    file->close();

    delete[] data;
    delete gp;
    delete file;

    return true;
}
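// --- Added example (not part of the original sources) ---
// Sketch of a matching reader for the photometry table written by
// TStellarData::save above. It assumes the same TFileData struct and NBANDS
// constant; the function name and the choice to read every record at once
// are illustrative assumptions.
#include <H5Cpp.h>
#include <string>
#include <vector>

std::vector<TFileData> read_photometry(const std::string& fname, const std::string& dset_path) {
    H5::H5File file(fname, H5F_ACC_RDONLY);
    H5::DataSet dataset = file.openDataSet(dset_path);

    // Rebuild the compound type exactly as it was written.
    hsize_t nbands = NBANDS;
    H5::ArrayType f4arr(H5::PredType::NATIVE_FLOAT, 1, &nbands);
    H5::ArrayType u4arr(H5::PredType::NATIVE_UINT32, 1, &nbands);
    H5::CompType dtype(sizeof(TFileData));
    dtype.insertMember("obj_id", HOFFSET(TFileData, obj_id), H5::PredType::NATIVE_UINT64);
    dtype.insertMember("l", HOFFSET(TFileData, l), H5::PredType::NATIVE_DOUBLE);
    dtype.insertMember("b", HOFFSET(TFileData, b), H5::PredType::NATIVE_DOUBLE);
    dtype.insertMember("mag", HOFFSET(TFileData, mag), f4arr);
    dtype.insertMember("err", HOFFSET(TFileData, err), f4arr);
    dtype.insertMember("maglimit", HOFFSET(TFileData, maglimit), f4arr);
    dtype.insertMember("nDet", HOFFSET(TFileData, N_det), u4arr);
    dtype.insertMember("EBV", HOFFSET(TFileData, EBV), H5::PredType::NATIVE_FLOAT);

    // Size the buffer from the file dataspace and read all records.
    hsize_t dim = 0;
    dataset.getSpace().getSimpleExtentDims(&dim);
    std::vector<TFileData> data(dim);
    dataset.read(data.data(), dtype);

    return data;
}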
bool save_mat_image(cv::Mat& img, TRect& rect, std::string fname, std::string group_name,
                    std::string dset_name, std::string dim1, std::string dim2, int compression) {
    assert((img.dims == 2) && (img.rows == rect.N_bins[0]) && (img.cols == rect.N_bins[1]));

    if((compression < 0) || (compression > 9)) {
        std::cerr << "! Invalid gzip compression level: " << compression << std::endl;
        return false;
    }

    H5::Exception::dontPrint();

    H5::H5File *file = H5Utils::openFile(fname);
    if(file == NULL) { return false; }

    H5::Group *group = H5Utils::openGroup(file, group_name);
    if(group == NULL) {
        delete file;
        return false;
    }

    /*
     *  Image Data
     */

    // Creation property list
    H5::DSetCreatPropList plist;
    int rank = 2;
    hsize_t dim[2] = {rect.N_bins[0], rect.N_bins[1]};
    plist.setDeflate(compression);  // gzip compression level
    float fillvalue = 0;
    plist.setFillValue(H5::PredType::NATIVE_FLOAT, &fillvalue);
    plist.setChunk(rank, &(dim[0]));
    H5::DataSpace dspace(rank, &(dim[0]));

    H5::DataSet* dataset;
    try {
        dataset = new H5::DataSet(group->createDataSet(dset_name, H5::PredType::NATIVE_FLOAT, dspace, plist));
    } catch(H5::FileIException &create_dset_err) {
        std::cerr << "Unable to create dataset '" << dset_name << "'." << std::endl;
        delete group;
        delete file;
        return false;
    }

    float *buf = new float[rect.N_bins[0]*rect.N_bins[1]];
    for(size_t j=0; j<rect.N_bins[0]; j++) {
        for(size_t k=0; k<rect.N_bins[1]; k++) {
            buf[rect.N_bins[1]*j + k] = img.at<double>(j,k);
            /*float tmp = img.at<double>(j,k);
            if(tmp > 0.) {
                std::cerr << j << ", " << k << " --> " << j + rect.N_bins[0]*k << " --> " << tmp << std::endl;
            }*/
        }
    }
    dataset->write(buf, H5::PredType::NATIVE_FLOAT);

    /*
     *  Attributes
     */

    hsize_t att_dim = 2;
    H5::DataSpace att_dspace(1, &att_dim);

    H5::PredType att_dtype = H5::PredType::NATIVE_UINT32;
    H5::Attribute att_N = dataset->createAttribute("N_pix", att_dtype, att_dspace);
    att_N.write(att_dtype, &(rect.N_bins));

    att_dtype = H5::PredType::NATIVE_DOUBLE;
    H5::Attribute att_min = dataset->createAttribute("min", att_dtype, att_dspace);
    att_min.write(att_dtype, &(rect.min));

    att_dtype = H5::PredType::NATIVE_DOUBLE;
    H5::Attribute att_max = dataset->createAttribute("max", att_dtype, att_dspace);
    att_max.write(att_dtype, &(rect.max));

    att_dim = 1;
    H5::StrType vls_type(0, H5T_VARIABLE);
    H5::DataSpace att_space_str(H5S_SCALAR);
    H5::Attribute att_name_1 = dataset->createAttribute("dim_name_1", vls_type, att_space_str);
    att_name_1.write(vls_type, dim1);
    H5::Attribute att_name_2 = dataset->createAttribute("dim_name_2", vls_type, att_space_str);
    att_name_2.write(vls_type, dim2);

    file->close();

    delete[] buf;
    delete dataset;
    delete group;
    delete file;

    return true;
}
void Bundle2::saveGeometry(const boost::filesystem::path& fileName) const {
    H5::H5File bundleFile;
    bundleFile.openFile(fileName.string(), H5F_ACC_RDWR);
    H5::Group rootGroup = bundleFile.openGroup("/");

    // If the group "Geometry" exists, delete it!
    if(checkGeometry_(bundleFile)) {
        rootGroup.unlink("Geometry");
    }

    // Creating group Geometry
    H5::Group geometryGroup = rootGroup.createGroup("Geometry");

    // Saving poses
    const hsize_t posesChunkDim[] = { 3, 12 };
    H5::DSetCreatPropList posesPropList;
    posesPropList.setLayout(H5D_CHUNKED);
    posesPropList.setChunk(2, posesChunkDim);
    posesPropList.setDeflate(9);

    const hsize_t posesMaxDim[] = { H5S_UNLIMITED, 12 };
    const hsize_t posesCurDim[] = { frames_.size(), 12 };
    H5::DataSpace posesDS(2, posesCurDim, posesMaxDim);

    H5::DataSet posesDataSet = geometryGroup.createDataSet("Poses", H5::PredType::IEEE_F64LE, posesDS, posesPropList);

    double* posesData = (double*)malloc(frames_.size()*12*sizeof(double));

    size_t i = 0;
    for(deque<Frame*>::const_iterator it = frames_.begin(); it != frames_.end(); it++) {
        posesData[i*12] = (*it)->pose()->t().x();
        posesData[i*12 + 1] = (*it)->pose()->t().y();
        posesData[i*12 + 2] = (*it)->pose()->t().z();

        core::Matrix<double> R = (*it)->pose()->R();
        posesData[i*12 + 3] = R[0][0];
        posesData[i*12 + 4] = R[1][0];
        posesData[i*12 + 5] = R[2][0];
        posesData[i*12 + 6] = R[0][1];
        posesData[i*12 + 7] = R[1][1];
        posesData[i*12 + 8] = R[2][1];
        posesData[i*12 + 9] = R[0][2];
        posesData[i*12 + 10] = R[1][2];
        posesData[i*12 + 11] = R[2][2];

        ++i;
    }

    posesDataSet.write((const void*)posesData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
    free((void*)posesData);

    posesDataSet.close();
    posesDS.close();

    // Saving points
    const hsize_t pointsChunkDim[] = { 10, 3 };
    H5::DSetCreatPropList pointsPropList;
    pointsPropList.setLayout(H5D_CHUNKED);
    pointsPropList.setChunk(2, pointsChunkDim);
    pointsPropList.setDeflate(9);

    const hsize_t pointsMaxDim[] = { H5S_UNLIMITED, 3 };
    const hsize_t pointsCurDim[] = { tracks_.size(), 3 };
    H5::DataSpace pointsDS(2, pointsCurDim, pointsMaxDim);

    H5::DataSet pointsDataSet = geometryGroup.createDataSet("Points", H5::PredType::IEEE_F64LE, pointsDS, pointsPropList);

    double* pointsData = (double*)malloc(tracks_.size()*3*sizeof(double));

    i = 0;
    for(deque<Track*>::const_iterator it = tracks_.begin(); it != tracks_.end(); it++) {
        pointsData[i*3] = (*it)->point()->coords().x();
        pointsData[i*3 + 1] = (*it)->point()->coords().y();
        pointsData[i*3 + 2] = (*it)->point()->coords().z();
        ++i;
    }

    pointsDataSet.write((const void*)pointsData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
    free((void*)pointsData);

    pointsDataSet.close();
    pointsDS.close();

    // Saving inlier information
    const hsize_t inliersChunkDim[] = { 3 };
    H5::DSetCreatPropList inliersPropList;
    inliersPropList.setLayout(H5D_CHUNKED);
    inliersPropList.setChunk(1, inliersChunkDim);
    inliersPropList.setDeflate(9);

    const hsize_t inliersMaxDim[] = { H5S_UNLIMITED };
    const hsize_t inliersCurDim[] = { frames_.size() };
    H5::DataSpace inliersDS(1, inliersCurDim, inliersMaxDim);

    H5::VarLenType inliersType(&H5::PredType::STD_U8LE);

    H5::DataSet inliersDataSet = geometryGroup.createDataSet("Inliers", inliersType, inliersDS, inliersPropList);

    i = 0;
    for(deque<Frame*>::const_iterator it = frames_.begin(); it != frames_.end(); it++) {
        hvl_t inliersLine;

        size_t inliersLineSize = 0;
        for(size_t j = 0; j < (*it)->size(); ++j) {
            View& v = (**it)[j];
            for(unsigned int cam = 0; cam < v.numCameras(); ++cam) {
                if(v.inCamera(cam))
                    ++inliersLineSize;
            }
        }

        inliersLine.len = inliersLineSize;
        inliersLine.p = malloc(inliersLineSize*sizeof(unsigned char));

        size_t k = 0;
        for(size_t j = 0; j < (*it)->size(); ++j) {
            View& v = (**it)[j];
            for(unsigned int cam = 0; cam < v.numCameras(); ++cam) {
                if(v.inCamera(cam)) {
                    ((unsigned char*)(inliersLine.p))[k] = v.ray(cam).inlier() ? 1 : 0;
                    ++k;
                }
            }
        }

        const hsize_t dsOffset[] = { i };
        const hsize_t dsCount[] = { 1 };
        H5::DataSpace inliersCurDS = inliersDataSet.getSpace();
        inliersCurDS.selectHyperslab(H5S_SELECT_SET, dsCount, dsOffset);

        const hsize_t memDim[] = { 1 };
        H5::DataSpace memDS(1, memDim, memDim);
        H5::VarLenType memType(&H5::PredType::NATIVE_UCHAR);

        inliersDataSet.write((const void*)&inliersLine, memType, memDS, inliersCurDS);

        memType.close();
        memDS.close();
        inliersCurDS.close();

        free(inliersLine.p);

        ++i;
    }

    inliersDataSet.close();
    inliersType.close();
    inliersDS.close();

    // Saving curves
    if(!curves_.empty()) {
        const hsize_t chunkDim[] = { 5 };
        H5::DSetCreatPropList propList;
        propList.setLayout(H5D_CHUNKED);
        propList.setChunk(1, chunkDim);
        propList.setDeflate(9);

        H5::VarLenType curveDatasetType(&H5::PredType::STD_U64LE);

        hsize_t curvesDim[] = { curves_.size() };
        hsize_t curvesMaxDim[] = { H5S_UNLIMITED };
        H5::DataSpace curvesDataspace(1, curvesDim, curvesMaxDim);

        H5::DataSet curvesDataset = geometryGroup.createDataSet("Curves", curveDatasetType, curvesDataspace, propList);

        for(size_t i = 0; i < curves_.size(); ++i) {
            hvl_t curveLine;

            curveLine.len = curves_[i].size();
            curveLine.p = malloc(curves_[i].size()*sizeof(size_t));
            for(size_t j = 0; j < curves_[i].size(); ++j)
                ((size_t*)(curveLine.p))[j] = curves_[i].track(j);

            const hsize_t dsOffset[] = { i };
            const hsize_t dsCount[] = { 1 };
            H5::DataSpace curDS = curvesDataset.getSpace();
            curDS.selectHyperslab(H5S_SELECT_SET, dsCount, dsOffset);

            const hsize_t memDim[] = { 1 };
            H5::DataSpace memDS(1, memDim, memDim);
            H5::VarLenType memType(&H5::PredType::NATIVE_HSIZE);

            curvesDataset.write((const void*)&curveLine, memType, memDS, curDS);

            memType.close();
            memDS.close();
            curDS.close();

            free(curveLine.p);
        }

        curvesDataset.close();
        curvesDataspace.close();
        curveDatasetType.close();
        propList.close();
    }

    geometryGroup.close();

    rootGroup.close();
    bundleFile.close();
}