void HDF5IO::saveStdVector(const std::string& GroupName, const std::string& Name,
    const std::vector<std::complex<double> >& V){
    try{
        H5::CompType ComplexDataType = openCompType("complex");
        hsize_t Dim[1] = {hsize_t(V.size())};
        H5::DataSpace dataspace(1, Dim);
        H5::Group FG = getGroup( GroupName.c_str() );
        try{
            H5::Exception::dontPrint();
            H5::DataSet dataset = FG.openDataSet(Name.c_str());
            dataset.write(V.data(), ComplexDataType, dataspace);
        } catch( const H5::GroupIException not_found_error ){
            H5::DataSet dataset = FG.createDataSet(Name.c_str(), ComplexDataType, dataspace);
            dataset.write(V.data(), ComplexDataType);
        } catch( const H5::FileIException error ){
            error.printError();
        } catch( const H5::DataSetIException error ){
            error.printError();
        }
        FG.close();
    } catch( const H5::Exception err ){
        err.printError();
        RUNTIME_ERROR("HDF5IO::saveComplexStdVector. ");
    }
}
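// The "complex" compound type returned by openCompType("complex") is not shown in this
// snippet. As a minimal sketch (an assumption about how such a helper could be built,
// not the project's actual implementation), the HDF5 C++ API can declare a two-field
// compound that matches the {real, imag} layout of std::complex<double>:
H5::CompType makeComplexTypeSketch()
{
    // std::complex<double> is layout-compatible with double[2], so the real part sits
    // at offset 0 and the imaginary part at sizeof(double).
    H5::CompType complexType(sizeof(std::complex<double>));
    complexType.insertMember("r", 0, H5::PredType::NATIVE_DOUBLE);
    complexType.insertMember("i", sizeof(double), H5::PredType::NATIVE_DOUBLE);
    return complexType;
}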
void Bundle2::storeParameters(H5::H5File& file) const {
    H5::Group root = file.openGroup("/");

    H5::DataSpace scalar;
    H5::Attribute attr = root.createAttribute("version", H5::PredType::STD_U32LE, scalar);
    attr.write(H5::PredType::NATIVE_UINT, &version_);
    attr.close();

    unsigned char r2 = parameters_.reduce2 ? 1 : 0;
    attr = root.createAttribute("reduce2", H5::PredType::STD_U8LE, scalar);
    attr.write(H5::PredType::NATIVE_UCHAR, &r2);
    attr.close();

    attr = root.createAttribute("xROI", H5::PredType::STD_U32LE, scalar);
    attr.write(H5::PredType::NATIVE_UINT, &parameters_.xROI);
    attr.close();

    attr = root.createAttribute("yROI", H5::PredType::STD_U32LE, scalar);
    attr.write(H5::PredType::NATIVE_UINT, &parameters_.yROI);
    attr.close();

    scalar.close();
    root.close();
}
int Channel::read_state_from_hdf5(H5::H5File & H5StateFile, const string & rootStr){
    clear_data();

    // read waveform data
    waveform_ = h5array2vector<float>(&H5StateFile, rootStr + "/waveformLib", H5::PredType::NATIVE_INT16);

    // load state information
    H5::Group tmpGroup = H5StateFile.openGroup(rootStr);
    offset_ = h5element2element<float>("offset", &tmpGroup, H5::PredType::NATIVE_FLOAT);
    scale_ = h5element2element<float>("scale", &tmpGroup, H5::PredType::NATIVE_FLOAT);
    enabled_ = h5element2element<bool>("enabled", &tmpGroup, H5::PredType::NATIVE_UINT);
    trigDelay_ = h5element2element<int>("trigDelay", &tmpGroup, H5::PredType::NATIVE_INT);

    // Load the linklist data

    // First figure out how many banks there are from the attribute
    tmpGroup = H5StateFile.openGroup(rootStr + "/linkListData");
    USHORT numBanks;
    numBanks = h5element2element<USHORT>("numBanks", &tmpGroup, H5::PredType::NATIVE_UINT16);
    tmpGroup.close();

    std::ostringstream tmpStream;
    // Now loop over the number of banks found and add the bank
    for (USHORT bankct = 0; bankct < numBanks; bankct++){
        LLBank bank;
        tmpStream.str(rootStr);
        tmpStream << "/linkListData/bank" << bankct+1;
        FILE_LOG(logDEBUG) << "Reading State Bank: " << bankct+1 << " from hdf5";
        bank.read_state_from_hdf5(H5StateFile, tmpStream.str());
        // banks_.push_back(bank);
    }
    return 0;
}
void HDF5IO::saveMatrix(const std::string& GroupName, const std::string& Name,
    const ComplexMatrixType& M){
    try{
        H5::CompType ComplexDataType = this->openCompType("complex");
        hsize_t Dims[2] = {hsize_t(M.rows()), hsize_t(M.cols())};
        H5::DataSpace dataspace(2, Dims);
        H5::Group FG = getGroup( GroupName );
        try{
            H5::Exception::dontPrint();
            H5::DataSet dset = FG.openDataSet(Name.c_str());
            // dset.extend( Dims ); not working
            dset.write(M.data(), ComplexDataType);
        } catch ( const H5::GroupIException not_found_error ){
            H5::DataSet dset = FG.createDataSet(Name.c_str(), ComplexDataType, dataspace);
            dset.write(M.data(), ComplexDataType);
        } catch ( const H5::DataSetIException error ){
            error.printError();
            RUNTIME_ERROR("HDF5IO::saveComplexMatrix at ");
        }
        FG.close();
    } catch( const H5::Exception error ){
        error.printError();
        RUNTIME_ERROR("HDF5IO::saveComplexMatrix at ");
    }
}
// Stream saving mode
void Bundle2::openSaveStream(const boost::filesystem::path& fileName) {
    // Open file for streaming
    streamFile_ = new H5::H5File(fileName.string(), H5F_ACC_TRUNC);
    storeParameters(*streamFile_);

    // Creating basic file structure
    H5::Group group = streamFile_->createGroup("/POI");
    group.close();

    initFrameStream_();
    initGeometryStream_();
}
int HDF5IO::loadInt(const std::string& GroupName, const std::string& Name){
    try{
        H5::Group FG = getGroup( GroupName );
        H5::DataSet DataSet = FG.openDataSet( Name.c_str() );
        int x;
        DataSet.read(&x, H5::PredType::NATIVE_INT);
        FG.close();
        return x;
    }catch( H5::GroupIException not_found_error ){
        RUNTIME_ERROR("No dataset found in loadInt. ");
    }
}
size_t HDF5IO::loadUlong(const std::string& GroupName, const std::string& Name){
    try{
        H5::Group FG = getGroup( GroupName );
        H5::DataSet DataSet = FG.openDataSet( Name.c_str() );
        size_t x;
        DataSet.read(&x, H5::PredType::NATIVE_ULONG);
        FG.close();
        return x;
    }catch( H5::GroupIException not_found_error ){
        INFO("In Group - " << GroupName << ", and Name is " << Name);
        RUNTIME_ERROR("No dataset found in loadUlong. ");
    }
}
bool Bundle2::checkGeometry_(H5::H5File& file) const {
    bool found = false;
    H5::Group root = file.openGroup("/");
    const hsize_t maxObjs = root.getNumObjs();
    for(hsize_t obj = 0; obj < maxObjs; ++obj) {
        string objName = root.getObjnameByIdx(obj);
        if(objName == string("Geometry")) found = true;
    }
    root.close();
    return found;
}
void Bundle2::streamPOI(size_t frame) {
    H5::DataSpace scalar;

    H5::Group poiGroup = streamFile_->openGroup("/POI");
    for(size_t i = poiFirstFrame_; i <= frame; ++i) {
        const std::string frameGroupName = boost::str(boost::format("Frame %1$04d") % i);
        H5::Group frameGroup = poiGroup.createGroup(frameGroupName);

        hsize_t count = poi_[(ptrdiff_t)i - (ptrdiff_t)poiFirstFrame_].size();
        H5::Attribute attr = frameGroup.createAttribute("count", H5::PredType::STD_U64LE, scalar);
        attr.write(H5::PredType::NATIVE_HSIZE, &count);
        attr.close();

        for(size_t camera = 0; camera < poi_[(ptrdiff_t)i - (ptrdiff_t)poiFirstFrame_].size(); ++camera)
            poi_[(ptrdiff_t)i - (ptrdiff_t)poiFirstFrame_][camera].save(frameGroup, camera);

        frameGroup.close();
    }
    poiGroup.close();
    scalar.close();

    poi_.erase(poi_.begin(), poi_.begin() + (ptrdiff_t)frame - (ptrdiff_t)poiFirstFrame_ + 1);
    poiFirstFrame_ = frame + 1;
}
void HDF5IO::saveNumber(const std::string& GroupName, const std::string& Name,
    unsigned long x){
    H5::Group FG = getGroup( GroupName );
    try{
        H5::Exception::dontPrint();
        H5::DataSet dataset = FG.openDataSet( Name.c_str() );
        dataset.write(&x, H5::PredType::NATIVE_ULONG);
    } catch ( const H5::GroupIException not_found_error ){
        H5::DataSet dataset = FG.createDataSet( Name.c_str(), H5::PredType::NATIVE_ULONG, H5::DataSpace());
        dataset.write(&x, H5::PredType::NATIVE_ULONG);
    }
    FG.close();
}
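// The save routines above use exception-driven control flow (try to open, create on
// H5::GroupIException). A sketch of an alternative that probes the link first with the
// HDF5 C API; the helper name and the free-standing signature are assumptions for
// illustration, not part of this class:
void saveUlongSketch(H5::Group& FG, const std::string& Name, unsigned long x)
{
    H5::DataSet dataset;
    if (H5Lexists(FG.getId(), Name.c_str(), H5P_DEFAULT) > 0) {
        dataset = FG.openDataSet(Name.c_str());   // dataset already present, reuse it
    } else {
        dataset = FG.createDataSet(Name.c_str(), H5::PredType::NATIVE_ULONG, H5::DataSpace());
    }
    dataset.write(&x, H5::PredType::NATIVE_ULONG);
}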
ComplexType HDF5IO::loadComplex(const std::string& GroupName, const std::string& Name){
    try{
        H5::CompType ComplexDataType = this->openCompType("complex");
        H5::Group FG = getGroup( GroupName );
        H5::DataSet DataSet = FG.openDataSet(Name.c_str());
        ComplexType C;
        RealType RealImag[2];
        DataSet.read(RealImag, ComplexDataType);
        FG.close();
        return ComplexType(RealImag[0], RealImag[1]);
    }catch( H5::GroupIException not_found_error ){
        RUNTIME_ERROR("No dataset found in loadComplex. ");
    }
}
void HDF5IO::loadStdVector(const std::string& GroupName, const std::string& Name,
    std::vector<RealType>& V){
    try{
        H5::Group FG = getGroup( GroupName );
        H5::DataSet DataSet = FG.openDataSet(Name.c_str());
        H5::DataSpace DataSpace = DataSet.getSpace();
        if(DataSpace.getSimpleExtentNdims() != 1)
            throw(H5::DataSpaceIException("HDF5IO::loadRealVector()",
                "Unexpected multidimensional dataspace."));
        V.resize(DataSpace.getSimpleExtentNpoints());
        DataSet.read(V.data(), H5::PredType::NATIVE_DOUBLE);
        FG.close();
    } catch( const H5::Exception err ){
        RUNTIME_ERROR("HDF5IO::loadRealStdVector");
    }
}
void HDF5IO::loadVector(const std::string& GroupName, const std::string& Name,
    RealVectorType& V){
    H5::Group FG = getGroup( GroupName );
    H5::DataSet DataSet = FG.openDataSet(Name.c_str());
    H5::DataSpace DataSpace = DataSet.getSpace();
    if(DataSpace.getSimpleExtentNdims() != 1)
        throw(H5::DataSpaceIException("HDF5IO::loadRealVector()",
            "Unexpected multidimensional dataspace."));
    V.resize(DataSpace.getSimpleExtentNpoints());
    try{
        DataSet.read(V.data(), H5::PredType::NATIVE_DOUBLE);
    }catch( H5::GroupIException not_found_error ){
        RUNTIME_ERROR("No dataset found in loadRealVector. ");
    }
    FG.close();
}
void Bundle2::closeSaveStream() {
    H5::DataSpace scalar;

    // Saving remaining POI information
    H5::Group poiGroup = streamFile_->openGroup("/POI");
    H5::Attribute attr = poiGroup.createAttribute("count", H5::PredType::STD_U64LE, scalar);
    hsize_t count = poiFirstFrame_;
    attr.write(H5::PredType::NATIVE_HSIZE, &count);
    attr.close();
    poiGroup.close();
    scalar.close();

    // Closing HDF5 file
    streamFile_->close();
    delete streamFile_;
    streamFile_ = NULL;
}
void HDF5IO::saveNumber(const std::string& GroupName, const std::string& Name,
    ComplexType C){
    H5::CompType ComplexDataType = openCompType("complex");
    H5::Group FG = getGroup( GroupName );
    try{
        H5::Exception::dontPrint();
        H5::DataSet dataset = FG.openDataSet(Name.c_str());
        RealType RealImag[2] = {real(C), imag(C)};
        dataset.write(RealImag, ComplexDataType);
    } catch ( const H5::GroupIException not_found_error ){
        H5::DataSet dataset = FG.createDataSet(Name.c_str(), ComplexDataType, H5::DataSpace());
        RealType RealImag[2] = {real(C), imag(C)};
        dataset.write(RealImag, ComplexDataType);
    }
    FG.close();
}
void HDF5IO::loadStdVector(const std::string& GroupName, const std::string& Name,
    std::vector<ComplexType>& V){
    try{
        H5::CompType ComplexDataType = this->openCompType("complex");
        H5::Group FG = getGroup( GroupName );
        H5::DataSet DataSet = FG.openDataSet(Name.c_str());
        H5::DataSpace DataSpace = DataSet.getSpace();
        if(DataSpace.getSimpleExtentNdims() != 1)
            throw(H5::DataSpaceIException("HDF5IO::loadComplexVector()",
                "Unexpected multidimensional dataspace."));
        V.resize(DataSpace.getSimpleExtentNpoints());
        DataSet.read(V.data(), ComplexDataType);
        FG.close();
    } catch( const H5::Exception err ){
        RUNTIME_ERROR("HDF5IO::loadComplexStdVector");
    }
}
/* only for Eigen3 matrix/vector */
void HDF5IO::saveVector(const std::string& GroupName, const std::string& Name,
    const RealVectorType& V){
    hsize_t Dim[1] = {hsize_t(V.size())};
    H5::DataSpace dspace(1, Dim);
    H5::Group FG = getGroup( GroupName );
    try{
        H5::Exception::dontPrint();
        H5::DataSet DataSet = FG.openDataSet(Name.c_str());
        DataSet.write(V.data(), H5::PredType::NATIVE_DOUBLE, dspace);
    } catch ( const H5::GroupIException not_found_error ){
        H5::DataSet DataSet = FG.createDataSet(Name.c_str(), H5::PredType::NATIVE_DOUBLE, dspace);
        DataSet.write(V.data(), H5::PredType::NATIVE_DOUBLE);
    }
    FG.close();
}
void HDF5IO::loadMatrix(const std::string& GroupName, const std::string& Name,
    ComplexMatrixType& M){
    try{
        H5::CompType ComplexDataType = this->openCompType("complex");
        H5::Group FG = getGroup( GroupName );
        H5::DataSet DataSet = FG.openDataSet(Name.c_str());
        H5::DataSpace DataSpace = DataSet.getSpace();
        if(DataSpace.getSimpleExtentNdims() != 2)
            throw(H5::DataSpaceIException("HDF5IO::loadMatrix()",
                "A dataspace must be precisely two-dimensional."));
        hsize_t Dims[2];
        DataSpace.getSimpleExtentDims(Dims);
        M.resize(Dims[0], Dims[1]);
        DataSet.read(M.data(), ComplexDataType);
        FG.close();
    } catch( const H5::Exception err ){
        RUNTIME_ERROR("HDF5IO::loadComplexMatrix at ");
    }
}
void HDF5IO::saveMatrix(const std::string& GroupName, const std::string& Name,
    const RealMatrixType& M){
    try{
        hsize_t Dims[2] = {hsize_t(M.rows()), hsize_t(M.cols())};
        H5::DataSpace dataspace(2, Dims);
        H5::Group FG = getGroup( GroupName );
        try{
            H5::Exception::dontPrint();
            H5::DataSet DataSet = FG.openDataSet(Name.c_str());
            DataSet.write(M.data(), H5::PredType::NATIVE_DOUBLE, dataspace);
        } catch ( const H5::GroupIException not_found_error ){
            H5::DataSet DataSet = FG.createDataSet(Name.c_str(), H5::PredType::NATIVE_DOUBLE, dataspace);
            DataSet.write(M.data(), H5::PredType::NATIVE_DOUBLE);
        }
        FG.close();
    } catch( const H5::Exception err ){
        RUNTIME_ERROR("HDF5IO::saveRealMatrix");
    }
}
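// Note on memory layout: the dataspace above is declared as {rows, cols} (row-major in
// the file), while M.data() is dumped as a raw buffer. If RealMatrixType is an Eigen
// type with Eigen's default column-major storage, the on-disk element order will not
// match the declared row/column ordering; round-tripping through the matching load
// routine in this class is still self-consistent, but other readers would see a
// transposed/permuted layout. A row-major alias (a hypothetical typedef, not taken
// from this project) sidesteps the mismatch:
typedef Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor> RowMajorRealMatrix;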
void HDF5IO::saveStdVector(const std::string& GroupName, const std::string& Name,
    const std::vector<double>& V){
    try{
        hsize_t Dim[1] = {hsize_t(V.size())};
        H5::DataSpace dataspace(1, Dim);
        H5::Group FG = getGroup( GroupName );
        try{
            H5::Exception::dontPrint();
            H5::DataSet dataset = FG.openDataSet(Name.c_str());
            dataset.write(V.data(), H5::PredType::NATIVE_DOUBLE, dataspace);
        } catch ( const H5::GroupIException not_found_error ){
            H5::DataSet dataset = FG.createDataSet(Name.c_str(), H5::PredType::NATIVE_DOUBLE, dataspace);
            dataset.write(V.data(), H5::PredType::NATIVE_DOUBLE);
        }
        FG.close();
    } catch( const H5::Exception err ){
        RUNTIME_ERROR("HDF5IO::saveRealStdVector");
    }
}
int APSRack::save_bulk_state_file(string & stateFile){

    if (stateFile.length() == 0) {
        stateFile += "cache_APSRack.h5";
    }

    FILE_LOG(logDEBUG) << "Writing Bulk State File " << stateFile;
    H5::H5File H5StateFile(stateFile, H5F_ACC_TRUNC);

    // loop through available APS Units and save state
    for(unsigned int apsct = 0; apsct < APSs_.size(); apsct++) {
        string rootStr = "/";
        rootStr += APSs_[apsct].deviceSerial_;
        FILE_LOG(logDEBUG) << "Creating Group: " << rootStr;
        H5::Group tmpGroup = H5StateFile.createGroup(rootStr);
        tmpGroup.close();
        APSs_[apsct].write_state_to_hdf5(H5StateFile, rootStr);
    }
    // Close the file
    H5StateFile.close();
    return 0;
}
void Bundle2::loadParameters(H5::H5File& file) {
    H5::Group root = file.openGroup("/");

    // Checking version
    unsigned int fileVersion;
    H5::Attribute attr = root.openAttribute("version");
    attr.read(H5::PredType::NATIVE_UINT, &fileVersion);
    attr.close();

    if(fileVersion != version_) throw std::runtime_error("Incompatible bundle version!");

    // Reading number of cameras
    hsize_t count;
    H5::Group frame0Group = root.openGroup("POI/Frame 0000");
    attr = frame0Group.openAttribute("count");
    attr.read(H5::PredType::NATIVE_HSIZE, &count);
    attr.close();
    frame0Group.close();
    numCameras_ = count;

    // Reading parameters
    unsigned char r2;
    attr = root.openAttribute("reduce2");
    attr.read(H5::PredType::NATIVE_UCHAR, &r2);
    attr.close();
    parameters_.reduce2 = (r2 == 1);

    attr = root.openAttribute("xROI");
    attr.read(H5::PredType::NATIVE_UINT, &parameters_.xROI);
    attr.close();

    attr = root.openAttribute("yROI");
    attr.read(H5::PredType::NATIVE_UINT, &parameters_.yROI);
    attr.close();

    root.close();
}
int Channel::write_state_to_hdf5(H5::H5File & H5StateFile, const string & rootStr){

    // write waveform data
    FILE_LOG(logDEBUG) << "Writing Waveform: " << rootStr + "/waveformLib";
    vector2h5array<float>(waveform_, &H5StateFile, rootStr + "/waveformLib", rootStr + "/waveformLib", H5::PredType::NATIVE_FLOAT);

    // add channel state information to root group
    H5::Group tmpGroup = H5StateFile.openGroup(rootStr);
    element2h5attribute<float>("offset", offset_, &tmpGroup, H5::PredType::NATIVE_FLOAT);
    element2h5attribute<float>("scale", scale_, &tmpGroup, H5::PredType::NATIVE_FLOAT);
    element2h5attribute<bool>("enabled", enabled_, &tmpGroup, H5::PredType::NATIVE_UINT);
    element2h5attribute<int>("trigDelay", trigDelay_, &tmpGroup, H5::PredType::NATIVE_INT);
    tmpGroup.close();

    // Save the linklist data

    // save number of banks to rootStr + /linkListData attribute "numBanks"
//    USHORT numBanks;
//    numBanks = banks_.size(); //get number of banks from channel
//
//    // set attribute
//    FILE_LOG(logDEBUG) << "Creating Group: " << rootStr + "/linkListData";
//    tmpGroup = H5StateFile.createGroup(rootStr + "/linkListData");
//    element2h5attribute<USHORT>("numBanks", numBanks, &tmpGroup, H5::PredType::NATIVE_UINT16);
//    tmpGroup.close();
//
//    std::ostringstream tmpStream;
//    //Now loop over the number of banks found and add the bank
//    for (USHORT bankct = 0; bankct < numBanks; bankct++) {
//        tmpStream.str("");
//        tmpStream << rootStr << "/linkListData/bank" << bankct+1;
//        FILE_LOG(logDEBUG) << "Writing State Bank: " << bankct+1 << " from hdf5";
//        banks_[bankct].write_state_to_hdf5(H5StateFile, tmpStream.str());
//    }
    return 0;
}
void HDF5IO::saveVector(const std::string& GroupName, const std::string& Name,
    const ComplexVectorType& V){
    try{
        H5::CompType ComplexDataType = this->openCompType("complex");
        hsize_t Dim[1] = {hsize_t(V.size())};
        H5::DataSpace dspace(1, Dim);
        H5::Group FG = getGroup( GroupName );
        try{
            H5::Exception::dontPrint();
            H5::DataSet DataSet = FG.openDataSet(Name.c_str());
            DataSet.write(V.data(), ComplexDataType, dspace);
        } catch ( const H5::GroupIException not_found_error ){
            H5::DataSet DataSet = FG.createDataSet(Name.c_str(), ComplexDataType, dspace);
            DataSet.write(V.data(), ComplexDataType);
        }
        FG.close();
    } catch ( const H5::DataSetIException error ){
        error.printError();
        RUNTIME_ERROR("HDF5IO::saveComplexVector at ");
    }
}
PcaModel PcaModel::loadStatismoModel(path h5file, PcaModel::ModelType modelType)
{
    logging::Logger logger = Loggers->getLogger("shapemodels");
    PcaModel model;

    // Load the shape or color model from the .h5 file
    string h5GroupType;
    if (modelType == ModelType::SHAPE) {
        h5GroupType = "shape";
    } else if (modelType == ModelType::COLOR) {
        h5GroupType = "color";
    }

    H5::H5File h5Model;

    try {
        h5Model = H5::H5File(h5file.string(), H5F_ACC_RDONLY);
    }
    catch (H5::Exception& e) {
        string errorMessage = "Could not open HDF5 file: " + string(e.getCDetailMsg());
        logger.error(errorMessage);
        throw errorMessage;
    }

    // Load either the shape or texture mean
    string h5Group = "/" + h5GroupType + "/model";
    H5::Group modelReconstructive = h5Model.openGroup(h5Group);

    // Read the mean
    H5::DataSet dsMean = modelReconstructive.openDataSet("./mean");
    hsize_t dims[2];
    dsMean.getSpace().getSimpleExtentDims(dims, NULL); // dsMean.getSpace() leaks memory... maybe a hdf5 bug, maybe vlenReclaim(...) could be a fix. No idea.
    //H5::DataSpace dsp = dsMean.getSpace();
    //dsp.close();
    Loggers->getLogger("shapemodels").debug("Dimensions of the model mean: " + lexical_cast<string>(dims[0]));
    model.mean = Mat(1, dims[0], CV_32FC1); // Use a row-vector, because of faster memory access and I'm not sure the memory block is allocated contiguously if we have multiple rows.
    dsMean.read(model.mean.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
    model.mean = model.mean.t(); // Transpose it to a col-vector
    dsMean.close();

    // Read the eigenvalues
    dsMean = modelReconstructive.openDataSet("./pcaVariance");
    dsMean.getSpace().getSimpleExtentDims(dims, NULL);
    Loggers->getLogger("shapemodels").debug("Dimensions of the pcaVariance: " + lexical_cast<string>(dims[0]));
    model.eigenvalues = Mat(1, dims[0], CV_32FC1);
    dsMean.read(model.eigenvalues.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
    model.eigenvalues = model.eigenvalues.t();
    dsMean.close();

    // Read the PCA basis matrix
    dsMean = modelReconstructive.openDataSet("./pcaBasis");
    dsMean.getSpace().getSimpleExtentDims(dims, NULL);
    Loggers->getLogger("shapemodels").debug("Dimensions of the PCA basis matrix: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
    model.pcaBasis = Mat(dims[0], dims[1], CV_32FC1);
    dsMean.read(model.pcaBasis.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
    dsMean.close();

    modelReconstructive.close(); // close the model-group

    // Read the noise variance (not implemented)
    /*dsMean = modelReconstructive.openDataSet("./noiseVariance");
    float noiseVariance = 10.0f;
    dsMean.read(&noiseVariance, H5::PredType::NATIVE_FLOAT);
    dsMean.close(); */

    // Read the triangle-list
    string representerGroupName = "/" + h5GroupType + "/representer";
    H5::Group representerGroup = h5Model.openGroup(representerGroupName);
    dsMean = representerGroup.openDataSet("./reference-mesh/triangle-list");
    dsMean.getSpace().getSimpleExtentDims(dims, NULL);
    Loggers->getLogger("shapemodels").debug("Dimensions of the triangle-list: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
    Mat triangles(dims[0], dims[1], CV_32SC1);
    dsMean.read(triangles.ptr<int>(0), H5::PredType::NATIVE_INT32);
    dsMean.close();
    representerGroup.close();
    model.triangleList.resize(triangles.rows);
    for (unsigned int i = 0; i < model.triangleList.size(); ++i) {
        model.triangleList[i][0] = triangles.at<int>(i, 0);
        model.triangleList[i][1] = triangles.at<int>(i, 1);
        model.triangleList[i][2] = triangles.at<int>(i, 2);
    }

    // Load the landmarks mappings:
    // load the reference-mesh
    representerGroup = h5Model.openGroup(representerGroupName);
    dsMean = representerGroup.openDataSet("./reference-mesh/vertex-coordinates");
    dsMean.getSpace().getSimpleExtentDims(dims, NULL);
    Loggers->getLogger("shapemodels").debug("Dimensions of the reference-mesh vertex-coordinates matrix: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
    Mat referenceMesh(dims[0], dims[1], CV_32FC1);
    dsMean.read(referenceMesh.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
    dsMean.close();
    representerGroup.close();

    // convert to 3 vectors with the x, y and z coordinates for easy searching
    vector<float> refx(referenceMesh.col(0).clone());
    vector<float> refy(referenceMesh.col(1).clone());
    vector<float> refz(referenceMesh.col(2).clone());

    // load the landmarks info (mapping name <-> reference (x, y, z)-coords)
    H5::Group landmarksGroup = h5Model.openGroup("/metadata/landmarks");
    dsMean = landmarksGroup.openDataSet("./text");
    H5std_string outputString;
    Loggers->getLogger("shapemodels").debug("Reading landmark information from the model.");
    dsMean.read(outputString, dsMean.getStrType());
    dsMean.close();
    landmarksGroup.close();

    vector<string> landmarkLines;
    boost::split(landmarkLines, outputString, boost::is_any_of("\n"), boost::token_compress_on);
    for (const auto& l : landmarkLines) {
        if (l == "") {
            continue;
        }
        vector<string> line;
        boost::split(line, l, boost::is_any_of(" "), boost::token_compress_on);
        string name = line[0];
        int visibility = lexical_cast<int>(line[1]);
        float x = lexical_cast<float>(line[2]);
        float y = lexical_cast<float>(line[3]);
        float z = lexical_cast<float>(line[4]);
        // Find the x, y and z values in the reference
        const auto ivx = std::find(begin(refx), end(refx), x);
        const auto ivy = std::find(begin(refy), end(refy), y);
        const auto ivz = std::find(begin(refz), end(refz), z);
        // TODO Check for .end()!
        const auto vertexIdX = std::distance(begin(refx), ivx);
        const auto vertexIdY = std::distance(begin(refy), ivy);
        const auto vertexIdZ = std::distance(begin(refz), ivz);
        // assert vx=vy=vz
        // Hmm this is not perfect. If there's another vertex where 1 or 2 coords are the same, it fails.
        // We should do the search differently: Find _all_ the vertices that are equal, then take the one that has the right x, y and z.
        model.landmarkVertexMap.insert(make_pair(name, vertexIdX));
    }

    h5Model.close();

    return model;
}
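/* For reference, the HDF5 layout that loadStatismoModel() expects, reconstructed from
 * the reads above (the <shape|color> prefix is selected by the modelType argument):
 *
 *   /<shape|color>/model/mean
 *   /<shape|color>/model/pcaVariance
 *   /<shape|color>/model/pcaBasis
 *   /<shape|color>/representer/reference-mesh/triangle-list
 *   /<shape|color>/representer/reference-mesh/vertex-coordinates
 *   /metadata/landmarks/text        (one "name visibility x y z" line per landmark)
 */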
inline vtkPolyData* vtkStandardMeshRepresenter::LoadRef(const H5::Group& fg) const {

    statismo::MatrixType vertexMat;
    HDF5Utils::readMatrix(fg, "./points", vertexMat);

    typedef statismo::GenericEigenType<unsigned int>::MatrixType UIntMatrixType;
    UIntMatrixType cellsMat;
    HDF5Utils::readMatrixOfType<unsigned int>(fg, "./cells", cellsMat);

    // create the reference from this information
    vtkPolyData* ref = vtkPolyData::New();

    unsigned nVertices = vertexMat.cols();
    unsigned nCells = cellsMat.cols();

    vtkFloatArray* pcoords = vtkFloatArray::New();
    pcoords->SetNumberOfComponents(3);
    pcoords->SetNumberOfTuples(nVertices);
    for (unsigned i = 0; i < nVertices; i++) {
        pcoords->SetTuple3(i, vertexMat(0, i), vertexMat(1, i), vertexMat(2, i));
    }
    vtkPoints* points = vtkPoints::New();
    points->SetData(pcoords);

    ref->SetPoints(points);

    vtkCellArray* cell = vtkCellArray::New();
    unsigned cellDim = cellsMat.rows();
    for (unsigned i = 0; i < nCells; i++) {
        cell->InsertNextCell(cellDim);
        for (unsigned d = 0; d < cellDim; d++) {
            cell->InsertCellPoint(cellsMat(d, i));
        }
    }
    if (cellDim == 2) {
        ref->SetLines(cell);
    } else {
        ref->SetPolys(cell);
    }

    // read the point and cell data
    assert(ref->GetPointData() != 0);
    if (HDF5Utils::existsObjectWithName(fg, "pointData")) {
        H5::Group pdGroup = fg.openGroup("./pointData");

        if (HDF5Utils::existsObjectWithName(pdGroup, "scalars")) {
            ref->GetPointData()->SetScalars(GetAsDataArray(pdGroup, "scalars"));
        }
        if (HDF5Utils::existsObjectWithName(pdGroup, "vectors")) {
            ref->GetPointData()->SetVectors(GetAsDataArray(pdGroup, "vectors"));
        }
        if (HDF5Utils::existsObjectWithName(pdGroup, "normals")) {
            ref->GetPointData()->SetNormals(GetAsDataArray(pdGroup, "normals"));
        }
        pdGroup.close();
    }

    if (HDF5Utils::existsObjectWithName(fg, "cellData")) {
        H5::Group cdGroup = fg.openGroup("./cellData");
        assert(ref->GetCellData() != 0);

        if (HDF5Utils::existsObjectWithName(cdGroup, "scalars")) {
            ref->GetCellData()->SetScalars(GetAsDataArray(cdGroup, "scalars"));
        }
        if (HDF5Utils::existsObjectWithName(cdGroup, "vectors")) {
            ref->GetCellData()->SetVectors(GetAsDataArray(cdGroup, "vectors"));
        }
        if (HDF5Utils::existsObjectWithName(cdGroup, "normals")) {
            ref->GetCellData()->SetNormals(GetAsDataArray(cdGroup, "normals"));
        }
        cdGroup.close();
    }
    return ref;
}
void ossim_hdf5::iterateGroupForDatasetNames( H5::H5File* file,
                                              const std::string& groupName,
                                              std::vector<std::string>& datasetNames,
                                              ossim_uint32& recursedCount )
{
   if ( file && groupName.size() )
   {
      ++recursedCount;

      // std::cout << "iterateGroup: " << groupName << std::endl;

      H5::Group* group = new H5::Group( file->openGroup(groupName) );

      const hsize_t OBJ_COUNT = group->getNumObjs();

      for ( hsize_t i = 0; i < OBJ_COUNT; ++i )
      {
         std::string objName = group->getObjnameByIdx(i);
         if ( objName.size() )
         {
            char separator = '/';
            std::string combinedName;
            combine( groupName, objName, separator, combinedName );

            H5G_obj_t objType = group->getObjTypeByIdx(i);

#if 0
            std::cout << "combinedName: " << combinedName
                      << "\ngetObjnameByIdx[" << i << "]: " << objName
                      << "\ngetObjTypeByIdx[" << i << "]: " << objType
                      << std::endl;
#endif

            if ( objType == H5G_GROUP )
            {
               // Recursive call:
               if ( recursedCount < ossim_hdf5::MAX_RECURSION_LEVEL )
               {
                  ossim_hdf5::iterateGroupForDatasetNames(
                     file, combinedName, datasetNames, recursedCount );
               }
               else
               {
                  ossimNotify(ossimNotifyLevel_WARN)
                     << "ossim_hdf5::iterateGroupForDatasetNames WARNING!"
                     << "\nMax iterations reached!" << std::endl;
               }
            }
            else if ( objType == H5G_DATASET )
            {
               datasetNames.push_back( combinedName );
            }
            else
            {
               ossimNotify(ossimNotifyLevel_WARN)
                  << "ossim_hdf5::iterateGroupForDatasetNames WARNING!"
                  << "\nUnhandled object type: " << objType << std::endl;
            }
         }
      }

      group->close();
      delete group;
      group = 0;
      --recursedCount;

   } // Matches: if ( file )

} // End: void ossim_hdf5::iterateGroupForDatasetNames
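// Minimal usage sketch for the recursive walk above; the function name, file path and
// error handling are placeholders for illustration, not part of the original ossim source:
void listDatasetNamesSketch( const std::string& h5Path )
{
   H5::H5File file( h5Path, H5F_ACC_RDONLY );
   std::vector<std::string> datasetNames;
   ossim_uint32 recursionDepth = 0;
   ossim_hdf5::iterateGroupForDatasetNames( &file, "/", datasetNames, recursionDepth );
   for ( size_t i = 0; i < datasetNames.size(); ++i )
   {
      std::cout << datasetNames[i] << std::endl;
   }
   file.close();
}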
void ossim_hdf5::printIterative( H5::H5File* file,
                                 const std::string& groupName,
                                 const std::string& prefix,
                                 ossim_uint32& recursedCount,
                                 std::ostream& out )
{
   if ( file && groupName.size() )
   {
      ++recursedCount;

      H5::Group* group = new H5::Group( file->openGroup(groupName) );

      // Print attributes:
      const ossim_uint32 ATTRS_COUNT = group->getNumAttrs();
      for ( ossim_uint32 aIdx = 0; aIdx < ATTRS_COUNT; ++aIdx )
      {
         H5::Attribute attr( group->openAttribute( aIdx ) );
         ossim_hdf5::printAttribute( attr, prefix, out );
         attr.close();
      }

      const hsize_t OBJ_COUNT = group->getNumObjs();

      for ( hsize_t i = 0; i < OBJ_COUNT; ++i )
      {
         std::string objName = group->getObjnameByIdx(i);
         if ( objName.size() )
         {
            char separator = '/';
            std::string combinedName;
            combine( groupName, objName, separator, combinedName );

            separator = '.';
            std::string combinedPrefix;
            combine( prefix, objName, separator, combinedPrefix );

            H5G_obj_t objType = group->getObjTypeByIdx(i);

#if 0
            std::cout << "combinedName: " << combinedName
                      << "\ncombinedPrefix: " << combinedPrefix
                      << "\ngetObjnameByIdx[" << i << "]: " << objName
                      << "\ngetObjTypeByIdx[" << i << "]: " << objType
                      << std::endl;
#endif

            if ( objType == H5G_GROUP )
            {
               // Recursive call:
               if ( recursedCount < ossim_hdf5::MAX_RECURSION_LEVEL )
               {
                  ossim_hdf5::printIterative(
                     file, combinedName, combinedPrefix, recursedCount, out );
               }
               else
               {
                  ossimNotify(ossimNotifyLevel_WARN)
                     << "ossim_hdf5::printIterative WARNING!"
                     << "\nMax iterations reached!" << std::endl;
               }
            }
            else if ( objType == H5G_DATASET )
            {
               printObject( file, combinedName, combinedPrefix, out );
            }
            else
            {
               ossimNotify(ossimNotifyLevel_WARN)
                  << "ossim_hdf5::printIterative WARNING!"
                  << "\nUnhandled object type: " << objType << std::endl;
            }
         }
      }

      group->close();
      delete group;
      group = 0;
      --recursedCount;

   } // Matches: if ( file )

} // End: void ossim_hdf5::printIterative method.
void Bundle2::initGeometryStream_() {
    // Creating group Geometry
    H5::Group geometryGroup = streamFile_->createGroup("/Geometry");

    // Saving poses
    const hsize_t posesChunkDim[] = { 3, 12 };
    H5::DSetCreatPropList posesPropList;
    posesPropList.setLayout(H5D_CHUNKED);
    posesPropList.setChunk(2, posesChunkDim);
    posesPropList.setDeflate(9);

    const hsize_t posesMaxDim[] = { H5S_UNLIMITED, 12 };
    const hsize_t posesCurDim[] = { frames_.size(), 12 };
    H5::DataSpace posesDS(2, posesCurDim, posesMaxDim);

    H5::DataSet posesDataSet = geometryGroup.createDataSet("Poses", H5::PredType::IEEE_F64LE, posesDS, posesPropList);

    double* posesData = (double*)malloc(frames_.size()*12*sizeof(double));

    size_t i = 0;
    for(deque<Frame*>::const_iterator it = frames_.begin(); it != frames_.end(); it++) {
        posesData[i*12] = (*it)->pose()->t().x();
        posesData[i*12 + 1] = (*it)->pose()->t().y();
        posesData[i*12 + 2] = (*it)->pose()->t().z();

        core::Matrix<double> R = (*it)->pose()->R();
        posesData[i*12 + 3] = R[0][0];
        posesData[i*12 + 4] = R[1][0];
        posesData[i*12 + 5] = R[2][0];

        posesData[i*12 + 6] = R[0][1];
        posesData[i*12 + 7] = R[1][1];
        posesData[i*12 + 8] = R[2][1];

        posesData[i*12 + 9] = R[0][2];
        posesData[i*12 + 10] = R[1][2];
        posesData[i*12 + 11] = R[2][2];

        ++i;
    }

    posesDataSet.write((const void*)posesData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
    free((void*)posesData);

    posesDataSet.close();
    posesDS.close();

    // Creating points dataset
    const hsize_t pointsChunkDim[] = { 10, 3 };
    H5::DSetCreatPropList pointsPropList;
    pointsPropList.setLayout(H5D_CHUNKED);
    pointsPropList.setChunk(2, pointsChunkDim);
    pointsPropList.setDeflate(9);

    const hsize_t pointsMaxDim[] = { H5S_UNLIMITED, 3 };
    const hsize_t pointsCurDim[] = { 0, 3 };
    H5::DataSpace pointsDS(2, pointsCurDim, pointsMaxDim);

    H5::DataSet pointsDataSet = geometryGroup.createDataSet("Points", H5::PredType::IEEE_F64LE, pointsDS, pointsPropList);

    pointsDataSet.close();
    pointsDS.close();

    // Creating inliers dataset
    const hsize_t inliersChunkDim[] = { 3 };
    H5::DSetCreatPropList inliersPropList;
    inliersPropList.setLayout(H5D_CHUNKED);
    inliersPropList.setChunk(1, inliersChunkDim);
    inliersPropList.setDeflate(9);

    const hsize_t inliersMaxDim[] = { H5S_UNLIMITED };
    const hsize_t inliersCurDim[] = { frames_.size() };
    H5::DataSpace inliersDS(1, inliersCurDim, inliersMaxDim);

    H5::VarLenType inliersType(&H5::PredType::STD_U8LE);

    H5::DataSet inliersDataSet = geometryGroup.createDataSet("Inliers", inliersType, inliersDS, inliersPropList);

    inliersDataSet.close();
    inliersType.close();
    inliersDS.close();

    // Creating curve dataset
    const hsize_t chunkDim[] = { 5 };
    H5::DSetCreatPropList propList;
    propList.setLayout(H5D_CHUNKED);
    propList.setChunk(1, chunkDim);
    propList.setDeflate(9);

    H5::VarLenType curveDatasetType(&H5::PredType::STD_U64LE);

    hsize_t curvesDim[] = { 0 };
    hsize_t curvesMaxDim[] = { H5S_UNLIMITED };
    H5::DataSpace curvesDataspace(1, curvesDim, curvesMaxDim);

    H5::DataSet curvesDataset = geometryGroup.createDataSet("Curves", curveDatasetType, curvesDataspace, propList);

    curvesDataset.close();
    curvesDataspace.close();
    curveDatasetType.close();
    propList.close();

    geometryGroup.close();
}
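// The "Points" dataset above is created empty (0 x 3) but chunked with an unlimited
// first dimension, so rows can be appended later. A sketch of such an append (an assumed
// helper, not part of Bundle2), using DataSet::extend plus a hyperslab selection:
void appendPointsSketch(H5::H5File* file, const double* rowData, hsize_t nRows)
{
    H5::DataSet dataset = file->openDataSet("/Geometry/Points");

    // Current extent of the dataset.
    H5::DataSpace fileSpace = dataset.getSpace();
    hsize_t curDims[2];
    fileSpace.getSimpleExtentDims(curDims);

    // Grow the unlimited dimension, then re-fetch the dataspace.
    const hsize_t newDims[2] = { curDims[0] + nRows, 3 };
    dataset.extend(newDims);
    fileSpace = dataset.getSpace();

    // Write the new rows into the freshly extended region.
    const hsize_t offset[2] = { curDims[0], 0 };
    const hsize_t count[2] = { nRows, 3 };
    fileSpace.selectHyperslab(H5S_SELECT_SET, count, offset);
    H5::DataSpace memSpace(2, count);
    dataset.write(rowData, H5::PredType::NATIVE_DOUBLE, memSpace, fileSpace);

    dataset.close();
}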
// Bundle management
void Bundle2::save(const boost::filesystem::path& fileName) const {
    // Creating HDF5 file
    H5::H5File bundleFile(fileName.string(), H5F_ACC_TRUNC);
    storeParameters(bundleFile);

    H5::DataSpace scalar;

    // Saving POI
    H5::Group poiGroup = bundleFile.createGroup("/POI");

    H5::Attribute attr = poiGroup.createAttribute("count", H5::PredType::STD_U64LE, scalar);
    hsize_t count = poi_.size();
    attr.write(H5::PredType::NATIVE_HSIZE, &count);
    attr.close();

    for(size_t frame = 0; frame < poi_.size(); ++frame) {
        const std::string frameGroupName = boost::str(boost::format("Frame %1$04d") % frame);
        H5::Group frameGroup = poiGroup.createGroup(frameGroupName);

        count = poi_[frame].size();
        attr = frameGroup.createAttribute("count", H5::PredType::STD_U64LE, scalar);
        attr.write(H5::PredType::NATIVE_HSIZE, &count);
        attr.close();

        for(size_t camera = 0; camera < poi_[frame].size(); ++camera)
            poi_[frame][camera].save(frameGroup, camera);

        frameGroup.close();
    }

    poiGroup.close();

    // Saving key frames
    H5::Group bundleGroup = bundleFile.createGroup("/Bundle");
    H5::Group framesGroup = bundleGroup.createGroup("Frames");

    count = frames_.size();
    attr = framesGroup.createAttribute("count", H5::PredType::STD_U64LE, scalar);
    attr.write(H5::PredType::NATIVE_HSIZE, &count);
    attr.close();

    for(deque<Frame*>::const_iterator it = frames_.begin(); it != frames_.end(); it++) {
        (*it)->save(framesGroup);
    }

    framesGroup.close();

    // Saving tracks
    const hsize_t chunkDim[] = { 2, 1 };
    H5::DSetCreatPropList propList;
    propList.setLayout(H5D_CHUNKED);
    propList.setChunk(2, chunkDim);
    propList.setDeflate(9);

    H5::VarLenType tracksDatasetType(&H5::PredType::STD_U64LE);

    hsize_t tracksDim[] = { tracks_.size(), 2 };
    hsize_t tracksMaxDim[] = { H5S_UNLIMITED, 2 };
    H5::DataSpace tracksDataspace(2, tracksDim, tracksMaxDim);

    H5::DataSet tracksDataset = bundleGroup.createDataSet("Tracks", tracksDatasetType, tracksDataspace, propList);

    for(size_t i = 0; i < tracks_.size(); ++i)
        tracks_[i]->save(tracksDataset, i);

    tracksDataset.close();
    tracksDataspace.close();
    tracksDatasetType.close();
    propList.close();

    bundleGroup.close();

    scalar.close();
    bundleFile.close();
}
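// For completeness, a sketch of reading back the "count" attribute written on /POI by
// save(); it mirrors the pattern used in loadParameters() above (the function name and
// the standalone signature are illustrative assumptions, not part of Bundle2):
hsize_t readPoiCountSketch(const std::string& fileName)
{
    H5::H5File file(fileName, H5F_ACC_RDONLY);
    H5::Group poiGroup = file.openGroup("/POI");
    H5::Attribute attr = poiGroup.openAttribute("count");
    hsize_t count = 0;
    attr.read(H5::PredType::NATIVE_HSIZE, &count);
    attr.close();
    poiGroup.close();
    file.close();
    return count;
}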