template <class T>
bool readTBlock(const StormParams::Shape& blockOffset,
                const StormParams::Shape& blockShape,
                MultiArrayView<STORMPARAMS_N, T>& array,
                FileType type, const StormParams::Shape& shape, void* ptr)
{
    switch (type) {
        case TIFF:
        {
            ImageImportInfo* info = reinterpret_cast<ImageImportInfo*>(ptr);
            vigra_precondition(blockOffset[0] == 0 && blockOffset[1] == 0 &&
                               blockShape[0] == shape[0] && blockShape[1] == shape[1],
                "for Tiff images only complete Frames are currently supported as ROIs");
            vigra_precondition(array.size(2) == blockShape[2],
                "array shape and number of images in ROI for tiff file differ.");
            vigra_precondition(blockShape[2] <= info->numImages(),
                "block shape larger than number of frames in the image");
            for (int i = 0; i < blockShape[2]; ++i) {
                MultiArrayView<2, T> img = array.bindOuter(i);
                BasicImageView<T> v = makeBasicImageView(img);
                info->setImageIndex(i + blockOffset[2]);
                vigra::importImage(*info, destImage(v));
            }
            return true;
        }
#ifdef HDF5_FOUND
        case HDF5:
        {
            HDF5File* info = reinterpret_cast<HDF5File*>(ptr);
            info->readBlock("/data", blockOffset, blockShape, array);
            return true;
        }
#endif // HDF5_FOUND
        default:
            return false;
    }
}
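// Illustrative usage sketch for readTBlock's HDF5 branch (not part of the original
// sources): read a 10-frame block starting at frame 100 from the "/data" dataset.
// It assumes STORMPARAMS_N == 3, that StormParams::Shape is a 3-entry vigra shape
// type, and that HDF5File here is vigra::HDF5File; the file name is hypothetical.
#include <vigra/hdf5impex.hxx>
#include <vigra/multi_array.hxx>

void exampleReadHdf5Block(int width, int height, const StormParams::Shape& shape)
{
    vigra::HDF5File h5("movie.h5", vigra::HDF5File::Open);   // hypothetical input file
    StormParams::Shape offset(0, 0, 100);                    // start at frame 100
    StormParams::Shape blockShape(width, height, 10);        // read 10 full frames
    vigra::MultiArray<3, float> block(blockShape);
    readTBlock<float>(offset, blockShape, block, HDF5, shape, &h5);
}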
// TODO: 0 size plane matrices are not output due to a bug in an older version of HDF5
void RigidBody2DSim::writeBinaryState( HDF5File& output_file ) const
{
  // Output the configuration
  output_file.write( "q", m_state.q() );
  // Output the velocity
  output_file.write( "v", m_state.v() );
  // Output the mass
  {
    // Assemble the mass into a single flat vector like q, v, and r
    assert( unsigned(m_state.M().nonZeros()) == 3 * m_state.nbodies() );
    const VectorXs m{ Eigen::Map<const VectorXs>{ &m_state.M().data().value(0), m_state.q().size() } };
    output_file.write( "m", m );
  }
  // Output a per-body flag indicating which bodies are kinematically scripted
  {
    VectorXu fixed{ numBodies() };
    for( int body_index = 0; body_index < fixed.size(); ++body_index )
    {
      fixed( body_index ) = isKinematicallyScripted( body_index ) ? 1 : 0;
    }
    output_file.write( "kinematically_scripted", fixed );
  }
  // Output the simulated geometry
  RigidBody2DStateOutput::writeGeometryIndices( m_state.geometry(), m_state.geometryIndices(), "geometry", output_file );
  RigidBody2DStateOutput::writeGeometry( m_state.geometry(), "geometry", output_file );
  // Output the static geometry
  if( !m_state.planes().empty() )
  {
    RigidBody2DStateOutput::writeStaticPlanes( m_state.planes(), "static_geometry", output_file );
  }
  if( !m_state.planarPortals().empty() )
  {
    RigidBody2DStateOutput::writePlanarPortals( m_state.planarPortals(), "static_geometry", output_file );
  }
}
template <class T>
bool readTVolume(MultiArrayView<STORMPARAMS_N, T>& array, FileType type, void* ptr)
{
    switch (type) {
        case TIFF:
        {
            ImageImportInfo* info = reinterpret_cast<ImageImportInfo*>(ptr);
            vigra_precondition(array.size(2) == info->numImages(),
                "array shape and number of images in tiff file differ.");
            for (int i = 0; i < info->numImages(); ++i) {
                MultiArrayView<2, T> img = array.bindOuter(i);
                BasicImageView<T> v = makeBasicImageView(img);
                info->setImageIndex(i);
                vigra::importImage(*info, destImage(v));
            }
            return true;
        }
#ifdef HDF5_FOUND
        case HDF5:
        {
            HDF5File* info = reinterpret_cast<HDF5File*>(ptr);
            info->read("/data", array);
            return true;
        }
#endif // HDF5_FOUND
        default:
            return false;
    }
}
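// Illustrative usage sketch for readTVolume (not part of the original sources).
// It assumes STORMPARAMS_N == 3, that the FileType enum provides the TIFF tag used
// above, and that vigra is available; the file name "movie.tif" is hypothetical.
#include <vigra/impex.hxx>
#include <vigra/multi_array.hxx>

void exampleReadTiffStack()
{
    vigra::ImageImportInfo info("movie.tif");   // hypothetical input stack
    // Allocate one 2-D frame per image found in the TIFF file.
    vigra::MultiArray<3, float> data(
        vigra::Shape3(info.width(), info.height(), info.numImages()));
    // For TIFF input the ImageImportInfo is handed over as the opaque pointer.
    readTVolume<float>(data, TIFF, &info);
}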
void CAMCode<B,T,D>::Exec ()
{
  _timer.Start ();
  MomInit ();
  CalcE (_B, _Ua, _dn);

  Timer thyb;
  do
  {
    if (_time.Restart ())
    {
      _time.Print ();
      CheckPointLoad ();
      _time.SetRestart (false);
    }
    else
    {
      _sensmng.SaveAll (_time);
      _sensmng.SetNextOutput (_time);
    }

    CheckPointSave ();

    DBG_INFO ("iteration time : "<<thyb);
    _timer.Update ();
    DBG_INFO ("wallclock time : "<<_timer);

    thyb.Reset ();
    thyb.Start ();
    int nit = _time.HybIters () - _time.Iter ();
    Hyb ();
    thyb.Stop ();
    thyb /= nit;

    if (Mpi::Rank () == 0)
    {
      _itertime.Push (thyb.GetWallclockTime ());
      String fname = _sensmng.GetIOManager().GetFileName ("Stat");
      HDF5File file (fname, IOFile::suff);
      file.Write (_itertime, "Iter");
    }
  }
  while (_time.Iter () < _time.ItersMax ());

  _sensmng.SaveAll (_time);
  _sensmng.SetNextOutput (_time);
}
/// \brief Open a group if possible, otherwise create a new one
HDF5Group(const HDF5File &file, const std::string &groupname)
    : HDF5ID<HDF5Group>(-1) {
  if (!file) {
    return;
  }
  // Temporarily suppress HDF5 error printing while probing for the group.
  bool print_error = hdf5_error_printing(false);
  id_ = ::H5Gopen2(file.id(), groupname.c_str(), H5P_DEFAULT);
  hdf5_error_printing(print_error);
  if (!good()) {
    id_ = ::H5Gcreate2(file.id(), groupname.c_str(), H5P_DEFAULT, H5P_DEFAULT,
                       H5P_DEFAULT);
  }
}
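// For reference, a minimal sketch of the same open-or-create pattern written
// directly against the HDF5 C API, independent of the HDF5Group/HDF5File
// wrappers above (the function name is illustrative).
#include <hdf5.h>

static hid_t open_or_create_group(hid_t file_id, const char *name) {
  // Temporarily silence HDF5's automatic error printing while probing,
  // mirroring the hdf5_error_printing(false) call in the wrapper.
  H5E_auto2_t old_func = nullptr;
  void *old_data = nullptr;
  H5Eget_auto2(H5E_DEFAULT, &old_func, &old_data);
  H5Eset_auto2(H5E_DEFAULT, nullptr, nullptr);
  hid_t gid = ::H5Gopen2(file_id, name, H5P_DEFAULT);
  H5Eset_auto2(H5E_DEFAULT, old_func, old_data);
  if (gid < 0) {
    // The group could not be opened (most likely it does not exist): create it.
    gid = ::H5Gcreate2(file_id, name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  }
  return gid;  // negative on failure; the caller must H5Gclose() on success
}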
template<>
Recorder<SAVE>::~Recorder ()
{
    size_t n_samples = _states.size();
    if (n_samples == 0)
        return;

    HDF5File f ("simout.h5", OUT);
    NDData<double> times (n_samples);
    NDData<double> data  (n_samples, 3);

    for (size_t i = 0; i < n_samples; ++i) {
        times [i]  = _times[i];
        data (i,0) = _states[i][0];
        data (i,1) = _states[i][1];
        data (i,2) = _states[i][2];
    }

    f.Write (times, "times");
    f.Write (data,  "data");
    // f presumably closes the underlying HDF5 file in its destructor when it
    // goes out of scope here.
}
/*
 * HDF5NumAttributeTest.cpp
 *
 *  Created on: 20 Oct 2012
 *      Author: boubad
 */
#ifndef NO_GTEST
/////////////////////////////
#include <gtest/gtest.h>
/////////////////////////////
#include <hdf5store.h>
/////////////////////////////
template <typename T>
class HDF5NumAttributeTest : public ::testing::Test {
public:
    typedef std::vector<T> Vector;
public:
    HDF5NumAttributeTest() : m_filename("testresults/mytest.h5"), m_grpname("g_initial") {}
    virtual ~HDF5NumAttributeTest() {}
protected:
    T value_;
    std::string m_filename;
    std::string m_grpname;
};
typedef ::testing::Types<char, signed char, unsigned char, short, unsigned short,
        int, unsigned int, long, unsigned long, long long, unsigned long long> MyTypes;
TYPED_TEST_CASE(HDF5NumAttributeTest, MyTypes);
///////////////////////////////////////
using namespace statdata;
using namespace statdata::hdf5;
//////////////////////////////////////
TYPED_TEST(HDF5NumAttributeTest, ScalarAttribute) {
    TypeParam v = (TypeParam)68;
    boost::any vv(v);
    const std::type_info &tt = vv.type();
    HDF5File oFile;
    bool bRet = oFile.create(this->m_filename);
    ASSERT_TRUE(bRet);
    HDF5Group oGroup(&oFile, this->m_grpname, true);
    std::string name = std::string("att_") + std::string(tt.name()) + std::string("_scalar");
    HDF5Attribute att(&oGroup, name);
    bRet = att.write_scalar(v);
    ASSERT_TRUE(bRet);
    TypeParam actual;
    bRet = att.read_scalar(actual);
    ASSERT_TRUE(bRet);
    ASSERT_EQ(v, actual);
} /* ScalarAttribute */
TYPED_TEST(HDF5NumAttributeTest, VectorAttribute) {
    size_t n = 10;
    typename TestFixture::Vector values(n);
    for (size_t i = 0; i < n; ++i) {
        values[i] = (TypeParam)(68 + i);
    } // i
    TypeParam v = (TypeParam)68;
    boost::any vv(v);
    const std::type_info &tt = vv.type();
    HDF5File oFile;
    bool bRet = oFile.create(this->m_filename);
    ASSERT_TRUE(bRet);
    HDF5Group oGroup(&oFile, this->m_grpname, true);
    std::string name = std::string("att_") + std::string(tt.name()) + std::string("_vector");
    HDF5Attribute att(&oGroup, name);
    bRet = att.write_vector(values);
    ASSERT_TRUE(bRet);
    typename TestFixture::Vector actual;
    bRet = att.read_vector(actual);
    ASSERT_TRUE(bRet);
    ASSERT_EQ(n, actual.size());
    for (size_t i = 0; i < n; ++i) {
        TypeParam v1 = values[i];
        TypeParam v2 = actual[i];
        ASSERT_EQ(v1, v2);
    } // i
} /* VectorAttribute */
TYPED_TEST(HDF5NumAttributeTest, VectorDataset) {
    size_t n = 10;
    typename TestFixture::Vector values(n);
    for (size_t i = 0; i < n; ++i) {
        values[i] = (TypeParam)(68 + i);
    } // i
    TypeParam v = (TypeParam)68;
    boost::any vv(v);
    const std::type_info &tt = vv.type();
    HDF5File oFile;
    bool bRet = oFile.create(this->m_filename);
    ASSERT_TRUE(bRet);
    HDF5Group oGroup(&oFile, this->m_grpname, true);
    std::string name = std::string("dataset_") + std::string(tt.name()) + std::string("_vector");
    HDF5Dataset att(&oGroup, name);
    bRet = att.write_vector(values);
    ASSERT_TRUE(bRet);
    typename TestFixture::Vector actual;
    bRet = att.read_vector(actual);
    ASSERT_TRUE(bRet);
    ASSERT_EQ(n, actual.size());
    for (size_t i = 0; i < n; ++i) {
        TypeParam v1 = values[i];
        TypeParam v2 = actual[i];
        ASSERT_EQ(v1, v2);
    } // i
} /* VectorDataset */
TYPED_TEST(HDF5NumAttributeTest, ArrayDataset) {
    size_t nr = 5;
    size_t nc = 3;
    size_t n = (size_t)(nr * nc);
    typename TestFixture::Vector values(n);
    for (size_t i = 0; i < nr; ++i) {
        for (size_t j = 0; j < nc; ++j) {
            values[i * nc + j] = (TypeParam)(68 + i * nc + j);
        } // j
    } // i
    TypeParam v = (TypeParam)68;
    boost::any vv(v);
    const std::type_info &tt = vv.type();
    HDF5File oFile;
    bool bRet = oFile.create(this->m_filename);
    ASSERT_TRUE(bRet);
    HDF5Group oGroup(&oFile, this->m_grpname, true);
    std::string name = std::string("dataset_") + std::string(tt.name()) + std::string("_array");
    HDF5Dataset att(&oGroup, name);
    bRet = att.write_array(values, nr, nc);
    ASSERT_TRUE(bRet);
    typename TestFixture::Vector actual;
    size_t nra = 0;
    size_t nca = 0;
    bRet = att.read_array(actual, nra, nca);
    ASSERT_TRUE(bRet);
    ASSERT_EQ(nr, nra);
    ASSERT_EQ(nc, nca);
    ASSERT_EQ(n, actual.size());
    for (size_t i = 0; i < nr; ++i) {
        for (size_t j = 0; j < nc; ++j) {
            size_t k = i * nc + j;
            TypeParam v1 = values[k];
            TypeParam v2 = actual[k];
            ASSERT_EQ(v1, v2);
        } // j
    } // i
} /* ArrayDataset */
#endif // NO_GTEST
template <class T>
inline static bool write (const Matrix<T> A) {
    HDF5File nf (fname, WRITE);
    nf.Write (A, mname);
    return true;
}
void ImpactFrictionMap::exportConstraintForcesToBinaryFile( const VectorXs& q, const std::vector<std::unique_ptr<Constraint>>& constraints, const MatrixXXsc& contact_bases, const VectorXs& alpha, const VectorXs& beta, const scalar& dt, HDF5File& output_file )
{
  const unsigned ncons = constraints.size();
  assert( ncons == alpha.size() );
  assert( std::vector<std::unique_ptr<Constraint>>::size_type( ncons ) == constraints.size() );
  const unsigned ambient_space_dims{ static_cast<unsigned>( contact_bases.rows() ) };
  assert( ambient_space_dims == 2 || ambient_space_dims == 3 );
  assert( beta.size() == ncons * ( ambient_space_dims - 1 ) );

  // Write out the number of collisions
  output_file.writeScalar( "", "collision_count", ncons );

  // NB: Prior to version 1.8.7, HDF5 did not support zero sized dimensions.
  // Some versions of Ubuntu still ship old versions of HDF5, so work around it.
  // TODO: Remove once our servers are updated.
  if( ncons == 0 )
  {
    return;
  }

  // Write out the indices of all bodies involved
  {
    // Place all indices into a single matrix for output
    Matrix2Xic collision_indices{ 2, ncons };
    for( unsigned con = 0; con < ncons; ++con )
    {
      std::pair<int,int> indices;
      assert( constraints[con] != nullptr );
      getCollisionIndices( *constraints[con], indices );
      collision_indices.col( con ) << indices.first, indices.second;
    }
    // Output the indices
    output_file.writeMatrix( "", "collision_indices", collision_indices );
  }

  // Write out the world space contact points
  {
    // Place all contact points into a single matrix for output
    MatrixXXsc collision_points{ ambient_space_dims, ncons };
    for( unsigned con = 0; con < ncons; ++con )
    {
      VectorXs contact_point;
      assert( constraints[con] != nullptr );
      constraints[con]->getWorldSpaceContactPoint( q, contact_point );
      assert( contact_point.size() == ambient_space_dims );
      collision_points.col( con ) = contact_point;
    }
    // Output the points
    output_file.writeMatrix( "", "collision_points", collision_points );
  }

  // Write out the world space contact normals
  {
    // Place all contact normals into a single matrix for output
    MatrixXXsc collision_normals{ ambient_space_dims, ncons };
    for( unsigned con = 0; con < ncons; ++con )
    {
      collision_normals.col( con ) = contact_bases.col( ambient_space_dims * con );
    }
#ifndef NDEBUG
    for( unsigned con = 0; con < ncons; ++con )
    {
      assert( fabs( collision_normals.col( con ).norm() - 1.0 ) <= 1.0e-6 );
    }
#endif
    // Output the normals
    output_file.writeMatrix( "", "collision_normals", collision_normals );
  }

  // Write out the collision forces
  {
    // Place all contact forces into a single matrix for output
    MatrixXXsc collision_forces{ ambient_space_dims, ncons };
    for( unsigned con = 0; con < ncons; ++con )
    {
      // Contribution from the normal impulse
      collision_forces.col( con ) = alpha( con ) * contact_bases.col( ambient_space_dims * con );
      // Contribution from friction
      for( unsigned friction_sample = 0; friction_sample < ambient_space_dims - 1; ++friction_sample )
      {
        assert( ( ambient_space_dims - 1 ) * con + friction_sample < beta.size() );
        const scalar impulse{ beta( ( ambient_space_dims - 1 ) * con + friction_sample ) };
        const unsigned column_number{ ambient_space_dims * con + friction_sample + 1 };
        assert( column_number < contact_bases.cols() );
        assert( fabs( contact_bases.col( ambient_space_dims * con ).dot( contact_bases.col( column_number ) ) ) <= 1.0e-6 );
        collision_forces.col( con ) += impulse * contact_bases.col( column_number );
      }
    }
    // Output the forces
    output_file.writeMatrix( "", "collision_forces", collision_forces );
  }
}
HDF5DataSet(const HDF5File &file, const std::string &name,
            const HDF5DataType &type, const HDF5DataSpace &space)
    : HDF5ID<HDF5DataSet>(::H5Dcreate2(file.id(), name.c_str(), type.id(),
                                       space.id(), H5P_DEFAULT, H5P_DEFAULT,
                                       H5P_DEFAULT)) {
}

HDF5DataSet(const HDF5File &file, const std::string &name)
    : HDF5ID<HDF5DataSet>(::H5Dopen2(file.id(), name.c_str(), H5P_DEFAULT)) {
}

HDF5Attribute(const HDF5File &file, const std::string &name)
    : HDF5ID<HDF5Attribute>(::H5Aopen(file.id(), name.c_str(), H5P_DEFAULT)) {
}
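// A minimal end-to-end sketch of what the dataset constructors above wrap at the
// C-API level: create a dataspace and dataset, write a buffer, then reopen the
// dataset and read it back. The names "example.h5" and "/values" are illustrative.
#include <hdf5.h>

static bool roundtrip_example() {
  hid_t file = ::H5Fcreate("example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
  if (file < 0) return false;

  const hsize_t dims[1] = {4};
  double out[4] = {1.0, 2.0, 3.0, 4.0};
  hid_t space = ::H5Screate_simple(1, dims, nullptr);
  hid_t dset = ::H5Dcreate2(file, "/values", H5T_NATIVE_DOUBLE, space,
                            H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);   // cf. the create constructor
  ::H5Dwrite(dset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, out);
  ::H5Dclose(dset);
  ::H5Sclose(space);

  double in[4] = {0.0, 0.0, 0.0, 0.0};
  dset = ::H5Dopen2(file, "/values", H5P_DEFAULT);                    // cf. the open constructor
  ::H5Dread(dset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, in);
  ::H5Dclose(dset);
  ::H5Fclose(file);
  return in[3] == 4.0;
}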
bool commit(const HDF5File &file, const std::string &name) const {
  // H5Tcommit2 returns a negative value on failure and a non-negative value
  // (0) on success, so compare against zero instead of converting to bool.
  return ::H5Tcommit2(file.id(), name.c_str(), id_, H5P_DEFAULT, H5P_DEFAULT,
                      H5P_DEFAULT) >= 0;
}
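// For context, a small sketch of what committing a datatype does at the C-API
// level: build a compound type and commit it under a name so other objects in
// the file can reference it. The struct, path, and function name are illustrative.
#include <hdf5.h>

struct SamplePoint {
  double x;
  double y;
};

static bool commit_point_type(hid_t file_id) {
  hid_t tid = ::H5Tcreate(H5T_COMPOUND, sizeof(SamplePoint));
  ::H5Tinsert(tid, "x", HOFFSET(SamplePoint, x), H5T_NATIVE_DOUBLE);
  ::H5Tinsert(tid, "y", HOFFSET(SamplePoint, y), H5T_NATIVE_DOUBLE);
  // As in commit() above, H5Tcommit2 reports success via a non-negative return.
  herr_t status = ::H5Tcommit2(file_id, "/point_t", tid, H5P_DEFAULT,
                               H5P_DEFAULT, H5P_DEFAULT);
  ::H5Tclose(tid);
  return status >= 0;
}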