Example #1
void hdfutil::WriteString(const H5::CommonFG& group, const std::string & dsname, const std::string & str) {
    hsize_t dims[] = {1};
    H5::DataSpace dataspace(1, dims);       // 1 string
    H5::StrType   strtype  (0, str.size()); // string length
    H5::DataSet dset = group.createDataSet(dsname, strtype, dataspace, CreatePropList());
    dset.write(&str[0], strtype);
}
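A minimal companion sketch (not part of the original hdfutil code, and assuming the usual H5Cpp.h and <string> includes) showing how a string written by WriteString could be read back: the stored fixed-length StrType carries the length, so the buffer can be sized from it before reading.

std::string ReadStringSketch(const H5::CommonFG& group, const std::string& dsname) {
    H5::DataSet dset    = group.openDataSet(dsname);
    H5::StrType strtype = dset.getStrType();      // fixed-length string type as stored
    std::string str(strtype.getSize(), '\0');     // allocate exactly that many bytes
    dset.read(&str[0], strtype);
    return str;
}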
Example #2
bool ossimHdfGridModel::setGridNodes( H5::H5File* h5File,
                                      const std::string& latDataSetName,
                                      const std::string& lonDataSetName,
                                      ossim_uint32 imageRows,
                                      ossim_uint32 imageCols )

{
    bool status = false;

    if ( h5File )
    {
        H5::DataSet latDataSet = h5File->openDataSet( latDataSetName );
        H5::DataSet lonDataSet = h5File->openDataSet( lonDataSetName );

        try
        {
            status = setGridNodes( &latDataSet, &lonDataSet, imageRows, imageCols );
        }
        catch ( const ossimException& e )
        {
            if ( traceDebug() )
            {
                ossimNotify(ossimNotifyLevel_WARN)
                        << "ossimHdfGridModel::setGridNodes caught exception\n"
                        << e.what() << std::endl;
            }
        }

        latDataSet.close();
        lonDataSet.close();
    }

    return status;
}
Example #3
void addrow( H5::DataSet& ds, const std::vector<double>& rowtowrite )
{
  //Get the space (since it may have grown in length since last time of course )
  H5::DataSpace origspace = ds.getSpace();

  //get the rank; this function assumes a rank-2 (rows x columns) dataset
  const int rank = origspace.getSimpleExtentNdims();
  if ( rank != 2 )
    throw std::runtime_error( "addrow() expects a rank-2 dataset" );

  //Get the current dimensions. (Runtime-sized arrays are not standard C++,
  //so fixed-size arrays of length 2 are used below.)
  hsize_t dims[2];
  origspace.getSimpleExtentDims( dims, NULL );

  //Want to ADD a row, so need to offset at row = nrows, and col = 0.
  hsize_t offset[2]     = { dims[0], 0 };
  hsize_t dims_toadd[2] = { 1, rowtowrite.size() }; //will write 1 row, ncols columns.

  //Compute the "new" size (extended by 1 row).
  hsize_t size[2] = { dims[0] + dims_toadd[0], rowtowrite.size() };

  //Do the extension.
  ds.extend( size );

  //Get the new (extended) space, and select the hyperslab to write the row to.
  origspace = ds.getSpace();
  origspace.selectHyperslab( H5S_SELECT_SET, dims_toadd, offset );

  //Memory dataspace describing the single row being written.
  H5::DataSpace toaddspace(rank, dims_toadd);

  ds.write(  rowtowrite.data(), H5::PredType::NATIVE_DOUBLE, toaddspace, origspace );

  //Can close toaddspace/origspace with no effect.
  //Can also close/open data set at the beginning of each time with no effect.
}
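addrow() relies on H5::DataSet::extend(), which only works for a chunked dataset whose row dimension has an unlimited maximum. Below is a minimal sketch (hypothetical, not from the original project) of creating such a dataset; the chunk size of 64 rows is an arbitrary tuning choice.

H5::DataSet make_extendable_table( H5::H5File& file, const std::string& name, hsize_t ncols )
{
  hsize_t dims[2]    = { 0, ncols };              // start with zero rows
  hsize_t maxdims[2] = { H5S_UNLIMITED, ncols };  // rows may grow without bound
  H5::DataSpace space( 2, dims, maxdims );

  H5::DSetCreatPropList plist;
  hsize_t chunk[2] = { 64, ncols };               // chunked layout is required for extend()
  plist.setChunk( 2, chunk );

  return file.createDataSet( name, H5::PredType::NATIVE_DOUBLE, space, plist );
}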
Example #4
File: utils.cpp Project: jbradt/mcopt
arma::Mat<uint16_t> readLUT(const std::string& path)
{
    H5::H5File file (path.c_str(), H5F_ACC_RDONLY);
    H5::DataSet ds = file.openDataSet("LUT");

    H5::DataSpace filespace = ds.getSpace();
    int ndims = filespace.getSimpleExtentNdims();
    assert(ndims == 2);
    hsize_t dims[2] = {1, 1};
    filespace.getSimpleExtentDims(dims);

    H5::DataSpace memspace (ndims, dims);

    // HDF5 stores the LUT row-major as dims[0] x dims[1]; read it into a matrix
    // allocated as dims[1] x dims[0] so that Armadillo's column-major layout
    // sees the transpose, then transpose in place below.
    arma::Mat<uint16_t> res (dims[1], dims[0]);

    ds.read(res.memptr(), H5::PredType::NATIVE_UINT16, memspace, filespace);

    filespace.close();
    memspace.close();
    ds.close();
    file.close();

    // NOTE: Armadillo stores data in column-major order, while HDF5 uses
    // row-major ordering, so the raw read above produced the transpose of the
    // stored LUT. The in-place transpose restores the dims[0] x dims[1] shape.
    arma::inplace_trans(res);
    return res;
}
Example #5
void compare_datasets(H5::DataSet const& ds1, H5::DataSet const& ds2)
{
    std::vector<hsize_t> dims(ds1.getSpace().getSimpleExtentNdims());
    ds1.getSpace().getSimpleExtentDims(dims.data());

    // compare a list of 2- or 3-component vectors, e.g., box edges
    BOOST_REQUIRE( dims.size() == 2 );
    BOOST_REQUIRE( dims[1] == 2 || dims[1] == 3 );
    if (dims[1] == 3) {
        std::vector<halmd::fixed_vector<double, 3> > array1, array2;
        h5xx::read_dataset(ds1, array1);
        h5xx::read_dataset(ds2, array2);
        BOOST_CHECK_EQUAL_COLLECTIONS(
            array1.begin(), array1.end()
          , array2.begin(), array2.end()
        );
    }
    else if (dims[1] == 2) {
        std::vector<halmd::fixed_vector<double, 2> > array1, array2;
        h5xx::read_dataset(ds1, array1);
        h5xx::read_dataset(ds2, array2);
        BOOST_CHECK_EQUAL_COLLECTIONS(
            array1.begin(), array1.end()
          , array2.begin(), array2.end()
        );
    }
}
Example #6
H5::DataSet CompartmentReportHDF5::_openDataset( const H5::H5File& file,
                                                 const uint32_t cellID )
{
    std::stringstream cellName;
    cellName << "a" << cellID;
    const std::string datasetName = "/" + cellName.str() + "/" + _reportName +
                                    "/" + dataDatasetName;
    H5::DataSet dataset;
    H5E_BEGIN_TRY
        dataset = file.openDataSet( datasetName );
    H5E_END_TRY
    if( !dataset.getId() )
    {
        LBTHROW(
            std::runtime_error( "ReportReaderHDF5: "
                              "Dataset " + datasetName + " not found "
                              "in file: " + file.getFileName( )));
    }

    if( dataset.getSpace().getSimpleExtentNdims() != 2 )
    {
        LBTHROW(
            std::runtime_error("Compartment_Report_HDF5_File_Reader: "
                             "Error, not 2 dimensional array on " +
                             datasetName));
    }

    return dataset;
}
Example #7
bool CompartmentReportHDF5::writeCompartments( const uint32_t gid,
                                               const uint16_ts& counts )
{
    lunchbox::ScopedWrite mutex( detail::_hdf5Lock );

    try
    {
        const size_t compCount = std::accumulate( counts.begin(),
                                                  counts.end(), size_t( 0 ));
        LBASSERT( !counts.empty( ));
        LBASSERTINFO( compCount > 0, gid );
        H5::DataSet dataset = _createDataset( gid, compCount );

        // The attribute is declared NATIVE_INT, so write an int-sized value.
        const int sections = int( counts.size( ));
        LBASSERT( sections > 0 );
        dataset.openAttribute( 1 ).write( H5::PredType::NATIVE_INT, &sections );

//        dataset.openAttribute( 2 ).write( H5::PredType::NATIVE_INT, &somas );
//        dataset.openAttribute( 3 ).write( H5::PredType::NATIVE_INT, &axons );
//        dataset.openAttribute( 4 ).write( H5::PredType::NATIVE_INT, &basals );
//        dataset.openAttribute( 5 ).write( H5::PredType::NATIVE_INT, &apics );

        boost::scoped_array< float > mapping( new float[compCount] );
        size_t i = 0;
        for( size_t j = 0; j < counts.size(); ++j )
            for( size_t k = 0; k < counts[j]; ++k )
                mapping[i++] = j;

        dataset.write( mapping.get(), H5::PredType::NATIVE_FLOAT );
        return true;
    }
    CATCH_HDF5ERRORS
    return false;
}
Example #8
void HDF5IO::saveMatrix(const std::string& GroupName, const std::string& Name,
    const ComplexMatrixType& M)
{
  try{
    H5::CompType ComplexDataType = this->openCompType("complex");
    hsize_t Dims[2] = {hsize_t(M.rows()),hsize_t(M.cols())};
    H5::DataSpace dataspace(2,Dims);
    H5::Group FG = getGroup( GroupName );
    try{
      H5::Exception::dontPrint();
      H5::DataSet dset = FG.openDataSet(Name.c_str());
      // dset.extend( Dims ); // extending the existing dataset here did not work
      dset.write(M.data(), ComplexDataType);
    } catch ( const H5::GroupIException& not_found_error ){
      H5::DataSet dset = FG.createDataSet(Name.c_str(), ComplexDataType, dataspace);
      dset.write(M.data(), ComplexDataType);
    } catch ( const H5::DataSetIException& error ){
      error.printError();
      RUNTIME_ERROR("HDF5IO::saveComplexMatrix at ");
    }
    FG.close();
  } catch( const H5::Exception& error ){
    error.printError();
    RUNTIME_ERROR("HDF5IO::saveComplexMatrix at ");
  }
}
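openCompType("complex") above returns a compound type registered elsewhere in HDF5IO. A minimal sketch (assumed, not the class's actual definition; the member names "r" and "i" are illustrative) of such a type: two doubles whose layout matches std::complex<double>, so M.data() can be written directly.

H5::CompType makeComplexTypeSketch()
{
  // Total size covers one real and one imaginary double.
  H5::CompType complexType( 2 * sizeof(double) );
  complexType.insertMember( "r", 0,              H5::PredType::NATIVE_DOUBLE );
  complexType.insertMember( "i", sizeof(double), H5::PredType::NATIVE_DOUBLE );
  return complexType;
}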
Example #9
void writeArray(H5::Group &group, const std::string &name,
                const std::string &value) {
  StrType dataType(0, value.length() + 1);
  DataSpace dataSpace = getDataSpace(1);
  H5::DataSet data = group.createDataSet(name, dataType, dataSpace);
  data.write(value, dataType);
}
Example #10
void HDF5IO::saveStdVector(const std::string& GroupName, const std::string& Name,
    const std::vector<std::complex<double> >& V)
{
  try{
    H5::CompType ComplexDataType = openCompType("complex");
    hsize_t Dim[1] = {hsize_t(V.size())};
    H5::DataSpace dataspace(1,Dim);
    H5::Group FG = getGroup( GroupName.c_str() );
    try{
      H5::Exception::dontPrint();
      H5::DataSet dataset = FG.openDataSet(Name.c_str());
      dataset.write(V.data(), ComplexDataType, dataspace);
    } catch( const H5::GroupIException& not_found_error ){
      H5::DataSet dataset = FG.createDataSet(Name.c_str(), ComplexDataType,
        dataspace);
      dataset.write(V.data(), ComplexDataType);
    } catch( const H5::FileIException& error){
      error.printError();
    } catch( const H5::DataSetIException& error){
      error.printError();
    }
    FG.close();
  } catch( const H5::Exception& err ){
    err.printError();
    RUNTIME_ERROR("HDF5IO::saveComplexStdVector. ");
  }
}
Example #11
void pyne::Material::_load_comp_protocol0(H5::H5File * db, std::string datapath, int row)
{
  H5::Group matgroup = (*db).openGroup(datapath);
  H5::DataSet nucset;

  double nucvalue;
  hsize_t matG = matgroup.getNumObjs();

  // Iterate over datasets in the group.
  for (hsize_t matg = 0; matg < matG; matg++)
  {
    std::string nuckey = matgroup.getObjnameByIdx(matg);
    nucset = matgroup.openDataSet(nuckey);
    nucvalue = h5wrap::get_array_index<double>(&nucset, row);

    if (nuckey == "Mass" || nuckey == "MASS" || nuckey == "mass")
      mass = nucvalue;
    else
      comp[pyne::nucname::zzaaam(nuckey)] = nucvalue;

    nucset.close();
  }

  // Set meta data
  name = datapath.substr(datapath.rfind("/")+1, datapath.length());
  atoms_per_mol = -1.0;
}
Example #12
	/**
	 * @param attribute_id
	 * @return String representing the name of the attribute specified by attribute_id
	 */
	std::string HDF5FileReader::getVariableAttributeName(long attribute_id)
	{
		H5::DataSet dataset = this->variableGroup->openDataSet(this->variableGroup->getObjnameByIdx(0));
		H5::Attribute attribute = dataset.openAttribute(attribute_id);
		std::string buffer = attribute.getName();
		cout << "Attribute Name: '" << buffer << "'" << endl;
		return buffer;
	}
Example #13
	/**
	 * Gets the number of variable attributes.
	 * @return The number of variable attributes in the opened file.
	 */
	int HDF5FileReader::getNumberOfVariableAttributes()
	{
		int numVAttributes;

		//get the first variable and see how many attributes it has. They should all be the same.
		H5::DataSet dataset = this->variableGroup->openDataSet(this->variableGroup->getObjnameByIdx(0));
		numVAttributes = dataset.getNumAttrs();
		return numVAttributes;
	}
Example #14
	/**
	 * @param variable
	 * @return
	 */
	long HDF5FileReader::getNumberOfRecords(const std::string& variable)
	{
		//std::cout << "reading " << variable << std::endl;
		//get variable number
		H5::DataSet dataset = this->variableGroup->openDataSet(variable);

		H5::DataSpace dataspace = dataset.getSpace();
		hsize_t count[1];
		dataspace.getSimpleExtentDims(count, NULL);
		return (long)count[0];
	}
Example #15
	/**
	 * @brief Returns a pointer to a std::vector<float> containing the values of the selected variable
	 *
	 * This allocates a new std::vector<float> on the heap.  Make sure you
	 * delete it when you are done using it, or you will have a memory leak.
	 *
	 * @param variable
	 * @return std::vector<float> containing the values of the selected variable.
	 */
	std::vector<float>* HDF5FileReader::getVariable(const std::string& variable)
	{
		std::vector<float>* variableData = new std::vector<float>();

		if (this->doesVariableExist(variable))
		{
			//std::cout << "reading " << variable << std::endl;
			//get variable number
//			long variableNum = this->getVariableID(variable);

			//std::cout << "variableNum for " << variable << ": " << variableNum << std::endl;
			//get dim sizes

			H5::Group group = this->current_file->openGroup("Variables");
			//cout << "variable: " << variable << ": " << counts[0] << endl;
			H5::DataSet * dataset = new H5::DataSet(group.openDataSet(variable));
			H5::DataSpace dataspace = dataset->getSpace();
			int rank = dataspace.getSimpleExtentNdims(); //should be 1
			hsize_t count[1];
			hsize_t offset[1] = {0};
			dataspace.getSimpleExtentDims(count, NULL);

			//std::cout << "count[0]: " << count[0] << std::endl;
			float * buffer = new float[count[0]];



			dataspace.selectHyperslab(H5S_SELECT_SET, count, offset);

			H5::DataSpace memspace( rank, count);
			memspace.selectHyperslab(H5S_SELECT_SET, count, offset);

			dataset->read(buffer, H5::PredType::NATIVE_FLOAT, memspace, dataspace);
			//std::cout << "after read" << std::endl;

			//add data to vector type, and delete original array
			variableData->reserve(count[0]);
			for (hsize_t i = 0; i < count[0]; i++)
			{
				variableData->push_back(buffer[i]);
			}
			//std::cout << "after adding to variableData vector" << std::endl;

			delete[] buffer;
			delete dataset;
			//std::cout << "finished reading " << variable << std::endl;
			//std::cout << "size of variable: " << variableData.size() << std::endl;
			//std::cout << "dimSizes[0]: " << dimSizes[0] << std::endl;

		}

		return variableData;
	}
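Hypothetical usage sketch (the reader object and the variable name "bx" are made up) illustrating the ownership rule from the doc comment above: the caller must delete the returned vector.

	std::vector<float>* bx = reader.getVariable("bx");   // caller owns the result
	if (!bx->empty())
	{
		// ... use *bx ...
	}
	delete bx;   // avoid the memory leak noted in the doc comment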
Example #16
int HDF5IO::loadInt(const std::string& GroupName, const std::string& Name)
{
    try{
      H5::Group FG = getGroup( GroupName );
      H5::DataSet DataSet = FG.openDataSet( Name.c_str());
      int x;
      DataSet.read(&x,H5::PredType::NATIVE_INT);
      FG.close();
      return x;
    }catch( const H5::GroupIException& not_found_error ){
      RUNTIME_ERROR("No dataset found in loadInt. ");
    }
}
Example #17
File: HDF5.hpp Project: rseal/HDF5R
   const std::vector<hsize_t> TableDims(){

      if(flags_ != hdf5::READ) 
         throw std::runtime_error("TableDims() is only valid in READ mode");

      H5::DataSet dSet = file_->openDataSet("T00000000");
      H5::DataSpace dSpace = dSet.getSpace();

      std::vector<hsize_t> dims(dSpace.getSimpleExtentNdims());
      dSpace.getSimpleExtentDims(&dims[0]);

      return dims;
   }
Example #18
void HDF5IO::saveNumber(const std::string& GroupName, const std::string& Name,
    unsigned long x)
{
    H5::Group FG = getGroup( GroupName );
    try{
      H5::Exception::dontPrint();
      H5::DataSet dataset = FG.openDataSet( Name.c_str() );
      dataset.write(&x, H5::PredType::NATIVE_ULONG);
    } catch ( const H5::GroupIException& not_found_error ){
      H5::DataSet dataset = FG.createDataSet( Name.c_str(), H5::PredType::NATIVE_ULONG, H5::DataSpace());
      dataset.write(&x, H5::PredType::NATIVE_ULONG);
    }
    FG.close();
}
Example #19
	/**
	 * @return
	 */
	std::vector<std::string> HDF5FileReader::getVariableAttributeNames()
	{
		std::vector<std::string> attributeNames;
		int numAttributes = this->getNumberOfVariableAttributes();
		H5::DataSet dataset = this->variableGroup->openDataSet(this->variableGroup->getObjnameByIdx(0));
		for (int i = 0; i < numAttributes; i++)
		{
			H5::Attribute attribute = dataset.openAttribute(i);
			attributeNames.push_back(attribute.getName());
		}
		return attributeNames;
	}
Example #20
ossimRefPtr<ossimImageGeometry> ossimH5Reader::getInternalImageGeometry()
{
   ossimRefPtr<ossimImageGeometry> geom = new ossimImageGeometry();

   if ( m_projection.valid() )
   {
      // Stored projection, currently shared by all entries.
      geom->setProjection( m_projection.get() );
   }
   else if ( isOpen() )
   {
      // Find the "Latitude" and "Longitude" datasets if present.
      std::string latName;
      std::string lonName;
      if ( getLatLonDatasetNames(  m_h5File, latName, lonName ) )
      {
         H5::DataSet latDataSet = m_h5File->openDataSet( latName );
         H5::DataSet lonDataSet = m_h5File->openDataSet( lonName );

         // Get the valid rectangle of the dataset.
         ossimIrect validRect = m_entries[m_currentEntry].getValidImageRect();

         // Try for a coarse projection first:
         ossimRefPtr<ossimProjection> proj =
            processCoarseGridProjection( latDataSet,
                                         lonDataSet,
                                         validRect );
         
         if ( proj.valid() == false )
         {
            proj = ossim_hdf5::getBilinearProjection( latDataSet, lonDataSet, validRect );
         }
         
         if ( proj.valid() )
         {
            // Store it for next time:
            m_projection = proj;
            
            // Set the geometry projection
            geom->setProjection( proj.get() ); 
         }
               
         latDataSet.close();
         lonDataSet.close();
      }
   }
 
   return geom;
}
Example #21
size_t HDF5IO::loadUlong(const std::string& GroupName, const std::string& Name)
{
  try{
    H5::Group FG = getGroup( GroupName );
    H5::DataSet DataSet = FG.openDataSet( Name.c_str() );
    size_t x;
    DataSet.read(&x, H5::PredType::NATIVE_ULONG);
    FG.close();
    return x;
  }catch( const H5::GroupIException& not_found_error ){
    INFO("In Group - " << GroupName << ", and Name is " << Name);
    RUNTIME_ERROR("No dataset found in loadUlong. ");
  }
}
Example #22
bool ossim_hdf5::getDatasetAttributeValue( H5::H5File* file,
                                           const std::string& objectName,
                                           const std::string& key,
                                           std::string& value )
{
   static const char MODULE[] = "ossim_hdf5::getDatasetAttributeValue";

   bool result = false;
   
   if (  file )
   {
      try // HDF5 library throws exceptions so wrap with try{}catch...
      {
         // Open the dataset:
         H5::DataSet dataset = file->openDataSet( objectName );
         
         // Look for the key:
         H5::Attribute attr = dataset.openAttribute( key );

         std::string  name = attr.getName();
         H5::DataType type = attr.getDataType();
         H5T_class_t  typeClass = attr.getTypeClass();
         
         if ( ( name == key ) && ( typeClass == H5T_STRING ) )
         {
            attr.read( type, value );
            result = true;
         }

         // Cleanup:
         attr.close();
         dataset.close();
      }
      catch( const H5::Exception& e )
      {
         ossimNotify(ossimNotifyLevel_WARN)
            << MODULE << " WARNING: Caught exception!\n"
            << e.getDetailMsg() << std::endl;
      }
      catch( ... )
      {
         ossimNotify(ossimNotifyLevel_WARN)
            << MODULE << " WARNING: Caught unknown exception!" << std::endl;
      }      
   }

   return result;
   
} // End: ossim_hdf5::getDatasetAttributeValue
Example #23
ComplexType HDF5IO::loadComplex(const std::string& GroupName, const std::string& Name)
{
  try{
    H5::CompType ComplexDataType = this->openCompType("complex");
    H5::Group FG = getGroup( GroupName );
    H5::DataSet DataSet = FG.openDataSet(Name.c_str());
    ComplexType C;
    RealType RealImag[2];
    DataSet.read(RealImag, ComplexDataType);
    FG.close();
    return ComplexType(RealImag[0],RealImag[1]);
  }catch( const H5::GroupIException& not_found_error ){
    RUNTIME_ERROR("No dataset found in loadComplex. ");
  }
}
Example #24
NDArray<T, Nd> NDArray<T,Nd>::ReadFromH5(const H5::DataSet& h5Dset) {
  H5::DataSpace dspace = h5Dset.getSpace();
  int ndim = dspace.getSimpleExtentNdims();
  if (ndim > static_cast<int>(Nd))
      throw std::range_error("Too many dimensions in H5 dataset for NDArray");
  // Runtime-sized arrays are not standard C++, so use a vector for the extents.
  std::vector<hsize_t> dimSize(ndim);
  dspace.getSimpleExtentDims(dimSize.data());
  // Trailing dimensions not present in the dataset default to 1.
  std::array<std::size_t, Nd> dimSizeArr;
  for (int i = 0; i < static_cast<int>(Nd); ++i)
    dimSizeArr[i] = (i < ndim) ? dimSize[i] : 1;
  NDArray<T, Nd> arr(dimSizeArr);
  // Read the data directly into the array's storage.
  H5::DataType h5DType = GetH5DataType<T>();
  h5Dset.read(arr.mData, h5DType);
  return arr;
}
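GetH5DataType<T>() is a helper from the surrounding project and is not shown. A minimal sketch (assumed; the name, signature, and coverage here are illustrative) of that kind of mapping from element type to an HDF5 native predefined type:

template <typename T> H5::DataType GetH5DataTypeSketch();
template <> H5::DataType GetH5DataTypeSketch<double>() { return H5::PredType::NATIVE_DOUBLE; }
template <> H5::DataType GetH5DataTypeSketch<float>()  { return H5::PredType::NATIVE_FLOAT;  }
template <> H5::DataType GetH5DataTypeSketch<int>()    { return H5::PredType::NATIVE_INT;    }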
Example #25
void ossimH5Reader::addImageDatasetEntries(const std::vector<std::string>& names)
{
   if ( m_h5File && names.size() )
   {
      std::vector<std::string>::const_iterator i = names.begin();
      while ( i != names.end() )
      {
         if ( ossim_hdf5::isExcludedDataset( *i ) == false )
         {
            H5::DataSet dataset = m_h5File->openDataSet( *i );

            // Get the class of the datatype that is used by the dataset.
            H5T_class_t type_class = dataset.getTypeClass();
            
            if ( ( type_class == H5T_INTEGER ) || ( type_class == H5T_FLOAT ) )
            {
               // Get the extents:
               std::vector<ossim_uint32> extents;
               ossim_hdf5::getExtents( &dataset, extents ); 

               if ( extents.size() >= 2 )
               {
                  if ( ( extents[0] > 1 ) && ( extents[1] > 1 ) )
                  {
                     ossimH5ImageDataset hids;
                     hids.initialize( dataset, *i );
                     m_entries.push_back( hids );
                  }     
               }
            }

            dataset.close();
         }
            
         ++i;
      }
   }
   
#if 0 /* Please leave for debug. (drb) */
   std::vector<ossimH5ImageDataset>::const_iterator i = m_entries.begin();
   while ( i != m_entries.end() )
   {
      std::cout << (*i) << endl;
      ++i;
   }
#endif
      
} // End: ossimH5Reader::addImageDatasetEntries
Example #26
void HDF5IO::loadStdVector(const std::string& GroupName, const std::string& Name,
    std::vector<RealType>& V)
{
  try{
    H5::Group FG = getGroup( GroupName );
    H5::DataSet DataSet = FG.openDataSet(Name.c_str());
    H5::DataSpace DataSpace = DataSet.getSpace();
    if(DataSpace.getSimpleExtentNdims() != 1)
      throw(H5::DataSpaceIException("HDF5IO::loadRealVector()","Unexpected multidimensional dataspace."));
    V.resize(DataSpace.getSimpleExtentNpoints());
    DataSet.read(V.data(),H5::PredType::NATIVE_DOUBLE);
    FG.close();
  } catch( const H5::Exception& err ){
    RUNTIME_ERROR("HDF5IO::loadRealStdVector");
  }
}
Example #27
File: HDF5.hpp Project: rseal/HDF5R
      void ReadTable(const int& tableNum, void* buf, 
            const H5::DataType& dType){

         std::string tNum = Num2Table(tableNum);
         dSet_ = file_->openDataSet(tNum);
         dSet_.read( buf, dType);
      }
Example #28
    inline void
    read_values(H5::DataSet& dataset, H5::DataSpace& data_space,
        dimension const& dimx, dimension const& dimy, dimension const& dimz,
        double* values)
    {
        using namespace H5;

        // Define the hyperslab for file based data.
        hsize_t data_offset[dimension::dim] = {
            dimx.offset_, dimy.offset_, dimz.offset_
        };
        hsize_t data_count[dimension::dim] = {
            dimx.count_, dimy.count_, dimz.count_
        };
        data_space.selectHyperslab(H5S_SELECT_SET, data_count, data_offset);

        // Memory dataspace.
        DataSpace mem_space (dimension::dim, data_count);

        // Define the hyperslab for data in memory.
        hsize_t mem_offset[dimension::dim] = { 0, 0, 0 };
        mem_space.selectHyperslab(H5S_SELECT_SET, data_count, mem_offset);

        // Read data to memory.
        dataset.read(values, PredType::NATIVE_DOUBLE, mem_space, data_space);
    }
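The dimension type used above comes from the surrounding code and is not shown. A minimal sketch (assumed; it lists only the members read_values() actually touches) of what it needs to provide:

    struct dimension
    {
        static hsize_t const dim = 3;   // rank assumed by read_values()
        hsize_t offset_;                // start index along this axis
        hsize_t count_;                 // number of elements to read along this axis
    };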
Example #29
File: chain.cpp Project: gregreen/bayestar
void TImgWriteBuffer::write(const std::string& fname, const std::string& group, const std::string& img) {
	H5::H5File* h5file = H5Utils::openFile(fname);
	H5::Group* h5group = H5Utils::openGroup(h5file, group);
	
	// Dataset properties: optimized for reading/writing entire buffer at once
	int rank = 3;
	hsize_t dim[3] = {length_, rect_.N_bins[0], rect_.N_bins[1]};
	hsize_t chunk_dim[3] = {length_, rect_.N_bins[0], rect_.N_bins[1]};
	if(length_ > 1000) {
		float div = ceil((float)length_ / 1000.);
		chunk_dim[0] = (int)ceil(length_ / div);
		std::cerr << "! Changing chunk length to " << chunk_dim[0] << " stars." << std::endl;
	}
	H5::DataSpace dspace(rank, &(dim[0]));
	H5::DSetCreatPropList plist;
	plist.setDeflate(9);	// gzip compression level
	plist.setChunk(rank, &(chunk_dim[0]));
	float fillvalue = 0;
	plist.setFillValue(H5::PredType::NATIVE_FLOAT, &fillvalue);
	
	H5::DataSet* dataset = new H5::DataSet(h5group->createDataSet(img, H5::PredType::NATIVE_FLOAT, dspace, plist));
	dataset->write(buf, H5::PredType::NATIVE_FLOAT);
	
	/*
	 *  Attributes
	 */
	
	hsize_t att_dim = 2;
	H5::DataSpace att_dspace(1, &att_dim);
	
	H5::PredType att_dtype = H5::PredType::NATIVE_UINT32;
	H5::Attribute att_N = dataset->createAttribute("nPix", att_dtype, att_dspace);
	att_N.write(att_dtype, &(rect_.N_bins));
	
	att_dtype = H5::PredType::NATIVE_DOUBLE;
	H5::Attribute att_min = dataset->createAttribute("min", att_dtype, att_dspace);
	att_min.write(att_dtype, &(rect_.min));
	
	att_dtype = H5::PredType::NATIVE_DOUBLE;
	H5::Attribute att_max = dataset->createAttribute("max", att_dtype, att_dspace);
	att_max.write(att_dtype, &(rect_.max));
	
	delete dataset;
	delete h5group;
	delete h5file;
}
Example #30
void CompartmentReportHDF5::_createMappingAttributes( H5::DataSet& dataset )
{
//    const std::string type =
//                         boost::lexical_cast< std::string >( _spec.type( ));
    const std::string type = "1";   // COMPARTMENT_REPORT
    detail::addStringAttribute( dataset, mappingAttributes[0], type );
    dataset.createAttribute( mappingAttributes[1], H5::PredType::NATIVE_INT,
                             H5S_SCALAR );
    dataset.createAttribute( mappingAttributes[2], H5::PredType::NATIVE_INT,
                             H5S_SCALAR );
    dataset.createAttribute( mappingAttributes[3], H5::PredType::NATIVE_INT,
                             H5S_SCALAR );
    dataset.createAttribute( mappingAttributes[4], H5::PredType::NATIVE_INT,
                             H5S_SCALAR );
    dataset.createAttribute( mappingAttributes[5], H5::PredType::NATIVE_INT,
                             H5S_SCALAR );
}
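detail::addStringAttribute() is a project helper that is not shown here. A minimal sketch (assumed, not the project's actual implementation) of attaching a variable-length string attribute such as the "type" value written above:

void addStringAttributeSketch( H5::DataSet& dataset, const std::string& name,
                               const std::string& value )
{
    // Variable-length C string type, written into a scalar attribute.
    H5::StrType strType( H5::PredType::C_S1, H5T_VARIABLE );
    H5::Attribute attr = dataset.createAttribute( name, strType, H5S_SCALAR );
    attr.write( strType, value );
}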