Example #1
0
//'@title Function for dummy read
//' 
//'@param chunkName the name of the chunk to be read back
//'@param filePath the path to the h5 file
//'@return int 0
// [[Rcpp::export]]
int h5DummyRead(std::string chunkName, std::string filePath)
{ 
  // Open the file in read-only mode (H5F_ACC_RDONLY)
  H5File *file = new H5File(filePath, H5F_ACC_RDONLY);
  // Opening the data set 
  DataSet dataset = file->openDataSet((H5std_string)chunkName);
  // Opening the data space
  DataSpace dataspace = dataset.getSpace();
  // Get the number of dimensions
  int ndim = dataspace.getSimpleExtentNdims();
  // Create a dimension object to be filled with the dimensions of the data set
  hsize_t dims[ndim];
  // Fill the dimension of the dataset
  dataspace.getSimpleExtentDims(dims, NULL);
  // Create the return data
  SEXP data;
  // Allocating a matrix of the right size and dimension
  data = PROTECT(Rf_allocMatrix(REALSXP, dims[0], dims[1]));
  // Filling the matrix with data from the dataspace
  dataset.read(REAL(data), PredType::NATIVE_DOUBLE, dataspace);
  UNPROTECT(1);
  
  dataset.close();
  file->close();
  
  return 0;
}
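A portable variant of the read above, offered only as a sketch (it assumes the same Rcpp and HDF5 C++ headers as h5DummyRead, plus <vector>; the name h5DummyRead2 is made up): the H5File lives on the stack so it is closed automatically, and std::vector replaces the non-standard variable-length array.
// [[Rcpp::export]]
int h5DummyRead2(std::string chunkName, std::string filePath)
{
  // Open the file in read-only mode; closed when 'file' goes out of scope
  H5File file(filePath, H5F_ACC_RDONLY);
  DataSet dataset = file.openDataSet((H5std_string)chunkName);
  DataSpace dataspace = dataset.getSpace();
  int ndim = dataspace.getSimpleExtentNdims();
  // Portable replacement for the variable-length array used above
  std::vector<hsize_t> dims(ndim);
  dataspace.getSimpleExtentDims(dims.data(), NULL);
  // Read the whole dataset into an R matrix and discard it, as above
  SEXP data = PROTECT(Rf_allocMatrix(REALSXP, dims[0], dims[1]));
  dataset.read(REAL(data), PredType::NATIVE_DOUBLE, dataspace);
  UNPROTECT(1);
  dataset.close();
  return 0;
}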
Example #2
0
    void extract_data(std::string const& datafilename, char const* name,
        double* values, dimension const& dimx, dimension const& dimy,
        dimension const& dimz)
    {
        try {
            using namespace H5;

            // Turn off the auto-printing when failure occurs
            Exception::dontPrint();

            H5File file(datafilename, H5F_ACC_RDONLY);
            DataSet dataset = file.openDataSet(name);
            DataSpace dataspace = dataset.getSpace();

            // Verify number of dimensions.
            HPX_ASSERT(dataspace.getSimpleExtentNdims() == dimension::dim);

            // Read the data subset.
            detail::read_values(dataset, dataspace, dimx, dimy, dimz, values);
        }
        catch (H5::Exception const& e) {
            HPX_THROW_EXCEPTION(hpx::no_success, "sheneos::extract_data",
                e.getDetailMsg());
        }
    }
Example #3
0
    void extract_data(std::string const& datafilename, char const* name,
        double* values, hsize_t offset, hsize_t count)
    {
        try {
            using namespace H5;

            // Turn off auto-printing on failure.
            Exception::dontPrint();

            // Try to open the file.
            H5File file(datafilename, H5F_ACC_RDONLY);

            // Try to open the specified dataset.
            DataSet dataset = file.openDataSet(name);
            DataSpace dataspace = dataset.getSpace();

            // Verify number of dimensions.
            HPX_ASSERT(dataspace.getSimpleExtentNdims() == 1);

            // Read the data subset.
            detail::read_values(dataset, dataspace, offset, count, values);
        }
        catch (H5::Exception const& e) {
            HPX_THROW_EXCEPTION(hpx::no_success, "sheneos::extract_data",
                e.getDetailMsg());
        }
    }
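detail::read_values is not shown in these snippets; for the 1-D case used here, a plausible implementation, assuming it selects the half-open range [offset, offset + count) in the file dataspace and reads it into a contiguous double buffer, could look like this sketch (requires "H5Cpp.h"):
namespace detail {
    void read_values(H5::DataSet& dataset, H5::DataSpace& dataspace,
        hsize_t offset, hsize_t count, double* values)
    {
        // Select the requested slab in the file dataspace ...
        hsize_t offsets[1] = { offset };
        hsize_t counts[1] = { count };
        dataspace.selectHyperslab(H5S_SELECT_SET, counts, offsets);

        // ... and read it into a matching contiguous memory dataspace.
        H5::DataSpace memspace(1, counts);
        dataset.read(values, H5::PredType::NATIVE_DOUBLE, memspace, dataspace);
    }
}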
Example #4
0
    void extract_data(std::string const& datafilename, double* values,
      std::size_t offset, std::size_t count)
    {
        try {
            using namespace H5;

            // Turn off the auto-printing when failure occurs
            Exception::dontPrint();

            H5File file(datafilename, H5F_ACC_RDONLY);
            DataSet dataset = file.openDataSet("sine"); // name of data to read
            DataSpace dataspace = dataset.getSpace();

            // number of dimensions
            int numdims = dataspace.getSimpleExtentNdims();

            if (numdims != 1)
            {
                HPX_THROW_EXCEPTION(hpx::no_success, "extract_data",
                    "number of dimensions was not 1");
            }

            // Get the dimension size of each dimension in the dataspace.
            hsize_t dims[1];
            dataspace.getSimpleExtentDims(dims, nullptr);

            read_values(dataset, dataspace, offset, count, values);
        }
        catch (H5::Exception const& e) {
            HPX_THROW_EXCEPTION(hpx::no_success, "extract_data",
                e.getDetailMsg());
        }
    }
Example #5
0
// Function to return a selected part of a data frame as a list
List ch5ChunkSel(string chunkName, CharacterVector selCols, string filePath)
{ 
  // Open the file in read-only mode (H5F_ACC_RDONLY)
  H5File *file = new H5File(filePath, H5F_ACC_RDONLY);
  // Opening the data set 
  DataSet dataset = file->openDataSet((H5std_string)chunkName);
  // Opening the data space
  DataSpace dataspace = dataset.getSpace();
  // Get the number of dimensions
  int ndim = dataspace.getSimpleExtentNdims();
  // Create a dimension object to be filled with the dimensions of the data set
  hsize_t dims[ndim];
  // Fill the dimension of the dataset
  dataspace.getSimpleExtentDims(dims, NULL);
  // Create the return data
  SEXP data;
  // Allocating a matrix of the right size and dimension
  data = PROTECT(Rf_allocMatrix(REALSXP, dims[0], dims[1]));
  // Filling the matrix with data from the dataspace
  dataset.read(REAL(data), PredType::NATIVE_DOUBLE, dataspace);
  UNPROTECT(1);
  // converting the R object to a numeric matrix
  NumericMatrix M = as<NumericMatrix>(data);
  CharacterVector colNames = ch5ReadCharVector("ColumnNames", filePath);
  CharacterVector colClasses = ch5ReadCharVector("ColumnClasses", filePath);
  
  // Create the output
  List DF;
  string colName;
  string colClass;
  NumericVector vec;
  CharacterVector levels;
  int n = selCols.size();
  IntegerVector sel(n);
  int selN;
  // First we need to find which of the columns has been selected
  sel = match(selCols, colNames);
  
  for(int i = 0; i < n; i++)
  {
    colName = selCols[i];
    selN = sel[i] - 1;
    colClass = colClasses[selN];
    if(colClass != "factor")
    {
      DF[colName] = M(_, selN); 
    }else{
      vec = M(_, selN);
      levels = (CharacterVector)ch5ReadFactor(colName, filePath);
      DF[colName] = cCreateFactor(vec, levels);
    }
    
  }
  
  dataset.close();
  file->close();
  
  return DF;
}
Example #6
0
 index_system (DataSpace const & ds, bool is_complex) { 
  int rf = ds.getSimpleExtentNdims();
  if ( rf != rank_full  + (is_complex ? 1 : 0) ) TRIQS_RUNTIME_ERROR <<  "H5 : dimension error";
  //int ndims = ds.getSimpleExtentDims( &lens_[0], NULL);
  ds.getSimpleExtentDims( &lens_[0], NULL);
  for (size_t i =0; i<rank; ++i) { dims[i] = lens_[i]; stri_[i] = 1; off_[i]= 0; }
  total_lens_=dims;
  mydomain = domain_type (dims);
 }
Example #7
0
//'@title Legacy function to return a data frame chunk as a list
//'
//'@description Experimental function; not intended for general use
//'
//'@param chunkName the name of the chunk to be read
//'@param filePath the path to the h5 file
//'@return List of the data frame chunk
// [[Rcpp::export]]
SEXP h5ReadDoubleMat3(std::string chunkName, std::string filePath)
{ 
  // Open the file in read-only mode (H5F_ACC_RDONLY)
  H5File *file = new H5File(filePath, H5F_ACC_RDONLY);
  // Opening the data set 
  DataSet dataset = file->openDataSet((H5std_string)chunkName);
  // Opening the data space
  DataSpace dataspace = dataset.getSpace();
  // Get the number of dimensions
  int ndim = dataspace.getSimpleExtentNdims();
  // Create a dimension object to be filled with the dimensions of the data set
  hsize_t dims[ndim];
  // Fill the dimension of the dataset
  dataspace.getSimpleExtentDims(dims, NULL);
  // Create the return buffer: a flat, zero-initialised array holding the whole
  // chunk; element (i, j) of the intended buf[dims[1]][dims[0]] layout is
  // stored at buf[i * dims[0] + j]
  double *buf = (double*) calloc(dims[1] * dims[0], sizeof(double));
  // Fill the buffer with data from the dataspace
  dataset.read(buf, PredType::NATIVE_DOUBLE, dataspace);
  // Append the contents to a list
  List out;
  NumericVector vec(dims[0]);
  NumericMatrix M(dims[0], dims[1]);
  CharacterVector colNames = ch5ReadCharVector("ColumnNames", filePath);
  CharacterVector colClasses = ch5ReadCharVector("ColumnClasses", filePath);
  string colName;
  for(int i = 0; i < dims[1]; i++)
  {
    NumericVector vec(dims[0]);
    for(int j = 0; j < dims[0]; j++)
    {
      M(j,i) = buf[i * dims[0] + j];
      vec(j) = buf[i * dims[0] + j];
    }
    colName = colNames[i];
    if(colClasses[i] == "factor")
    {
      CharacterVector levels;
      levels = h5ReadFactor(colName, filePath);
      IntegerVector fact(vec.size());
      fact = cCreateFactor(vec, levels);
      out[colName] = fact;
    }else{
      out[colName] = vec;
    }
    
  }
  free(buf);
  
  dataset.close();
  file->close();
  
  return wrap(out);
}
Example #8
0
void read_feature_size(H5File h5f, Size &size_out, const char *name)
{
    DataSet dataset = h5f.openDataSet(name);
    DataSpace dspace = dataset.getSpace();
    assert (dspace.getSimpleExtentNdims() == 2);
    hsize_t dims[2];
    dspace.getSimpleExtentDims(dims);
    size_out.height = dims[0];
    size_out.width = dims[1];
}
Example #9
0
//'@title Legacy function to return a data frame chunk as a list
//'
//'@description Experimental function; not intended for general use
//'
//'@param chunkName the name of the chunk to be read
//'@param filePath the path to the h5 file
//'@return List of the data frame chunk
// [[Rcpp::export]]
SEXP h5ReadDoubleMat2(std::string chunkName, std::string filePath)
{ 
  // Open the file in read-only mode (H5F_ACC_RDONLY)
  H5File *file = new H5File(filePath, H5F_ACC_RDONLY);
  // Opening the data set 
  DataSet dataset = file->openDataSet((H5std_string)chunkName);
  // Opening the data space
  DataSpace dataspace = dataset.getSpace();
  // Get the number of dimensions
  int ndim = dataspace.getSimpleExtentNdims();
  // Create a dimension object to be filled with the dimensions of the data set
  hsize_t dims[ndim];
  // Fill the dimension of the dataset
  dataspace.getSimpleExtentDims(dims, NULL);
  // Create the return data
  SEXP data;
  // Allocating a matrix of the right size and dimension
  data = PROTECT(Rf_allocMatrix(REALSXP, dims[0], dims[1]));
  // Filling the matrix with data from the dataspace
  dataset.read(REAL(data), PredType::NATIVE_DOUBLE, dataspace);
  UNPROTECT(1);
  // converting the R object to a numeric matrix
  NumericMatrix M = as<NumericMatrix>(data);
  List out;
  NumericVector vec(dims[0]);
  CharacterVector colNames = ch5ReadCharVector("ColumnNames", filePath);
  CharacterVector colClasses = ch5ReadCharVector("ColumnClasses", filePath);
  string colName;
  for(int i = 0; i < dims[1]; i++)
  {
    NumericVector vec(dims[0]);
    for(int j = 0; j < dims[0]; j++)
    {
      vec(j) = M(j,i);
    }
    colName = colNames[i];
    if(colClasses[i] == "factor")
    {
      CharacterVector levels;
      levels = ch5ReadFactor(colName, filePath);
      IntegerVector fact(vec.size());
      fact = cCreateFactor(vec, levels);
      out[colName] = fact;
    }else{
      out[colName] = vec;
    }
    
  }
  dataset.close();
  file->close();
  // Returning the data
  return wrap(out);
}
Example #10
0
 h5_read (h5::group_or_file f, std::string const & name,  S & A) {
  if (!f.exists(name))  TRIQS_RUNTIME_ERROR << "no such dataset : "<<name <<" in file ";
  try {
   DataSet ds = f->openDataSet( name.c_str() );
   DataSpace dataspace = ds.getSpace();
   int rank = dataspace.getSimpleExtentNdims();
   if (rank != 0) TRIQS_RUNTIME_ERROR << "triqs::array::h5::read. Rank mismatch : expecting a scalar (rank =0)"
    <<" while the array stored in the hdf5 file has rank = "<<rank;
   ds.read( (void *)(&A), data_type_mem_scalar(A), DataSpace() , DataSpace() );
  }
  TRIQS_ARRAYS_H5_CATCH_EXCEPTION;
 }
Example #11
0
 /**
  * \brief Read a string from an hdf5 file
  * \param f The h5 file or group of type H5::H5File or H5::Group
  * \param name The name of the hdf5 array in the file/group where the stack will be stored
  * \param value The string to fill
  * \exception The HDF5 exceptions will be caught and rethrown as TRIQS_RUNTIME_ERROR (with a full stacktrace, cf triqs doc).
  */
 inline void h5_read (h5::group_or_file f, std::string const & name, std::string & value) {
  if (!f.exists(name))  TRIQS_RUNTIME_ERROR << "no such dataset : "<<name <<" in file ";
  try {
   DataSet ds = f->openDataSet( name.c_str() );
   DataSpace dataspace = ds.getSpace();
   int rank = dataspace.getSimpleExtentNdims();
   if (rank != 0) TRIQS_RUNTIME_ERROR << "Reading a string and got rank !=0";
   size_t size = ds.getStorageSize();
   StrType strdatatype(PredType::C_S1, size);
   std::vector<char> buf(size+1, 0x00);
   ds.read( (void *)(&buf[0]), strdatatype, DataSpace(), DataSpace() );
   value = ""; value.append( &(buf.front()) );
  }
  TRIQS_ARRAYS_H5_CATCH_EXCEPTION;
 }
Example #12
0
void Weather::load(const std::string& name)
{
	std::cout << "Loading " << name << std::endl;
	H5File file(name, H5F_ACC_RDONLY);
	DataSet dataset = file.openDataSet("weather_data");
	std::cout << "Number of attributes: " << dataset.getNumAttrs() << std::endl;
	dataset.openAttribute("resolution").read(PredType::NATIVE_UINT, &resolution);
	//float bounds[4];
	dataset.openAttribute("bounds").read(PredType::NATIVE_DOUBLE, &bounds);

	std::cout << "Resolution: " << resolution << std::endl;
	std::cout << "Bounds: " << bounds.minx << "," << bounds.miny << "," << bounds.maxx << "," << bounds.maxy << std::endl;
	DataSpace ds = dataset.getSpace();
	int dim = ds.getSimpleExtentNdims();
	std::cout << "Dimensions: " << dim << std::endl;
	hsize_t dims_out[3];
	ds.getSimpleExtentDims(dims_out, NULL);
	std::cout << "Size: " << dims_out[0] << "," << dims_out[1] << "," << dims_out[2] << std::endl;
	dimX = dims_out[1];
	dimY = dims_out[2];
	numScenarios = dims_out[0];
	std::cout << "Size: " << dims_out[0] * dims_out[1] * dims_out[2] << std::endl;
	std::cout << "Dataset typeclass: " << dataset.getTypeClass() << "," << H5T_COMPOUND << std::endl;
	std::cout << "Dataset size: " << dataset.getInMemDataSize() << "," << H5T_COMPOUND << std::endl;


	CompType mtype2(sizeof(WeatherData));
	mtype2.insertMember("wind_xcomp", HOFFSET(WeatherData, wind_xcomp), PredType::NATIVE_FLOAT);
	mtype2.insertMember("wind_ycomp", HOFFSET(WeatherData, wind_ycomp), PredType::NATIVE_FLOAT);
	mtype2.insertMember("hs", HOFFSET(WeatherData, hs), PredType::NATIVE_FLOAT);
	mtype2.insertMember("light", HOFFSET(WeatherData, light), PredType::NATIVE_CHAR);
	//WeatherData wd[106938134];

	try {
		weatherData = (WeatherData *)malloc(ds.getSimpleExtentNpoints() * sizeof(WeatherData));
		dataset.read(weatherData, mtype2);
		std::cout << "Finished" << std::endl;


		//size_t ix = i1*dims_out[1] * dims_out[2] + i2 * dims_out[2] + i3;
		//printf("%f %f %f %d\n", wd[ix].wind_xcomp, wd[ix].wind_ycomp, wd[ix].hs, wd[ix].light);
	}
	catch (H5::Exception& e)
	{
		std::cout << "An exception occurred: " << e.getDetailMsg() << '\n';
	}
}
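The WeatherData record and the bounds member referenced above are not part of this example; judging from the HOFFSET() calls and the attribute reads, their declarations presumably look roughly like the following sketch (every name not appearing above is a guess):
// Hypothetical declarations consistent with the compound type built above.
struct WeatherData {
    float wind_xcomp;  // inserted as "wind_xcomp" with PredType::NATIVE_FLOAT
    float wind_ycomp;  // inserted as "wind_ycomp" with PredType::NATIVE_FLOAT
    float hs;          // inserted as "hs" with PredType::NATIVE_FLOAT
    char  light;       // inserted as "light" with PredType::NATIVE_CHAR
};

struct Bounds {
    double minx, miny, maxx, maxy;  // filled in one shot by the "bounds" attribute read
};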
Example #13
0
    std::uint64_t extract_data_range(std::string const& datafilename,
        char const* name, double& minval, double& maxval, double& delta,
        std::size_t start, std::size_t end)
    {
        try {
            using namespace H5;

            // Turn off auto-printing on failure.
            Exception::dontPrint();

            // Try to open the file.
            H5File file(datafilename, H5F_ACC_RDONLY);

            // Try to open the specified dataset.
            DataSet dataset = file.openDataSet(name);
            DataSpace dataspace = dataset.getSpace();

            // Verify number of dimensions
            HPX_ASSERT(dataspace.getSimpleExtentNdims() == 1);

            // Get the size of each dimension in the dataspace.
            hsize_t dims[1];
            dataspace.getSimpleExtentDims(dims, nullptr);
            if (end == std::size_t(-1))
                end = dims[0];

            // Read the minimum and maximum values.
            detail::read_values(dataset, dataspace, start, 1, &minval);
            detail::read_values(dataset, dataspace, end - 1, 1, &maxval);

            // Read the delta value.
            detail::read_values(dataset, dataspace, start + 1, 1, &delta);
            delta -= minval;

            // Return size of dataset.
            return dims[0];
        }
        catch (H5::Exception const& e) {
            std::string msg = e.getDetailMsg();
            HPX_THROW_EXCEPTION(hpx::no_success,
                "sheneos::extract_data_range", msg);
        }

        // This return statement keeps the compiler from whining.
        return 0;
    }
Example #14
0
void HDF5HandlerBase::save(const std::vector<int> &dataPoints) {

    // Return if no data to add
    if (dataPoints.size() < 1)
        return;

    // dataset.write needs not const value of data
    int *data = const_cast<int*>(&dataPoints[0]);

    // Determine the number of new elements to append
    hsize_t dimsext[1];
    dimsext[0] = dataPoints.size();

    hsize_t size[1];
    hsize_t offset[1];


    try {
        DataSpace filespace = dataset.getSpace();
        int ndims = filespace.getSimpleExtentNdims();
        hsize_t dims[ndims];
        filespace.getSimpleExtentDims(dims);

        size[0] = dims[0] + dimsext[0];
        dataset.extend(size);

        offset[0] = dims[0];
        filespace = dataset.getSpace();
        filespace.selectHyperslab(H5S_SELECT_SET, dimsext, offset);

        DataSpace memspace = DataSpace(1, dimsext, NULL);

        dataset.write(data, PredType::NATIVE_INT, memspace, filespace);

        filespace.close();
        memspace.close();

    } catch (Exception &error) {
        throw;
    }


}
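save() relies on DataSet::extend(), which only succeeds when the target dataset was created with a chunked layout and an unlimited maximum extent. A minimal setup sketch, where the dataset name "data", the chunk size, and the already-open H5File 'file' are all assumptions:
// Hypothetical creation of the extendable 1-D dataset that save() appends to.
hsize_t initial[1] = { 0 };                 // start empty
hsize_t maxdims[1] = { H5S_UNLIMITED };     // allow the dataset to grow
DataSpace filespace(1, initial, maxdims);

DSetCreatPropList plist;
hsize_t chunk_dims[1] = { 1024 };
plist.setChunk(1, chunk_dims);              // chunking is required for extend()

DataSet dataset = file.createDataSet("data", PredType::NATIVE_INT, filespace, plist);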
Example #15
0
    boost::uint64_t extract_data_range (std::string const& datafilename,
        double& minval, double& maxval, double& delta,
        std::size_t start, std::size_t end)
    {
        try {
            using namespace H5;

            // Turn off the auto-printing when failure occurs
            Exception::dontPrint();

            H5File file(datafilename, H5F_ACC_RDONLY);
            DataSet dataset = file.openDataSet("x");    // name of data to read
            DataSpace dataspace = dataset.getSpace();

            // number of dimensions
            int numdims = dataspace.getSimpleExtentNdims();

            if (numdims != 1)
            {
                HPX_THROW_EXCEPTION(hpx::no_success, "extract_data_range",
                    "number of dimensions was not 1");
            }

            // Get the dimension size of each dimension in the dataspace.
            hsize_t dims[1];
            dataspace.getSimpleExtentDims(dims, nullptr);
            if (end == std::size_t(-1))
                end = dims[0];

            read_values(dataset, dataspace, start, 1, &minval);
            read_values(dataset, dataspace, end-1, 1, &maxval);
            read_values(dataset, dataspace, start+1, 1, &delta);

            delta -= minval;
            return dims[0];     // return size of dataset
        }
        catch (H5::Exception const& e) {
            HPX_THROW_EXCEPTION(hpx::no_success, "extract_data_range",
                e.getDetailMsg());
        }
        return 0;   // keep compiler happy
    }
Example #16
0
// [[Rcpp::export]]
NumericVector GetAttributeDimensions(XPtr<Attribute> attribute) {
	try {
	  DataSpace dataspace = attribute->getSpace();
	  int ndim = dataspace.getSimpleExtentNdims();

	  NumericVector out;
	  if(ndim > 0) {
		vector<hsize_t> dims_out(ndim);
		dataspace.getSimpleExtentDims(&dims_out[0], NULL);
		out = NumericVector(dims_out.begin(), dims_out.end());
	  } else { // Assume scalar Attribute
		out = NumericVector(1);
		out[0] = 1;
	  }
	  return out;
	} catch (Exception& error) {
		 string msg = error.getDetailMsg() + " in " + error.getFuncName();
		 throw Rcpp::exception(msg.c_str());
	}
}
Example #17
0
void read_hdf5_image(H5File h5f, Mat &image_out, const char *name, const Rect &roi=Rect(0,0,0,0))
{
    DataSet dataset = h5f.openDataSet(name);
    DataSpace dspace = dataset.getSpace();
    assert (dspace.getSimpleExtentNdims() == 2);
    hsize_t dims[2];
    dspace.getSimpleExtentDims(dims);
    if ((roi.width == 0) && (roi.height == 0)) {
        image_out.create(dims[0], dims[1], CV_32F);
        dspace.selectAll();
    } else {
        image_out.create(roi.height, roi.width, CV_32F);
        hsize_t _offset[2], _size[2];
        _offset[0] = roi.y; _offset[1] = roi.x;
        _size[0] = roi.height; _size[1] = roi.width;
        dspace.selectHyperslab(H5S_SELECT_SET, _size, _offset);
    }
    
    DataSpace imspace;
    float *imdata;
    if (image_out.isContinuous()) {
        dims[0] = image_out.size().height; dims[1] = image_out.size().width;
        imspace = DataSpace(2, dims);
        imspace.selectAll();
        imdata = image_out.ptr<float>();
    } else {
        // we are working with an ROI
        assert (image_out.isSubmatrix());
        Size parent_size; Point parent_ofs;
        image_out.locateROI(parent_size, parent_ofs);
        hsize_t parent_count[2];
        parent_count[0] = parent_size.height; parent_count[1] = parent_size.width;
        imspace.setExtentSimple(2, parent_count);
        hsize_t im_offset[2], im_size[2];
        im_offset[0] = parent_ofs.y; im_offset[1] = parent_ofs.x;
        im_size[0] = image_out.size().height; im_size[1] = image_out.size().width;
        imspace.selectHyperslab(H5S_SELECT_SET, im_size, im_offset);
        imdata = image_out.ptr<float>() - parent_ofs.x - parent_ofs.y * parent_size.width;
    }
    dataset.read(imdata, PredType::NATIVE_FLOAT, imspace, dspace);
}
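A short usage sketch for the two OpenCV helpers above (read_feature_size from Example #8 and the ROI-aware read_hdf5_image); the file name "features.h5" and dataset name "feat" are placeholders:
#include <algorithm>
#include <opencv2/core/core.hpp>
#include "H5Cpp.h"
using namespace H5;
using namespace cv;

int main()
{
    H5File h5f("features.h5", H5F_ACC_RDONLY);
    Size sz;
    read_feature_size(h5f, sz, "feat");                  // full dataset extent
    Mat tile;
    Rect roi(0, 0, std::min(64, sz.width), std::min(64, sz.height));
    read_hdf5_image(h5f, tile, "feat", roi);             // read only that tile
    return 0;
}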
Example #18
0
		template<class T> Matrix<T>
		Read (const std::string& uri) const {

			T         t;
			DataSet   dataset = m_file.openDataSet(uri);
			DataSpace space   = dataset.getSpace();
			std::vector<hsize_t> dims (space.getSimpleExtentNdims());
			size_t    ndim    = space.getSimpleExtentDims(&dims[0], NULL);

			if (this->m_verb) {
				printf ("Reading dataset %s ... ", uri.c_str());
				fflush(stdout);
			}

			if (is_complex(t)) {
				dims.pop_back();
				--ndim;
			}

			std::vector<size_t> mdims (ndim,1);

			for (size_t i = 0; i < ndim; ++i)
				mdims[i] = dims[ndim-i-1];

			PredType* type = HDF5Traits<T>::PType();

			Matrix<T> M (mdims);
			dataset.read (&M[0], *type);


			if (this->m_verb)
				printf ("O(%s) done\n", DimsToCString(M));

			space.close();
			dataset.close();

			return M;

		}
Example #19
0
 void read_array (group_or_file f, std::string const & name,  ArrayType & A, bool C_reorder = true) {
  typedef typename ArrayType::value_type V;
  if (!h5::exists(f, name))  TRIQS_RUNTIME_ERROR << "no such dataset : "<<name <<" in file ";
  try {
   DataSet ds = f->openDataSet( name.c_str() );
   DataSpace dataspace = ds.getSpace();
   static const unsigned int Rank =  ArrayType::rank + (boost::is_complex<typename ArrayType::value_type>::value ? 1 : 0);
   int rank = dataspace.getSimpleExtentNdims();
   if (rank != Rank) TRIQS_RUNTIME_ERROR << "triqs::array::h5::read. Rank mismatch : the array has rank = "
    <<Rank<<" while the array stored in the hdf5 file has rank = "<<rank;
   mini_vector<hsize_t,Rank> dims_out;
   //int ndims = dataspace.getSimpleExtentDims( &dims_out[0], NULL);
   dataspace.getSimpleExtentDims( &dims_out[0], NULL);
   mini_vector<size_t,ArrayType::rank > d2; for (size_t u=0; u<ArrayType::rank ; ++u) d2[u] = dims_out[u];
   resize_or_check(A, d2 );
   if (C_reorder) {
    BOOST_AUTO(C,  make_cache(A, Option::C() ));
    ds.read( data(C.view()), data_type_mem(C.view()), data_space(C.view()) , dataspace );
   }
   else { ds.read( data(A), data_type_mem(A), data_space(A) , dataspace ); }
  }
  TRIQS_ARRAYS_H5_CATCH_EXCEPTION;
 }
Example #20
0
    double* GalacticusReader::readDoubleDataSet(const std::string s, long &nvalues) {
        // read a double-type dataset
        //std::string s2("Outputs/Output79/nodeData/blackHoleCount");
        // DataSet dataset = fp->openDataSet(s);
        // rather need pointer to dataset in order to delete it later on:

        //cout << "Reading DataSet '" << s << "'" << endl;

        DataSet *dptr = new DataSet(fp->openDataSet(s));
        DataSet dataset = *dptr; // for convenience

        // check class type
        H5T_class_t type_class = dataset.getTypeClass();
        if (type_class != H5T_FLOAT) {
            cout << "Data does not have double type!" << endl;
            abort();
        }
        // check byte order
        FloatType intype = dataset.getFloatType();
        H5std_string order_string;
        H5T_order_t order = intype.getOrder(order_string);
        //cout << order_string << endl;

        // check again data sizes
        if (sizeof(double) != intype.getSize()) {
            cout << "Mismatch of double data type." << endl;
            abort();
        }

        size_t dsize = intype.getSize();
        //cout << "Data size is " << dsize << endl;

        // get dataspace of the dataset (the array length or so)
        DataSpace dataspace = dataset.getSpace();
        //hid_t dataspace = H5Dget_space(dataset); --> this does not work!! At least not with dataset defined as above!

        // get number of dimensions in dataspace
        int rank = dataspace.getSimpleExtentNdims();
        //cout << "Dataspace rank is " << rank << endl;
        // I expect this to be 1 for all Galacticus datasets!
        // There are no 2 (or more) dimensional arrays stored in one dataset, are there?
        if (rank > 1) {
            cout << "ERROR: Cannot cope with multi-dimensional datasets!" << endl;
            abort();
        }

        hsize_t dims_out[1];
        int ndims = dataspace.getSimpleExtentDims(dims_out, NULL);
        //cout << "dimension " << (unsigned long)(dims_out[0]) << endl;
        nvalues = dims_out[0];

        // read data
        double *buffer = new double[nvalues];
        dataset.read(buffer,PredType::NATIVE_DOUBLE);

        // the data is stored in buffer now, so we can delete the dataset;
        // to do this, call delete on the pointer to the dataset
        dataset.close();
        delete dptr;

        /*cout << "First values: ";
        for (int j = 0; j < 10; j++) {
            cout << buffer[j] << " ";
        }
        cout << endl;
        */

        DataBlock b;
        b.nvalues = nvalues;
        b.doubleval = buffer;
        b.name = s;
        datablocks.push_back(b);


        return buffer;
    }
Example #21
0
    long* GalacticusReader::readLongDataSet(const std::string s, long &nvalues) {
        // read a long-type dataset
        //std::string s2("Outputs/Output79/nodeData/blackHoleCount");
        // DataSet dataset = fp->openDataSet(s);
        // rather need pointer to dataset in order to delete it later on:

        //cout << "Reading DataSet '" << s << "'" << endl;

        DataSet *dptr = new DataSet(fp->openDataSet(s)); // need pointer because of "new ..."
        DataSet dataset = *dptr; // for convenience

        // check class type
        H5T_class_t type_class = dataset.getTypeClass();
        if (type_class != H5T_INTEGER) {
            cout << "Data does not have long type!" << endl;
            abort();
        }
        // check byte order
        IntType intype = dataset.getIntType();
        H5std_string order_string;
        H5T_order_t order = intype.getOrder(order_string);
        //cout << order_string << endl;

        // check again data sizes
        if (sizeof(long) != intype.getSize()) {
            cout << "Mismatch of long data type." << endl;
            abort();
        }

        size_t dsize = intype.getSize();
        //cout << "Data size is " << dsize << endl;

        // get dataspace of the dataset (the array length or so)
        DataSpace dataspace = dataset.getSpace();
        ////hid_t dataspace = H5Dget_space(dataset); --> this does not work!! At least not with dataset defined as above!

        // get number of dimensions in dataspace
        int rank = dataspace.getSimpleExtentNdims();
        //cout << "Dataspace rank is " << rank << endl;
        // I expect this to be 1 for all Galacticus datasets!
        // There are no 2 (or more) dimensional arrays stored in one dataset, are there?
        if (rank > 1) {
            cout << "ERROR: Cannot cope with multi-dimensional datasets!" << endl;
            abort();
        }

        hsize_t dims_out[1];
        int ndims = dataspace.getSimpleExtentDims(dims_out, NULL);
        //cout << "dimension " << (unsigned long)(dims_out[0]) << endl;
        nvalues = dims_out[0];

        // alternative way of determining data size (needed for buffer memory allocation!)
        //size_t size = dataset.getInMemDataSize();
        //cout << size << endl;
        //int nvalues = size/sizeof(long);

        // read data
        long *buffer = new long[nvalues]; // = same as malloc
        dataset.read(buffer,PredType::NATIVE_LONG);

        // the data is stored in buffer now, so we can delete the dataset;
        // to do this, call delete on the pointer to the dataset
        dataset.close();
        // deleting 'dataset' itself is not necessary, since it is a stack variable;
        // it is destroyed automatically when the function ends.
        delete dptr;

        //std::vector<int> data_out(NX);
        //H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &data_out[0]);
        // --> this did not work, do not know why.
        //cout << "status: " << status << endl;
        //int data_out2[dims_out[0]];
        //dataset.read(data_out, PredType::NATIVE_LONG, memspace, filespace);
        // --> this caused problems with incompatible memspace and filespace etc.

        /*cout << "First values: ";
        for (int j = 0; j < 10; j++) {
            cout << buffer[j] << " ";
        }
        cout << endl;
        */

        DataBlock b;
        b.nvalues = nvalues;
        b.longval = buffer;
        b.name = s;
        datablocks.push_back(b);
        // b is added to datablocks-vector now


        return buffer;
    }
Example #22
0
/****************************************************************
**
**  test_attr_mult_read(): Test reading multiple attributes.
**
****************************************************************/
static void test_attr_mult_read()
{
    int     read_data1[ATTR1_DIM1]={0}; // Buffer for reading 1st attribute
    int     read_data2[ATTR2_DIM1][ATTR2_DIM2]={{0}}; // Buffer for reading 2nd attribute
    double  read_data3[ATTR3_DIM1][ATTR3_DIM2][ATTR3_DIM3]={{{0}}}; // Buffer for reading 3rd attribute
    int     i,j,k;

	// Output message about test being performed
    SUBTEST("Multiple Attribute Reading Functions");

    try {
	// Open file
	H5File fid1(FILENAME, H5F_ACC_RDWR);

	// Open the dataset
	DataSet dataset = fid1.openDataSet(DSET1_NAME);

	// Verify the correct number of attributes
	int num_attrs = dataset.getNumAttrs();
	verify_val(num_attrs, 3, "H5Object::getNumAttrs", __LINE__, __FILE__);

	// Open 1st attribute for the dataset
	Attribute attr = dataset.openAttribute((unsigned)0);

	/* Verify Dataspace */

	// Get the dataspace of the attribute
	DataSpace space = attr.getSpace();

	// Get the rank of the dataspace and verify it
	int rank = space.getSimpleExtentNdims();
	verify_val(rank, ATTR1_RANK, "DataSpace::getSimpleExtentNdims", __LINE__, __FILE__);

	// Get the dims of the dataspace and verify them
	hsize_t dims[ATTR_MAX_DIMS];    // Attribute dimensions
	int ndims = space.getSimpleExtentDims(dims);
	if(dims[0]!=ATTR1_DIM1)
	    TestErrPrintf("%d:attribute dimensions different: dims[0]=%d, should be %d\n",__LINE__,(int)dims[0],ATTR1_DIM1);

	/* Verify Datatype */

        // Get the class of the datatype that is used by attr
        H5T_class_t type_class = attr.getTypeClass();

        // Verify that the type is of integer datatype
        verify_val(type_class, H5T_INTEGER, "Attribute::getTypeClass", __LINE__, __FILE__);

    	// Get the integer datatype
        IntType i_type1 = attr.getIntType();

	// Get and verify the order of this type
	H5T_order_t order = i_type1.getOrder();
	verify_val(order, PredType::NATIVE_INT.getOrder(), "DataType::getOrder", __LINE__, __FILE__);

	// Get and verify the size of this type
	size_t size = i_type1.getSize();
	verify_val(size, PredType::NATIVE_INT.getSize(), "DataType::getSize", __LINE__, __FILE__);

	// Read attribute information
	attr.read(PredType::NATIVE_INT, read_data1);

	// Verify values read in
	for(i=0; i<ATTR1_DIM1; i++)
	    if(attr_data1[i]!=read_data1[i])
		TestErrPrintf("%d: attribute data different: attr_data1[%d]=%d,read_data1[%d]=%d\n",__LINE__,i,attr_data1[i],i,read_data1[i]);

	// Verify Name
	H5std_string attr_name = attr.getName();
	verify_val(attr_name, ATTR1_NAME, "DataType::getName", __LINE__, __FILE__);

	attr.close();
	space.close();

	// Open 2nd attribute for the dataset
	attr = dataset.openAttribute((unsigned)1);

	/* Verify Dataspace */

	// Get the dataspace of the attribute
	space = attr.getSpace();

	// Get the rank of the dataspace and verify it
	rank = space.getSimpleExtentNdims();
	verify_val(rank, ATTR2_RANK, "DataSpace::getSimpleExtentNdims", __LINE__, __FILE__);

	// Get the dims of the dataspace and verify them
	ndims = space.getSimpleExtentDims(dims);
	if(dims[0]!=ATTR2_DIM1)
	    TestErrPrintf("%d:attribute dimensions different: dims[0]=%d, should be %d\n",__LINE__,(int)dims[0],ATTR2_DIM1);
	if(dims[1]!=ATTR2_DIM2)
	    TestErrPrintf("%d:attribute dimensions different: dims[1]=%d, should be %d\n",__LINE__,(int)dims[1],ATTR2_DIM2);

	/* Verify Datatype */

        // Get the class of the datatype that is used by attr
        type_class = attr.getTypeClass();

        // Verify that the type is of integer datatype
        verify_val(type_class, H5T_INTEGER, "Attribute::getTypeClass", __LINE__, __FILE__);

    	// Get the integer datatype
        IntType i_type2 = attr.getIntType();

	// Get and verify the order of this type
	order = i_type2.getOrder();
	verify_val(order, PredType::NATIVE_INT.getOrder(), "DataType::getOrder", __LINE__, __FILE__);

	// Get and verify the size of this type
	size = i_type2.getSize();
	verify_val(size, PredType::NATIVE_INT.getSize(), "DataType::getSize", __LINE__, __FILE__);

	// Read attribute information
	attr.read(PredType::NATIVE_INT, read_data2);
	//attr.read(i_type, read_data2);

	// Verify values read in
	for(i=0; i<ATTR2_DIM1; i++)
	  for(j=0; j<ATTR2_DIM2; j++)
            if(attr_data2[i][j]!=read_data2[i][j])
                TestErrPrintf("%d: attribute data different: attr_data2[%d][%d]=%d, read_data2[%d][%d]=%d\n",__LINE__,i,j,attr_data2[i][j],i,j,read_data2[i][j]);

	// Verify Name
	attr_name = attr.getName();
	verify_val(attr_name, ATTR2_NAME, "DataType::getName", __LINE__, __FILE__);
	attr.close();
	space.close();

	// Open 3rd attribute for the dataset
	attr = dataset.openAttribute((unsigned)2);

	/* Verify Dataspace */

	// Get the dataspace of the attribute
	space = attr.getSpace();

	// Get the rank of the dataspace and verify it
	rank = space.getSimpleExtentNdims();
	verify_val(rank, ATTR3_RANK, "DataSpace::getSimpleExtentNdims", __LINE__, __FILE__);

	// Get the dims of the dataspace and verify them
	ndims = space.getSimpleExtentDims(dims);
	verify_val((long)dims[0],(long)ATTR3_DIM1,"attribute dimensions",__FILE__,__LINE__);
	verify_val((long)dims[1],(long)ATTR3_DIM2,"attribute dimensions",__FILE__,__LINE__);
	verify_val((long)dims[2],(long)ATTR3_DIM3,"attribute dimensions",__FILE__,__LINE__);

	/* Verify Datatype */

        // Get the class of the datatype that is used by attr
        type_class = attr.getTypeClass();

        // Verify that the type is of compound datatype
        verify_val(type_class, H5T_FLOAT, "Attribute::getTypeClass", __LINE__, __FILE__);

    	// Get the double datatype
        FloatType f_type = attr.getFloatType();

	// Get and verify the order of this type
	order = f_type.getOrder();
	verify_val(order, PredType::NATIVE_DOUBLE.getOrder(), "DataType::getOrder", __LINE__, __FILE__);

	// Get and verify the size of this type
	size = f_type.getSize();
	verify_val(size, PredType::NATIVE_DOUBLE.getSize(), "DataType::getSize", __LINE__, __FILE__);

	// Read attribute information
	attr.read(PredType::NATIVE_DOUBLE, read_data3);

	// Verify values read in
	for(i=0; i<ATTR3_DIM1; i++)
	    for(j=0; j<ATTR3_DIM2; j++)
		for(k=0; k<ATTR3_DIM3; k++)
		    if(attr_data3[i][j][k]!=read_data3[i][j][k])
			TestErrPrintf("%d: attribute data different: attr_data3[%d][%d][%d]=%f, read_data3[%d][%d][%d]=%f\n",__LINE__,i,j,k,attr_data3[i][j][k],i,j,k,read_data3[i][j][k]);

	// Verify Name
	attr_name = attr.getName();
	verify_val(attr_name, ATTR3_NAME, "DataType::getName", __LINE__, __FILE__);

	PASSED();
    } // end try block

    catch (Exception& E) {
	issue_fail_msg("test_attr_mult_read()", __LINE__, __FILE__, E.getCDetailMsg());
    }
}   // test_attr_mult_read()
Example #23
0
/****************************************************************
**
**  test_attr_compound_read(): Test basic H5A (attribute) code.
**
****************************************************************/
static void test_attr_compound_read()
{
    hsize_t dims[ATTR_MAX_DIMS];	// Attribute dimensions
    size_t      size;   // Attribute datatype size as stored in file
    size_t      offset; // Attribute datatype field offset
    struct attr4_struct read_data4[ATTR4_DIM1][ATTR4_DIM2]; // Buffer for reading 4th attribute
    int     i,j;

    // Output message about test being performed
    SUBTEST("Basic Attribute Functions");

    try {
	// Open file
	H5File fid1(FILENAME, H5F_ACC_RDWR);

	// Open the dataset
	DataSet dataset = fid1.openDataSet(DSET1_NAME);

	// Verify the correct number of attributes
	int num_attrs = dataset.getNumAttrs();
	verify_val(num_attrs, 1, "H5Object::getNumAttrs", __LINE__, __FILE__);

	// Open 1st attribute for the dataset
	Attribute attr = dataset.openAttribute((unsigned)0);

	/* Verify Dataspace */

	// Get the dataspace of the attribute
	DataSpace space = attr.getSpace();

	// Get the rank of the dataspace and verify it
	int rank = space.getSimpleExtentNdims();
	verify_val(rank, ATTR4_RANK, "DataSpace::getSimpleExtentNdims", __LINE__, __FILE__);

	// Get the dims of the dataspace and verify them
	int ndims = space.getSimpleExtentDims(dims);
	verify_val((long)dims[0], (long)ATTR4_DIM1, "DataSpace::getSimpleExtentDims", __LINE__, __FILE__);
	verify_val((long)dims[1], (long)ATTR4_DIM2, "DataSpace::getSimpleExtentDims", __LINE__, __FILE__);

	// Get the class of the datatype that is used by attr
	H5T_class_t type_class = attr.getTypeClass();

	// Verify that the type is of compound datatype
	verify_val(type_class, H5T_COMPOUND, "Attribute::getTypeClass", __LINE__, __FILE__);

	// Get the compound datatype
	CompType datatype = attr.getCompType();

	// Verify the number of fields in the datatype, which must be 3
	int fields = datatype.getNmembers();
	verify_val(fields, 3, "CompType::getNmembers", __LINE__, __FILE__);

	// Verify that the fields have the same names as when the type
	// was created
	for(i=0; i<fields; i++)
	{
	    H5std_string fieldname = datatype.getMemberName(i);
	    if(!((fieldname == ATTR4_FIELDNAME1) ||
		(fieldname == ATTR4_FIELDNAME2) ||
		(fieldname == ATTR4_FIELDNAME3)))
            TestErrPrintf("%d:invalid field name for field #%d: %s\n",__LINE__,i,fieldname.c_str());
	} /* end for */

	offset = datatype.getMemberOffset(0);
	verify_val(offset, attr4_field1_off, "DataType::getMemberOffset", __LINE__, __FILE__);

	offset = datatype.getMemberOffset(1);
	verify_val(offset, attr4_field2_off, "DataType::getMemberOffset", __LINE__, __FILE__);

	offset = datatype.getMemberOffset(2);
	verify_val(offset, attr4_field3_off, "DataType::getMemberOffset", __LINE__, __FILE__);

	/* Verify each field's type, class & size */

	// Get and verify the type class of the first member
	type_class = datatype.getMemberClass(0);
	verify_val(type_class, H5T_INTEGER, "DataType::getMemberClass", __LINE__, __FILE__);
	// Get and verify the order of this member's type
	IntType i_type = datatype.getMemberIntType(0);
	H5T_order_t order = i_type.getOrder();
	verify_val(order, PredType::NATIVE_INT.getOrder(), "DataType::getOrder", __LINE__, __FILE__);

	// Get and verify the size of this member's type
	size = i_type.getSize();
	verify_val(size, PredType::NATIVE_INT.getSize(), "DataType::getSize", __LINE__, __FILE__);

	// Get and verify class, order, and size of the second member's type
	type_class = datatype.getMemberClass(1);
	verify_val(type_class, H5T_FLOAT, "DataType::getMemberClass", __LINE__, __FILE__);
	FloatType f_type = datatype.getMemberFloatType(1);
	order = f_type.getOrder();
	verify_val(order, PredType::NATIVE_DOUBLE.getOrder(), "DataType::getOrder", __LINE__, __FILE__);
	size = f_type.getSize();
	verify_val(size, PredType::NATIVE_DOUBLE.getSize(), "DataType::getSize", __LINE__, __FILE__);

	// Get and verify class, order, and size of the third member's type
	type_class = datatype.getMemberClass(2);
	verify_val(type_class, H5T_INTEGER, "DataType::getMemberClass", __LINE__, __FILE__);
	// Note: H5T_INTEGER is correct here!

	StrType s_type = datatype.getMemberStrType(2);
	order = s_type.getOrder();
	verify_val(order, PredType::NATIVE_SCHAR.getOrder(), "DataType::getOrder", __LINE__, __FILE__);
	size = s_type.getSize();
	verify_val(size, PredType::NATIVE_SCHAR.getSize(), "DataType::getSize", __LINE__, __FILE__);

	// Read attribute information
	attr.read(datatype, read_data4);

	// Verify values read in
	for(i=0; i<ATTR4_DIM1; i++)
	    for(j=0; j<ATTR4_DIM2; j++)
		if(HDmemcmp(&attr_data4[i][j],&read_data4[i][j],sizeof(struct attr4_struct))) {
		    TestErrPrintf("%d:attribute data different: attr_data4[%d][%d].i=%d, read_data4[%d][%d].i=%d\n",__LINE__,i,j,attr_data4[i][j].i,i,j,read_data4[i][j].i);
		    TestErrPrintf("%d:attribute data different: attr_data4[%d][%d].d=%f, read_data4[%d][%d].d=%f\n",__LINE__,i,j,attr_data4[i][j].d,i,j,read_data4[i][j].d);
		    TestErrPrintf("%d:attribute data different: attr_data4[%d][%d].c=%c, read_data4[%d][%d].c=%c\n",__LINE__,i,j,attr_data4[i][j].c,i,j,read_data4[i][j].c);
             } /* end if */

	// Verify name
	H5std_string attr_name = attr.getName();
	verify_val(attr_name, ATTR4_NAME, "Attribute::getName", __LINE__, __FILE__);
	PASSED();
    } // end try block

    catch (Exception& E) {
	issue_fail_msg("test_attr_compound_read()", __LINE__, __FILE__, E.getCDetailMsg());
    }
}   // test_attr_compound_read()
Example #24
0
void Generic_wrapper_hdf::get_dset_info(std::vector<int> & dims,V_TYPE& vt ,const std::string & dset_name) const
{
  if (!(wrapper_open_))
    throw runtime_error("wrapper must be open to add a dataset");
  
  dims.clear();
  
  
  // get data set
  DataSet dset;
  // open data set  
  if(!group_open_ || dset_name[0] == '/')
  {
    dset = file_->openDataSet(dset_name);
  }
  else if(group_)
  {
    dset = group_->openDataSet(dset_name);
  }
  else
    throw logic_error("generic_wrapper_hdf:: can't add to a closed group");

  // identify type
  H5T_class_t dset_class_t = dset.getTypeClass();
  H5T_sign_t sign;
  switch(dset_class_t)
  {
  case H5T_INTEGER:
    sign = dset.getIntType().getSign();
    if(sign == H5T_SGN_2)
      vt = V_INT;
    else if(sign == H5T_SGN_NONE)
      vt = V_UINT;
    else
      vt = V_ERROR;
    break;
  case H5T_FLOAT:
    vt = V_FLOAT;
    break;
  case H5T_STRING:
  case H5T_TIME:
  case H5T_BITFIELD:
  case H5T_OPAQUE:
  case H5T_COMPOUND:
  case H5T_REFERENCE:
  case H5T_ENUM:
  case H5T_VLEN:
  case H5T_ARRAY:
  case H5T_NO_CLASS:
  case H5T_NCLASSES:
    vt = V_ERROR;
    break;
  }
  
  // get the data space
  DataSpace dataspace = dset.getSpace();
  // select everything
  dataspace.selectAll();
  // get the rank
  hsize_t rank = dataspace.getSimpleExtentNdims();
  // make dims the right size
  vector <hsize_t> tdims;
  tdims.resize(rank);
  // get the dimensionality 
  dataspace.getSimpleExtentDims(tdims.data(),NULL);
  // copy to the return vector
  dims.resize(rank);
  for(hsize_t j = 0; j<rank;++j)
    dims[j] = (unsigned int)tdims[j];

  

}
Example #25
0
int main (void)
{
    hsize_t	i, j;

    // Try block to detect exceptions raised by any of the calls inside it
    try
    {
	/*
	 * Turn off the auto-printing when failure occurs so that we can
	 * handle the errors appropriately
	 */
	Exception::dontPrint();

	/*
	 * Open the file and the dataset.
	 */
	H5File file( FILE_NAME, H5F_ACC_RDONLY );
	DataSet dataset = file.openDataSet( DATASET_NAME );

	/*
	 * Get filespace for rank and dimension
	 */
	DataSpace filespace = dataset.getSpace();

	/*
	 * Get number of dimensions in the file dataspace
	 */
	int rank = filespace.getSimpleExtentNdims();

	/*
	 * Get and print the dimension sizes of the file dataspace
	 */
	hsize_t dims[2]; 	// dataset dimensions
	rank = filespace.getSimpleExtentDims( dims );
	cout << "dataset rank = " << rank << ", dimensions "
	     << (unsigned long)(dims[0]) << " x "
	     << (unsigned long)(dims[1]) << endl;

	/*
	 * Define the memory space to read dataset.
	 */
	DataSpace mspace1(RANK, dims);

	/*
	 * Read dataset back and display.
	 */
	int data_out[NX][NY];  // buffer for dataset to be read
	dataset.read( data_out, PredType::NATIVE_INT, mspace1, filespace );

	cout << "\n";
	cout << "Dataset: \n";
	for (j = 0; j < dims[0]; j++)
	{
	    for (i = 0; i < dims[1]; i++)
		cout << data_out[j][i] << " ";
	    cout << endl;
	}

	/*
	 *	    dataset rank 2, dimensions 10 x 5
	 *	    chunk rank 2, dimensions 2 x 5

	 *	    Dataset:
	 *	    1 1 1 3 3
	 *	    1 1 1 3 3
	 *	    1 1 1 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 */

	/*
	 * Read the third column from the dataset.
	 * First define memory dataspace, then define hyperslab
	 * and read it into column array.
	 */
	hsize_t col_dims[1];
	col_dims[0] = 10;
	DataSpace mspace2( RANKC, col_dims );

	/*
	 * Define the column (hyperslab) to read.
	 */
	hsize_t offset[2] = { 0, 2 };
	hsize_t  count[2] = { 10, 1 };
	int column[10];  // buffer for column to be read

	/*
	 * Define hyperslab and read.
	 */
	filespace.selectHyperslab( H5S_SELECT_SET, count, offset );
	dataset.read( column, PredType::NATIVE_INT, mspace2, filespace );

	cout << endl;
	cout << "Third column: " << endl;
	for (i = 0; i < 10; i++)
	    cout << column[i] << endl;

	/*
	 *	    Third column:
	 *	    1
	 *	    1
	 *	    1
	 *	    0
	 *	    0
	 *	    0
	 *	    0
	 *	    0
	 *	    0
	 *	    0
	 */

	/*
	 * Get creation properties list.
	 */
	DSetCreatPropList cparms = dataset.getCreatePlist();

	/*
	 * Check if dataset is chunked.
	 */
	hsize_t chunk_dims[2];
	int     rank_chunk;
	if( H5D_CHUNKED == cparms.getLayout() )
	{
	    /*
	     * Get chunking information: rank and dimensions
	     */
	    rank_chunk = cparms.getChunk( 2, chunk_dims);
	    cout << "chunk rank " << rank_chunk << "dimensions "
		<< (unsigned long)(chunk_dims[0]) << " x "
		<< (unsigned long)(chunk_dims[1]) << endl;

	    /*
	     * Define the memory space to read a chunk.
	     */
	    DataSpace mspace3( rank_chunk, chunk_dims );

	    /*
	     * Define chunk in the file (hyperslab) to read.
	     */
	    offset[0] = 2;
	    offset[1] = 0;
	    count[0]  = chunk_dims[0];
	    count[1]  = chunk_dims[1];
	    filespace.selectHyperslab( H5S_SELECT_SET, count, offset );

	    /*
	     * Read chunk back and display.
	     */
	    int chunk_out[2][5];   // buffer for chunk to be read
	    dataset.read( chunk_out, PredType::NATIVE_INT, mspace3, filespace );
	    cout << endl;
	    cout << "Chunk:" << endl;
	    for (j = 0; j < chunk_dims[0]; j++)
	    {
		for (i = 0; i < chunk_dims[1]; i++)
		    cout << chunk_out[j][i] << " ";
		cout << endl;
	    }
	    /*
	     *	 Chunk:
	     *	 1 1 1 0 0
	     *	 2 0 0 0 0
	     */
	}
    }  // end of try block

    // catch failure caused by the H5File operations
    catch( FileIException error )
    {
	error.printErrorStack();
	return -1;
    }

    // catch failure caused by the DataSet operations
    catch( DataSetIException error )
    {
	error.printErrorStack();
	return -1;
    }

    // catch failure caused by the DataSpace operations
    catch( DataSpaceIException error )
    {
	error.printErrorStack();
	return -1;
    }
    return 0;
}
Example #26
0
template <class T>
void Generic_wrapper_hdf::get_dset_priv(vector<T> & data,std::vector<unsigned int> & dims, const std::string & dset_name,const DataType & mtype) const
{
  if (!(wrapper_open_))
    throw runtime_error("wrapper must be open to read a dataset");
  
  dims.clear();
  data.clear();
  
  // get data set
  DataSet dset;
  // open data set  
  try
  {
    
  if(!group_open_ || dset_name[0] == '/')
  {
    if(file_)
      try
      {
        dset = file_->openDataSet(dset_name);
      }
      catch(FileIException &e)
      {
        throw runtime_error(e.getDetailMsg());
      }
    
    else
      throw runtime_error("there is no open file");
    
  }
  else if(group_)
  {
    dset = group_->openDataSet(dset_name);
  }
  else
    throw logic_error("generic_wrapper_hdf:: can't read from a closed group");
  }
  catch(Exception &e )
  {
    std::string er_msg = "error opening hdf \n" + e.getDetailMsg();
    throw runtime_error(er_msg);
  }
  
  // check type
  H5T_class_t dset_class_t = dset.getTypeClass();

  H5T_class_t mem_class_t = mtype.getClass();
  
  if(dset_class_t != mem_class_t)
    throw runtime_error("Data type miss-match");
  
  // if(mem_class_t == H5T_INTEGER)
  // {
  //   IntType mem_int = IntType(mtype);
  //   H5T_sign_t dsign = dset.getIntType().getSign();
  //   H5T_sign_t msign = mem_int.getSign();

  //   if(dsign  != msign)
  //     throw runtime_error("int signness miss-match ");

  // }
  
  
  // get the data space
  DataSpace dataspace = dset.getSpace();
  // select everything
  dataspace.selectAll();
  // get the rank
  hsize_t rank = dataspace.getSimpleExtentNdims();
  // make dims the right size

  
  vector <hsize_t> tdims;
  tdims.resize(rank);

  // get the dimensionality 
  dataspace.getSimpleExtentDims(tdims.data(),NULL);
  // copy to the return vector
  dims.resize(rank);

  for(hsize_t j = 0; j<rank;++j)
    dims[j] = (unsigned int)tdims[j];


  // get the number of entries
  hsize_t total = dataspace.getSimpleExtentNpoints();
  // resize the data vector
  data.resize(total);
  // read the data out 
  dset.read( data.data(), mtype, dataspace, dataspace );
  

}
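A hypothetical typed front-end to the private helper above, mapping a concrete element type to its HDF5 memory type; the name get_dset and the float overload are assumptions, not part of the original class:
// Sketch: public overload that forwards to get_dset_priv with a matching memory type.
void Generic_wrapper_hdf::get_dset(std::vector<float> & data,
                                   std::vector<unsigned int> & dims,
                                   const std::string & dset_name) const
{
  get_dset_priv(data, dims, dset_name, PredType::NATIVE_FLOAT);
}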
Example #27
0
//' @title Fast model frame for activeReg
//' 
//' @description Function returns a scaled down model frame essentially returning list with no NA values.
//' Each item in the list represents a column in the data frame.
//' 
//' @param chunkName character name of the chunk to be read
//' @param selCols character vector of columns to select
//' @param filePath character path to file where chunk is to be read from
//' @return list representing a data frame with no NA values.
//[[Rcpp::export]]
SEXP h5ModelFrame(std::string chunkName, SEXP selCols_, std::string filePath)
{ 
  // Quick conversion of the SEXP column selection to character vector
  CharacterVector selCols(selCols_);
  // Open the file in read-only mode (H5F_ACC_RDONLY)
  H5File *file = new H5File(filePath, H5F_ACC_RDONLY);
  // Opening the data set 
  DataSet dataset = file->openDataSet((H5std_string)chunkName);
  // Opening the data space
  DataSpace dataspace = dataset.getSpace();
  // Get the number of dimensions
  int ndim = dataspace.getSimpleExtentNdims();
  // Create a dimension object to be filled with the dimensions of the data set
  hsize_t dims[ndim];
  // Fill the dimension of the dataset
  dataspace.getSimpleExtentDims(dims, NULL);
  // Create the return data
  SEXP data;
  // Allocating a matrix of the right size and dimension
  data = PROTECT(Rf_allocMatrix(REALSXP, dims[0], dims[1]));
  // Filling the matrix with data from the dataspace
  dataset.read(REAL(data), PredType::NATIVE_DOUBLE, dataspace);
  UNPROTECT(1);
  // Convert the R object to a numeric matrix
  NumericMatrix M__(data);
  CharacterVector colNames = ch5ReadCharVector("ColumnNames", filePath);
  CharacterVector colClasses = ch5ReadCharVector("ColumnClasses", filePath);
  // Create the output
  List DF;
  string colName;
  string colClass;
  NumericVector vect;
  CharacterVector levels;
  int n = selCols.size();
  IntegerVector sel(n);
  int selN;
  NumericMatrix M_(M__.nrow(), n);
  // Find which of the columns has been selected
  sel = match(selCols, colNames);
  // Copy the correct matrix columns
  for(int i = 0; i < n; i++)
  {
    selN = sel[i] - 1;
    M_(_, i) = M__(_, selN);
  }
  // Number of rows in the matrix
  int nr = M_.nrow();
  int goodRow;
  NumericVector goodRows(nr);
  int badRow;
  for(int i = 0; i < nr; i++)
  {
    badRow = sum(is_na(M_(i, _)));
    if(badRow >= 1)
    {
      goodRows[i] = 0;
    }else{
      goodRows[i] = 1;
    }
  }
  //goodRows = goodRows*-1 + 1;
  NumericMatrix M(sum(goodRows), n);
  int j = 0;
  // Remove NA rows
  for(int i = 0; i < nr; i++)
  {
    goodRow = goodRows[i];
    if(goodRow == 1)
    {
      M(j, _) = M_(i, _);
      j++;
    }
  }
  // Compile the list
  for(int i = 0; i < n; i++)
  {
    colName = selCols[i];
    selN = sel[i] - 1;
    colClass = colClasses[selN];
    if(colClass != "factor")
    {
      DF[colName] = M(_, i); 
    }else{
      vect = M(_, i);
      levels = (CharacterVector)ch5ReadFactor(colName, filePath);
      DF[colName] = cCreateFactor(vect, levels);
    }
    
  }
  dataset.close();
  file->close();
  
  return wrap(DF);
}