Example #1
//'@title Function for dummy read
//' 
//'@param chunkName the name of the chunk to be read back
//'@param filePath the path to the h5 file
//'@return int 0
// [[Rcpp::export]]
int h5DummyRead(std::string chunkName, std::string filePath)
{ 
  // Open the file in read-only mode, H5F_ACC_RDONLY
  H5File *file = new H5File(filePath, H5F_ACC_RDONLY);
  // Opening the data set 
  DataSet dataset = file->openDataSet((H5std_string)chunkName);
  // Opening the data space
  DataSpace dataspace = dataset.getSpace();
  // Get the number of dimensions
  int ndim = dataspace.getSimpleExtentNdims();
  // Create a dimension object to be filled with the dimensions of the data set
  hsize_t dims[ndim];
  // Fill the dimension of the dataset
  dataspace.getSimpleExtentDims(dims, NULL);
  // Create the return data
  SEXP data;
  // Allocating a matrix of the right size and dimension
  data = PROTECT(Rf_allocMatrix(REALSXP, dims[0], dims[1]));
  // Filling the matrix with data from the dataspace
  dataset.read(REAL(data), PredType::NATIVE_DOUBLE, dataspace);
  UNPROTECT(1);
  
  dataset.close();
  file->close();
  
  return 0;
}
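The h5DummyRead example above shows the pattern most of these examples follow: open the file, open the dataset, get its DataSpace, query the rank with getSimpleExtentNdims, and fill a dims array with getSimpleExtentDims before sizing the read buffer. Below is a minimal sketch of the same pattern using a stack-allocated H5File so the handles are released automatically; it assumes a two-dimensional double dataset, and the function name readDoubleChunk is illustrative only, not part of any project above.

#include <H5Cpp.h>
#include <string>
#include <vector>
using namespace H5;

std::vector<double> readDoubleChunk(const std::string& chunkName, const std::string& filePath)
{
  // Open the file read-only; the destructor closes it when the object goes out of scope
  H5File file(filePath, H5F_ACC_RDONLY);
  DataSet dataset = file.openDataSet(chunkName);
  DataSpace dataspace = dataset.getSpace();
  // Query the rank, then the extent of each dimension
  int ndim = dataspace.getSimpleExtentNdims();
  std::vector<hsize_t> dims(ndim);
  dataspace.getSimpleExtentDims(dims.data(), NULL);
  // Size the buffer from the queried dimensions and read the whole dataset
  std::vector<double> buf(dims[0] * dims[1]);   // assumes ndim == 2
  dataset.read(buf.data(), PredType::NATIVE_DOUBLE);
  return buf;
}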
Example #2
// Function to return a selected part of a data frame as a list
List ch5ChunkSel(string chunkName, CharacterVector selCols, string filePath)
{ 
  // Open the file in read-only mode, H5F_ACC_RDONLY
  H5File *file = new H5File(filePath, H5F_ACC_RDONLY);
  // Opening the data set 
  DataSet dataset = file->openDataSet((H5std_string)chunkName);
  // Opening the data space
  DataSpace dataspace = dataset.getSpace();
  // Get the number of dimensions
  int ndim = dataspace.getSimpleExtentNdims();
  // Create a dimension object to be filled with the dimensions of the data set
  hsize_t dims[ndim];
  // Fill the dimension of the dataset
  dataspace.getSimpleExtentDims(dims, NULL);
  // Create the return data
  SEXP data;
  // Allocating a matrix of the right size and dimension
  data = PROTECT(Rf_allocMatrix(REALSXP, dims[0], dims[1]));
  // Filling the matrix with data from the dataspace
  dataset.read(REAL(data), PredType::NATIVE_DOUBLE, dataspace);
  UNPROTECT(1);
  // converting the R object to a numeric matrix
  NumericMatrix M = as<NumericMatrix>(data);
  CharacterVector colNames = ch5ReadCharVector("ColumnNames", filePath);
  CharacterVector colClasses = ch5ReadCharVector("ColumnClasses", filePath);
  
  // Create the output
  List DF;
  string colName;
  string colClass;
  NumericVector vec;
  CharacterVector levels;
  int n = selCols.size();
  IntegerVector sel(n);
  int selN;
  // First we need to find which of the columns has been selected
  sel = match(selCols, colNames);
  
  for(int i = 0; i < n; i++)
  {
    colName = selCols[i];
    selN = sel[i] - 1;
    colClass = colClasses[selN];
    if(colClass != "factor")
    {
      DF[colName] = M(_, selN); 
    }else{
      vec = M(_, selN);
      levels = (CharacterVector)ch5ReadFactor(colName, filePath);
      DF[colName] = cCreateFactor(vec, levels);
    }
    
  }
  
  dataset.close();
  file->close();
  
  return DF;
}
Example #3
HDF5RecordingData::HDF5RecordingData(DataSet* data)
{
    DataSpace dSpace;
    DSetCreatPropList prop;
    ScopedPointer<DataSet> dataSet = data;
    hsize_t dims[3], chunk[3];

    dSpace = dataSet->getSpace();
    prop = dataSet->getCreatePlist();

    dimension = dSpace.getSimpleExtentDims(dims);
    prop.getChunk(dimension,chunk);

    this->size[0] = dims[0];
    if (dimension > 1)
        this->size[1] = dims[1];
    else
        this->size[1] = 1;
    if (dimension > 2)
        this->size[2] = dims[2];
    else
        this->size[2] = 1;

    this->xChunkSize = chunk[0];
    this->xPos = dims[0];
    this->dSet = dataSet;
    this->rowXPos.clear();
    this->rowXPos.insertMultiple(0,0,this->size[1]);
}
Example #4
CharacterVector ch5ReadFactor(string charName, string filePath)
{
  H5File *file = new H5File(filePath, H5F_ACC_RDONLY);
  
  // Group Meta Group
  Group* metaGroup = new Group(file->openGroup("/MetaData/Factor"));
  
  // Getting the data set from the file 
  DataSet dataset = metaGroup->openDataSet((H5std_string)charName);
  // Getting the data space from the dataset
  DataSpace dataspace = dataset.getSpace();
  // We know that it is a char vector array so ndim = 1
  hsize_t dims[1];
  // Getting the number of strings
  dataspace.getSimpleExtentDims(dims, NULL);
  
  // for convenience
  int dim = dims[0];
  // String Type
  StrType vlst(0, H5T_VARIABLE);
  // Reading the strings into a temporary buffer
  char *strRet[dim];
  dataset.read(strRet, vlst);
  CharacterVector out(dim);
  for(int i = 0; i < dim; i++)
  {
    out[i] = strRet[i];
  }
  dataset.close(); //nn
  metaGroup->close();
  file->close();
  return out;
}
Example #5
int HDF5RecordingData::prepareDataBlock(int xDataSize)
{
    hsize_t dim[3];
    DataSpace fSpace;
    if (dimension > 2) return -4; //We're not going to write rows in datasets bigger than 2d.

    dim[2] = size[2];
    dim[1] = size[1];
    dim[0] = xPos + xDataSize;
    try
    {
        dSet->extend(dim);

        fSpace = dSet->getSpace();
        fSpace.getSimpleExtentDims(dim);
        size[0]=dim[0];
    }
    catch (DataSetIException error)
    {
        PROCESS_ERROR;
    }
    rowXPos = xPos;
    rowDataSize = xDataSize;
    xPos += xDataSize;
    return 0;
}
Example #6
    void read_1darray_vector_of_string_impl (group_or_file f, std::string const & name, ArrayVectorOfStringType & V) {
     if (!h5::exists(f, name))  TRIQS_RUNTIME_ERROR << "no such dataset : "<<name <<" in file ";
     try {
      DataSet ds = f->openDataSet( name.c_str() );
      DataSpace dataspace = ds.getSpace();
      mini_vector<hsize_t,1> dims_out;
      int ndims = dataspace.getSimpleExtentDims( &dims_out[0], NULL);
      if (ndims !=1) TRIQS_RUNTIME_ERROR << "triqs::h5 : Trying to read 1d array/vector . Rank mismatch : the array stored in the hdf5 file has rank = "<<ndims;

      size_t Len = dims_out[0];
      V.resize(Len);
      size_t size = ds.getStorageSize();
      StrType strdatatype(PredType::C_S1, size);

      std::vector<char> buf(Len*(size+1), 0x00);

      mini_vector<hsize_t,1> L; L[0]=V.size();
      mini_vector<hsize_t,1> S; S[0]=1;
      auto d_space = dataspace_from_LS<1,false > (L,L,S);

      ds.read( (void *)(&buf[0]),strdatatype, d_space );
      size_t i=0; for (auto & x : V) { x = ""; x.append(&buf[i*(size)]); ++i;}
     }
     TRIQS_ARRAYS_H5_CATCH_EXCEPTION;
    }
Example #7
File: read_values.cpp Project: 7ev3n/hpx
    void extract_data(std::string const& datafilename, double* values,
      std::size_t offset, std::size_t count)
    {
        try {
            using namespace H5;

            // Turn off the auto-printing when failure occurs
            Exception::dontPrint();

            H5File file(datafilename, H5F_ACC_RDONLY);
            DataSet dataset = file.openDataSet("sine"); // name of data to read
            DataSpace dataspace = dataset.getSpace();

            // number of dimensions
            int numdims = dataspace.getSimpleExtentNdims();

            if (numdims != 1)
            {
                HPX_THROW_EXCEPTION(hpx::no_success, "extract_data",
                    "number of dimensions was not 1");
            }

            // Get the dimension size of each dimension in the dataspace.
            hsize_t dims[1];
            dataspace.getSimpleExtentDims(dims, nullptr);

            read_values(dataset, dataspace, offset, count, values);
        }
        catch (H5::Exception const& e) {
            HPX_THROW_EXCEPTION(hpx::no_success, "extract_data",
                e.getDetailMsg());
        }
    }
Example #8
File: fMRI_full.cpp Project: rdevon/DBN
DataSet *load_fMRI3D_DS(std::string filename) {
   std::cout << "Opening file " << filename << std::endl;

   typedef DataSet DBN_DS;
   
   using namespace H5;
   using H5::DataSet;
   
   const H5std_string FILE_NAME(filename);
   
   H5File file(FILE_NAME, H5F_ACC_RDONLY);
   
   int dims[4];
   DataSet datadims = file.openDataSet("dims");
   datadims.read(dims, PredType::NATIVE_INT);
   datadims.close();
   
   DataSet data = file.openDataSet("data");
   DataSpace dsp = data.getSpace();
   
   hsize_t dims_out[2];
   dsp.getSimpleExtentDims(dims_out, NULL);
   int volumes = (int)dims_out[0];
   int voxels = (int)dims_out[1];
   
   std::cout << "Dataset images are of size " << dims[0] << "x" << dims[1] << "x" << dims[2] << "x" << dims[3] << std::endl;
   std::cout << "Training set is of size " << volumes << "x" << voxels << std::endl;
   
   DBN_DS *dataset = new DBN_DS(AOD, volumes, dims[0], dims[1], dims[2], voxels);
   dataset->data_path = filename;
   
   std::cout << "Reading dataset" << std::endl;
   float *out_buffer = new float[voxels*volumes];
   data.read(out_buffer, PredType::NATIVE_FLOAT);
   
   for (int volume = 0; volume < volumes; ++volume) {
      for (int voxel = 0; voxel < voxels; ++voxel) {
         float val = out_buffer[volumes*voxel + volume];
         dataset->data(volume,voxel) = val;
      }
   }
   
   data.close();
   dsp.close();
#if 0
   Vector col = transpose(dataset->data)(9);
   col.save("/Users/devon/Research/matlab/tocmat2");
   exit(1);
#endif
   
   std::cout << "Reading mask" << std::endl;
   DataSet mask_data = file.openDataSet("mask");
   dsp = mask_data.getSpace();
   mask_data.read(dataset->mask.m->data, PredType::NATIVE_FLOAT);
   mask_data.close();
   std::cout << "Done reading mask" << std::endl;
   dataset->applymask = true;
   std::cout << "Done loading dataset" << std::endl;
   return dataset;
}
Example #9
 index_system (DataSpace const & ds, bool is_complex) { 
  int rf = ds.getSimpleExtentNdims();
  if ( rf != rank_full  + (is_complex ? 1 : 0) ) TRIQS_RUNTIME_ERROR <<  "H5 : dimension error";
  //int ndims = ds.getSimpleExtentDims( &lens_[0], NULL);
  ds.getSimpleExtentDims( &lens_[0], NULL);
  for (size_t i =0; i<rank; ++i) { dims[i] = lens_[i]; stri_[i] = 1; off_[i]= 0; }
  total_lens_=dims;
  mydomain = domain_type (dims);
 }
Example #10
//'@title Legacy function to return a data frame chunk as a list
//'
//'@description Experimental function not intended for use at all
//'
//'@param chunkName the name of the chunk to be read
//'@param filePath the path to the h5 file
//'@return List of the data frame chunk
// [[Rcpp::export]]
SEXP h5ReadDoubleMat3(std::string chunkName, std::string filePath)
{ 
  // Open the file in read-only mode, H5F_ACC_RDONLY
  H5File *file = new H5File(filePath, H5F_ACC_RDONLY);
  // Opening the data set 
  DataSet dataset = file->openDataSet((H5std_string)chunkName);
  // Opening the data space
  DataSpace dataspace = dataset.getSpace();
  // Get the number of dimensions
  int ndim = dataspace.getSimpleExtentNdims();
  // Create a dimension object to be filled with the dimensions of the data set
  hsize_t dims[ndim];
  // Fill the dimension of the dataset
  dataspace.getSimpleExtentDims(dims, NULL);
  // Allocate a flat buffer large enough to hold the whole dims[0] x dims[1] chunk
  double *buf = (double*) calloc(dims[1] * dims[0], sizeof(double));
  // Fill the buffer with data from the dataspace
  dataset.read(buf, PredType::NATIVE_DOUBLE, dataspace);
  // Attempt to append the contents to a list
  List out;
  NumericVector vec(dims[0]);
  NumericMatrix M(dims[0], dims[1]);
  CharacterVector colNames = ch5ReadCharVector("ColumnNames", filePath);
  CharacterVector colClasses = ch5ReadCharVector("ColumnClasses", filePath);
  string colName;
  for(int i = 0; i < dims[1]; i++)
  {
    NumericVector vec(dims[0]);
    for(int j = 0; j < dims[0]; j++)
    {
      M(j,i) = buf[i*dims[0] + j];
      vec(j) = buf[i*dims[0] + j];
    }
    colName = colNames[i];
    if(colClasses[i] == "factor")
    {
      CharacterVector levels;
      levels = h5ReadFactor(colName, filePath);
      IntegerVector fact(vec.size());
      fact = cCreateFactor(vec, levels);
      out[colName] = fact;
    }else{
      out[colName] = vec;
    }
    
  }
  free(buf);
  
  dataset.close(); //nn
  file->close();
  
  return wrap(out);
}
Example #11
int HDF5RecordingData::writeDataRow(int yPos, int xDataSize, HDF5FileBase::DataTypes type, void* data)
{
    hsize_t dim[2],offset[2];
    DataSpace fSpace;
    DataType nativeType;
    if (dimension > 2) return -4; //We're not going to write rows in datasets bigger than 2d.
    //    if (xDataSize != rowDataSize) return -2;
    if ((yPos < 0) || (yPos >= size[1])) return -2;

    try
    {
        if (rowXPos[yPos]+xDataSize > size[0])
        {
            dim[1] = size[1];
            dim[0] = rowXPos[yPos] + xDataSize;
            dSet->extend(dim);

            fSpace = dSet->getSpace();
            fSpace.getSimpleExtentDims(dim);
            size[0]=dim[0];
        }
        if (rowXPos[yPos]+xDataSize > xPos)
        {
            xPos = rowXPos[yPos]+xDataSize;
        }

        dim[0] = xDataSize;
        dim[1] = 1;
        DataSpace mSpace(dimension,dim);

        fSpace = dSet->getSpace();
        offset[0] = rowXPos[yPos];
        offset[1] = yPos;
        fSpace.selectHyperslab(H5S_SELECT_SET, dim, offset);

        nativeType = HDF5FileBase::getNativeType(type);


        dSet->write(data,nativeType,mSpace,fSpace);

        rowXPos.set(yPos,rowXPos[yPos] + xDataSize);
    }
    catch (DataSetIException error)
    {
        PROCESS_ERROR;
    }
    catch (DataSpaceIException error)
    {
        PROCESS_ERROR;
    }
    catch (FileIException error)
    {
        PROCESS_ERROR;
    }
    return 0;
}
Example #12
void read_feature_size(H5File h5f, Size &size_out, const char *name)
{
    DataSet dataset = h5f.openDataSet(name);
    DataSpace dspace = dataset.getSpace();
    assert (dspace.getSimpleExtentNdims() == 2);
    hsize_t dims[2];
    dspace.getSimpleExtentDims(dims);
    size_out.height = dims[0];
    size_out.width = dims[1];
}
Example #13
//'@title Legacy function to return a data frame chunk as a list
//'
//'@description Experimental function not intended for use at all
//'
//'@param chunkName the name of the chunk to be read
//'@param filePath the path to the h5 file
//'@return List of the data frame chunk
// [[Rcpp::export]]
SEXP h5ReadDoubleMat2(std::string chunkName, std::string filePath)
{ 
  // Open the file in read-only mode, H5F_ACC_RDONLY
  H5File *file = new H5File(filePath, H5F_ACC_RDONLY);
  // Opening the data set 
  DataSet dataset = file->openDataSet((H5std_string)chunkName);
  // Opening the data space
  DataSpace dataspace = dataset.getSpace();
  // Get the number of dimensions
  int ndim = dataspace.getSimpleExtentNdims();
  // Create a dimension object to be filled with the dimensions of the data set
  hsize_t dims[ndim];
  // Fill the dimension of the dataset
  dataspace.getSimpleExtentDims(dims, NULL);
  // Create the return data
  SEXP data;
  // Allocating a matrix of the right size and dimension
  data = PROTECT(Rf_allocMatrix(REALSXP, dims[0], dims[1]));
  // Filling the matrix with data from the dataspace
  dataset.read(REAL(data), PredType::NATIVE_DOUBLE, dataspace);
  UNPROTECT(1);
  // converting the R object to a numeric matrix
  NumericMatrix M = as<NumericMatrix>(data);
  List out;
  NumericVector vec(dims[0]);
  CharacterVector colNames = ch5ReadCharVector("ColumnNames", filePath);
  CharacterVector colClasses = ch5ReadCharVector("ColumnClasses", filePath);
  string colName;
  for(int i = 0; i < dims[1]; i++)
  {
    NumericVector vec(dims[0]);
    for(int j = 0; j < dims[0]; j++)
    {
      vec(j) = M(j,i);
    }
    colName = colNames[i];
    if(colClasses[i] == "factor")
    {
      CharacterVector levels;
      levels = ch5ReadFactor(colName, filePath);
      IntegerVector fact(vec.size());
      fact = cCreateFactor(vec, levels);
      out[colName] = fact;
    }else{
      out[colName] = vec;
    }
    
  }
  dataset.close(); //nn
  file->close();
  // Returning the data
  return wrap(out);
}
Example #14
int HDF5RecordingData::writeDataBlock(int xDataSize, int yDataSize, HDF5FileBase::DataTypes type, void* data)
{
    hsize_t dim[3],offset[3];
    DataSpace fSpace;
    DataType nativeType;

    dim[2] = size[2];
    //only modify y size if new required size is larger than what we had.
    if (yDataSize > size[1])
        dim[1] = yDataSize;
    else
        dim[1] = size[1];
    dim[0] = xPos + xDataSize;
    try
    {
        //First be sure that we have enough space
        dSet->extend(dim);

        fSpace = dSet->getSpace();
        fSpace.getSimpleExtentDims(dim);
        size[0]=dim[0];
        if (dimension > 1)
            size[1]=dim[1];

        //Create memory space
        dim[0]=xDataSize;
        dim[1]=yDataSize;
        dim[2] = size[2];

        DataSpace mSpace(dimension,dim);
        //select where to write
        offset[0]=xPos;
        offset[1]=0;
        offset[2]=0;

        fSpace.selectHyperslab(H5S_SELECT_SET, dim, offset);

        nativeType = HDF5FileBase::getNativeType(type);

        dSet->write(data,nativeType,mSpace,fSpace);
        xPos += xDataSize;
    }
    catch (DataSetIException error)
    {
        PROCESS_ERROR;
    }
    catch (DataSpaceIException error)
    {
        PROCESS_ERROR;
    }
    return 0;
}
Example #15
void Weather::load(const std::string& name)
{
	std::cout << "Loading " << name << std::endl;
	H5File file(name, H5F_ACC_RDONLY);
	DataSet dataset = file.openDataSet("weather_data");
	std::cout << "Number of attributes: " << dataset.getNumAttrs() << std::endl;
	dataset.openAttribute("resolution").read(PredType::NATIVE_UINT, &resolution);
	//float bounds[4];
	dataset.openAttribute("bounds").read(PredType::NATIVE_DOUBLE, &bounds);

	std::cout << "Resolution: " << resolution << std::endl;
	std::cout << "Bounds: " << bounds.minx << "," << bounds.miny << "," << bounds.maxx << "," << bounds.maxy << std::endl;
	DataSpace ds = dataset.getSpace();
	int dim = ds.getSimpleExtentNdims();
	std::cout << "Dimensions: " << dim << std::endl;
	hsize_t dims_out[3];
	ds.getSimpleExtentDims(dims_out, NULL);
	std::cout << "Size: " << dims_out[0] << "," << dims_out[1] << "," << dims_out[2] << std::endl;
	dimX = dims_out[1];
	dimY = dims_out[2];
	numScenarios = dims_out[0];
	std::cout << "Size: " << dims_out[0] * dims_out[1] * dims_out[2] << std::endl;
	std::cout << "Dataset typeclass: " << dataset.getTypeClass() << "," << H5T_COMPOUND << std::endl;
	std::cout << "Dataset size: " << dataset.getInMemDataSize() << "," << H5T_COMPOUND << std::endl;


	CompType mtype2(sizeof(WeatherData));
	mtype2.insertMember("wind_xcomp", HOFFSET(WeatherData, wind_xcomp), PredType::NATIVE_FLOAT);
	mtype2.insertMember("wind_ycomp", HOFFSET(WeatherData, wind_ycomp), PredType::NATIVE_FLOAT);
	mtype2.insertMember("hs", HOFFSET(WeatherData, hs), PredType::NATIVE_FLOAT);
	mtype2.insertMember("light", HOFFSET(WeatherData, light), PredType::NATIVE_CHAR);
	//WeatherData wd[106938134];

	try {
		weatherData = (WeatherData *)malloc(ds.getSimpleExtentNpoints() * sizeof(WeatherData));
		dataset.read(weatherData, mtype2);
		std::cout << "Finished" << std::endl;


		//size_t ix = i1*dims_out[1] * dims_out[2] + i2 * dims_out[2] + i3;
		//printf("%f %f %f %d\n", wd[ix].wind_xcomp, wd[ix].wind_ycomp, wd[ix].hs, wd[ix].light);
	}
	catch (int e)
	{
		std::cout << "An exception occurred. Exception Nr. " << e << '\n';
	}
}
Example #16
    std::uint64_t extract_data_range(std::string const& datafilename,
        char const* name, double& minval, double& maxval, double& delta,
        std::size_t start, std::size_t end)
    {
        try {
            using namespace H5;

            // Turn off auto-printing on failure.
            Exception::dontPrint();

            // Try to open the file.
            H5File file(datafilename, H5F_ACC_RDONLY);

            // Try to open the specified dataset.
            DataSet dataset = file.openDataSet(name);
            DataSpace dataspace = dataset.getSpace();

            // Verify number of dimensions
            HPX_ASSERT(dataspace.getSimpleExtentNdims() == 1);

            // Get the size of each dimension in the dataspace.
            hsize_t dims[1];
            dataspace.getSimpleExtentDims(dims, nullptr);
            if (end == std::size_t(-1))
                end = dims[0];

            // Read the minimum and maximum values.
            detail::read_values(dataset, dataspace, start, 1, &minval);
            detail::read_values(dataset, dataspace, end - 1, 1, &maxval);

            // Read the delta value.
            detail::read_values(dataset, dataspace, start + 1, 1, &delta);
            delta -= minval;

            // Return size of dataset.
            return dims[0];
        }
        catch (H5::Exception e) {
            std::string msg = e.getDetailMsg().c_str();
            HPX_THROW_EXCEPTION(hpx::no_success,
                "sheneos::extract_data_range", msg);
        }

        // This return statement keeps the compiler from whining.
        return 0;
    }
    long GalacticusReader::getNumRowsInDataSet(string s) {
        // get number of rows (data entries) in given dataset
        // just check with the one given dataset and assume that all datasets
        // have the same size!
        //sprintf(outputname, "Outputs/Output%d/nodeData", ioutput);
        long nvalues;

        DataSet *d = new DataSet(fp->openDataSet(s));
        DataSpace dataspace = d->getSpace();

        hsize_t dims_out[1];
        int ndims = dataspace.getSimpleExtentDims(dims_out, NULL);
        //cout << "dimension " << (unsigned long)(dims_out[0]) << endl;
        nvalues = dims_out[0];

        delete d;

        return nvalues;
    }
void HDF5HandlerBase::save(const std::vector<int> &dataPoints) {

    // Return if no data to add
    if (dataPoints.size() < 1)
        return;

    // dataset.write needs not const value of data
    int *data = const_cast<int*>(&dataPoints[0]);

    // Determine the extent of the data to be appended
    hsize_t dimsext[1];
    dimsext[0] = dataPoints.size();

    hsize_t size[1];
    hsize_t offset[1];


    try {
        DataSpace filespace = dataset.getSpace();
        int ndims = filespace.getSimpleExtentNdims();
        hsize_t dims[ndims];
        filespace.getSimpleExtentDims(dims);

        size[0] = dims[0] + dimsext[0];
        dataset.extend(size);

        offset[0] = dims[0];
        filespace = dataset.getSpace();
        filespace.selectHyperslab(H5S_SELECT_SET, dimsext, offset);

        DataSpace memspace = DataSpace(1, dimsext, NULL);

        dataset.write(data, PredType::NATIVE_INT, memspace, filespace);

        filespace.close();
        memspace.close();

    } catch (Exception &error) {
        throw;
    }


}
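The save() function above shows the standard append pattern for an extendable (chunked) dataset: getSimpleExtentDims supplies the current extent, which becomes both the new total size (after adding the incoming count) and the offset of the hyperslab that receives the new values. Below is a minimal sketch of the same sequence, assuming a 1-D chunked dataset with an unlimited first dimension; the function name appendInts is illustrative only.

#include <H5Cpp.h>
#include <vector>

void appendInts(H5::DataSet& dataset, const std::vector<int>& values)
{
    using namespace H5;
    hsize_t add[1] = { (hsize_t)values.size() };

    // Current extent of the dataset; dims[0] becomes the write offset
    DataSpace filespace = dataset.getSpace();
    hsize_t dims[1];
    filespace.getSimpleExtentDims(dims);

    // Grow the dataset, then refresh the file dataspace to see the new extent
    hsize_t newSize[1] = { dims[0] + add[0] };
    dataset.extend(newSize);
    filespace = dataset.getSpace();

    // Select the newly added region and write into it
    hsize_t offset[1] = { dims[0] };
    filespace.selectHyperslab(H5S_SELECT_SET, add, offset);
    DataSpace memspace(1, add);
    dataset.write(values.data(), PredType::NATIVE_INT, memspace, filespace);
}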
Example #19
File: read_values.cpp Project: 7ev3n/hpx
    boost::uint64_t extract_data_range (std::string const& datafilename,
        double& minval, double& maxval, double& delta,
        std::size_t start, std::size_t end)
    {
        try {
            using namespace H5;

            // Turn off the auto-printing when failure occurs
            Exception::dontPrint();

            H5File file(datafilename, H5F_ACC_RDONLY);
            DataSet dataset = file.openDataSet("x");    // name of data to read
            DataSpace dataspace = dataset.getSpace();

            // number of dimensions
            int numdims = dataspace.getSimpleExtentNdims();

            if (numdims != 1)
            {
                HPX_THROW_EXCEPTION(hpx::no_success, "extract_data_range",
                    "number of dimensions was not 1");
            }

            // Get the dimension size of each dimension in the dataspace.
            hsize_t dims[1];
            dataspace.getSimpleExtentDims(dims, nullptr);
            if (end == std::size_t(-1))
                end = dims[0];

            read_values(dataset, dataspace, start, 1, &minval);
            read_values(dataset, dataspace, end-1, 1, &maxval);
            read_values(dataset, dataspace, start+1, 1, &delta);

            delta -= minval;
            return dims[0];     // return size of dataset
        }
        catch (H5::Exception const& e) {
            HPX_THROW_EXCEPTION(hpx::no_success, "extract_data_range",
                e.getDetailMsg());
        }
        return 0;   // keep compiler happy
    }
Example #20
File: Attribute.cpp Project: cran/h5
// [[Rcpp::export]]
NumericVector GetAttributeDimensions(XPtr<Attribute> attribute) {
	try {
	  DataSpace dataspace = attribute->getSpace();
	  int ndim = dataspace.getSimpleExtentNdims();

	  NumericVector out;
	  if(ndim > 0) {
		vector<hsize_t> dims_out(ndim);
		dataspace.getSimpleExtentDims(&dims_out[0], NULL);
		out = NumericVector(dims_out.begin(), dims_out.end());
	  } else { // Assume scalar Attribute
		out = NumericVector(1);
		out[0] = 1;
	  }
	  return out;
	} catch (Exception& error) {
		 string msg = error.getDetailMsg() + " in " + error.getFuncName();
		 throw Rcpp::exception(msg.c_str());
	}
}
Example #21
void read_hdf5_image(H5File h5f, Mat &image_out, const char *name, const Rect &roi=Rect(0,0,0,0))
{
    DataSet dataset = h5f.openDataSet(name);
    DataSpace dspace = dataset.getSpace();
    assert (dspace.getSimpleExtentNdims() == 2);
    hsize_t dims[2];
    dspace.getSimpleExtentDims(dims);
    if ((roi.width == 0) && (roi.height == 0)) {
        image_out.create(dims[0], dims[1], CV_32F);
        dspace.selectAll();
    } else {
        image_out.create(roi.height, roi.width, CV_32F);
        hsize_t _offset[2], _size[2];
        _offset[0] = roi.y; _offset[1] = roi.x;
        _size[0] = roi.height; _size[1] = roi.width;
        dspace.selectHyperslab(H5S_SELECT_SET, _size, _offset);
    }
    
    DataSpace imspace;
    float *imdata;
    if (image_out.isContinuous()) {
        dims[0] = image_out.size().height; dims[1] = image_out.size().width;
        imspace = DataSpace(2, dims);
        imspace.selectAll();
        imdata = image_out.ptr<float>();
    } else {
        // we are working with an ROI
        assert (image_out.isSubmatrix());
        Size parent_size; Point parent_ofs;
        image_out.locateROI(parent_size, parent_ofs);
        hsize_t parent_count[2];
        parent_count[0] = parent_size.height; parent_count[1] = parent_size.width;
        imspace.setExtentSimple(2, parent_count);
        hsize_t im_offset[2], im_size[2];
        im_offset[0] = parent_ofs.y; im_offset[1] = parent_ofs.x;
        im_size[0] = image_out.size().height; im_size[1] = image_out.size().width;
        imspace.selectHyperslab(H5S_SELECT_SET, im_size, im_offset);
        imdata = image_out.ptr<float>() - parent_ofs.x - parent_ofs.y * parent_size.width;
    }
    dataset.read(imdata, PredType::NATIVE_FLOAT, imspace, dspace);
}
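The read_hdf5_image helper above pairs naturally with read_feature_size from Example #12. A hypothetical usage sketch follows; the file name features.h5 and dataset name probabilities are placeholders, and the dataset is assumed to be a 2-D float array at least 64x64 in size.

#include <H5Cpp.h>
#include <opencv2/core/core.hpp>
using namespace H5;
using namespace cv;

Mat readTopLeftBlock()
{
    H5File h5f("features.h5", H5F_ACC_RDONLY);
    // Query the full 2-D extent first (read_feature_size wraps getSimpleExtentDims);
    // sz could be used to clamp the ROI to the dataset bounds
    Size sz;
    read_feature_size(h5f, sz, "probabilities");
    // Read only a 64x64 region of interest via a hyperslab selection
    Mat block;
    read_hdf5_image(h5f, block, "probabilities", Rect(0, 0, 64, 64));
    return block;
}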
Example #22
//Writes data into an array of HDF5 datatype TYPE
void ArfRecordingData::writeCompoundData(int xDataSize, int yDataSize, DataType type, void* data)
{
    hsize_t dim[3],offset[3];
    DataSpace fSpace;
    DataType nativeType;

    dim[2] = size[2];
    //only modify y size if new required size is larger than what we had.
    if (yDataSize > size[1])
        dim[1] = yDataSize;
    else
        dim[1] = size[1];
    dim[0] = xPos + xDataSize;

    //First be sure that we have enough space
    dSet->extend(dim);

    fSpace = dSet->getSpace();
    fSpace.getSimpleExtentDims(dim);
    size[0]=dim[0];
    if (dimension > 1)
        size[1]=dim[1];

    //Create memory space
    dim[0]=xDataSize;
    dim[1]=yDataSize;
    dim[2] = size[2];

    DataSpace mSpace(dimension,dim);
    //select where to write
    offset[0]=xPos;
    offset[1]=0;
    offset[2]=0;

    fSpace.selectHyperslab(H5S_SELECT_SET, dim, offset);

    dSet->write(data,type,mSpace,fSpace);
    xPos += xDataSize;
}
Example #23
		template<class T> Matrix<T>
		Read (const std::string& uri) const {

			T         t;
			DataSet   dataset = m_file.openDataSet(uri);
			DataSpace space   = dataset.getSpace();
			std::vector<hsize_t> dims (space.getSimpleExtentNdims());
			size_t    ndim    = space.getSimpleExtentDims(&dims[0], NULL);

			if (this->m_verb) {
				printf ("Reading dataset %s ... ", uri.c_str());
				fflush(stdout);
			}

			if (is_complex(t)) {
				dims.pop_back();
				--ndim;
			}

			std::vector<size_t> mdims (ndim,1);

			for (size_t i = 0; i < ndim; ++i)
				mdims[i] = dims[ndim-i-1];

			PredType* type = HDF5Traits<T>::PType();

			Matrix<T> M (mdims);
			dataset.read (&M[0], *type);


			if (this->m_verb)
				printf ("O(%s) done\n", DimsToCString(M));

			space.close();
			dataset.close();

			return M;

		}
Example #24
//write data into a 1-d array, with type wrapped by ArfFileBase::DataTypes, instead of raw HDF5 type
int ArfRecordingData::writeDataChannel(int dataSize, ArfFileBase::DataTypes type, void* data)
{
    //Data is 1-dimensional
    hsize_t dim[3],offset[3];
    DataSpace fSpace;
    
    DataType nativeType = ArfFileBase::getNativeType(type);
    if (xPos+dataSize > size[0])
    {
        dim[0] = xPos + dataSize;
        dim[1] = 0;
        dim[2] = 0;
        dSet->extend(dim);
    }
    
    fSpace = dSet->getSpace();
    fSpace.getSimpleExtentDims(dim);
    size[0]=dim[0];
    if (dimension > 1)
        size[1]=dim[1];

    //Create memory space
    dim[0]= dataSize;
    dim[1]= 0;
    dim[2]= 0;

    DataSpace mSpace(dimension,dim);
    //select where to write
    offset[0]=xPos;
    offset[1]=0;
    offset[2]=0;

    fSpace.selectHyperslab(H5S_SELECT_SET, dim, offset);
    
    
    dSet->write(data,nativeType,mSpace,fSpace);
    xPos = xPos + dataSize;
    return 0;
}
Example #25
 void read_array (group_or_file f, std::string const & name,  ArrayType & A, bool C_reorder = true) {
  typedef typename ArrayType::value_type V;
  if (!h5::exists(f, name))  TRIQS_RUNTIME_ERROR << "no such dataset : "<<name <<" in file ";
  try {
   DataSet ds = f->openDataSet( name.c_str() );
   DataSpace dataspace = ds.getSpace();
   static const unsigned int Rank =  ArrayType::rank + (boost::is_complex<typename ArrayType::value_type>::value ? 1 : 0);
   int rank = dataspace.getSimpleExtentNdims();
   if (rank != Rank) TRIQS_RUNTIME_ERROR << "triqs::array::h5::read. Rank mismatch : the array has rank = "
    <<Rank<<" while the array stored in the hdf5 file has rank = "<<rank;
   mini_vector<hsize_t,Rank> dims_out;
   //int ndims = dataspace.getSimpleExtentDims( &dims_out[0], NULL);
   dataspace.getSimpleExtentDims( &dims_out[0], NULL);
   mini_vector<size_t,ArrayType::rank > d2; for (size_t u=0; u<ArrayType::rank ; ++u) d2[u] = dims_out[u];
   resize_or_check(A, d2 );
   if (C_reorder) {
    BOOST_AUTO(C,  make_cache(A, Option::C() ));
    ds.read( data(C.view()), data_type_mem(C.view()), data_space(C.view()) , dataspace );
   }
   else { ds.read( data(A), data_type_mem(A), data_space(A) , dataspace ); }
  }
  TRIQS_ARRAYS_H5_CATCH_EXCEPTION;
 }
    double* GalacticusReader::readDoubleDataSet(const std::string s, long &nvalues) {
        // read a double-type dataset
        //std::string s2("Outputs/Output79/nodeData/blackHoleCount");
        // DataSet dataset = fp->openDataSet(s);
        // rather need pointer to dataset in order to delete it later on:

        //cout << "Reading DataSet '" << s << "'" << endl;

        DataSet *dptr = new DataSet(fp->openDataSet(s));
        DataSet dataset = *dptr; // for convenience

        // check class type
        H5T_class_t type_class = dataset.getTypeClass();
        if (type_class != H5T_FLOAT) {
            cout << "Data does not have double type!" << endl;
            abort();
        }
        // check byte order
        FloatType intype = dataset.getFloatType();
        H5std_string order_string;
        H5T_order_t order = intype.getOrder(order_string);
        //cout << order_string << endl;

        // check again data sizes
        if (sizeof(double) != intype.getSize()) {
            cout << "Mismatch of double data type." << endl;
            abort();
        }

        size_t dsize = intype.getSize();
        //cout << "Data size is " << dsize << endl;

        // get dataspace of the dataset (the array length or so)
        DataSpace dataspace = dataset.getSpace();
        //hid_t dataspace = H5Dget_space(dataset); --> this does not work!! At least not with dataset defined as above!

        // get number of dimensions in dataspace
        int rank = dataspace.getSimpleExtentNdims();
        //cout << "Dataspace rank is " << rank << endl;
        // I expect this to be 1 for all Galacticus datasets!
        // There are no 2 (or more) dimensional arrays stored in one dataset, are there?
        if (rank > 1) {
            cout << "ERROR: Cannot cope with multi-dimensional datasets!" << endl;
            abort();
        }

        hsize_t dims_out[1];
        int ndims = dataspace.getSimpleExtentDims(dims_out, NULL);
        //cout << "dimension " << (unsigned long)(dims_out[0]) << endl;
        nvalues = dims_out[0];

        // read data
        double *buffer = new double[nvalues];
        dataset.read(buffer,PredType::NATIVE_DOUBLE);

        // the data is stored in buffer now, so we can delete the dataset;
        // to do this, call delete on the pointer to the dataset
        dataset.close();
        delete dptr;

        /*cout << "First values: ";
        for (int j = 0; j < 10; j++) {
            cout << buffer[j] << " ";
        }
        cout << endl;
        */

        DataBlock b;
        b.nvalues = nvalues;
        b.doubleval = buffer;
        b.name = s;
        datablocks.push_back(b);


        return buffer;
    }
    long* GalacticusReader::readLongDataSet(const std::string s, long &nvalues) {
        // read a long-type dataset
        //std::string s2("Outputs/Output79/nodeData/blackHoleCount");
        // DataSet dataset = fp->openDataSet(s);
        // rather need pointer to dataset in order to delete it later on:

        //cout << "Reading DataSet '" << s << "'" << endl;

        DataSet *dptr = new DataSet(fp->openDataSet(s)); // need pointer because of "new ..."
        DataSet dataset = *dptr; // for convenience

        // check class type
        H5T_class_t type_class = dataset.getTypeClass();
        if (type_class != H5T_INTEGER) {
            cout << "Data does not have long type!" << endl;
            abort();
        }
        // check byte order
        IntType intype = dataset.getIntType();
        H5std_string order_string;
        H5T_order_t order = intype.getOrder(order_string);
        //cout << order_string << endl;

        // check again data sizes
        if (sizeof(long) != intype.getSize()) {
            cout << "Mismatch of long data type." << endl;
            abort();
        }

        size_t dsize = intype.getSize();
        //cout << "Data size is " << dsize << endl;

        // get dataspace of the dataset (the array length or so)
        DataSpace dataspace = dataset.getSpace();
        ////hid_t dataspace = H5Dget_space(dataset); --> this does not work!! At least not with dataset defined as above!

        // get number of dimensions in dataspace
        int rank = dataspace.getSimpleExtentNdims();
        //cout << "Dataspace rank is " << rank << endl;
        // I expect this to be 1 for all Galacticus datasets!
        // There are no 2 (or more) dimensional arrays stored in one dataset, are there?
        if (rank > 1) {
            cout << "ERROR: Cannot cope with multi-dimensional datasets!" << endl;
            abort();
        }

        hsize_t dims_out[1];
        int ndims = dataspace.getSimpleExtentDims(dims_out, NULL);
        //cout << "dimension " << (unsigned long)(dims_out[0]) << endl;
        nvalues = dims_out[0];

        // alternative way of determining data size (needed for buffer memory allocation!)
        //size_t size = dataset.getInMemDataSize();
        //cout << size << endl;
        //int nvalues = size/sizeof(long);

        // read data
        long *buffer = new long[nvalues]; // = same as malloc
        dataset.read(buffer,PredType::NATIVE_LONG);

        // the data is stored in buffer now, so we can delete the dataset;
        // to do this, call delete on the pointer to the dataset
        dataset.close();
        // Deleting the local dataset object is not necessary, since it is a stack
        // variable and is destroyed automatically when the function ends.
        delete dptr;

        //std::vector<int> data_out(NX);
        //H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &data_out[0]);
        // --> this did not work, do not know why.
        //cout << "status: " << status << endl;
        //int data_out2[dims_out[0]];
        //dataset.read(data_out, PredType::NATIVE_LONG, memspace, filespace);
        // --> this caused problems with incompatible memspace and filespace etc.

        /*cout << "First values: ";
        for (int j = 0; j < 10; j++) {
            cout << buffer[j] << " ";
        }
        cout << endl;
        */

        DataBlock b;
        b.nvalues = nvalues;
        b.longval = buffer;
        b.name = s;
        datablocks.push_back(b);
        // b is added to datablocks-vector now


        return buffer;
    }
Example #28
void KWIKFileSource::fillRecordInfo()
{
    Group recordings;

    try
    {
        recordings = sourceFile->openGroup("/recordings");
        int numObjs = recordings.getNumObjs();

        for (int i=0; i < numObjs; i++)
        {
            try
            {
                Group recordN;
                DataSet data;
                Attribute attr;
                DataSpace dSpace;
                float sampleRate;
                float bitVolts;
                hsize_t dims[3];
                RecordInfo info;

                recordN = recordings.openGroup(String(i).toUTF8());
                data = recordN.openDataSet("data");
                attr = recordN.openAttribute("sample_rate");
                attr.read(PredType::NATIVE_FLOAT,&sampleRate);
                attr = recordN.openAttribute("bit_depth");
                attr.read(PredType::NATIVE_FLOAT,&bitVolts);
                dSpace = data.getSpace();
                dSpace.getSimpleExtentDims(dims);

                info.name="Record "+String(i);
                info.numSamples = dims[0];
                info.sampleRate = sampleRate;

                bool foundBitVoltArray = false;
                HeapBlock<float> bitVoltArray(dims[1]);

                try
                {
                    recordN = recordings.openGroup((String(i) + "/application_data").toUTF8());
                    attr=recordN.openAttribute("channel_bit_volts");
                    attr.read(ArrayType(PredType::NATIVE_FLOAT,1,&dims[1]),bitVoltArray);
                    foundBitVoltArray = true;
                } catch (GroupIException)
                {
                } catch (AttributeIException)
                {
                }

                for (int j = 0; j < dims[1]; j++)
                {
                    RecordedChannelInfo c;
                    c.name = "CH" + String(j);

                    if (foundBitVoltArray)
                        c.bitVolts = bitVoltArray[j];
                    else
                        c.bitVolts = bitVolts;

                    info.channels.add(c);
                }
                infoArray.add(info);
                availableDataSets.add(i);
                numRecords++;
            }
            catch (GroupIException)
            {
            }
            catch (DataSetIException)
            {
            }
            catch (AttributeIException)
            {
            }
            catch (DataSpaceIException error)
            {
                PROCESS_ERROR;
            }
        }
    }
    catch (FileIException error)
    {
        PROCESS_ERROR;
    }
    catch (GroupIException error)
    {
        PROCESS_ERROR;
    }
}
Example #29
/****************************************************************
**
**  test_attr_mult_read(): Test reading multiple attributes.
**
****************************************************************/
static void test_attr_mult_read()
{
    int     read_data1[ATTR1_DIM1]={0}; // Buffer for reading 1st attribute
    int     read_data2[ATTR2_DIM1][ATTR2_DIM2]={{0}}; // Buffer for reading 2nd attribute
    double  read_data3[ATTR3_DIM1][ATTR3_DIM2][ATTR3_DIM3]={{{0}}}; // Buffer for reading 3rd attribute
    int     i,j,k;

	// Output message about test being performed
    SUBTEST("Multiple Attribute Reading Functions");

    try {
	// Open file
	H5File fid1(FILENAME, H5F_ACC_RDWR);

	// Open the dataset
	DataSet dataset = fid1.openDataSet(DSET1_NAME);

	// Verify the correct number of attributes
	int num_attrs = dataset.getNumAttrs();
	verify_val(num_attrs, 3, "H5Object::getNumAttrs", __LINE__, __FILE__);

	// Open 1st attribute for the dataset
	Attribute attr = dataset.openAttribute((unsigned)0);

	/* Verify Dataspace */

	// Get the dataspace of the attribute
	DataSpace space = attr.getSpace();

	// Get the rank of the dataspace and verify it
	int rank = space.getSimpleExtentNdims();
	verify_val(rank, ATTR1_RANK, "DataSpace::getSimpleExtentNdims", __LINE__, __FILE__);

	// Get the dims of the dataspace and verify them
	hsize_t dims[ATTR_MAX_DIMS];    // Attribute dimensions
	int ndims = space.getSimpleExtentDims(dims);
	if(dims[0]!=ATTR1_DIM1)
	    TestErrPrintf("%d:attribute dimensions different: dims[0]=%d, should be %d\n",__LINE__,(int)dims[0],ATTR1_DIM1);

	/* Verify Datatype */

        // Get the class of the datatype that is used by attr
        H5T_class_t type_class = attr.getTypeClass();

        // Verify that the type is of integer datatype
        verify_val(type_class, H5T_INTEGER, "Attribute::getTypeClass", __LINE__, __FILE__);

    	// Get the integer datatype
        IntType i_type1 = attr.getIntType();

	// Get and verify the order of this type
	H5T_order_t order = i_type1.getOrder();
	verify_val(order, PredType::NATIVE_INT.getOrder(), "DataType::getOrder", __LINE__, __FILE__);

	// Get and verify the size of this type
	size_t size = i_type1.getSize();
	verify_val(size, PredType::NATIVE_INT.getSize(), "DataType::getSize", __LINE__, __FILE__);

	// Read attribute information
	attr.read(PredType::NATIVE_INT, read_data1);

	// Verify values read in
	for(i=0; i<ATTR1_DIM1; i++)
	    if(attr_data1[i]!=read_data1[i])
		TestErrPrintf("%d: attribute data different: attr_data1[%d]=%d,read_data1[%d]=%d\n",__LINE__,i,attr_data1[i],i,read_data1[i]);

	// Verify Name
	H5std_string attr_name = attr.getName();
	verify_val(attr_name, ATTR1_NAME, "DataType::getName", __LINE__, __FILE__);

	attr.close();
	space.close();

	// Open 2nd attribute for the dataset
	attr = dataset.openAttribute((unsigned)1);

	/* Verify Dataspace */

	// Get the dataspace of the attribute
	space = attr.getSpace();

	// Get the rank of the dataspace and verify it
	rank = space.getSimpleExtentNdims();
	verify_val(rank, ATTR2_RANK, "DataSpace::getSimpleExtentNdims", __LINE__, __FILE__);

	// Get the dims of the dataspace and verify them
	ndims = space.getSimpleExtentDims(dims);
	if(dims[0]!=ATTR2_DIM1)
	    TestErrPrintf("%d:attribute dimensions different: dims[0]=%d, should be %d\n",__LINE__,(int)dims[0],ATTR2_DIM1);
	if(dims[1]!=ATTR2_DIM2)
	    TestErrPrintf("%d:attribute dimensions different: dims[1]=%d, should be %d\n",__LINE__,(int)dims[1],ATTR2_DIM2);

	/* Verify Datatype */

        // Get the class of the datatype that is used by attr
        type_class = attr.getTypeClass();

        // Verify that the type is of integer datatype
        verify_val(type_class, H5T_INTEGER, "Attribute::getTypeClass", __LINE__, __FILE__);

    	// Get the integer datatype
        IntType i_type2 = attr.getIntType();

	// Get and verify the order of this type
	order = i_type2.getOrder();
	verify_val(order, PredType::NATIVE_INT.getOrder(), "DataType::getOrder", __LINE__, __FILE__);

	// Get and verify the size of this type
	size = i_type2.getSize();
	verify_val(size, PredType::NATIVE_INT.getSize(), "DataType::getSize", __LINE__, __FILE__);

	// Read attribute information
	attr.read(PredType::NATIVE_INT, read_data2);
	//attr.read(i_type, read_data2);

	// Verify values read in
	for(i=0; i<ATTR2_DIM1; i++)
	  for(j=0; j<ATTR2_DIM2; j++)
            if(attr_data2[i][j]!=read_data2[i][j])
                TestErrPrintf("%d: attribute data different: attr_data2[%d][%d]=%d, read_data2[%d][%d]=%d\n",__LINE__,i,j,attr_data2[i][j],i,j,read_data2[i][j]);

	// Verify Name
	attr_name = attr.getName();
	verify_val(attr_name, ATTR2_NAME, "DataType::getName", __LINE__, __FILE__);
	attr.close();
	space.close();

	// Open 3rd attribute for the dataset
	attr = dataset.openAttribute((unsigned)2);

	/* Verify Dataspace */

	// Get the dataspace of the attribute
	space = attr.getSpace();

	// Get the rank of the dataspace and verify it
	rank = space.getSimpleExtentNdims();
	verify_val(rank, ATTR3_RANK, "DataSpace::getSimpleExtentNdims", __LINE__, __FILE__);

	// Get the dims of the dataspace and verify them
	ndims = space.getSimpleExtentDims(dims);
	verify_val((long)dims[0],(long)ATTR3_DIM1,"attribute dimensions",__FILE__,__LINE__);
	verify_val((long)dims[1],(long)ATTR3_DIM2,"attribute dimensions",__FILE__,__LINE__);
	verify_val((long)dims[2],(long)ATTR3_DIM3,"attribute dimensions",__FILE__,__LINE__);

	/* Verify Datatype */

        // Get the class of the datatype that is used by attr
        type_class = attr.getTypeClass();

        // Verify that the type is of compound datatype
        verify_val(type_class, H5T_FLOAT, "Attribute::getTypeClass", __LINE__, __FILE__);

    	// Get the double datatype
        FloatType f_type = attr.getFloatType();

	// Get and verify the order of this type
	order = f_type.getOrder();
	verify_val(order, PredType::NATIVE_DOUBLE.getOrder(), "DataType::getOrder", __LINE__, __FILE__);

	// Get and verify the size of this type
	size = f_type.getSize();
	verify_val(size, PredType::NATIVE_DOUBLE.getSize(), "DataType::getSize", __LINE__, __FILE__);

	// Read attribute information
	attr.read(PredType::NATIVE_DOUBLE, read_data3);

	// Verify values read in
	for(i=0; i<ATTR3_DIM1; i++)
	    for(j=0; j<ATTR3_DIM2; j++)
		for(k=0; k<ATTR3_DIM3; k++)
		    if(attr_data3[i][j][k]!=read_data3[i][j][k])
			TestErrPrintf("%d: attribute data different: attr_data3[%d][%d][%d]=%f, read_data3[%d][%d][%d]=%f\n",__LINE__,i,j,k,attr_data3[i][j][k],i,j,k,read_data3[i][j][k]);

	// Verify Name
	attr_name = attr.getName();
	verify_val(attr_name, ATTR3_NAME, "DataType::getName", __LINE__, __FILE__);

	PASSED();
    } // end try block

    catch (Exception E) {
	issue_fail_msg("test_attr_mult_read()", __LINE__, __FILE__, E.getCDetailMsg());
    }
}   // test_attr_mult_read()
Example #30
/****************************************************************
**
**  test_attr_compound_read(): Test basic H5A (attribute) code.
**
****************************************************************/
static void test_attr_compound_read()
{
    hsize_t dims[ATTR_MAX_DIMS];	// Attribute dimensions
    size_t      size;   // Attribute datatype size as stored in file
    size_t      offset; // Attribute datatype field offset
    struct attr4_struct read_data4[ATTR4_DIM1][ATTR4_DIM2]; // Buffer for reading 4th attribute
    int     i,j;

    // Output message about test being performed
    SUBTEST("Basic Attribute Functions");

    try {
	// Open file
	H5File fid1(FILENAME, H5F_ACC_RDWR);

	// Open the dataset
	DataSet dataset = fid1.openDataSet(DSET1_NAME);

	// Verify the correct number of attributes
	int num_attrs = dataset.getNumAttrs();
	verify_val(num_attrs, 1, "H5Object::getNumAttrs", __LINE__, __FILE__);

	// Open 1st attribute for the dataset
	Attribute attr = dataset.openAttribute((unsigned)0);

	/* Verify Dataspace */

	// Get the dataspace of the attribute
	DataSpace space = attr.getSpace();

	// Get the rank of the dataspace and verify it
	int rank = space.getSimpleExtentNdims();
	verify_val(rank, ATTR4_RANK, "DataSpace::getSimpleExtentNdims", __LINE__, __FILE__);

	// Get the dims of the dataspace and verify them
	int ndims = space.getSimpleExtentDims(dims);
	verify_val((long)dims[0], (long)ATTR4_DIM1, "DataSpace::getSimpleExtentDims",__LINE__, __FILE__);
	verify_val((long)dims[1], (long)ATTR4_DIM2, "DataSpace::getSimpleExtentDims",__LINE__, __FILE__);

	// Get the class of the datatype that is used by attr
	H5T_class_t type_class = attr.getTypeClass();

	// Verify that the type is of compound datatype
	verify_val(type_class, H5T_COMPOUND, "Attribute::getTypeClass", __LINE__, __FILE__);

	// Get the compound datatype
	CompType datatype = attr.getCompType();

	// Verify the number of fields in the datatype, which must be 3
	int fields = datatype.getNmembers();
	verify_val(fields, 3, "CompType::getNmembers", __LINE__, __FILE__);

	// Verify that the fields have the same names as when the type
	// was created
	for(i=0; i<fields; i++)
	{
	    H5std_string fieldname = datatype.getMemberName(i);
	    if(!((fieldname == ATTR4_FIELDNAME1) ||
		(fieldname == ATTR4_FIELDNAME2) ||
		(fieldname == ATTR4_FIELDNAME3)))
            TestErrPrintf("%d:invalid field name for field #%d: %s\n",__LINE__,i,fieldname.c_str());
	} /* end for */

	offset = datatype.getMemberOffset(0);
	verify_val(offset, attr4_field1_off, "DataType::getMemberOffset", __LINE__, __FILE__);

	offset = datatype.getMemberOffset(1);
	verify_val(offset, attr4_field2_off, "DataType::getMemberOffset", __LINE__, __FILE__);

	offset = datatype.getMemberOffset(2);
	verify_val(offset, attr4_field3_off, "DataType::getMemberOffset", __LINE__, __FILE__);

	/* Verify each field's type, class & size */

	// Get and verify the type class of the first member
	type_class = datatype.getMemberClass(0);
	verify_val(type_class, H5T_INTEGER, "DataType::getMemberClass", __LINE__, __FILE__);
	// Get and verify the order of this member's type
	IntType i_type = datatype.getMemberIntType(0);
	H5T_order_t order = i_type.getOrder();
	verify_val(order, PredType::NATIVE_INT.getOrder(), "DataType::getOrder", __LINE__, __FILE__);

	// Get and verify the size of this member's type
	size = i_type.getSize();
	verify_val(size, PredType::NATIVE_INT.getSize(), "DataType::getSize", __LINE__, __FILE__);

	// Get and verify class, order, and size of the second member's type
	type_class = datatype.getMemberClass(1);
	verify_val(type_class, H5T_FLOAT, "DataType::getMemberClass", __LINE__, __FILE__);
	FloatType f_type = datatype.getMemberFloatType(1);
	order = f_type.getOrder();
	verify_val(order, PredType::NATIVE_DOUBLE.getOrder(), "DataType::getOrder", __LINE__, __FILE__);
	size = f_type.getSize();
	verify_val(size, PredType::NATIVE_DOUBLE.getSize(), "DataType::getSize", __LINE__, __FILE__);

	// Get and verify class, order, and size of the third member's type
	type_class = datatype.getMemberClass(2);
	verify_val(type_class, H5T_INTEGER, "DataType::getMemberClass", __LINE__, __FILE__);
	// Note: H5T_INTEGER is correct here!

	StrType s_type = datatype.getMemberStrType(2);
	order = s_type.getOrder();
	verify_val(order, PredType::NATIVE_SCHAR.getOrder(), "DataType::getOrder", __LINE__, __FILE__);
	size = s_type.getSize();
	verify_val(size, PredType::NATIVE_SCHAR.getSize(), "DataType::getSize", __LINE__, __FILE__);

	// Read attribute information
	attr.read(datatype, read_data4);

	// Verify values read in
	for(i=0; i<ATTR4_DIM1; i++)
	    for(j=0; j<ATTR4_DIM2; j++)
		if(HDmemcmp(&attr_data4[i][j],&read_data4[i][j],sizeof(struct attr4_struct))) {
		    TestErrPrintf("%d:attribute data different: attr_data4[%d][%d].i=%d, read_data4[%d][%d].i=%d\n",__LINE__,i,j,attr_data4[i][j].i,i,j,read_data4[i][j].i);
		    TestErrPrintf("%d:attribute data different: attr_data4[%d][%d].d=%f, read_data4[%d][%d].d=%f\n",__LINE__,i,j,attr_data4[i][j].d,i,j,read_data4[i][j].d);
		    TestErrPrintf("%d:attribute data different: attr_data4[%d][%d].c=%c, read_data4[%d][%d].c=%c\n",__LINE__,i,j,attr_data4[i][j].c,i,j,read_data4[i][j].c);
             } /* end if */

	// Verify name
	H5std_string attr_name = attr.getName();
	verify_val(attr_name, ATTR4_NAME, "Attribute::getName", __LINE__, __FILE__);
	PASSED();
    } // end try block

    catch (Exception E) {
	issue_fail_msg("test_attr_compound_read()", __LINE__, __FILE__, E.getCDetailMsg());
    }
}   // test_attr_compound_read()