Example No. 1
/**
 * Sets up the chunking and compression level for a 1-D dataset.
 * @param length The chunk length, in elements
 * @return The configured property list
 */
DSetCreatPropList getPropList(const std::size_t length) {
  DSetCreatPropList propList;
  hsize_t chunk_dims[1] = {length};
  propList.setChunk(1, chunk_dims);
  propList.setDeflate(6);
  return propList;
}
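A minimal usage sketch (assuming the H5 namespace is in scope, as in the snippets below; the file and dataset names are illustrative):

H5File file("example.h5", H5F_ACC_TRUNC);   // hypothetical file name
hsize_t dims[1] = {1024};
DataSpace space(1, dims);
// Reuse the chunked + deflate property list built above.
DSetCreatPropList plist = getPropList(1024);
DataSet dset = file.createDataSet("values", PredType::NATIVE_DOUBLE, space, plist);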
Example No. 2
//Creates a dataset: an array of HDF5 CompoundType.
//To make dimension i unlimited, pass chunk_dims[i] = NCHUNK and max_dims[i] = 0;
//for a fixed dimension, pass max_dims[i] = N and chunk_dims[i] = N.
ArfRecordingData* ArfFileBase::createCompoundDataSet(CompType type, String path, int dimension, int* max_dims, int* chunk_dims)
{
    ScopedPointer<DataSet> data;
    DSetCreatPropList prop;
    
    hsize_t Hdims[3];
    hsize_t Hmax_dims [3];
    hsize_t Hchunk_dims[3];

    for (int i=0; i < dimension; i++)
    {
        Hchunk_dims[i] = chunk_dims[i];
        if (chunk_dims[i] > 0 && chunk_dims[i] != max_dims[i])
        {
            Hmax_dims[i] = H5S_UNLIMITED;
            Hdims[i] = 0;
        }
        else
        {
            Hmax_dims[i] = max_dims[i];
            Hdims[i] = max_dims[i];
        }
    }   
    
    DataSpace dSpace(dimension, Hdims, Hmax_dims);
    prop.setChunk(dimension, Hchunk_dims);
    data = new DataSet(file->createDataSet(path.toUTF8(),type,dSpace,prop));
    return new ArfRecordingData(data.release());  
}
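A hedged call sketch following the convention in the comment above (arfFile, type, nChannels, and NCHUNK are hypothetical; type is assumed to be an already-built CompType). Dimension 0 grows without bound while dimension 1 stays fixed:

const int nChannels = 8;                 // hypothetical channel count
const int NCHUNK = 256;                  // hypothetical rows per chunk
int max_dims[2]   = {0, nChannels};      // 0 marks dimension 0 as unlimited
int chunk_dims[2] = {NCHUNK, nChannels}; // equal to max_dims[1], so dimension 1 is fixed
ArfRecordingData* rec =
    arfFile->createCompoundDataSet(type, "/rec/0", 2, max_dims, chunk_dims);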
Example No. 3
// * * * * * * * * * * * * * * * * * * * * * * * * * *
void H5_C3PO_NS::createExtendibleDataset(std::string FILE_NAME,const char* datasetName_)
{

   hsize_t dims[2] = { 0, 1}; // dataset dimensions at creation
   hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
   DataSpace mspace1( RANK, dims, maxdims);

  H5File* file=new H5File( FILE_NAME.c_str(),H5F_ACC_RDWR );
  FloatType datatype( PredType::NATIVE_DOUBLE );       //Define a floating-point datatype for the data
  datatype.setOrder( H5T_ORDER_LE );
  
  DSetCreatPropList cparms;
  hsize_t chunk_dims[2] ={6, 1};
  cparms.setChunk( RANK, chunk_dims );
  
  //Set fill value for the dataset
  
  double fill_val = 1.0;
  cparms.setFillValue( PredType::NATIVE_DOUBLE, &fill_val);
  
  DataSet dataset = file->createDataSet( datasetName_, PredType::NATIVE_DOUBLE, mspace1, cparms);
  
  file->close();
  
  delete file;

}
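Because the dataset is created with zero rows and unlimited maximum dimensions, a writer must extend it before each append. A sketch of that pattern, assuming the same FILE_NAME and datasetName_ as above (nRows and data are hypothetical):

hsize_t nRows = 100;                              // hypothetical number of rows to append
std::vector<double> data(nRows);                  // hypothetical payload
H5File file(FILE_NAME.c_str(), H5F_ACC_RDWR);
DataSet dataset = file.openDataSet(datasetName_);
hsize_t newSize[2] = {nRows, 1};
dataset.extend(newSize);                          // grow from 0 rows to nRows
DataSpace fspace = dataset.getSpace();
hsize_t offset[2] = {0, 0}, count[2] = {nRows, 1};
fspace.selectHyperslab(H5S_SELECT_SET, count, offset);
DataSpace mspace(2, count);
dataset.write(data.data(), PredType::NATIVE_DOUBLE, mspace, fspace);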
Example No. 4
void hdf5ExternalArrayTestCreate(CuTest *testCase) {
    for (hsize_t chunkIdx = 0; chunkIdx < numSizes; ++chunkIdx) {
        hsize_t chunkSize = chunkSizes[chunkIdx];
        setup();
        try {
            IntType datatype(PredType::NATIVE_HSIZE);

            H5File file(H5std_string(fileName), H5F_ACC_TRUNC);

            Hdf5ExternalArray myArray;
            DSetCreatPropList cparms;
            if (chunkSize > 0) {
                cparms.setDeflate(2);
                cparms.setChunk(1, &chunkSize);
            }
            myArray.create(&file, datasetName, datatype, N, &cparms);
            for (hsize_t i = 0; i < N; ++i) {
                hsize_t *block = reinterpret_cast<hsize_t *>(myArray.getUpdate(i));
                *block = i;
            }
            myArray.write();
            file.flush(H5F_SCOPE_LOCAL);
            file.close();
            checkNumbers(testCase);
        } catch (Exception &exception) {
            cerr << exception.getCDetailMsg() << endl;
            CuAssertTrue(testCase, 0);
        } catch (...) {
            CuAssertTrue(testCase, 0);
        }
        teardown();
    }
}
Example No. 5
int TestCompress()
{

	unsigned int flags = 0;
    unsigned int config = 0;
    size_t cd_nelemts = 0;

    TESTING("compression")
#ifdef H5_HAVE_FILTER_DEFLATE
    try {
        /* Create packet table with compression. */
        FL_PacketTable wrapper(fileID, "/compressTest", H5T_NATIVE_CHAR, 100, 8);

        /* Create an HDF5 C++ file object */
        H5File file;
        file.setId(fileID);

        /* Make sure that the deflate filter is set by opening the packet table
         * as a dataset and getting its creation property list */
        DataSet dsetID = file.openDataSet("/compressTest");

        DSetCreatPropList dcplID = dsetID.getCreatePlist();

        dcplID.getFilterById(H5Z_FILTER_DEFLATE, flags, cd_nelemts, NULL, 0, NULL, config);
    } catch (Exception e) {
      H5_FAILED();
      return 1;
    }
    PASSED();
#else
    SKIPPED();
    puts("    deflate filter not enabled");
#endif /* H5_HAVE_FILTER_DEFLATE */
    return 0;
}
Example No. 6
void HDF5Genome::setGenomeBottomDimensions(
  const vector<Sequence::UpdateInfo>& bottomDimensions)
{
  hal_size_t numBottomSegments = 0;
  for (vector<Sequence::UpdateInfo>::const_iterator i
          = bottomDimensions.begin(); i != bottomDimensions.end(); 
       ++i)
  {
    numBottomSegments += i->_numSegments;
  }
  H5::Exception::dontPrint();
  try
  {
    DataSet d = _group.openDataSet(bottomArrayName);
    _group.unlink(bottomArrayName);
  }
  catch (H5::Exception){}
  hal_size_t numChildren = _alignment->getChildNames(_name).size();
 
  // scale down the chunk size in order to keep chunks proportional to
  // the size of a bottom segment with two children.
  hsize_t chunk;
  _dcprops.getChunk(1, &chunk);  
  double scale = numChildren < 10 ? 1. : 10. / numChildren;
  chunk *= scale;
  DSetCreatPropList botDC;
  botDC.copy(_dcprops);
  botDC.setChunk(1, &chunk);

  _bottomArray.create(&_group, bottomArrayName, 
                      HDF5BottomSegment::dataType(numChildren), 
                      numBottomSegments + 1, &botDC, _numChunksInArrayBuffer);
  _numChildrenInBottomArray = numChildren;
  _childCache.clear();
}
Example No. 7
static void test_null_filter()
{
    // Output message about test being performed
    SUBTEST("'Null' filter");
    try {
	//hsize_t  null_size;          // Size of dataset with null filter

	// Prepare dataset create property list
	DSetCreatPropList dsplist;
	dsplist.setChunk(2, chunk_size);

	if (H5Zregister (H5Z_BOGUS)<0)
            throw Exception("test_null_filter", "H5Zregister failed");

	// Set some pretend filter
	dsplist.setFilter(H5Z_FILTER_BOGUS);

	// this function is just a stub right now; will work on it later - BMR
	//if(test_filter_internal(file,DSET_BOGUS_NAME,dc,DISABLE_FLETCHER32,DATA_NOT_CORRUPTED,&null_size)<0)
        //  throw Exception("test_null_filter", "test_filter_internal failed");

	// Close objects.
	dsplist.close();
	PASSED();
    } // end of try

    // catch all other exceptions
    catch (Exception E)
    {
        issue_fail_msg("test_null_filter()", __LINE__, __FILE__, E.getCDetailMsg());
    }
}  // test_null_filter
Example No. 8
HDF5HandlerBase::HDF5HandlerBase(const std::string &fileName, const std::string &datasetName)
    : FILE_NAME(H5std_string(fileName))
    , DATASETNAME(H5std_string(datasetName))
{


    try
    {

        Exception::dontPrint();

        file = H5File(FILE_NAME, H5F_ACC_TRUNC);

        hsize_t dims[1] = {0};
        hsize_t maxdims[1] = {H5S_UNLIMITED};
        hsize_t chunk_dims[1] = {10000};

        DataSpace dataspace = DataSpace(1,dims,maxdims);

        DSetCreatPropList prop;
        prop.setChunk(1, chunk_dims);

        dataset = file.createDataSet( DATASETNAME,
	                         PredType::STD_I32BE, dataspace, prop);

        prop.close();
        dataspace.close();
    } catch (Exception &error) {
        // Throw FileIException, DataSetIException, DataSpaceIException
        throw;
    }

}
Example No. 9
HDF5RecordingData::HDF5RecordingData(DataSet* data)
{
    DataSpace dSpace;
    DSetCreatPropList prop;
    ScopedPointer<DataSet> dataSet = data;
    hsize_t dims[3], chunk[3];

    dSpace = dataSet->getSpace();
    prop = dataSet->getCreatePlist();

    dimension = dSpace.getSimpleExtentDims(dims);
    prop.getChunk(dimension,chunk);

    this->size[0] = dims[0];
    if (dimension > 1)
        this->size[1] = dims[1];
    else
        this->size[1] = 1;
    if (dimension > 2)
        this->size[2] = dims[2];
    else
        this->size[2] = 1;

    this->xChunkSize = chunk[0];
    this->xPos = dims[0];
    this->dSet = dataSet;
    this->rowXPos.clear();
    this->rowXPos.insertMultiple(0,0,this->size[1]);
}
Example No. 10
void HDF5CLParser::applyToDCProps(DSetCreatPropList& dcprops) const
{
  if (hasOption("chunk"))
  {
    hsize_t chunk = getOption<hsize_t>("chunk");
    hsize_t deflate = getOption<hsize_t>("deflate");
    dcprops.setChunk(1, &chunk);
    dcprops.setDeflate(deflate);
  }
}
Example No. 11
HDF5RecordingData* HDF5FileBase::createDataSet(DataTypes type, int dimension, int* size, int* chunking, String path)
{
    ScopedPointer<DataSet> data;
    DSetCreatPropList prop;
    if (!opened) return nullptr;

    //Right now these classes don't support datasets with rank > 3.
    //If needed in the future we can extend them to generic rank.
    if ((dimension > 3) || (dimension < 1)) return nullptr;

    DataType H5type = getH5Type(type);

    hsize_t dims[3], chunk_dims[3], max_dims[3];

    for (int i=0; i < dimension; i++)
    {
        dims[i] = size[i];
        if (chunking[i] > 0)
        {
            chunk_dims[i] = chunking[i];
            max_dims[i] = H5S_UNLIMITED;
        }
        else
        {
            chunk_dims[i] = size[i];
            max_dims[i] = size[i];
        }
    }

    try
    {
        DataSpace dSpace(dimension,dims,max_dims);
        prop.setChunk(dimension,chunk_dims);

        data = new DataSet(file->createDataSet(path.toUTF8(),H5type,dSpace,prop));
        return new HDF5RecordingData(data.release());
    }
    catch (DataSetIException error)
    {
        error.printError();
        return nullptr;
    }
    catch (FileIException error)
    {
        error.printError();
        return nullptr;
    }
    catch (DataSpaceIException error)
    {
        error.printError();
        return nullptr;
    }


}
Example No. 12
void hdf5DNATypeTest(CuTest *testCase)
{
  for (hsize_t chunkIdx = 0; chunkIdx < numSizes; ++chunkIdx)
  {
    hsize_t chunkSize = chunkSizes[chunkIdx];
    setup();
    try 
    {
      PredType datatype = HDF5DNA::dataType();
      H5File file(H5std_string(fileName), H5F_ACC_TRUNC);

      HDF5ExternalArray myArray;
      DSetCreatPropList cparms;
      if (chunkSize > 0)
      {
        cparms.setChunk(1, &chunkSize);
      }
      hsize_t NEVEN = N % 2 ? N + 1 : N;
      myArray.create(&file, datasetName, datatype, NEVEN / 2, &cparms);
      for (hsize_t i = 0; i < NEVEN / 2; ++i)
      {
        unsigned char value = 0U;
        HDF5DNA::pack(idxToDNA(i * 2), i * 2, value);
        HDF5DNA::pack(idxToDNA((i * 2) + 1), (i * 2) + 1, value);
        myArray.setValue(i, 0, value);
      }
      myArray.write();
      file.flush(H5F_SCOPE_LOCAL);
      file.close();

      H5File rfile(H5std_string(fileName), H5F_ACC_RDONLY);
      HDF5ExternalArray readArray;
      readArray.load(&rfile, datasetName);
      for (hsize_t i = 0; i < NEVEN / 2; ++i)
      {
        unsigned char value = readArray.getValue<unsigned char>(i, 0);
        char v1 = HDF5DNA::unpack(0, value);
        char v2 = HDF5DNA::unpack(1, value);
        CuAssertTrue(testCase, v1 == idxToDNA(i * 2));
        CuAssertTrue(testCase, v2 == idxToDNA((i * 2) + 1));
      }
    }
    catch(Exception& exception)
    {
      cerr << exception.getCDetailMsg() << endl;
      CuAssertTrue(testCase, 0);
    }
    catch(...)
    {
      CuAssertTrue(testCase, 0);
    }
    teardown();
  }
}
Example No. 13
void write_hdf5_image(H5File h5f, const char *name, const Mat &im)
{
    DSetCreatPropList cparms;
    hsize_t chunk_dims[2] = {256, 256};
    hsize_t dims[2];
    dims[0] = im.size().height;
    dims[1] = im.size().width;
  
    if (chunk_dims[0] > dims[0]) {
        chunk_dims[0] = dims[0];
    }
    if (chunk_dims[1] > dims[1]) {
        chunk_dims[1] = dims[1];
    }

    cparms.setChunk(2, chunk_dims);
    cparms.setShuffle();
    cparms.setDeflate(5);

    DataSet dataset = h5f.createDataSet(name, PredType::NATIVE_FLOAT,
                                        DataSpace(2, dims, dims),
                                        cparms);

    Mat image;
    if (im.type() !=  CV_32F)
        im.convertTo(image, CV_32F);
    else
        image = im;
    
    DataSpace imspace;
    float *imdata;
    if (image.isContinuous()) {
        imspace = dataset.getSpace(); // same extent as the dataset
        imspace.selectAll();
        imdata = image.ptr<float>();
    } else {
        // we are working with an ROI
        assert (image.isSubmatrix());
        Size parent_size; Point parent_ofs;
        image.locateROI(parent_size, parent_ofs);
        hsize_t parent_count[2];
        parent_count[0] = parent_size.height; parent_count[1] = parent_size.width;
        imspace.setExtentSimple(2, parent_count);
        hsize_t im_offset[2], im_size[2];
        im_offset[0] = parent_ofs.y; im_offset[1] = parent_ofs.x;
        im_size[0] = image.size().height; im_size[1] = image.size().width;
        imspace.selectHyperslab(H5S_SELECT_SET, im_size, im_offset);
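        // Rebase the pointer from the ROI start back to the parent matrix
        // origin, so the hyperslab selection above indexes memory correctly.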
        imdata = image.ptr<float>() - parent_ofs.x - parent_ofs.y * parent_size.width;
    }
    dataset.write(imdata, PredType::NATIVE_FLOAT, imspace);
}
Example No. 14
static DataSet create_dataset(H5File h5f, const char *name)
{
    DSetCreatPropList cparms;
    hsize_t chunk_dims[2] = {256, 256};
    hsize_t dims[2];
    cparms.setChunk(2, chunk_dims);
    cparms.setShuffle();
    cparms.setDeflate(5);
    dims[0] = imsize.height;
    dims[1] = imsize.width;
  
    return h5f.createDataSet(name, PredType::NATIVE_FLOAT,
                             DataSpace(2, dims, dims),
                             cparms);
}
Example No. 15
// When the column header is complete, create a table with
// appropriately typed columns and prepare to write data to it.
void end_column (void* state)
{
  program_state_t* s = (program_state_t*)state;

  // Create a global dataspace.
  s->current_dims = 0;
  hsize_t max_dims = H5S_UNLIMITED;
  DataSpace global_dataspace(1, &s->current_dims, &max_dims);

  // Define an HDF5 datatype based on the Byfl column header.
  construct_hdf5_datatype(s);

  // Create a dataset.  Enable chunking (required because of the
  // H5S_UNLIMITED dimension) and deflate compression (optional).
  DSetCreatPropList proplist;
  proplist.setChunk(1, &chunk_size);
  proplist.setDeflate(9);    // Maximal compression
  s->dataset = s->hdf5file.createDataSet(s->table_name, s->datatype,
                                         global_dataspace, proplist);
}
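Appending a row later would follow the usual unlimited-dataset pattern; a hedged sketch assuming one record of s->datatype is written per call (record is hypothetical):

s->current_dims++;                                   // one more row
s->dataset.extend(&s->current_dims);                 // grow the dataset
DataSpace fspace = s->dataset.getSpace();
hsize_t offset = s->current_dims - 1, one = 1;
fspace.selectHyperslab(H5S_SELECT_SET, &one, &offset);
DataSpace mspace(1, &one);
s->dataset.write(&record, s->datatype, mspace, fspace);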
Example No. 16
//--------------------------------------------------------------------------
// Function:	CommonFG::createDataSet
///\brief	Creates a new dataset at this location.
///\param	name  - IN: Name of the dataset to create
///\param	data_type - IN: Datatype of the dataset
///\param	data_space - IN: Dataspace for the dataset
///\param	create_plist - IN: Creation property list for the dataset
///\return	DataSet instance
///\exception	H5::FileIException or H5::GroupIException
// Programmer	Binh-Minh Ribler - 2000
//--------------------------------------------------------------------------
DataSet CommonFG::createDataSet( const char* name, const DataType& data_type, const DataSpace& data_space, const DSetCreatPropList& create_plist ) const
{
   // Obtain identifiers for C API
   hid_t type_id = data_type.getId();
   hid_t space_id = data_space.getId();
   hid_t create_plist_id = create_plist.getId();

   // Call C routine H5Dcreate2 to create the named dataset
   hid_t dataset_id = H5Dcreate2( getLocId(), name, type_id, space_id, H5P_DEFAULT, create_plist_id, H5P_DEFAULT );

   // If the creation of the dataset failed, throw an exception
   if( dataset_id < 0 )
   {
      throwException("createDataSet", "H5Dcreate2 failed");
   }

   // No failure, create and return the DataSet object
   DataSet dataset( dataset_id );
   return( dataset );
}
Example No. 17
int main (void)
{
    hsize_t	i, j;

    // Try block to detect exceptions raised by any of the calls inside it
    try
    {
	/*
	 * Turn off the auto-printing when failure occurs so that we can
	 * handle the errors appropriately
	 */
	Exception::dontPrint();

	/*
	 * Open the file and the dataset.
	 */
	H5File file( FILE_NAME, H5F_ACC_RDONLY );
	DataSet dataset = file.openDataSet( DATASET_NAME );

	/*
	 * Get filespace for rank and dimension
	 */
	DataSpace filespace = dataset.getSpace();

	/*
	 * Get number of dimensions in the file dataspace
	 */
	int rank = filespace.getSimpleExtentNdims();

	/*
	 * Get and print the dimension sizes of the file dataspace
	 */
	hsize_t dims[2]; 	// dataset dimensions
	rank = filespace.getSimpleExtentDims( dims );
	cout << "dataset rank = " << rank << ", dimensions "
	     << (unsigned long)(dims[0]) << " x "
	     << (unsigned long)(dims[1]) << endl;

	/*
	 * Define the memory space to read dataset.
	 */
	DataSpace mspace1(RANK, dims);

	/*
	 * Read dataset back and display.
	 */
	int data_out[NX][NY];  // buffer for dataset to be read
	dataset.read( data_out, PredType::NATIVE_INT, mspace1, filespace );

	cout << "\n";
	cout << "Dataset: \n";
	for (j = 0; j < dims[0]; j++)
	{
	    for (i = 0; i < dims[1]; i++)
		cout << data_out[j][i] << " ";
	    cout << endl;
	}

	/*
	 *	    dataset rank 2, dimensions 10 x 5
	 *	    chunk rank 2, dimensions 2 x 5

	 *	    Dataset:
	 *	    1 1 1 3 3
	 *	    1 1 1 3 3
	 *	    1 1 1 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 */

	/*
	 * Read the third column from the dataset.
	 * First define memory dataspace, then define hyperslab
	 * and read it into column array.
	 */
	hsize_t col_dims[1];
	col_dims[0] = 10;
	DataSpace mspace2( RANKC, col_dims );

	/*
	 * Define the column (hyperslab) to read.
	 */
	hsize_t offset[2] = { 0, 2 };
	hsize_t  count[2] = { 10, 1 };
	int column[10];  // buffer for column to be read

	/*
	 * Define hyperslab and read.
	 */
	filespace.selectHyperslab( H5S_SELECT_SET, count, offset );
	dataset.read( column, PredType::NATIVE_INT, mspace2, filespace );

	cout << endl;
	cout << "Third column: " << endl;
	for (i = 0; i < 10; i++)
	    cout << column[i] << endl;

	/*
	 *	    Third column:
	 *	    1
	 *	    1
	 *	    1
	 *	    0
	 *	    0
	 *	    0
	 *	    0
	 *	    0
	 *	    0
	 *	    0
	 */

	/*
	 * Get creation properties list.
	 */
	DSetCreatPropList cparms = dataset.getCreatePlist();

	/*
	 * Check if dataset is chunked.
	 */
	hsize_t chunk_dims[2];
	int     rank_chunk;
	if( H5D_CHUNKED == cparms.getLayout() )
	{
	    /*
	     * Get chunking information: rank and dimensions
	     */
	    rank_chunk = cparms.getChunk( 2, chunk_dims);
	    cout << "chunk rank " << rank_chunk << "dimensions "
		<< (unsigned long)(chunk_dims[0]) << " x "
		<< (unsigned long)(chunk_dims[1]) << endl;

	    /*
	     * Define the memory space to read a chunk.
	     */
	    DataSpace mspace3( rank_chunk, chunk_dims );

	    /*
	     * Define chunk in the file (hyperslab) to read.
	     */
	    offset[0] = 2;
	    offset[1] = 0;
	    count[0]  = chunk_dims[0];
	    count[1]  = chunk_dims[1];
	    filespace.selectHyperslab( H5S_SELECT_SET, count, offset );

	    /*
	     * Read chunk back and display.
	     */
	    int chunk_out[2][5];   // buffer for chunk to be read
	    dataset.read( chunk_out, PredType::NATIVE_INT, mspace3, filespace );
	    cout << endl;
	    cout << "Chunk:" << endl;
	    for (j = 0; j < chunk_dims[0]; j++)
	    {
		for (i = 0; i < chunk_dims[1]; i++)
		    cout << chunk_out[j][i] << " ";
		cout << endl;
	    }
	    /*
	     *	 Chunk:
	     *	 1 1 1 0 0
	     *	 2 0 0 0 0
	     */
	}
    }  // end of try block

    // catch failure caused by the H5File operations
    catch( FileIException error )
    {
	error.printErrorStack();
	return -1;
    }

    // catch failure caused by the DataSet operations
    catch( DataSetIException error )
    {
	error.printErrorStack();
	return -1;
    }

    // catch failure caused by the DataSpace operations
    catch( DataSpaceIException error )
    {
	error.printErrorStack();
	return -1;
    }
    return 0;
}
Example No. 18
/*-------------------------------------------------------------------------
 * Function:	test_compact_vlstring
 *
 * Purpose:	Test storing VL strings in compact datasets.
 *
 * Return:	None
 *
 * Programmer:	Binh-Minh Ribler (use C version)
 *		January, 2007
 *
 *-------------------------------------------------------------------------
 */
static void test_compact_vlstring()
{
    // Output message about test being performed
    SUBTEST("VL Strings on Compact Dataset");

    try {
	// Create file
	H5File file1(FILENAME, H5F_ACC_TRUNC);

	// Create dataspace for datasets
        hsize_t dims1[] = {SPACE1_DIM1};
        DataSpace sid1(SPACE1_RANK, dims1);

	// Create a datatype to refer to
	StrType vlst(0, H5T_VARIABLE);

	// Create dataset create property list and set layout
	DSetCreatPropList plist;
	plist.setLayout(H5D_COMPACT);

	// Create a dataset
	DataSet dataset(file1.createDataSet("Dataset5", vlst, sid1, plist));

	// Write dataset to disk
	const char *wdata[SPACE1_DIM1] = {"one", "two", "three", "four"};
	dataset.write(wdata, vlst);

	// Read dataset from disk
	char *rdata[SPACE1_DIM1];   // Information read in
	dataset.read(rdata, vlst);

	// Compare data read in
	hsize_t i;
	for (i=0; i<SPACE1_DIM1; i++) {
	    if (HDstrlen(wdata[i])!=HDstrlen(rdata[i])) {
		TestErrPrintf("VL data lengths don't match!, strlen(wdata[%d])=%d, strlen(rdata[%d])=%d\n",(int)i,(int)strlen(wdata[i]),(int)i,(int)strlen(rdata[i]));
		continue;
	    } // end if
	    if (HDstrcmp(wdata[i],rdata[i]) != 0) {
		TestErrPrintf("VL data values don't match!, wdata[%d]=%s, rdata[%d]=%s\n",(int)i,wdata[i],(int)i,rdata[i]);
		continue;
	    } // end if
	} // end for

	// Reclaim the read VL data
	DataSet::vlenReclaim((void *)rdata, vlst, sid1);

	// Close objects and file
	dataset.close();
	vlst.close();
	sid1.close();
	plist.close();
	file1.close();

	PASSED();
    } // end try

    // Catch all exceptions.
    catch (Exception E)
    {
        issue_fail_msg("test_compact_vlstrings()", __LINE__, __FILE__, E.getCDetailMsg());
    }
}   // test_compact_vlstrings
Example No. 19
/*-------------------------------------------------------------------------
 * Function:	test_vlstrings_special
 *
 * Purpose:	Test VL string code for special string cases, nil and
 *		zero-sized.
 *
 * Return:	None
 *
 * Programmer:	Binh-Minh Ribler (use C version)
 *		January, 2007
 *
 *-------------------------------------------------------------------------
 */
static void test_vlstrings_special()
{
    const char *wdata[SPACE1_DIM1] = {"one", "two", "", "four"};
    const char *wdata2[SPACE1_DIM1] = {NULL, NULL, NULL, NULL};
    char *rdata[SPACE1_DIM1];   // Information read in

    // Output message about test being performed.
    SUBTEST("Special VL Strings");

    try {
	// Create file.
	H5File file1(FILENAME, H5F_ACC_TRUNC);

        // Create dataspace for datasets.
        hsize_t dims1[] = {SPACE1_DIM1};
        DataSpace sid1(SPACE1_RANK, dims1);

	// Create a datatype to refer to.
	StrType vlst(0, H5T_VARIABLE);

	// Create a dataset.
	DataSet dataset(file1.createDataSet("Dataset3", vlst, sid1));

	// Read from the dataset before writing data.
	dataset.read(rdata, vlst);

	// Check data read in.
	hsize_t i;      	// counting variable
	for (i=0; i<SPACE1_DIM1; i++)
	    if(rdata[i]!=NULL)
		TestErrPrintf("VL doesn't match!, rdata[%d]=%p\n",(int)i,rdata[i]);

	// Write dataset to disk, then read it back.
	dataset.write(wdata, vlst);
	dataset.read(rdata, vlst);

	// Compare data read in.
	for (i = 0; i < SPACE1_DIM1; i++) {
	    size_t wlen = HDstrlen(wdata[i]);
	    size_t rlen = HDstrlen(rdata[i]);
	    if(wlen != rlen) {
		TestErrPrintf("VL data lengths don't match!, strlen(wdata[%d])=%u, strlen(rdata[%d])=%u\n", (int)i, (unsigned)wlen, (int)i, (unsigned)rlen);
		continue;
	    } // end if
	    if(HDstrcmp(wdata[i],rdata[i]) != 0) {
		TestErrPrintf("VL data values don't match!, wdata[%d]=%s, rdata[%d]=%s\n", (int)i, wdata[i], (int)i, rdata[i]);
		continue;
	    } // end if
	} // end for

	// Reclaim the read VL data.
	DataSet::vlenReclaim((void *)rdata, vlst, sid1);

	// Close Dataset.
	dataset.close();

	/*
	 * Create another dataset to test nil strings.
	 */

	// Create the property list and set the fill value for the second
	// dataset.
	DSetCreatPropList dcpl;
	char *fill = NULL;	// Fill value
	dcpl.setFillValue(vlst, &fill);
	dataset = file1.createDataSet("Dataset4", vlst, sid1, dcpl);

	// Close dataset creation property list.
	dcpl.close();

	// Read from dataset before writing data.
	dataset.read(rdata, vlst);

	// Check data read in.
	for (i=0; i<SPACE1_DIM1; i++)
	  if(rdata[i]!=NULL)
	    TestErrPrintf("VL doesn't match!, rdata[%d]=%p\n",(int)i,rdata[i]);

	// Try to write nil strings to disk.
	dataset.write(wdata2, vlst);

	// Read nil strings back from disk.
	dataset.read(rdata, vlst);

	// Check data read in.
	for (i=0; i<SPACE1_DIM1; i++)
	  if(rdata[i]!=NULL)
	    TestErrPrintf("VL doesn't match!, rdata[%d]=%p\n",(int)i,rdata[i]);

	// Close objects and file.
	dataset.close();
	vlst.close();
	sid1.close();
	file1.close();

	PASSED();
    } // end try

    // Catch all exceptions.
    catch (Exception E)
    {
	issue_fail_msg("test_vlstrings_special()", __LINE__, __FILE__, E.getCDetailMsg());
    }
} // test_vlstrings_special
Example No. 20
void HDF5Genome::setDimensions(
  const vector<Sequence::Info>& sequenceDimensions,
  bool storeDNAArrays)
{
  _totalSequenceLength = 0;
  hal_size_t totalSeq = sequenceDimensions.size();
  hal_size_t maxName = 0;
  
  // Copy segment dimensions to use the external interface
  vector<Sequence::UpdateInfo> topDimensions;
  topDimensions.reserve(sequenceDimensions.size());
  vector<Sequence::UpdateInfo> bottomDimensions;
  bottomDimensions.reserve(sequenceDimensions.size());

  // Compute summary info from the list of sequence Dimensions
  for (vector<Sequence::Info>::const_iterator i = sequenceDimensions.begin();
       i != sequenceDimensions.end(); 
       ++i)
  {
    _totalSequenceLength += i->_length;
    maxName = max(static_cast<hal_size_t>(i->_name.length()), maxName);
    topDimensions.push_back(
      Sequence::UpdateInfo(i->_name, i->_numTopSegments));
    bottomDimensions.push_back(
      Sequence::UpdateInfo(i->_name, i->_numBottomSegments));
  }

  // Unlink the DNA and segment arrays if they exist (using 
  // exceptions is the only way I know how right now).  Note that
  // the file needs to be refactored to take advantage of the new
  // space. 
  H5::Exception::dontPrint();
  try
  {
    DataSet d = _group.openDataSet(dnaArrayName);
    _group.unlink(dnaArrayName);
  }
  catch (H5::Exception){}
  try
  {
    DataSet d = _group.openDataSet(sequenceIdxArrayName);
    _group.unlink(sequenceIdxArrayName);
  }
  catch (H5::Exception){}
  try
  {
    DataSet d = _group.openDataSet(sequenceNameArrayName);
    _group.unlink(sequenceNameArrayName);
  }
  catch (H5::Exception){}

  if (_totalSequenceLength > 0 && storeDNAArrays == true)
  {
    hal_size_t arrayLength = _totalSequenceLength / 2;
    if (_totalSequenceLength % 2)
    {
      ++arrayLength;
      _rup->set(rupGroupName, "1");
    }
    else
    {
      _rup->set(rupGroupName, "0");
    }
    hsize_t chunk;
    _dcprops.getChunk(1, &chunk);
    // enlarge chunk size because dna bases are so much smaller
    // than segments (about 30x).  We default to 10x enlargement
    // since they seem to compress about 3x worse.
    chunk *= dnaChunkScale;
    DSetCreatPropList dnaDC;
    dnaDC.copy(_dcprops);
    dnaDC.setChunk(1, &chunk);
    _dnaArray.create(&_group, dnaArrayName, HDF5DNA::dataType(), 
                     arrayLength, &dnaDC, _numChunksInArrayBuffer);
  }
  if (totalSeq > 0)
  {
    _sequenceIdxArray.create(&_group, sequenceIdxArrayName, 
                             HDF5Sequence::idxDataType(), 
                             totalSeq + 1, &_dcprops, _numChunksInArrayBuffer);

    _sequenceNameArray.create(&_group, sequenceNameArrayName, 
                              HDF5Sequence::nameDataType(maxName + 1), 
                              totalSeq, &_dcprops, _numChunksInArrayBuffer);

    writeSequences(sequenceDimensions);    
  }
  
  // Do the same as above for the segments. 
  setGenomeTopDimensions(topDimensions);
  setGenomeBottomDimensions(bottomDimensions);

  _parentCache = NULL;
  _childCache.clear();
}
Example No. 21
int
main(int argc, char **argv) {
  // Try block to detect exceptions raised by any of the calls inside it
  try {
    // Turn off the auto-printing when failure occurs so that we can
    // handle the errors appropriately
    H5std_string FILE_NAME(argv[1]);
    Exception::dontPrint();

    // Open the file and the dataset in the file.
    H5File file(FILE_NAME, H5F_ACC_RDONLY);

    DataSet dataset;
    H5std_string dataset_name;
    auto objCount(H5Fget_obj_count(file.getId(), H5F_OBJ_ALL));
    for (size_t i = 0; i != objCount; ++i)
      if (H5G_DATASET == file.getObjTypeByIdx(i)) {
	dataset_name = file.getObjnameByIdx(i);
	dataset = file.openDataSet(dataset_name);
      }

    auto datatype(dataset.getDataType());
    auto dataspace(dataset.getSpace());

    hsize_t dims_in[2];
    auto ndims(dataspace.getSimpleExtentDims(dims_in, NULL));

    hsize_t dims_out[2] = { DIM0, DIM1 };	// dataset dimensions

    double *buf = new double[dims_in[0] * dims_in[1]];

    // Read data.
    dataset.read(buf, PredType::NATIVE_DOUBLE);//, memspace, dataspace);

    H5std_string outFileName("out.h5");

    // Create a new file using the default property lists. 
    H5File outfile(outFileName, H5F_ACC_TRUNC);

    // Create the data space for the dataset.
    DataSpace *output_dataspace = new DataSpace(ndims, dims_out);

    hsize_t chunk_dims[2] = { 20, 20 };	// chunk dimensions
    // Modify dataset creation property to enable chunking
    DSetCreatPropList *plist = new DSetCreatPropList;
    plist->setChunk(2, chunk_dims);

    // Set ZLIB (DEFLATE) Compression using level 9.
    plist->setDeflate(9);

    // Create the attributes.
    const size_t numAttrs = file.getNumAttrs();
    for (size_t i = 0; i != numAttrs; ++i) {
      auto attr(file.openAttribute(i));
      auto output_attr(outfile.createAttribute(attr.getName(), 
					       attr.getDataType(), 
					       attr.getSpace()));

      switch (attr.getTypeClass()) {
      case H5T_FLOAT: {
	double buf;
    	attr.read(attr.getDataType(), &buf);
    	output_attr.write(attr.getDataType(), &buf);
      }
	break;
      case H5T_STRING: {
	char *buf = new char[(unsigned long)attr.getStorageSize()];
    	attr.read(attr.getDataType(), buf);
    	output_attr.write(attr.getDataType(), buf);
	delete [] buf;
      }
	break;
      default:
	break;
      }
    }

    // Create the dataset.      
    DataSet *output_dataset = new DataSet(outfile.createDataSet(dataset_name, datatype, *output_dataspace, *plist));

    // Write data to dataset.
    output_dataset->write(buf, datatype);

    // Close objects and file.  Either approach will close the HDF5 item.
    delete output_dataspace;
    delete output_dataset;
    delete plist;
    delete [] buf;
    file.close();
  }  // end of try block

  // catch failure caused by the H5File operations
  catch(FileIException &error) {
    error.printError();
    return -1;
  }

  // catch failure caused by the DataSet operations
  catch(DataSetIException &error) {
    error.printError();
    return -1;
  }

  // catch failure caused by the DataSpace operations
  catch(DataSpaceIException &error) {
    error.printError();
    return -1;
  }

  // catch failure caused by the Attribute operations
  catch (AttributeIException &error) {
    error.printError();
    return -1;
  }

  catch (std::exception &error) {
    std::cerr << error.what() << std::endl;
    return -1;
  }

  return 0;  // successfully terminated
}
Example No. 22
/*-------------------------------------------------------------------------
 * Function:	test_compression
 *
 * Purpose:	Tests dataset compression. If compression is requested when
 *		it hasn't been compiled into the library (such as when
 *		updating an existing compressed dataset) then data is sent to
 *		the file uncompressed but no errors are returned.
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:	Binh-Minh Ribler (using C version)
 *		Friday, January 5, 2001
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_compression(H5File& file)
{
#ifndef H5_HAVE_FILTER_DEFLATE
    const char		*not_supported;
    not_supported = "    Deflate compression is not enabled.";
#endif /* H5_HAVE_FILTER_DEFLATE */
    int		points[100][200];
    int		check[100][200];
    hsize_t	i, j, n;

    // Initialize the dataset
    for (i = n = 0; i < 100; i++)
    {
	for (j = 0; j < 200; j++) {
	    points[i][j] = (int)n++;
	}
    }
    char* tconv_buf = new char [1000];
    DataSet* dataset = NULL;
    try
    {
	const hsize_t	size[2] = {100, 200};
	// Create the data space
	DataSpace space1(2, size, NULL);

	// Create a small conversion buffer to test strip mining
	DSetMemXferPropList xfer;

	xfer.setBuffer (1000, tconv_buf, NULL);

	// Use chunked storage with compression
	DSetCreatPropList dscreatplist;

	const hsize_t	chunk_size[2] = {2, 25};
	dscreatplist.setChunk (2, chunk_size);
	dscreatplist.setDeflate (6);

#ifdef H5_HAVE_FILTER_DEFLATE
	SUBTEST("Compression (setup)");

	// Create the dataset
	dataset = new DataSet (file.createDataSet
	    (DSET_COMPRESS_NAME, PredType::NATIVE_INT, space1, dscreatplist));

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 1: Read uninitialized data.  It should be zero.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (uninitialized read)");

	dataset->read ((void*) check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	for (i=0; i<size[0]; i++) {
	    for (j=0; j<size[1]; j++) {
		if (0!=check[i][j]) {
		    H5_FAILED();
		    cerr << "    Read a non-zero value." << endl;
		    cerr << "    At index " << (unsigned long)i << "," <<
		   (unsigned long)j << endl;
		    throw Exception("test_compression", "Failed in uninitialized read");
		}
	    }
	}
	PASSED();

	/*----------------------------------------------------------------------
	* STEP 2: Test compression by setting up a chunked dataset and writing
	* to it.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (write)");

	for (i=n=0; i<size[0]; i++)
	{
	    for (j=0; j<size[1]; j++)
	    {
		points[i][j] = (int)n++;
	    }
	}

	dataset->write ((void*) points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 3: Try to read the data we just wrote.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (read)");

	// Read the dataset back
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in read");
	    }

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 4: Write new data over the top of the old data.  The new data is
	* random thus not very compressible, and will cause the chunks to move
	* around as they grow.  We only change values for the left half of the
	* dataset although we rewrite the whole thing.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (modify)");

	for (i=0; i<size[0]; i++)
	{
	    for (j=0; j<size[1]/2; j++)
	    {
	    	points[i][j] = rand ();
	    }
	}
	dataset->write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Read the dataset back and check it
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in modify");
	    }

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 5: Close the dataset and then open it and read it again.  This
	* insures that the compression message is picked up properly from the
	* object header.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (re-open)");

	// close this dataset to reuse the var
	delete dataset;

	dataset = new DataSet (file.openDataSet (DSET_COMPRESS_NAME));
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in re-open");
	    }

	PASSED();


	/*----------------------------------------------------------------------
	* STEP 6: Test partial I/O by writing to and then reading from a
	* hyperslab of the dataset.  The hyperslab does not line up on chunk
	* boundaries (we know that case already works from above tests).
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (partial I/O)");

	const hsize_t	hs_size[2] = {4, 50};
	const hsize_t	hs_offset[2] = {7, 30};
	for (i = 0; i < hs_size[0]; i++) {
	    for (j = 0; j < hs_size[1]; j++) {
		points[hs_offset[0]+i][hs_offset[1]+j] = rand ();
	    }
	}
	space1.selectHyperslab( H5S_SELECT_SET, hs_size, hs_offset );
	dataset->write ((void*)points, PredType::NATIVE_INT, space1, space1, xfer);
	dataset->read ((void*)check, PredType::NATIVE_INT, space1, space1, xfer);

	// Check that the values read are the same as the values written
	for (i=0; i<hs_size[0]; i++) {
	for (j=0; j<hs_size[1]; j++) {
	    if (points[hs_offset[0]+i][hs_offset[1]+j] !=
		check[hs_offset[0]+i][hs_offset[1]+j]) {
		H5_FAILED();
		cerr << "    Read different values than written.\n" << endl;
		cerr << "    At index " << (unsigned long)(hs_offset[0]+i) <<
		   "," << (unsigned long)(hs_offset[1]+j) << endl;

		cerr << "    At original: " << (int)points[hs_offset[0]+i][hs_offset[1]+j] << endl;
		cerr << "    At returned: " << (int)check[hs_offset[0]+i][hs_offset[1]+j] << endl;
		throw Exception("test_compression", "Failed in partial I/O");
	    }
	} // for j
	} // for i

	delete dataset;
	dataset = NULL;

	PASSED();

#else
	SUBTEST("deflate filter");
	SKIPPED();
	cerr << not_supported << endl;
#endif

	/*----------------------------------------------------------------------
	* STEP 7: Register an application-defined compression method and use it
	* to write and then read the dataset.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (app-defined method)");

        if (H5Zregister (H5Z_BOGUS)<0)
		throw Exception("test_compression", "Failed in app-defined method");
	if (H5Pset_filter (dscreatplist.getId(), H5Z_FILTER_BOGUS, 0, 0, NULL)<0)
	    throw Exception("test_compression", "Failed in app-defined method");
	dscreatplist.setFilter (H5Z_FILTER_BOGUS, 0, 0, NULL);

	DataSpace space2 (2, size, NULL);
	dataset = new DataSet (file.createDataSet (DSET_BOGUS_NAME, PredType::NATIVE_INT, space2, dscreatplist));

	dataset->write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in app-defined method");
	    }

	PASSED();

	/*----------------------------------------------------------------------
	* Cleanup
	*----------------------------------------------------------------------
	*/
	delete dataset;
	delete [] tconv_buf;
	return 0;
    } // end try

    // catch all dataset, file, space, and plist exceptions
    catch (Exception E)
    {
	cerr << " FAILED" << endl;
	cerr << "    <<<  " << E.getDetailMsg() << "  >>>" << endl << endl;

	// clean up and return with failure
	if (dataset != NULL)
	    delete dataset;
	if (tconv_buf)
	    delete [] tconv_buf;
	return -1;
    }
}   // test_compression
Example No. 23
/*
 *  Create data spaces and data sets of the hdf5 for recording histories.
 */
void Hdf5Recorder::initDataSet()
{
    // create the data space & dataset for burstiness history
    hsize_t dims[2];
    dims[0] = static_cast<hsize_t>(m_sim_info->epochDuration * m_sim_info->maxSteps);
    DataSpace dsBurstHist(1, dims);
    dataSetBurstHist = new DataSet(stateOut->createDataSet(nameBurstHist, PredType::NATIVE_INT, dsBurstHist));

    // create the data space & dataset for spikes history
    dims[0] = static_cast<hsize_t>(m_sim_info->epochDuration * m_sim_info->maxSteps * 100);
    DataSpace dsSpikesHist(1, dims);
    dataSetSpikesHist = new DataSet(stateOut->createDataSet(nameSpikesHist, PredType::NATIVE_INT, dsSpikesHist));

    // create the data space & dataset for xloc & yloc
    dims[0] = static_cast<hsize_t>(m_sim_info->totalNeurons);
    DataSpace dsXYloc(1, dims);
    dataSetXloc = new DataSet(stateOut->createDataSet(nameXloc, PredType::NATIVE_INT, dsXYloc));
    dataSetYloc = new DataSet(stateOut->createDataSet(nameYloc, PredType::NATIVE_INT, dsXYloc));

    // create the data space & dataset for neuron types
    dims[0] = static_cast<hsize_t>(m_sim_info->totalNeurons);
    DataSpace dsNeuronTypes(1, dims);
    dataSetNeuronTypes = new DataSet(stateOut->createDataSet(nameNeuronTypes, PredType::NATIVE_INT, dsNeuronTypes));

    // create the data space & dataset for neuron threshold
    dims[0] = static_cast<hsize_t>(m_sim_info->totalNeurons);
    DataSpace dsNeuronThresh(1, dims);
    dataSetNeuronThresh = new DataSet(stateOut->createDataSet(nameNeuronThresh, H5_FLOAT, dsNeuronThresh));

    // create the data space & dataset for simulation step duration
    dims[0] = static_cast<hsize_t>(1);
    DataSpace dsTsim(1, dims);
    dataSetTsim = new DataSet(stateOut->createDataSet(nameTsim, H5_FLOAT, dsTsim));

    // create the data space & dataset for simulation end time
    dims[0] = static_cast<hsize_t>(1);
    DataSpace dsSimulationEndTime(1, dims);
    dataSetSimulationEndTime = new DataSet(stateOut->createDataSet(nameSimulationEndTime, H5_FLOAT, dsSimulationEndTime));

    // probed neurons
    if (m_model->getLayout()->m_probed_neuron_list.size() > 0)
    {
        // create the data space & dataset for probed neurons
        dims[0] = static_cast<hsize_t>(m_model->getLayout()->m_probed_neuron_list.size());
        DataSpace dsProbedNeurons(1, dims);
        dataSetProbedNeurons = new DataSet(stateOut->createDataSet(nameProbedNeurons, PredType::NATIVE_INT, dsProbedNeurons));

        // create the data space & dataset for spikes of probed neurons

        // the data space with unlimited dimensions
        hsize_t maxdims[2];
        maxdims[0] = H5S_UNLIMITED;
        maxdims[1] = static_cast<hsize_t>(m_model->getLayout()->m_probed_neuron_list.size());

        // dataset dimensions at creation
        dims[0] = static_cast<hsize_t>(1);
        dims[1] = static_cast<hsize_t>(m_model->getLayout()->m_probed_neuron_list.size());
        DataSpace dsSpikesProbedNeurons(2, dims, maxdims);

        // set fill value for the dataset
        DSetCreatPropList cparms;
        uint64_t fill_val = 0;
        cparms.setFillValue( PredType::NATIVE_UINT64, &fill_val);

        // modify dataset creation properties, enable chunking
        hsize_t      chunk_dims[2];
        chunk_dims[0] = static_cast<hsize_t>(100);
        chunk_dims[1] = static_cast<hsize_t>(m_model->getLayout()->m_probed_neuron_list.size());
        cparms.setChunk( 2, chunk_dims );

        dataSetSpikesProbedNeurons = new DataSet(stateOut->createDataSet(nameSpikesProbedNeurons, PredType::NATIVE_UINT64, dsSpikesProbedNeurons, cparms));
    }

    // allocate data memories
    burstinessHist = new int[static_cast<int>(m_sim_info->epochDuration)];
    spikesHistory = new int[static_cast<int>(m_sim_info->epochDuration * 100)]; 
    memset(burstinessHist, 0, static_cast<int>(m_sim_info->epochDuration * sizeof(int)));
    memset(spikesHistory, 0, static_cast<int>(m_sim_info->epochDuration * 100 * sizeof(int)));

    // create the data space & dataset for spikes history of probed neurons
    if (m_model->getLayout()->m_probed_neuron_list.size() > 0)
    {
        // allocate data for spikesProbedNeurons
        spikesProbedNeurons = new vector<uint64_t>[m_model->getLayout()->m_probed_neuron_list.size()];

        // allocate memory to save offset
        offsetSpikesProbedNeurons = new hsize_t[m_model->getLayout()->m_probed_neuron_list.size()];
        memset(offsetSpikesProbedNeurons, 0, static_cast<int>(m_model->getLayout()->m_probed_neuron_list.size() * sizeof(hsize_t)));
    }
}
Example No. 24
int main (void)
{
    hsize_t dims[2] = { DIM0, DIM1 };	// dataset dimensions
    hsize_t chunk_dims[2] = { 20, 20 };	// chunk dimensions
    int     i,j, buf[DIM0][DIM1];

    // Try block to detect exceptions raised by any of the calls inside it
    try
    {
	// Turn off the auto-printing when failure occurs so that we can
	// handle the errors appropriately
	Exception::dontPrint();

	// Create a new file using the default property lists. 
	H5File file(FILE_NAME, H5F_ACC_TRUNC);

	// Create the data space for the dataset.
	DataSpace *dataspace = new DataSpace(2, dims);

	// Modify dataset creation property to enable chunking
	DSetCreatPropList  *plist = new  DSetCreatPropList;
	plist->setChunk(2, chunk_dims);

	// Set ZLIB (DEFLATE) Compression using level 6.
	// To use SZIP compression comment out this line.
	plist->setDeflate(6);

	// Uncomment these lines to set SZIP Compression
	// unsigned szip_options_mask = H5_SZIP_NN_OPTION_MASK;
	// unsigned szip_pixels_per_block = 16;
	// plist->setSzip(szip_options_mask, szip_pixels_per_block);
     
	// Create the dataset.      
	DataSet *dataset = new DataSet(file.createDataSet( DATASET_NAME, 
	                        PredType::STD_I32BE, *dataspace, *plist) );

	for (i = 0; i< DIM0; i++)
	  for (j=0; j<DIM1; j++)
	      buf[i][j] = i+j;

	// Write data to dataset.
	dataset->write(buf, PredType::NATIVE_INT);

	// Close objects and file.  Either approach will close the HDF5 item.
	delete dataspace;
	delete dataset;
	delete plist;
	file.close();

	// -----------------------------------------------
	// Re-open the file and dataset, retrieve filter 
	// information for dataset and read the data back.
	// -----------------------------------------------
	
	int        rbuf[DIM0][DIM1];
	int        numfilt;
	size_t     nelmts={1}, namelen={1};
	unsigned  flags, filter_info, cd_values[1], idx;
	char       name[1];
	H5Z_filter_t filter_type;

	// Open the file and the dataset in the file.
	file.openFile(FILE_NAME, H5F_ACC_RDONLY);
	dataset = new DataSet(file.openDataSet( DATASET_NAME));

	// Get the create property list of the dataset.
	plist = new DSetCreatPropList(dataset->getCreatePlist ());

	// Get the number of filters associated with the dataset.
	numfilt = plist->getNfilters();
	cout << "Number of filters associated with dataset: " << numfilt << endl;

	for (idx=0; idx < numfilt; idx++) {
	    nelmts = 0;

	    filter_type = plist->getFilter(idx, flags, nelmts, cd_values, namelen, name , filter_info);

	    cout << "Filter Type: ";

	    switch (filter_type) {
	      case H5Z_FILTER_DEFLATE:
	           cout << "H5Z_FILTER_DEFLATE" << endl;
	           break;
	      case H5Z_FILTER_SZIP:
	           cout << "H5Z_FILTER_SZIP" << endl; 
	           break;
	      default:
	           cout << "Other filter type included." << endl;
	      }
	}

	// Read data.
	dataset->read(rbuf, PredType::NATIVE_INT);

	delete plist; 
	delete dataset;
	file.close();	// can be skipped

    }  // end of try block

    // catch failure caused by the H5File operations
    catch(FileIException error)
    {
	error.printError();
	return -1;
    }

    // catch failure caused by the DataSet operations
    catch(DataSetIException error)
    {
	error.printError();
	return -1;
    }

    // catch failure caused by the DataSpace operations
    catch(DataSpaceIException error)
    {
	error.printError();
	return -1;
    }

    return 0;  // successfully terminated
}
Example No. 25
void SavingCtrlObj::HwSavingStream::prepare() {
	DEB_MEMBER_FUNCT();
	DEB_ALWAYS() << "Entering SavingCtrlObj prepare stream " << m_streamNb;
	std::string filename;
	if (m_suffix != ".hdf")
		THROW_HW_ERROR(lima::Error) << "Suffix must be .hdf";

	try {
		// Turn off the auto-printing when failure occurs so that we can
		// handle the errors appropriately
		H5::Exception::dontPrint();

		// Get the fully qualified filename
		char number[16];
		snprintf(number, sizeof(number), m_index_format.c_str(), m_next_number);
		filename = m_directory + DIR_SEPARATOR + m_prefix + number + m_suffix;
		DEB_TRACE() << "Opening filename " << filename << " with overwritePolicy " << m_overwritePolicy;

		if (m_overwritePolicy == "Overwrite") {
			// overwrite existing file
			m_file = new H5File(filename, H5F_ACC_TRUNC);
		} else if (m_overwritePolicy == "Abort") {
			// fail if file already exists
			m_file = new H5File(filename, H5F_ACC_EXCL);
		} else {
			THROW_CTL_ERROR(Error) << "Append and multiset  not supported !";
		}
		m_entry = new Group(m_file->createGroup("/entry"));
		string nxentry = "NXentry";
		write_h5_attribute(*m_entry, "NX_class", nxentry);
		string title = "Lima Hexitec detector";
		write_h5_dataset(*m_entry, "title", title);

		Size size;
		m_cam.getDetectorImageSize(size);
		m_nrasters = size.getHeight();
		m_npixels = size.getWidth();
		m_nframes = m_frames_per_file;
		{
			// ISO 8601 Time format
			time_t now;
			time(&now);
			char buf[sizeof("2011-10-08T07:07:09Z")];
			strftime(buf, sizeof(buf), "%FT%TZ", gmtime(&now));
			string stime = string(buf);
			write_h5_dataset(*m_entry, "start_time", stime);
		}
		Group instrument = Group(m_entry->createGroup("Instrument"));
		string nxinstrument = "NXinstrument";
		write_h5_attribute(instrument, "NX_class", nxinstrument);
		m_instrument_detector = new Group(instrument.createGroup("Hexitec"));
		string nxdetector = "NXdetector";
		write_h5_attribute(*m_instrument_detector, "NX_class", nxdetector);

		Group measurement = Group(m_entry->createGroup("measurement"));
		string nxcollection = "NXcollection";
		write_h5_attribute(measurement, "NX_class", nxcollection);
		m_measurement_detector = new Group(measurement.createGroup("Hexitec"));
		write_h5_attribute(*m_measurement_detector, "NX_class", nxdetector);

		Group det_info = Group(m_instrument_detector->createGroup("detector_information"));
		Group det_params = Group(det_info.createGroup("parameters"));
		double rate;
		m_cam.getFrameRate(rate);
		write_h5_dataset(det_params, "frame rate", rate);

		Group env_info = Group(det_info.createGroup("environment"));
		Camera::Environment env;
		m_cam.getEnvironmentalValues(env);
		write_h5_dataset(env_info, "humidity", env.humidity);
		write_h5_dataset(env_info, "ambientTemperature", env.ambientTemperature);
		write_h5_dataset(env_info, "asicTemperature", env.asicTemperature);
		write_h5_dataset(env_info, "adcTemperature", env.adcTemperature);
		write_h5_dataset(env_info, "ntcTemperature", env.ntcTemperature);

		Group oper_info = Group(det_info.createGroup("operating_values"));
		Camera::OperatingValues opvals;
		m_cam.getOperatingValues(opvals);
		write_h5_dataset(oper_info, "v3_3 ", opvals.v3_3);
		write_h5_dataset(oper_info, "hvMon", opvals.hvMon);
		write_h5_dataset(oper_info, "hvOut", opvals.hvOut);
		write_h5_dataset(oper_info, "v1_2", opvals.v1_2);
		write_h5_dataset(oper_info, "v1_8", opvals.v1_8);
		write_h5_dataset(oper_info, "v3", opvals.v3);
		write_h5_dataset(oper_info, "v2_5", opvals.v2_5);
		write_h5_dataset(oper_info, "v3_3ln", opvals.v3_3ln);
		write_h5_dataset(oper_info, "v1_65ln", opvals.v1_65ln);
		write_h5_dataset(oper_info, "v1_8ana", opvals.v1_8ana);
		write_h5_dataset(oper_info, "v3_8ana", opvals.v3_8ana);
		write_h5_dataset(oper_info, "peltierCurrent", opvals.peltierCurrent);
		write_h5_dataset(oper_info, "ntcTemperature", opvals.ntcTemperature);

		Group process_info = Group(det_info.createGroup("processing_values"));
		int value;
		m_cam.getLowThreshold(value);
		write_h5_dataset(process_info, "LowThreshold ", value);
		m_cam.getHighThreshold(value);
		write_h5_dataset(process_info, "HighThreshold", value);
		int binWidth;
		m_cam.getBinWidth(binWidth);
		write_h5_dataset(process_info, "BinWidth", binWidth);
        int specLen;
        m_cam.getSpecLen(specLen);
        write_h5_dataset(process_info, "SpecLen", specLen);
        int nbins = (specLen / binWidth);

        int saveOpt;
        m_cam.getSaveOpt(saveOpt);

        // StreamNb == 3 is a bit of a kludge for now!!
        if (saveOpt & Camera::SaveSummed && m_streamNb == 3) {
            DEB_TRACE() << "create the spectrum data structure in the file";
            // create the image data structure in the file
            hsize_t data_dims[2], max_dims[2];
            data_dims[1] = nbins;
            data_dims[0] = m_nframes;
            m_image_dataspace = new DataSpace(RANK_TWO, data_dims); // create new dspace
            m_image_dataset = new DataSet(
                    m_measurement_detector->createDataSet("spectrum", PredType::NATIVE_UINT64, *m_image_dataspace));
        } else {
            DEB_TRACE() << "create the image data structure in the file";
            // create the image data structure in the file
            hsize_t data_dims[3], max_dims[3];
            data_dims[1] = m_nrasters;
            data_dims[2] = m_npixels;
            data_dims[0] = m_nframes;
            max_dims[1] = m_nrasters;
            max_dims[2] = m_npixels;
            max_dims[0] = H5S_UNLIMITED;
            // Create property list for the dataset and setup chunk size
            DSetCreatPropList plist;
            hsize_t chunk_dims[3];
            // calculate an optimized chunk layout
            calculate_chunck(data_dims, chunk_dims, 2);
            plist.setChunk(RANK_THREE, chunk_dims);
            m_image_dataspace = new DataSpace(RANK_THREE, data_dims, max_dims); // create new dspace
            if (saveOpt & Camera::SaveHistogram && m_streamNb == 2) {
                m_image_dataset = new DataSet(
                    m_measurement_detector->createDataSet("raw_image", PredType::NATIVE_UINT32, *m_image_dataspace, plist));
            } else {
                m_image_dataset = new DataSet(
                    m_measurement_detector->createDataSet("raw_image", PredType::NATIVE_UINT16, *m_image_dataspace, plist));
            }
        }
	} catch (FileIException &error) {
		THROW_CTL_ERROR(Error) << "File " << filename << " not opened successfully";
	}
	// catch failure caused by the DataSet operations
	catch (DataSetIException& error) {
		error.printError();
		THROW_CTL_ERROR(Error) << "DataSet " << filename << " not created successfully";
	}
	// catch failure caused by the DataSpace operations
	catch (DataSpaceIException& error) {
		THROW_CTL_ERROR(Error) << "DataSpace " << filename << " not created successfully";
	}
	// catch failure caused by any other HDF5 error
	catch (H5::Exception &e) {
		THROW_CTL_ERROR(Error) << e.getCDetailMsg();
	}
	// catch anything not hdf5 related
	catch (Exception &e) {
		THROW_CTL_ERROR(Error) << e.getErrMsg();
	}
}
Example No. 26
void hdf5SequenceTypeTest(CuTest *testCase)
{
  for (hsize_t lengthIdx = 0; lengthIdx < numLengths; ++lengthIdx)
  {
    hsize_t length = maxNameLength[lengthIdx];
    for (hsize_t chunkIdx = 0; chunkIdx < numSizes; ++chunkIdx)
    {
      hsize_t chunkSize = chunkSizes[chunkIdx];
      setup();
      try 
      {
        CompType datatype = HDF5Sequence::dataType(length);
        H5File file(H5std_string(fileName), H5F_ACC_TRUNC);

        HDF5ExternalArray myArray;
        DSetCreatPropList cparms;
        if (chunkSize > 0)
        {
          cparms.setChunk(1, &chunkSize);
        }
        myArray.create(&file, datasetName, datatype, N, &cparms);
        hal_size_t totalTopSegments = 0;
        hal_size_t totalBottomSegments = 0;
        for (hsize_t i = 0; i < N; ++i)
        {
          HDF5Sequence sequence(NULL, &myArray, i);
          Sequence::Info seqInfo(genName(i, length), i * 2, i * 3, i * 4);
          sequence.set(i, seqInfo, totalTopSegments, totalBottomSegments);
          totalTopSegments += seqInfo._numTopSegments;
          totalBottomSegments += seqInfo._numBottomSegments;
        }
        myArray.write();
        file.flush(H5F_SCOPE_LOCAL);
        file.close();

        H5File rfile(H5std_string(fileName), H5F_ACC_RDONLY);
        HDF5ExternalArray readArray;
        readArray.load(&rfile, datasetName);

        for (hsize_t i = 0; i < N; ++i)
        {
          HDF5Sequence sequence(NULL, &readArray, i);
          CuAssertTrue(testCase,
                       sequence.getName() == genName(i, length));
          CuAssertTrue(testCase, 
                       sequence.getStartPosition() == i);
          CuAssertTrue(testCase,
                       sequence.getSequenceLength() == i * 2);
          CuAssertTrue(testCase,
                       sequence.getNumTopSegments() == i * 3);
          CuAssertTrue(testCase,
                       sequence.getNumBottomSegments() == i * 4);
        }
      }
      catch(Exception& exception)
      {
        cerr << exception.getCDetailMsg() << endl;
        CuAssertTrue(testCase, 0);
      }
      catch(...)
      {
        CuAssertTrue(testCase, 0);
      }
      teardown();
    }
  }
}
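
The test above goes through project-specific wrappers (HDF5Sequence, HDF5ExternalArray). A rough sketch of the same round trip with plain HDF5 C++ calls might look like this; the record layout, field names, and file name are invented for illustration.

// Sketch only (names invented): build a compound type, write N records
// to a chunked dataset, reopen the file and verify one field.
#include <H5Cpp.h>
#include <cassert>
#include <cstdint>
#include <cstdio>
using namespace H5;

struct SeqRecord {
    char     name[32];
    uint64_t start;
    uint64_t length;
};

void compoundRoundTrip()
{
    CompType ctype(sizeof(SeqRecord));
    ctype.insertMember("name",   HOFFSET(SeqRecord, name),
                       StrType(PredType::C_S1, 32));
    ctype.insertMember("start",  HOFFSET(SeqRecord, start),  PredType::NATIVE_UINT64);
    ctype.insertMember("length", HOFFSET(SeqRecord, length), PredType::NATIVE_UINT64);

    const hsize_t N = 10;
    SeqRecord recs[N];
    for (hsize_t i = 0; i < N; ++i) {
        snprintf(recs[i].name, sizeof(recs[i].name), "seq%llu",
                 (unsigned long long)i);
        recs[i].start  = i;
        recs[i].length = i * 2;
    }

    {
        H5File file("seq.h5", H5F_ACC_TRUNC);
        hsize_t dims[1] = {N};
        DSetCreatPropList cparms;
        hsize_t chunk = 5;
        cparms.setChunk(1, &chunk);
        DataSet dset = file.createDataSet("sequences", ctype,
                                          DataSpace(1, dims), cparms);
        dset.write(recs, ctype);
    }

    // Read back and spot-check one record (length was set to i * 2).
    H5File rfile("seq.h5", H5F_ACC_RDONLY);
    SeqRecord check[N];
    rfile.openDataSet("sequences").read(check, ctype);
    assert(check[3].length == 6);
}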
Example No. 27
void test_szip_filter(H5File& file1)
{
#ifdef H5_HAVE_FILTER_SZIP
    int      points[DSET_DIM1][DSET_DIM2], check[DSET_DIM1][DSET_DIM2];
    unsigned szip_options_mask=H5_SZIP_NN_OPTION_MASK;
    unsigned szip_pixels_per_block=4;

    // Output message about test being performed
    SUBTEST("szip filter (with encoder)");

    if ( h5_szip_can_encode() == 1) {
        char* tconv_buf = new char [1000];

        try {
            const hsize_t   size[2] = {DSET_DIM1, DSET_DIM2};

            // Create the data space
            DataSpace space1(2, size, NULL);

            // Create a small conversion buffer to test strip mining (?)
            DSetMemXferPropList xfer;
            xfer.setBuffer (1000, tconv_buf, NULL);

            // Prepare dataset create property list
            DSetCreatPropList dsplist;
            dsplist.setChunk(2, chunk_size);

            // Set up for szip compression
            dsplist.setSzip(szip_options_mask, szip_pixels_per_block);

            // Create a dataset with szip compression
            DataSpace space2 (2, size, NULL);
            DataSet dataset(file1.createDataSet (DSET_SZIP_NAME, PredType::NATIVE_INT, space2, dsplist));

            hsize_t i, j, n;
            for (i=n=0; i<size[0]; i++)
            {
                for (j=0; j<size[1]; j++)
                {
                    points[i][j] = (int)n++;
                }
            }

            // Write to the dataset then read back the values
            dataset.write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
            dataset.read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

            // Check that the values read are the same as the values written
            for (i = 0; i < size[0]; i++)
                for (j = 0; j < size[1]; j++)
                {
                    int status = check_values (i, j, points[i][j], check[i][j]);
                    if (status == -1)
                        throw Exception("test_szip_filter", "Failed in testing szip method");
                }
            dsplist.close();
            PASSED();
        } // end of try

        // catch all other exceptions
        catch (Exception& E)
        {
            issue_fail_msg("test_szip_filter()", __LINE__, __FILE__, E.getCDetailMsg());
        }

        delete[] tconv_buf;
    } // if szip presents
    else {
	SKIPPED();
    }

#else /* H5_HAVE_FILTER_SZIP */
    SUBTEST("szip filter");
    SKIPPED();
    puts("    Szip filter not enabled");
#endif /* H5_HAVE_FILTER_SZIP */
}  // test_szip_filter
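
h5_szip_can_encode() above is a helper from the HDF5 test harness. Outside the test suite, a hedged sketch of the same runtime probe using the public C filter API could look like this, falling back to deflate when no szip encoder is present:

// Sketch only: probe for a usable szip encoder at runtime, else use gzip.
#include <H5Cpp.h>
using namespace H5;

void setCompression(DSetCreatPropList& plist)
{
    unsigned info = 0;
    if (H5Zfilter_avail(H5Z_FILTER_SZIP) > 0 &&
        H5Zget_filter_info(H5Z_FILTER_SZIP, &info) >= 0 &&
        (info & H5Z_FILTER_CONFIG_ENCODE_ENABLED)) {
        // Same options as the test above: NN coding, 4 pixels per block.
        plist.setSzip(H5_SZIP_NN_OPTION_MASK, 4);
    } else {
        // Deflate (gzip) level 6; available when HDF5 was built with
        // zlib, which is the common case.
        plist.setDeflate(6);
    }
}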
Example No. 28
void SaveContainerHdf5::_writeFile(Data &aData, CtSaving::HeaderMap &aHeader, CtSaving::FileFormat aFormat) {
	DEB_MEMBER_FUNCT();
	if (aFormat == CtSaving::HDF5) {

		// get the proper data type
		PredType data_type(PredType::NATIVE_UINT8);
		switch (aData.type) {
		case Data::UINT8:
		       break;
		case Data::INT8:
		       data_type = PredType::NATIVE_INT8;
		       break;
		case Data::UINT16:
		       data_type = PredType::NATIVE_UINT16;
		       break;
		case Data::INT16:
		       data_type = PredType::NATIVE_INT16;
		       break;
		case Data::UINT32:
		       data_type = PredType::NATIVE_UINT32;
		       break;
		case Data::INT32:
		       data_type = PredType::NATIVE_INT32;
		       break;
		case Data::UINT64:
		       data_type = PredType::NATIVE_UINT64;
		       break;
		case Data::INT64:
		       data_type = PredType::NATIVE_INT64;
		       break;
		case Data::FLOAT:
		       data_type = PredType::NATIVE_FLOAT;
		       break;
		case Data::DOUBLE:
		       data_type = PredType::NATIVE_DOUBLE;
		       break;
		case Data::UNDEF:
		default:
		  THROW_CTL_ERROR(Error) << "Invalid image type";
		}

		try {
			if (!m_format_written) {
			       
			        // ISO 8601 Time format
			        time_t now;
				time(&now);
				char buf[sizeof("2011-10-08T07:07:09Z")];
				strftime(buf, sizeof(buf), "%FT%TZ", gmtime(&now));
				string stime = string(buf);
				write_h5_dataset(*m_entry,"start_time",stime);
				// write header only once into "parameters" group 
				// but we should write some keys into measurement, like motor_pos counter_pos (spec)???
				if (!aHeader.empty()) {
					for (map<string, string>::const_iterator it = aHeader.begin(); it != aHeader.end(); it++) {

						string key = it->first;
						string value = it->second;
						write_h5_dataset(*m_measurement_detector_parameters,key.c_str(),value);
					}
				}
				delete m_measurement_detector_parameters;
				m_measurement_detector_parameters = NULL;
					
				// create the image data structure in the file
				hsize_t data_dims[3], max_dims[3];
				data_dims[1] = aData.dimensions[1];
				data_dims[2] = aData.dimensions[0];
				data_dims[0] = m_nbframes;
				max_dims[1] = aData.dimensions[1];
				max_dims[2] = aData.dimensions[0];
				max_dims[0] = H5S_UNLIMITED;
				// Create property list for the dataset and setup chunk size
				DSetCreatPropList plist;
				hsize_t chunk_dims[3];
				// calculate an optimized chunking
				calculate_chunck(data_dims, chunk_dims, aData.depth());
				plist.setChunk(RANK_THREE, chunk_dims);

				m_image_dataspace = new DataSpace(RANK_THREE, data_dims, max_dims); // create new dspace
				m_image_dataset = new DataSet(m_measurement_detector->createDataSet("data", data_type, *m_image_dataspace, plist));
				string nxdata = "NXdata";
				write_h5_attribute(*m_image_dataset, "NX_class", nxdata);
				string image = "image"; 
				write_h5_attribute(*m_image_dataset, "interpretation", image);
				m_prev_images_written = 0;
				m_format_written = true;
			} else if (m_in_append && !m_is_multiset && !m_dataset_extended) {
				hsize_t allocated_dims[3];
				m_image_dataset = new DataSet(m_measurement_detector->openDataSet("data"));
				m_image_dataspace = new DataSpace(m_image_dataset->getSpace());
				m_image_dataspace->getSimpleExtentDims(allocated_dims);

				hsize_t data_dims[3];
				data_dims[1] = aData.dimensions[1];
				data_dims[2] = aData.dimensions[0];
				data_dims[0] = allocated_dims[0] + m_nbframes;

				if (data_dims[1] != allocated_dims[1] || data_dims[2] != allocated_dims[2]) {
					THROW_CTL_ERROR(Error) << "You are trying to extend the dataset with mismatching image dimensions";
				}

				m_image_dataset->extend(data_dims);
				m_image_dataspace->close();
				delete m_image_dataspace;
				m_image_dataspace = new DataSpace(m_image_dataset->getSpace());
				m_prev_images_written = allocated_dims[0];
				m_dataset_extended = true;
			}
			// write the image data
			hsize_t slab_dim[3];
			slab_dim[2] = aData.dimensions[0];
			slab_dim[1] = aData.dimensions[1];
			slab_dim[0] = 1;
			DataSpace slabspace = DataSpace(RANK_THREE, slab_dim);
			int image_nb = aData.frameNumber % m_nbframes;
			hsize_t start[] = { m_prev_images_written + image_nb, 0, 0 };
			hsize_t count[] = { 1, aData.dimensions[1], aData.dimensions[0] };
			m_image_dataspace->selectHyperslab(H5S_SELECT_SET, count, start);
			m_image_dataset->write((u_int8_t*) aData.data(), data_type, slabspace, *m_image_dataspace);

		// catch failure caused by the DataSet operations
		} catch (DataSetIException& error) {
			error.printError();
			THROW_CTL_ERROR(Error) << "DataSet not created successfully " << error.getCDetailMsg();
		}
		// catch failure caused by the DataSpace operations
		catch (DataSpaceIException& error) {
			THROW_CTL_ERROR(Error) << "DataSpace not created successfully " << error.getCDetailMsg();
		}
		// catch failure caused by any other HDF5 error
		catch (H5::Exception &e) {
			THROW_CTL_ERROR(Error) << e.getCDetailMsg();
		}
		// catch anything not hdf5 related
		catch (Exception &e) {
			THROW_CTL_ERROR(Error) << e.getErrMsg();
		}
	}
	DEB_RETURN();
}
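
The per-frame write at the end of the function reduces to a small hyperslab pattern. Here is a stripped-down sketch of it with placeholder names and a fixed uint16 pixel type:

// Sketch only: write one frame into slot frame_index of a 3-D dataset.
#include <H5Cpp.h>
#include <cstdint>
using namespace H5;

void writeFrame(DataSet& dset, const uint16_t* frame,
                hsize_t frame_index, hsize_t height, hsize_t width)
{
    // Memory dataspace: exactly one frame.
    hsize_t slab[3] = {1, height, width};
    DataSpace memspace(3, slab);

    // File dataspace: select the slot for this frame.
    DataSpace filespace = dset.getSpace();
    hsize_t start[3] = {frame_index, 0, 0};
    hsize_t count[3] = {1, height, width};
    filespace.selectHyperslab(H5S_SELECT_SET, count, start);

    dset.write(frame, PredType::NATIVE_UINT16, memspace, filespace);
}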
Example No. 29
/*-------------------------------------------------------------------------
 * Function:	test_multiopen
 *
 * Purpose:	Tests that a bug no longer exists.  If a dataset is opened
 *		twice and one of the handles is used to extend the dataset,
 *		then the other handle should return the new size when
 *		queried.
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:	Binh-Minh Ribler (using C version)
 *		Saturday, February 17, 2001
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_multiopen (H5File& file)
{

    SUBTEST("Multi-open with extending");

    DataSpace* space = NULL;
    try {

	// Create a dataset creation property list
	DSetCreatPropList dcpl;

	// Set chunk size to given size
	hsize_t		cur_size[1] = {10};
	dcpl.setChunk (1, cur_size);

	// Create a simple data space with unlimited size
	static hsize_t	max_size[1] = {H5S_UNLIMITED};
	space = new DataSpace (1, cur_size, max_size);

	// Create first dataset
	DataSet dset1 = file.createDataSet ("multiopen", PredType::NATIVE_INT, *space, dcpl);

	// Open again the first dataset from the file to another DataSet object.
	DataSet dset2 = file.openDataSet ("multiopen");

	// Release the dataspace
	delete space;
	space = NULL;

	// Extend the dimensionality of the first dataset
	cur_size[0] = 20;
	dset1.extend (cur_size);

	// Get the size from the second handle
	space = new DataSpace (dset2.getSpace());

	hsize_t		tmp_size[1];
	space->getSimpleExtentDims (tmp_size);
	if (cur_size[0]!=tmp_size[0])
	{
	    cerr << "    Got " << (int)tmp_size[0] << " instead of "
		    << (int)cur_size[0] << "!" << endl;
	    throw Exception("test_multiopen", "Failed in multi-open with extending");
	}

	// clean up and return with success
	delete space;
	PASSED();
	return 0;
    } // end try block

    // catch all dataset, file, space, and plist exceptions
    catch (Exception& E)
    {
	cerr << " FAILED" << endl;
	cerr << "    <<<  " << E.getDetailMsg() << "  >>>" << endl << endl;

	// clean up and return with failure
	if (space != NULL)
	    delete space;
	return -1;
    }
}   // test_multiopen
Example No. 30
long SaveContainerHdf5::_writeFile(void* f,Data &aData,
				   CtSaving::HeaderMap &aHeader,
				   CtSaving::FileFormat aFormat) {
        DEB_MEMBER_FUNCT();

        _File* file = (_File*)f;
		size_t buf_size = 0;
		
		// get the proper data type
		PredType data_type(PredType::NATIVE_UINT8);
		switch (aData.type) {
		case Data::UINT8:
		       break;
		case Data::INT8:
		       data_type = PredType::NATIVE_INT8;
		       break;
		case Data::UINT16:
		       data_type = PredType::NATIVE_UINT16;
		       break;
		case Data::INT16:
		       data_type = PredType::NATIVE_INT16;
		       break;
		case Data::UINT32:
		       data_type = PredType::NATIVE_UINT32;
		       break;
		case Data::INT32:
		       data_type = PredType::NATIVE_INT32;
		       break;
		case Data::UINT64:
		       data_type = PredType::NATIVE_UINT64;
		       break;
		case Data::INT64:
		       data_type = PredType::NATIVE_INT64;
		       break;
		case Data::FLOAT:
		       data_type = PredType::NATIVE_FLOAT;
		       break;
		case Data::DOUBLE:
		       data_type = PredType::NATIVE_DOUBLE;
		       break;
		case Data::UNDEF:
		default:
		  THROW_CTL_ERROR(Error) << "Invalid image type";
		}

		try {
			if (!file->m_format_written) {
			       
			        // ISO 8601 Time format
			        time_t now;
				time(&now);
				char buf[sizeof("2011-10-08T07:07:09Z")];
#ifdef WIN32
				struct tm gmtime_now;
				gmtime_s(&gmtime_now, &now);
				strftime(buf, sizeof(buf), "%FT%TZ", &gmtime_now);
#else
				strftime(buf, sizeof(buf), "%FT%TZ", gmtime(&now));
#endif
				string stime = string(buf);
				write_h5_dataset(*file->m_entry,"start_time",stime);
				// write header only once into "parameters" group 
				// but we should write some keys into measurement, like motor_pos counter_pos (spec)???
				if (!aHeader.empty()) {
					for (map<string, string>::const_iterator it = aHeader.begin(); it != aHeader.end(); it++) {

						string key = it->first;
						string value = it->second;
						write_h5_dataset(*file->m_measurement_detector_parameters,
								 key.c_str(),value);
					}
				}
				delete file->m_measurement_detector_parameters;
				file->m_measurement_detector_parameters = NULL;
					
				// create the image data structure in the file
				hsize_t data_dims[3], max_dims[3];
				data_dims[1] = aData.dimensions[1];
				data_dims[2] = aData.dimensions[0];
				data_dims[0] = m_nbframes;
				max_dims[1] = aData.dimensions[1];
				max_dims[2] = aData.dimensions[0];
				max_dims[0] = H5S_UNLIMITED;
				// Create property list for the dataset and setup chunk size
				DSetCreatPropList plist;
				hsize_t chunk_dims[RANK_THREE];
				// direct chunk write is used below, so the chunk is exactly one image
				chunk_dims[0] = 1; chunk_dims[1] = data_dims[1]; chunk_dims[2] = data_dims[2];
				
				plist.setChunk(RANK_THREE, chunk_dims);

#if defined(WITH_Z_COMPRESSION)
				if (aFormat == CtSaving::HDF5GZ)
				  plist.setDeflate(m_compression_level);
#endif
#if defined(WITH_BS_COMPRESSION)
				if (aFormat == CtSaving::HDF5BS) {
				  unsigned int opt_vals[2]= {0, BSHUF_H5_COMPRESS_LZ4};
				  plist.setFilter(BSHUF_H5FILTER, H5Z_FLAG_MANDATORY, 2, opt_vals);
				}
#endif
				// create new dataspace; passing max_dims makes the frame
				// dimension unlimited so the dataset can be extended in append mode
				file->m_image_dataspace = new DataSpace(RANK_THREE, data_dims, max_dims);
				file->m_image_dataset = 
				  new DataSet(file->m_measurement_detector->createDataSet("data",
											  data_type,
											  *file->m_image_dataspace,
											  plist));
				string nxdata = "NXdata";
				write_h5_attribute(*file->m_image_dataset, "NX_class", nxdata);
				string image = "image"; 
				write_h5_attribute(*file->m_image_dataset, "interpretation", image);
				file->m_prev_images_written = 0;
				file->m_format_written = true;
			} else if (file->m_in_append && !m_is_multiset && !file->m_dataset_extended) {
				hsize_t allocated_dims[3];
				file->m_image_dataset = new DataSet(file->m_measurement_detector->
								    openDataSet("data"));
				file->m_image_dataspace = new DataSpace(file->m_image_dataset->getSpace());
				file->m_image_dataspace->getSimpleExtentDims(allocated_dims);

				hsize_t data_dims[3];
				data_dims[1] = aData.dimensions[1];
				data_dims[2] = aData.dimensions[0];
				data_dims[0] = allocated_dims[0] + m_nbframes;

				if (data_dims[1] != allocated_dims[1] || data_dims[2] != allocated_dims[2]) {
					THROW_CTL_ERROR(Error) << "You are trying to extend the dataset with mismatching image dimensions";
				}

				file->m_image_dataset->extend(data_dims);
				file->m_image_dataspace->close();
				delete file->m_image_dataspace;
				file->m_image_dataspace = new DataSpace(file->m_image_dataset->getSpace());
				file->m_prev_images_written = allocated_dims[0];
				file->m_dataset_extended = true;
			}
			// write the image data; in append mode, offset past the frames
			// written by previous sequences
			hsize_t image_nb = file->m_prev_images_written + (aData.frameNumber % m_nbframes);

			// we test direct chunk write
			hsize_t offset[RANK_THREE] = {image_nb, 0U, 0U};
			uint32_t filter_mask = 0; 
			hid_t dataset = file->m_image_dataset->getId();
			herr_t  status;
			void * buf_data;
			hid_t dxpl;

			dxpl = H5Pcreate(H5P_DATASET_XFER);

			ZBufferType* buffers = NULL;
			if ((aFormat == CtSaving::HDF5GZ) || (aFormat == CtSaving::HDF5BS)) {
				// compressed data: with a single chunk per image,
				// only one buffer is allocated
				buffers = _takeBuffer(aData.frameNumber);
				buf_size = buffers->front()->used_size;
				buf_data = buffers->front()->buffer;
			} else {
				// uncompressed data: write the raw image buffer directly
				buf_data = aData.data();
				buf_size = aData.size();
			}
			//DEB_ALWAYS() << "Image #"<< aData.frameNumber << " buf_size = "<< buf_size;
			status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, buf_data);
			H5Pclose(dxpl);
			if (buffers) {
				delete buffers->front();
				delete buffers;
			}
			if (status < 0) {
				THROW_CTL_ERROR(Error) << "H5DOwrite_chunk() failed";
			}
		// catch failure caused by the DataSet operations
		} catch (DataSetIException& error) {
			error.printError();
			THROW_CTL_ERROR(Error) << "DataSet not created successfully " << error.getCDetailMsg();
		}
		// catch failure caused by the DataSpace operations
		catch (DataSpaceIException& error) {
			THROW_CTL_ERROR(Error) << "DataSpace not created successfully " << error.getCDetailMsg();
		}
		// catch failure caused by any other HDF5 error
		catch (H5::Exception &e) {
			THROW_CTL_ERROR(Error) << e.getCDetailMsg();
		}
		// catch anything not hdf5 related
		catch (Exception &e) {
			THROW_CTL_ERROR(Error) << e.getErrMsg();
		}

		DEB_RETURN();
		return buf_size;
}
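
A note on the direct chunk write used above: H5DOwrite_chunk comes from the H5DO high-level API and was absorbed into the core library as H5Dwrite_chunk in HDF5 1.10.3, with the same call shape. Below is a minimal sketch of writing one pre-compressed chunk, assuming zlib produced the buffer; that is an assumption for illustration, since the code above obtains its buffers from _takeBuffer().

// Sketch only: bypass the filter pipeline and write a chunk that was
// compressed by the application (here with zlib, as an assumption).
#include <hdf5.h>
#include <zlib.h>
#include <vector>

herr_t writeCompressedChunk(hid_t dset, const hsize_t offset[3],
                            const unsigned char* raw, size_t raw_size)
{
    // Compress one chunk's worth of pixels; the deflate filter must be
    // set on the dataset so that readers can decompress the chunk.
    uLongf comp_size = compressBound(raw_size);
    std::vector<unsigned char> comp(comp_size);
    if (compress2(comp.data(), &comp_size, raw, raw_size, 6) != Z_OK)
        return -1;

    // filter_mask = 0 means "all dataset filters were applied to this chunk".
    return H5Dwrite_chunk(dset, H5P_DEFAULT, 0, offset,
                          (size_t)comp_size, comp.data());
}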