Example 1
#include <Rcpp.h>
#include "H5Cpp.h"
using namespace Rcpp;
using namespace H5;

//'@title Function to write a matrix chunk to file
//'
//'@description Function is intended for internal use
//'
//'@param dset character denoting the meta data name of the data set
//'@param chunk matrix that will be written to h5file
//'@param dim numeric containing the dimension of the matrix that will be written to file
//'@param filePath character denoting the location of the h5 file
//'@return int 0
// [[Rcpp::export]]
int h5WriteDoubleMat (std::string dset, SEXP chunk, NumericVector dim, std::string filePath)
{
  // Open the file for read/write; a stack object avoids leaking the
  // heap-allocated H5File in the original, which was closed but never deleted.
  H5File file(filePath, H5F_ACC_RDWR);
  
  // Data initialization.
  const int rank = 2;              // const so the array below has constant size
  hsize_t dims[rank];              // dataset dimensions
  for(int k = 0; k < rank; k++)
    dims[k] = dim(k);
  const void *buf = REAL(chunk);
  
  // Create the data space for the dataset.
  DataSpace dataspace (rank, dims, NULL);
  
  // Create the dataset.
  H5std_string dsetName(dset);
  DataSet dataset = file.createDataSet(dsetName, PredType::NATIVE_DOUBLE, dataspace);

  // Write the data to the dataset using default memory space, file
  // space, and transfer properties.
  dataset.write(buf, PredType::NATIVE_DOUBLE);
  dataset.close();
  file.close();
  return 0;
}
Example 2
    template <typename Scalar>
    void writeScalar(H5File &file, const string &DSitem, Scalar value){
	// A scalar dataspace holds exactly one element; the original
	// `hsize_t dim[0]` was an illegal zero-length array.
	DataSpace dsp(H5S_SCALAR);
	PredType type = getPredType<Scalar>();
	DataSet item = file.createDataSet(DSitem, type, dsp);
	item.write(&value, type);
    }
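
The snippet relies on a getPredType helper that is not shown; a minimal sketch of what it might look like, assuming full specializations that map C++ arithmetic types onto the HDF5 predefined types:

    template <typename Scalar> PredType getPredType();
    template <> PredType getPredType<int>()    { return PredType::NATIVE_INT; }
    template <> PredType getPredType<float>()  { return PredType::NATIVE_FLOAT; }
    template <> PredType getPredType<double>() { return PredType::NATIVE_DOUBLE; }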
Example 3
void write_hdf5_image(H5File h5f, const char *name, const Mat &im)
{
    DSetCreatPropList cparms;
    hsize_t chunk_dims[2] = {256, 256};
    hsize_t dims[2];
    dims[0] = im.size().height;
    dims[1] = im.size().width;
  
    if (chunk_dims[0] > dims[0]) {
        chunk_dims[0] = dims[0];
    }
    if (chunk_dims[1] > dims[1]) {
        chunk_dims[1] = dims[1];
    }

    cparms.setChunk(2, chunk_dims);
    cparms.setShuffle();
    cparms.setDeflate(5);

    DataSet dataset = h5f.createDataSet(name, PredType::NATIVE_FLOAT,
                                        DataSpace(2, dims, dims),
                                        cparms);

    Mat image;
    if (im.type() !=  CV_32F)
        im.convertTo(image, CV_32F);
    else
        image = im;
    
    DataSpace imspace;
    float *imdata;
    if (image.isContinuous()) {
        imspace = dataset.getSpace(); // same extent as the dataset on file
        imspace.selectAll();
        imdata = image.ptr<float>();
    } else {
        // we are working with an ROI
        assert (image.isSubmatrix());
        Size parent_size; Point parent_ofs;
        image.locateROI(parent_size, parent_ofs);
        hsize_t parent_count[2];
        parent_count[0] = parent_size.height; parent_count[1] = parent_size.width;
        imspace.setExtentSimple(2, parent_count);
        hsize_t im_offset[2], im_size[2];
        im_offset[0] = parent_ofs.y; im_offset[1] = parent_ofs.x;
        im_size[0] = image.size().height; im_size[1] = image.size().width;
        imspace.selectHyperslab(H5S_SELECT_SET, im_size, im_offset);
        // Rewind the pointer from the ROI origin back to the parent matrix
        // origin; the hyperslab selection re-applies the offset during I/O.
        imdata = image.ptr<float>() - parent_ofs.x - parent_ofs.y * parent_size.width;
    }
    dataset.write(imdata, PredType::NATIVE_FLOAT, imspace);
}
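
A hypothetical call site (file and image names are illustrative, not from the original); the routine expects a single-channel image and a file opened for writing:

    #include <opencv2/opencv.hpp>
    #include "H5Cpp.h"
    using namespace H5;

    int main()
    {
        H5File h5f("images.h5", H5F_ACC_TRUNC);
        cv::Mat im = cv::imread("photo.png", cv::IMREAD_GRAYSCALE);
        write_hdf5_image(h5f, "photo", im);   // converts to CV_32F internally
        return 0;
    }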
Example 4
static DataSet create_dataset(H5File h5f, const char *name)
{
    // Note: `imsize` is a file-scope cv::Size assumed to be set elsewhere.
    DSetCreatPropList cparms;
    hsize_t chunk_dims[2] = {256, 256};
    hsize_t dims[2];
    dims[0] = imsize.height;
    dims[1] = imsize.width;

    // Clamp the chunk to the dataset extent: HDF5 rejects a chunk larger
    // than a fixed dimension (cf. Example 3).
    if (chunk_dims[0] > dims[0]) chunk_dims[0] = dims[0];
    if (chunk_dims[1] > dims[1]) chunk_dims[1] = dims[1];

    cparms.setChunk(2, chunk_dims);
    cparms.setShuffle();
    cparms.setDeflate(5);

    return h5f.createDataSet(name, PredType::NATIVE_FLOAT,
                             DataSpace(2, dims, dims),
                             cparms);
}
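
A sketch of the assumed surrounding context; the file-scope imsize and the call site below are illustrative, not from the original:

    static cv::Size imsize;   // assumed file-scope global read by create_dataset

    void save_frame(H5File &h5f, const cv::Mat &frame32f)  // continuous CV_32F
    {
        imsize = frame32f.size();
        DataSet ds = create_dataset(h5f, "frame0");
        ds.write(frame32f.ptr<float>(), PredType::NATIVE_FLOAT);
    }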
Example 5
/* Helper routine for test_vl_rewrite() */
static void write_scalar_dset(H5File& file, DataType& type, DataSpace& space,
				const char *name, char *data)
{
    DataSet dset;
    try {
	dset = file.createDataSet(name, type, space);
	// The element is the char* itself (a variable-length string),
	// hence the address of the pointer is passed.
	dset.write(&data, type, space, space);
	dset.close();
    } // end try
    catch (FileIException& ferr) {
	throw;
    }
    catch (DataSetIException& derr) {
	throw;
    }
}
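
A possible call site, assuming a variable-length string type and a scalar dataspace (all names illustrative):

    StrType vlst(PredType::C_S1);
    vlst.setSize(H5T_VARIABLE);          // variable-length C string
    DataSpace scalar_space(H5S_SCALAR);  // exactly one element
    char buf[] = "hello";
    write_scalar_dset(file, vlst, scalar_space, "dset_0", buf);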
Example 6
#include <iostream>
#include "H5Cpp.h"
using namespace H5;
using std::cout;
using std::endl;

// Constants assumed by the example (values follow the standard HDF5
// compound-type example, h5_compound.cpp).
const H5std_string FILE_NAME("SDScompound.h5");
const H5std_string DATASET_NAME("ArrayOfStructures");
const H5std_string MEMBER1("a_name");
const H5std_string MEMBER2("b_name");
const H5std_string MEMBER3("c_name");
const int LENGTH = 10;
const int RANK = 1;

int main(void)
{
   /* First structure  and dataset*/
   typedef struct s1_t {
	int    a;
	float  b;
	double c;
   } s1_t;

   /* Second structure (subset of s1_t)  and dataset*/
   typedef struct s2_t {
	double c;
	int    a;
   } s2_t;

   // Try block to detect exceptions raised by any of the calls inside it
   try
   {
      /*
       * Initialize the data
       */
      int  i;
      s1_t s1[LENGTH];
      for (i = 0; i< LENGTH; i++)
      {
         s1[i].a = i;
         s1[i].b = i*i;
         s1[i].c = 1./(i+1);
      }

      /*
       * Turn off the auto-printing when failure occurs so that we can
       * handle the errors appropriately
       */
      Exception::dontPrint();

      /*
       * Create the data space.
       */
      hsize_t dim[] = {LENGTH};   /* Dataspace dimensions */
      DataSpace space( RANK, dim );

      /*
       * Create the file.
       */
      H5File* file = new H5File( FILE_NAME, H5F_ACC_TRUNC );

      /*
       * Create the memory datatype.
       */
      CompType mtype1( sizeof(s1_t) );
      mtype1.insertMember( MEMBER1, HOFFSET(s1_t, a), PredType::NATIVE_INT);
      mtype1.insertMember( MEMBER3, HOFFSET(s1_t, c), PredType::NATIVE_DOUBLE);
      mtype1.insertMember( MEMBER2, HOFFSET(s1_t, b), PredType::NATIVE_FLOAT);

      /*
       * Create the dataset.
       */
      DataSet* dataset;
      dataset = new DataSet(file->createDataSet(DATASET_NAME, mtype1, space));

      /*
       * Write data to the dataset.
       */
      dataset->write( s1, mtype1 );

      /*
       * Release resources
       */
      delete dataset;
      delete file;

      /*
       * Open the file and the dataset.
       */
      file = new H5File( FILE_NAME, H5F_ACC_RDONLY );
      dataset = new DataSet (file->openDataSet( DATASET_NAME ));

      /*
       * Create a datatype for s2
       */
      CompType mtype2( sizeof(s2_t) );

      mtype2.insertMember( MEMBER3, HOFFSET(s2_t, c), PredType::NATIVE_DOUBLE);
      mtype2.insertMember( MEMBER1, HOFFSET(s2_t, a), PredType::NATIVE_INT);

      /*
       * Read two fields c and a from s1 dataset. Fields in the file
       * are found by their names "c_name" and "a_name".
       */
      s2_t s2[LENGTH];
      dataset->read( s2, mtype2 );

      /*
       * Display the fields
       */
      cout << endl << "Field c : " << endl;
      for( i = 0; i < LENGTH; i++)
	 cout << s2[i].c << " ";
      cout << endl;

      cout << endl << "Field a : " << endl;
      for( i = 0; i < LENGTH; i++)
	 cout << s2[i].a << " ";
      cout << endl;

      /*
       * Create a datatype for s3.
       */
      CompType mtype3( sizeof(float) );

      mtype3.insertMember( MEMBER2, 0, PredType::NATIVE_FLOAT);

      /*
       * Read field b from s1 dataset. Field in the file is found by its name.
       */
      float s3[LENGTH];  // Third "structure" - used to read float field of s1
      dataset->read( s3, mtype3 );

      /*
       * Display the field
       */
      cout << endl << "Field b : " << endl;
      for( i = 0; i < LENGTH; i++)
	 cout << s3[i] << " ";
      cout << endl;

      /*
       * Release resources
       */
      delete dataset;
      delete file;
   }  // end of try block

   // catch failure caused by the H5File operations
   catch( FileIException& error )
   {
      error.printErrorStack();
      return -1;
   }

   // catch failure caused by the DataSet operations
   catch( DataSetIException& error )
   {
      error.printErrorStack();
      return -1;
   }

   // catch failure caused by the DataSpace operations
   catch( DataSpaceIException& error )
   {
      error.printErrorStack();
      return -1;
   }

   // catch failure caused by the DataType operations
   catch( DataTypeIException& error )
   {
      error.printErrorStack();
      return -1;
   }

   return 0;
}
Example 7
void test_szip_filter(H5File& file1)
{
#ifdef H5_HAVE_FILTER_SZIP
    int      points[DSET_DIM1][DSET_DIM2], check[DSET_DIM1][DSET_DIM2];
    unsigned szip_options_mask=H5_SZIP_NN_OPTION_MASK;
    unsigned szip_pixels_per_block=4;

    // Output message about test being performed
    SUBTEST("szip filter (with encoder)");

    if ( h5_szip_can_encode() == 1) {
        char* tconv_buf = new char [1000];

        try {
            const hsize_t   size[2] = {DSET_DIM1, DSET_DIM2};

            // Create the data space
            DataSpace space1(2, size, NULL);

            // Create a small conversion buffer to test strip mining
            DSetMemXferPropList xfer;
            xfer.setBuffer (1000, tconv_buf, NULL);

            // Prepare dataset create property list
            DSetCreatPropList dsplist;
            dsplist.setChunk(2, chunk_size);

            // Set up for szip compression
            dsplist.setSzip(szip_options_mask, szip_pixels_per_block);

            // Create a dataset with szip compression
            DataSpace space2 (2, size, NULL);
            DataSet dataset(file1.createDataSet (DSET_SZIP_NAME, PredType::NATIVE_INT, space2, dsplist));

            hsize_t i, j, n;
            for (i=n=0; i<size[0]; i++)
            {
                for (j=0; j<size[1]; j++)
                {
                    points[i][j] = (int)n++;
                }
            }

            // Write to the dataset then read back the values
            dataset.write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
            dataset.read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

            // Check that the values read are the same as the values written
            for (i = 0; i < size[0]; i++)
                for (j = 0; j < size[1]; j++)
                {
                    int status = check_values (i, j, points[i][j], check[i][j]);
                    if (status == -1)
                        throw Exception("test_szip_filter", "Failed in testing szip method");
                }
            dsplist.close();
            PASSED();
        } // end of try

        // catch all other exceptions
        catch (Exception& E)
        {
            issue_fail_msg("test_szip_filter()", __LINE__, __FILE__, E.getCDetailMsg());
        }

        delete[] tconv_buf;
    } // if the szip encoder is present
    else {
	SKIPPED();
    }

#else /* H5_HAVE_FILTER_SZIP */
    SUBTEST("szip filter");
    SKIPPED();
    puts("    Szip filter not enabled");
#endif /* H5_HAVE_FILTER_SZIP */
}  // test_szip_filter
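
The test refers to several identifiers declared elsewhere in the suite (DSET_DIM1, DSET_DIM2, chunk_size, DSET_SZIP_NAME, plus the helpers h5_szip_can_encode and check_values). Plausible file-scope declarations, with illustrative values:

    const int DSET_DIM1 = 100;
    const int DSET_DIM2 = 200;
    const hsize_t chunk_size[2] = {2, 25};
    const H5std_string DSET_SZIP_NAME("szip_dset");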
Example 8
/*-------------------------------------------------------------------------
 * Function:	test_create
 *
 * Purpose:	Attempts to create a dataset.
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:	Binh-Minh Ribler (using C version)
 *		Friday, January 5, 2001
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_create( H5File& file)
{
    SUBTEST("create, open, close");

    // Setting this to NULL for cleaning up in failure situations
    DataSet *dataset = NULL;
    try {
	// Create a data space
	hsize_t     dims[2];
	dims[0] = 256;
	dims[1] = 512;
	DataSpace space (2, dims, NULL);

	// Create a dataset using the default dataset creation properties.
	// We're not sure what they are, so we won't check.
	dataset = new DataSet (file.createDataSet
		(DSET_DEFAULT_NAME, PredType::NATIVE_DOUBLE, space));


	// Add a comment to the dataset
	file.setComment (DSET_DEFAULT_NAME, "This is a dataset");

	// Close the dataset
	delete dataset;
	dataset = NULL;

	// Try creating a dataset that already exists.  This should fail since a
	// dataset can only be created once.  If an exception is not thrown for
	// this action by createDataSet, then throw an invalid action exception.
	try {
	    dataset = new DataSet (file.createDataSet
			(DSET_DEFAULT_NAME, PredType::NATIVE_DOUBLE, space));

	    // Reaching this point means no exception was thrown
	    throw InvalidActionException("H5File::createDataSet", "Library allowed overwrite of existing dataset");
	}
	catch (FileIException& E)	// catching invalid dataset creation
	{} // do nothing, exception expected

	// Open the dataset we created above and then close it.  This is one
	// way to open an existing dataset for accessing.
	dataset = new DataSet (file.openDataSet (DSET_DEFAULT_NAME));

	// Get and verify the name of this dataset, using
	// H5std_string getObjName()
	H5std_string ds_name = dataset->getObjName();
	verify_val(ds_name, DSET_DEFAULT_NAME_PATH, "DataSet::getObjName", __LINE__, __FILE__);

	// Get and verify the comment from this dataset, using
	// H5std_string getComment(const H5std_string& name, <buf_size=0, by default>)
	H5std_string comment = file.getComment(DSET_DEFAULT_NAME);
	verify_val(comment, "This is a dataset", "DataSet::getComment", __LINE__, __FILE__);

	// Close the dataset when accessing is completed
	delete dataset;

	// This is another way to open an existing dataset for accessing.
	DataSet another_dataset(file.openDataSet (DSET_DEFAULT_NAME));

	// Try opening a non-existent dataset.  This should fail so if an
	// exception is not thrown for this action by openDataSet, then
	// display failure information and throw an exception.
	try {
	    dataset = new DataSet (file.openDataSet( "does_not_exist" ));

	    // Reaching this point means no exception was thrown
	    throw InvalidActionException("H5File::openDataSet", "Attempted to open a non-existent dataset");
	}
	catch (FileIException& E) // catching opening of non-existent dataset
	{} // do nothing, exception expected

	// Create a new dataset that uses chunked storage instead of the default
	// layout.
	DSetCreatPropList create_parms;
	hsize_t     csize[2];
	csize[0] = 5;
	csize[1] = 100;
	create_parms.setChunk( 2, csize );

	dataset = new DataSet (file.createDataSet
		(DSET_CHUNKED_NAME, PredType::NATIVE_DOUBLE, space, create_parms));
	// Note: the corresponding C test prints no error message when this fails.

	// clean up and return with success
	delete dataset;

	PASSED();
	return 0;
    }	// outer most try block

    catch (InvalidActionException& E)
    {
	cerr << " FAILED" << endl;
	cerr << "    <<<  " << E.getDetailMsg() << "  >>>" << endl << endl;

	// clean up and return with failure
	if (dataset != NULL)
	    delete dataset;
	return -1;
    }
    // catch all other exceptions
    catch (Exception& E)
    {
	issue_fail_msg("test_create", __LINE__, __FILE__);

	// clean up and return with failure
	if (dataset != NULL)
	    delete dataset;
	return -1;
    }
}   // test_create
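
The manual `delete dataset` in every catch block is easy to get wrong; a sketch of a modern alternative (not part of the original test) holds the dataset in a std::unique_ptr so every exit path cleans up automatically:

    #include <memory>

    std::unique_ptr<DataSet> dataset(new DataSet(
        file.createDataSet(DSET_DEFAULT_NAME, PredType::NATIVE_DOUBLE, space)));
    // ... use *dataset as before; no delete needed on any path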
Example 9
/*-------------------------------------------------------------------------
 * Function:	test_multiopen
 *
 * Purpose:	Tests that a bug no longer exists.  If a dataset is opened
 *		twice and one of the handles is used to extend the dataset,
 *		then the other handle should return the new size when
 *		queried.
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:	Binh-Minh Ribler (using C version)
 *		Saturday, February 17, 2001
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_multiopen (H5File& file)
{

    SUBTEST("Multi-open with extending");

    DataSpace* space = NULL;
    try {

	// Create a dataset creation property list
	DSetCreatPropList dcpl;

	// Set chunk size to given size
	hsize_t		cur_size[1] = {10};
	dcpl.setChunk (1, cur_size);

	// Create a simple data space with unlimited size
	static hsize_t	max_size[1] = {H5S_UNLIMITED};
	space = new DataSpace (1, cur_size, max_size);

	// Create first dataset
	DataSet dset1 = file.createDataSet ("multiopen", PredType::NATIVE_INT, *space, dcpl);

	// Open the first dataset again, into a second DataSet object.
	DataSet dset2 = file.openDataSet ("multiopen");

	// Release the dataspace
	delete space;
	space = NULL;

	// Extend the dataset through the first handle
	cur_size[0] = 20;
	dset1.extend (cur_size);

	// Get the size from the second handle
	space = new DataSpace (dset2.getSpace());

	hsize_t		tmp_size[1];
	space->getSimpleExtentDims (tmp_size);
	if (cur_size[0]!=tmp_size[0])
	{
	    cerr << "    Got " << (int)tmp_size[0] << " instead of "
		    << (int)cur_size[0] << "!" << endl;
	    throw Exception("test_multiopen", "Failed in multi-open with extending");
	}

	// clean up and return with success
	delete space;
	PASSED();
	return 0;
    } // end try block

    // catch all dataset, file, space, and plist exceptions
    catch (Exception& E)
    {
	cerr << " FAILED" << endl;
	cerr << "    <<<  " << E.getDetailMsg() << "  >>>" << endl << endl;

	// clean up and return with failure
	if (space != NULL)
	    delete space;
	return -1;
    }
}   // test_multiopen
Example 10
/*-------------------------------------------------------------------------
 * Function:	test_compression
 *
 * Purpose:	Tests dataset compression. If compression is requested when
 *		it hasn't been compiled into the library (such as when
 *		updating an existing compressed dataset) then data is sent to
 *		the file uncompressed but no errors are returned.
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:	Binh-Minh Ribler (using C version)
 *		Friday, January 5, 2001
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_compression(H5File& file)
{
#ifndef H5_HAVE_FILTER_DEFLATE
    const char		*not_supported;
    not_supported = "    Deflate compression is not enabled.";
#endif /* H5_HAVE_FILTER_DEFLATE */
    int		points[100][200];
    int		check[100][200];
    hsize_t	i, j, n;

    // Initialize the dataset
    for (i = n = 0; i < 100; i++)
    {
	for (j = 0; j < 200; j++) {
	    points[i][j] = (int)n++;
	}
    }
    char* tconv_buf = new char [1000];
    DataSet* dataset = NULL;
    try
    {
	const hsize_t	size[2] = {100, 200};
	// Create the data space
	DataSpace space1(2, size, NULL);

	// Create a small conversion buffer to test strip mining
	DSetMemXferPropList xfer;

	xfer.setBuffer (1000, tconv_buf, NULL);

	// Use chunked storage with compression
	DSetCreatPropList dscreatplist;

	const hsize_t	chunk_size[2] = {2, 25};
	dscreatplist.setChunk (2, chunk_size);
	dscreatplist.setDeflate (6);

#ifdef H5_HAVE_FILTER_DEFLATE
	SUBTEST("Compression (setup)");

	// Create the dataset
	dataset = new DataSet (file.createDataSet
	    (DSET_COMPRESS_NAME, PredType::NATIVE_INT, space1, dscreatplist));

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 1: Read uninitialized data.  It should be zero.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (uninitialized read)");

	dataset->read ((void*) check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	for (i=0; i<size[0]; i++) {
	    for (j=0; j<size[1]; j++) {
		if (0!=check[i][j]) {
		    H5_FAILED();
		    cerr << "    Read a non-zero value." << endl;
		    cerr << "    At index " << (unsigned long)i << "," <<
		   (unsigned long)j << endl;
		    throw Exception("test_compression", "Failed in uninitialized read");
		}
	    }
	}
	PASSED();

	/*----------------------------------------------------------------------
	* STEP 2: Test compression by setting up a chunked dataset and writing
	* to it.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (write)");

	for (i=n=0; i<size[0]; i++)
	{
	    for (j=0; j<size[1]; j++)
	    {
		points[i][j] = (int)n++;
	    }
	}

	dataset->write ((void*) points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 3: Try to read the data we just wrote.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (read)");

	// Read the dataset back
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in read");
	    }

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 4: Write new data over the top of the old data.  The new data is
	* random thus not very compressible, and will cause the chunks to move
	* around as they grow.  We only change values for the left half of the
	* dataset although we rewrite the whole thing.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (modify)");

	for (i=0; i<size[0]; i++)
	{
	    for (j=0; j<size[1]/2; j++)
	    {
	    	points[i][j] = rand ();
	    }
	}
	dataset->write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Read the dataset back and check it
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in modify");
	    }

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 5: Close the dataset and then open it and read it again.  This
	* ensures that the compression message is picked up properly from the
	* object header.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (re-open)");

	// close this dataset to reuse the var
	delete dataset;

	dataset = new DataSet (file.openDataSet (DSET_COMPRESS_NAME));
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in re-open");
	    }

	PASSED();


	/*----------------------------------------------------------------------
	* STEP 6: Test partial I/O by writing to and then reading from a
	* hyperslab of the dataset.  The hyperslab does not line up on chunk
	* boundaries (we know that case already works from above tests).
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (partial I/O)");

	const hsize_t	hs_size[2] = {4, 50};
	const hsize_t	hs_offset[2] = {7, 30};
	for (i = 0; i < hs_size[0]; i++) {
	    for (j = 0; j < hs_size[1]; j++) {
		points[hs_offset[0]+i][hs_offset[1]+j] = rand ();
	    }
	}
	space1.selectHyperslab( H5S_SELECT_SET, hs_size, hs_offset );
	dataset->write ((void*)points, PredType::NATIVE_INT, space1, space1, xfer);
	dataset->read ((void*)check, PredType::NATIVE_INT, space1, space1, xfer);

	// Check that the values read are the same as the values written
	for (i=0; i<hs_size[0]; i++) {
	for (j=0; j<hs_size[1]; j++) {
	    if (points[hs_offset[0]+i][hs_offset[1]+j] !=
		check[hs_offset[0]+i][hs_offset[1]+j]) {
		H5_FAILED();
		cerr << "    Read different values than written.\n" << endl;
		cerr << "    At index " << (unsigned long)(hs_offset[0]+i) <<
		   "," << (unsigned long)(hs_offset[1]+j) << endl;

		cerr << "    At original: " << (int)points[hs_offset[0]+i][hs_offset[1]+j] << endl;
		cerr << "    At returned: " << (int)check[hs_offset[0]+i][hs_offset[1]+j] << endl;
		throw Exception("test_compression", "Failed in partial I/O");
	    }
	} // for j
	} // for i

	delete dataset;
	dataset = NULL;

	PASSED();

#else
	SUBTEST("deflate filter");
	SKIPPED();
	cerr << not_supported << endl;
#endif

	/*----------------------------------------------------------------------
	* STEP 7: Register an application-defined compression method and use it
	* to write and then read the dataset.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (app-defined method)");

        if (H5Zregister (H5Z_BOGUS)<0)
		throw Exception("test_compression", "Failed in app-defined method");
	// Attach the registered filter via the C++ API; the original also
	// called the C API H5Pset_filter here, which would have added the
	// same filter to the pipeline a second time.
	dscreatplist.setFilter (H5Z_FILTER_BOGUS, 0, 0, NULL);

	DataSpace space2 (2, size, NULL);
	dataset = new DataSet (file.createDataSet (DSET_BOGUS_NAME, PredType::NATIVE_INT, space2, dscreatplist));

	dataset->write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in app-defined method");
	    }

	PASSED();

	/*----------------------------------------------------------------------
	* Cleanup
	*----------------------------------------------------------------------
	*/
	delete dataset;
	delete [] tconv_buf;
	return 0;
    } // end try

    // catch all dataset, file, space, and plist exceptions
    catch (Exception& E)
    {
	cerr << " FAILED" << endl;
	cerr << "    <<<  " << E.getDetailMsg() << "  >>>" << endl << endl;

	// clean up and return with failure
	if (dataset != NULL)
	    delete dataset;
	if (tconv_buf)
	    delete [] tconv_buf;
	return -1;
    }
}   // test_compression
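
H5Z_BOGUS and check_values are defined elsewhere in the test suite. As a minimal sketch, registering an application-defined pass-through filter looks roughly like this (assuming the C API's H5Z_class2_t layout; the id 305 is illustrative):

    const H5Z_filter_t H5Z_FILTER_BOGUS = 305;   // user-defined filter id

    // Pass-through filter callback: reports success, leaves data untouched.
    static size_t filter_bogus(unsigned int flags, size_t cd_nelmts,
                               const unsigned int *cd_values, size_t nbytes,
                               size_t *buf_size, void **buf)
    {
        return nbytes;
    }

    const H5Z_class2_t H5Z_BOGUS[1] = {{
        H5Z_CLASS_T_VERS,   // version of the filter class struct
        H5Z_FILTER_BOGUS,   // filter id
        1, 1,               // encoder present, decoder present
        "bogus",            // filter name
        NULL, NULL,         // no can-apply / set-local callbacks
        filter_bogus,       // the filter function itself
    }};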
Example 11
/*-------------------------------------------------------------------------
 * Function:	test_tconv
 *
 * Purpose:	Test some simple data type conversion stuff.
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:	Binh-Minh Ribler (using C version)
 *		Friday, January 5, 2001
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_tconv( H5File& file)
{
    // Prepare buffers for input/output; operator new throws std::bad_alloc
    // on failure, so no null checks are needed.
    char	*out = new char [4*1000000];
    char	*in  = new char [4*1000000];

    SUBTEST("data type conversion");

    // Initialize the dataset
    for (int i = 0; i < 1000000; i++) {
	out[i*4+0] = 0x11;
	out[i*4+1] = 0x22;
	out[i*4+2] = 0x33;
	out[i*4+3] = 0x44;
    }

    try
    {
	// Create the data space
	hsize_t	dims[1];
	dims[0] = 1000000;
	DataSpace space (1, dims, NULL);

	// Create the data set
	DataSet dataset (file.createDataSet (DSET_TCONV_NAME, PredType::STD_I32LE, space));

	// Write the data to the dataset
	dataset.write ((void*) out, PredType::STD_I32LE);

	// Read data with byte order conversion
	dataset.read ((void*) in, PredType::STD_I32BE);

	// Check
	for (int i = 0; i < 1000000; i++) {
	    if (in[4*i+0]!=out[4*i+3] ||
		in[4*i+1]!=out[4*i+2] ||
		in[4*i+2]!=out[4*i+1] ||
		in[4*i+3]!=out[4*i+0])
	    {
		throw Exception("DataSet::read", "Read with byte order conversion failed");
	    }
	}

	// clean up and return with success
	delete [] out;
	delete [] in;
	PASSED();
	return 0;
    }  // end try

    // catch all dataset and space exceptions
    catch (Exception& E)
    {
	cerr << " FAILED" << endl;
	cerr << "    <<<  " << E.getDetailMsg() << "  >>>" << endl << endl;

	// clean up and return with failure
	delete [] out;
	delete [] in;
	return -1;
    }
}   // test_tconv
Example 12
/*-------------------------------------------------------------------------
 * Function:	test_simple_io
 *
 * Purpose:	Tests simple I/O.  That is, reading and writing a complete
 *		multi-dimensional array without data type or data space
 *		conversions, without compression, and stored contiguously.
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:	Binh-Minh Ribler (using C version)
 *		Friday, January 5, 2001
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_simple_io( H5File& file)
{

    SUBTEST("simple I/O");

    int	points[100][200];
    int	check[100][200];
    int		i, j, n;

    // Initialize the dataset
    for (i = n = 0; i < 100; i++)
    {
	for (j = 0; j < 200; j++) {
	    points[i][j] = n++;
	}
    }

    char* tconv_buf = new char [1000];
    try
    {
	// Create the data space
	hsize_t	dims[2];
	dims[0] = 100;
	dims[1] = 200;
	DataSpace space (2, dims, NULL);

	// Create a small conversion buffer to test strip mining
	DSetMemXferPropList xfer;

	xfer.setBuffer (1000, tconv_buf, NULL);

	// Create the dataset
	DataSet dataset (file.createDataSet (DSET_SIMPLE_IO_NAME, PredType::NATIVE_INT, space));

	// Write the data to the dataset
	dataset.write ((void*) points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Read the dataset back
	dataset.read ((void*) check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < 100; i++)
	    for (j = 0; j < 200; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("DataSet::read");
	    }

	// clean up and return with success
	delete [] tconv_buf;
	PASSED();
	return 0;
    }  // end try

    // catch all dataset, space, and plist exceptions
    catch (Exception& E)
    {
	cerr << " FAILED" << endl;
	cerr << "    <<<  " << E.getDetailMsg() << "  >>>" << endl << endl;

	// clean up and return with failure
	if (tconv_buf)
	    delete [] tconv_buf;
	return -1;
    }
}   // test_simple_io
Example 13
int main (void)
{
    int   i, j; // loop indices

    /*
     * Try block to detect exceptions raised by any of the calls inside it
     */
    try
    {
	/*
	 * Turn off the auto-printing when failure occurs so that we can
	 * handle the errors appropriately
	 */
	Exception::dontPrint();

	/*
	 * Create a file.
	 */
	H5File* file = new H5File( FILE_NAME, H5F_ACC_TRUNC );

	/*
	* Create property list for a dataset and set up fill values.
	*/
	int fillvalue = 0;   /* Fill value for the dataset */
	DSetCreatPropList plist;
	plist.setFillValue(PredType::NATIVE_INT, &fillvalue);

	/*
	 * Create dataspace for the dataset in the file.
	 */
	hsize_t fdim[] = {FSPACE_DIM1, FSPACE_DIM2}; // dim sizes of ds (on disk)
	DataSpace fspace( FSPACE_RANK, fdim );

	/*
	 * Create dataset and write it into the file.
	 */
	DataSet* dataset = new DataSet(file->createDataSet(
		DATASET_NAME, PredType::NATIVE_INT, fspace, plist));

	/*
	 * Select hyperslab for the dataset in the file, using 3x2 blocks,
	 * (4,3) stride and (2,4) count starting at the position (0,1).
	 */
	hsize_t start[2]; // Start of hyperslab
	hsize_t stride[2]; // Stride of hyperslab
	hsize_t count[2];  // Block count
	hsize_t block[2];  // Block sizes
	start[0]  = 0; start[1]  = 1;
	stride[0] = 4; stride[1] = 3;
	count[0]  = 2; count[1]  = 4;
	block[0]  = 3; block[1]  = 2;
	fspace.selectHyperslab( H5S_SELECT_SET, count, start, stride, block);

	/*
	 * Create dataspace for the first dataset.
	 */
	hsize_t dim1[] = {MSPACE1_DIM};  /* Dimension size of the first dataset
	                                   (in memory) */
	DataSpace mspace1( MSPACE1_RANK, dim1 );

	/*
	 * Select hyperslab.
	 * We will use 48 elements of the vector buffer starting at the
	 * second element.  Selected elements are 1 2 3 . . . 48
	 */
	start[0]  = 1;
	stride[0] = 1;
	count[0]  = 48;
	block[0]  = 1;
	mspace1.selectHyperslab( H5S_SELECT_SET, count, start, stride, block);

	/*
	 * Write selection from the vector buffer to the dataset in the file.
	 *
	 * File dataset should look like this:
	 *                    0  1  2  0  3  4  0  5  6  0  7  8
	 *                    0  9 10  0 11 12  0 13 14  0 15 16
	 *                    0 17 18  0 19 20  0 21 22  0 23 24
	 *                    0  0  0  0  0  0  0  0  0  0  0  0
	 *                    0 25 26  0 27 28  0 29 30  0 31 32
	 *                    0 33 34  0 35 36  0 37 38  0 39 40
	 *                    0 41 42  0 43 44  0 45 46  0 47 48
	 *                    0  0  0  0  0  0  0  0  0  0  0  0
	 */
	int    vector[MSPACE1_DIM];	// vector buffer for dset

	/*
	 * Buffer initialization.
	 */
	vector[0] = vector[MSPACE1_DIM - 1] = -1;
	for (i = 1; i < MSPACE1_DIM - 1; i++)
	    vector[i] = i;

	dataset->write( vector, PredType::NATIVE_INT, mspace1, fspace );

	/*
	 * Reset the selection for the file dataspace.
	 */
	fspace.selectNone();

	/*
	 * Create dataspace for the second dataset.
	 */
	hsize_t dim2[] = {MSPACE2_DIM};  /* Dimension size of the second dataset
	                                   (in memory) */
	DataSpace mspace2( MSPACE2_RANK, dim2 );

	/*
	 * Select sequence of NPOINTS points in the file dataspace.
	 */
	hsize_t coord[NPOINTS][FSPACE_RANK]; /* Array to store selected points
	                                        from the file dataspace */
	coord[0][0] = 0; coord[0][1] = 0;
	coord[1][0] = 3; coord[1][1] = 3;
	coord[2][0] = 3; coord[2][1] = 5;
	coord[3][0] = 5; coord[3][1] = 6;

	fspace.selectElements( H5S_SELECT_SET, NPOINTS, (const hsize_t *)coord);

	/*
	 * Write new selection of points to the dataset.
	 */
	int    values[] = {53, 59, 61, 67};  /* New values to be written */
	dataset->write( values, PredType::NATIVE_INT, mspace2, fspace );

	/*
	 * File dataset should look like this:
	 *                   53  1  2  0  3  4  0  5  6  0  7  8
	 *                    0  9 10  0 11 12  0 13 14  0 15 16
	 *                    0 17 18  0 19 20  0 21 22  0 23 24
	 *                    0  0  0 59  0 61  0  0  0  0  0  0
	 *                    0 25 26  0 27 28  0 29 30  0 31 32
	 *                    0 33 34  0 35 36 67 37 38  0 39 40
	 *                    0 41 42  0 43 44  0 45 46  0 47 48
	 *                    0  0  0  0  0  0  0  0  0  0  0  0
	 *
	 */

	/*
	 * Close the dataset and the file.
	 */
	delete dataset;
	delete file;

	/*
	 * Open the file.
	 */
	file = new H5File( FILE_NAME, H5F_ACC_RDONLY );

	/*
	 * Open the dataset.
	 */
	dataset = new DataSet( file->openDataSet( DATASET_NAME ));

	/*
	 * Get dataspace of the dataset.
	 */
	fspace = dataset->getSpace();

	/*
	 * Select first hyperslab for the dataset in the file. The following
	 * elements are selected:
	 *                     10  0 11 12
	 *                     18  0 19 20
	 *                      0 59  0 61
	 *
	 */
	start[0] = 1; start[1] = 2;
	block[0] = 1; block[1] = 1;
	stride[0] = 1; stride[1] = 1;
	count[0]  = 3; count[1]  = 4;
	fspace.selectHyperslab(H5S_SELECT_SET, count, start, stride, block);

	/*
	 * Add second selected hyperslab to the selection.
	 * The following elements are selected:
	 *                    19 20  0 21 22
	 *                     0 61  0  0  0
	 *                    27 28  0 29 30
	 *                    35 36 67 37 38
	 *                    43 44  0 45 46
	 *                     0  0  0  0  0
	 * Note that two hyperslabs overlap. Common elements are:
	 *                                              19 20
	 *                                               0 61
	 */
	start[0] = 2; start[1] = 4;
	block[0] = 1; block[1] = 1;
	stride[0] = 1; stride[1] = 1;
	count[0]  = 6; count[1]  = 5;
	fspace.selectHyperslab(H5S_SELECT_OR, count, start, stride, block);

	/*
	 * Create memory dataspace.
	 */
	hsize_t mdim[] = {MSPACE_DIM1, MSPACE_DIM2}; /* Dimension sizes of the
                                                   dataset in memory when we
                                                   read selection from the
                                                   dataset on the disk */
	DataSpace mspace(MSPACE_RANK, mdim);

	/*
	 * Select two hyperslabs in memory. Hyperslabs has the same
	 * size and shape as the selected hyperslabs for the file dataspace.
	 */
	start[0] = 0; start[1] = 0;
	block[0] = 1; block[1] = 1;
	stride[0] = 1; stride[1] = 1;
	count[0]  = 3; count[1]  = 4;
	mspace.selectHyperslab(H5S_SELECT_SET, count, start, stride, block);
	start[0] = 1; start[1] = 2;
	block[0] = 1; block[1] = 1;
	stride[0] = 1; stride[1] = 1;
	count[0]  = 6; count[1]  = 5;
	mspace.selectHyperslab(H5S_SELECT_OR, count, start, stride, block);

	/*
	 * Initialize data buffer.
	 */
	int matrix_out[MSPACE_DIM1][MSPACE_DIM2];
	for (i = 0; i < MSPACE_DIM1; i++)
	    for (j = 0; j < MSPACE_DIM2; j++)
		matrix_out[i][j] = 0;

	/*
	 * Read data back to the buffer matrix.
	 */
	dataset->read(matrix_out, PredType::NATIVE_INT, mspace, fspace);

	/*
	 * Display the result.  Memory dataset is:
	 *
	 *                    10  0 11 12  0  0  0  0  0
	 *                    18  0 19 20  0 21 22  0  0
	 *                     0 59  0 61  0  0  0  0  0
	 *                     0  0 27 28  0 29 30  0  0
	 *                     0  0 35 36 67 37 38  0  0
	 *                     0  0 43 44  0 45 46  0  0
	 *                     0  0  0  0  0  0  0  0  0
	 *                     0  0  0  0  0  0  0  0  0
	 */
	for (i=0; i < MSPACE_DIM1; i++)
	{
	    for(j=0; j < MSPACE_DIM2; j++)
		cout << matrix_out[i][j] << "  ";
	    cout << endl;
	}

	/*
	 * Close the dataset and the file.
	 */
	delete dataset;
	delete file;
   }  // end of try block

   // catch failure caused by the H5File operations
   catch( FileIException& error )
   {
	error.printErrorStack();
	return -1;
   }

   // catch failure caused by the DataSet operations
   catch( DataSetIException& error )
   {
	error.printErrorStack();
	return -1;
   }

   // catch failure caused by the DataSpace operations
   catch( DataSpaceIException& error )
   {
	error.printErrorStack();
	return -1;
   }

   return 0;
}
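
The constants are defined elsewhere; values consistent with the matrices shown in the comments (and with the standard HDF5 selection example) would be:

    const H5std_string FILE_NAME("Select.h5");        // illustrative
    const H5std_string DATASET_NAME("Matrix in file");
    const int MSPACE1_RANK = 1;  const int MSPACE1_DIM = 50; // 1-D buffer
    const int MSPACE2_RANK = 1;  const int MSPACE2_DIM = 4;  // 4 point values
    const int FSPACE_RANK  = 2;  // file dataset is 8 x 12
    const int FSPACE_DIM1  = 8;  const int FSPACE_DIM2 = 12;
    const int MSPACE_RANK  = 2;  // memory dataset is 8 x 9
    const int MSPACE_DIM1  = 8;  const int MSPACE_DIM2 = 9;
    const int NPOINTS      = 4;  // number of selected points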