Example #1
//--------------------------------------------------------------------------
// Function:	DataSet::read
///\brief	This is an overloaded member function, provided for convenience.
///		It takes a reference to an \c H5std_string for the buffer.
///\param	strg - IN: Buffer for the read data
///\param	mem_type - IN: Memory datatype
///\param	mem_space - IN: Memory dataspace
///\param	file_space - IN: Dataset's dataspace in the file
///\param	xfer_plist - IN: Transfer property list for this I/O operation
///\exception	H5::DataSetIException
// Programmer	Binh-Minh Ribler - 2000
// Modification
//	Jul 2009
//		Follow the change to Attribute::read and use the following
//		private functions to read datasets with fixed- and
//		variable-length string:
//			DataSet::p_read_fixed_len and
//			DataSet::p_read_variable_len
//--------------------------------------------------------------------------
void DataSet::read(H5std_string& strg, const DataType& mem_type, const DataSpace& mem_space, const DataSpace& file_space, const DSetMemXferPropList& xfer_plist) const
{
    // Check if this dataset has variable-len string or fixed-len string and
    // proceed appropriately.
    htri_t is_variable_len = H5Tis_variable_str(mem_type.getId());
    if (is_variable_len < 0)
    {
        throw DataSetIException("DataSet::read", "H5Tis_variable_str failed");
    }

    // Obtain identifiers for C API
    hid_t mem_type_id = mem_type.getId();
    hid_t mem_space_id = mem_space.getId();
    hid_t file_space_id = file_space.getId();
    hid_t xfer_plist_id = xfer_plist.getId();

    if (!is_variable_len)       // only allocate for fixed-len string
    {
        p_read_fixed_len(mem_type_id, mem_space_id, file_space_id, xfer_plist_id, strg);
    }
    else
    {
        p_read_variable_len(mem_type_id, mem_space_id, file_space_id, xfer_plist_id, strg);
    }
}
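
A minimal usage sketch for this overload follows; the file name, dataset path, and the use of the stored datatype are assumptions for illustration, not part of the example above.

// Sketch: read a string dataset into an H5std_string.  The overload above
// dispatches internally, so the same call works for fixed- and
// variable-length string datasets.  File and dataset names are hypothetical.
#include "H5Cpp.h"
using namespace H5;

H5std_string read_comment()
{
    H5File file("example.h5", H5F_ACC_RDONLY);               // hypothetical file
    DataSet dset = file.openDataSet("/comment");              // hypothetical dataset
    DataType dtype = dset.getDataType();                      // stored string datatype

    H5std_string value;
    dset.read(value, dtype, DataSpace::ALL, DataSpace::ALL, DSetMemXferPropList::DEFAULT);
    return value;
}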
Example #2
//--------------------------------------------------------------------------
// Function:	DataSet::vlenReclaim
///\brief	Reclaims VL datatype memory buffers.
///\param	type - IN: Datatype, which is the datatype stored in the buffer
///\param	space - IN: Selection for the memory buffer to free the
///		VL datatypes within
///\param	xfer_plist - IN: Property list used to create the buffer
///\param	buf - IN: Pointer to the buffer to be reclaimed
///\exception	H5::DataSetIException
// Programmer	Binh-Minh Ribler - 2000
// Description
//		This function has a better prototype for users than the
//		other one, which might be removed at some point. BMR - 2006/12/20
//--------------------------------------------------------------------------
void DataSet::vlenReclaim(void* buf, const DataType& type, const DataSpace& space, const DSetMemXferPropList& xfer_plist)
{
   // Obtain identifiers for C API
   hid_t type_id = type.getId();
   hid_t space_id = space.getId();
   hid_t xfer_plist_id = xfer_plist.getId();

   herr_t ret_value = H5Dvlen_reclaim(type_id, space_id, xfer_plist_id, buf);
   if (ret_value < 0)
   {
      throw DataSetIException("DataSet::vlenReclaim", "H5Dvlen_reclaim failed");
   }
}
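
Stripped to its essentials, a typical caller reads variable-length data through the C-style buffer overload of read() and then hands the buffers back with vlenReclaim(); the sketch below assumes a dataset of variable-length strings at a hypothetical path.

// Sketch: read variable-length strings into raw char* buffers allocated by
// the library, then free them with vlenReclaim().  Dataset name is hypothetical.
#include <vector>
#include "H5Cpp.h"
using namespace H5;

void read_then_reclaim(H5File& file)
{
    DataSet dset = file.openDataSet("/vl_strings");               // hypothetical dataset
    DataSpace fspace = dset.getSpace();
    hssize_t npoints = fspace.getSimpleExtentNpoints();

    StrType memtype(PredType::C_S1, H5T_VARIABLE);                 // VL string in memory
    std::vector<char*> buf(static_cast<size_t>(npoints));

    dset.read(buf.data(), memtype, DataSpace::ALL, DataSpace::ALL, DSetMemXferPropList::DEFAULT);

    // ... use buf[i] as NUL-terminated strings ...

    // Return the library-allocated string memory.
    DataSet::vlenReclaim(buf.data(), memtype, fspace, DSetMemXferPropList::DEFAULT);
}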
Example #3
//--------------------------------------------------------------------------
// Function:	DataSet::write
///\brief	Writes raw data from an application buffer to a dataset.
///\param	buf - IN: Buffer containing data to be written
///\param	mem_type - IN: Memory datatype
///\param	mem_space - IN: Memory dataspace
///\param	file_space - IN: Dataset's dataspace in the file
///\param	xfer_plist - IN: Transfer property list for this I/O operation
///\exception	H5::DataSetIException
///\par Description
///		This function writes raw data from an application buffer
///		\a buf to a dataset, converting from memory datatype
///		\a mem_type and dataspace \a mem_space to file datatype
///		and dataspace.
// Programmer	Binh-Minh Ribler - 2000
//--------------------------------------------------------------------------
void DataSet::write( const void* buf, const DataType& mem_type, const DataSpace& mem_space, const DataSpace& file_space, const DSetMemXferPropList& xfer_plist ) const
{
   // Obtain identifiers for C API
   hid_t mem_type_id = mem_type.getId();
   hid_t mem_space_id = mem_space.getId();
   hid_t file_space_id = file_space.getId();
   hid_t xfer_plist_id = xfer_plist.getId();

   herr_t ret_value = H5Dwrite( id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf );
   if( ret_value < 0 )
   {
      throw DataSetIException("DataSet::write", "H5Dwrite failed");
   }
}
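
A minimal caller of this write() might look like the sketch below; the file and dataset names are hypothetical, and errors are left to propagate as H5::Exception.

// Sketch: write a small 2-D integer array to a new contiguous dataset.
#include "H5Cpp.h"
using namespace H5;

void write_matrix()
{
    const hsize_t dims[2] = {4, 6};
    int data[4][6];
    for (hsize_t i = 0; i < dims[0]; i++)
        for (hsize_t j = 0; j < dims[1]; j++)
            data[i][j] = static_cast<int>(i * dims[1] + j);

    H5File file("matrix.h5", H5F_ACC_TRUNC);                       // hypothetical file
    DataSpace space(2, dims);
    DataSet dset = file.createDataSet("/matrix", PredType::NATIVE_INT, space);

    // Memory layout matches the file layout, so both selections are ALL.
    dset.write(data, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, DSetMemXferPropList::DEFAULT);
}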
Example #4
//--------------------------------------------------------------------------
// Function:	DataSet::write
///\brief	This is an overloaded member function, provided for convenience.
///		It takes a reference to an \c H5std_string for the buffer.
// Programmer	Binh-Minh Ribler - 2000
// Modification
//	Jul 2009
//		Modified to pass the buffer into H5Dwrite properly depending
//		whether the dataset has variable- or fixed-length string.
//--------------------------------------------------------------------------
void DataSet::write( const H5std_string& strg, const DataType& mem_type, const DataSpace& mem_space, const DataSpace& file_space, const DSetMemXferPropList& xfer_plist ) const
{
    // Check if this dataset has variable-len string or fixed-len string and
    // proceed appropriately.
    htri_t is_variable_len = H5Tis_variable_str(mem_type.getId());
    if (is_variable_len < 0)
    {
        throw DataSetIException("DataSet::write", "H5Tis_variable_str failed");
    }

    // Obtain identifiers for C API
    hid_t mem_type_id = mem_type.getId();
    hid_t mem_space_id = mem_space.getId();
    hid_t file_space_id = file_space.getId();
    hid_t xfer_plist_id = xfer_plist.getId();

    // Convert string to C-string
    const char* strg_C;
    strg_C = strg.c_str();  // strg_C refers to the contents of strg as a C-str
    herr_t ret_value = 0;

    // Pass the string in differently depending on whether it is variable- or fixed-length
    if (!is_variable_len)
    {
        ret_value = H5Dwrite( id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, strg_C );
    }
    else
    {
        // passing string argument by address
        ret_value = H5Dwrite( id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, &strg_C );
    }
    if (ret_value < 0)
    {
        throw DataSetIException("DataSet::write", "H5Dwrite failed");
    }
}
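
A minimal caller of the string overload is sketched below; it creates a scalar variable-length string dataset at a hypothetical path and writes one H5std_string into it.

// Sketch: write an H5std_string to a scalar, variable-length string dataset.
// The overload above takes care of passing the buffer correctly.
#include "H5Cpp.h"
using namespace H5;

void write_comment(H5File& file)
{
    H5std_string comment("sample comment text");

    StrType stype(PredType::C_S1, H5T_VARIABLE);      // variable-length string type
    DataSpace scalar_space(H5S_SCALAR);
    DataSet dset = file.createDataSet("/comment", stype, scalar_space);   // hypothetical path

    dset.write(comment, stype, DataSpace::ALL, DataSpace::ALL, DSetMemXferPropList::DEFAULT);
}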
Example #5
void test_szip_filter(H5File& file1)
{
#ifdef H5_HAVE_FILTER_SZIP
    int      points[DSET_DIM1][DSET_DIM2], check[DSET_DIM1][DSET_DIM2];
    unsigned szip_options_mask=H5_SZIP_NN_OPTION_MASK;
    unsigned szip_pixels_per_block=4;

    // Output message about test being performed
    SUBTEST("szip filter (with encoder)");

    if ( h5_szip_can_encode() == 1) {
        char* tconv_buf = new char [1000];

        try {
            const hsize_t   size[2] = {DSET_DIM1, DSET_DIM2};

            // Create the data space
            DataSpace space1(2, size, NULL);

            // Create a small conversion buffer to test strip mining (?)
            DSetMemXferPropList xfer;
            xfer.setBuffer (1000, tconv_buf, NULL);

            // Prepare dataset create property list
            DSetCreatPropList dsplist;
            dsplist.setChunk(2, chunk_size);

            // Set up for szip compression
            dsplist.setSzip(szip_options_mask, szip_pixels_per_block);

            // Create a dataset with szip compression
            DataSpace space2 (2, size, NULL);
            DataSet dataset(file1.createDataSet (DSET_SZIP_NAME, PredType::NATIVE_INT, space2, dsplist));

            hsize_t i, j, n;
            for (i=n=0; i<size[0]; i++)
            {
                for (j=0; j<size[1]; j++)
                {
                    points[i][j] = (int)n++;
                }
            }

            // Write to the dataset then read back the values
            dataset.write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
            dataset.read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

            // Check that the values read are the same as the values written
            for (i = 0; i < size[0]; i++)
                for (j = 0; j < size[1]; j++)
                {
                    int status = check_values (i, j, points[i][j], check[i][j]);
                    if (status == -1)
                        throw Exception("test_szip_filter", "Failed in testing szip method");
                }
            dsplist.close();
            PASSED();
        } // end of try

        // catch all other exceptions
        catch (Exception& E)
        {
            issue_fail_msg("test_szip_filter()", __LINE__, __FILE__, E.getCDetailMsg());
        }

        delete[] tconv_buf;
    } // if the szip encoder is present
    else {
	SKIPPED();
    }

#else /* H5_HAVE_FILTER_SZIP */
    SUBTEST("szip filter");
    SKIPPED();
    puts("    Szip filter not enabled");
#endif /* H5_HAVE_FILTER_SZIP */
}  // test_szip_filter
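
Outside the test harness, the szip setup in this example reduces to a chunked dataset-creation property list plus setSzip(); a minimal sketch, assuming the library was built with the szip encoder and using hypothetical names:

// Sketch: create a dataset compressed with the szip filter.
#include "H5Cpp.h"
using namespace H5;

void create_szip_dataset(H5File& file)
{
    const hsize_t dims[2]  = {100, 200};
    const hsize_t chunk[2] = {10, 20};

    DSetCreatPropList dcpl;
    dcpl.setChunk(2, chunk);                      // szip requires chunked storage
    dcpl.setSzip(H5_SZIP_NN_OPTION_MASK, 4);      // NN coding, 4 pixels per block

    DataSpace space(2, dims);
    file.createDataSet("/szip_data", PredType::NATIVE_INT, space, dcpl);   // hypothetical name
}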
Example #6
/*-------------------------------------------------------------------------
 * Function:	test_compression
 *
 * Purpose:	Tests dataset compression. If compression is requested when
 *		it hasn't been compiled into the library (such as when
 *		updating an existing compressed dataset) then data is sent to
 *		the file uncompressed but no errors are returned.
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:	Binh-Minh Ribler (using C version)
 *		Friday, January 5, 2001
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_compression(H5File& file)
{
#ifndef H5_HAVE_FILTER_DEFLATE
    const char		*not_supported;
    not_supported = "    Deflate compression is not enabled.";
#endif /* H5_HAVE_FILTER_DEFLATE */
    int		points[100][200];
    int		check[100][200];
    hsize_t	i, j, n;

    // Initialize the dataset
    for (i = n = 0; i < 100; i++)
    {
	for (j = 0; j < 200; j++) {
	    points[i][j] = (int)n++;
	}
    }
    char* tconv_buf = new char [1000];
    DataSet* dataset = NULL;
    try
    {
	const hsize_t	size[2] = {100, 200};
	// Create the data space
	DataSpace space1(2, size, NULL);

	// Create a small conversion buffer to test strip mining
	DSetMemXferPropList xfer;

	xfer.setBuffer (1000, tconv_buf, NULL);

	// Use chunked storage with compression
	DSetCreatPropList dscreatplist;

	const hsize_t	chunk_size[2] = {2, 25};
	dscreatplist.setChunk (2, chunk_size);
	dscreatplist.setDeflate (6);

#ifdef H5_HAVE_FILTER_DEFLATE
	SUBTEST("Compression (setup)");

	// Create the dataset
	dataset = new DataSet (file.createDataSet
	    (DSET_COMPRESS_NAME, PredType::NATIVE_INT, space1, dscreatplist));

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 1: Read uninitialized data.  It should be zero.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (uninitialized read)");

	dataset->read ((void*) check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	for (i=0; i<size[0]; i++) {
	    for (j=0; j<size[1]; j++) {
		if (0!=check[i][j]) {
		    H5_FAILED();
		    cerr << "    Read a non-zero value." << endl;
		    cerr << "    At index " << (unsigned long)i << "," <<
		   (unsigned long)j << endl;
		    throw Exception("test_compression", "Failed in uninitialized read");
		}
	    }
	}
	PASSED();

	/*----------------------------------------------------------------------
	* STEP 2: Test compression by setting up a chunked dataset and writing
	* to it.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (write)");

	for (i=n=0; i<size[0]; i++)
	{
	    for (j=0; j<size[1]; j++)
	    {
		points[i][j] = (int)n++;
	    }
	}

	dataset->write ((void*) points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 3: Try to read the data we just wrote.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (read)");

	// Read the dataset back
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in read");
	    }

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 4: Write new data over the top of the old data.  The new data is
	* random thus not very compressible, and will cause the chunks to move
	* around as they grow.  We only change values for the left half of the
	* dataset although we rewrite the whole thing.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (modify)");

	for (i=0; i<size[0]; i++)
	{
	    for (j=0; j<size[1]/2; j++)
	    {
	    	points[i][j] = rand ();
	    }
	}
	dataset->write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Read the dataset back and check it
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in modify");
	    }

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 5: Close the dataset and then open it and read it again.  This
	* ensures that the compression message is picked up properly from the
	* object header.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (re-open)");

	// close this dataset to reuse the var
	delete dataset;

	dataset = new DataSet (file.openDataSet (DSET_COMPRESS_NAME));
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in re-open");
	    }

	PASSED();


	/*----------------------------------------------------------------------
	* STEP 6: Test partial I/O by writing to and then reading from a
	* hyperslab of the dataset.  The hyperslab does not line up on chunk
	* boundaries (we know that case already works from above tests).
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (partial I/O)");

	const hsize_t	hs_size[2] = {4, 50};
	const hsize_t	hs_offset[2] = {7, 30};
	for (i = 0; i < hs_size[0]; i++) {
	    for (j = 0; j < hs_size[1]; j++) {
		points[hs_offset[0]+i][hs_offset[1]+j] = rand ();
	    }
	}
	space1.selectHyperslab( H5S_SELECT_SET, hs_size, hs_offset );
	dataset->write ((void*)points, PredType::NATIVE_INT, space1, space1, xfer);
	dataset->read ((void*)check, PredType::NATIVE_INT, space1, space1, xfer);

	// Check that the values read are the same as the values written
	for (i=0; i<hs_size[0]; i++) {
	for (j=0; j<hs_size[1]; j++) {
	    if (points[hs_offset[0]+i][hs_offset[1]+j] !=
		check[hs_offset[0]+i][hs_offset[1]+j]) {
		H5_FAILED();
		cerr << "    Read different values than written.\n" << endl;
		cerr << "    At index " << (unsigned long)(hs_offset[0]+i) <<
		   "," << (unsigned long)(hs_offset[1]+j) << endl;

		cerr << "    At original: " << (int)points[hs_offset[0]+i][hs_offset[1]+j] << endl;
		cerr << "    At returned: " << (int)check[hs_offset[0]+i][hs_offset[1]+j] << endl;
		throw Exception("test_compression", "Failed in partial I/O");
	    }
	} // for j
	} // for i

	delete dataset;
	dataset = NULL;

	PASSED();

#else
	SUBTEST("deflate filter");
	SKIPPED();
	cerr << not_supported << endl;
#endif

	/*----------------------------------------------------------------------
	* STEP 7: Register an application-defined compression method and use it
	* to write and then read the dataset.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (app-defined method)");

        if (H5Zregister (H5Z_BOGUS)<0)
		throw Exception("test_compression", "Failed in app-defined method");
	if (H5Pset_filter (dscreatplist.getId(), H5Z_FILTER_BOGUS, 0, 0, NULL)<0)
	    throw Exception("test_compression", "Failed in app-defined method");
	dscreatplist.setFilter (H5Z_FILTER_BOGUS, 0, 0, NULL);

	DataSpace space2 (2, size, NULL);
	dataset = new DataSet (file.createDataSet (DSET_BOGUS_NAME, PredType::NATIVE_INT, space2, dscreatplist));

	dataset->write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in app-defined method");
	    }

	PASSED();

	/*----------------------------------------------------------------------
	* Cleanup
	*----------------------------------------------------------------------
	*/
	delete dataset;
	delete [] tconv_buf;
	return 0;
    } // end try

    // catch all dataset, file, space, and plist exceptions
    catch (Exception& E)
    {
	cerr << " FAILED" << endl;
	cerr << "    <<<  " << E.getDetailMsg() << "  >>>" << endl << endl;

	// clean up and return with failure
	if (dataset != NULL)
	    delete dataset;
	if (tconv_buf)
	    delete [] tconv_buf;
	return -1;
    }
}   // test_compression
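
The deflate setup used in this test amounts to two property-list calls; a minimal sketch with hypothetical names, assuming the library was built with the deflate filter:

// Sketch: create a chunked dataset compressed with gzip/deflate level 6,
// matching the chunk size used in the test above.
#include "H5Cpp.h"
using namespace H5;

DataSet create_deflate_dataset(H5File& file)
{
    const hsize_t dims[2]  = {100, 200};
    const hsize_t chunk[2] = {2, 25};

    DSetCreatPropList dcpl;
    dcpl.setChunk(2, chunk);      // deflate requires chunked storage
    dcpl.setDeflate(6);           // compression level 6, as in the test above

    DataSpace space(2, dims);
    return file.createDataSet("/compressed", PredType::NATIVE_INT, space, dcpl);   // hypothetical name
}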
Example #7
/*-------------------------------------------------------------------------
 * Function:	test_simple_io
 *
 * Purpose:	Tests simple I/O.  That is, reading and writing a complete
 *		multi-dimensional array without data type or data space
 *		conversions, without compression, and stored contiguously.
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:	Binh-Minh Ribler (using C version)
 *		Friday, January 5, 2001
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_simple_io( H5File& file)
{

    SUBTEST("simple I/O");

    int	points[100][200];
    int	check[100][200];
    int		i, j, n;

    // Initialize the dataset
    for (i = n = 0; i < 100; i++)
    {
	for (j = 0; j < 200; j++) {
	    points[i][j] = n++;
	}
    }

    char* tconv_buf = new char [1000];
    try
    {
	// Create the data space
	hsize_t	dims[2];
	dims[0] = 100;
	dims[1] = 200;
	DataSpace space (2, dims, NULL);

	// Create a small conversion buffer to test strip mining
	DSetMemXferPropList xfer;

	xfer.setBuffer (1000, tconv_buf, NULL);

	// Create the dataset
	DataSet dataset (file.createDataSet (DSET_SIMPLE_IO_NAME, PredType::NATIVE_INT, space));

	// Write the data to the dataset
	dataset.write ((void*) points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Read the dataset back
	dataset.read ((void*) check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < 100; i++)
	    for (j = 0; j < 200; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("DataSet::read");
	    }

	// clean up and return with success
	delete [] tconv_buf;
	PASSED();
	return 0;
    }  // end try

    // catch all dataset, space, plist exceptions
    catch (Exception& E)
    {
	cerr << " FAILED" << endl;
	cerr << "    <<<  " << E.getDetailMsg() << "  >>>" << endl << endl;

	// clean up and return with failure
	if (tconv_buf)
	    delete [] tconv_buf;
	return -1;
    }
}   // test_simple_io
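
The same round trip without the test scaffolding, using a flat std::vector as the I/O buffer instead of stack arrays (dataset name hypothetical):

// Sketch: contiguous write/read round trip with a heap-allocated buffer.
#include <vector>
#include "H5Cpp.h"
using namespace H5;

bool simple_roundtrip(H5File& file)
{
    const hsize_t dims[2] = {100, 200};
    std::vector<int> points(dims[0] * dims[1]), check(points.size());
    for (size_t k = 0; k < points.size(); k++)
        points[k] = static_cast<int>(k);

    DataSpace space(2, dims);
    DataSet dset = file.createDataSet("/simple_io", PredType::NATIVE_INT, space);   // hypothetical name

    dset.write(points.data(), PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, DSetMemXferPropList::DEFAULT);
    dset.read(check.data(), PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, DSetMemXferPropList::DEFAULT);

    return points == check;
}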