Example #1
static void test_null_filter()
{
    // Output message about test being performed
    SUBTEST("'Null' filter");
    try {
	//hsize_t  null_size;          // Size of dataset with null filter

	// Prepare dataset create property list
	DSetCreatPropList dsplist;
	dsplist.setChunk(2, chunk_size);

	if (H5Zregister (H5Z_BOGUS)<0)
            throw Exception("test_null_filter", "H5Zregister failed");

	// Set the bogus pass-through filter
	dsplist.setFilter(H5Z_FILTER_BOGUS);

	// this function is just a stub right now; will work on it later - BMR
	//if(test_filter_internal(file,DSET_BOGUS_NAME,dc,DISABLE_FLETCHER32,DATA_NOT_CORRUPTED,&null_size)<0)
        //  throw Exception("test_null_filter", "test_filter_internal failed");

	// Close objects.
	dsplist.close();
	PASSED();
    } // end of try

    // catch all other exceptions
    catch (Exception& E)
    {
        issue_fail_msg("test_null_filter()", __LINE__, __FILE__, E.getCDetailMsg());
    }
}  // test_null_filter
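
Both this test and test_compression below register an application-defined filter with H5Zregister(H5Z_BOGUS); the filter class itself is defined elsewhere in the test file. A minimal sketch of such a pass-through filter under the HDF5 C filter API follows; the id 305 and the names are assumptions, following the usual test pattern (user-defined/test filter ids live in the range 256-511):

// Sketch of the application-defined filter assumed above (hypothetical id).
#include "hdf5.h"

#define H5Z_FILTER_BOGUS 305

// Filter callback: report success without touching the data.
static size_t
filter_bogus(unsigned int flags, size_t cd_nelmts, const unsigned int cd_values[],
	     size_t nbytes, size_t *buf_size, void **buf)
{
    return nbytes;
}

// Filter class handed to H5Zregister().
const H5Z_class2_t H5Z_BOGUS[1] = {{
    H5Z_CLASS_T_VERS,		// H5Z_class_t version
    H5Z_FILTER_BOGUS,		// filter id number
    1, 1,			// encoder and decoder are present
    "bogus",			// filter name for debugging
    NULL,			// can_apply callback (none)
    NULL,			// set_local callback (none)
    filter_bogus,		// the filter function itself
}};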
Example #2
/*-------------------------------------------------------------------------
 * Function:	test_compression
 *
 * Purpose:	Tests dataset compression. If compression is requested when
 *		it hasn't been compiled into the library (such as when
 *		updating an existing compressed dataset) then data is sent to
 *		the file uncompressed but no errors are returned.
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:	Binh-Minh Ribler (using C version)
 *		Friday, January 5, 2001
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_compression(H5File& file)
{
#ifndef H5_HAVE_FILTER_DEFLATE
    const char *not_supported = "    Deflate compression is not enabled.";
#endif /* H5_HAVE_FILTER_DEFLATE */
    int		points[100][200];
    int		check[100][200];
    hsize_t	i, j, n;

    // Initialize the dataset
    for (i = n = 0; i < 100; i++)
    {
	for (j = 0; j < 200; j++) {
	    points[i][j] = (int)n++;
	}
    }
    char* tconv_buf = new char [1000];
    DataSet* dataset = NULL;
    try
    {
	const hsize_t	size[2] = {100, 200};
	// Create the data space
	DataSpace space1(2, size, NULL);

	// Create a small conversion buffer to test strip mining
	DSetMemXferPropList xfer;

	xfer.setBuffer (1000, tconv_buf, NULL);

	// Use chunked storage with compression
	DSetCreatPropList dscreatplist;

	const hsize_t	chunk_size[2] = {2, 25};
	dscreatplist.setChunk (2, chunk_size);
	dscreatplist.setDeflate (6);

#ifdef H5_HAVE_FILTER_DEFLATE
	SUBTEST("Compression (setup)");

	// Create the dataset
	dataset = new DataSet (file.createDataSet
	    (DSET_COMPRESS_NAME, PredType::NATIVE_INT, space1, dscreatplist));

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 1: Read uninitialized data.  It should be zero.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (uninitialized read)");

	dataset->read ((void*) check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	for (i=0; i<size[0]; i++) {
	    for (j=0; j<size[1]; j++) {
		if (0!=check[i][j]) {
		    H5_FAILED();
		    cerr << "    Read a non-zero value." << endl;
		    cerr << "    At index " << (unsigned long)i << "," <<
		   (unsigned long)j << endl;
		    throw Exception("test_compression", "Failed in uninitialized read");
		}
	    }
	}
	PASSED();

	/*----------------------------------------------------------------------
	* STEP 2: Test compression by setting up a chunked dataset and writing
	* to it.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (write)");

	for (i=n=0; i<size[0]; i++)
	{
	    for (j=0; j<size[1]; j++)
	    {
		points[i][j] = (int)n++;
	    }
	}

	dataset->write ((void*) points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 3: Try to read the data we just wrote.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (read)");

	// Read the dataset back
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in read");
	    }

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 4: Write new data over the top of the old data.  The new data is
	* random thus not very compressible, and will cause the chunks to move
	* around as they grow.  We only change values for the left half of the
	* dataset although we rewrite the whole thing.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (modify)");

	for (i=0; i<size[0]; i++)
	{
	    for (j=0; j<size[1]/2; j++)
	    {
	    	points[i][j] = rand ();
	    }
	}
	dataset->write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Read the dataset back and check it
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in modify");
	    }

	PASSED();

	/*----------------------------------------------------------------------
	* STEP 5: Close the dataset and then open it and read it again.  This
	* ensures that the compression message is picked up properly from the
	* object header.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (re-open)");

	// Close this dataset so the variable can be reused
	delete dataset;

	dataset = new DataSet (file.openDataSet (DSET_COMPRESS_NAME));
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in re-open");
	    }

	PASSED();


	/*----------------------------------------------------------------------
	* STEP 6: Test partial I/O by writing to and then reading from a
	* hyperslab of the dataset.  The hyperslab does not line up on chunk
	* boundaries (we know that case already works from above tests).
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (partial I/O)");

	const hsize_t	hs_size[2] = {4, 50};
	const hsize_t	hs_offset[2] = {7, 30};
	for (i = 0; i < hs_size[0]; i++) {
	    for (j = 0; j < hs_size[1]; j++) {
		points[hs_offset[0]+i][hs_offset[1]+j] = rand ();
	    }
	}
	space1.selectHyperslab( H5S_SELECT_SET, hs_size, hs_offset );
	dataset->write ((void*)points, PredType::NATIVE_INT, space1, space1, xfer);
	dataset->read ((void*)check, PredType::NATIVE_INT, space1, space1, xfer);

	// Check that the values read are the same as the values written
	for (i=0; i<hs_size[0]; i++) {
	    for (j=0; j<hs_size[1]; j++) {
		if (points[hs_offset[0]+i][hs_offset[1]+j] !=
			check[hs_offset[0]+i][hs_offset[1]+j]) {
		    H5_FAILED();
		    cerr << "    Read different values than written." << endl;
		    cerr << "    At index " << (unsigned long)(hs_offset[0]+i) <<
			"," << (unsigned long)(hs_offset[1]+j) << endl;
		    cerr << "    At original: " << (int)points[hs_offset[0]+i][hs_offset[1]+j] << endl;
		    cerr << "    At returned: " << (int)check[hs_offset[0]+i][hs_offset[1]+j] << endl;
		    throw Exception("test_compression", "Failed in partial I/O");
		}
	    } // for j
	} // for i

	delete dataset;
	dataset = NULL;

	PASSED();

#else
	SUBTEST("deflate filter");
	SKIPPED();
	cerr << not_supported << endl;
#endif

	/*----------------------------------------------------------------------
	* STEP 7: Register an application-defined compression method and use it
	* to write and then read the dataset.
	*----------------------------------------------------------------------
	*/
	SUBTEST("Compression (app-defined method)");

	if (H5Zregister (H5Z_BOGUS)<0)
	    throw Exception("test_compression", "Failed in app-defined method");

	// Add the application-defined 'bogus' filter to the pipeline
	dscreatplist.setFilter (H5Z_FILTER_BOGUS, 0, 0, NULL);

	DataSpace space2 (2, size, NULL);
	dataset = new DataSet (file.createDataSet (DSET_BOGUS_NAME, PredType::NATIVE_INT, space2, dscreatplist));

	dataset->write ((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
	dataset->read ((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

	// Check that the values read are the same as the values written
	for (i = 0; i < size[0]; i++)
	    for (j = 0; j < size[1]; j++)
	    {
		int status = check_values (i, j, points[i][j], check[i][j]);
		if (status == -1)
		    throw Exception("test_compression", "Failed in app-defined method");
	    }

	PASSED();

	/*----------------------------------------------------------------------
	* Cleanup
	*----------------------------------------------------------------------
	*/
	delete dataset;
	delete [] tconv_buf;
	return 0;
    } // end try

    // catch all dataset, file, space, and plist exceptions
    catch (Exception& E)
    {
	cerr << " FAILED" << endl;
	cerr << "    <<<  " << E.getDetailMsg() << "  >>>" << endl << endl;

	// clean up and return with failure
	if (dataset != NULL)
	    delete dataset;
	if (tconv_buf)
	    delete [] tconv_buf;
	return -1;
    }
}   // test_compression
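
test_compression leans on a check_values helper that is not shown in this example. A plausible sketch, assuming it compares one element at a time and uses the same test macros (H5_FAILED) as the surrounding file:

// Hypothetical element-wise verifier used by the read-back loops above:
// returns 0 when the value read matches the value written, -1 otherwise.
static int
check_values(hsize_t i, hsize_t j, int apoint, int acheck)
{
    if (apoint != acheck) {
	H5_FAILED();
	cerr << "    Read different values than written." << endl;
	cerr << "    At index " << (unsigned long)i << "," << (unsigned long)j << endl;
	return -1;
    }
    return 0;
}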
Example #3
long SaveContainerHdf5::_writeFile(void* f,Data &aData,
				   CtSaving::HeaderMap &aHeader,
				   CtSaving::FileFormat aFormat) {
        DEB_MEMBER_FUNCT();

        _File* file = (_File*)f;
		size_t buf_size = 0;
		
		// get the proper data type
		PredType data_type(PredType::NATIVE_UINT8);
		switch (aData.type) {
		case Data::UINT8:
		       break;
		case Data::INT8:
		       data_type = PredType::NATIVE_INT8;
		       break;
		case Data::UINT16:
		       data_type = PredType::NATIVE_UINT16;
		       break;
		case Data::INT16:
		       data_type = PredType::NATIVE_INT16;
		       break;
		case Data::UINT32:
		       data_type = PredType::NATIVE_UINT32;
		       break;
		case Data::INT32:
		       data_type = PredType::NATIVE_INT32;
		       break;
		case Data::UINT64:
		       data_type = PredType::NATIVE_UINT64;
		       break;
		case Data::INT64:
		       data_type = PredType::NATIVE_INT64;
		       break;
		case Data::FLOAT:
		       data_type = PredType::NATIVE_FLOAT;
		       break;
		case Data::DOUBLE:
		       data_type = PredType::NATIVE_DOUBLE;
		       break;
		case Data::UNDEF:
		default:
		  THROW_CTL_ERROR(Error) << "Invalid image type";
		}

		try {
			if (!file->m_format_written) {
			       
			        // ISO 8601 Time format
			        time_t now;
				time(&now);
				char buf[sizeof("2011-10-08T07:07:09Z")];
#ifdef WIN32
				struct tm gmtime_now;
				gmtime_s(&gmtime_now, &now);
				strftime(buf, sizeof(buf), "%FT%TZ", &gmtime_now);
#else
				strftime(buf, sizeof(buf), "%FT%TZ", gmtime(&now));
#endif
				string stime = string(buf);
				write_h5_dataset(*file->m_entry,"start_time",stime);
				// Write the header only once, into the "parameters" group.
				// TODO: some keys (e.g. motor_pos, counter_pos from spec) may belong in the measurement group.
				if (!aHeader.empty()) {
					for (map<string, string>::const_iterator it = aHeader.begin(); it != aHeader.end(); it++) {

						string key = it->first;
						string value = it->second;
						write_h5_dataset(*file->m_measurement_detector_parameters,
								 key.c_str(),value);
					}
				}
				delete file->m_measurement_detector_parameters;
				file->m_measurement_detector_parameters = NULL;
					
				// create the image data structure in the file
				hsize_t data_dims[3], max_dims[3];
				data_dims[1] = aData.dimensions[1];
				data_dims[2] = aData.dimensions[0];
				data_dims[0] = m_nbframes;
				max_dims[1] = aData.dimensions[1];
				max_dims[2] = aData.dimensions[0];
				max_dims[0] = H5S_UNLIMITED;
				// Create property list for the dataset and setup chunk size
				DSetCreatPropList plist;
				hsize_t chunk_dims[RANK_THREE];
				// testing direct chunk write, so the chunk dimensions match one image
				chunk_dims[0] = 1; chunk_dims[1] = data_dims[1]; chunk_dims[2] = data_dims[2];
				
				plist.setChunk(RANK_THREE, chunk_dims);

#if defined(WITH_Z_COMPRESSION)
				if (aFormat == CtSaving::HDF5GZ)
				  plist.setDeflate(m_compression_level);
#endif
#if defined(WITH_BS_COMPRESSION)
				if (aFormat == CtSaving::HDF5BS) {
				  unsigned int opt_vals[2]= {0, BSHUF_H5_COMPRESS_LZ4};
				  plist.setFilter(BSHUF_H5FILTER, H5Z_FLAG_MANDATORY, 2, opt_vals);
				}
#endif
				// create new dspace
				file->m_image_dataspace = new DataSpace(RANK_THREE, data_dims, NULL);
				file->m_image_dataset = 
				  new DataSet(file->m_measurement_detector->createDataSet("data",
											  data_type,
											  *file->m_image_dataspace,
											  plist));
				string nxdata = "NXdata";
				write_h5_attribute(*file->m_image_dataset, "NX_class", nxdata);
				string image = "image"; 
				write_h5_attribute(*file->m_image_dataset, "interpretation", image);
				file->m_prev_images_written = 0;
				file->m_format_written = true;
			} else if (file->m_in_append && !m_is_multiset && !file->m_dataset_extended) {
				hsize_t allocated_dims[3];
				file->m_image_dataset = new DataSet(file->m_measurement_detector->
								    openDataSet("data"));
				file->m_image_dataspace = new DataSpace(file->m_image_dataset->getSpace());
				file->m_image_dataspace->getSimpleExtentDims(allocated_dims);

				hsize_t data_dims[3];
				data_dims[1] = aData.dimensions[1];
				data_dims[2] = aData.dimensions[0];
				data_dims[0] = allocated_dims[0] + m_nbframes;

				if (data_dims[1] != allocated_dims[1] || data_dims[2] != allocated_dims[2]) {
					THROW_CTL_ERROR(Error) << "You are trying to extend the dataset with mismatching image dimensions";
				}

				file->m_image_dataset->extend(data_dims);
				file->m_image_dataspace->close();
				delete file->m_image_dataspace;
				file->m_image_dataspace = new DataSpace(file->m_image_dataset->getSpace());
				file->m_prev_images_written = allocated_dims[0];
				file->m_dataset_extended = true;
			}
			// write the image data
			hsize_t image_nb = aData.frameNumber % m_nbframes;

			// we test direct chunk write
			hsize_t offset[RANK_THREE] = {image_nb, 0U, 0U};
			uint32_t filter_mask = 0; 
			hid_t dataset = file->m_image_dataset->getId();
			herr_t  status;
			void * buf_data;
			hid_t dxpl;

			dxpl = H5Pcreate(H5P_DATASET_XFER);

			if ((aFormat == CtSaving::HDF5GZ) || (aFormat == CtSaving::HDF5BS))
			  {
			    ZBufferType* buffers = _takeBuffer(aData.frameNumber);
			    // with single chunk, only one buffer allocated
			    buf_size = buffers->front()->used_size;
			    buf_data = buffers->front()->buffer;
			    //DEB_ALWAYS() << "Image #"<< aData.frameNumber << " buf_size = "<< buf_size;
			    status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, buf_data);
			    if (status<0) {
			      THROW_CTL_ERROR(Error) << "H5DOwrite_chunk() failed";
			    }
			    delete  buffers->front();
			    delete buffers;
			  }
			 else
			   {
			    buf_data = aData.data();
			    buf_size = aData.size();
			    //DEB_ALWAYS() << "Image #"<< aData.frameNumber << " buf_size = "<< buf_size;
			    status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, buf_data);
			    if (status<0) {
			      THROW_CTL_ERROR(Error) << "H5DOwrite_chunk() failed";
			    }

			  } // else

			// release the transfer property list
			H5Pclose(dxpl);
		// catch failure caused by the DataSet operations
		} catch (DataSetIException& error) {
			error.printError();
			THROW_CTL_ERROR(Error) << "DataSet not created successfully: " << error.getCDetailMsg();
		}
		// catch failure caused by the DataSpace operations
		catch (DataSpaceIException& error) {
			THROW_CTL_ERROR(Error) << "DataSpace not created successfully " << error.getCDetailMsg();
		}
		// catch failure caused by any other HDF5 error
		catch (H5::Exception &e) {
			THROW_CTL_ERROR(Error) << e.getCDetailMsg();
		}
		// catch anything not hdf5 related
		catch (Exception &e) {
			THROW_CTL_ERROR(Error) << e.getErrMsg();
		}

		DEB_RETURN();
		return buf_size;
}
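
The write path above relies on H5DOwrite_chunk, which bypasses the HDF5 filter pipeline and datatype conversion and writes a ready-made (already compressed, if filters are enabled) chunk straight to disk. A minimal standalone sketch of the same call; the function and variable names here are illustrative, H5DOwrite_chunk comes from the high-level hdf5_hl library, and in HDF5 >= 1.10.3 H5Dwrite_chunk is the native equivalent:

#include "H5Cpp.h"
#include "hdf5_hl.h"
#include <cstdint>
#include <vector>

// Write one pre-built chunk at the given frame index of a 3-D chunked
// dataset laid out with one chunk per frame, as in the example above.
void write_one_chunk(H5::DataSet& dset, hsize_t frame,
		     const std::vector<uint8_t>& chunk)
{
    hsize_t offset[3] = {frame, 0, 0};	// must be chunk-aligned
    uint32_t filter_mask = 0;		// 0 = all pipeline filters applied
    hid_t dxpl = H5Pcreate(H5P_DATASET_XFER);
    herr_t status = H5DOwrite_chunk(dset.getId(), dxpl, filter_mask,
				    offset, chunk.size(), chunk.data());
    H5Pclose(dxpl);
    if (status < 0)
	throw H5::DataSetIException("write_one_chunk",
				    "H5DOwrite_chunk failed");
}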