Example #1
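// Writes a single frame into the HDF5 file. Each image occupies one chunk and is
// stored with HDF5 direct chunk write, so gzip/bitshuffle pre-compressed buffers
// can be written as-is, bypassing the filter pipeline.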
long SaveContainerHdf5::_writeFile(void* f,Data &aData,
				   CtSaving::HeaderMap &aHeader,
				   CtSaving::FileFormat aFormat) {
        DEB_MEMBER_FUNCT();

        _File* file = (_File*)f;
		size_t buf_size = 0;
		
		// get the proper data type
		PredType data_type(PredType::NATIVE_UINT8);
		switch (aData.type) {
		case Data::UINT8:
		       break;
		case Data::INT8:
		       data_type = PredType::NATIVE_INT8;
		       break;
		case Data::UINT16:
		       data_type = PredType::NATIVE_UINT16;
		       break;
		case Data::INT16:
		       data_type = PredType::NATIVE_INT16;
		       break;
		case Data::UINT32:
		       data_type = PredType::NATIVE_UINT32;
		       break;
		case Data::INT32:
		       data_type = PredType::NATIVE_INT32;
		       break;
		case Data::UINT64:
		       data_type = PredType::NATIVE_UINT64;
		       break;
		case Data::INT64:
		       data_type = PredType::NATIVE_INT64;
		       break;
		case Data::FLOAT:
		       data_type = PredType::NATIVE_FLOAT;
		       break;
		case Data::DOUBLE:
		       data_type = PredType::NATIVE_DOUBLE;
		       break;
		case Data::UNDEF:
		default:
		  THROW_CTL_ERROR(Error) << "Invalid image type";
		}

		try {
			if (!file->m_format_written) {
			       
			        // ISO 8601 Time format
			        time_t now;
				time(&now);
				char buf[sizeof("2011-10-08T07:07:09Z")];
#ifdef WIN32
				struct tm gmtime_now;
				gmtime_s(&gmtime_now, &now);
				strftime(buf, sizeof(buf), "%FT%TZ", &gmtime_now);
#else
				strftime(buf, sizeof(buf), "%FT%TZ", gmtime(&now));
#endif
				string stime = string(buf);
				write_h5_dataset(*file->m_entry,"start_time",stime);
				// write header only once into "parameters" group 
				// but we should write some keys into measurement, like motor_pos counter_pos (spec)???
				if (!aHeader.empty()) {
					for (map<string, string>::const_iterator it = aHeader.begin(); it != aHeader.end(); it++) {

						string key = it->first;
						string value = it->second;
						write_h5_dataset(*file->m_measurement_detector_parameters,
								 key.c_str(),value);
					}
				}
				delete file->m_measurement_detector_parameters;
				file->m_measurement_detector_parameters = NULL;
					
				// create the image data structure in the file
				hsize_t data_dims[3], max_dims[3];
				data_dims[1] = aData.dimensions[1];
				data_dims[2] = aData.dimensions[0];
				data_dims[0] = m_nbframes;
				max_dims[1] = aData.dimensions[1];
				max_dims[2] = aData.dimensions[0];
				max_dims[0] = H5S_UNLIMITED;
				// Create property list for the dataset and setup chunk size
				DSetCreatPropList plist;
				hsize_t chunk_dims[RANK_THREE];
				// direct chunk write is used, so the chunk size is one full image
				chunk_dims[0] = 1; chunk_dims[1] = data_dims[1]; chunk_dims[2] = data_dims[2];
				
				plist.setChunk(RANK_THREE, chunk_dims);

#if defined(WITH_Z_COMPRESSION)
				if (aFormat == CtSaving::HDF5GZ)
				  plist.setDeflate(m_compression_level);
#endif
#if defined(WITH_BS_COMPRESSION)
				if (aFormat == CtSaving::HDF5BS) {
				  unsigned int opt_vals[2]= {0, BSHUF_H5_COMPRESS_LZ4};
				  plist.setFilter(BSHUF_H5FILTER, H5Z_FLAG_MANDATORY, 2, opt_vals);
				}
#endif
				// create new dataspace; unlimited along the frame axis so the
				// dataset can be extended later in append mode
				file->m_image_dataspace = new DataSpace(RANK_THREE, data_dims, max_dims);
				file->m_image_dataset = 
				  new DataSet(file->m_measurement_detector->createDataSet("data",
											  data_type,
											  *file->m_image_dataspace,
											  plist));
				string nxdata = "NXdata";
				write_h5_attribute(*file->m_image_dataset, "NX_class", nxdata);
				string image = "image"; 
				write_h5_attribute(*file->m_image_dataset, "interpretation", image);
				file->m_prev_images_written = 0;
				file->m_format_written = true;
			} else if (file->m_in_append && !m_is_multiset && !file->m_dataset_extended) {
				hsize_t allocated_dims[3];
				file->m_image_dataset = new DataSet(file->m_measurement_detector->
								    openDataSet("data"));
				file->m_image_dataspace = new DataSpace(file->m_image_dataset->getSpace());
				file->m_image_dataspace->getSimpleExtentDims(allocated_dims);

				hsize_t data_dims[3];
				data_dims[1] = aData.dimensions[1];
				data_dims[2] = aData.dimensions[0];
				data_dims[0] = allocated_dims[0] + m_nbframes;

				if (data_dims[1] != allocated_dims[1] || data_dims[2] != allocated_dims[2]) {
					THROW_CTL_ERROR(Error) << "You are trying to extend the dataset with mismatching image dimensions";
				}

				file->m_image_dataset->extend(data_dims);
				file->m_image_dataspace->close();
				delete file->m_image_dataspace;
				file->m_image_dataspace = new DataSpace(file->m_image_dataset->getSpace());
				file->m_prev_images_written = allocated_dims[0];
				file->m_dataset_extended = true;
			}
			// write the image data
			hsize_t image_nb = aData.frameNumber % m_nbframes;

			// direct chunk write: the offset addresses the chunk holding this image
			hsize_t offset[RANK_THREE] = {file->m_prev_images_written + image_nb, 0U, 0U};
			uint32_t filter_mask = 0; 
			hid_t dataset = file->m_image_dataset->getId();
			herr_t  status;
			void * buf_data;
			hid_t dxpl;

			dxpl = H5Pcreate(H5P_DATASET_XFER);

			if ((aFormat == CtSaving::HDF5GZ) || (aFormat == CtSaving::HDF5BS))
			  {
			    ZBufferType* buffers = _takeBuffer(aData.frameNumber);
			    // with single chunk, only one buffer allocated
			    buf_size = buffers->front()->used_size;
			    buf_data = buffers->front()->buffer;
			    //DEB_ALWAYS() << "Image #"<< aData.frameNumber << " buf_size = "<< buf_size;
			    status = H5DOwrite_chunk(dataset, dxpl , filter_mask,  offset, buf_size, buf_data);			
			    if (status<0) {
			      THROW_CTL_ERROR(Error) << "H5DOwrite_chunk() failed";
			    }
			    delete  buffers->front();
			    delete buffers;
			  }
			else
			  {
			    buf_data = aData.data();
			    buf_size = aData.size();
			    //DEB_ALWAYS() << "Image #"<< aData.frameNumber << " buf_size = "<< buf_size;
			    status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, buf_data);
			    if (status < 0) {
			      THROW_CTL_ERROR(Error) << "H5DOwrite_chunk() failed";
			    }
			  }
			// release the dataset transfer property list
			H5Pclose(dxpl);
		// catch failure caused by the DataSet operations
		} catch (DataSetIException& error) {
			error.printError();
			THROW_CTL_ERROR(Error) << "DataSet not created successfully " << error.getCDetailMsg();
		}
		// catch failure caused by the DataSpace operations
		catch (DataSpaceIException& error) {
			THROW_CTL_ERROR(Error) << "DataSpace not created successfully " << error.getCDetailMsg();
		}
		// catch failure caused by any other HDF5 error
		catch (H5::Exception &e) {
			THROW_CTL_ERROR(Error) << e.getCDetailMsg();
		}
		// catch anything not hdf5 related
		catch (Exception &e) {
			THROW_CTL_ERROR(Error) << e.getErrMsg();
		}

		DEB_RETURN();
		return buf_size;
}
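Example #1 stores each frame with HDF5 direct chunk write (H5DOwrite_chunk): since every chunk holds exactly one image, a pre-compressed gzip or bitshuffle buffer can be dropped straight into the file without going through the filter pipeline again. The following sketch illustrates the same technique against the plain HDF5 C API; it is not LImA code, the file name, dataset name and dimensions are placeholders, and it assumes an HDF5 build that ships the high-level library (hdf5_hl), where H5DOwrite_chunk is defined (recent releases expose the equivalent H5Dwrite_chunk in the core library).

// Minimal sketch (assumed setup, not LImA code): one image per chunk,
// written with direct chunk write into a 3-D dataset.
#include <hdf5.h>
#include <hdf5_hl.h>   // H5DOwrite_chunk
#include <cstdint>
#include <vector>

int main()
{
	const hsize_t nframes = 10, height = 512, width = 1024;
	hsize_t dims[3]       = {nframes, height, width};
	hsize_t max_dims[3]   = {H5S_UNLIMITED, height, width};
	hsize_t chunk_dims[3] = {1, height, width};            // one image per chunk

	hid_t file  = H5Fcreate("frames.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
	hid_t space = H5Screate_simple(3, dims, max_dims);
	hid_t dcpl  = H5Pcreate(H5P_DATASET_CREATE);
	H5Pset_chunk(dcpl, 3, chunk_dims);
	hid_t dset  = H5Dcreate2(file, "data", H5T_NATIVE_UINT16, space,
	                         H5P_DEFAULT, dcpl, H5P_DEFAULT);

	std::vector<uint16_t> frame(height * width, 0);        // dummy image data
	for (hsize_t i = 0; i < nframes; ++i) {
		// the offset selects the chunk holding frame i (must be chunk-aligned)
		hsize_t offset[3] = {i, 0, 0};
		// filter_mask = 0: the buffer is written exactly as passed in
		if (H5DOwrite_chunk(dset, H5P_DEFAULT, 0, offset,
		                    frame.size() * sizeof(uint16_t), frame.data()) < 0)
			return 1;
	}

	H5Dclose(dset);
	H5Pclose(dcpl);
	H5Sclose(space);
	H5Fclose(file);
	return 0;
}

With compression enabled, the only change is that the buffer passed to the chunk write already holds the filtered bytes and its size is the compressed size, which is what the HDF5GZ/HDF5BS branch of the example does with the buffer returned by _takeBuffer().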
Example #2
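// Writes a single frame into the HDF5 file (plain HDF5 format): the data go into
// a chunked, extendible 3-D dataset via a one-image hyperslab selection.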
void SaveContainerHdf5::_writeFile(Data &aData, CtSaving::HeaderMap &aHeader, CtSaving::FileFormat aFormat) {
	DEB_MEMBER_FUNCT();
	if (aFormat == CtSaving::HDF5) {

		// get the proper data type
		PredType data_type(PredType::NATIVE_UINT8);
		switch (aData.type) {
		case Data::UINT8:
		       break;
		case Data::INT8:
		       data_type = PredType::NATIVE_INT8;
		       break;
		case Data::UINT16:
		       data_type = PredType::NATIVE_UINT16;
		       break;
		case Data::INT16:
		       data_type = PredType::NATIVE_INT16;
		       break;
		case Data::UINT32:
		       data_type = PredType::NATIVE_UINT32;
		       break;
		case Data::INT32:
		       data_type = PredType::NATIVE_INT32;
		       break;
		case Data::UINT64:
		       data_type = PredType::NATIVE_UINT64;
		       break;
		case Data::INT64:
		       data_type = PredType::NATIVE_INT64;
		       break;
		case Data::FLOAT:
		       data_type = PredType::NATIVE_FLOAT;
		       break;
		case Data::DOUBLE:
		       data_type = PredType::NATIVE_DOUBLE;
		       break;
		case Data::UNDEF:
		default:
		  THROW_CTL_ERROR(Error) << "Invalid image type";
		}

		try {
			if (!m_format_written) {
			       
			        // ISO 8601 Time format
			        time_t now;
				time(&now);
				char buf[sizeof("2011-10-08T07:07:09Z")];
				strftime(buf, sizeof(buf), "%FT%TZ", gmtime(&now));
				string stime = string(buf);
				write_h5_dataset(*m_entry,"start_time",stime);
				// write header only once into "parameters" group 
				// but we should write some keys into measurement, like motor_pos counter_pos (spec)???
				if (!aHeader.empty()) {
					for (map<string, string>::const_iterator it = aHeader.begin(); it != aHeader.end(); it++) {

						string key = it->first;
						string value = it->second;
						write_h5_dataset(*m_measurement_detector_parameters,key.c_str(),value);
					}
				}
				delete m_measurement_detector_parameters;
				m_measurement_detector_parameters = NULL;
					
				// create the image data structure in the file
				hsize_t data_dims[3], max_dims[3];
				data_dims[1] = aData.dimensions[1];
				data_dims[2] = aData.dimensions[0];
				data_dims[0] = m_nbframes;
				max_dims[1] = aData.dimensions[1];
				max_dims[2] = aData.dimensions[0];
				max_dims[0] = H5S_UNLIMITED;
				// Create property list for the dataset and setup chunk size
				DSetCreatPropList plist;
				hsize_t chunk_dims[3];
				// calculate an optimized chunk size
				calculate_chunck(data_dims, chunk_dims, aData.depth());
				plist.setChunk(RANK_THREE, chunk_dims);

				m_image_dataspace = new DataSpace(RANK_THREE, data_dims, max_dims); // create new dspace
				m_image_dataset = new DataSet(m_measurement_detector->createDataSet("data", data_type, *m_image_dataspace, plist));
				string nxdata = "NXdata";
				write_h5_attribute(*m_image_dataset, "NX_class", nxdata);
				string image = "image"; 
				write_h5_attribute(*m_image_dataset, "interpretation", image);
				m_prev_images_written = 0;
				m_format_written = true;
			} else if (m_in_append && !m_is_multiset && !m_dataset_extended) {
				hsize_t allocated_dims[3];
				m_image_dataset = new DataSet(m_measurement_detector->openDataSet("data"));
				m_image_dataspace = new DataSpace(m_image_dataset->getSpace());
				m_image_dataspace->getSimpleExtentDims(allocated_dims);

				hsize_t data_dims[3];
				data_dims[1] = aData.dimensions[1];
				data_dims[2] = aData.dimensions[0];
				data_dims[0] = allocated_dims[0] + m_nbframes;

				if (data_dims[1] != allocated_dims[1] || data_dims[2] != allocated_dims[2]) {
					THROW_CTL_ERROR(Error) << "You are trying to extend the dataset with mismatching image dimensions";
				}

				m_image_dataset->extend(data_dims);
				m_image_dataspace->close();
				delete m_image_dataspace;
				m_image_dataspace = new DataSpace(m_image_dataset->getSpace());
				m_prev_images_written = allocated_dims[0];
				m_dataset_extended = true;
			}
			// write the image data
			hsize_t slab_dim[3];
			slab_dim[2] = aData.dimensions[0];
			slab_dim[1] = aData.dimensions[1];
			slab_dim[0] = 1;
			DataSpace slabspace = DataSpace(RANK_THREE, slab_dim);
			int image_nb = aData.frameNumber % m_nbframes;
			hsize_t start[] = { m_prev_images_written + image_nb, 0, 0 };
			hsize_t count[] = { 1, aData.dimensions[1], aData.dimensions[0] };
			m_image_dataspace->selectHyperslab(H5S_SELECT_SET, count, start);
			m_image_dataset->write((u_int8_t*) aData.data(), data_type, slabspace, *m_image_dataspace);

		// catch failure caused by the DataSet operations
		} catch (DataSetIException& error) {
			error.printError();
			THROW_CTL_ERROR(Error) << "DataSet not created successfully " << error.getCDetailMsg();
		}
		// catch failure caused by the DataSpace operations
		catch (DataSpaceIException& error) {
			THROW_CTL_ERROR(Error) << "DataSpace not created successfully " << error.getCDetailMsg();
		}
		// catch failure caused by any other HDF5 error
		catch (H5::Exception &e) {
			THROW_CTL_ERROR(Error) << e.getCDetailMsg();
		}
		// catch anything not hdf5 related
		catch (Exception &e) {
			THROW_CTL_ERROR(Error) << e.getErrMsg();
		}
	}
	DEB_RETURN();
}
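Example #2 is the uncompressed variant: the dataset is created chunked with an unlimited first dimension, and every frame goes in through a one-image hyperslab selection, which also lets append mode extend() the dataset later. Below is a minimal, self-contained sketch of that hyperslab pattern with the HDF5 C++ API; the file name, dataset name and dimensions are placeholders and it is not LImA code.

// Minimal sketch (assumed setup, not LImA code): per-frame hyperslab writes
// into a chunked, extendible 3-D dataset.
#include <H5Cpp.h>
#include <cstdint>
#include <vector>
using namespace H5;

int main()
{
	const hsize_t nframes = 10, height = 512, width = 1024;
	hsize_t dims[3]     = {nframes, height, width};
	hsize_t max_dims[3] = {H5S_UNLIMITED, height, width};
	hsize_t chunk[3]    = {1, height, width};              // one image per chunk

	H5File file("frames.h5", H5F_ACC_TRUNC);
	DSetCreatPropList plist;
	plist.setChunk(3, chunk);
	DataSpace filespace(3, dims, max_dims);
	DataSet dset = file.createDataSet("data", PredType::NATIVE_UINT16,
	                                  filespace, plist);

	std::vector<uint16_t> frame(height * width, 0);        // dummy image data
	hsize_t slab[3] = {1, height, width};                  // one image
	DataSpace memspace(3, slab);

	for (hsize_t i = 0; i < nframes; ++i) {
		// select the slab of the file dataspace that corresponds to frame i
		hsize_t start[3] = {i, 0, 0};
		DataSpace fspace = dset.getSpace();
		fspace.selectHyperslab(H5S_SELECT_SET, slab, start);
		dset.write(frame.data(), PredType::NATIVE_UINT16, memspace, fspace);
	}
	return 0;                                              // H5File destructor closes everything
}

Appending works the same way as in the example: reopen the dataset, call extend() with a larger first dimension, and use the number of frames already present as the starting offset for the next hyperslabs.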