static void test_null_filter()
{
    // Output message about test being performed
    SUBTEST("'Null' filter");
    try {
        //hsize_t null_size; // Size of dataset with null filter

        // Prepare dataset create property list
        DSetCreatPropList dsplist;
        dsplist.setChunk(2, chunk_size);

        if (H5Zregister(H5Z_BOGUS) < 0)
            throw Exception("test_null_filter", "H5Zregister failed");

        // Set some pretend filter
        dsplist.setFilter(H5Z_FILTER_BOGUS);

        // this function is just a stub right now; will work on it later - BMR
        //if(test_filter_internal(file,DSET_BOGUS_NAME,dc,DISABLE_FLETCHER32,DATA_NOT_CORRUPTED,&null_size)<0)
        //    throw Exception("test_null_filter", "test_filter_internal failed");

        // Close objects.
        dsplist.close();
        PASSED();
    } // end of try

    // catch all other exceptions
    catch (Exception& E) {
        issue_fail_msg("test_null_filter()", __LINE__, __FILE__, E.getCDetailMsg());
    }
} // test_null_filter
/**
 * Sets up the chunking and compression rate.
 * @param length
 * @return The configured property list
 */
DSetCreatPropList getPropList(const std::size_t length)
{
    DSetCreatPropList propList;
    hsize_t chunk_dims[1] = {length};
    propList.setChunk(1, chunk_dims);
    propList.setDeflate(6);
    return propList;
}
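// A minimal usage sketch for getPropList() above (hypothetical caller, not
// part of the original code): it assumes an open H5File named `file`, a
// `using namespace H5`, plus <H5Cpp.h> and <vector>, as in the surrounding
// snippets. The returned list carries the chunk layout and deflate level
// into createDataSet().
void writeChunkedVector(H5File &file, const std::vector<double> &values)
{
    hsize_t dims[1] = {values.size()};
    DataSpace dataspace(1, dims);
    DSetCreatPropList propList = getPropList(values.size());
    DataSet dataset = file.createDataSet("values", PredType::NATIVE_DOUBLE,
                                         dataspace, propList);
    dataset.write(values.data(), PredType::NATIVE_DOUBLE);
}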
// * * * * * * * * * * * * * * * * * * * * * * * * * *
void H5_C3PO_NS::createExtendibleDataset(std::string FILE_NAME, const char* datasetName_)
{
    hsize_t dims[2] = {0, 1};    // dataset dimensions at creation
    hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
    DataSpace mspace1(RANK, dims, maxdims);

    H5File* file = new H5File(FILE_NAME.c_str(), H5F_ACC_RDWR);

    // Define a floating-point datatype for the data (FloatType, not IntType,
    // to match NATIVE_DOUBLE)
    FloatType datatype(PredType::NATIVE_DOUBLE);
    datatype.setOrder(H5T_ORDER_LE);

    // Modify dataset creation properties, i.e. enable chunking
    DSetCreatPropList cparms;
    hsize_t chunk_dims[2] = {6, 1};
    cparms.setChunk(RANK, chunk_dims);

    // Set fill value for the dataset; the fill value must be a double to
    // match the NATIVE_DOUBLE type it is registered with
    double fill_val = 1.0;
    cparms.setFillValue(PredType::NATIVE_DOUBLE, &fill_val);

    DataSet dataset = file->createDataSet(datasetName_, PredType::NATIVE_DOUBLE,
                                          mspace1, cparms);

    file->close();
    delete file;
}
void hdf5ExternalArrayTestCreate(CuTest *testCase)
{
    for (hsize_t chunkIdx = 0; chunkIdx < numSizes; ++chunkIdx) {
        hsize_t chunkSize = chunkSizes[chunkIdx];
        setup();
        try {
            IntType datatype(PredType::NATIVE_HSIZE);
            H5File file(H5std_string(fileName), H5F_ACC_TRUNC);
            Hdf5ExternalArray myArray;
            DSetCreatPropList cparms;
            if (chunkSize > 0) {
                cparms.setDeflate(2);
                cparms.setChunk(1, &chunkSize);
            }
            myArray.create(&file, datasetName, datatype, N, &cparms);
            for (hsize_t i = 0; i < N; ++i) {
                hsize_t *block = reinterpret_cast<hsize_t *>(myArray.getUpdate(i));
                *block = i;
            }
            myArray.write();
            file.flush(H5F_SCOPE_LOCAL);
            file.close();
            checkNumbers(testCase);
        } catch (Exception &exception) {
            cerr << exception.getCDetailMsg() << endl;
            CuAssertTrue(testCase, 0);
        } catch (...) {
            CuAssertTrue(testCase, 0);
        }
        teardown();
    }
}
HDF5HandlerBase::HDF5HandlerBase(const std::string &fileName, const std::string &datasetName)
    : FILE_NAME(H5std_string(fileName))
    , DATASETNAME(H5std_string(datasetName))
{
    try {
        Exception::dontPrint();

        file = H5File(FILE_NAME, H5F_ACC_TRUNC);

        hsize_t dims[1] = {0};
        hsize_t maxdims[1] = {H5S_UNLIMITED};
        hsize_t chunk_dims[1] = {10000};

        DataSpace dataspace = DataSpace(1, dims, maxdims);

        DSetCreatPropList prop;
        prop.setChunk(1, chunk_dims);

        dataset = file.createDataSet(DATASETNAME, PredType::STD_I32BE,
                                     dataspace, prop);

        prop.close();
        dataspace.close();
    } catch (Exception &error) {
        // Rethrow FileIException, DataSetIException, DataSpaceIException
        throw;
    }
}
void HDF5Genome::setGenomeBottomDimensions(
    const vector<Sequence::UpdateInfo>& bottomDimensions)
{
    hal_size_t numBottomSegments = 0;
    for (vector<Sequence::UpdateInfo>::const_iterator i = bottomDimensions.begin();
         i != bottomDimensions.end(); ++i) {
        numBottomSegments += i->_numSegments;
    }
    H5::Exception::dontPrint();
    try {
        DataSet d = _group.openDataSet(bottomArrayName);
        _group.unlink(bottomArrayName);
    } catch (H5::Exception&) {}
    hal_size_t numChildren = _alignment->getChildNames(_name).size();

    // scale down the chunk size in order to keep chunks proportional to
    // the size of a bottom segment with two children.
    hsize_t chunk;
    _dcprops.getChunk(1, &chunk);
    double scale = numChildren < 10 ? 1. : 10. / numChildren;
    chunk *= scale;
    DSetCreatPropList botDC;
    botDC.copy(_dcprops);
    botDC.setChunk(1, &chunk);

    _bottomArray.create(&_group, bottomArrayName,
                        HDF5BottomSegment::dataType(numChildren),
                        numBottomSegments + 1, &botDC, _numChunksInArrayBuffer);
    _numChildrenInBottomArray = numChildren;
    _childCache.clear();
}
// Creates a dataset: an array of HDF5 CompoundType.
// To make dimension i unlimited, pass a chunk_dims[i] > 0 that differs from
// max_dims[i] (e.g. chunk_dims[i] = NCHUNK and max_dims[i] = 0). For a fixed
// dimension, pass max_dims[i] = N and chunk_dims[i] = N.
ArfRecordingData* ArfFileBase::createCompoundDataSet(CompType type, String path,
                                                     int dimension, int* max_dims,
                                                     int* chunk_dims)
{
    ScopedPointer<DataSet> data;
    DSetCreatPropList prop;
    hsize_t Hdims[3];
    hsize_t Hmax_dims[3];
    hsize_t Hchunk_dims[3];

    for (int i = 0; i < dimension; i++) {
        Hchunk_dims[i] = chunk_dims[i];
        if (chunk_dims[i] > 0 && chunk_dims[i] != max_dims[i]) {
            Hmax_dims[i] = H5S_UNLIMITED;
            Hdims[i] = 0;
        } else {
            Hmax_dims[i] = max_dims[i];
            Hdims[i] = max_dims[i];
        }
    }

    DataSpace dSpace(dimension, Hdims, Hmax_dims);
    prop.setChunk(dimension, Hchunk_dims);
    data = new DataSet(file->createDataSet(path.toUTF8(), type, dSpace, prop));
    return new ArfRecordingData(data.release());
}
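// Hedged usage sketch for the convention documented above (all names here are
// hypothetical, not from the original sources): dimension 0 is made unlimited
// by passing a chunk size that differs from its max, and dimension 1 stays
// fixed at 32 channels. Assumes the caller owns the returned wrapper.
void exampleCreateRecording(ArfFileBase &arf, CompType &sampleType)
{
    int max_dims[2]   = {0, 32};    // 0 on dim 0 => unlimited, per the comment
    int chunk_dims[2] = {1024, 32}; // chunk != max on dim 0 triggers H5S_UNLIMITED
    ArfRecordingData *rec = arf.createCompoundDataSet(sampleType, "/rec/0",
                                                      2, max_dims, chunk_dims);
    delete rec;
}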
void HDF5CLParser::applyToDCProps(DSetCreatPropList& dcprops) const
{
    if (hasOption("chunk")) {
        hsize_t chunk = getOption<hsize_t>("chunk");
        hsize_t deflate = getOption<hsize_t>("deflate");
        dcprops.setChunk(1, &chunk);
        dcprops.setDeflate(deflate);
    }
}
HDF5RecordingData* HDF5FileBase::createDataSet(DataTypes type, int dimension,
                                               int* size, int* chunking, String path)
{
    ScopedPointer<DataSet> data;
    DSetCreatPropList prop;
    if (!opened)
        return nullptr;

    // Right now these classes don't support datasets with rank > 3.
    // If it's needed in the future we can extend them to be of generic rank.
    if ((dimension > 3) || (dimension < 1))
        return nullptr;

    DataType H5type = getH5Type(type);

    hsize_t dims[3], chunk_dims[3], max_dims[3];
    for (int i = 0; i < dimension; i++) {
        dims[i] = size[i];
        if (chunking[i] > 0) {
            chunk_dims[i] = chunking[i];
            max_dims[i] = H5S_UNLIMITED;
        } else {
            chunk_dims[i] = size[i];
            max_dims[i] = size[i];
        }
    }

    try {
        DataSpace dSpace(dimension, dims, max_dims);
        prop.setChunk(dimension, chunk_dims);
        data = new DataSet(file->createDataSet(path.toUTF8(), H5type, dSpace, prop));
        return new HDF5RecordingData(data.release());
    } catch (DataSetIException& error) {
        error.printError();
        return nullptr;
    } catch (FileIException& error) {
        error.printError();
        return nullptr;
    } catch (DataSpaceIException& error) {
        error.printError();
        return nullptr;
    }
}
void hdf5DNATypeTest(CuTest *testCase)
{
    for (hsize_t chunkIdx = 0; chunkIdx < numSizes; ++chunkIdx) {
        hsize_t chunkSize = chunkSizes[chunkIdx];
        setup();
        try {
            PredType datatype = HDF5DNA::dataType();
            H5File file(H5std_string(fileName), H5F_ACC_TRUNC);
            HDF5ExternalArray myArray;
            DSetCreatPropList cparms;
            if (chunkSize > 0) {
                cparms.setChunk(1, &chunkSize);
            }
            hsize_t NEVEN = N % 2 ? N + 1 : N;
            myArray.create(&file, datasetName, datatype, NEVEN / 2, &cparms);
            for (hsize_t i = 0; i < NEVEN / 2; ++i) {
                unsigned char value = 0U;
                HDF5DNA::pack(idxToDNA(i * 2), i * 2, value);
                HDF5DNA::pack(idxToDNA((i * 2) + 1), (i * 2) + 1, value);
                myArray.setValue(i, 0, value);
            }
            myArray.write();
            file.flush(H5F_SCOPE_LOCAL);
            file.close();

            H5File rfile(H5std_string(fileName), H5F_ACC_RDONLY);
            HDF5ExternalArray readArray;
            readArray.load(&rfile, datasetName);
            for (hsize_t i = 0; i < NEVEN / 2; ++i) {
                unsigned char value = readArray.getValue<unsigned char>(i, 0);
                char v1 = HDF5DNA::unpack(0, value);
                char v2 = HDF5DNA::unpack(1, value);
                CuAssertTrue(testCase, v1 == idxToDNA(i * 2));
                CuAssertTrue(testCase, v2 == idxToDNA((i * 2) + 1));
            }
        } catch (Exception &exception) {
            cerr << exception.getCDetailMsg() << endl;
            CuAssertTrue(testCase, 0);
        } catch (...) {
            CuAssertTrue(testCase, 0);
        }
        teardown();
    }
}
void write_hdf5_image(H5File h5f, const char *name, const Mat &im)
{
    DSetCreatPropList cparms;
    hsize_t chunk_dims[2] = {256, 256};
    hsize_t dims[2];
    dims[0] = im.size().height;
    dims[1] = im.size().width;

    // clamp the chunk to the image extent; HDF5 will not accept a chunk
    // larger than a fixed dataset dimension
    if (chunk_dims[0] > dims[0]) {
        chunk_dims[0] = dims[0];
    }
    if (chunk_dims[1] > dims[1]) {
        chunk_dims[1] = dims[1];
    }
    cparms.setChunk(2, chunk_dims);
    cparms.setShuffle();
    cparms.setDeflate(5);

    DataSet dataset = h5f.createDataSet(name, PredType::NATIVE_FLOAT,
                                        DataSpace(2, dims, dims), cparms);
    Mat image;
    if (im.type() != CV_32F)
        im.convertTo(image, CV_32F);
    else
        image = im;

    DataSpace imspace;
    float *imdata;
    if (image.isContinuous()) {
        imspace = dataset.getSpace(); // same extent as the dataset
        imspace.selectAll();
        imdata = image.ptr<float>();
    } else {
        // we are working with an ROI
        assert(image.isSubmatrix());
        Size parent_size;
        Point parent_ofs;
        image.locateROI(parent_size, parent_ofs);
        hsize_t parent_count[2];
        parent_count[0] = parent_size.height;
        parent_count[1] = parent_size.width;
        imspace.setExtentSimple(2, parent_count);
        hsize_t im_offset[2], im_size[2];
        im_offset[0] = parent_ofs.y;
        im_offset[1] = parent_ofs.x;
        im_size[0] = image.size().height;
        im_size[1] = image.size().width;
        imspace.selectHyperslab(H5S_SELECT_SET, im_size, im_offset);
        imdata = image.ptr<float>() - parent_ofs.x - parent_ofs.y * parent_size.width;
    }
    dataset.write(imdata, PredType::NATIVE_FLOAT, imspace);
}
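// The clamping at the top of write_hdf5_image() matters because HDF5 rejects
// a chunk dimension larger than the corresponding fixed dataset dimension.
// A standalone helper capturing the same idea (hypothetical, not part of the
// original sources):
static void clamp_chunk_dims(hsize_t *chunk_dims, const hsize_t *dims, int rank)
{
    for (int i = 0; i < rank; ++i) {
        if (chunk_dims[i] > dims[i])
            chunk_dims[i] = dims[i]; // never chunk past the dataset extent
    }
}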
static DataSet create_dataset(H5File h5f, const char *name)
{
    DSetCreatPropList cparms;
    hsize_t chunk_dims[2] = {256, 256};
    hsize_t dims[2];
    cparms.setChunk(2, chunk_dims);
    cparms.setShuffle();
    cparms.setDeflate(5);
    dims[0] = imsize.height;
    dims[1] = imsize.width;
    return h5f.createDataSet(name, PredType::NATIVE_FLOAT,
                             DataSpace(2, dims, dims), cparms);
}
// When the column header is complete, create a table with
// appropriately typed columns and prepare to write data to it.
void end_column (void* state)
{
    program_state_t* s = (program_state_t*)state;

    // Create a global dataspace.
    s->current_dims = 0;
    hsize_t max_dims = H5S_UNLIMITED;
    DataSpace global_dataspace(1, &s->current_dims, &max_dims);

    // Define an HDF5 datatype based on the Byfl column header.
    construct_hdf5_datatype(s);

    // Create a dataset. Enable chunking (required because of the
    // H5S_UNLIMITED dimension) and deflate compression (optional).
    DSetCreatPropList proplist;
    proplist.setChunk(1, &chunk_size);
    proplist.setDeflate(9); // Maximal compression
    s->dataset = s->hdf5file.createDataSet(s->table_name, s->datatype,
                                           global_dataspace, proplist);
}
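// Chunking is not optional here: HDF5 requires a chunked layout whenever any
// dataspace dimension is H5S_UNLIMITED, which is why end_column() sets a
// chunk before creating the dataset. A minimal self-contained sketch of the
// same pattern (file handle, dataset name, and chunk size are illustrative):
void create_appendable_dataset(H5File &h5file)
{
    hsize_t cur = 0, max = H5S_UNLIMITED, chunk = 4096;
    DataSpace space(1, &cur, &max);  // starts empty, grows without bound
    DSetCreatPropList plist;
    plist.setChunk(1, &chunk);       // mandatory for the unlimited dimension
    plist.setDeflate(9);
    h5file.createDataSet("events", PredType::NATIVE_DOUBLE, space, plist);
}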
void HDF5Genome::setDimensions(
    const vector<Sequence::Info>& sequenceDimensions,
    bool storeDNAArrays)
{
    _totalSequenceLength = 0;
    hal_size_t totalSeq = sequenceDimensions.size();
    hal_size_t maxName = 0;

    // Copy segment dimensions to use the external interface
    vector<Sequence::UpdateInfo> topDimensions;
    topDimensions.reserve(sequenceDimensions.size());
    vector<Sequence::UpdateInfo> bottomDimensions;
    bottomDimensions.reserve(sequenceDimensions.size());

    // Compute summary info from the list of sequence Dimensions
    for (vector<Sequence::Info>::const_iterator i = sequenceDimensions.begin();
         i != sequenceDimensions.end(); ++i) {
        _totalSequenceLength += i->_length;
        maxName = max(static_cast<hal_size_t>(i->_name.length()), maxName);
        topDimensions.push_back(Sequence::UpdateInfo(i->_name, i->_numTopSegments));
        bottomDimensions.push_back(Sequence::UpdateInfo(i->_name, i->_numBottomSegments));
    }

    // Unlink the DNA and segment arrays if they exist (using
    // exceptions is the only way I know how right now). Note that
    // the file needs to be refactored to take advantage of the new
    // space.
    H5::Exception::dontPrint();
    try {
        DataSet d = _group.openDataSet(dnaArrayName);
        _group.unlink(dnaArrayName);
    } catch (H5::Exception&) {}
    try {
        DataSet d = _group.openDataSet(sequenceIdxArrayName);
        _group.unlink(sequenceIdxArrayName);
    } catch (H5::Exception&) {}
    try {
        DataSet d = _group.openDataSet(sequenceNameArrayName);
        _group.unlink(sequenceNameArrayName);
    } catch (H5::Exception&) {}

    if (_totalSequenceLength > 0 && storeDNAArrays == true) {
        hal_size_t arrayLength = _totalSequenceLength / 2;
        if (_totalSequenceLength % 2) {
            ++arrayLength;
            _rup->set(rupGroupName, "1");
        } else {
            _rup->set(rupGroupName, "0");
        }
        hsize_t chunk;
        _dcprops.getChunk(1, &chunk);
        // enlarge the chunk size because dna bases are so much smaller
        // than segments (about 30x). we default to 10x enlargement
        // since they seem to compress about 3x worse.
        chunk *= dnaChunkScale;
        DSetCreatPropList dnaDC;
        dnaDC.copy(_dcprops);
        dnaDC.setChunk(1, &chunk);
        _dnaArray.create(&_group, dnaArrayName, HDF5DNA::dataType(),
                         arrayLength, &dnaDC, _numChunksInArrayBuffer);
    }
    if (totalSeq > 0) {
        _sequenceIdxArray.create(&_group, sequenceIdxArrayName,
                                 HDF5Sequence::idxDataType(), totalSeq + 1,
                                 &_dcprops, _numChunksInArrayBuffer);
        _sequenceNameArray.create(&_group, sequenceNameArrayName,
                                  HDF5Sequence::nameDataType(maxName + 1), totalSeq,
                                  &_dcprops, _numChunksInArrayBuffer);
        writeSequences(sequenceDimensions);
    }

    // Do the same as above for the segments.
    setGenomeTopDimensions(topDimensions);
    setGenomeBottomDimensions(bottomDimensions);

    _parentCache = NULL;
    _childCache.clear();
}
void hdf5SequenceTypeTest(CuTest *testCase)
{
    for (hsize_t lengthIdx = 0; lengthIdx < numLengths; ++lengthIdx) {
        hsize_t length = maxNameLength[lengthIdx];
        for (hsize_t chunkIdx = 0; chunkIdx < numSizes; ++chunkIdx) {
            hsize_t chunkSize = chunkSizes[chunkIdx];
            setup();
            try {
                CompType datatype = HDF5Sequence::dataType(length);
                H5File file(H5std_string(fileName), H5F_ACC_TRUNC);
                HDF5ExternalArray myArray;
                DSetCreatPropList cparms;
                if (chunkSize > 0) {
                    cparms.setChunk(1, &chunkSize);
                }
                myArray.create(&file, datasetName, datatype, N, &cparms);
                hal_size_t totalTopSegments = 0;
                hal_size_t totalBottomSegments = 0;
                for (hsize_t i = 0; i < N; ++i) {
                    HDF5Sequence sequence(NULL, &myArray, i);
                    Sequence::Info seqInfo(genName(i, length), i * 2, i * 3, i * 4);
                    sequence.set(i, seqInfo, totalTopSegments, totalBottomSegments);
                    totalTopSegments += seqInfo._numTopSegments;
                    totalBottomSegments += seqInfo._numBottomSegments;
                }
                myArray.write();
                file.flush(H5F_SCOPE_LOCAL);
                file.close();

                H5File rfile(H5std_string(fileName), H5F_ACC_RDONLY);
                HDF5ExternalArray readArray;
                readArray.load(&rfile, datasetName);
                for (hsize_t i = 0; i < N; ++i) {
                    HDF5Sequence sequence(NULL, &readArray, i);
                    CuAssertTrue(testCase, sequence.getName() == genName(i, length));
                    CuAssertTrue(testCase, sequence.getStartPosition() == i);
                    CuAssertTrue(testCase, sequence.getSequenceLength() == i * 2);
                    CuAssertTrue(testCase, sequence.getNumTopSegments() == i * 3);
                    CuAssertTrue(testCase, sequence.getNumBottomSegments() == i * 4);
                }
            } catch (Exception &exception) {
                cerr << exception.getCDetailMsg() << endl;
                CuAssertTrue(testCase, 0);
            } catch (...) {
                CuAssertTrue(testCase, 0);
            }
            teardown();
        }
    }
}
void SaveContainerHdf5::_writeFile(Data &aData, CtSaving::HeaderMap &aHeader,
                                   CtSaving::FileFormat aFormat)
{
    DEB_MEMBER_FUNCT();
    if (aFormat == CtSaving::HDF5) {
        // get the proper data type
        PredType data_type(PredType::NATIVE_UINT8);
        switch (aData.type) {
        case Data::UINT8: break;
        case Data::INT8: data_type = PredType::NATIVE_INT8; break;
        case Data::UINT16: data_type = PredType::NATIVE_UINT16; break;
        case Data::INT16: data_type = PredType::NATIVE_INT16; break;
        case Data::UINT32: data_type = PredType::NATIVE_UINT32; break;
        case Data::INT32: data_type = PredType::NATIVE_INT32; break;
        case Data::UINT64: data_type = PredType::NATIVE_UINT64; break;
        case Data::INT64: data_type = PredType::NATIVE_INT64; break;
        case Data::FLOAT: data_type = PredType::NATIVE_FLOAT; break;
        case Data::DOUBLE: data_type = PredType::NATIVE_DOUBLE; break;
        case Data::UNDEF:
        default:
            THROW_CTL_ERROR(Error) << "Invalid image type";
        }

        try {
            if (!m_format_written) {
                // ISO 8601 Time format
                time_t now;
                time(&now);
                char buf[sizeof("2011-10-08T07:07:09Z")];
                strftime(buf, sizeof(buf), "%FT%TZ", gmtime(&now));
                string stime = string(buf);
                write_h5_dataset(*m_entry, "start_time", stime);

                // write header only once into "parameters" group
                // but we should write some keys into measurement, like motor_pos counter_pos (spec)???
                if (!aHeader.empty()) {
                    for (map<string, string>::const_iterator it = aHeader.begin();
                         it != aHeader.end(); it++) {
                        string key = it->first;
                        string value = it->second;
                        write_h5_dataset(*m_measurement_detector_parameters, key.c_str(), value);
                    }
                }
                delete m_measurement_detector_parameters;
                m_measurement_detector_parameters = NULL;

                // create the image data structure in the file
                hsize_t data_dims[3], max_dims[3];
                data_dims[1] = aData.dimensions[1];
                data_dims[2] = aData.dimensions[0];
                data_dims[0] = m_nbframes;
                max_dims[1] = aData.dimensions[1];
                max_dims[2] = aData.dimensions[0];
                max_dims[0] = H5S_UNLIMITED;

                // Create property list for the dataset and setup chunk size
                DSetCreatPropList plist;
                hsize_t chunk_dims[3];
                // calculate an optimized chunking
                calculate_chunck(data_dims, chunk_dims, aData.depth());
                plist.setChunk(RANK_THREE, chunk_dims);

                // create new dspace
                m_image_dataspace = new DataSpace(RANK_THREE, data_dims, max_dims);
                m_image_dataset = new DataSet(m_measurement_detector->createDataSet("data", data_type, *m_image_dataspace, plist));
                string nxdata = "NXdata";
                write_h5_attribute(*m_image_dataset, "NX_class", nxdata);
                string image = "image";
                write_h5_attribute(*m_image_dataset, "interpretation", image);
                m_prev_images_written = 0;
                m_format_written = true;
            } else if (m_in_append && !m_is_multiset && !m_dataset_extended) {
                hsize_t allocated_dims[3];
                m_image_dataset = new DataSet(m_measurement_detector->openDataSet("data"));
                m_image_dataspace = new DataSpace(m_image_dataset->getSpace());
                m_image_dataspace->getSimpleExtentDims(allocated_dims);

                hsize_t data_dims[3];
                data_dims[1] = aData.dimensions[1];
                data_dims[2] = aData.dimensions[0];
                data_dims[0] = allocated_dims[0] + m_nbframes;
                // a mismatch on either axis is an error, so test with ||
                // (the original tested with &&, which missed single-axis mismatches)
                if (data_dims[1] != allocated_dims[1] || data_dims[2] != allocated_dims[2]) {
                    THROW_CTL_ERROR(Error) << "You are trying to extend the dataset with mismatching image dimensions";
                }
                m_image_dataset->extend(data_dims);
                // refresh the dataspace after the extend; release the old
                // dataspace, not the dataset (the original deleted the dataset
                // and then dereferenced it)
                m_image_dataspace->close();
                delete m_image_dataspace;
                m_image_dataspace = new DataSpace(m_image_dataset->getSpace());
                m_prev_images_written = allocated_dims[0];
                m_dataset_extended = true;
            }

            // write the image data
            hsize_t slab_dim[3];
            slab_dim[2] = aData.dimensions[0];
            slab_dim[1] = aData.dimensions[1];
            slab_dim[0] = 1;
            DataSpace slabspace = DataSpace(RANK_THREE, slab_dim);
            int image_nb = aData.frameNumber % m_nbframes;
            hsize_t start[] = {m_prev_images_written + image_nb, 0, 0};
            hsize_t count[] = {1, aData.dimensions[1], aData.dimensions[0]};
            m_image_dataspace->selectHyperslab(H5S_SELECT_SET, count, start);
            m_image_dataset->write((u_int8_t*)aData.data(), data_type, slabspace, *m_image_dataspace);

        // catch failure caused by the DataSet operations
        } catch (DataSetIException& error) {
            THROW_CTL_ERROR(Error) << "DataSet not created successfully " << error.getCDetailMsg();
            error.printError();
        }
        // catch failure caused by the DataSpace operations
        catch (DataSpaceIException& error) {
            THROW_CTL_ERROR(Error) << "DataSpace not created successfully " << error.getCDetailMsg();
        }
        // catch failure caused by any other HDF5 error
        catch (H5::Exception &e) {
            THROW_CTL_ERROR(Error) << e.getCDetailMsg();
        }
        // catch anything not hdf5 related
        catch (Exception &e) {
            THROW_CTL_ERROR(Error) << e.getErrMsg();
        }
    }
    DEB_RETURN();
}
int main(int argc, char **argv)
{
    // Try block to detect exceptions raised by any of the calls inside it
    try {
        // Turn off the auto-printing when failure occurs so that we can
        // handle the errors appropriately
        H5std_string FILE_NAME(argv[1]);
        Exception::dontPrint();

        // Open the file and the dataset in the file.
        H5File file(FILE_NAME, H5F_ACC_RDONLY);
        DataSet dataset;
        H5std_string dataset_name;
        auto objCount(H5Fget_obj_count(file.getId(), H5F_OBJ_ALL));
        for (size_t i = 0; i != objCount; ++i)
            if (H5G_DATASET == file.getObjTypeByIdx(i)) {
                dataset_name = file.getObjnameByIdx(i);
                dataset = file.openDataSet(dataset_name);
            }
        auto datatype(dataset.getDataType());
        auto dataspace(dataset.getSpace());
        hsize_t dims_in[2];
        auto ndims(dataspace.getSimpleExtentDims(dims_in, NULL));
        hsize_t dims_out[2] = {DIM0, DIM1}; // dataset dimensions
        double *buf = new double[dims_in[0] * dims_in[1]];

        // Read data.
        dataset.read(buf, PredType::NATIVE_DOUBLE); //, memspace, dataspace);

        H5std_string outFileName("out.h5");

        // Create a new file using the default property lists.
        H5File outfile(outFileName, H5F_ACC_TRUNC);

        // Create the data space for the dataset.
        DataSpace *output_dataspace = new DataSpace(ndims, dims_out);
        hsize_t chunk_dims[2] = {20, 20}; // chunk dimensions

        // Modify dataset creation property to enable chunking
        DSetCreatPropList *plist = new DSetCreatPropList;
        plist->setChunk(2, chunk_dims);

        // Set ZLIB (DEFLATE) Compression using level 9.
        plist->setDeflate(9);

        // Create the attributes.
        const size_t numAttrs = file.getNumAttrs();
        for (size_t i = 0; i != numAttrs; ++i) {
            auto attr(file.openAttribute(i));
            auto output_attr(outfile.createAttribute(attr.getName(),
                                                     attr.getDataType(),
                                                     attr.getSpace()));
            switch (attr.getTypeClass()) {
            case H5T_FLOAT: {
                double buf;
                attr.read(attr.getDataType(), &buf);
                output_attr.write(attr.getDataType(), &buf);
            } break;
            case H5T_STRING: {
                char *buf = new char[(unsigned long)attr.getStorageSize()];
                attr.read(attr.getDataType(), buf);
                output_attr.write(attr.getDataType(), buf);
                delete[] buf; // array delete: buf was allocated with new[]
            } break;
            default:
                break;
            }
        }

        // Create the dataset.
        DataSet *output_dataset = new DataSet(outfile.createDataSet(dataset_name, datatype, *output_dataspace, *plist));

        // Write data to dataset.
        output_dataset->write(buf, datatype);

        // Close objects and file. Either approach will close the HDF5 item.
        delete output_dataspace;
        delete output_dataset;
        delete plist;
        delete[] buf; // release the read buffer
        file.close();
    } // end of try block
    // catch failure caused by the H5File operations
    catch (FileIException &error) {
        error.printError();
        return -1;
    }
    // catch failure caused by the DataSet operations
    catch (DataSetIException &error) {
        error.printError();
        return -1;
    }
    // catch failure caused by the DataSpace operations
    catch (DataSpaceIException &error) {
        error.printError();
        return -1;
    }
    // catch failure caused by the Attribute operations
    catch (AttributeIException &error) {
        error.printError();
        return -1;
    }
    catch (std::exception &error) {
        std::cerr << error.what() << std::endl;
        return -1;
    }
    return 0; // successfully terminated
}
int main (void)
{
    hsize_t dims[2] = {DIM0, DIM1};    // dataset dimensions
    hsize_t chunk_dims[2] = {20, 20};  // chunk dimensions
    int i, j, buf[DIM0][DIM1];

    // Try block to detect exceptions raised by any of the calls inside it
    try {
        // Turn off the auto-printing when failure occurs so that we can
        // handle the errors appropriately
        Exception::dontPrint();

        // Create a new file using the default property lists.
        H5File file(FILE_NAME, H5F_ACC_TRUNC);

        // Create the data space for the dataset.
        DataSpace *dataspace = new DataSpace(2, dims);

        // Modify dataset creation property to enable chunking
        DSetCreatPropList *plist = new DSetCreatPropList;
        plist->setChunk(2, chunk_dims);

        // Set ZLIB (DEFLATE) Compression using level 6.
        // To use SZIP compression comment out this line.
        plist->setDeflate(6);

        // Uncomment these lines to set SZIP Compression
        // unsigned szip_options_mask = H5_SZIP_NN_OPTION_MASK;
        // unsigned szip_pixels_per_block = 16;
        // plist->setSzip(szip_options_mask, szip_pixels_per_block);

        // Create the dataset.
        DataSet *dataset = new DataSet(file.createDataSet(DATASET_NAME,
                                       PredType::STD_I32BE, *dataspace, *plist));

        for (i = 0; i < DIM0; i++)
            for (j = 0; j < DIM1; j++)
                buf[i][j] = i + j;

        // Write data to dataset.
        dataset->write(buf, PredType::NATIVE_INT);

        // Close objects and file. Either approach will close the HDF5 item.
        delete dataspace;
        delete dataset;
        delete plist;
        file.close();

        // -----------------------------------------------
        // Re-open the file and dataset, retrieve filter
        // information for dataset and read the data back.
        // -----------------------------------------------

        int rbuf[DIM0][DIM1];
        int numfilt;
        size_t nelmts = {1}, namelen = {1};
        unsigned flags, filter_info, cd_values[1], idx;
        char name[1];
        H5Z_filter_t filter_type;

        // Open the file and the dataset in the file.
        file.openFile(FILE_NAME, H5F_ACC_RDONLY);
        dataset = new DataSet(file.openDataSet(DATASET_NAME));

        // Get the create property list of the dataset.
        plist = new DSetCreatPropList(dataset->getCreatePlist());

        // Get the number of filters associated with the dataset.
        numfilt = plist->getNfilters();
        cout << "Number of filters associated with dataset: " << numfilt << endl;

        for (idx = 0; idx < numfilt; idx++) {
            nelmts = 0;
            filter_type = plist->getFilter(idx, flags, nelmts, cd_values,
                                           namelen, name, filter_info);
            cout << "Filter Type: ";
            switch (filter_type) {
            case H5Z_FILTER_DEFLATE:
                cout << "H5Z_FILTER_DEFLATE" << endl;
                break;
            case H5Z_FILTER_SZIP:
                cout << "H5Z_FILTER_SZIP" << endl;
                break;
            default:
                cout << "Other filter type included." << endl;
            }
        }

        // Read data.
        dataset->read(rbuf, PredType::NATIVE_INT);

        delete plist;
        delete dataset;
        file.close(); // can be skipped
    } // end of try block

    // catch failure caused by the H5File operations
    catch (FileIException error) {
        error.printError();
        return -1;
    }
    // catch failure caused by the DataSet operations
    catch (DataSetIException error) {
        error.printError();
        return -1;
    }
    // catch failure caused by the DataSpace operations
    catch (DataSpaceIException error) {
        error.printError();
        return -1;
    }
    return 0; // successfully terminated
}
/*
 * Create data spaces and data sets of the hdf5 for recording histories.
 */
void Hdf5Recorder::initDataSet()
{
    // create the data space & dataset for burstiness history
    hsize_t dims[2];
    dims[0] = static_cast<hsize_t>(m_sim_info->epochDuration * m_sim_info->maxSteps);
    DataSpace dsBurstHist(1, dims);
    dataSetBurstHist = new DataSet(stateOut->createDataSet(nameBurstHist, PredType::NATIVE_INT, dsBurstHist));

    // create the data space & dataset for spikes history
    dims[0] = static_cast<hsize_t>(m_sim_info->epochDuration * m_sim_info->maxSteps * 100);
    DataSpace dsSpikesHist(1, dims);
    dataSetSpikesHist = new DataSet(stateOut->createDataSet(nameSpikesHist, PredType::NATIVE_INT, dsSpikesHist));

    // create the data space & dataset for xloc & yloc
    dims[0] = static_cast<hsize_t>(m_sim_info->totalNeurons);
    DataSpace dsXYloc(1, dims);
    dataSetXloc = new DataSet(stateOut->createDataSet(nameXloc, PredType::NATIVE_INT, dsXYloc));
    dataSetYloc = new DataSet(stateOut->createDataSet(nameYloc, PredType::NATIVE_INT, dsXYloc));

    // create the data space & dataset for neuron types
    dims[0] = static_cast<hsize_t>(m_sim_info->totalNeurons);
    DataSpace dsNeuronTypes(1, dims);
    dataSetNeuronTypes = new DataSet(stateOut->createDataSet(nameNeuronTypes, PredType::NATIVE_INT, dsNeuronTypes));

    // create the data space & dataset for neuron threshold
    dims[0] = static_cast<hsize_t>(m_sim_info->totalNeurons);
    DataSpace dsNeuronThresh(1, dims);
    dataSetNeuronThresh = new DataSet(stateOut->createDataSet(nameNeuronThresh, H5_FLOAT, dsNeuronThresh));

    // create the data space & dataset for simulation step duration
    dims[0] = static_cast<hsize_t>(1);
    DataSpace dsTsim(1, dims);
    dataSetTsim = new DataSet(stateOut->createDataSet(nameTsim, H5_FLOAT, dsTsim));

    // create the data space & dataset for simulation end time
    dims[0] = static_cast<hsize_t>(1);
    DataSpace dsSimulationEndTime(1, dims);
    dataSetSimulationEndTime = new DataSet(stateOut->createDataSet(nameSimulationEndTime, H5_FLOAT, dsSimulationEndTime));

    // probed neurons
    if (m_model->getLayout()->m_probed_neuron_list.size() > 0) {
        // create the data space & dataset for probed neurons
        dims[0] = static_cast<hsize_t>(m_model->getLayout()->m_probed_neuron_list.size());
        DataSpace dsProbedNeurons(1, dims);
        dataSetProbedNeurons = new DataSet(stateOut->createDataSet(nameProbedNeurons, PredType::NATIVE_INT, dsProbedNeurons));

        // create the data space & dataset for spikes of probed neurons:
        // the data space with unlimited dimensions
        hsize_t maxdims[2];
        maxdims[0] = H5S_UNLIMITED;
        maxdims[1] = static_cast<hsize_t>(m_model->getLayout()->m_probed_neuron_list.size());

        // dataset dimensions at creation
        dims[0] = static_cast<hsize_t>(1);
        dims[1] = static_cast<hsize_t>(m_model->getLayout()->m_probed_neuron_list.size());
        DataSpace dsSpikesProbedNeurons(2, dims, maxdims);

        // set fill value for the dataset
        DSetCreatPropList cparms;
        uint64_t fill_val = 0;
        cparms.setFillValue(PredType::NATIVE_UINT64, &fill_val);

        // modify dataset creation properties, enable chunking
        hsize_t chunk_dims[2];
        chunk_dims[0] = static_cast<hsize_t>(100);
        chunk_dims[1] = static_cast<hsize_t>(m_model->getLayout()->m_probed_neuron_list.size());
        cparms.setChunk(2, chunk_dims);

        dataSetSpikesProbedNeurons = new DataSet(stateOut->createDataSet(nameSpikesProbedNeurons, PredType::NATIVE_UINT64, dsSpikesProbedNeurons, cparms));
    }

    // allocate data memories
    burstinessHist = new int[static_cast<int>(m_sim_info->epochDuration)];
    spikesHistory = new int[static_cast<int>(m_sim_info->epochDuration * 100)];
    memset(burstinessHist, 0, static_cast<int>(m_sim_info->epochDuration * sizeof(int)));
    memset(spikesHistory, 0, static_cast<int>(m_sim_info->epochDuration * 100 * sizeof(int)));

    // allocate buffers for spikes history of probed neurons
    if (m_model->getLayout()->m_probed_neuron_list.size() > 0) {
        // allocate data for spikesProbedNeurons
        spikesProbedNeurons = new vector<uint64_t>[m_model->getLayout()->m_probed_neuron_list.size()];

        // allocate memory to save offset
        offsetSpikesProbedNeurons = new hsize_t[m_model->getLayout()->m_probed_neuron_list.size()];
        memset(offsetSpikesProbedNeurons, 0, static_cast<int>(m_model->getLayout()->m_probed_neuron_list.size() * sizeof(hsize_t)));
    }
}
void SavingCtrlObj::HwSavingStream::prepare()
{
    DEB_MEMBER_FUNCT();
    DEB_ALWAYS() << "Entering SavingCtrlObj prepare stream " << m_streamNb;
    std::string filename;
    if (m_suffix != ".hdf")
        THROW_HW_ERROR(lima::Error) << "Suffix must be .hdf";
    try {
        // Turn off the auto-printing when failure occurs so that we can
        // handle the errors appropriately
        H5::Exception::dontPrint();

        // Get the fully qualified filename
        char number[16];
        snprintf(number, sizeof(number), m_index_format.c_str(), m_next_number);
        filename = m_directory + DIR_SEPARATOR + m_prefix + number + m_suffix;
        DEB_TRACE() << "Opening filename " << filename
                    << " with overwritePolicy " << m_overwritePolicy;
        if (m_overwritePolicy == "Overwrite") {
            // overwrite existing file
            m_file = new H5File(filename, H5F_ACC_TRUNC);
        } else if (m_overwritePolicy == "Abort") {
            // fail if file already exists
            m_file = new H5File(filename, H5F_ACC_EXCL);
        } else {
            THROW_CTL_ERROR(Error) << "Append and multiset not supported !";
        }
        m_entry = new Group(m_file->createGroup("/entry"));
        string nxentry = "NXentry";
        write_h5_attribute(*m_entry, "NX_class", nxentry);
        string title = "Lima Hexitec detector";
        write_h5_dataset(*m_entry, "title", title);
        Size size;
        m_cam.getDetectorImageSize(size);
        m_nrasters = size.getHeight();
        m_npixels = size.getWidth();
        m_nframes = m_frames_per_file;
        {
            // ISO 8601 Time format
            time_t now;
            time(&now);
            char buf[sizeof("2011-10-08T07:07:09Z")];
            strftime(buf, sizeof(buf), "%FT%TZ", gmtime(&now));
            string stime = string(buf);
            write_h5_dataset(*m_entry, "start_time", stime);
        }
        Group instrument = Group(m_entry->createGroup("Instrument"));
        string nxinstrument = "NXinstrument";
        write_h5_attribute(instrument, "NX_class", nxinstrument);
        m_instrument_detector = new Group(instrument.createGroup("Hexitec"));
        string nxdetector = "NXdetector";
        write_h5_attribute(*m_instrument_detector, "NX_class", nxdetector);
        Group measurement = Group(m_entry->createGroup("measurement"));
        string nxcollection = "NXcollection";
        write_h5_attribute(measurement, "NX_class", nxcollection);
        m_measurement_detector = new Group(measurement.createGroup("Hexitec"));
        write_h5_attribute(*m_measurement_detector, "NX_class", nxdetector);
        Group det_info = Group(m_instrument_detector->createGroup("detector_information"));
        Group det_params = Group(det_info.createGroup("parameters"));
        double rate;
        m_cam.getFrameRate(rate);
        write_h5_dataset(det_params, "frame rate", rate);
        Group env_info = Group(det_info.createGroup("environment"));
        Camera::Environment env;
        m_cam.getEnvironmentalValues(env);
        write_h5_dataset(env_info, "humidity", env.humidity);
        write_h5_dataset(env_info, "ambientTemperature", env.ambientTemperature);
        write_h5_dataset(env_info, "asicTemperature", env.asicTemperature);
        write_h5_dataset(env_info, "adcTemperature", env.adcTemperature);
        write_h5_dataset(env_info, "ntcTemperature", env.ntcTemperature);
        Group oper_info = Group(det_info.createGroup("operating_values"));
        Camera::OperatingValues opvals;
        m_cam.getOperatingValues(opvals);
        write_h5_dataset(oper_info, "v3_3", opvals.v3_3);
        write_h5_dataset(oper_info, "hvMon", opvals.hvMon);
        write_h5_dataset(oper_info, "hvOut", opvals.hvOut);
        write_h5_dataset(oper_info, "v1_2", opvals.v1_2);
        write_h5_dataset(oper_info, "v1_8", opvals.v1_8);
        write_h5_dataset(oper_info, "v3", opvals.v3);
        write_h5_dataset(oper_info, "v2_5", opvals.v2_5);
        write_h5_dataset(oper_info, "v3_3ln", opvals.v3_3ln);
        write_h5_dataset(oper_info, "v1_65ln", opvals.v1_65ln);
        write_h5_dataset(oper_info, "v1_8ana", opvals.v1_8ana);
        write_h5_dataset(oper_info, "v3_8ana", opvals.v3_8ana);
        write_h5_dataset(oper_info, "peltierCurrent", opvals.peltierCurrent);
        write_h5_dataset(oper_info, "ntcTemperature", opvals.ntcTemperature);
        Group process_info = Group(det_info.createGroup("processing_values"));
        int value;
        m_cam.getLowThreshold(value);
        write_h5_dataset(process_info, "LowThreshold", value);
        m_cam.getHighThreshold(value);
        write_h5_dataset(process_info, "HighThreshold", value);
        int binWidth;
        m_cam.getBinWidth(binWidth);
        write_h5_dataset(process_info, "BinWidth", binWidth);
        int specLen;
        m_cam.getSpecLen(specLen);
        write_h5_dataset(process_info, "SpecLen", specLen);
        int nbins = (specLen / binWidth);
        int saveOpt;
        m_cam.getSaveOpt(saveOpt);
        // StreamNb == 3 is a bit of a kludge for now!!
        if (saveOpt & Camera::SaveSummed && m_streamNb == 3) {
            DEB_TRACE() << "create the spectrum data structure in the file";
            // create the spectrum data structure in the file
            hsize_t data_dims[2];
            data_dims[1] = nbins;
            data_dims[0] = m_nframes;
            // create new dspace
            m_image_dataspace = new DataSpace(RANK_TWO, data_dims);
            m_image_dataset = new DataSet(
                m_measurement_detector->createDataSet("spectrum", PredType::NATIVE_UINT64,
                                                      *m_image_dataspace));
        } else {
            DEB_TRACE() << "create the image data structure in the file";
            // create the image data structure in the file
            hsize_t data_dims[3], max_dims[3];
            data_dims[1] = m_nrasters;
            data_dims[2] = m_npixels;
            data_dims[0] = m_nframes;
            max_dims[1] = m_nrasters;
            max_dims[2] = m_npixels;
            max_dims[0] = H5S_UNLIMITED;
            // Create property list for the dataset and setup chunk size
            DSetCreatPropList plist;
            hsize_t chunk_dims[3];
            // calculate an optimized chunking
            calculate_chunck(data_dims, chunk_dims, 2);
            plist.setChunk(RANK_THREE, chunk_dims);
            // create new dspace
            m_image_dataspace = new DataSpace(RANK_THREE, data_dims, max_dims);
            if (saveOpt & Camera::SaveHistogram && m_streamNb == 2) {
                m_image_dataset = new DataSet(
                    m_measurement_detector->createDataSet("raw_image", PredType::NATIVE_UINT32,
                                                          *m_image_dataspace, plist));
            } else {
                m_image_dataset = new DataSet(
                    m_measurement_detector->createDataSet("raw_image", PredType::NATIVE_UINT16,
                                                          *m_image_dataspace, plist));
            }
        }
    } catch (FileIException &error) {
        THROW_CTL_ERROR(Error) << "File " << filename << " not opened successfully";
    }
    // catch failure caused by the DataSet operations
    catch (DataSetIException& error) {
        THROW_CTL_ERROR(Error) << "DataSet " << filename << " not created successfully";
        error.printError();
    }
    // catch failure caused by the DataSpace operations
    catch (DataSpaceIException& error) {
        THROW_CTL_ERROR(Error) << "DataSpace " << filename << " not created successfully";
    }
    // catch failure caused by any other HDF5 error
    catch (H5::Exception &e) {
        THROW_CTL_ERROR(Error) << e.getCDetailMsg();
    }
    // catch anything not hdf5 related
    catch (Exception &e) {
        THROW_CTL_ERROR(Error) << e.getErrMsg();
    }
}
void test_szip_filter(H5File& file1)
{
#ifdef H5_HAVE_FILTER_SZIP
    int points[DSET_DIM1][DSET_DIM2], check[DSET_DIM1][DSET_DIM2];
    unsigned szip_options_mask = H5_SZIP_NN_OPTION_MASK;
    unsigned szip_pixels_per_block = 4;

    // Output message about test being performed
    SUBTEST("szip filter (with encoder)");
    if (h5_szip_can_encode() == 1) {
        char* tconv_buf = new char[1000];
        try {
            const hsize_t size[2] = {DSET_DIM1, DSET_DIM2};

            // Create the data space
            DataSpace space1(2, size, NULL);

            // Create a small conversion buffer to test strip mining (?)
            DSetMemXferPropList xfer;
            xfer.setBuffer(1000, tconv_buf, NULL);

            // Prepare dataset create property list
            DSetCreatPropList dsplist;
            dsplist.setChunk(2, chunk_size);

            // Set up for szip compression
            dsplist.setSzip(szip_options_mask, szip_pixels_per_block);

            // Create a dataset with szip compression
            DataSpace space2(2, size, NULL);
            DataSet dataset(file1.createDataSet(DSET_SZIP_NAME, PredType::NATIVE_INT, space2, dsplist));

            hsize_t i, j, n;
            for (i = n = 0; i < size[0]; i++) {
                for (j = 0; j < size[1]; j++) {
                    points[i][j] = (int)n++;
                }
            }

            // Write to the dataset then read back the values
            dataset.write((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
            dataset.read((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

            // Check that the values read are the same as the values written
            for (i = 0; i < size[0]; i++)
                for (j = 0; j < size[1]; j++) {
                    int status = check_values(i, j, points[i][j], check[i][j]);
                    if (status == -1)
                        throw Exception("test_szip_filter", "Failed in testing szip method");
                }
            dsplist.close();
            PASSED();
        } // end of try

        // catch all other exceptions
        catch (Exception& E) {
            issue_fail_msg("test_szip_filter()", __LINE__, __FILE__, E.getCDetailMsg());
        }
        delete[] tconv_buf;
    } // if szip is present and can encode
    else {
        SKIPPED();
    }

#else /* H5_HAVE_FILTER_SZIP */
    SUBTEST("szip filter");
    SKIPPED();
    puts("    Szip filter not enabled");
#endif /* H5_HAVE_FILTER_SZIP */
} // test_szip_filter
/*-------------------------------------------------------------------------
 * Function:    test_multiopen
 *
 * Purpose:     Tests that a bug no longer exists. If a dataset is opened
 *              twice and one of the handles is used to extend the dataset,
 *              then the other handle should return the new size when
 *              queried.
 *
 * Return:      Success:    0
 *
 *              Failure:    -1
 *
 * Programmer:  Binh-Minh Ribler (using C version)
 *              Saturday, February 17, 2001
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t test_multiopen (H5File& file)
{
    SUBTEST("Multi-open with extending");

    DataSpace* space = NULL;
    try {
        // Create a dataset creation property list
        DSetCreatPropList dcpl;

        // Set chunk size to given size
        hsize_t cur_size[1] = {10};
        dcpl.setChunk(1, cur_size);

        // Create a simple data space with unlimited size
        static hsize_t max_size[1] = {H5S_UNLIMITED};
        space = new DataSpace(1, cur_size, max_size);

        // Create first dataset
        DataSet dset1 = file.createDataSet("multiopen", PredType::NATIVE_INT, *space, dcpl);

        // Open again the first dataset from the file to another DataSet object.
        DataSet dset2 = file.openDataSet("multiopen");

        // Release the dataspace
        delete space;
        space = NULL;

        // Extend the dimensionality of the first dataset
        cur_size[0] = 20;
        dset1.extend(cur_size);

        // Get the size from the second handle
        space = new DataSpace(dset2.getSpace());
        hsize_t tmp_size[1];
        space->getSimpleExtentDims(tmp_size);
        if (cur_size[0] != tmp_size[0]) {
            cerr << "    Got " << (int)tmp_size[0] << " instead of "
                 << (int)cur_size[0] << "!" << endl;
            throw Exception("test_multiopen", "Failed in multi-open with extending");
        }

        // clean up and return with success
        delete space;
        PASSED();
        return 0;
    } // end try block

    // catch all dataset, file, space, and plist exceptions
    catch (Exception& E) {
        cerr << " FAILED" << endl;
        cerr << "    <<< " << E.getDetailMsg() << " >>>" << endl << endl;

        // clean up and return with failure
        if (space != NULL)
            delete space;
        return -1;
    }
} // test_multiopen
/*-------------------------------------------------------------------------
 * Function:    test_compression
 *
 * Purpose:     Tests dataset compression. If compression is requested when
 *              it hasn't been compiled into the library (such as when
 *              updating an existing compressed dataset) then data is sent to
 *              the file uncompressed but no errors are returned.
 *
 * Return:      Success:    0
 *
 *              Failure:    -1
 *
 * Programmer:  Binh-Minh Ribler (using C version)
 *              Friday, January 5, 2001
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t test_compression(H5File& file)
{
#ifndef H5_HAVE_FILTER_DEFLATE
    const char *not_supported;
    not_supported = "    Deflate compression is not enabled.";
#endif /* H5_HAVE_FILTER_DEFLATE */

    int points[100][200];
    int check[100][200];
    hsize_t i, j, n;

    // Initialize the dataset
    for (i = n = 0; i < 100; i++) {
        for (j = 0; j < 200; j++) {
            points[i][j] = (int)n++;
        }
    }
    char* tconv_buf = new char[1000];
    DataSet* dataset = NULL;
    try {
        const hsize_t size[2] = {100, 200};

        // Create the data space
        DataSpace space1(2, size, NULL);

        // Create a small conversion buffer to test strip mining
        DSetMemXferPropList xfer;
        xfer.setBuffer(1000, tconv_buf, NULL);

        // Use chunked storage with compression
        DSetCreatPropList dscreatplist;
        const hsize_t chunk_size[2] = {2, 25};
        dscreatplist.setChunk(2, chunk_size);
        dscreatplist.setDeflate(6);

#ifdef H5_HAVE_FILTER_DEFLATE
        SUBTEST("Compression (setup)");

        // Create the dataset
        dataset = new DataSet(file.createDataSet(DSET_COMPRESS_NAME, PredType::NATIVE_INT, space1, dscreatplist));
        PASSED();

        /*----------------------------------------------------------------------
         * STEP 1: Read uninitialized data.  It should be zero.
         *----------------------------------------------------------------------
         */
        SUBTEST("Compression (uninitialized read)");

        dataset->read((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

        for (i = 0; i < size[0]; i++) {
            for (j = 0; j < size[1]; j++) {
                if (0 != check[i][j]) {
                    H5_FAILED();
                    cerr << "    Read a non-zero value." << endl;
                    cerr << "    At index " << (unsigned long)i << ","
                         << (unsigned long)j << endl;
                    throw Exception("test_compression", "Failed in uninitialized read");
                }
            }
        }
        PASSED();

        /*----------------------------------------------------------------------
         * STEP 2: Test compression by setting up a chunked dataset and writing
         * to it.
         *----------------------------------------------------------------------
         */
        SUBTEST("Compression (write)");

        for (i = n = 0; i < size[0]; i++) {
            for (j = 0; j < size[1]; j++) {
                points[i][j] = (int)n++;
            }
        }
        dataset->write((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
        PASSED();

        /*----------------------------------------------------------------------
         * STEP 3: Try to read the data we just wrote.
         *----------------------------------------------------------------------
         */
        SUBTEST("Compression (read)");

        // Read the dataset back
        dataset->read((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

        // Check that the values read are the same as the values written
        for (i = 0; i < size[0]; i++)
            for (j = 0; j < size[1]; j++) {
                int status = check_values(i, j, points[i][j], check[i][j]);
                if (status == -1)
                    throw Exception("test_compression", "Failed in read");
            }
        PASSED();

        /*----------------------------------------------------------------------
         * STEP 4: Write new data over the top of the old data.  The new data is
         * random thus not very compressible, and will cause the chunks to move
         * around as they grow.  We only change values for the left half of the
         * dataset although we rewrite the whole thing.
         *----------------------------------------------------------------------
         */
        SUBTEST("Compression (modify)");

        for (i = 0; i < size[0]; i++) {
            for (j = 0; j < size[1] / 2; j++) {
                points[i][j] = rand();
            }
        }
        dataset->write((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

        // Read the dataset back and check it
        dataset->read((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

        // Check that the values read are the same as the values written
        for (i = 0; i < size[0]; i++)
            for (j = 0; j < size[1]; j++) {
                int status = check_values(i, j, points[i][j], check[i][j]);
                if (status == -1)
                    throw Exception("test_compression", "Failed in modify");
            }
        PASSED();

        /*----------------------------------------------------------------------
         * STEP 5: Close the dataset and then open it and read it again.  This
         * ensures that the compression message is picked up properly from the
         * object header.
         *----------------------------------------------------------------------
         */
        SUBTEST("Compression (re-open)");

        // close this dataset to reuse the var
        delete dataset;

        dataset = new DataSet(file.openDataSet(DSET_COMPRESS_NAME));
        dataset->read((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

        // Check that the values read are the same as the values written
        for (i = 0; i < size[0]; i++)
            for (j = 0; j < size[1]; j++) {
                int status = check_values(i, j, points[i][j], check[i][j]);
                if (status == -1)
                    throw Exception("test_compression", "Failed in re-open");
            }
        PASSED();

        /*----------------------------------------------------------------------
         * STEP 6: Test partial I/O by writing to and then reading from a
         * hyperslab of the dataset.  The hyperslab does not line up on chunk
         * boundaries (we know that case already works from above tests).
         *----------------------------------------------------------------------
         */
        SUBTEST("Compression (partial I/O)");

        const hsize_t hs_size[2] = {4, 50};
        const hsize_t hs_offset[2] = {7, 30};
        for (i = 0; i < hs_size[0]; i++) {
            for (j = 0; j < hs_size[1]; j++) {
                points[hs_offset[0] + i][hs_offset[1] + j] = rand();
            }
        }
        space1.selectHyperslab(H5S_SELECT_SET, hs_size, hs_offset);
        dataset->write((void*)points, PredType::NATIVE_INT, space1, space1, xfer);
        dataset->read((void*)check, PredType::NATIVE_INT, space1, space1, xfer);

        // Check that the values read are the same as the values written
        for (i = 0; i < hs_size[0]; i++) {
            for (j = 0; j < hs_size[1]; j++) {
                if (points[hs_offset[0] + i][hs_offset[1] + j] !=
                    check[hs_offset[0] + i][hs_offset[1] + j]) {
                    H5_FAILED();
                    cerr << "    Read different values than written.\n" << endl;
                    cerr << "    At index " << (unsigned long)(hs_offset[0] + i)
                         << "," << (unsigned long)(hs_offset[1] + j) << endl;
                    cerr << "    At original: "
                         << (int)points[hs_offset[0] + i][hs_offset[1] + j] << endl;
                    cerr << "    At returned: "
                         << (int)check[hs_offset[0] + i][hs_offset[1] + j] << endl;
                    throw Exception("test_compression", "Failed in partial I/O");
                }
            } // for j
        } // for i

        delete dataset;
        dataset = NULL;
        PASSED();

#else
        SUBTEST("deflate filter");
        SKIPPED();
        cerr << not_supported << endl;
#endif

        /*----------------------------------------------------------------------
         * STEP 7: Register an application-defined compression method and use it
         * to write and then read the dataset.
         *----------------------------------------------------------------------
         */
        SUBTEST("Compression (app-defined method)");

        if (H5Zregister(H5Z_BOGUS) < 0)
            throw Exception("test_compression", "Failed in app-defined method");
        if (H5Pset_filter(dscreatplist.getId(), H5Z_FILTER_BOGUS, 0, 0, NULL) < 0)
            throw Exception("test_compression", "Failed in app-defined method");
        dscreatplist.setFilter(H5Z_FILTER_BOGUS, 0, 0, NULL);

        DataSpace space2(2, size, NULL);
        dataset = new DataSet(file.createDataSet(DSET_BOGUS_NAME, PredType::NATIVE_INT, space2, dscreatplist));

        dataset->write((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
        dataset->read((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

        // Check that the values read are the same as the values written
        for (i = 0; i < size[0]; i++)
            for (j = 0; j < size[1]; j++) {
                int status = check_values(i, j, points[i][j], check[i][j]);
                if (status == -1)
                    throw Exception("test_compression", "Failed in app-defined method");
            }
        PASSED();

        /*----------------------------------------------------------------------
         * Cleanup
         *----------------------------------------------------------------------
         */
        delete dataset;
        delete[] tconv_buf;
        return 0;
    } // end try

    // catch all dataset, file, space, and plist exceptions
    catch (Exception& E) {
        cerr << " FAILED" << endl;
        cerr << "    <<< " << E.getDetailMsg() << " >>>" << endl << endl;

        // clean up and return with failure
        if (dataset != NULL)
            delete dataset;
        if (tconv_buf)
            delete[] tconv_buf;
        return -1;
    }
} // test_compression
void Generic_wrapper_hdf::add_dset(int rank, const unsigned int * dims, V_TYPE type,
                                   const void * data, const std::string & name)
{
    if (!(wrapper_open_))
        throw runtime_error("wrapper must be open to add a dataset");

    // use a vector rather than a variable-length array, which is not
    // standard C++
    std::vector<hsize_t> hdims(rank);
    for (int j = 0; j < rank; ++j)
        hdims[j] = dims[j];

    // make dspace
    DataSpace dspace(rank, hdims.data());

    // sort out type
    DataType hdf_type, mem_type;
    DSetCreatPropList plist;

    int fill_value_i = -31415;
    unsigned int fill_value_ui = 0;
    float fill_value_f = -3.1415;

    switch (type) {
    case V_INT:
        hdf_type = PredType::NATIVE_INT;
        mem_type = PredType::NATIVE_INT;
        plist.setFillValue(hdf_type, &fill_value_i);
        break;
    case V_FLOAT:
        hdf_type = PredType::NATIVE_FLOAT;
        mem_type = PredType::NATIVE_FLOAT;
        plist.setFillValue(hdf_type, &fill_value_f);
        break;
    case V_UINT:
        hdf_type = PredType::NATIVE_UINT;
        mem_type = PredType::NATIVE_UINT;
        plist.setFillValue(hdf_type, &fill_value_ui);
        break;
    case V_BOOL:
    case V_TIME:
    case V_GUID:
    case V_ERROR:
    case V_COMPLEX:
    case V_STRING:
        throw logic_error("generic_wrapper_hdf: unimplemented types");
    }

    /// @todo add compression logic for higher sizes
    // if the list is big enough, use compression
    if (rank == 1 && hdims[0] > CSIZE * 5) {
        hsize_t csize = CSIZE;
        plist.setChunk(1, &csize);
        plist.setSzip(H5_SZIP_NN_OPTION_MASK, 10);
    }

    // make data set
    DataSet dset;
    if (!group_open_ || name[0] == '/') {
        dset = file_->createDataSet(name, hdf_type, dspace, plist);
    } else if (group_) {
        dset = group_->createDataSet(name, hdf_type, dspace, plist);
    } else {
        throw runtime_error("gave relative path name with no open group");
    }

    // shove in data
    dset.write(data, mem_type, dspace, dspace);

    // closing everything is taken care of, as all variables are on the stack
}
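// One reading of the @todo above, as a hedged sketch (hypothetical helper,
// not part of the original wrapper): generalize the chunking to any rank by
// chunking each dimension at min(extent, csize), so larger multi-dimensional
// datasets could be compressed as well. Needs <algorithm> and <vector>.
static void set_generic_chunking(DSetCreatPropList &plist,
                                 const hsize_t *dims, int rank, hsize_t csize)
{
    std::vector<hsize_t> chunk(rank);
    for (int j = 0; j < rank; ++j)
        chunk[j] = std::min(dims[j], csize); // never chunk past the extent
    plist.setChunk(rank, chunk.data());
}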
long SaveContainerHdf5::_writeFile(void* f, Data &aData,
                                   CtSaving::HeaderMap &aHeader,
                                   CtSaving::FileFormat aFormat)
{
    DEB_MEMBER_FUNCT();
    _File* file = (_File*)f;
    size_t buf_size = 0;

    // get the proper data type
    PredType data_type(PredType::NATIVE_UINT8);
    switch (aData.type) {
    case Data::UINT8: break;
    case Data::INT8: data_type = PredType::NATIVE_INT8; break;
    case Data::UINT16: data_type = PredType::NATIVE_UINT16; break;
    case Data::INT16: data_type = PredType::NATIVE_INT16; break;
    case Data::UINT32: data_type = PredType::NATIVE_UINT32; break;
    case Data::INT32: data_type = PredType::NATIVE_INT32; break;
    case Data::UINT64: data_type = PredType::NATIVE_UINT64; break;
    case Data::INT64: data_type = PredType::NATIVE_INT64; break;
    case Data::FLOAT: data_type = PredType::NATIVE_FLOAT; break;
    case Data::DOUBLE: data_type = PredType::NATIVE_DOUBLE; break;
    case Data::UNDEF:
    default:
        THROW_CTL_ERROR(Error) << "Invalid image type";
    }

    try {
        if (!file->m_format_written) {
            // ISO 8601 Time format
            time_t now;
            time(&now);
            char buf[sizeof("2011-10-08T07:07:09Z")];
#ifdef WIN32
            struct tm gmtime_now;
            gmtime_s(&gmtime_now, &now);
            strftime(buf, sizeof(buf), "%FT%TZ", &gmtime_now);
#else
            strftime(buf, sizeof(buf), "%FT%TZ", gmtime(&now));
#endif
            string stime = string(buf);
            write_h5_dataset(*file->m_entry, "start_time", stime);

            // write header only once into "parameters" group
            // but we should write some keys into measurement, like motor_pos counter_pos (spec)???
            if (!aHeader.empty()) {
                for (map<string, string>::const_iterator it = aHeader.begin();
                     it != aHeader.end(); it++) {
                    string key = it->first;
                    string value = it->second;
                    write_h5_dataset(*file->m_measurement_detector_parameters, key.c_str(), value);
                }
            }
            delete file->m_measurement_detector_parameters;
            file->m_measurement_detector_parameters = NULL;

            // create the image data structure in the file
            hsize_t data_dims[3], max_dims[3];
            data_dims[1] = aData.dimensions[1];
            data_dims[2] = aData.dimensions[0];
            data_dims[0] = m_nbframes;
            max_dims[1] = aData.dimensions[1];
            max_dims[2] = aData.dimensions[0];
            max_dims[0] = H5S_UNLIMITED;

            // Create property list for the dataset and setup chunk size
            DSetCreatPropList plist;
            hsize_t chunk_dims[RANK_THREE];
            // test direct chunk write, so chunk dims is 1 image size
            chunk_dims[0] = 1;
            chunk_dims[1] = data_dims[1];
            chunk_dims[2] = data_dims[2];
            plist.setChunk(RANK_THREE, chunk_dims);
#if defined(WITH_Z_COMPRESSION)
            if (aFormat == CtSaving::HDF5GZ)
                plist.setDeflate(m_compression_level);
#endif
#if defined(WITH_BS_COMPRESSION)
            if (aFormat == CtSaving::HDF5BS) {
                unsigned int opt_vals[2] = {0, BSHUF_H5_COMPRESS_LZ4};
                plist.setFilter(BSHUF_H5FILTER, H5Z_FLAG_MANDATORY, 2, opt_vals);
            }
#endif
            // create new dspace
            file->m_image_dataspace = new DataSpace(RANK_THREE, data_dims, NULL);
            file->m_image_dataset = new DataSet(file->m_measurement_detector->createDataSet("data", data_type, *file->m_image_dataspace, plist));
            string nxdata = "NXdata";
            write_h5_attribute(*file->m_image_dataset, "NX_class", nxdata);
            string image = "image";
            write_h5_attribute(*file->m_image_dataset, "interpretation", image);
            file->m_prev_images_written = 0;
            file->m_format_written = true;
        } else if (file->m_in_append && !m_is_multiset && !file->m_dataset_extended) {
            hsize_t allocated_dims[3];
            file->m_image_dataset = new DataSet(file->m_measurement_detector->openDataSet("data"));
            file->m_image_dataspace = new DataSpace(file->m_image_dataset->getSpace());
            file->m_image_dataspace->getSimpleExtentDims(allocated_dims);

            hsize_t data_dims[3];
            data_dims[1] = aData.dimensions[1];
            data_dims[2] = aData.dimensions[0];
            data_dims[0] = allocated_dims[0] + m_nbframes;
            // a mismatch on either axis is an error, so test with ||
            // (the original tested with &&, which missed single-axis mismatches)
            if (data_dims[1] != allocated_dims[1] || data_dims[2] != allocated_dims[2]) {
                THROW_CTL_ERROR(Error) << "You are trying to extend the dataset with mismatching image dimensions";
            }
            file->m_image_dataset->extend(data_dims);
            // refresh the dataspace after the extend; release the old
            // dataspace, not the dataset (the original deleted the dataset
            // and then dereferenced it)
            file->m_image_dataspace->close();
            delete file->m_image_dataspace;
            file->m_image_dataspace = new DataSpace(file->m_image_dataset->getSpace());
            file->m_prev_images_written = allocated_dims[0];
            file->m_dataset_extended = true;
        }

        // write the image data
        hsize_t image_nb = aData.frameNumber % m_nbframes;

        // we test direct chunk write
        hsize_t offset[RANK_THREE] = {image_nb, 0U, 0U};
        uint32_t filter_mask = 0;
        hid_t dataset = file->m_image_dataset->getId();
        herr_t status;
        void * buf_data;
        hid_t dxpl;
        dxpl = H5Pcreate(H5P_DATASET_XFER);
        if ((aFormat == CtSaving::HDF5GZ) || (aFormat == CtSaving::HDF5BS)) {
            ZBufferType* buffers = _takeBuffer(aData.frameNumber);
            // with single chunk, only one buffer allocated
            buf_size = buffers->front()->used_size;
            buf_data = buffers->front()->buffer;
            //DEB_ALWAYS() << "Image #" << aData.frameNumber << " buf_size = " << buf_size;
            status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, buf_data);
            if (status < 0) {
                THROW_CTL_ERROR(Error) << "H5DOwrite_chunk() failed";
            }
            delete buffers->front();
            delete buffers;
        } else {
            buf_data = aData.data();
            buf_size = aData.size();
            //DEB_ALWAYS() << "Image #" << aData.frameNumber << " buf_size = " << buf_size;
            status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, buf_data);
            if (status < 0) {
                THROW_CTL_ERROR(Error) << "H5DOwrite_chunk() failed";
            }
        }
        // close the transfer property list created above (the original
        // leaked it on every frame)
        H5Pclose(dxpl);

    // catch failure caused by the DataSet operations
    } catch (DataSetIException& error) {
        THROW_CTL_ERROR(Error) << "DataSet not created successfully " << error.getCDetailMsg();
        error.printError();
    }
    // catch failure caused by the DataSpace operations
    catch (DataSpaceIException& error) {
        THROW_CTL_ERROR(Error) << "DataSpace not created successfully " << error.getCDetailMsg();
    }
    // catch failure caused by any other HDF5 error
    catch (H5::Exception &e) {
        THROW_CTL_ERROR(Error) << e.getCDetailMsg();
    }
    // catch anything not hdf5 related
    catch (Exception &e) {
        THROW_CTL_ERROR(Error) << e.getErrMsg();
    }
    DEB_RETURN();
    return buf_size;
}
int main (void)
{
    /*
     * Try block to detect exceptions raised by any of the calls inside it
     */
    try {
        /*
         * Turn off the auto-printing when failure occurs so that we can
         * handle the errors appropriately
         */
        Exception::dontPrint();

        /*
         * Create the data space with unlimited dimensions.
         */
        hsize_t dims[2] = {3, 3};  // dataset dimensions at creation
        hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
        DataSpace mspace1(RANK, dims, maxdims);

        /*
         * Create a new file. If file exists its contents will be overwritten.
         */
        H5File file(FILE_NAME, H5F_ACC_TRUNC);

        /*
         * Modify dataset creation properties, i.e. enable chunking.
         */
        DSetCreatPropList cparms;
        hsize_t chunk_dims[2] = {2, 5};
        cparms.setChunk(RANK, chunk_dims);

        /*
         * Set fill value for the dataset
         */
        int fill_val = 0;
        cparms.setFillValue(PredType::NATIVE_INT, &fill_val);

        /*
         * Create a new dataset within the file using cparms
         * creation properties.
         */
        DataSet dataset = file.createDataSet(DATASET_NAME, PredType::NATIVE_INT, mspace1, cparms);

        /*
         * Extend the dataset. This call assures that dataset is at least 3 x 3.
         */
        hsize_t size[2];
        size[0] = 3;
        size[1] = 3;
        dataset.extend(size);

        /*
         * Select a hyperslab.
         */
        DataSpace fspace1 = dataset.getSpace();
        hsize_t offset[2];
        offset[0] = 0;
        offset[1] = 0;
        hsize_t dims1[2] = {3, 3};  /* data1 dimensions */
        fspace1.selectHyperslab(H5S_SELECT_SET, dims1, offset);

        /*
         * Write the data to the hyperslab.
         */
        int data1[3][3] = { {1, 1, 1},  /* data to write */
                            {1, 1, 1},
                            {1, 1, 1} };
        dataset.write(data1, PredType::NATIVE_INT, mspace1, fspace1);

        /*
         * Extend the dataset. Dataset becomes 10 x 3.
         */
        hsize_t dims2[2] = {7, 1};  /* data2 dimensions */
        dims[0] = dims1[0] + dims2[0];
        size[0] = dims[0];
        size[1] = dims[1];
        dataset.extend(size);

        /*
         * Select a hyperslab.
         */
        DataSpace fspace2 = dataset.getSpace();
        offset[0] = 3;
        offset[1] = 0;
        fspace2.selectHyperslab(H5S_SELECT_SET, dims2, offset);

        /*
         * Define memory space
         */
        DataSpace mspace2(RANK, dims2);

        /*
         * Write the data to the hyperslab.
         */
        int data2[7] = {2, 2, 2, 2, 2, 2, 2};
        dataset.write(data2, PredType::NATIVE_INT, mspace2, fspace2);

        /*
         * Extend the dataset. Dataset becomes 10 x 5.
         */
        hsize_t dims3[2] = {2, 2};  /* data3 dimensions */
        dims[1] = dims1[1] + dims3[1];
        size[0] = dims[0];
        size[1] = dims[1];
        dataset.extend(size);

        /*
         * Select a hyperslab
         */
        DataSpace fspace3 = dataset.getSpace();
        offset[0] = 0;
        offset[1] = 3;
        fspace3.selectHyperslab(H5S_SELECT_SET, dims3, offset);

        /*
         * Define memory space.
         */
        DataSpace mspace3(RANK, dims3);

        /*
         * Write the data to the hyperslab.
         */
        int data3[2][2] = { {3, 3},
                            {3, 3} };
        dataset.write(data3, PredType::NATIVE_INT, mspace3, fspace3);

        /*
         * Read the data from this dataset and display it.
         */
        int i, j;
        int data_out[NX][NY];
        for (i = 0; i < NX; i++) {
            for (j = 0; j < NY; j++)
                data_out[i][j] = 0;
        }
        dataset.read(data_out, PredType::NATIVE_INT);

        /*
         * Resulting dataset
         *
         *  1 1 1 3 3
         *  1 1 1 3 3
         *  1 1 1 0 0
         *  2 0 0 0 0
         *  2 0 0 0 0
         *  2 0 0 0 0
         *  2 0 0 0 0
         *  2 0 0 0 0
         *  2 0 0 0 0
         *  2 0 0 0 0
         */

        /*
         * Display the result.
         */
        for (i = 0; i < NX; i++) {
            for (j = 0; j < NY; j++)
                cout << data_out[i][j] << "  ";
            cout << endl;
        }
    } // end of try block

    // catch failure caused by the H5File operations
    catch (FileIException error) {
        error.printErrorStack();
        return -1;
    }
    // catch failure caused by the DataSet operations
    catch (DataSetIException error) {
        error.printErrorStack();
        return -1;
    }
    // catch failure caused by the DataSpace operations
    catch (DataSpaceIException error) {
        error.printErrorStack();
        return -1;
    }
    // catch failure caused by the DataType operations
    catch (DataTypeIException error) {
        error.printErrorStack();
        return -1;
    }
    return 0;
}