/*
 * Verify round-trip integrity of a chunked integer dataset written with the
 * szip compression filter (NN option mask, 4 pixels per block).
 *
 * file1 - open HDF5 file in which the test dataset is created.
 *
 * The test is skipped when the library lacks the szip filter or the szip
 * encoder; on data mismatch an Exception is raised and reported via
 * issue_fail_msg().
 */
void test_szip_filter(H5File& file1)
{
#ifdef H5_HAVE_FILTER_SZIP
    int points[DSET_DIM1][DSET_DIM2], check[DSET_DIM1][DSET_DIM2];
    unsigned szip_options_mask = H5_SZIP_NN_OPTION_MASK;
    unsigned szip_pixels_per_block = 4;

    // Output message about test being performed
    SUBTEST("szip filter (with encoder)");
    if (h5_szip_can_encode() == 1) {
        // Small stack buffer replaces the former heap allocation
        // (new[]/delete[]): no leak possible if an unexpected exception
        // propagates past the catch clause below.
        char tconv_buf[1000];
        try {
            const hsize_t size[2] = {DSET_DIM1, DSET_DIM2};

            // Create the data space
            DataSpace space1(2, size, NULL);

            // Create a small conversion buffer to test strip mining (?)
            DSetMemXferPropList xfer;
            xfer.setBuffer(1000, tconv_buf, NULL);

            // Prepare dataset create property list
            DSetCreatPropList dsplist;
            dsplist.setChunk(2, chunk_size);

            // Set up for szip compression
            dsplist.setSzip(szip_options_mask, szip_pixels_per_block);

            // Create a dataset with szip compression
            DataSpace space2(2, size, NULL);
            DataSet dataset(file1.createDataSet(DSET_SZIP_NAME, PredType::NATIVE_INT, space2, dsplist));

            // Fill the source array with a running counter so every element
            // is distinct and position-dependent.
            hsize_t i, j, n;
            for (i = n = 0; i < size[0]; i++) {
                for (j = 0; j < size[1]; j++) {
                    points[i][j] = (int)n++;
                }
            }

            // Write to the dataset then read back the values
            dataset.write((void*)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
            dataset.read((void*)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);

            // Check that the values read are the same as the values written
            for (i = 0; i < size[0]; i++)
                for (j = 0; j < size[1]; j++) {
                    int status = check_values(i, j, points[i][j], check[i][j]);
                    if (status == -1)
                        throw Exception("test_szip_filter", "Failed in testing szip method");
                }
            dsplist.close();
            PASSED();
        } // end of try

        // Catch by const reference (was by value, which copies and can
        // slice derived exception types).
        catch (const Exception& E) {
            issue_fail_msg("test_szip_filter()", __LINE__, __FILE__, E.getCDetailMsg());
        }
    } // if szip presents
    else {
        SKIPPED();
    }

#else /* H5_HAVE_FILTER_SZIP */
    SUBTEST("szip filter");
    SKIPPED();
    puts("    Szip filter not enabled");
#endif /* H5_HAVE_FILTER_SZIP */
} // test_szip_filter
void Generic_wrapper_hdf::add_dset(int rank, const unsigned int * dims, V_TYPE type, const void * data, const std::string & name ) { if (!(wrapper_open_)) throw runtime_error("wrapper must be open to add a dataset"); hsize_t hdims[rank]; for(int j = 0;j<rank;++j) hdims[j] = dims[j]; // make dspace DataSpace dspace(rank,hdims); // sort out type DataType hdf_type,mem_type; DSetCreatPropList plist; int fill_value_i = -31415; unsigned int fill_value_ui = 0; float fill_value_f = -3.1415; switch(type) { case V_INT: hdf_type = PredType::NATIVE_INT; mem_type = PredType::NATIVE_INT; plist.setFillValue(hdf_type,&fill_value_i); break; case V_FLOAT: hdf_type = PredType::NATIVE_FLOAT; mem_type = PredType::NATIVE_FLOAT; plist.setFillValue(hdf_type,&fill_value_f); break; case V_UINT: hdf_type = PredType::NATIVE_UINT; mem_type = PredType::NATIVE_UINT; plist.setFillValue(hdf_type,&fill_value_ui); break; case V_BOOL: case V_TIME: case V_GUID: case V_ERROR: case V_COMPLEX: case V_STRING: throw logic_error("generic_wrapper_hdf: un implemented types"); } /// @todo add compression logic for higher sizes // if the list is big enough, us compression if(rank ==1 && *hdims > CSIZE*5) { hsize_t csize = CSIZE; plist.setChunk(1,&csize); plist.setSzip(H5_SZIP_NN_OPTION_MASK,10); } // make data set DataSet dset; if(!group_open_ || name[0] == '/') { dset = file_ ->createDataSet(name,hdf_type,dspace,plist); } else if(group_) { dset = group_ ->createDataSet(name,hdf_type,dspace,plist); } else { throw runtime_error("gave relative path name with no open group"); } // shove in data dset.write(data,mem_type,dspace,dspace); // close everything is taken care of as all variables on stack }