Example #1
herr_t write_byteoffset_data(hid_t group_id, const std::string &name, const pyublas::numpy_vector<int> &data, int width, int height)
{
  hsize_t dim[2] = {(hsize_t)height, (hsize_t)width};
  H5::DataSpace space2D(2, dim);

  unsigned int cd_values[CBF_H5Z_FILTER_CBF_NELMTS];
  hsize_t chunk2[2] = {dim[0], dim[1]};
  cd_values[CBF_H5Z_FILTER_CBF_COMPRESSION] = CBF_BYTE_OFFSET; // byte_offset
  cd_values[CBF_H5Z_FILTER_CBF_RESERVED]    = 0;
  cd_values[CBF_H5Z_FILTER_CBF_BINARY_ID]   = 1; // ?
  cd_values[CBF_H5Z_FILTER_CBF_PADDING]     = 4095; // ?
  cd_values[CBF_H5Z_FILTER_CBF_ELSIZE]      = 4; // always 32 bit?
  cd_values[CBF_H5Z_FILTER_CBF_ELSIGN]      = 1; // 1 for signed, 0 if unsigned
  cd_values[CBF_H5Z_FILTER_CBF_REAL]        = 0; // 1 if a real array, 0 if an integer array
  cd_values[CBF_H5Z_FILTER_CBF_DIMFAST]     = dim[1];
  cd_values[CBF_H5Z_FILTER_CBF_DIMMID]      = dim[0];
  cd_values[CBF_H5Z_FILTER_CBF_DIMSLOW]     = 1;
  hid_t valprop = H5Pcreate(H5P_DATASET_CREATE);
  herr_t status = H5Pset_chunk(valprop, 2, chunk2);
  status = H5Pset_filter(valprop, CBF_H5Z_FILTER_CBF, H5Z_FLAG_OPTIONAL,
			 CBF_H5Z_FILTER_CBF_NELMTS, cd_values);
  // Don't forget to set HDF5_PLUGIN_PATH!
  if (status < 0) {
    std::cout << "Error in H5Pset_filter\n==============================\n";
    H5Eprint(H5E_DEFAULT, stdout);
    std::cout << "==============================\n";
  }

  hid_t dset = H5Dcreate(group_id, name.c_str(), H5T_STD_I32LE, // always 32 bit?
			 space2D.getId(), H5P_DEFAULT, valprop, H5P_DEFAULT);
  status = H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, // always 32 bit?
		    &data[0]); // is this really a safe way??

  H5Dclose(dset);    // release the dataset and its creation property list
  H5Pclose(valprop);

  return status;
}
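
Because the byte-offset filter above is applied with H5Z_FLAG_OPTIONAL and is normally loaded as a dynamic plugin (hence the HDF5_PLUGIN_PATH reminder), a missing plugin can silently produce an uncompressed dataset. A minimal pre-flight check is sketched below; it assumes the same CBF_H5Z_FILTER_CBF ID from the cbflib headers used above and a reasonably recent HDF5 whose H5Zfilter_avail also consults the plugin path.

#include <stdio.h>
#include <hdf5.h>

// Sketch only: warn if the CBF byte-offset filter cannot be found before
// calling write_byteoffset_data().  CBF_H5Z_FILTER_CBF comes from cbflib.
static int check_cbf_filter_available(void)
{
  htri_t avail = H5Zfilter_avail(CBF_H5Z_FILTER_CBF);

  if (avail <= 0) {
    fprintf(stderr, "CBF filter %d not available - is HDF5_PLUGIN_PATH set?\n",
            (int)CBF_H5Z_FILTER_CBF);
    return -1;
  }
  return 0; // filter is registered or loadable as a plugin
}
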
Example #2
//--------------------------------------------------------------------------
// Function:	DSetCreatPropList::setFilter
///\brief	Adds a filter to the filter pipeline
///\param	filter_id - IN: Filter to add
///\param	flags     - IN: Specifies general properties of the filter
///\param	cd_nelmts - IN: Number of elements in cd_values
///\param	cd_values - IN: Auxiliary data for the filter
///\exception	H5::PropListIException
///\par Description
///		The \a flags argument is a bit vector of the following field:
///		\c H5Z_FLAG_OPTIONAL(0x0001)
///\par
///		If this bit is set then the filter is optional.  If the filter
///		fails during a \c DataSet::write() operation then the filter
///		is just excluded from the pipeline for the chunk for which it
///		failed; the filter will not participate in the pipeline
///		during a \c DataSet::read() of the chunk.  If this bit is clear
///		and the filter fails then the entire I/O operation fails.
// Programmer	Binh-Minh Ribler - 2000
//--------------------------------------------------------------------------
void DSetCreatPropList::setFilter( H5Z_filter_t filter_id, unsigned int flags,
	size_t cd_nelmts, const unsigned int cd_values[] ) const
{
   herr_t ret_value = H5Pset_filter( id, filter_id, flags, cd_nelmts, cd_values );
   if( ret_value < 0 )
   {
      throw PropListIException("DSetCreatPropList::setFilter",
                "H5Pset_filter failed");
   }
}
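
In plain C terms, the behavior documented above comes down to the flag passed to H5Pset_filter, which this C++ wrapper forwards unchanged. A short sketch follows; the property list, chunk sizes, and compression level are illustrative, not taken from the snippet above.

// Sketch: with H5Z_FLAG_OPTIONAL a filter that fails during a write is simply
// dropped for that chunk; with H5Z_FLAG_MANDATORY the whole write fails.
hid_t    dcpl     = H5Pcreate(H5P_DATASET_CREATE);
hsize_t  chunk[2] = {64, 64};
unsigned level    = 6;                       // deflate compression level

H5Pset_chunk(dcpl, 2, chunk);
H5Pset_filter(dcpl, H5Z_FILTER_DEFLATE, H5Z_FLAG_OPTIONAL, 1, &level);
// H5Pset_filter(dcpl, H5Z_FILTER_DEFLATE, H5Z_FLAG_MANDATORY, 1, &level);
H5Pclose(dcpl);
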
Example #3
/*-------------------------------------------------------------------------
 * Function:  test_creating_groups_using_plugins
 *
 * Purpose:   Tests creating group with dynamically loaded filters
 *
 * Return:    SUCCEED/FAIL
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_creating_groups_using_plugins(hid_t fid)
{
    hid_t    gcpl_id = -1;
    hid_t    gid = -1;
    hid_t    sub_gid = -1;
    int      i;
    char     subgroup_name[256];

    TESTING("creating groups with filter plugin 4");

    if ((gcpl_id = H5Pcreate(H5P_GROUP_CREATE)) < 0)
        TEST_ERROR;

    /* Use a filter plugin for creating groups */
    if (H5Pset_filter(gcpl_id, FILTER4_ID, H5Z_FLAG_MANDATORY, (size_t)0, NULL) < 0)
        TEST_ERROR;

    /* Create a group using this filter */
    if ((gid = H5Gcreate2(fid, TOP_LEVEL_GROUP_NAME, H5P_DEFAULT, gcpl_id, H5P_DEFAULT)) < 0)
        TEST_ERROR;

    /* Create multiple groups under the top-level group */
    for (i = 0; i < N_SUBGROUPS; i++) {
        char *sp = subgroup_name;

        sp += HDsprintf(subgroup_name, SUBGROUP_PREFIX);
        HDsprintf(sp, "%d", i);

        if ((sub_gid = H5Gcreate2(gid, subgroup_name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
            TEST_ERROR;
        if (H5Gclose(sub_gid) < 0)
            TEST_ERROR;
    }

    /* Close everything */
    if (H5Gclose(gid) < 0)
        TEST_ERROR;
    if (H5Pclose(gcpl_id) < 0)
        TEST_ERROR;

    PASSED();

    return SUCCEED;

error:
    /* Clean up objects used for this test */
    H5E_BEGIN_TRY {
        H5Gclose(sub_gid);
        H5Gclose(gid);
        H5Pclose(gcpl_id);
    } H5E_END_TRY

    return FAIL;
} /* end test_creating_groups_using_plugins() */
Example #4
/*-------------------------------------------------------------------------
 * Function:	create_file_with_bogus_filter
 *
 * Purpose:	Create a file and a dataset with a bogus filter enabled
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:	Raymond Lu
 *              2 June 2011
 *
 *-------------------------------------------------------------------------
 */
static herr_t
create_file_with_bogus_filter(void)
{
    hid_t     fid = -1;              /* file ID */
    hid_t     dsid = -1;             /* dataset ID */
    hid_t     sid = -1;              /* dataspace ID */
    hid_t     dcpl = -1;             /* dataset creation property list ID */
    hsize_t   dims[1] = {20};        /* dataspace dimensions */
    hsize_t   chunk_dims[1] = {10};  /* chunk dimensions */
    int       buf[20];
    int       rank = 1;
    int       i;

    for(i = 0; i < 20; i++)
        buf[i] = 1;

    /* create a file using default properties */
    if((fid = H5Fcreate(TESTFILE2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error;

    /* create a data space */
    if((sid = H5Screate_simple(rank, dims, NULL)) < 0) goto error;

    /* create dcpl  */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;

    /* create chunking */ 
    if(H5Pset_chunk(dcpl, rank, chunk_dims) < 0) goto error;

    /* register bogus filter */
    if(H5Zregister (H5Z_BOGUS) < 0) goto error;
    if(H5Pset_filter(dcpl, H5Z_FILTER_BOGUS, 0, (size_t)0, NULL) < 0) goto error;

    /* create a dataset */
    if((dsid = H5Dcreate2(fid, DSETNAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) goto error;

    if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) goto error;

    /* close */
    if(H5Pclose(dcpl) < 0) goto error;
    if(H5Dclose(dsid) < 0) goto error;
    if(H5Sclose(sid) < 0) goto error;
    if(H5Fclose(fid) < 0) goto error;

    return 0;

error:
    H5E_BEGIN_TRY {
        H5Pclose(dcpl);
        H5Dclose(dsid);
        H5Sclose(sid);
        H5Fclose(fid);
    } H5E_END_TRY;
    return -1;
}
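
The H5Z_BOGUS class registered above is defined elsewhere in the test file. For orientation, a minimal pass-through filter of that kind looks roughly like the sketch below; the ID value and name are illustrative, not copied from the HDF5 sources.

#define FILTER_BOGUS_ID 305   /* illustrative ID; the test refers to it via H5Z_FILTER_BOGUS */

/* Pass-through filter callback: reports the chunk data unchanged. */
static size_t
filter_bogus(unsigned int flags, size_t cd_nelmts, const unsigned int cd_values[],
             size_t nbytes, size_t *buf_size, void **buf)
{
    (void)flags; (void)cd_nelmts; (void)cd_values; (void)buf_size; (void)buf;
    return nbytes;
}

const H5Z_class2_t H5Z_BOGUS[1] = {{
    H5Z_CLASS_T_VERS,               /* H5Z_class_t version          */
    (H5Z_filter_t)FILTER_BOGUS_ID,  /* filter ID number             */
    1, 1,                           /* encoder/decoder present      */
    "bogus",                        /* filter name for debugging    */
    NULL,                           /* can_apply callback (none)    */
    NULL,                           /* set_local callback (none)    */
    filter_bogus,                   /* the filter function itself   */
}};
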
Example #5
/*-------------------------------------------------------------------------
 * Function:    test_filter_write_failure
 *
 * Purpose:     Tests the library's behavior when a mandatory filter returns
 *              failure.  There are only 5 chunks, each of them holding
 *              2 integers.  The filter will fail in the last chunk.  The 
 *              dataset should release all resources even though the last 
 *              chunk can't be flushed to file.  The file should close
 *              successfully.
 *
 * Return:  
 *              Success:         0
 *              Failure:         -1
 *
 * Programmer:  Raymond Lu
 *              25 August 2010
 *
 * Modifications:
 *              Raymond Lu
 *              5 Oct 2010
 *              Test when the chunk cache is enabled and disabled to make
 *              sure the library behaves properly.
 *-------------------------------------------------------------------------
 */
static herr_t
test_filter_write(char *file_name, hid_t my_fapl, hbool_t cache_enabled)
{
    hid_t        file = -1;
    hid_t        dataset=-1;                /* dataset ID */
    hid_t        sid=-1;                   /* dataspace ID */
    hid_t        dcpl=-1;                  /* dataset creation property list ID */
    hsize_t      dims[1]={DIM};           /* dataspace dimension - 10*/
    hsize_t      chunk_dims[1]={FILTER_CHUNK_DIM}; /* chunk dimension - 2*/
    int          points[DIM];          /* Data */
    herr_t       ret;                   /* generic return value */
    int          i;

    if(cache_enabled) {
        TESTING("data writing when a mandatory filter fails and chunk cache is enabled");
    } else {
        TESTING("data writing when a mandatory filter fails and chunk cache is disabled");
    }

    /* Create file */
    if((file = H5Fcreate(file_name, H5F_ACC_TRUNC, H5P_DEFAULT, my_fapl)) < 0) TEST_ERROR

    /* create the data space */
    if((sid = H5Screate_simple(1, dims, NULL)) < 0) TEST_ERROR

    /* Create dcpl and register the filter */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR

    if(H5Pset_chunk(dcpl, 1, chunk_dims) < 0) TEST_ERROR

    if(H5Zregister (H5Z_FAIL_TEST) < 0) TEST_ERROR

    /* Check that the filter was registered */
    if(TRUE != H5Zfilter_avail(H5Z_FILTER_FAIL_TEST)) FAIL_STACK_ERROR

    /* Enable the filter as mandatory */
    if(H5Pset_filter(dcpl, H5Z_FILTER_FAIL_TEST, 0, (size_t)0, NULL) < 0)
        TEST_ERROR

    /* create a dataset */
    if((dataset = H5Dcreate2(file, DSET_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR 

    /* Initialize the write buffer */
    for(i = 0; i < DIM; i++)
        points[i] = i;

    /* Write data.  If the chunk cache is enabled, H5Dwrite should succeed.  If it is
     * disabled, H5Dwrite should fail. */
    if(cache_enabled) {
        if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, sid, H5P_DEFAULT, points) < 0) 
            TEST_ERROR
    } else {
Example #6
File: chunk.c  Project: MattNapsAlot/rHDF5
/*-------------------------------------------------------------------------
 * Function:	create_dataset
 *
 * Purpose:	Creates a square dataset with square chunks, registers a
 *		stupid compress/uncompress pair for counting I/O, and
 *		initializes the dataset.  The chunk size is in bytes, the
 *		dataset size is in terms of chunks.
 *
 * Return:	void
 *
 * Programmer:	Robb Matzke
 *              Thursday, May 14, 1998
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static void
create_dataset (void)
{
    hid_t	file, space, dcpl, dset;
    hsize_t	size[2];
    signed char	*buf;

    /* The file */
    file = H5Fcreate (FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_g);

    /* The data space */
    size[0] = size[1] = DS_SIZE * CH_SIZE;
    space = H5Screate_simple (2, size, size);

    /* The storage layout and compression */
    dcpl = H5Pcreate (H5P_DATASET_CREATE);
    size[0] = size[1] = CH_SIZE;
    H5Pset_chunk (dcpl, 2, size);
#ifdef H5_WANT_H5_V1_4_COMPAT
    H5Zregister (FILTER_COUNTER, "counter", counter);
#else /* H5_WANT_H5_V1_4_COMPAT */
    H5Zregister (H5Z_COUNTER);
#endif /* H5_WANT_H5_V1_4_COMPAT */
    H5Pset_filter (dcpl, FILTER_COUNTER, 0, 0, NULL);

    /* The dataset */
    dset = H5Dcreate (file, "dset", H5T_NATIVE_SCHAR, space, dcpl);
    assert (dset>=0);

    /* The data */
    buf = calloc (1, SQUARE (DS_SIZE*CH_SIZE));
    H5Dwrite (dset, H5T_NATIVE_SCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);
    free (buf);

    /* Close */
    H5Dclose (dset);
    H5Sclose (space);
    H5Pclose (dcpl);
    H5Fclose (file);
}
Example #7
File: plugin.c  Project: schwehr/hdf5
/*-------------------------------------------------------------------------
 * Function:	test_filters_for_groups
 *
 * Purpose:	Tests creating group with dynamically loaded filters
 *
 * Return:	Success:	0
 *		Failure:	-1
 *
 * Programmer:	Raymond Lu
 *              1 April 2013
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_filters_for_groups(hid_t file)
{
    hid_t	gcpl, gid, group;
    int         i;
    char        gname[256];

    TESTING("Testing DYNLIB3 filter for group");

    if((gcpl = H5Pcreate(H5P_GROUP_CREATE)) < 0) goto error;
  
    /* Use DYNLIB3 for creating groups */ 
    if(H5Pset_filter (gcpl, H5Z_FILTER_DYNLIB3, H5Z_FLAG_MANDATORY, (size_t)0, NULL) < 0) goto error;

    /* Create a group using this filter */
    if((gid = H5Gcreate2(file, "group1", H5P_DEFAULT, gcpl, H5P_DEFAULT)) < 0) goto error;

    /* Create multiple groups under "group1" */
    for (i=0; i < GROUP_ITERATION; i++) {
        sprintf(gname, "group_%d", i);
        if((group = H5Gcreate2(gid, gname, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error;
        if(H5Gclose(group) < 0) goto error; 
    }

    /* Close the group */
    if(H5Gclose(gid) < 0) goto error;

    /* Clean up objects used for this test */
    if(H5Pclose (gcpl) < 0) goto error;

    PASSED();

    return 0;

error:
    return -1;
}
Example #8
/*-------------------------------------------------------------------------
 * Function:	test_skip_compress_write2
 *
 * Purpose:	Test skipping compression filter when there are three filters
 *              for the dataset
 *
 * Return:	Success:	0
 *
 *		Failure:	1
 *
 * Programmer:  Raymond Lu	
 *              30 November 2012
 *
 *-------------------------------------------------------------------------
 */
static int
test_skip_compress_write2(hid_t file)
{
    hid_t       dataspace = -1, dataset = -1;
    hid_t       mem_space = -1;
    hid_t       cparms = -1, dxpl = -1;
    hsize_t     dims[2]  = {NX, NY};        
    hsize_t     maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
    hsize_t     chunk_dims[2] ={CHUNK_NX, CHUNK_NY};
    herr_t      status;
    int         i, j, n;

    unsigned    filter_mask = 0;
    int         origin_direct_buf[CHUNK_NX][CHUNK_NY];
    int         direct_buf[CHUNK_NX][CHUNK_NY];
    int         check_chunk[CHUNK_NX][CHUNK_NY];
    hsize_t     offset[2] = {0, 0};
    size_t      buf_size = CHUNK_NX*CHUNK_NY*sizeof(int);
    int          aggression = 9;     /* Compression aggression setting */

    hsize_t start[2];  /* Start of hyperslab */
    hsize_t stride[2]; /* Stride of hyperslab */
    hsize_t count[2];  /* Block count */
    hsize_t block[2];  /* Block sizes */

    TESTING("skipping compression filters but keep two other filters");

    /*
     * Create the data space with unlimited dimensions.
     */
    if((dataspace = H5Screate_simple(RANK, dims, maxdims)) < 0)
        goto error;

    if((mem_space = H5Screate_simple(RANK, chunk_dims, NULL)) < 0)
        goto error;

    /*
     * Modify dataset creation properties, i.e. enable chunking and compression.
     * The order of filters is bogus 1 + deflate + bogus 2.
     */
    if((cparms = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        goto error;

    if((status = H5Pset_chunk( cparms, RANK, chunk_dims)) < 0)
        goto error;

    /* Register and enable first bogus filter */
    if(H5Zregister (H5Z_BOGUS1) < 0) 
	goto error;

    if(H5Pset_filter(cparms, H5Z_FILTER_BOGUS1, 0, (size_t)0, NULL) < 0) 
	goto error;

    /* Enable compression filter */
    if((status = H5Pset_deflate( cparms, (unsigned) aggression)) < 0)
        goto error;

    /* Register and enable second bogus filter */
    if(H5Zregister (H5Z_BOGUS2) < 0) 
	goto error;

    if(H5Pset_filter(cparms, H5Z_FILTER_BOGUS2, 0, (size_t)0, NULL) < 0) 
	goto error;

    /*
     * Create a new dataset within the file using cparms
     * creation properties.
     */
    if((dataset = H5Dcreate2(file, DATASETNAME3, H5T_NATIVE_INT, dataspace, H5P_DEFAULT,
			cparms, H5P_DEFAULT)) < 0)
        goto error;

    if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0)
        goto error;

    /* Initialize data for one chunk. Apply operations of two bogus filters to the chunk */
    for(i = n = 0; i < CHUNK_NX; i++)
        for(j = 0; j < CHUNK_NY; j++) {
	    origin_direct_buf[i][j] = n++;
	    direct_buf[i][j] = (origin_direct_buf[i][j] + ADD_ON) * FACTOR;
    }

    /* Write the uncompressed chunk data to the dataset, using the direct chunk write function.
     * Indicate skipping the compression filter but keeping the other two bogus filters. */
    offset[0] = CHUNK_NX;
    offset[1] = CHUNK_NY;

    /* compression filter is the middle one to be skipped */
    filter_mask = 0x00000002;
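    /* Bit i of filter_mask refers to the i-th filter in the pipeline
     * (bit 0 = bogus 1, bit 1 = deflate, bit 2 = bogus 2); a set bit tells
     * H5DOwrite_chunk that this filter was not applied to the chunk data. */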

    if((status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, direct_buf)) < 0)
        goto error;

    if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0) 
        goto error;

    if(H5Dclose(dataset) < 0)
        goto error;

    if((dataset = H5Dopen2(file, DATASETNAME3, H5P_DEFAULT)) < 0)
        goto error;

    /*
     * Select hyperslab for one chunk in the file
     */
    start[0]  = CHUNK_NX; start[1]  = CHUNK_NY;
    stride[0] = 1; stride[1] = 1;
    count[0]  = 1; count[1]  = 1;
    block[0]  = CHUNK_NX; block[1]  = CHUNK_NY;
    if((status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, stride, count, block)) < 0)
        goto error;

    /* Read the chunk back */
    if((status = H5Dread(dataset, H5T_NATIVE_INT, mem_space, dataspace, H5P_DEFAULT, check_chunk)) < 0)
        goto error;

    /* Check that the values read are the same as the values written */
    for(i = 0; i < CHUNK_NX; i++) {
        for(j = 0; j < CHUNK_NY; j++) {
            if(origin_direct_buf[i][j] != check_chunk[i][j]) {
                printf("    1. Read different values than written.");
                printf("    At index %d,%d\n", i, j);
                printf("    origin_direct_buf=%d, check_chunk=%d\n", origin_direct_buf[i][j], check_chunk[i][j]); 
                goto error;
            }
        }
    }

    /*
     * Close/release resources.
     */
    H5Dclose(dataset);
    H5Sclose(mem_space);
    H5Sclose(dataspace);
    H5Pclose(cparms);
    H5Pclose(dxpl);
    
    PASSED();
    return 0;

error:
    H5E_BEGIN_TRY {
        H5Dclose(dataset);
        H5Sclose(mem_space);
        H5Sclose(dataspace);
        H5Pclose(cparms);
        H5Pclose(dxpl);
    } H5E_END_TRY;

    return 1;
}
Example #9
File: t_prop.c  Project: ElaraFX/hdf5
void
test_plist_ed(void)
{
    hid_t dcpl;	       	/* dataset create prop. list */
    hid_t dapl;	       	/* dataset access prop. list */
    hid_t dxpl;	       	/* dataset transfer prop. list */
    hid_t gcpl;	       	/* group create prop. list */
    hid_t lcpl;	       	/* link create prop. list */
    hid_t lapl;	       	/* link access prop. list */
    hid_t ocpypl;	/* object copy prop. list */
    hid_t ocpl;	        /* object create prop. list */
    hid_t fapl;	       	/* file access prop. list */
    hid_t fcpl;	       	/* file create prop. list */
    hid_t strcpl;	/* string create prop. list */
    hid_t acpl;	       	/* attribute create prop. list */

    int mpi_size, mpi_rank, recv_proc;

    hsize_t chunk_size = 16384;	/* chunk size */ 
    double fill = 2.7f;         /* Fill value */
    size_t nslots = 521*2;
    size_t nbytes = 1048576 * 10;
    double w0 = 0.5f;
    unsigned max_compact;
    unsigned min_dense;
    hsize_t max_size[1]; /*data space maximum size */
    const char* c_to_f = "x+32";
    H5AC_cache_config_t my_cache_config = {
        H5AC__CURR_CACHE_CONFIG_VERSION,
        TRUE,
        FALSE,
        FALSE,
        "temp",
        TRUE,
        FALSE,
        ( 2 * 2048 * 1024),
        0.3f,
        (64 * 1024 * 1024),
        (4 * 1024 * 1024),
        60000,
        H5C_incr__threshold,
        0.8f,
        3.0f,
        TRUE,
        (8 * 1024 * 1024),
        H5C_flash_incr__add_space,
        2.0f,
        0.25f,
        H5C_decr__age_out_with_threshold,
        0.997f,
        0.8f,
        TRUE,
        (3 * 1024 * 1024),
        3,
        FALSE,
        0.2f,
        (256 * 2048),
        H5AC__DEFAULT_METADATA_WRITE_STRATEGY};

    herr_t ret;         	/* Generic return value */

    if(VERBOSE_MED)
	printf("Encode/Decode DCPLs\n");

    /* set up MPI parameters */
    MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
    MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);

    if(mpi_size == 1)
        recv_proc = 0;
    else
        recv_proc = 1;

    dcpl = H5Pcreate(H5P_DATASET_CREATE);
    VRFY((dcpl >= 0), "H5Pcreate succeeded");

    ret = H5Pset_chunk(dcpl, 1, &chunk_size);
    VRFY((ret >= 0), "H5Pset_chunk succeeded");

    ret = H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_LATE);
    VRFY((ret >= 0), "H5Pset_alloc_time succeeded");

    ret = H5Pset_fill_value(dcpl, H5T_NATIVE_DOUBLE, &fill);
    VRFY((ret>=0), "set fill-value succeeded");

    max_size[0] = 100;
    ret = H5Pset_external(dcpl, "ext1.data", (off_t)0, 
                          (hsize_t)(max_size[0] * sizeof(int)/4));
    VRFY((ret>=0), "set external succeeded");
    ret = H5Pset_external(dcpl, "ext2.data", (off_t)0, 
                          (hsize_t)(max_size[0] * sizeof(int)/4));
    VRFY((ret>=0), "set external succeeded");
    ret = H5Pset_external(dcpl, "ext3.data", (off_t)0, 
                          (hsize_t)(max_size[0] * sizeof(int)/4));
    VRFY((ret>=0), "set external succeeded");
    ret = H5Pset_external(dcpl, "ext4.data", (off_t)0, 
                          (hsize_t)(max_size[0] * sizeof(int)/4));
    VRFY((ret>=0), "set external succeeded");

    ret = test_encode_decode(dcpl, mpi_rank, recv_proc);
    VRFY((ret >= 0), "test_encode_decode succeeded");

    ret = H5Pclose(dcpl);
    VRFY((ret >= 0), "H5Pclose succeeded");


    /******* ENCODE/DECODE DAPLS *****/
    dapl = H5Pcreate(H5P_DATASET_ACCESS);
    VRFY((dapl >= 0), "H5Pcreate succeeded");

    ret = H5Pset_chunk_cache(dapl, nslots, nbytes, w0);
    VRFY((ret >= 0), "H5Pset_chunk_cache succeeded");

    ret = test_encode_decode(dapl, mpi_rank, recv_proc);
    VRFY((ret >= 0), "test_encode_decode succeeded");

    ret = H5Pclose(dapl);
    VRFY((ret >= 0), "H5Pclose succeeded");


    /******* ENCODE/DECODE OCPLS *****/
    ocpl = H5Pcreate(H5P_OBJECT_CREATE);
    VRFY((ocpl >= 0), "H5Pcreate succeeded");

    ret = H5Pset_attr_creation_order(ocpl, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED));
    VRFY((ret >= 0), "H5Pset_attr_creation_order succeeded");

    ret = H5Pset_attr_phase_change(ocpl, 110, 105);
    VRFY((ret >= 0), "H5Pset_attr_phase_change succeeded");

    ret = H5Pset_filter(ocpl, H5Z_FILTER_FLETCHER32, 0, (size_t)0, NULL);
    VRFY((ret >= 0), "H5Pset_filter succeeded");

    ret = test_encode_decode(ocpl, mpi_rank, recv_proc);
    VRFY((ret >= 0), "test_encode_decode succeeded");

    ret = H5Pclose(ocpl);
    VRFY((ret >= 0), "H5Pclose succeeded");


    /******* ENCODE/DECODE DXPLS *****/
    dxpl = H5Pcreate(H5P_DATASET_XFER);
    VRFY((dxpl >= 0), "H5Pcreate succeeded");

    ret = H5Pset_btree_ratios(dxpl, 0.2f, 0.6f, 0.2f);
    VRFY((ret >= 0), "H5Pset_btree_ratios succeeded");

    ret = H5Pset_hyper_vector_size(dxpl, 5);
    VRFY((ret >= 0), "H5Pset_hyper_vector_size succeeded");

    ret = H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
    VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");

    ret = H5Pset_dxpl_mpio_collective_opt(dxpl, H5FD_MPIO_INDIVIDUAL_IO);
    VRFY((ret >= 0), "H5Pset_dxpl_mpio_collective_opt succeeded");

    ret = H5Pset_dxpl_mpio_chunk_opt(dxpl, H5FD_MPIO_CHUNK_MULTI_IO);
    VRFY((ret >= 0), "H5Pset_dxpl_mpio_chunk_opt succeeded");

    ret = H5Pset_dxpl_mpio_chunk_opt_ratio(dxpl, 30);
    VRFY((ret >= 0), "H5Pset_dxpl_mpio_chunk_opt_ratio succeeded");

    ret = H5Pset_dxpl_mpio_chunk_opt_num(dxpl, 40);
    VRFY((ret >= 0), "H5Pset_dxpl_mpio_chunk_opt_num succeeded");

    ret = H5Pset_edc_check(dxpl, H5Z_DISABLE_EDC);
    VRFY((ret >= 0), "H5Pset_edc_check succeeded");

    ret = H5Pset_data_transform(dxpl, c_to_f);
    VRFY((ret >= 0), "H5Pset_data_transform succeeded");

    ret = test_encode_decode(dxpl, mpi_rank, recv_proc);
    VRFY((ret >= 0), "test_encode_decode succeeded");

    ret = H5Pclose(dxpl);
    VRFY((ret >= 0), "H5Pclose succeeded");


    /******* ENCODE/DECODE GCPLS *****/
    gcpl = H5Pcreate(H5P_GROUP_CREATE);
    VRFY((gcpl >= 0), "H5Pcreate succeeded");

    ret = H5Pset_local_heap_size_hint(gcpl, 256);
    VRFY((ret >= 0), "H5Pset_local_heap_size_hint succeeded");

    ret = H5Pset_link_phase_change(gcpl, 2, 2);
    VRFY((ret >= 0), "H5Pset_link_phase_change succeeded");

    /* Query the group creation properties */
    ret = H5Pget_link_phase_change(gcpl, &max_compact, &min_dense);
    VRFY((ret >= 0), "H5Pget_est_link_info succeeded");

    ret = H5Pset_est_link_info(gcpl, 3, 9);
    VRFY((ret >= 0), "H5Pset_est_link_info succeeded");

    ret = H5Pset_link_creation_order(gcpl, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED));
    VRFY((ret >= 0), "H5Pset_link_creation_order succeeded");

    ret = test_encode_decode(gcpl, mpi_rank, recv_proc);
    VRFY((ret >= 0), "test_encode_decode succeeded");

    ret = H5Pclose(gcpl);
    VRFY((ret >= 0), "H5Pclose succeeded");


    /******* ENCODE/DECODE LCPLS *****/
    lcpl = H5Pcreate(H5P_LINK_CREATE);
    VRFY((lcpl >= 0), "H5Pcreate succeeded");

    ret= H5Pset_create_intermediate_group(lcpl, TRUE);
    VRFY((ret >= 0), "H5Pset_create_intermediate_group succeeded");

    ret = test_encode_decode(lcpl, mpi_rank, recv_proc);
    VRFY((ret >= 0), "test_encode_decode succeeded");

    ret = H5Pclose(lcpl);
    VRFY((ret >= 0), "H5Pclose succeeded");


    /******* ENCODE/DECODE LAPLS *****/
    lapl = H5Pcreate(H5P_LINK_ACCESS);
    VRFY((lapl >= 0), "H5Pcreate succeeded");

    ret = H5Pset_nlinks(lapl, (size_t)134);
    VRFY((ret >= 0), "H5Pset_nlinks succeeded");

    ret = H5Pset_elink_acc_flags(lapl, H5F_ACC_RDONLY);
    VRFY((ret >= 0), "H5Pset_elink_acc_flags succeeded");

    ret = H5Pset_elink_prefix(lapl, "/tmpasodiasod");
    VRFY((ret >= 0), "H5Pset_nlinks succeeded");

    /* Create FAPL for the elink FAPL */
    fapl = H5Pcreate(H5P_FILE_ACCESS);
    VRFY((fapl >= 0), "H5Pcreate succeeded");
    ret = H5Pset_alignment(fapl, 2, 1024);
    VRFY((ret >= 0), "H5Pset_alignment succeeded");

    ret = H5Pset_elink_fapl(lapl, fapl);
    VRFY((ret >= 0), "H5Pset_elink_fapl succeeded");

    /* Close the elink's FAPL */
    ret = H5Pclose(fapl);
    VRFY((ret >= 0), "H5Pclose succeeded");

    ret = test_encode_decode(lapl, mpi_rank, recv_proc);
    VRFY((ret >= 0), "test_encode_decode succeeded");

    ret = H5Pclose(lapl);
    VRFY((ret >= 0), "H5Pclose succeeded");


    /******* ENCODE/DECODE OCPYPLS *****/
    ocpypl = H5Pcreate(H5P_OBJECT_COPY);
    VRFY((ocpypl >= 0), "H5Pcreate succeeded");

    ret = H5Pset_copy_object(ocpypl, H5O_COPY_EXPAND_EXT_LINK_FLAG);
    VRFY((ret >= 0), "H5Pset_copy_object succeeded");

    ret = H5Padd_merge_committed_dtype_path(ocpypl, "foo");
    VRFY((ret >= 0), "H5Padd_merge_committed_dtype_path succeeded");

    ret = H5Padd_merge_committed_dtype_path(ocpypl, "bar");
    VRFY((ret >= 0), "H5Padd_merge_committed_dtype_path succeeded");

    ret = test_encode_decode(ocpypl, mpi_rank, recv_proc);
    VRFY((ret >= 0), "test_encode_decode succeeded");

    ret = H5Pclose(ocpypl);
    VRFY((ret >= 0), "H5Pclose succeeded");


    /******* ENCODE/DECODE FAPLS *****/
    fapl = H5Pcreate(H5P_FILE_ACCESS);
    VRFY((fapl >= 0), "H5Pcreate succeeded");

    ret = H5Pset_family_offset(fapl, 1024);
    VRFY((ret >= 0), "H5Pset_family_offset succeeded");

    ret = H5Pset_meta_block_size(fapl, 2098452);
    VRFY((ret >= 0), "H5Pset_meta_block_size succeeded");

    ret = H5Pset_sieve_buf_size(fapl, 1048576);
    VRFY((ret >= 0), "H5Pset_sieve_buf_size succeeded");

    ret = H5Pset_alignment(fapl, 2, 1024);
    VRFY((ret >= 0), "H5Pset_alignment succeeded");

    ret = H5Pset_cache(fapl, 1024, 128, 10485760, 0.3f);
    VRFY((ret >= 0), "H5Pset_cache succeeded");

    ret = H5Pset_elink_file_cache_size(fapl, 10485760);
    VRFY((ret >= 0), "H5Pset_elink_file_cache_size succeeded");

    ret = H5Pset_gc_references(fapl, 1);
    VRFY((ret >= 0), "H5Pset_gc_references succeeded");

    ret = H5Pset_small_data_block_size(fapl, 2048);
    VRFY((ret >= 0), "H5Pset_small_data_block_size succeeded");

    ret = H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
    VRFY((ret >= 0), "H5Pset_libver_bounds succeeded");

    ret = H5Pset_fclose_degree(fapl, H5F_CLOSE_WEAK);
    VRFY((ret >= 0), "H5Pset_fclose_degree succeeded");

    ret = H5Pset_multi_type(fapl, H5FD_MEM_GHEAP);
    VRFY((ret >= 0), "H5Pset_multi_type succeeded");

    ret = H5Pset_mdc_config(fapl, &my_cache_config);
    VRFY((ret >= 0), "H5Pset_mdc_config succeeded");

    ret = test_encode_decode(fapl, mpi_rank, recv_proc);
    VRFY((ret >= 0), "test_encode_decode succeeded");

    ret = H5Pclose(fapl);
    VRFY((ret >= 0), "H5Pclose succeeded");


    /******* ENCODE/DECODE FCPLS *****/
    fcpl = H5Pcreate(H5P_FILE_CREATE);
    VRFY((fcpl >= 0), "H5Pcreate succeeded");

    ret = H5Pset_userblock(fcpl, 1024);
    VRFY((ret >= 0), "H5Pset_userblock succeeded");

    ret = H5Pset_istore_k(fcpl, 3);
    VRFY((ret >= 0), "H5Pset_istore_k succeeded");

    ret = H5Pset_sym_k(fcpl, 4, 5);
    VRFY((ret >= 0), "H5Pset_sym_k succeeded");

    ret = H5Pset_shared_mesg_nindexes(fcpl, 8);
    VRFY((ret >= 0), "H5Pset_shared_mesg_nindexes succeeded");

    ret = H5Pset_shared_mesg_index(fcpl, 1,  H5O_SHMESG_SDSPACE_FLAG, 32);
    VRFY((ret >= 0), "H5Pset_shared_mesg_index succeeded");

    ret = H5Pset_shared_mesg_phase_change(fcpl, 60, 20);
    VRFY((ret >= 0), "H5Pset_shared_mesg_phase_change succeeded");

    ret = H5Pset_sizes(fcpl, 8, 4);
    VRFY((ret >= 0), "H5Pset_sizes succeeded");

    ret = test_encode_decode(fcpl, mpi_rank, recv_proc);
    VRFY((ret >= 0), "test_encode_decode succeeded");

    ret = H5Pclose(fcpl);
    VRFY((ret >= 0), "H5Pclose succeeded");


    /******* ENCODE/DECODE STRCPLS *****/
    strcpl = H5Pcreate(H5P_STRING_CREATE);
    VRFY((strcpl >= 0), "H5Pcreate succeeded");

    ret = H5Pset_char_encoding(strcpl, H5T_CSET_UTF8);
    VRFY((ret >= 0), "H5Pset_char_encoding succeeded");

    ret = test_encode_decode(strcpl, mpi_rank, recv_proc);
    VRFY((ret >= 0), "test_encode_decode succeeded");

    ret = H5Pclose(strcpl);
    VRFY((ret >= 0), "H5Pclose succeeded");


    /******* ENCODE/DECODE ACPLS *****/
    acpl = H5Pcreate(H5P_ATTRIBUTE_CREATE);
    VRFY((acpl >= 0), "H5Pcreate succeeded");

    ret = H5Pset_char_encoding(acpl, H5T_CSET_UTF8);
    VRFY((ret >= 0), "H5Pset_char_encoding succeeded");

    ret = test_encode_decode(acpl, mpi_rank, recv_proc);
    VRFY((ret >= 0), "test_encode_decode succeeded");

    ret = H5Pclose(acpl);
    VRFY((ret >= 0), "H5Pclose succeeded");
}
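
The test_encode_decode helper used throughout is not shown here; presumably it serializes each property list on one rank and rebuilds it on recv_proc. A serial sketch of that round trip, using the classic three-argument H5Pencode together with H5Pdecode and H5Pequal (names here are illustrative, not the author's helper):

#include <stdlib.h>
#include "hdf5.h"

/* Sketch: encode a property list to a buffer, decode it back, and check
 * that the copy carries the same property values as the original. */
static int encode_decode_roundtrip(hid_t plist)
{
    size_t bufsize = 0;
    void  *buf     = NULL;
    hid_t  copy    = -1;
    int    ok      = -1;

    if (H5Pencode(plist, NULL, &bufsize) < 0)       /* query required size */
        return -1;
    if (NULL == (buf = malloc(bufsize)))
        return -1;
    if (H5Pencode(plist, buf, &bufsize) >= 0 &&     /* serialize           */
        (copy = H5Pdecode(buf)) >= 0 &&             /* reconstruct         */
        H5Pequal(plist, copy) > 0)                  /* same properties?    */
        ok = 0;

    if (copy >= 0)
        H5Pclose(copy);
    free(buf);
    return ok;
}
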
Example #10
File: gen_cross.c  Project: Starlink/hdf5
/*-------------------------------------------------------------------------
 * Function:    create_fletcher_dsets_float
 *
 * Purpose:     Create a dataset of FLOAT datatype with fletcher filter
 *
 * Return:      Success:        0
 *              Failure:        -1
 *
 * Programmer:  Raymond Lu
 *              29 March 2011
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
int
create_fletcher_dsets_float(hid_t fid, hid_t fsid, hid_t msid)
{
    hid_t       dataset         = -1;   /* dataset handles */
    hid_t       dcpl            = -1;
    float       data[NX][NY];           /* data to write */
    float       fillvalue = -2.2f;
    hsize_t     chunk[RANK] = {CHUNK0, CHUNK1};
    int         i, j;

    /*
     * Data and output buffer initialization.
     */
    for (j = 0; j < NX; j++) {
        for (i = 0; i < NY; i++)
            data[j][i] = ((float)(i + j + 1))/3;
    }

    /*
     * Create the dataset creation property list, add the Fletcher32
     * checksum filter, set the chunk size, and set the fill value.
     */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR
    if(H5Pset_filter(dcpl, H5Z_FILTER_FLETCHER32, 0, (size_t)0, NULL) < 0)
        TEST_ERROR
    if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
        TEST_ERROR
    if(H5Pset_fill_value(dcpl, H5T_NATIVE_FLOAT, &fillvalue) < 0)
        TEST_ERROR

    /*
     * Create a new dataset within the file using the dataspace defined above,
     * a little-endian datatype, and the dataset creation property list above.
     */
    if((dataset = H5Dcreate2(fid, DATASETNAME14, H5T_IEEE_F32LE, fsid,
            H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
        TEST_ERROR

    /*
     * Write the data to the dataset using default transfer properties.
     */
    if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
        TEST_ERROR

    /* Close dataset */
    if(H5Dclose(dataset) < 0)
        TEST_ERROR

    /* Now create a dataset with a big-endian type */
    if((dataset = H5Dcreate2(fid, DATASETNAME15, H5T_IEEE_F32BE, fsid,
            H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
        TEST_ERROR
    if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
        TEST_ERROR
    if(H5Dclose(dataset) < 0)
        TEST_ERROR

    /*
     * Close/release resources.
     */
    if(H5Pclose(dcpl) < 0)
        TEST_ERROR

    return 0;

error:
    H5E_BEGIN_TRY {
        H5Pclose(dcpl);
        H5Dclose(dataset);
    } H5E_END_TRY;

    return -1;
}
Example #11
File: gen_plist.c  Project: Starlink/hdf5
int
main(void)
{
    hid_t dcpl1;	       	/* dataset create prop. list */
    hid_t dapl1;	       	/* dataset access prop. list */
    hid_t dxpl1;	       	/* dataset xfer prop. list */
    hid_t gcpl1;	       	/* group create prop. list */
    hid_t ocpypl1;		    /* object copy prop. list */
    hid_t ocpl1;	        /* object create prop. list */
    hid_t lcpl1;	       	/* link create prop. list */
    hid_t lapl1;	       	/* link access prop. list */
    hid_t fapl1;	       	/* file access prop. list */
    hid_t fcpl1;	       	/* file create prop. list */
    hid_t strcpl1;	       	/* string create prop. list */
    hid_t acpl1;	       	/* attribute create prop. list */

    herr_t ret = 0;
    hsize_t chunk_size = 16384;	/* chunk size */ 
    int fill = 2;            /* Fill value */
    hsize_t max_size[1];        /* data space maximum size */
    size_t nslots = 521 * 2;
    size_t nbytes = 1048576 * 10;
    double w0 = 0.5f;
    unsigned max_compact;
    unsigned min_dense;
    const char* c_to_f = "x+32";
    int little_endian;
    int word_length;
    H5AC_cache_config_t my_cache_config = {
        H5AC__CURR_CACHE_CONFIG_VERSION,
        1 /*TRUE*/,
        0 /*FALSE*/,
        0 /*FALSE*/,
        "temp",
        1 /*TRUE*/,
        0 /*FALSE*/,
        ( 2 * 2048 * 1024),
        0.3f,
        (64 * 1024 * 1024),
        (4 * 1024 * 1024),
        60000,
        H5C_incr__threshold,
        0.8f,
        3.0f,
        1 /*TRUE*/,
        (8 * 1024 * 1024),
        H5C_flash_incr__add_space,
        2.0f,
        0.25f,
        H5C_decr__age_out_with_threshold,
        0.997f,
        0.8f,
        1 /*TRUE*/,
        (3 * 1024 * 1024),
        3,
        0 /*FALSE*/,
        0.2f,
        (256 * 2048),
        H5AC_METADATA_WRITE_STRATEGY__PROCESS_0_ONLY};
    H5AC_cache_image_config_t my_cache_image_config = {
        H5AC__CURR_CACHE_IMAGE_CONFIG_VERSION,
        TRUE,
	FALSE,
        -1};


    /* check endianness */
    {
        short int word = 0x0001;
        char *byte = (char *) &word;

        if(byte[0] == 1)
            /* little endian */
            little_endian = 1;
        else
            /* big endian */
            little_endian = 0;
    }

    /* check word length */
    {
        word_length = 8 * sizeof(void *);
    }

    /* Explicitly initialize the library, since we are including the private header file */
    H5open();

    /******* ENCODE/DECODE DCPLS *****/
    if((dcpl1 = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        assert(dcpl1 > 0);

    if((ret = encode_plist(dcpl1, little_endian, word_length, "testfiles/plist_files/def_dcpl_")) < 0)
        assert(ret > 0);

    if((ret = H5Pset_chunk(dcpl1, 1, &chunk_size)) < 0)
        assert(ret > 0);

    if((ret = H5Pset_alloc_time(dcpl1, H5D_ALLOC_TIME_LATE)) < 0)
        assert(ret > 0);

    ret = H5Tconvert(H5T_NATIVE_INT, H5T_STD_I32BE, (size_t)1, &fill, NULL, H5P_DEFAULT);
    assert(ret >= 0);
    if((ret = H5Pset_fill_value(dcpl1, H5T_STD_I32BE, &fill)) < 0)
        assert(ret > 0);

    max_size[0] = 100;
    if((ret = H5Pset_external(dcpl1, "ext1.data", (off_t)0, 
                         (hsize_t)(max_size[0] * sizeof(int)/4))) < 0)
        assert(ret > 0);
    if((ret = H5Pset_external(dcpl1, "ext2.data", (off_t)0, 
                         (hsize_t)(max_size[0] * sizeof(int)/4))) < 0)
        assert(ret > 0);
    if((ret = H5Pset_external(dcpl1, "ext3.data", (off_t)0, 
                         (hsize_t)(max_size[0] * sizeof(int)/4))) < 0)
        assert(ret > 0);
    if((ret = H5Pset_external(dcpl1, "ext4.data", (off_t)0, 
                         (hsize_t)(max_size[0] * sizeof(int)/4))) < 0)
        assert(ret > 0);

    if((ret = encode_plist(dcpl1, little_endian, word_length, "testfiles/plist_files/dcpl_")) < 0)
        assert(ret > 0);
        
    /* release resource */
    if((ret = H5Pclose(dcpl1)) < 0)
         assert(ret > 0);


    /******* ENCODE/DECODE DAPLS *****/
    if((dapl1 = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
        assert(dapl1 > 0);

    if((ret = encode_plist(dapl1, little_endian, word_length, "testfiles/plist_files/def_dapl_")) < 0)
        assert(ret > 0);
        
    if((ret = H5Pset_chunk_cache(dapl1, nslots, nbytes, w0)) < 0)
        assert(ret > 0);

    if((ret = encode_plist(dapl1, little_endian, word_length, "testfiles/plist_files/dapl_")) < 0)
        assert(ret > 0);
        
    /* release resource */
    if((ret = H5Pclose(dapl1)) < 0)
         assert(ret > 0);

    /******* ENCODE/DECODE DXPLS *****/
    if((dxpl1 = H5Pcreate(H5P_DATASET_XFER)) < 0)
        assert(dxpl1 > 0);

    if((ret = encode_plist(dxpl1, little_endian, word_length, "testfiles/plist_files/def_dxpl_")) < 0)
        assert(ret > 0);

    if((ret = H5Pset_btree_ratios(dxpl1, 0.2f, 0.6f, 0.2f)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_hyper_vector_size(dxpl1, 5)) < 0)
        assert(ret > 0);
#ifdef H5_HAVE_PARALLEL
    if((ret = H5Pset_dxpl_mpio(dxpl1, H5FD_MPIO_COLLECTIVE)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_dxpl_mpio_collective_opt(dxpl1, H5FD_MPIO_INDIVIDUAL_IO)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_dxpl_mpio_chunk_opt(dxpl1, H5FD_MPIO_CHUNK_MULTI_IO)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_dxpl_mpio_chunk_opt_ratio(dxpl1, 30)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_dxpl_mpio_chunk_opt_num(dxpl1, 40)) < 0)
        assert(ret > 0);
#endif/* H5_HAVE_PARALLEL */
    if((ret = H5Pset_edc_check(dxpl1, H5Z_DISABLE_EDC)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_data_transform(dxpl1, c_to_f)) < 0)
        assert(ret > 0);

    if((ret = encode_plist(dxpl1, little_endian, word_length, "testfiles/plist_files/dxpl_")) < 0)
        assert(ret > 0);
        
    /* release resource */
    if((ret = H5Pclose(dxpl1)) < 0)
         assert(ret > 0);


    /******* ENCODE/DECODE GCPLS *****/
    if((gcpl1 = H5Pcreate(H5P_GROUP_CREATE)) < 0)
        assert(gcpl1 > 0);

    if((ret = encode_plist(gcpl1, little_endian, word_length, "testfiles/plist_files/def_gcpl_")) < 0)
        assert(ret > 0);

    if((ret = H5Pset_local_heap_size_hint(gcpl1, 256)) < 0)
         assert(ret > 0);

    if((ret = H5Pset_link_phase_change(gcpl1, 2, 2)) < 0)
         assert(ret > 0);

    /* Query the group creation properties */
    if((ret = H5Pget_link_phase_change(gcpl1, &max_compact, &min_dense)) < 0)
         assert(ret > 0);

    if((ret = H5Pset_est_link_info(gcpl1, 3, 9)) < 0)
         assert(ret > 0);

    if((ret = H5Pset_link_creation_order(gcpl1, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED))) < 0)
         assert(ret > 0);

    if((ret = encode_plist(gcpl1, little_endian, word_length, "testfiles/plist_files/gcpl_")) < 0)
        assert(ret > 0);
        
    /* release resource */
    if((ret = H5Pclose(gcpl1)) < 0)
         assert(ret > 0);

    /******* ENCODE/DECODE LCPLS *****/
    if((lcpl1 = H5Pcreate(H5P_LINK_CREATE)) < 0)
        assert(lcpl1 > 0);

    if((ret = encode_plist(lcpl1, little_endian, word_length, "testfiles/plist_files/def_lcpl_")) < 0)
        assert(ret > 0);

    if((ret = H5Pset_create_intermediate_group(lcpl1, 1 /*TRUE*/)) < 0)
        assert(ret > 0);

    if((ret = encode_plist(lcpl1, little_endian, word_length, "testfiles/plist_files/lcpl_")) < 0)
        assert(ret > 0);
        
    /* release resource */
    if((ret = H5Pclose(lcpl1)) < 0)
        assert(ret > 0);

    /******* ENCODE/DECODE OCPYLS *****/
    if((ocpypl1 = H5Pcreate(H5P_OBJECT_COPY)) < 0)
        assert(ocpypl1 > 0);

    if((ret = encode_plist(ocpypl1, little_endian, word_length, "testfiles/plist_files/def_ocpypl_")) < 0)
        assert(ret > 0);

    ret = H5Pset_copy_object(ocpypl1, H5O_COPY_EXPAND_EXT_LINK_FLAG);
    assert(ret >= 0);

    ret = H5Padd_merge_committed_dtype_path(ocpypl1, "foo");
    assert(ret >= 0);

    ret = H5Padd_merge_committed_dtype_path(ocpypl1, "bar");
    assert(ret >= 0);

    if((ret = encode_plist(ocpypl1, little_endian, word_length, "testfiles/plist_files/ocpypl_")) < 0)
        assert(ret > 0);
        
    /* release resource */
    if((ret = H5Pclose(ocpypl1)) < 0)
         assert(ret > 0);

    /******* ENCODE/DECODE OCPLS *****/
    if((ocpl1 = H5Pcreate(H5P_OBJECT_CREATE)) < 0)
        assert(ocpl1 > 0);

    if((ret = encode_plist(ocpl1, little_endian, word_length, "testfiles/plist_files/def_ocpl_")) < 0)
        assert(ret > 0);

    if((ret = H5Pset_attr_creation_order(ocpl1, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED))) < 0)
         assert(ret > 0);

    if((ret = H5Pset_attr_phase_change (ocpl1, 110, 105)) < 0)
         assert(ret > 0);

    if((ret = H5Pset_filter (ocpl1, H5Z_FILTER_FLETCHER32, 0, (size_t)0, NULL)) < 0)
        assert(ret > 0);

    if((ret = encode_plist(ocpl1, little_endian, word_length, "testfiles/plist_files/ocpl_")) < 0)
        assert(ret > 0);

    /* release resource */
    if((ret = H5Pclose(ocpl1)) < 0)
        assert(ret > 0);

    /******* ENCODE/DECODE LAPLS *****/
    if((lapl1 = H5Pcreate(H5P_LINK_ACCESS)) < 0)
        assert(lapl1 > 0);

    if((ret = encode_plist(lapl1, little_endian, word_length, "testfiles/plist_files/def_lapl_")) < 0)
        assert(ret > 0);

    if((ret = H5Pset_nlinks(lapl1, (size_t)134)) < 0)
        assert(ret > 0);

    if((ret = H5Pset_elink_acc_flags(lapl1, H5F_ACC_RDONLY)) < 0)
        assert(ret > 0);

    if((ret = H5Pset_elink_prefix(lapl1, "/tmpasodiasod")) < 0)
        assert(ret > 0);

    /* Create FAPL for the elink FAPL */
    if((fapl1 = H5Pcreate(H5P_FILE_ACCESS)) < 0)
        assert(fapl1 > 0);
    if((ret = H5Pset_alignment(fapl1, 2, 1024)) < 0)
        assert(ret > 0);

    if((ret = H5Pset_elink_fapl(lapl1, fapl1)) < 0)
        assert(ret > 0);

    /* Close the elink's FAPL */
    if((ret = H5Pclose(fapl1)) < 0)
        assert(ret > 0);

    if((ret = encode_plist(lapl1, little_endian, word_length, "testfiles/plist_files/lapl_")) < 0)
        assert(ret > 0);

    /* release resource */
    if((ret = H5Pclose(lapl1)) < 0)
        assert(ret > 0);

    /******* ENCODE/DECODE FAPLS *****/
    if((fapl1 = H5Pcreate(H5P_FILE_ACCESS)) < 0)
        assert(fapl1 > 0);

    if((ret = encode_plist(fapl1, little_endian, word_length, "testfiles/plist_files/def_fapl_")) < 0)
        assert(ret > 0);

    if((ret = H5Pset_family_offset(fapl1, 1024)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_meta_block_size(fapl1, 2098452)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_sieve_buf_size(fapl1, 1048576)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_alignment(fapl1, 2, 1024)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_cache(fapl1, 1024, 128, 10485760, 0.3f)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_elink_file_cache_size(fapl1, 10485760)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_gc_references(fapl1, 1)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_small_data_block_size(fapl1, 2048)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_libver_bounds(fapl1, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_fclose_degree(fapl1, H5F_CLOSE_WEAK)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_multi_type(fapl1, H5FD_MEM_GHEAP)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_mdc_config(fapl1, &my_cache_config)) < 0)
        assert(ret > 0);
    if((ret = H5Pset_mdc_image_config(fapl1, &my_cache_image_config)) < 0)
        assert(ret > 0);

    if((ret = H5Pset_core_write_tracking(fapl1, TRUE, (size_t)(1024 * 1024))) < 0)
        assert(ret > 0);

    if((ret = encode_plist(fapl1, little_endian, word_length, "testfiles/plist_files/fapl_")) < 0)
        assert(ret > 0);

    /* release resource */
    if((ret = H5Pclose(fapl1)) < 0)
        assert(ret > 0);

    /******* ENCODE/DECODE FCPLS *****/
    if((fcpl1 = H5Pcreate(H5P_FILE_CREATE)) < 0)
        assert(fcpl1 > 0);

    if((ret = encode_plist(fcpl1, little_endian, word_length, "testfiles/plist_files/def_fcpl_")) < 0)
        assert(ret > 0);

    if((ret = H5Pset_userblock(fcpl1, 1024)) < 0)
         assert(ret > 0);

    if((ret = H5Pset_istore_k(fcpl1, 3)) < 0)
         assert(ret > 0);

    if((ret = H5Pset_sym_k(fcpl1, 4, 5)) < 0)
         assert(ret > 0);

    if((ret = H5Pset_shared_mesg_nindexes(fcpl1, 8)) < 0)
         assert(ret > 0);

    if((ret = H5Pset_shared_mesg_index(fcpl1, 1,  H5O_SHMESG_SDSPACE_FLAG, 32)) < 0)
         assert(ret > 0);

    if((ret = H5Pset_shared_mesg_phase_change(fcpl1, 60, 20)) < 0)
         assert(ret > 0);

    if((ret = H5Pset_sizes(fcpl1, 8, 4)) < 0)
         assert(ret > 0);

    if((ret = H5Pset_file_space_strategy(fcpl1, H5F_FSPACE_STRATEGY_PAGE, TRUE, (hsize_t)1)) < 0)
         assert(ret > 0);

    if((ret = H5Pset_file_space_page_size(fcpl1, (hsize_t)4096)) < 0)
         assert(ret > 0);

    if((ret = encode_plist(fcpl1, little_endian, word_length, "testfiles/plist_files/fcpl_")) < 0)
        assert(ret > 0);

    /* release resource */
    if((ret = H5Pclose(fcpl1)) < 0)
        assert(ret > 0);

    /******* ENCODE/DECODE STRCPLS *****/
    strcpl1 = H5Pcreate(H5P_STRING_CREATE);
    assert(strcpl1 > 0);

    ret = encode_plist(strcpl1, little_endian, word_length, "testfiles/plist_files/def_strcpl_");
    assert(ret > 0);

    ret = H5Pset_char_encoding(strcpl1, H5T_CSET_UTF8);
    assert(ret >= 0);

    ret = encode_plist(strcpl1, little_endian, word_length, "testfiles/plist_files/strcpl_");
    assert(ret > 0);

    /* release resource */
    ret = H5Pclose(strcpl1);
    assert(ret >= 0);

    /******* ENCODE/DECODE ACPLS *****/
    acpl1 = H5Pcreate(H5P_ATTRIBUTE_CREATE);
    assert(acpl1 > 0);

    ret = encode_plist(acpl1, little_endian, word_length, "testfiles/plist_files/def_acpl_");
    assert(ret > 0);

    ret = H5Pset_char_encoding(acpl1, H5T_CSET_UTF8);
    assert(ret >= 0);

    ret = encode_plist(acpl1, little_endian, word_length, "testfiles/plist_files/acpl_");
    assert(ret > 0);

    /* release resource */
    ret = H5Pclose(acpl1);
    assert(ret >= 0);

    return 0;
}
Example #12
/*-------------------------------------------------------------------------
 * Function:  test_dataset_write_with_filters
 *
 * Purpose:   Tests creating datasets and writing data with dynamically loaded filters
 *
 * Return:    SUCCEED/FAIL
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_dataset_write_with_filters(hid_t fid)
{
    hid_t           dcpl_id = -1;           /* Dataset creation property list ID        */
    unsigned int    compress_level;         /* Deflate compression level                */
    unsigned int    filter1_data;           /* Data used by filter 1                    */
    unsigned int    libver_values[4];       /* Used w/ the filter that makes HDF5 calls */

    /*----------------------------------------------------------
     * STEP 1: Test deflation by itself.
     *----------------------------------------------------------
     */
    HDputs("Testing dataset writes with deflate filter");
#ifdef H5_HAVE_FILTER_DEFLATE
    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR;
    if (H5Pset_chunk(dcpl_id, 2, chunk_sizes_g) < 0)
        TEST_ERROR;
    compress_level = 6;
    if (H5Pset_deflate(dcpl_id, compress_level) < 0)
        TEST_ERROR;

    /* Ensure the filter works */
    if (ensure_filter_works(fid, DSET_DEFLATE_NAME, dcpl_id) < 0)
        TEST_ERROR;

    /* Clean up objects used for this test */
    if (H5Pclose(dcpl_id) < 0)
        TEST_ERROR;
#else /* H5_HAVE_FILTER_DEFLATE */
    SKIPPED();
    HDputs("    Deflate filter not enabled");
#endif /* H5_HAVE_FILTER_DEFLATE */

    /*----------------------------------------------------------
     * STEP 2: Test filter plugin 1 by itself.
     *----------------------------------------------------------
     */
    HDputs("    dataset writes with filter plugin 1");
    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR;
    if (H5Pset_chunk(dcpl_id, 2, chunk_sizes_g) < 0)
        TEST_ERROR;

    /* Set up the filter, passing in the amount the filter will add and subtract
     * from each data element. Note that this value has an arbitrary max of 9.
     */
    filter1_data = 9;
    if (H5Pset_filter(dcpl_id, FILTER1_ID, H5Z_FLAG_MANDATORY, (size_t)1, &filter1_data) < 0)
        TEST_ERROR;

    /* Ensure the filter works */
    if (ensure_filter_works(fid, DSET_FILTER1_NAME, dcpl_id) < 0)
        TEST_ERROR;

    /* Clean up objects used for this test */
    if (H5Pclose(dcpl_id) < 0)
        TEST_ERROR;

    /* Unregister the dynamic filter for testing purposes. The next time this test is run for
     * the new file format, the library's H5PL code has to search the table of loaded plugin libraries
     * for this filter.
     */
    if (H5Zunregister(FILTER1_ID) < 0)
        TEST_ERROR;

    /*----------------------------------------------------------
     * STEP 3: Test filter plugin 2 by itself.
     *----------------------------------------------------------
     */
    HDputs("    dataset writes with filter plugin 2");
    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR;
    if (H5Pset_chunk(dcpl_id, 2, chunk_sizes_g) < 0)
        TEST_ERROR;
    if (H5Pset_filter(dcpl_id, FILTER2_ID, H5Z_FLAG_MANDATORY, 0, NULL) < 0)
        TEST_ERROR;

    /* Ensure the filter works */
    if (ensure_filter_works(fid, DSET_FILTER2_NAME, dcpl_id) < 0)
        TEST_ERROR;

    /* Clean up objects used for this test */
    if (H5Pclose(dcpl_id) < 0)
        TEST_ERROR;

    /* Unregister the dynamic filter for testing purposes. The next time this test is run for
     * the new file format, the library's H5PL code has to search the table of loaded plugin libraries
     * for this filter.
     */
    if (H5Zunregister(FILTER2_ID) < 0)
        TEST_ERROR;

    /*----------------------------------------------------------
     * STEP 4: Test filter plugin 3 by itself.
     *         (This filter plugin makes HDF5 API calls)
     *----------------------------------------------------------
     */
    HDputs("    dataset writes with filter plugin 3");
    if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR;
    if (H5Pset_chunk(dcpl_id, 2, chunk_sizes_g) < 0)
        TEST_ERROR;

    /* Set the add/subtract value for the filter */
    libver_values[0] = 9;

    /* Get the library bounds and add to the filter data */
    if (H5get_libversion(&libver_values[1], &libver_values[2], &libver_values[3]) < 0)
        TEST_ERROR;
    if (H5Pset_filter(dcpl_id, FILTER3_ID, H5Z_FLAG_MANDATORY, (size_t)4, libver_values) < 0)
        TEST_ERROR;

    /* Ensure the filter works */
    if (ensure_filter_works(fid, DSET_FILTER3_NAME, dcpl_id) < 0)
        TEST_ERROR;

    /* Clean up objects used for this test */
    if (H5Pclose(dcpl_id) < 0)
        TEST_ERROR;

    /* Unregister the dynamic filter for testing purposes. The next time this test is run for
     * the new file format, the library's H5PL code has to search the table of loaded plugin libraries
     * for this filter.
     */
    if (H5Zunregister(FILTER3_ID) < 0)
        TEST_ERROR;

    return SUCCEED;

error:
    /* Clean up objects used for this test */
    H5E_BEGIN_TRY {
        H5Pclose(dcpl_id);
    } H5E_END_TRY

    return FAIL;
} /* end test_dataset_write_with_filters() */
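
All of the plugin-based examples locate their filters through the plugin search path. Besides the HDF5_PLUGIN_PATH environment variable, HDF5 1.10.1 and later can adjust that path programmatically; a small sketch (the directory name is up to the caller):

#include "hdf5.h"

/* Sketch: put an extra directory at the front of the filter plugin search path. */
static herr_t add_plugin_dir(const char *dir)
{
    unsigned n = 0;

    if (H5PLsize(&n) < 0)          /* number of entries currently in the path */
        return -1;
    return H5PLprepend(dir);       /* search this directory before the others */
}
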
Example #13
File: H5ARRAY.c  Project: 87/PyTables
herr_t H5ARRAYmake( hid_t loc_id,
		    const char *dset_name,
		    const char *obversion,
		    const int rank,
		    const hsize_t *dims,
		    int   extdim,
		    hid_t type_id,
		    hsize_t *dims_chunk,
		    void  *fill_data,
		    int   compress,
		    char  *complib,
		    int   shuffle,
		    int   fletcher32,
		    const void *data)
{

 hid_t   dataset_id, space_id;
 hsize_t *maxdims = NULL;
 hid_t   plist_id = 0;
 unsigned int cd_values[6];
 int     chunked = 0;
 int     i;

 /* Check whether the array has to be chunked or not */
 if (dims_chunk) {
   chunked = 1;
 }

 if(chunked) {
   maxdims = malloc(rank*sizeof(hsize_t));
   if(!maxdims) return -1;

   for(i=0;i<rank;i++) {
     if (i == extdim) {
       maxdims[i] = H5S_UNLIMITED;
     }
     else {
       maxdims[i] = dims[i] < dims_chunk[i] ? dims_chunk[i] : dims[i];
     }
   }
 }

 /* Create the data space for the dataset. */
 if ( (space_id = H5Screate_simple( rank, dims, maxdims )) < 0 )
   return -1;

 if (chunked) {
   /* Modify dataset creation properties, i.e. enable chunking  */
   plist_id = H5Pcreate (H5P_DATASET_CREATE);
   if ( H5Pset_chunk ( plist_id, rank, dims_chunk ) < 0 )
     return -1;

   /* Set the fill value using a struct as the data type. */
   if (fill_data) {
       if ( H5Pset_fill_value( plist_id, type_id, fill_data ) < 0 )
	 return -1;
   }
   else {
     if ( H5Pset_fill_time(plist_id, H5D_FILL_TIME_ALLOC) < 0 )
       return -1;
   }

   /*
      Dataset creation property list is modified to set up the filter pipeline.
   */

   /* Fletcher must be first */
   if (fletcher32) {
     if ( H5Pset_fletcher32( plist_id) < 0 )
       return -1;
   }
   /* Then shuffle (not if blosc is activated) */
   if ((shuffle) && (strcmp(complib, "blosc") != 0)) {
     if ( H5Pset_shuffle( plist_id) < 0 )
       return -1;
   }
   /* Finally compression */
   if (compress) {
     cd_values[0] = compress;
     cd_values[1] = (int)(atof(obversion) * 10);
     if (extdim <0)
       cd_values[2] = CArray;
     else
       cd_values[2] = EArray;

     /* The default compressor in HDF5 (zlib) */
     if (strcmp(complib, "zlib") == 0) {
       if ( H5Pset_deflate( plist_id, compress) < 0 )
	 return -1;
     }
     /* The Blosc compressor does accept parameters */
     else if (strcmp(complib, "blosc") == 0) {
       cd_values[4] = compress;
       cd_values[5] = shuffle;
       if ( H5Pset_filter( plist_id, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 6, cd_values) < 0 )
	 return -1;
     }
     /* The LZO compressor does accept parameters */
     else if (strcmp(complib, "lzo") == 0) {
       if ( H5Pset_filter( plist_id, FILTER_LZO, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
	 return -1;
     }
     /* The bzip2 compressor does accept parameters */
     else if (strcmp(complib, "bzip2") == 0) {
       if ( H5Pset_filter( plist_id, FILTER_BZIP2, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
	 return -1;
     }
     else {
       /* Compression library not supported */
       fprintf(stderr, "Compression library not supported\n");
       return -1;
     }
   }

   /* Create the (chunked) dataset */
   if ((dataset_id = H5Dcreate(loc_id, dset_name, type_id,
			       space_id, plist_id )) < 0 )
     goto out;
 }
 else {  			/* Not chunked case */
   /* Create the dataset. */
   if ((dataset_id = H5Dcreate(loc_id, dset_name, type_id,
			       space_id, H5P_DEFAULT )) < 0 )
     goto out;
 }

 /* Write the dataset only if there is data to write */

 if (data)
 {
   if ( H5Dwrite( dataset_id, type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, data ) < 0 )
   goto out;
 }

 /* Terminate access to the data space. */
 if ( H5Sclose( space_id ) < 0 )
  return -1;

 /* End access to the property list */
 if (plist_id)
   if ( H5Pclose( plist_id ) < 0 )
     goto out;

 /* Release resources */
 if (maxdims)
   free(maxdims);

 return dataset_id;

out:
 H5Dclose( dataset_id );
 H5Sclose( space_id );
 if (maxdims)
   free(maxdims);
 if (dims_chunk)
   free(dims_chunk);
 return -1;

}
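
The ordering comments above (Fletcher32 first, then shuffle, then the compressor) can be checked against the finished property list. A small sketch, not part of the PyTables source, using the standard pipeline query calls:

#include <stdio.h>
#include "hdf5.h"

/* Sketch: print the filter pipeline built by H5ARRAYmake, in order. */
static void dump_filter_pipeline(hid_t dcpl)
{
    int i, nfilters = H5Pget_nfilters(dcpl);

    for (i = 0; i < nfilters; i++) {
        unsigned     flags, filter_config;
        unsigned     cd_values[8];
        size_t       cd_nelmts = 8;
        char         name[64];
        H5Z_filter_t id = H5Pget_filter2(dcpl, (unsigned)i, &flags, &cd_nelmts,
                                         cd_values, sizeof(name), name,
                                         &filter_config);
        printf("filter %d: id=%d name=%s optional=%d\n",
               i, (int)id, name, (flags & H5Z_FLAG_OPTIONAL) != 0);
    }
}
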
Example #14
File: plugin.c  Project: schwehr/hdf5
/*-------------------------------------------------------------------------
 * Function:	test_filters_for_datasets
 *
 * Purpose:	Tests creating datasets and writing data with dynamically
 *              loaded filters
 *
 * Return:	Success:	0
 *		Failure:	-1
 *
 * Programmer:	Raymond Lu
 *              14 March 2013
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_filters_for_datasets(hid_t file)
{
    hid_t	dc;                 /* Dataset creation property list ID */
    const hsize_t chunk_size[2] = {FILTER_CHUNK_DIM1, FILTER_CHUNK_DIM2};  /* Chunk dimensions */
    unsigned int         compress_level = 9;

    /*----------------------------------------------------------
     * STEP 1: Test deflation by itself.
     *----------------------------------------------------------
     */
#ifdef H5_HAVE_FILTER_DEFLATE
    puts("Testing deflate filter");
    if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
    if(H5Pset_deflate (dc, 6) < 0) goto error;

    if(test_filter_internal(file,DSET_DEFLATE_NAME,dc) < 0) goto error;
    /* Clean up objects used for this test */
    if(H5Pclose (dc) < 0) goto error;
#else /* H5_HAVE_FILTER_DEFLATE */
    TESTING("deflate filter");
    SKIPPED();
    puts("    Deflate filter not enabled");
#endif /* H5_HAVE_FILTER_DEFLATE */

    /*----------------------------------------------------------
     * STEP 2: Test DYNLIB1 by itself.
     *----------------------------------------------------------
     */
    puts("Testing DYNLIB1 filter");
    if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
    if(H5Pset_filter (dc, H5Z_FILTER_DYNLIB1, H5Z_FLAG_MANDATORY, (size_t)1, &compress_level) < 0) goto error;

    if(test_filter_internal(file,DSET_DYNLIB1_NAME,dc) < 0) goto error;

    /* Clean up objects used for this test */
    if(H5Pclose (dc) < 0) goto error;

    /* Unregister the dynamic filter DYNLIB1 for testing purposes. The next time this test is run for
     * the new file format, the library's H5PL code has to search the table of loaded plugin libraries
     * for this filter. */
    if(H5Zunregister(H5Z_FILTER_DYNLIB1) < 0) goto error;

    /*----------------------------------------------------------
     * STEP 3: Test DYNLIB2 by itself.
     *----------------------------------------------------------
     */
    puts("Testing DYNLIB2 filter");
    if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
    if(H5Pset_filter (dc, H5Z_FILTER_DYNLIB2, H5Z_FLAG_MANDATORY, 0, NULL) < 0) goto error;

    if(test_filter_internal(file,DSET_DYNLIB2_NAME,dc) < 0) goto error;

    /* Clean up objects used for this test */
    if(H5Pclose (dc) < 0) goto error;

    /* Unregister the dynamic filter DYNLIB2 for testing purposes. The next time this test is run for
     * the new file format, the library's H5PL code has to search the table of loaded plugin libraries
     * for this filter. */
    if(H5Zunregister(H5Z_FILTER_DYNLIB2) < 0) goto error;

    return 0;

error:
    return -1;
}
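Whether DYNLIB1 and DYNLIB2 can be found at all depends on the HDF5 plugin search path (the HDF5_PLUGIN_PATH environment variable). After a dataset has been created this way, the filters that actually ended up in its pipeline can be read back from the creation property list. A minimal sketch; the dataset name "dynlib1" is only a placeholder for whatever DSET_DYNLIB1_NAME expands to:

#include "hdf5.h"
#include <stdio.h>

/* Print every filter recorded in a dataset's creation property list. */
static void print_dataset_filters(hid_t file, const char *dset_name)
{
  hid_t dset = H5Dopen2(file, dset_name, H5P_DEFAULT);
  hid_t dcpl = H5Dget_create_plist(dset);
  int   nfilters = H5Pget_nfilters(dcpl);
  int   i;

  for (i = 0; i < nfilters; i++) {
    unsigned int flags = 0, filter_config = 0;
    size_t       cd_nelmts = 0;     /* 0: don't read cd_values back */
    char         name[64];
    H5Z_filter_t filter_id = H5Pget_filter2(dcpl, (unsigned)i, &flags,
                                            &cd_nelmts, NULL, sizeof(name),
                                            name, &filter_config);
    printf("filter %d: id=%d name=%s optional=%d\n", i, (int)filter_id,
           name, (flags & H5Z_FLAG_OPTIONAL) != 0);
  }
  H5Pclose(dcpl);
  H5Dclose(dset);
}

/* e.g. print_dataset_filters(file, "dynlib1");  -- the name is an assumption */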
Example #15
/*-------------------------------------------------------------------------
 * Function:    test_filter_write
 *
 * Purpose:     Tests the library's behavior when a mandatory filter returns
 *              failure.  There are only 5 chunks, each holding 2 integers.
 *              The filter will fail in the last chunk.  The dataset should
 *              release all resources even though the last chunk can't be
 *              flushed to the file.  The file should close successfully.
 *
 * Return:  
 *              Success:         0
 *              Failure:         -1
 *
 * Programmer:  Raymond Lu
 *              25 August 2010
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_filter_write(char *file_name, hid_t my_fapl)
{
    char         filename[1024];
    hid_t        file = -1;
    hid_t        dataset=-1;                /* dataset ID */
    hid_t        sid=-1;                   /* dataspace ID */
    hid_t        dcpl=-1;                  /* dataset creation property list ID */
    hsize_t      dims[1]={DIM};           /* dataspace dimension - 10*/
    hsize_t      chunk_dims[1]={FILTER_CHUNK_DIM}; /* chunk dimension - 2*/
    int          nfilters;              /* number of filters in DCPL */
    unsigned     flags;                 /* flags for filter */
    int          points[DIM];          /* Data */
    int          rbuf[DIM];          /* Data */
    herr_t       ret;                   /* generic return value */
    int          i;

    TESTING("data writing when a mandatory filter fails");

    /* Create file */
    if((file = H5Fcreate(file_name, H5F_ACC_TRUNC, H5P_DEFAULT, my_fapl)) < 0) TEST_ERROR

    /* create the data space */
    if((sid = H5Screate_simple(1, dims, NULL)) < 0) TEST_ERROR

    /* Create dcpl and register the filter */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR

    if(H5Pset_chunk(dcpl, 1, chunk_dims) < 0) TEST_ERROR

    if(H5Zregister (H5Z_FAIL_TEST) < 0) TEST_ERROR

    /* Check that the filter was registered */
    if(TRUE != H5Zfilter_avail(H5Z_FILTER_FAIL_TEST)) FAIL_STACK_ERROR

    /* Enable the filter as mandatory */
    if(H5Pset_filter(dcpl, H5Z_FILTER_FAIL_TEST, 0, (size_t)0, NULL) < 0)
        TEST_ERROR

    /* create a dataset */
    if((dataset = H5Dcreate2(file, DSET_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR 

    /* Initialize the write buffer */
    for(i = 0; i < DIM; i++)
        points[i] = i;

    /* Write data */
    if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, sid, H5P_DEFAULT, points) < 0) TEST_ERROR

    /* clean up objects used for this test */
    if(H5Pclose (dcpl) < 0) TEST_ERROR
    if(H5Sclose (sid) < 0) TEST_ERROR

    /* Dataset closing should fail */
    H5E_BEGIN_TRY {
        ret = H5Dclose (dataset);
    } H5E_END_TRY;
    if(ret >= 0) {
	H5_FAILED();
	puts("    Dataset is supposed to fail because the chunk can't be flushed to file.");
	TEST_ERROR
    }
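The class H5Z_FAIL_TEST registered above and the ID H5Z_FILTER_FAIL_TEST are not defined in this snippet. A minimal sketch of what such a deliberately failing filter class could look like; the filter ID 312 and the failure condition (failing on the chunk that holds the last two values) are assumptions chosen to match the comments above:

#include "hdf5.h"

#define H5Z_FILTER_FAIL_TEST 312          /* test-only filter ID (assumption) */

/* Filter callback: pass chunks through unchanged, but report failure
 * (return 0) for the chunk holding the largest values on the way out. */
static size_t
filter_fail(unsigned int flags, size_t cd_nelmts, const unsigned int *cd_values,
            size_t nbytes, size_t *buf_size, void **buf)
{
  const int *values = (const int *)(*buf);

  (void)cd_nelmts;
  (void)cd_values;

  if (!(flags & H5Z_FLAG_REVERSE) && values[0] >= 8)
    return 0;                             /* signal failure to the library */

  *buf_size = nbytes;
  return nbytes;                          /* data left untouched */
}

const H5Z_class2_t H5Z_FAIL_TEST[1] = {{
  H5Z_CLASS_T_VERS,                       /* H5Z_class_t version           */
  H5Z_FILTER_FAIL_TEST,                   /* filter ID                     */
  1, 1,                                   /* encoder/decoder present flags */
  "fail_test",                            /* filter name for error stacks  */
  NULL,                                   /* can_apply callback            */
  NULL,                                   /* set_local callback            */
  filter_fail,                            /* the filter function itself    */
}};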
Example #16
herr_t H5TBOmake_table( const char *table_title,
                        hid_t loc_id,
                        const char *dset_name,
                        char *version,
                        const char *class_,
                        hid_t type_id,
                        hsize_t nrecords,
                        hsize_t chunk_size,
                        void  *fill_data,
                        int compress,
                        char *complib,
                        int shuffle,
                        int fletcher32,
                        const void *data )
{

 hid_t   dataset_id;
 hid_t   space_id;
 hid_t   plist_id;
 hsize_t dims[1];
 hsize_t dims_chunk[1];
 hsize_t maxdims[1] = { H5S_UNLIMITED };
 unsigned int cd_values[7];
 int     blosc_compcode;
 char    *blosc_compname = NULL;

 dims[0]       = nrecords;
 dims_chunk[0] = chunk_size;

 /* Create a simple data space with unlimited size */
 if ( (space_id = H5Screate_simple( 1, dims, maxdims )) < 0 )
  return -1;

 /* Modify dataset creation properties, i.e. enable chunking  */
 plist_id = H5Pcreate (H5P_DATASET_CREATE);
 if ( H5Pset_chunk ( plist_id, 1, dims_chunk ) < 0 )
  return -1;

 /* Set the fill value using a struct as the data type. */
 if ( fill_data)
   {
     if ( H5Pset_fill_value( plist_id, type_id, fill_data ) < 0 )
       return -1;
   }
 else {
   if ( H5Pset_fill_time(plist_id, H5D_FILL_TIME_ALLOC) < 0 )
     return -1;
 }

 /*
  Dataset creation property list is modified to use filters
  */

 /* Fletcher must be first */
 if (fletcher32) {
   if ( H5Pset_fletcher32( plist_id) < 0 )
     return -1;
 }
 /* Then shuffle (blosc shuffles inplace) */
 if ((shuffle && compress) && (strncmp(complib, "blosc", 5) != 0)) {
   if ( H5Pset_shuffle( plist_id) < 0 )
     return -1;
 }
 /* Finally compression */
 if ( compress )
 {
   cd_values[0] = compress;
   cd_values[1] = (int)(atof(version) * 10);
   cd_values[2] = Table;
   /* The default compressor in HDF5 (zlib) */
   if (strcmp(complib, "zlib") == 0) {
     if ( H5Pset_deflate( plist_id, compress) < 0 )
       return -1;
   }
   /* The Blosc compressor does accept parameters */
   else if (strcmp(complib, "blosc") == 0) {
     cd_values[4] = compress;
     cd_values[5] = shuffle;
     if ( H5Pset_filter( plist_id, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 6, cd_values) < 0 )
       return -1;
   }
   /* The Blosc compressor can use other compressors */
   else if (strncmp(complib, "blosc:", 6) == 0) {
     cd_values[4] = compress;
     cd_values[5] = shuffle;
     blosc_compname = complib + 6;
     blosc_compcode = blosc_compname_to_compcode(blosc_compname);
     cd_values[6] = blosc_compcode;
     if ( H5Pset_filter( plist_id, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 7, cd_values) < 0 )
       return -1;
   }
   /* The LZO compressor does accept parameters */
   else if (strcmp(complib, "lzo") == 0) {
     if ( H5Pset_filter( plist_id, FILTER_LZO, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
       return -1;
   }
   /* The bzip2 compressor does accept parameters */
   else if (strcmp(complib, "bzip2") == 0) {
     if ( H5Pset_filter( plist_id, FILTER_BZIP2, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
       return -1;
   }
   else {
     /* Compression library not supported */
     return -1;
   }

 }

 /* Create the dataset. */
 if ( (dataset_id = H5Dcreate( loc_id, dset_name, type_id, space_id,
                               H5P_DEFAULT, plist_id, H5P_DEFAULT )) < 0 )
  goto out;

 /* Only write if there is something to write */
 if ( data )
 {
   /* Write data to the dataset. */
   if ( H5Dwrite( dataset_id, type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, data ) < 0 )
     goto out;
 }

 /* Terminate access to the data space. */
 if ( H5Sclose( space_id ) < 0 )
  goto out;

 /* End access to the property list */
 if ( H5Pclose( plist_id ) < 0 )
  goto out;

 /* Return the object unique ID for future references */
 return dataset_id;

/* error zone, gracefully close */
out:
 H5E_BEGIN_TRY {
  H5Dclose(dataset_id);
  H5Sclose(space_id);
  H5Pclose(plist_id);
 } H5E_END_TRY;
 return -1;

}
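Based on the signature above, a hypothetical caller would typically pair H5TBOmake_table with a compound datatype describing one record. A minimal sketch, in which the record layout, the dataset name "table" and the "2.7" version string are all assumptions:

#include "hdf5.h"

typedef struct {
  long long id;
  double    value;
} record_t;

/* Create a compressed, chunked table from an in-memory record array. */
static hid_t make_example_table(hid_t file_id, const record_t *recs, hsize_t nrecs)
{
  hid_t type_id = H5Tcreate(H5T_COMPOUND, sizeof(record_t));
  hid_t dset_id;

  H5Tinsert(type_id, "id",    HOFFSET(record_t, id),    H5T_NATIVE_LLONG);
  H5Tinsert(type_id, "value", HOFFSET(record_t, value), H5T_NATIVE_DOUBLE);

  /* zlib level 6, shuffle on, no fletcher32, 1024-record chunks */
  dset_id = H5TBOmake_table("example table", file_id, "table", (char *)"2.7",
                            "TABLE", type_id, nrecs, (hsize_t)1024,
                            NULL /* fill */, 6, (char *)"zlib",
                            1 /* shuffle */, 0 /* fletcher32 */, recs);
  H5Tclose(type_id);
  return dset_id;
}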
Example #17
File: H5VLARRAY.c Project: r0k3/PyTables
herr_t H5VLARRAYmake( hid_t loc_id,
                      const char *dset_name,
                      const char *obversion,
                      const int rank,
                      const hsize_t *dims,
                      hid_t type_id,
                      hsize_t chunk_size,
                      void  *fill_data,
                      int   compress,
                      char  *complib,
                      int   shuffle,
                      int   fletcher32,
                      const void *data)
{

    hvl_t   vldata;
    hid_t   dataset_id, space_id, datatype, tid1;
    hsize_t dataset_dims[1];
    hsize_t maxdims[1] = { H5S_UNLIMITED };
    hsize_t dims_chunk[1];
    hid_t   plist_id;
    unsigned int cd_values[6];

    if (data)
        /* if data, one row will be filled initially */
        dataset_dims[0] = 1;
    else
        /* no data, so no rows in the dataset initially */
        dataset_dims[0] = 0;

    dims_chunk[0] = chunk_size;

    /* Fill the vldata structure with the data to write */
    /* (only used below when initial data is supplied) */
    vldata.p = (void *)data;
    vldata.len = 1;        /* Only one array type to save */

    /* Create a VL datatype */
    if (rank == 0) {
        datatype = H5Tvlen_create(type_id);
    }
    else {
        tid1 = H5Tarray_create(type_id, rank, dims);
        datatype = H5Tvlen_create(tid1);
        H5Tclose( tid1 );   /* Release resources */
    }

    /* The dataspace */
    space_id = H5Screate_simple( 1, dataset_dims, maxdims );

    /* Modify dataset creation properties, i.e. enable chunking  */
    plist_id = H5Pcreate (H5P_DATASET_CREATE);
    if ( H5Pset_chunk ( plist_id, 1, dims_chunk ) < 0 )
        return -1;

    /*
       Dataset creation property list is modified to use filters
    */

    /* Fletcher must be first */
    if (fletcher32) {
        if ( H5Pset_fletcher32( plist_id) < 0 )
            return -1;
    }
    /* Then shuffle (blosc shuffles inplace) */
    if (shuffle && (strcmp(complib, "blosc") != 0)) {
        if ( H5Pset_shuffle( plist_id) < 0 )
            return -1;
    }
    /* Finally compression */
    if (compress) {
        cd_values[0] = compress;
        cd_values[1] = (int)(atof(obversion) * 10);
        cd_values[2] = VLArray;
        /* The default compressor in HDF5 (zlib) */
        if (strcmp(complib, "zlib") == 0) {
            if ( H5Pset_deflate( plist_id, compress) < 0 )
                return -1;
        }
        /* The Blosc compressor does accept parameters */
        else if (strcmp(complib, "blosc") == 0) {
            cd_values[4] = compress;
            cd_values[5] = shuffle;
            if ( H5Pset_filter( plist_id, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 6, cd_values) < 0 )
                return -1;
        }
        /* The LZO compressor does accept parameters */
        else if (strcmp(complib, "lzo") == 0) {
            if ( H5Pset_filter( plist_id, FILTER_LZO, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
                return -1;
        }
        /* The bzip2 compressor does accept parameters */
        else if (strcmp(complib, "bzip2") == 0) {
            if ( H5Pset_filter( plist_id, FILTER_BZIP2, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
                return -1;
        }
        else {
            /* Compression library not supported */
            fprintf(stderr, "Compression library not supported\n");
            return -1;
        }
    }

    /* Create the dataset. */
    if ((dataset_id = H5Dcreate(loc_id, dset_name, datatype, space_id,
                                H5P_DEFAULT, plist_id, H5P_DEFAULT )) < 0 )
        goto out;

    /* Write the dataset only if there is data to write */
    if (data)
        if ( H5Dwrite( dataset_id, datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, &vldata ) < 0 )
            goto out;

    /* Terminate access to the data space. */
    if ( H5Sclose( space_id ) < 0 )
        return -1;

    /* Release the datatype in the case that it is not an atomic type */
    if ( H5Tclose( datatype ) < 0 )
        return -1;

    /* End access to the property list */
    if ( H5Pclose( plist_id ) < 0 )
        goto out;

    return dataset_id;

out:
    /* Error zone: release resources before returning */
    H5E_BEGIN_TRY {
        H5Dclose(dataset_id);
        H5Sclose(space_id);
        H5Tclose(datatype);
        H5Pclose(plist_id);
    } H5E_END_TRY;
    return -1;

}
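A hypothetical caller for the function above, creating an initially empty, zlib-compressed VLArray of doubles (rank 0, so each row is a ragged sequence of scalars); every name and parameter value in this sketch is an assumption:

#include "hdf5.h"

/* Create an empty variable-length array of doubles; rows are appended later. */
static hid_t make_example_vlarray(hid_t file_id)
{
  return H5VLARRAYmake(file_id, "vlarray", "1.0",
                       0,               /* rank 0: scalar base elements   */
                       NULL,            /* no per-element dims for rank 0 */
                       H5T_NATIVE_DOUBLE,
                       (hsize_t)64,     /* chunk size, in rows            */
                       NULL,            /* no fill value                  */
                       5,               /* compression level              */
                       (char *)"zlib",
                       1,               /* shuffle                        */
                       0,               /* no fletcher32                  */
                       NULL);           /* no initial row of data         */
}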