Example #1
File: dangle.c Project: Starlink/hdf5
/*-------------------------------------------------------------------------
 * Function:    test_dangle_dataset
 *
 * Purpose:    Check for dangling dataset IDs causing problems on library
 *              shutdown
 *
 * Return:    Success:    zero
 *        Failure:    non-zero
 *
 * Programmer:    Quincey Koziol
 *              Tuesday, May 13, 2003
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static int
test_dangle_dataset(H5F_close_degree_t degree)
{
    char    filename[1024];
    hid_t fid;  /* File ID */
    hid_t fapl; /* File access property list */
    hid_t dsid; /* Dataset ID */
    hid_t sid;  /* Dataspace ID */
    unsigned u; /* Local index variable */

    TESTING("    dangling dataset IDs");

    if(H5open() < 0)
        TEST_ERROR;

    /* Create file access property list */
    if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0)
        TEST_ERROR;

    /* Set file close degree */
    if(H5Pset_fclose_degree(fapl, degree) < 0)
        TEST_ERROR;

    h5_fixname(FILENAME[0], H5P_DEFAULT, filename, sizeof filename);
    if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
        TEST_ERROR;

    if((sid = H5Screate(H5S_SCALAR)) < 0)
        TEST_ERROR;

    if((dsid = H5Dcreate2(fid, DSETNAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        TEST_ERROR;

    if(H5Dclose(dsid) < 0)
        TEST_ERROR;

    /* Try creating duplicate dataset */
    H5E_BEGIN_TRY {
        if((dsid = H5Dcreate2(fid, DSETNAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) >= 0)
            TEST_ERROR;
    } H5E_END_TRY;

    if(H5Sclose(sid) < 0)
        TEST_ERROR;

    /* Leave open a _lot_ of objects */
    for(u = 0; u < MAX_DANGLE; u++)
        if((dsid = H5Dopen2(fid, DSETNAME, H5P_DEFAULT)) < 0)
            TEST_ERROR;

    if(degree == H5F_CLOSE_SEMI) {
        H5E_BEGIN_TRY {
            if(H5Fclose(fid) >= 0)
                TEST_ERROR;
        } H5E_END_TRY;
    } /* end if */
Example #2
int
main(void)
{
    hid_t   file=-1, dcpl=-1, space=-1, dset1=-1, dset2=-1;
    hsize_t cur_size[2]= {8, 8};
    H5D_space_status_t  allocation;
    int     fill_val1 = 4444, fill_val2=5555;

    if((file=H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) <0) goto error;
    if((space=H5Screate_simple(2, cur_size, cur_size)) < 0) goto error;
    if((dcpl=H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;

    /* Create a dataset with space being allocated and fill value written */
    if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0) goto error;
    if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC) < 0) goto error;
    if(H5Pset_fill_value(dcpl, H5T_NATIVE_INT, &fill_val1) < 0) goto error;
    if((dset1 = H5Dcreate2(file, "dset1", H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
        goto error;
    if(H5Dget_space_status(dset1, &allocation) < 0) goto error;
    if(allocation == H5D_SPACE_STATUS_NOT_ALLOCATED) {
        puts("    Got unallocated space instead of allocated.");
        printf("    Got %d\n", allocation);
        goto error;
    }
    if(H5Dclose(dset1) < 0) goto error;

    /* Create a dataset with space allocation being delayed */
    if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_LATE) < 0) goto error;
    if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC) < 0) goto error;
    if(H5Pset_fill_value(dcpl, H5T_NATIVE_INT, &fill_val2) < 0) goto error;
    if((dset2 = H5Dcreate2(file, "dset2", H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
        goto error;
    if(H5Dget_space_status(dset2, &allocation) < 0) goto error;
    if(allocation != H5D_SPACE_STATUS_NOT_ALLOCATED) {
        puts("    Got allocated space instead of unallocated.");
        printf("    Got %d\n", allocation);
        goto error;
    }
    if(H5Dclose(dset2) < 0) goto error;

    if(H5Sclose(space) < 0) goto error;
    if(H5Pclose(dcpl) < 0) goto error;
    if(H5Fclose(file) < 0) goto error;

    return 0;

error:
    H5E_BEGIN_TRY {
        H5Pclose(dcpl);
        H5Sclose(space);
        H5Dclose(dset1);
        H5Dclose(dset2);
        H5Fclose(file);
    } H5E_END_TRY;
    return 1;
}
Example #3
int saveHdf5(state_type &OUT, state_type &TIME){
    hid_t   hdf_file,hdf_group,hdf_data,dataspace_id;
    herr_t  status;

    fprintf(stdout,"Writing file %s ...",allparams.outfilename);
    hdf_file = H5Fcreate(allparams.outfilename,H5F_ACC_TRUNC,H5P_DEFAULT,H5P_DEFAULT);
    if (hdf_file < 0){
        return -1;
    }

    /*
    if ( (hdf_group=H5Gopen2(hdf_file,"/",H5P_DEFAULT)) < 0){
        H5Fclose(hdf_file);
        return -1;
    }*/
    
    /* Write particle positions and velocities.
     * Ordered in chunks where the first Nstep+1 lines correspond to particle 1,
     * the next Nstep+1 chunk corresponds to particle 2, etc. */
    std::cout << "Writing positions and velocities\n";
    hsize_t dims[1]={OUT.size()};
    dataspace_id=H5Screate_simple(1,dims,NULL);
    if ( (hdf_data=H5Dcreate2(hdf_file,"x",H5T_NATIVE_DOUBLE,dataspace_id,H5P_DEFAULT,H5P_DEFAULT,H5P_DEFAULT)) < 0){
        /* Creation failed: close the handles that are still valid, not the invalid dataset id */
        H5Sclose(dataspace_id);
        H5Fclose(hdf_file);
        return -1;
    }
    status=H5Dwrite(hdf_data, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &OUT[0]);
    /* Release this dataset and its dataspace before the handles are reused below */
    H5Dclose(hdf_data);
    H5Sclose(dataspace_id);

    /*Write times*/
    std::cout << "Writing times\n";
    hsize_t dims2[1]={TIME.size()};
    dataspace_id=H5Screate_simple(1,dims2,NULL);
    if ( (hdf_data=H5Dcreate2(hdf_file,"t",H5T_NATIVE_DOUBLE,dataspace_id,H5P_DEFAULT,H5P_DEFAULT,H5P_DEFAULT)) < 0){
        H5Sclose(dataspace_id);
        H5Fclose(hdf_file);
        return -1;
    }
    status=H5Dwrite(hdf_data, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &TIME[0]);
    H5Dclose(hdf_data);
    H5Sclose(dataspace_id);

    /*Write no. of components*/
    dims2[0] = 1;
    dataspace_id=H5Screate_simple(1,dims2,NULL);
    if ( (hdf_data=H5Dcreate2(hdf_file,"NumComponents",H5T_NATIVE_INT,dataspace_id,H5P_DEFAULT,H5P_DEFAULT,H5P_DEFAULT)) < 0){
        H5Sclose(dataspace_id);
        H5Fclose(hdf_file);
        return -1;
    }
    status=H5Dwrite(hdf_data, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &allparams.NumComponents);


    /* Close the remaining handles: dataset and dataspace first, then the file */
    H5Dclose(hdf_data);
    H5Sclose(dataspace_id);
    H5Fclose(hdf_file);
    fprintf(stdout," file written successfully!\n");
    return 0;
}
Example #4
File: dataset.cpp Project: gcross/HDF
void Dataset::createAndInitialize(
    Location const& location
  , Datatype const& datatype
  , Dataspace const& dataspace
  , optional<void const*> const& optional_data
  , optional<DatasetCreationProperties const&> const& optional_creation_properties
  , optional<DatasetAccessProperties const&> const& optional_access_properties
  , optional<LinkCreationProperties const&> const& optional_link_creation_properties
) { // {{{
    identity =
        make_shared<Identity>(
            assertSuccess(
                "creating dataset",
                H5Dcreate2(
                    location.getParentId(),
                    location.getNameAsCStr(),
                    datatype.getDatatypeId(),
                    dataspace.getId(),
                    getOptionalPropertiesId(optional_link_creation_properties),
                    getOptionalPropertiesId(optional_creation_properties),
                    getOptionalPropertiesId(optional_access_properties)
                )
            ),
            H5Dclose
        );
    if(optional_data) write(*optional_data,datatype);
} // }}}
Example #5
int H5mdfile::H5_Dcreate2(int argc, char **argv, Tcl_Interp *interp)
{
  /* Create the dataset */
    dataset_id = H5Dcreate2(file_id, argv[2], dataset_type_id, dataspace_simple_id, H5P_DEFAULT, prop_id, H5P_DEFAULT);
    dataspace_id = H5Dget_space (dataset_id);
    return TCL_OK;
}
Example #6
/*-------------------------------------------------------------------------
 * Function:    gent_compressed
 *
 * Purpose:     Generate a compressed dataset in LOC_ID
 *
 *-------------------------------------------------------------------------
 */
static void gent_compressed(hid_t loc_id)
{
    hid_t   sid, did, pid;
    hsize_t dims[1] = {6};
    hsize_t chunk_dims[1] = {2};
    int     buf[6]  = {1,2,3,4,5,6};

    /* create dataspace */
    sid = H5Screate_simple(1, dims, NULL);

    /* create property plist for chunk*/
    pid = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(pid, 1, chunk_dims);

    /* set the deflate filter */
#if defined (H5_HAVE_FILTER_DEFLATE)
    H5Pset_deflate(pid, 1);
#endif

    /* create dataset */
    did = H5Dcreate2(loc_id, DATASET_COMPRESSED, H5T_NATIVE_INT, sid, H5P_DEFAULT, pid, H5P_DEFAULT);

    /* write */
    H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

    /* close */
    H5Sclose(sid);
    H5Dclose(did);
    H5Pclose(pid);
}
Example #7
File: istore.c Project: ElaraFX/hdf5
/*-------------------------------------------------------------------------
 * Function:	new_object
 *
 * Purpose:	Creates a new object that refers to a indexed storage of raw
 *		data.  No raw data is stored.
 *
 * Return:	Success:	ID of dataset
 *
 *		Failure:	-1
 *
 * Programmer:	Robb Matzke
 *		Wednesday, October 15, 1997
 *
 * Modifications:
 *              Converted to use datasets instead of directly messing with
 *              the istore routines, etc. since the new raw data architecture
 *              performs hyperslab operations at a higher level than the
 *              istore routines did and the new istore routines can't handle
 *              I/O on more than one chunk at a time. QAK - 2003/04/16
 *
 *-------------------------------------------------------------------------
 */
static hid_t
new_object(hid_t f, const char *name, int ndims, hsize_t dims[], hsize_t cdims[])
{
    hid_t dataset;      /* Dataset ID */
    hid_t space;        /* Dataspace ID */
    hid_t dcpl;         /* Dataset creation property list ID */

    /* Create the dataset creation property list */
    if ((dcpl=H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR;

    /* Set the chunk dimensions */
    if(H5Pset_chunk(dcpl, ndims, cdims) < 0) TEST_ERROR;

    /* Create the dataspace */
    if((space = H5Screate_simple(ndims, dims, NULL)) < 0) TEST_ERROR;

    /* Create the dataset */
    if((dataset = H5Dcreate2(f, name, TEST_DATATYPE, space, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Clean up */

    /* Close property lists */
    if(H5Pclose(dcpl) < 0) TEST_ERROR;

    /* Close dataspace */
    if(H5Sclose(space) < 0) TEST_ERROR;

    return dataset;

error:
    return -1;
}
Example #8
File: perf_meta.c Project: Starlink/hdf5
/*-------------------------------------------------------------------------
 * Function:	create_dsets
 *
 * Purpose:	Attempts to create some datasets.
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:	Raymond Lu
 *		Friday, Oct 3, 2003
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static herr_t
create_dsets(hid_t file)
{
    hid_t	dataset;
    char	dset_name[32];
    int		i;

    /*
     * Create a dataset using the default dataset creation properties.
     */
    for(i = 0; i < NUM_DSETS; i++) {
	sprintf(dset_name, "dataset %d", i);
    	if((dataset = H5Dcreate2(file, dset_name, H5T_NATIVE_DOUBLE, space,
                H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
            goto error;

    	if(H5Dclose(dataset) < 0)
            goto error;
    } /* end for */

    return 0;

error:
    return -1;

}
Example #9
void *tts_dcreate_creator(void *_thread_data)
{
    hid_t   dataspace, dataset;
    herr_t  ret;
    hsize_t dimsf[1]; /* dataset dimensions */
    struct thread_info thread_data;

    memcpy(&thread_data, _thread_data, sizeof(struct thread_info));

    /* define dataspace for dataset */
    dimsf[0] = 1;
    dataspace = H5Screate_simple(1, dimsf, NULL);
    assert(dataspace >= 0);

    /* create a new dataset within the file */
    dataset = H5Dcreate2(thread_data.file, thread_data.dsetname,
                        H5T_NATIVE_INT, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    assert(dataset >= 0);

    /* initialize data for dataset and write value to dataset */
    ret = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
             H5P_DEFAULT, &thread_data.id);
    assert(ret >= 0);

    /* close dataset and dataspace resources */
    ret = H5Dclose(dataset);
    assert(ret >= 0);
    ret = H5Sclose(dataspace);
    assert(ret >= 0);

    return NULL;
}
Example #10
File: H5Df.c Project: ElaraFX/hdf5
/****if* H5Df/h5dcreate_c
 * NAME
 *  h5dcreate_c
 * PURPOSE
 *  Call H5Dcreate2 to create a dataset
 * INPUTS
 *  loc_id   - file or group identifier
 *  name     - name of the dataset
 *  namelen  - name length
 *  type_id  - datatype identifier
 *  space_id - dataspace identifier
 *  crt_pr   - identifier of creation property list
 * OUTPUTS
 *  dset_id  - dataset identifier
 * RETURNS
 *  0 on success, -1 on failure
 * AUTHOR
 *  Elena Pourmal
 *  Wednesday, August 4, 1999
 * HISTORY
 *   - Added optional parameters introduced in version 1.8
 *     February, 2008
 * SOURCE
*/
int_f
h5dcreate_c (hid_t_f *loc_id, _fcd name, int_f *namelen, hid_t_f *type_id, hid_t_f *space_id,
	      hid_t_f *lcpl_id, hid_t_f *dcpl_id, hid_t_f *dapl_id, hid_t_f *dset_id)
/******/
{
     char *c_name = NULL;
     hid_t c_dset_id;
     int ret_value = -1;

     /*
      * Convert FORTRAN name to C name
      */
     if(NULL == ( c_name = (char *)HD5f2cstring(name, (size_t)*namelen)))
         goto DONE;

     /*
      * Call H5Dcreate2 function.
      */
     if((c_dset_id = H5Dcreate2((hid_t)*loc_id, c_name, (hid_t)*type_id, (hid_t)*space_id,
				(hid_t)*lcpl_id, (hid_t)*dcpl_id, (hid_t)*dapl_id)) < 0)
         goto DONE;
     *dset_id = (hid_t_f)c_dset_id;

     ret_value = 0;

DONE:
    if(c_name)
        HDfree(c_name);
    return ret_value;
}
Example #11
/*-------------------------------------------------------------------------
 * Function:	test_1c
 *
 * Purpose:	Test a single external file which is large enough to
 *		represent the current data and large enough to represent the
 *		eventual size of the data.
 *
 * Return:	Success:	0
 *
 *		Failure:	number of errors
 *
 * Programmer:	Robb Matzke
 *              Monday, November 23, 1998
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static int
test_1c(hid_t file)
{
    hid_t	dcpl=-1;		/*dataset creation properties	*/
    hid_t	space=-1;		/*data space			*/
    hid_t	dset=-1;		/*dataset			*/
    hsize_t	cur_size[1];		/*current data space size       */
    hsize_t	max_size[1];		/*maximum data space size	*/

    TESTING("extendible dataspace, exact external size");

    if((dcpl = H5Pcreate (H5P_DATASET_CREATE)) < 0) goto error;
    cur_size[0] = 100;
    max_size[0] = 200;
    if(H5Pset_external(dcpl, "ext1.data", (off_t)0,
	(hsize_t)(max_size[0] * sizeof(int))) < 0) goto error;
    if((space = H5Screate_simple(1, cur_size, max_size)) < 0) goto error;
    if((dset = H5Dcreate2(file, "dset3", H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
	goto error;
    if(H5Dclose(dset) < 0) goto error;
    if(H5Sclose(space) < 0) goto error;
    if(H5Pclose(dcpl) < 0) goto error;

    PASSED();
    return 0;

error:
    H5E_BEGIN_TRY {
	H5Dclose(dset);
	H5Pclose(dcpl);
	H5Sclose(space);
    } H5E_END_TRY;
    return 1;
}
Example #12
/*-------------------------------------------------------------------------
 * Function:    gent_compound
 *
 * Purpose:     Generate a compound dataset in LOC_ID
 *
 *-------------------------------------------------------------------------
 */
static void gent_compound(hid_t loc_id)
{
    typedef struct s_t
    {
        char str1[20];
        char str2[20];
    } s_t;
    hid_t   sid, did, tid_c, tid_s;
    hsize_t dims[1] = {2};
    s_t     buf[2]  = {{"str1", "str2"}, {"str3", "str4"}};

    /* create dataspace */
    sid = H5Screate_simple(1, dims, NULL);

    /* create a compound type */
    tid_c = H5Tcreate(H5T_COMPOUND, sizeof(s_t));
    tid_s = H5Tcopy(H5T_C_S1);
    H5Tset_size(tid_s, 20);

    H5Tinsert(tid_c, "str1", HOFFSET(s_t,str1), tid_s);
    H5Tinsert(tid_c, "str2", HOFFSET(s_t,str2), tid_s);

    /* create dataset */
    did = H5Dcreate2(loc_id, DATASET_COMPOUND, tid_c, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    /* write */
    H5Dwrite(did, tid_c, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

    /* close */
    H5Sclose(sid);
    H5Dclose(did);
    H5Tclose(tid_c);
    H5Tclose(tid_s);
}
Example #13
asynStatus NDFileHDF5AttributeDataset::createHDF5Dataset()
{
  asynStatus status = asynSuccess;

  cparm_ = H5Pcreate(H5P_DATASET_CREATE);

  H5Pset_fill_value(cparm_, datatype_, ptrFillValue_);

  H5Pset_chunk(cparm_, rank_, chunk_);

  dataspace_ = H5Screate_simple(rank_, dims_, maxdims_);

  // Open the group by its name
  hid_t dsetgroup;

  if (groupName_ != ""){
    dsetgroup = H5Gopen(file_, groupName_.c_str(), H5P_DEFAULT);
  } else {
    dsetgroup = file_;
  }

  // Now create the dataset
  dataset_ = H5Dcreate2(dsetgroup, dsetName_.c_str(),
                        datatype_, dataspace_,
                        H5P_DEFAULT, cparm_, H5P_DEFAULT);

  if (groupName_ != ""){
    H5Gclose(dsetgroup);
  }

  memspace_ = H5Screate_simple(rank_, elementSize_, NULL);

  return status;
}
Example #14
/*-------------------------------------------------------------------------
 * Function:    gent_chunked
 *
 * Purpose:     Generate a chunked dataset in LOC_ID
 *
 *-------------------------------------------------------------------------
 */
static void gent_chunked(hid_t loc_id)
{
    hid_t   sid, did, pid;
    hsize_t dims[1] = {6};
    hsize_t chunk_dims[1] = {2};
    int     buf[6]  = {1,2,3,4,5,6};

    /* create dataspace */
    sid = H5Screate_simple(1, dims, NULL);

    /* create property plist */
    pid = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(pid, 1, chunk_dims);

    /* create dataset */
    did = H5Dcreate2(loc_id, DATASET_CHUNK, H5T_NATIVE_INT, sid, H5P_DEFAULT, pid, H5P_DEFAULT);

    /* write */
    H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

    /* close */
    H5Sclose(sid);
    H5Dclose(did);
    H5Pclose(pid);
}
Example #15
/*
Write an existing noise correlation object to disk.
If filename is already a file, fail.
*/
void WriteNoiseCorrelations(std::string filename, const NoiseCorrelations& noise)
{
  hid_t fileID = H5Fcreate(filename.c_str(), H5F_ACC_EXCL, H5P_DEFAULT, H5P_DEFAULT);

  // Write a version number of 1.
  {
    unsigned char version = 1;
    hid_t scalarID = H5Screate(H5S_SCALAR);
    hid_t attID = H5Acreate2(fileID, "version", H5T_STD_U8LE, scalarID, H5P_DEFAULT, H5P_DEFAULT);
    H5Awrite(attID, H5T_NATIVE_UCHAR, reinterpret_cast<void*>(&version));
    H5Aclose(attID);
    H5Sclose(scalarID);
  }

  // Write the channel index as an ordered list of included channels.
  HDF5Helper::WriteMapAsAttribute(noise.GetNoiseBlockIndex().MinorIndex(),
                                  fileID,
                                  "channel_list");

  // Write the actual noise information.
  // We choose to write one dataset per frequency, since those are stored in memory as separate arrays.
  // However, we need to manually pick out only the entries which contain non-redundant information.
  std::vector<double> PackedArray; // Reuse rather than re-allocating each time.
  for(size_t f = 0; f < 1024; f++) {
    const NoiseMatrix& mat = noise.GetMatrixForIndex(f);
    const NoiseCorrelations::NoiseBlockIndexT& NoiseBlockIndex = noise.GetNoiseBlockIndex();
    assert(NoiseBlockIndex.MaxIndex() % 2 == 0 and NoiseBlockIndex.MaxIndex() > 0);

    // Create the name for this dataset.
    std::ostringstream strstream;
    strstream << "/noise_corr_" << std::setfill('0') << std::setw(4) << f;
    std::string dataset_name = strstream.str();

    // Allocate space in the temporary vector.
    hsize_t ExpectedSize = ExpectedPackedSize(f, NoiseBlockIndex.MaxIndex());
    PackedArray.resize(0);
    PackedArray.reserve(NoiseBlockIndex.MaxIndex()*NoiseBlockIndex.MaxIndex()/4);

    // Fill PackedArray. Take into account all appropriate symmetries.
    for(size_t i = 0; i < NoiseBlockIndex.MaxIndex(); i++) {
      for(size_t j = i; j < NoiseBlockIndex.MaxIndex(); j++) {
        if(not IncludeEntryInPackedArray(f, i, j, NoiseBlockIndex)) continue;
        PackedArray.push_back(mat.GetCorrByIndex(i, j));
      }
    }
    assert(PackedArray.size() == ExpectedSize);

    // Write the array to file.
    hid_t vectorID = H5Screate_simple(1, &ExpectedSize, NULL);
    hid_t datasetID = H5Dcreate2(fileID, dataset_name.c_str(), H5T_IEEE_F64LE, vectorID, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    H5Dwrite(datasetID, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
             reinterpret_cast<void*>(&PackedArray[0]));
    H5Dclose(datasetID);
    H5Sclose(vectorID);
  }

  assert(H5Fget_obj_count(fileID, H5F_OBJ_ALL) == 1); // The file should be the only object left.
  H5Fclose(fileID);
}
Example #16
 int64_t GWriteHDFFile::WriteBlock(std::string BlockName, int type, void *data, int partlen, uint32_t np_write, uint32_t begin)
 {
           herr_t herr;
           hid_t handle = H5Fopen(filename.c_str(), H5F_ACC_RDWR, H5P_DEFAULT);
           hid_t group = H5Gopen2(handle, g_name[type], H5P_DEFAULT);
           if(group < 0)
               return group;
           hsize_t size[2];
           int rank=1;
           //Get type
           char b_type = get_block_type(BlockName);
           hid_t dtype;
           if(b_type == 'f') {
               size[1] = partlen/sizeof(float);
               dtype=H5T_NATIVE_FLOAT;
           }else if (b_type == 'i') {
               size[1] = partlen/sizeof(int64_t);
               //Hopefully this is 64 bits; the HDF5 manual is not clear.
               dtype = H5T_NATIVE_LLONG;
           }
           else{
               return -1000;
           }
           if (size[1] > 1) {
                   rank = 2;
           }
           /* I don't totally understand why the below works (it is not clear to me from the documentation).
            * I gleaned it from a posting to the HDF5 mailing list and a related stack overflow thread here:
            * http://stackoverflow.com/questions/24883461/hdf5-updating-a-cell-in-a-table-of-integers
            * http://lists.hdfgroup.org/pipermail/hdf-forum_lists.hdfgroup.org/2014-July/007966.html
            * The important thing seems to be that we have a dataspace for the whole array and create a hyperslab on that dataspace.
            * Then we need another dataspace with the size of the stuff we want to write.*/
           //Make space in memory for the whole array
           //Create a hyperslab that we will write to
           size[0] = npart[type];
           hid_t full_space_id = H5Screate_simple(rank, size, NULL);
           //If this is the first write, create the dataset
           if (begin==0) {
               H5Dcreate2(group,BlockName.c_str(),dtype, full_space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
           }
           hid_t dset = H5Dopen2(group,BlockName.c_str(),H5P_DEFAULT);
           if (dset < 0)
               return dset;
           size[0] = np_write;
           hid_t space_id = H5Screate_simple(rank, size, NULL);
           hsize_t begins[2]={begin,0};
           //Select the hyperslab of elements we are about to write to
           H5Sselect_hyperslab(full_space_id, H5S_SELECT_SET, begins, NULL, size, NULL);
           /* Write to the dataset */
           herr = H5Dwrite(dset, dtype, space_id, full_space_id, H5P_DEFAULT, data);
           H5Dclose(dset);
           H5Sclose(space_id);
           H5Sclose(full_space_id);
           H5Gclose(group);
           H5Fclose(handle);
           if (herr < 0)
               return herr;
           return np_write;
 }
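The long comment in the example above spells out the pattern it relies on: one dataspace describes the whole on-disk array, a hyperslab is selected on it for the region being written, and a second dataspace describes only the piece held in memory. The stand-alone sketch below distills that same sequence; the file name, dataset name and sizes are illustrative assumptions, not taken from the project above.

#include <hdf5.h>

/* Sketch: write np values starting at offset `begin` into a 1-D dataset of
 * total length ntotal.  "snapshot.h5" and "block" are placeholder names. */
static herr_t append_slab(hsize_t ntotal, hsize_t begin, hsize_t np, const double *data)
{
    hid_t file = H5Fopen("snapshot.h5", H5F_ACC_RDWR, H5P_DEFAULT);
    if (file < 0) return -1;

    /* Dataspace describing the whole on-disk array */
    hid_t full_space = H5Screate_simple(1, &ntotal, NULL);

    /* Create the dataset on the first call, otherwise just open it */
    hid_t dset;
    if (begin == 0)
        dset = H5Dcreate2(file, "block", H5T_NATIVE_DOUBLE, full_space,
                          H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    else
        dset = H5Dopen2(file, "block", H5P_DEFAULT);

    /* Memory dataspace sized for just the piece being written */
    hid_t mem_space = H5Screate_simple(1, &np, NULL);

    /* Select the target region in the file dataspace */
    H5Sselect_hyperslab(full_space, H5S_SELECT_SET, &begin, NULL, &np, NULL);

    herr_t status = H5Dwrite(dset, H5T_NATIVE_DOUBLE, mem_space, full_space,
                             H5P_DEFAULT, data);

    H5Dclose(dset);
    H5Sclose(mem_space);
    H5Sclose(full_space);
    H5Fclose(file);
    return status;
}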
Example #17
File: file.c Project: chrismullins/moab
void
mhdf_writeHistory( mhdf_FileHandle file_handle, 
                   const char** strings, 
                   int num_strings,
                   mhdf_Status* status )
{
  FileHandle* file_ptr;
  hid_t data_id, type_id, space_id;
  hsize_t dim = (hsize_t)num_strings;
  herr_t rval;
  API_BEGIN;
  
  file_ptr = (FileHandle*)(file_handle);
  if (!mhdf_check_valid_file( file_ptr, status ))
    return;
    
  type_id = H5Tcopy( H5T_C_S1 );
  if (type_id < 0 || H5Tset_size( type_id, H5T_VARIABLE ) < 0)
  {
    if (type_id >= 0) H5Tclose(type_id);
    mhdf_setFail( status, "Could not create variable length string type." );
    return;
  }
  
  space_id = H5Screate_simple( 1, &dim, NULL );
  if (space_id < 0)
  {
    H5Tclose( type_id );
    mhdf_setFail( status, "H5Screate_simple failed." );
    return;
  }
  
#if defined(H5Dcreate_vers) && H5Dcreate_vers > 1
  data_id = H5Dcreate2( file_ptr->hdf_handle, HISTORY_PATH, type_id, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT );
#else
  data_id = H5Dcreate( file_ptr->hdf_handle, HISTORY_PATH, type_id, space_id, H5P_DEFAULT );
#endif
  H5Sclose( space_id );
  if (data_id < 0)
  {
    H5Tclose( type_id );
    mhdf_setFail( status, "Failed to create \"%s\".", HISTORY_PATH );
    return;
  }
    
  rval = H5Dwrite( data_id, type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, strings );
  H5Dclose( data_id );
  H5Tclose( type_id );
  if (rval < 0)
  {
    H5Gunlink( file_ptr->hdf_handle, HISTORY_PATH );
    mhdf_setFail( status, "Failed to write history data." );
    return;
  }
  
  mhdf_setOkay( status );
  API_END;
}
Example #18
NDFileHDF5Dataset *createTestDataset(int rank, int *max_dim_size, asynUser *pasynUser, hid_t groupID, const std::string& dsetname)
{
  // Add the test dataset.
  hid_t datasetID = -1;

  hid_t dset_access_plist = H5Pcreate(H5P_DATASET_ACCESS);
  hsize_t nbytes = 1024;
  hsize_t nslots = 50001;
  hid_t datatype = H5T_NATIVE_INT8;
  hsize_t dims[rank];
  for (int i=0; i < rank-2; i++) dims[i] = 1;
  for (int i=rank-2; i < rank; i++) dims[i] = max_dim_size[i];
  hsize_t maxdims[rank];
  for (int i=0; i < rank; i++) maxdims[i] = max_dim_size[i];
  hsize_t chunkdims[rank];
  for (int i=0; i < rank-2; i++) chunkdims[i] = 1;
  for (int i=rank-2; i < rank; i++) chunkdims[i] = max_dim_size[i];
  //hid_t dataspace = H5Screate_simple(rank, dims, maxdims);
  dataspace = H5Screate_simple(rank, dims, maxdims);
  hid_t cparms = H5Pcreate(H5P_DATASET_CREATE);
  H5Pset_chunk(cparms, rank, chunkdims);
  void *ptrFillValue = (void*)calloc(8, sizeof(char));
  *(char *)ptrFillValue = (char)0;
  H5Pset_fill_value(cparms, datatype, ptrFillValue);

  H5Pset_chunk_cache(dset_access_plist, (size_t)nslots, (size_t)nbytes, 1.0);
  datasetID = H5Dcreate2(groupID, dsetname.c_str(), datatype, dataspace, H5P_DEFAULT, cparms, dset_access_plist);

  // Now create a dataset
  NDFileHDF5Dataset *dataset = new NDFileHDF5Dataset(pasynUser, dsetname, datasetID);
  int extraDims = rank-2;
  int extra_dims[extraDims];
  for (int i=0; i < extraDims; i++) extra_dims[i] = max_dim_size[i];
  int user_chunking[extraDims];
  for (int i=0; i < extraDims; i++) user_chunking[i] = 1;

  // Create a test array
  NDArrayInfo_t arrinfo;
  parr = new NDArray();
  parr->dataType = NDInt8;
  parr->ndims = 2;
  parr->pNDArrayPool = NULL;
  parr->getInfo(&arrinfo);
  parr->dataSize = arrinfo.bytesPerElement;
  for (unsigned int i = 0; i < 2; i++){
    unsigned int dim_index = rank-(i+1);
    parr->dataSize *= max_dim_size[dim_index];
    parr->dims[i].size = max_dim_size[dim_index];
  }
  parr->pData = calloc(parr->dataSize, sizeof(char));
  memset(parr->pData, 0, parr->dataSize);
  parr->uniqueId = 0;

  dataset->configureDims(parr, true, extraDims, extra_dims, user_chunking);

  return dataset;
}
Example #19
void OHDF5mpipp::registerHDF5DataSet(HDF5DataSet& dataset, char* name)
{
  //hsize_t dimsext[2] = {1,1}; 
  //dataset.memspace = H5Screate_simple (RANK, dimsext, NULL);
  
  int chunk_size = buf_size/dataset.sizeof_entry;
  
  std::cout << "chunk_size=" << chunk_size << std::endl;
  std::cout << "dataset.all_window_size=" << dataset.all_window_size << std::endl;
  
  hsize_t maxdims[2]={H5S_UNLIMITED,1};
  hsize_t dims[2]={dataset.all_window_size, 1};
  hsize_t chunk_dims[2]={5*chunk_size,1};			//numberOfValues is too small
  /* Create the data space with unlimited dimensions. */
  
  dataset.plist_id = H5Pcreate(H5P_DATASET_XFER);
  if (logger_type==nestio::Standard || logger_type==nestio::Buffered)
    H5Pset_dxpl_mpio(dataset.plist_id, H5FD_MPIO_INDEPENDENT);
  else
    H5Pset_dxpl_mpio(dataset.plist_id, H5FD_MPIO_COLLECTIVE);
  
  //hid_t filespace=H5Screate_simple (RANK, dims, maxdims);
  dataset.filespace=H5Screate_simple (RANK, dims, maxdims);
  
  /* Modify dataset creation properties, i.e. enable chunking  */
  
  hid_t prop=H5Pcreate (H5P_DATASET_CREATE);
  status = H5Pset_chunk (prop, RANK, chunk_dims);
  /*
     * Create the compound datatype for the file.  Because the standard
     * types we are using for the file may have different sizes than
     * the corresponding native types, we must manually calculate the
     * offset of each member.
     */

  hid_t filetype = H5Tcreate (H5T_COMPOUND, 3*8+dataset.max_numberOfValues*8);
  status = H5Tinsert (filetype, "id", 0, H5T_STD_I64BE);
  status = H5Tinsert (filetype, "neuron id", 8, H5T_STD_I64BE);
  status = H5Tinsert (filetype, "timestamp", 16, H5T_STD_I64BE);
  for (int i=0; i<dataset.max_numberOfValues; i++) {
    std::stringstream ss;
    ss << "V" << i;
    status = H5Tinsert (filetype, ss.str().c_str(), 24+i*8, H5T_IEEE_F64BE); //third argument: offset
  }

  /* Create a new dataset within the file using chunk 
      creation properties.  */
  
  std::cout << "H5Dcreate2 name=" << name << " max_numberOfValues=" << dataset.max_numberOfValues << std::endl;

  dataset.dset_id=H5Dcreate2 (file, name, filetype, dataset.filespace,
	    H5P_DEFAULT, prop, H5P_DEFAULT);
  
  status = H5Pclose(prop);
  status = H5Tclose(filetype);
  //status = H5Sclose (filespace);
}
Example #20
void NSDFWriter::createEventMap()
{
    herr_t status;    
    hid_t eventMapContainer = require_group(filehandle_, MAPEVENTSRC);
    // Open the container for the event maps
    // Create the Datasets themselves (one for each field - each row
    // for one object).
    for (map< string, vector < string > >::iterator ii = classFieldToEventSrc_.begin();
         ii != classFieldToEventSrc_.end();
         ++ii){
        vector < string > pathTokens;
        tokenize(ii->first, "/", pathTokens);
        string className = pathTokens[0];
        string fieldName = pathTokens[1];
        hid_t classGroup = require_group(eventMapContainer, className);
        hid_t strtype = H5Tcopy(H5T_C_S1);
        status = H5Tset_size(strtype, H5T_VARIABLE);
        // create file space
        hid_t ftype = H5Tcreate(H5T_COMPOUND, sizeof(hvl_t) +sizeof(hobj_ref_t));
        status = H5Tinsert(ftype, "source", 0, strtype);
        status = H5Tinsert(ftype, "data", sizeof(hvl_t), H5T_STD_REF_OBJ);
        hsize_t dims[1] = {ii->second.size()};
        hid_t space = H5Screate_simple(1, dims, NULL);
        // The dataset for mapping is named after the field
        hid_t ds = H5Dcreate2(classGroup, fieldName.c_str(), ftype, space,
                              H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        status = H5Sclose(space);
        map_type * buf = (map_type*)calloc(ii->second.size(), sizeof(map_type));
        // Populate the buffer entries with source uid and data
        // reference
        for (unsigned int jj = 0; jj < ii->second.size(); ++jj){
            buf[jj].source = ii->second[jj].c_str();
            char * dsname = (char*)calloc(256, sizeof(char));
            ssize_t size = H5Iget_name(classFieldToEvent_[ii->first][jj], dsname, 255);
            if (size > 255){
                free(dsname);
                dsname = (char*)calloc(size, sizeof(char));
                size = H5Iget_name(classFieldToEvent_[ii->first][jj], dsname, 255);
            }
            status = H5Rcreate(&(buf[jj].data), filehandle_, dsname, H5R_OBJECT, -1);
            free(dsname);
            assert(status >= 0);            
        }
        // create memory space
        hid_t memtype = H5Tcreate(H5T_COMPOUND, sizeof(map_type));
        status = H5Tinsert(memtype, "source",
                           HOFFSET(map_type, source), strtype);
        status = H5Tinsert(memtype, "data",
                           HOFFSET(map_type, data), H5T_STD_REF_OBJ);
        status = H5Dwrite(ds, memtype,  H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);
        free(buf);
        status = H5Tclose(strtype);
        status = H5Tclose(ftype);
        status = H5Tclose(memtype);
        status = H5Dclose(ds);
    }
}
Example #21
void Foam::h5Write::meshWritePoints()
{   
    Info<< "  meshWritePoints" << endl;
    
    const pointField& points = mesh_.points();
    
    
    // Find out how many points each process has
    List<label> nPoints(Pstream::nProcs());
    nPoints[Pstream::myProcNo()] = points.size();
    Pstream::gatherList(nPoints);
    Pstream::scatterList(nPoints);
    
    
    // Create the different datasets (needs to be done collectively)
    char datasetName[80];
    hsize_t dimsf[2];
    hid_t fileSpace;
    hid_t dsetID;
    hid_t plistID;
    
    forAll(nPoints, proc)
    {
        
        // Create the dataspace for the dataset
        dimsf[0] = nPoints[proc];
        dimsf[1] = 3;
        fileSpace = H5Screate_simple(2, dimsf, NULL);
        
        // Set property to create parent groups as neccesary
        plistID = H5Pcreate(H5P_LINK_CREATE);
        H5Pset_create_intermediate_group(plistID, 1);
        
        // Create the dataset for points
        sprintf
            (
                datasetName,
                "MESH/%s/processor%i/POINTS",
                mesh_.time().timeName().c_str(),
                proc
            );
        
        dsetID = H5Dcreate2
            (
                fileID_,
                datasetName,
                H5T_SCALAR,
                fileSpace,
                plistID,
                H5P_DEFAULT,
                H5P_DEFAULT
            );
        H5Dclose(dsetID);
        H5Pclose(plistID);
        H5Sclose(fileSpace);
    }
Example #22
/*-------------------------------------------------------------------------
 * Function:	main
 *
 * Purpose:
 *
 * Return:	Success:
 *
 *		Failure:
 *
 * Programmer:	Quincey Koziol
 *              Thursday, November 14, 2002
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
int
main(void)
{
    hid_t	file, space, dset, dcpl;
    hsize_t	dims[SPACE_RANK]={SPACE_DIM1,SPACE_DIM2};
    hsize_t	chunk_dims[SPACE_RANK]={CHUNK_DIM1,CHUNK_DIM2};
    size_t      i,j;            /* Local index variables */

    /* Initialize the data */
    /* (Try for something easily compressible) */
    for(i=0; i<SPACE_DIM1; i++)
        for(j=0; j<SPACE_DIM2; j++)
            data[i][j] = (int)(j % 5);

    /* Create the file */
    file = H5Fcreate(TESTFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    if(file<0)
        printf("file<0!\n");

    /* Create the dataspace */
    space = H5Screate_simple(SPACE_RANK, dims, NULL);
    if(space<0)
        printf("space<0!\n");

    /* Create the dataset creation property list */
    dcpl = H5Pcreate(H5P_DATASET_CREATE);
    if(dcpl<0)
        printf("dcpl<0!\n");

    /* Set up for deflated data */
    if(H5Pset_chunk(dcpl, 2, chunk_dims)<0)
        printf("H5Pset_chunk() failed!\n");
    if(H5Pset_deflate(dcpl, 9)<0)
        printf("H5Pset_deflate() failed!\n");

    /* Create the compressed dataset */
    dset = H5Dcreate2(file, "Dataset1", H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    if(dset<0)
        printf("dset<0!\n");

    /* Write the data to the dataset */
    if(H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data)<0)
        printf("H5Dwrite() failed!\n");

    /* Close everything */
    if(H5Pclose(dcpl)<0)
        printf("H5Pclose() failed!\n");
    if(H5Dclose(dset)<0)
        printf("H5Dclose() failed!\n");
    if(H5Sclose(space)<0)
        printf("H5Sclose() failed!\n");
    if(H5Fclose(file)<0)
        printf("H5Fclose() failed!\n");

    return 0;
}
Example #23
hdf5_dataset::hdf5_dataset
(
    hdf5_file &file,
    std::string const& path,
    hdf5_datatype const& datatype,
    hdf5_dataspace const& dataspace
)
    :
      path_(path)
{
    // Check if name exists in this file.
    htri_t status = H5Lexists(file.get_id(), path.c_str(), H5P_DEFAULT);
    if(status > 0) { // Full path exists.
        // Attempt to open it as a dataset
        set_id(H5Dopen2(file.get_id(), path.c_str(), H5P_DEFAULT));
        if(get_id() < 0) {
            boost::serialization::throw_exception(
                hdf5_archive_exception(
                    hdf5_archive_exception::hdf5_archive_dataset_access_error,
                    path.c_str()
                )
            );
        }
    }
    else if(status == 0){ // Final link in path does not exist.
        // Create the dataset.
        set_id(H5Dcreate2(
                   file.get_id(),
                   path.c_str(),
                   datatype.get_id(),
                   dataspace.get_id(),
                   H5P_DEFAULT,
                   H5P_DEFAULT,
                   H5P_DEFAULT
                )
        );
        if(get_id() < 0) {
            boost::serialization::throw_exception(
                hdf5_archive_exception(
                    hdf5_archive_exception::hdf5_archive_dataset_create_error,
                    path.c_str()
                )
            );
        }
    }
    else { // intermediate link does not exist, or other error
        boost::serialization::throw_exception(
            hdf5_archive_exception(
                hdf5_archive_exception::hdf5_archive_bad_path_error,
                path.c_str()
            )
        );
    }
}
Example #24
void HDF5Output::open(const std::string& filename) {
	file = H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

	sid = H5Tcreate(H5T_COMPOUND, sizeof(OutputRow));
	H5Tinsert(sid, "D", HOFFSET(OutputRow, D), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "z", HOFFSET(OutputRow, z), H5T_NATIVE_DOUBLE);

	H5Tinsert(sid, "SN", HOFFSET(OutputRow, SN), H5T_NATIVE_UINT64);
	H5Tinsert(sid, "ID", HOFFSET(OutputRow, ID), H5T_NATIVE_INT32);
	H5Tinsert(sid, "E", HOFFSET(OutputRow, E), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "X", HOFFSET(OutputRow, X), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "Y", HOFFSET(OutputRow, Y), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "Z", HOFFSET(OutputRow, Z), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "Px", HOFFSET(OutputRow, Px), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "Py", HOFFSET(OutputRow, Py), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "Pz", HOFFSET(OutputRow, Pz), H5T_NATIVE_DOUBLE);

	H5Tinsert(sid, "SN0", HOFFSET(OutputRow, SN0), H5T_NATIVE_UINT64);
	H5Tinsert(sid, "ID0", HOFFSET(OutputRow, ID0), H5T_NATIVE_INT32);
	H5Tinsert(sid, "E0", HOFFSET(OutputRow, E0), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "X0", HOFFSET(OutputRow, X0), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "Y0", HOFFSET(OutputRow, Y0), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "Z0", HOFFSET(OutputRow, Z0), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "P0x", HOFFSET(OutputRow, P0x), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "P0y", HOFFSET(OutputRow, P0y), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "P0z", HOFFSET(OutputRow, P0z), H5T_NATIVE_DOUBLE);

	H5Tinsert(sid, "SN1", HOFFSET(OutputRow, SN1), H5T_NATIVE_UINT64);
	H5Tinsert(sid, "ID1", HOFFSET(OutputRow, ID1), H5T_NATIVE_INT32);
	H5Tinsert(sid, "E1", HOFFSET(OutputRow, E1), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "X1", HOFFSET(OutputRow, X1), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "Y1", HOFFSET(OutputRow, Y1), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "Z1", HOFFSET(OutputRow, Z1), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "P1x", HOFFSET(OutputRow, P1x), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "P1y", HOFFSET(OutputRow, P1y), H5T_NATIVE_DOUBLE);
	H5Tinsert(sid, "P1z", HOFFSET(OutputRow, P1z), H5T_NATIVE_DOUBLE);

	// chunked prop
	hid_t plist = H5Pcreate(H5P_DATASET_CREATE);
	H5Pset_layout(plist, H5D_CHUNKED);
	hsize_t chunk_dims[RANK] = {BUFFER_SIZE};
	H5Pset_chunk(plist, RANK, chunk_dims);
	H5Pset_deflate(plist, 5);

	hsize_t dims[RANK] = {0};
	hsize_t max_dims[RANK] = {H5S_UNLIMITED};
	dataspace = H5Screate_simple(RANK, dims, max_dims);

	dset = H5Dcreate2(file, "CRPROPA3", sid, dataspace, H5P_DEFAULT, plist, H5P_DEFAULT);

	H5Pclose(plist);

	buffer.reserve(BUFFER_SIZE);
}
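The dataset above is created with a zero-length, unlimited first dimension and a chunked, deflated layout, so rows are appended later by extending the dataset and writing into the newly added region (the reserved buffer feeds that path). The append itself is not part of this excerpt; the sketch below shows the usual sequence. `append_rows`, `dset` and `row_type` are placeholder names, not identifiers from the project above.

/* Sketch: append n rows of a compound type to a 1-D dataset that was created,
 * as above, with dims = {0} and max_dims = {H5S_UNLIMITED}. */
static herr_t append_rows(hid_t dset, hid_t row_type, const void *rows, hsize_t n)
{
    /* Query the current extent of the dataset */
    hsize_t cur;
    hid_t file_space = H5Dget_space(dset);
    H5Sget_simple_extent_dims(file_space, &cur, NULL);
    H5Sclose(file_space);

    /* Grow the dataset by n rows, then select the newly added region */
    hsize_t grown = cur + n;
    if (H5Dset_extent(dset, &grown) < 0) return -1;
    file_space = H5Dget_space(dset);
    H5Sselect_hyperslab(file_space, H5S_SELECT_SET, &cur, NULL, &n, NULL);

    /* Memory dataspace matching the rows being appended */
    hid_t mem_space = H5Screate_simple(1, &n, NULL);
    herr_t status = H5Dwrite(dset, row_type, mem_space, file_space, H5P_DEFAULT, rows);

    H5Sclose(mem_space);
    H5Sclose(file_space);
    return status;
}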
Example #25
/*-------------------------------------------------------------------------
 * Function:	test_filters_endianess
 *
 * Purpose:	Create a dataset with the fletcher filter.
 *	        This function is used to create the test file `test_filters.h5' 
 *              which has a dataset with the "fletcher" I/O filter.  This dataset 
 *              will be used to verify the correct behavior of the library in 
 *              the test "dsets"
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:  Pedro Vicente <*****@*****.**>
 *              Thursday, March 25, 2004
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_filters_endianess(void)
{
#if defined H5_HAVE_FILTER_FLETCHER32
    hid_t     fid = -1;              /* file ID */
    hid_t     dsid = -1;             /* dataset ID */
    hid_t     sid = -1;              /* dataspace ID */
    hid_t     dcpl = -1;             /* dataset creation property list ID */
    hsize_t   dims[1] = {20};        /* dataspace dimensions */
    hsize_t   chunk_dims[1] = {10};  /* chunk dimensions */
    int       buf[20];
    int       rank = 1;
    int       i;

    for(i = 0; i < 20; i++)
        buf[i] = 1;

    /* create a file using default properties */
    if((fid = H5Fcreate(TESTFILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error;

    /* create a data space */
    if((sid = H5Screate_simple(rank, dims, NULL)) < 0) goto error;

    /* create dcpl  */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk(dcpl, rank, chunk_dims) < 0) goto error;

    if(H5Pset_fletcher32(dcpl) < 0) goto error;

    /* create a dataset */
    if((dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) goto error;

    if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) goto error;

    /* close */
    if(H5Pclose(dcpl) < 0) goto error;
    if(H5Dclose(dsid) < 0) goto error;
    if(H5Sclose(sid) < 0) goto error;
    if(H5Fclose(fid) < 0) goto error;

#endif /* H5_HAVE_FILTER_FLETCHER32 */
    return 0;

#if defined H5_HAVE_FILTER_FLETCHER32
error:
    H5E_BEGIN_TRY {
        H5Pclose(dcpl);
        H5Dclose(dsid);
        H5Sclose(sid);
        H5Fclose(fid);
    } H5E_END_TRY;
    return -1;
#endif /* H5_HAVE_FILTER_FLETCHER32 */
} /* end test_filters_endianess() */
Example #26
/*-------------------------------------------------------------------------
 * Function:	create_file_with_bogus_filter
 *
 * Purpose:	Create a file and a dataset with a bogus filter enabled
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:	Raymond Lu
 *              2 June 2011
 *
 *-------------------------------------------------------------------------
 */
static herr_t
create_file_with_bogus_filter(void)
{
    hid_t     fid = -1;              /* file ID */
    hid_t     dsid = -1;             /* dataset ID */
    hid_t     sid = -1;              /* dataspace ID */
    hid_t     dcpl = -1;             /* dataset creation property list ID */
    hsize_t   dims[1] = {20};        /* dataspace dimensions */
    hsize_t   chunk_dims[1] = {10};  /* chunk dimensions */
    int       buf[20];
    int       rank = 1;
    int       i;

    for(i = 0; i < 20; i++)
        buf[i] = 1;

    /* create a file using default properties */
    if((fid = H5Fcreate(TESTFILE2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error;

    /* create a data space */
    if((sid = H5Screate_simple(rank, dims, NULL)) < 0) goto error;

    /* create dcpl  */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;

    /* create chunking */ 
    if(H5Pset_chunk(dcpl, rank, chunk_dims) < 0) goto error;

    /* register bogus filter */
    if(H5Zregister (H5Z_BOGUS) < 0) goto error;
    if(H5Pset_filter(dcpl, H5Z_FILTER_BOGUS, 0, (size_t)0, NULL) < 0) goto error;

    /* create a dataset */
    if((dsid = H5Dcreate2(fid, DSETNAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) goto error;

    if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) goto error;

    /* close */
    if(H5Pclose(dcpl) < 0) goto error;
    if(H5Dclose(dsid) < 0) goto error;
    if(H5Sclose(sid) < 0) goto error;
    if(H5Fclose(fid) < 0) goto error;

    return 0;

error:
    H5E_BEGIN_TRY {
        H5Pclose(dcpl);
        H5Dclose(dsid);
        H5Sclose(sid);
        H5Fclose(fid);
    } H5E_END_TRY;
    return -1;
}
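Both H5Z_BOGUS and H5Z_FILTER_BOGUS are defined elsewhere in the HDF5 test code and are not shown in this excerpt. For orientation only, a pass-through filter class registered this way might look roughly like the sketch below; the filter id and the behaviour are assumptions, not the actual test definitions.

#define H5Z_FILTER_BOGUS 305    /* assumed id in the application-defined range */

/* Pass-through filter callback: reports the data unchanged */
static size_t
filter_bogus(unsigned int flags, size_t cd_nelmts, const unsigned int *cd_values,
             size_t nbytes, size_t *buf_size, void **buf)
{
    return nbytes;
}

const H5Z_class2_t H5Z_BOGUS[1] = {{
    H5Z_CLASS_T_VERS,       /* H5Z_class_t version       */
    H5Z_FILTER_BOGUS,       /* filter id number          */
    1, 1,                   /* encoder/decoder present   */
    "bogus",                /* filter name               */
    NULL,                   /* can_apply callback        */
    NULL,                   /* set_local callback        */
    filter_bogus,           /* the filter function       */
}};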
Example #27
/*-------------------------------------------------------------------------
 * Function:    test_filter_write_failure
 *
 * Purpose:     Tests the library's behavior when a mandatory filter returns
 *              failure.  There are only 5 chunks, each of them holding
 *              2 integers.  The filter will fail in the last chunk.  The
 *              dataset should release all resources even though the last 
 *              chunk can't be flushed to file.  The file should close
 *              successfully.
 *
 * Return:  
 *              Success:         0
 *              Failure:         -1
 *
 * Programmer:  Raymond Lu
 *              25 August 2010
 *
 * Modifications:
 *              Raymond Lu
 *              5 Oct 2010
 *              Test when the chunk cache is enable and disabled to make 
 *              sure the library behaves properly.
 *-------------------------------------------------------------------------
 */
static herr_t
test_filter_write(char *file_name, hid_t my_fapl, hbool_t cache_enabled)
{
    hid_t        file = -1;
    hid_t        dataset=-1;                /* dataset ID */
    hid_t        sid=-1;                   /* dataspace ID */
    hid_t        dcpl=-1;                  /* dataset creation property list ID */
    hsize_t      dims[1]={DIM};           /* dataspace dimension - 10*/
    hsize_t      chunk_dims[1]={FILTER_CHUNK_DIM}; /* chunk dimension - 2*/
    int          points[DIM];          /* Data */
    herr_t       ret;                   /* generic return value */
    int          i;

    if(cache_enabled) {
        TESTING("data writing when a mandatory filter fails and chunk cache is enabled");
    } else {
        TESTING("data writing when a mandatory filter fails and chunk cache is disabled");
    }

    /* Create file */
    if((file = H5Fcreate(file_name, H5F_ACC_TRUNC, H5P_DEFAULT, my_fapl)) < 0) TEST_ERROR

    /* create the data space */
    if((sid = H5Screate_simple(1, dims, NULL)) < 0) TEST_ERROR

    /* Create dcpl and register the filter */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR

    if(H5Pset_chunk(dcpl, 1, chunk_dims) < 0) TEST_ERROR

    if(H5Zregister (H5Z_FAIL_TEST) < 0) TEST_ERROR

    /* Check that the filter was registered */
    if(TRUE != H5Zfilter_avail(H5Z_FILTER_FAIL_TEST)) FAIL_STACK_ERROR

    /* Enable the filter as mandatory */
    if(H5Pset_filter(dcpl, H5Z_FILTER_FAIL_TEST, 0, (size_t)0, NULL) < 0)
        TEST_ERROR

    /* create a dataset */
    if((dataset = H5Dcreate2(file, DSET_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR 

    /* Initialize the write buffer */
    for(i = 0; i < DIM; i++)
        points[i] = i;

    /* Write data.  If the chunk cache is enabled, H5Dwrite should succeed.  If it is
     * disabled, H5Dwrite should fail. */
    if(cache_enabled) {
        if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, sid, H5P_DEFAULT, points) < 0) 
            TEST_ERROR
    } else {
Example #28
/*
 * test_vl_string
 * Tests variable-length string datatype with UTF-8 strings.
 */
void test_vl_string(hid_t fid, const char *string)
{
  hid_t type_id, space_id, dset_id;
  hsize_t dims = 1;
  hsize_t size;  /* Number of bytes used */
  char *read_buf[1];
  herr_t ret;

  /* Create dataspace for datasets */
  space_id = H5Screate_simple(RANK, &dims, NULL);
  CHECK(space_id, FAIL, "H5Screate_simple");

  /* Create a datatype to refer to */
  type_id = H5Tcopy(H5T_C_S1);
  CHECK(type_id, FAIL, "H5Tcopy");
  ret = H5Tset_size(type_id, H5T_VARIABLE);
  CHECK(ret, FAIL, "H5Tset_size");

  /* Create a dataset */
  dset_id = H5Dcreate2(fid, VL_DSET1_NAME, type_id, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  CHECK(dset_id, FAIL, "H5Dcreate2");

  /* Write dataset to disk */
  ret = H5Dwrite(dset_id, type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, &string);
  CHECK(ret, FAIL, "H5Dwrite");

  /* Make certain the correct amount of memory will be used */
  ret = H5Dvlen_get_buf_size(dset_id, type_id, space_id, &size);
  CHECK(ret, FAIL, "H5Dvlen_get_buf_size");
  VERIFY(size, (hsize_t)HDstrlen(string) + 1, "H5Dvlen_get_buf_size");

  /* Read dataset from disk */
  ret = H5Dread(dset_id, type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, read_buf);
  CHECK(ret, FAIL, "H5Dread");

  /* Compare data read in */
  VERIFY(HDstrcmp(string, read_buf[0]), 0, "strcmp");

  /* Reclaim the read VL data */
  ret = H5Dvlen_reclaim(type_id, space_id, H5P_DEFAULT, read_buf);
  CHECK(ret, FAIL, "H5Dvlen_reclaim");

  /* Close all */
  ret = H5Dclose(dset_id);
  CHECK(ret, FAIL, "H5Dclose");
  ret = H5Tclose(type_id);
  CHECK(ret, FAIL, "H5Tclose");
  ret = H5Sclose(space_id);
  CHECK(ret, FAIL, "H5Sclose");
}
Example #29
File: tvlstr.c Project: ElaraFX/hdf5
/* Helper routine for test_vl_rewrite() */
static void write_scalar_dset(hid_t file, hid_t type, hid_t space, char *name, char *data)
{
      hid_t dset;
      herr_t ret;

      dset = H5Dcreate2(file, name, type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
      CHECK(dset, FAIL, "H5Dcreate2");

      ret = H5Dwrite(dset, type, space, space, H5P_DEFAULT, &data);
      CHECK(ret, FAIL, "H5Dwrite");

      ret = H5Dclose(dset);
      CHECK(ret, FAIL, "H5Dclose");
}
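write_scalar_dset expects the caller to have built the datatype and dataspace already. A minimal sketch of such a caller follows, using a scalar dataspace and a variable-length string type as the surrounding tvlstr.c tests do; the dataset name and value are illustrative assumptions.

/* Sketch of a caller for the helper above; names are placeholders */
static void write_one_string(hid_t file)
{
      char  name[]  = "string_dset";
      char  value[] = "example value";
      hid_t space   = H5Screate(H5S_SCALAR);
      hid_t type    = H5Tcopy(H5T_C_S1);

      H5Tset_size(type, H5T_VARIABLE);      /* variable-length string type */
      write_scalar_dset(file, type, space, name, value);

      H5Tclose(type);
      H5Sclose(space);
}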
Example #30
/*
 * test_fl_string
 * Tests that UTF-8 can be used for fixed-length string data.
 * Writes the string to a dataset and reads it back again.
 */
void test_fl_string(hid_t fid, const char *string)
{
  hid_t dtype_id, space_id, dset_id;
  hsize_t dims = 1;
  char read_buf[MAX_STRING_LENGTH];
  H5T_cset_t cset;
  herr_t ret;

  /* Create the datatype, ensure that the character set behaves
   * correctly (it should default to ASCII and can be set to UTF8)
   */
  dtype_id = H5Tcopy(H5T_C_S1);
  CHECK(dtype_id, FAIL, "H5Tcopy");
  ret = H5Tset_size(dtype_id, (size_t)MAX_STRING_LENGTH);
  CHECK(ret, FAIL, "H5Tset_size");
  cset = H5Tget_cset(dtype_id);
  VERIFY(cset, H5T_CSET_ASCII, "H5Tget_cset");
  ret = H5Tset_cset(dtype_id, H5T_CSET_UTF8);
  CHECK(ret, FAIL, "H5Tset_cset");
  cset = H5Tget_cset(dtype_id);
  VERIFY(cset, H5T_CSET_UTF8, "H5Tget_cset");

  /* Create dataspace for a dataset */
  space_id = H5Screate_simple(RANK, &dims, NULL);
  CHECK(space_id, FAIL, "H5Screate_simple");

  /* Create a dataset */
  dset_id = H5Dcreate2(fid, DSET1_NAME, dtype_id, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  CHECK(dset_id, FAIL, "H5Dcreate2");

  /* Write UTF-8 string to dataset */
  ret = H5Dwrite(dset_id, dtype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, string);
  CHECK(ret, FAIL, "H5Dwrite");

  /* Read string back and make sure it is unchanged */
  ret = H5Dread(dset_id, dtype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, read_buf);
  CHECK(ret, FAIL, "H5Dread");

  VERIFY(HDstrcmp(string, read_buf), 0, "strcmp");

  /* Close all */
  ret = H5Dclose(dset_id);
  CHECK(ret, FAIL, "H5Dclose");

  ret = H5Tclose(dtype_id);
  CHECK(ret, FAIL, "H5Tclose");
  ret = H5Sclose(space_id);
  CHECK(ret, FAIL, "H5Sclose");
}