Code Example #1
File: overhead.c  Project: MichaelToal/hdf5
/*-------------------------------------------------------------------------
 * Function:  usage
 *
 * Purpose:  Prints a usage message and exits.
 *
 * Return:  never returns
 *
 * Programmer:  Robb Matzke
 *              Wednesday, September 30, 1998
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static void
usage(const char *prog)
{
    fprintf(stderr, "usage: %s [STYLE|cache] [LEFT [MIDDLE [RIGHT]]]\n",
      prog);
    fprintf(stderr, "\
    STYLE is the order that the dataset is filled and should be one of:\n\
        forward   --  Fill the dataset from lowest address to highest\n\
                      address. This style tests the right split ratio.\n\
        reverse   --  Fill the dataset from highest address to lowest\n\
                      address.  This is the reverse order of `forward' and\n\
                      tests the left split ratio.\n\
        inward    --  Fill beginning at both the lowest and highest\n\
                      addresses and work in toward the center of the\n\
                      dataset.  This tests the middle split ratio.\n\
        outward   --  Start at the center of the dataset and work outward\n\
                      toward the lowest and highest addresses.  This tests\n\
                      both left and right split ratios.\n\
        random    --  Write the chunks of the dataset in random order.  This\n\
                      tests all split ratios.\n\
    If no fill style is specified then all fill styles are tried and a\n\
    single value is printed for each one.\n\
\n\
    If the word `cache' is used instead of a fill style then the raw data\n\
    cache is enabled.  It is not possible to enable the raw data cache when\n\
    a specific fill style is used because H5Fflush() is called after each\n\
    chunk is written in order to calculate overhead during the test.  If\n\
    the cache is enabled then chunks are written to disk in different orders\n\
    than the actual H5Dwrite() calls in the test due to collisions and the\n\
    resulting B-tree will be split differently.\n\
\n\
    LEFT, MIDDLE, and RIGHT are the ratios to use for splitting and should\n\
    be values between zero and one, inclusive.\n");
    exit(1);
}
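
The usage text above explains why the raw data cache must stay disabled when a specific fill style is chosen: the test calls H5Fflush() after every chunk write so the on-disk B-tree state can be measured. A minimal sketch of that write-then-flush pattern, with hypothetical names and no error handling, might look like this:

/* Minimal sketch (hypothetical names, error checks omitted): write one chunk's
 * worth of data and immediately flush, so the file on disk reflects the
 * B-tree splits caused by that single H5Dwrite(). */
#include "hdf5.h"

static void write_chunk_and_flush(hid_t file, hid_t dset, hid_t mem_space,
                                  hid_t file_space, const int *chunk_buf)
{
    H5Dwrite(dset, H5T_NATIVE_INT, mem_space, file_space, H5P_DEFAULT, chunk_buf);
    /* Force cached raw data and metadata out to the file before measuring. */
    H5Fflush(file, H5F_SCOPE_LOCAL);
}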
Code Example #2
File: flush1.c  Project: MattNapsAlot/rHDF5
/*-------------------------------------------------------------------------
 * Function:	main
 *
 * Purpose:	Part 1 of a two-part H5Fflush() test.
 *
 * Return:	Success:	0
 *
 *		Failure:	1
 *
 * Programmer:	Robb Matzke
 *              Friday, October 23, 1998
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
int
main(void)
{
    hid_t	fapl, file, dcpl, space, dset, groups, grp;
    hsize_t	ds_size[2] = {100, 100};
    hsize_t	ch_size[2] = {5, 5};
    size_t	i, j;
    char	name[1024];

    h5_reset();
    fapl = h5_fileaccess();

    TESTING("H5Fflush (part1)");

    /* Create the file */
    h5_fixname(FILENAME[0], fapl, name, sizeof name);
    if ((file=H5Fcreate(name, H5F_ACC_TRUNC, H5P_DEFAULT, fapl))<0) goto error;

    /* Create a chunked dataset */
    if ((dcpl=H5Pcreate(H5P_DATASET_CREATE))<0) goto error;
    if (H5Pset_chunk(dcpl, 2, ch_size)<0) goto error;
    if ((space=H5Screate_simple(2, ds_size, NULL))<0) goto error;
    if ((dset=H5Dcreate(file, "dset", H5T_NATIVE_FLOAT, space, H5P_DEFAULT))<0)
	goto error;

    /* Write some data */
    for (i=0; i<ds_size[0]; i++) {
	/*
	 * The extra cast in the following statement is a bug workaround
	 * for the Win32 version 5.0 compiler.
	 * 1998-11-06 ptl
	 */
	for (j=0; j<ds_size[1]; j++) {
	    the_data[i][j] = (double)(hssize_t)i/(hssize_t)(j+1);
	}
    }
    if (H5Dwrite(dset, H5T_NATIVE_DOUBLE, space, space, H5P_DEFAULT,
		 the_data)<0) goto error;

    /* Create some groups */
    if ((groups=H5Gcreate(file, "some_groups", 0))<0) goto error;
    for (i=0; i<100; i++) {
	sprintf(name, "grp%02u", (unsigned)i);
	if ((grp=H5Gcreate(groups, name, 0))<0) goto error;
	if (H5Gclose(grp)<0) goto error;
    }

    /* Flush and exit without closing the library */
    if (H5Fflush(file, H5F_SCOPE_GLOBAL)<0) goto error;
    PASSED();
    fflush(stdout);
    fflush(stderr);
    HD_exit(0);
    return 0;

 error:
    HD_exit(1);
    return 1;
}
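
Since this is only part 1, the file is flushed and the process exits without ever calling H5Fclose(). A hedged sketch of the kind of check a companion "part 2" would run (hypothetical code, not the project's actual flush2.c) is:

/* Hypothetical sketch of the "part 2" side: reopen the file that part 1
 * flushed but never closed, and confirm the flushed objects are readable. */
#include "hdf5.h"

static int check_flushed_file(const char *name)
{
    hid_t file, dset;

    if ((file = H5Fopen(name, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
        return -1;                       /* flush did not leave a valid file */
    if ((dset = H5Dopen(file, "dset")) < 0) {   /* dataset written in part 1 */
        H5Fclose(file);
        return -1;
    }
    H5Dclose(dset);
    H5Fclose(file);
    return 0;
}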
Code Example #3
void EstimatorManager::collectBlockAverages(int num_threads)
{
  if(Options[COLLECT])
  {
    //copy cached data to RemoteData[0]
    int n1=AverageCache.size();
    int n2=n1+AverageCache.size();
    int n3=n2+PropertyCache.size();
    {
      BufferType::iterator cur(RemoteData[0]->begin());
      std::copy(AverageCache.begin(),AverageCache.end(),cur);
      std::copy(SquaredAverageCache.begin(),SquaredAverageCache.end(),cur+n1);
      std::copy(PropertyCache.begin(),PropertyCache.end(),cur+n2);
    }
#if defined(QMC_ASYNC_COLLECT)
    if(Options[MANAGE])
    {
      //wait all the message but we can choose to wait one-by-one with a timer
      wait_all(myRequest.size(),&myRequest[0]);
      for(int is=1; is<myComm->size(); is++)
        accumulate_elements(RemoteData[is]->begin(),RemoteData[is]->end(), RemoteData[0]->begin());
    }
    else //not a master, pack and send the data
      myRequest[0]=myComm->isend(0,myComm->rank(),*RemoteData[0]);
#else
    myComm->reduce(*RemoteData[0]);
#endif
    if(Options[MANAGE])
    {
      BufferType::iterator cur(RemoteData[0]->begin());
      std::copy(cur,cur+n1, AverageCache.begin());
      std::copy(cur+n1,cur+n2, SquaredAverageCache.begin());
      std::copy(cur+n2,cur+n3, PropertyCache.begin());
      RealType nth=1.0/static_cast<RealType>(myComm->size());
      AverageCache *= nth;
      SquaredAverageCache *= nth;
      //do not weight weightInd
      for(int i=1; i<PropertyCache.size(); i++)
        PropertyCache[i] *= nth;
    }
  }
  //add the block average to summarize
  energyAccumulator(AverageCache[0]);
  varAccumulator(SquaredAverageCache[0]-AverageCache[0]*AverageCache[0]);
  if(Archive)
  {
    *Archive << setw(10) << RecordCount;
    int maxobjs=std::min(BlockAverages.size(),max4ascii);
    for(int j=0; j<maxobjs; j++)
      *Archive << setw(FieldWidth) << AverageCache[j];
    for(int j=0; j<PropertyCache.size(); j++)
      *Archive << setw(FieldWidth) << PropertyCache[j];
    *Archive << endl;
    for(int o=0; o<h5desc.size(); ++o)
      h5desc[o]->write(AverageCache.data(),SquaredAverageCache.data());
    H5Fflush(h_file,H5F_SCOPE_LOCAL);
  }
  RecordCount++;
}
Code Example #4
File: H5Object.cpp  Project: VisBlank/hdf5
//--------------------------------------------------------------------------
// Function:	H5Object::flush
///\brief	Flushes all buffers associated with a file to disk.
///\param	scope - IN: Specifies the scope of the flushing action,
///		which can be either of these values:
///		\li \c H5F_SCOPE_GLOBAL - Flushes the entire virtual file
///		\li \c H5F_SCOPE_LOCAL - Flushes only the specified file
///\exception	H5::AttributeIException
///\par Description
///		This object is used to identify the file to be flushed.
// Programmer	Binh-Minh Ribler - 2000
//--------------------------------------------------------------------------
void H5Object::flush(H5F_scope_t scope) const
{
   herr_t ret_value = H5Fflush(getId(), scope);
   if( ret_value < 0 )
   {
      throw FileIException(inMemFunc("flush"), "H5Fflush failed");
   }
}
Code Example #5
File: H5F.c  Project: patperry/rhdf5
/* herr_t H5Fflush(hid_t object_id, H5F_scope_t scope ) */
SEXP _H5Fflush(SEXP _object_id, SEXP _scope ) {
  hid_t object_id =  INTEGER(_object_id)[0];
  H5F_scope_t scope = INTEGER(_scope)[0];

  herr_t herr = H5Fflush(object_id, scope );
  SEXP Rval = ScalarInteger(herr);
  return(Rval);
}
Code Example #6
File: H5File.cpp  Project: ScilabOrg/scilab
void H5File::flush(const bool local) const
{
    herr_t err = H5Fflush(file, local ? H5F_SCOPE_LOCAL : H5F_SCOPE_GLOBAL);
    if (err < 0)
    {
        throw H5Exception(__LINE__, __FILE__, _("Error in flushing the file."));
    }
}
Code Example #7
File: rb_h5f.c  Project: himotoyoshi/carray-hdf5
static VALUE
rb_H5Fflush (VALUE mod, VALUE v_obj_id, VALUE v_scope)
{
  herr_t status;

  status = H5Fflush(NUM2INT(v_obj_id), NUM2INT(v_scope));

  if ( status < 0 )
    rb_hdf5_raise("can't flush object");
  
  return INT2NUM(status);
}
Code Example #8
File: minc_compat.c  Project: BIC-MNI/minc
MNCAPI int
MI2sync(int fd)
{
    if (MI2_ISH5OBJ(fd)) {
        /* Commit the (entire) file to disk. */
        if (H5Fflush(fd, H5F_SCOPE_GLOBAL) < 0) {
            return (MI_ERROR);
        }
        else {
            return (MI_NOERROR);
        }
    }
    else {
        return (ncsync(fd));
    }
}
Code Example #9
File: HDF5DataWriter.cpp  Project: hrani/moose-core
void HDF5DataWriter::flush()
{
    if (filehandle_ < 0){
        cerr << "HDF5DataWriter::flush() - "
                "Filehandle invalid. Cannot write data." << endl;
        return;
    }

    for (unsigned int ii = 0; ii < datasets_.size(); ++ii){
        herr_t status = appendToDataset(datasets_[ii], data_[ii]);
        data_[ii].clear();
        if (status < 0){
            cerr << "Warning: appending data for object " << src_[ii]
                 << " returned status " << status << endl;
        }
    }
    HDF5WriterBase::flush();
    H5Fflush(filehandle_, H5F_SCOPE_LOCAL);
}
Code Example #10
File: HDF5DataWriter.cpp  Project: csiki/MOOSE
void HDF5DataWriter::flush()
{
    if (filehandle_ < 0){
        cerr << "HDF5DataWriter::flush() - Filehandle invalid. Cannot write data." << endl;
        return;
    }
    for (map < string, vector < double > >::iterator ii = datamap_.begin(); ii != datamap_.end(); ++ ii){
        hid_t dataset = nodemap_[ii->first];
        if (dataset < 0){
            dataset = get_dataset(ii->first);
            nodemap_[ii->first] = dataset;
        }
        herr_t status = appendToDataset(dataset, ii->second);
        if (status < 0){
            cerr << "Warning: appending data for object " << ii->first << " returned status " << status << endl;                
        }
        ii->second.clear();        
    }
    H5Fflush(filehandle_, H5F_SCOPE_LOCAL);
}
Code Example #11
File: H5Ff.c  Project: MattNapsAlot/rHDF5
/*----------------------------------------------------------------------------
 * Name:        h5fflush_c
 * Purpose:     Call H5Fflush to flush the object
 * Inputs:      object_id - identifier of either a file, a dataset,
 *                          a group, an attribute or a named data type
 *              scope - integer to specify the flushing action, either
 *                      H5F_SCOPE_GLOBAL or H5F_SCOPE_LOCAL
 * Returns:     0 on success, -1 on failure
 * Programmer:  Xiangyang Su
 *              Friday, November 5, 1999
 * Modifications:
 *---------------------------------------------------------------------------*/
int_f
nh5fflush_c (hid_t_f *object_id, int_f *scope)
{
     int ret_value = -1;
     hid_t c_file_id;
     H5F_scope_t  c_scope;
     htri_t status;
     c_scope = (H5F_scope_t)*scope;

     /*
      * Call H5Fflush function.
      */

     c_file_id = *object_id;

     status = H5Fflush(c_file_id, c_scope);

     if (status >= 0)  ret_value = 0;

     return ret_value;
}
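
As the wrapper's comment notes, the object_id passed to H5Fflush() does not have to be a file identifier; any object that lives in the file (a dataset, group, attribute, or named datatype) will do, and the scope argument chooses between flushing only that file or the entire virtual file. A minimal C sketch of calling it through a dataset handle (hypothetical helper, mirroring what example #19 below does):

/* Minimal sketch (hypothetical helper): flush the file that contains a given
 * dataset without holding the file identifier itself.  H5F_SCOPE_LOCAL limits
 * the flush to that one file; H5F_SCOPE_GLOBAL would flush the entire
 * virtual file. */
#include "hdf5.h"

static herr_t flush_containing_file(hid_t dset_id)
{
    return H5Fflush(dset_id, H5F_SCOPE_LOCAL);
}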
Code Example #12
File: H5File.cpp  Project: ScilabOrg/scilab
H5File::~H5File()
{
    H5Fflush(file, H5F_SCOPE_GLOBAL);
    cleanup();
    if (file >= 0)
    {

#if defined(__HDF5OBJECTS_DEBUG__)

        std::cout << "File " << filename << " is closing." << std::endl
                  << "Open groups: " << H5Fget_obj_count(file, H5F_OBJ_GROUP) << std::endl
                  << "Open datasets: " << H5Fget_obj_count(file, H5F_OBJ_DATASET) << std::endl
                  << "Open datatypes: " << H5Fget_obj_count(file, H5F_OBJ_DATATYPE) << std::endl
                  << "Open attributes: " << H5Fget_obj_count(file, H5F_OBJ_ATTR) << std::endl
                  << "Open all (except the file itself): " << H5Fget_obj_count(file, H5F_OBJ_ALL) - 1 << std::endl;

#endif

        H5Fclose(file);
        H5garbage_collect();
    }
}
Code Example #13
void CompositeEstimatorSet::recordBlock()
{
  for(int i=0; i< Estimators.size(); i++) Estimators[i]->recordBlock();
  H5Fflush(GroupID,H5F_SCOPE_LOCAL);
}
Code Example #14
File: bigarray.hpp  Project: elen4/GURLS
void BigArray<T>::flush()
{
    herr_t status = H5Fflush(file_id, H5F_SCOPE_GLOBAL);
    CHECK_HDF5_ERR(status, "Error flushing data to file")
}
Code Example #15
File: flushrefresh.c  Project: FilipeMaia/hdf5
/*-------------------------------------------------------------------------
 * Function:    test_refresh
 *
 * Purpose:     This function tests refresh (evict/reload) of individual 
 *              objects' metadata from the metadata cache.
 *
 * Return:      0 on Success, 1 on Failure
 *
 * Programmer:  Mike McGreevy
 *              August 17, 2010
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
herr_t test_refresh(void) 
{
    /**************************************************************************
     *
     * Test Description:
     *
     * This test will build an HDF5 file with several objects in a varying 
     * hierarchical layout. It will then flush the entire file to disk. Then,
     * an attribute will be added to each object in the file.
     * 
     * One by one, this process will flush each object to disk, individually.
     * It will also be coordinating with another process, which will open
     * the object before it is flushed by this process, and then refresh the
     * object after it's been flushed, comparing the before and after object
     * information to ensure that they are as expected. (i.e., most notably,
     * that an attribute has been added, and is only visible after a 
     * successful call to an H5*refresh function).
     *
     * As with the flush case, the implementation is a bit tricky as it's
     * dealing with signals going back and forth between the two processes
     * to ensure the timing is correct, but basically, an example: 
     *
     * Step 1. Dataset is created.
     * Step 2. Dataset is flushed.
     * Step 3. Attribute on Dataset is created.
     * Step 4. Another process opens the dataset and verifies that it does
     *         not see an attribute (as the attribute hasn't been flushed yet).
     * Step 5. This process flushes the dataset again (with Attribute attached).
     * Step 6. The other process calls H5Drefresh, which should evict/reload
     *         the object's metadata, and thus pick up the attribute that's
     *         attached to it. Most other before/after object information is 
     *         compared for sanity as well.
     * Step 7. Rinse and Repeat for each object in the file.
     *
     **************************************************************************/

    /**************************************************************************
      * Generated Test File will look like this:
      * 
      * GROUP "/"
      *   DATASET "Dataset1"
      *   GROUP "Group1" {
      *     DATASET "Dataset2"
      *     GROUP "Group2" {
      *       DATATYPE "CommittedDatatype3"
      *     }
      *   }
      *   GROUP "Group3" {
      *     DATASET "Dataset3"
      *     DATATYPE "CommittedDatatype2"
      *   }
      *   DATATYPE "CommittedDatatype1"
     **************************************************************************/

    /* Variables */
    hid_t aid,fid,sid,tid1,did,dcpl,fapl = 0;
    hid_t gid,gid2,gid3,tid2,tid3,did2,did3,status = 0;
    hsize_t dims[2] = {50,50};
    hsize_t cdims[2] = {1,1};
    int fillval = 2;

    /* Testing Message */
    HDfprintf(stdout, "Testing individual object refresh behavior:\n");

    /* Cleanup any old error or signal files */
    CLEANUP_FILES;

    /* ================ */
    /* CREATE TEST FILE */
    /* ================ */

    /* Create File */
    if ((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0) TEST_ERROR;
    if (H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) TEST_ERROR;
    if ((fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC|H5F_ACC_SWMR_WRITE, H5P_DEFAULT, fapl)) < 0) TEST_ERROR;

    /* Create data space and types */
    if ((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR;
    if ( H5Pset_chunk(dcpl, 2, cdims) < 0 ) TEST_ERROR;
    if ( H5Pset_fill_value(dcpl, H5T_NATIVE_INT, &fillval) < 0 ) TEST_ERROR;
    if ((sid = H5Screate_simple(2, dims, dims)) < 0) TEST_ERROR;
    if ((tid1 = H5Tcopy(H5T_NATIVE_INT)) < 0) TEST_ERROR;
    if ((tid2 = H5Tcopy(H5T_NATIVE_CHAR)) < 0) TEST_ERROR;
    if ((tid3 = H5Tcopy(H5T_NATIVE_LONG)) < 0) TEST_ERROR;

    /* Create Group1 */
    if ((gid = H5Gcreate2(fid, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Create Group2 */
    if ((gid2 = H5Gcreate2(gid, "Group2", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Create Group3 */
    if ((gid3 = H5Gcreate2(fid, "Group3", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Create Dataset1 */
    if ((did = H5Dcreate2(fid, "Dataset1", tid1, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Create Dataset2 */
    if ((did2 = H5Dcreate2(gid, "Dataset2", tid3, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Create Dataset3 */
    if ((did3 = H5Dcreate2(gid3, "Dataset3", tid2, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Create CommittedDatatype1 */
    if ((status = H5Tcommit2(fid, "CommittedDatatype1", tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Create CommittedDatatype2 */
    if ((status = H5Tcommit2(gid2, "CommittedDatatype2", tid2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Create CommittedDatatype3 */
    if ((status = H5Tcommit2(gid3, "CommittedDatatype3", tid3, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Flush File to Disk */
    if (H5Fflush(fid, H5F_SCOPE_GLOBAL) < 0) TEST_ERROR;

    /* Create an attribute on each object. These will not immediately hit disk, 
        and thus be unavailable to another process until this process flushes
        the object and the other process refreshes from disk. */
    if ((aid = H5Acreate2(did, "Attribute", tid1, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;
    if (H5Aclose(aid) < 0) TEST_ERROR;
    if ((aid = H5Acreate2(did2, "Attribute", tid1, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;
    if (H5Aclose(aid) < 0) TEST_ERROR;
    if ((aid = H5Acreate2(did3, "Attribute", tid1, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;
    if (H5Aclose(aid) < 0) TEST_ERROR;
    if ((aid = H5Acreate2(gid, "Attribute", tid1, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;
    if (H5Aclose(aid) < 0) TEST_ERROR;
    if ((aid = H5Acreate2(gid2, "Attribute", tid1, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;
    if (H5Aclose(aid) < 0) TEST_ERROR;
    if ((aid = H5Acreate2(gid3, "Attribute", tid1, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;
    if (H5Aclose(aid) < 0) TEST_ERROR;
    if ((aid = H5Acreate2(tid1, "Attribute", tid1, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;
    if (H5Aclose(aid) < 0) TEST_ERROR;
    if ((aid = H5Acreate2(tid2, "Attribute", tid1, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;
    if (H5Aclose(aid) < 0) TEST_ERROR;
    if ((aid = H5Acreate2(tid3, "Attribute", tid1, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;
    if (H5Aclose(aid) < 0) TEST_ERROR;

    /* ================ */
    /* Refresh Datasets */
    /* ================ */

    TESTING("to ensure that H5Drefresh correctly refreshes single datasets");

    /* Verify First Dataset can be refreshed with H5Drefresh */
    if (start_refresh_verification_process(D1) != 0) TEST_ERROR;
    if (H5Oflush(did) < 0) TEST_ERROR;
    if (end_refresh_verification_process() != 0) TEST_ERROR;

    /* Verify Second Dataset can be refreshed with H5Drefresh */
    if (start_refresh_verification_process(D2) != 0) TEST_ERROR;
    if (H5Oflush(did2) < 0) TEST_ERROR;
    if (end_refresh_verification_process() != 0) TEST_ERROR;

    PASSED();

    /* ============== */
    /* Refresh Groups */
    /* ============== */

    TESTING("to ensure that H5Grefresh correctly refreshes single groups");

    /* Verify First Group can be refreshed with H5Grefresh */
    if (start_refresh_verification_process(G1) != 0) TEST_ERROR;
    if (H5Oflush(gid) < 0) TEST_ERROR;
    if (end_refresh_verification_process() != 0) TEST_ERROR;

    /* Verify Second Group can be refreshed with H5Grefresh */
    if (start_refresh_verification_process(G2) != 0) TEST_ERROR;
    if (H5Oflush(gid2) < 0) TEST_ERROR;
    if (end_refresh_verification_process() != 0) TEST_ERROR;

    PASSED();

    /* ================= */
    /* Refresh Datatypes */
    /* ================= */

    TESTING("to ensure that H5Trefresh correctly refreshes single datatypes");

    /* Verify First Committed Datatype can be refreshed with H5Trefresh */
    if (start_refresh_verification_process(T1) != 0) TEST_ERROR;
    if (H5Oflush(tid1) < 0) TEST_ERROR;
    if (end_refresh_verification_process() != 0) TEST_ERROR;

    /* Verify Second Committed Datatype can be refreshed with H5Trefresh */
    if (start_refresh_verification_process(T2) != 0) TEST_ERROR;
    if (H5Oflush(tid2) < 0) TEST_ERROR;
    if (end_refresh_verification_process() != 0) TEST_ERROR;

    PASSED();

    /* =============== */
    /* Refresh Objects */
    /* =============== */

    TESTING("to ensure that H5Orefresh correctly refreshes single objects");

    /* Verify Third Dataset can be refreshed with H5Orefresh */
    if (start_refresh_verification_process(D3) != 0) TEST_ERROR;
    if (H5Oflush(did3) < 0) TEST_ERROR;
    if (end_refresh_verification_process() != 0) TEST_ERROR;

    /* Verify Third Group can be refreshed with H5Orefresh */
    if (start_refresh_verification_process(G3) != 0) TEST_ERROR;
    if (H5Oflush(gid3) < 0) TEST_ERROR;
    if (end_refresh_verification_process() != 0) TEST_ERROR;

    /* Verify Third Committed Datatype can be refreshed with H5Orefresh */
    if (start_refresh_verification_process(T3) != 0) TEST_ERROR;
    if (H5Oflush(tid3) < 0) TEST_ERROR;
    if (end_refresh_verification_process() != 0) TEST_ERROR;

    PASSED();

    /* ================== */
    /* Cleanup and Return */  
    /* ================== */

    /* Close Stuff */
    if (H5Pclose(fapl) < 0) TEST_ERROR;
    if (H5Tclose(tid1) < 0) TEST_ERROR;
    if (H5Tclose(tid2) < 0) TEST_ERROR;
    if (H5Tclose(tid3) < 0) TEST_ERROR;
    if (H5Dclose(did) < 0) TEST_ERROR;
    if (H5Dclose(did2) < 0) TEST_ERROR;
    if (H5Dclose(did3) < 0) TEST_ERROR;
    if (H5Gclose(gid) < 0) TEST_ERROR;
    if (H5Gclose(gid2) < 0) TEST_ERROR;
    if (H5Gclose(gid3) < 0) TEST_ERROR;
    if (H5Sclose(sid) < 0) TEST_ERROR;
    if (H5Fclose(fid) < 0) TEST_ERROR;

    /* Delete Test File */
    HDremove(FILENAME);

    if (end_verification() < 0) TEST_ERROR;

    return SUCCEED;

error:
    /* Return */
    return FAIL;

} /* test_refresh() */
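
The steps listed in the comment above describe a writer that flushes each object and a second process that refreshes it. A minimal sketch of the reader side (hypothetical helper, assuming HDF5 1.10+ with SWMR, with the inter-process signalling handled elsewhere) could be:

/* Hypothetical sketch of the verification side: open the dataset, observe
 * that the attribute is not yet visible, wait for the writer to H5Oflush()
 * it, then H5Drefresh() to evict and reload the metadata. */
#include "hdf5.h"

static int attribute_visible_after_refresh(const char *fname, const char *dname)
{
    hid_t fid, did;
    htri_t before, after;

    fid = H5Fopen(fname, H5F_ACC_RDONLY | H5F_ACC_SWMR_READ, H5P_DEFAULT);
    did = H5Dopen2(fid, dname, H5P_DEFAULT);
    before = H5Aexists(did, "Attribute");      /* expected: false (0) */

    /* ... block here until the writer signals that H5Oflush() is done ... */

    if (H5Drefresh(did) < 0)                   /* evict + reload metadata */
        return -1;
    after = H5Aexists(did, "Attribute");       /* expected: true (> 0) */

    H5Dclose(did);
    H5Fclose(fid);
    return (before == 0 && after > 0) ? 0 : -1;
}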
Code Example #16
File: writeindata_p.c  Project: kouui/rh
/* ------- begin --------------------------   init_hdf5_indata.c  --- */
void init_hdf5_indata_new(void)
/* Creates the HDF5 file for the input data */
{
  const char routineName[] = "init_hdf5_indata_new";
  int     i, PRD_angle_dep;
  double  *eweight, *eabund, *x, *y;
  /* This value is hardcoded for efficiency.
     Maximum number of iterations ever needed */
  int     NMaxIter = 1500;
  hid_t   plist, ncid, file_dspace, ncid_input, ncid_atmos, ncid_mpi;
  hsize_t dims[4];
  bool_t   XRD;
  char    startJ[MAX_LINE_SIZE], StokesMode[MAX_LINE_SIZE], angleSet[MAX_LINE_SIZE];

  /* Create the file  */
  if (( plist = H5Pcreate(H5P_FILE_ACCESS )) < 0) HERR(routineName);
  if (( H5Pset_fapl_mpio(plist, mpi.comm, mpi.info) ) < 0) HERR(routineName);
  if (( ncid = H5Fcreate(INPUTDATA_FILE, H5F_ACC_TRUNC, H5P_DEFAULT,
                         plist) ) < 0) HERR(routineName);
  if (( H5Pclose(plist) ) < 0) HERR(routineName);

  /* Create groups */
  if (( ncid_input = H5Gcreate(ncid, "/input", H5P_DEFAULT, H5P_DEFAULT,
                               H5P_DEFAULT) ) < 0) HERR(routineName);
  if (( ncid_atmos = H5Gcreate(ncid, "/atmos", H5P_DEFAULT, H5P_DEFAULT,
                               H5P_DEFAULT) ) < 0) HERR(routineName);
  if (( ncid_mpi = H5Gcreate(ncid, "/mpi", H5P_DEFAULT, H5P_DEFAULT,
                               H5P_DEFAULT) ) < 0) HERR(routineName);

  /* --- Definitions for the root group --- */
  /* dimensions as attributes */
  if (( H5LTset_attribute_int(ncid, "/", "nx", &mpi.nx, 1) ) < 0)
      HERR(routineName);
  if (( H5LTset_attribute_int(ncid, "/", "ny", &mpi.ny, 1) ) < 0)
      HERR(routineName);
  if (( H5LTset_attribute_int(ncid, "/", "nz", (int *) &infile.nz, 1 )) < 0)
      HERR(routineName);
  /* attributes */
  if (( H5LTset_attribute_string(ncid, "/", "atmosID", atmos.ID)) < 0)
    HERR(routineName);
  if (( H5LTset_attribute_string(ncid, "/", "rev_id", mpi.rev_id) ) < 0)
    HERR(routineName);

  /* --- Definitions for the INPUT group --- */
  /* attributes */
  if ( atmos.NPRDactive > 0)
    PRD_angle_dep = input.PRD_angle_dep;
  else
    PRD_angle_dep=0;

  XRD = (input.XRD  &&  atmos.NPRDactive > 0);

  if (( H5LTset_attribute_uchar(ncid_input, ".", "Magneto_optical",
          (unsigned char *) &input.magneto_optical, 1)) < 0) HERR(routineName);
  if (( H5LTset_attribute_uchar(ncid_input, ".", "PRD_angle_dep",
          (unsigned char *) &PRD_angle_dep, 1)) < 0) HERR(routineName);
  if (( H5LTset_attribute_uchar(ncid_input, ".", "XRD",
          (unsigned char *) &XRD, 1)) < 0) HERR(routineName);
  if (( H5LTset_attribute_uchar(ncid_input, ".", "Background_polarization",
          (unsigned char *) &input.backgr_pol, 1)) < 0) HERR(routineName);

  switch (input.startJ) {
  case UNKNOWN:
    strcpy(startJ, "Unknown");
    break;
  case LTE_POPULATIONS:
    strcpy(startJ, "LTE_POPULATIONS");
    break;
  case ZERO_RADIATION:
    strcpy(startJ, "ZERO_RADIATION");
    break;
  case OLD_POPULATIONS:
    strcpy(startJ, "OLD_POPULATIONS");
    break;
  case ESCAPE_PROBABILITY:
    strcpy(startJ, "ESCAPE_PROBABILITY");
    break;
  case NEW_J:
    strcpy(startJ, "NEW_J");
    break;
  case OLD_J:
    strcpy(startJ, "OLD_J");
    break;
  }
  if (( H5LTset_attribute_string(ncid_input, ".", "Start_J", startJ)) < 0)
    HERR(routineName);

  switch (input.StokesMode) {
  case NO_STOKES:
    strcpy(StokesMode, "NO_STOKES");
    break;
  case FIELD_FREE:
    strcpy(StokesMode, "FIELD_FREE");
    break;
  case POLARIZATION_FREE:
    strcpy(StokesMode, "POLARIZATION_FREE");
    break;
  case FULL_STOKES:
    strcpy(StokesMode, "FULL_STOKES");
    break;
  }
  if (( H5LTset_attribute_string(ncid_input, ".", "Stokes_mode",
                                 StokesMode) ) < 0) HERR(routineName);

  switch (atmos.angleSet.set) {
  case SET_VERTICAL:
    strcpy(angleSet, "SET_VERTICAL");
    break;
  case SET_GL:
    strcpy(angleSet, "SET_GL");
    break;
  case SET_A2:
    strcpy(angleSet, "SET_A2");
    break;
  case SET_A4:
    strcpy(angleSet, "SET_A4");
    break;
  case SET_A6:
    strcpy(angleSet, "SET_A6");
    break;
  case SET_A8:
    strcpy(angleSet, "SET_A8");
    break;
  case SET_B4:
    strcpy(angleSet, "SET_B4");
    break;
  case SET_B6:
    strcpy(angleSet, "SET_B6");
    break;
  case SET_B8:
    strcpy(angleSet, "SET_B8");
    break;
  case NO_SET:
    strcpy(angleSet, "NO_SET");
    break;
  }
  if (( H5LTset_attribute_string(ncid_input, ".", "Angle_set",
                                 angleSet) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_string(ncid_input, ".", "Atmos_file",
                                 input.atmos_input) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_string(ncid_input, ".", "Abundances_file",
                                 input.abund_input) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_string(ncid_input, ".", "Kurucz_PF_data",
                                 input.pfData) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_double(ncid_input, ".", "Iteration_limit",
                                 &input.iterLimit, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_double(ncid_input, ".", "PRD_Iteration_limit",
                              &input.PRDiterLimit, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_input, ".", "N_max_iter",
                              &input.NmaxIter, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_input, ".", "Ng_delay",
                              &input.Ngdelay, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_input, ".", "Ng_order",
                              &input.Ngorder, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_input, ".", "Ng_period",
                              &input.Ngperiod, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_input, ".", "PRD_N_max_iter",
                              &input.PRD_NmaxIter, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_input, ".", "PRD_Ng_delay",
                              &input.PRD_Ngdelay, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_input, ".", "PRD_Ng_order",
                              &input.PRD_Ngorder, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_input, ".", "PRD_Ng_period",
                              &input.PRD_Ngperiod, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_double(ncid_input, ".", "Metallicity",
                               &input.metallicity, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_double(ncid_input, ".", "Lambda_reference",
                                &atmos.lambda_ref, 1) ) < 0) HERR(routineName);

  /* --- Definitions for the ATMOS group --- */
  /* dimensions */
  if (( H5LTset_attribute_int(ncid_atmos, ".", "nhydr",
                              &atmos.H->Nlevel, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_atmos, ".", "nelements",
                              &atmos.Nelem, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_atmos, ".", "nrays",
                              &geometry.Nrays, 1) ) < 0) HERR(routineName);


  /* variables*/
  dims[0] = mpi.nx;
  dims[1] = mpi.ny;
  dims[2] = infile.nz;
  if (( file_dspace = H5Screate_simple(3, dims, NULL) ) < 0) HERR(routineName);
  if (( plist = H5Pcreate(H5P_DATASET_CREATE) ) < 0) HERR(routineName);
  if (( H5Pset_fill_value(plist, H5T_NATIVE_FLOAT, &FILLVALUE) ) < 0)
    HERR(routineName);
  if (( H5Pset_alloc_time(plist, H5D_ALLOC_TIME_EARLY) ) < 0) HERR(routineName);
  if (( H5Pset_fill_time(plist, H5D_FILL_TIME_ALLOC) ) < 0) HERR(routineName);

  if (( io.in_atmos_T = H5Dcreate(ncid_atmos, "temperature", H5T_NATIVE_FLOAT,
         file_dspace, H5P_DEFAULT, plist, H5P_DEFAULT)) < 0) HERR(routineName);
  if (( io.in_atmos_vz = H5Dcreate(ncid_atmos, "velocity_z", H5T_NATIVE_FLOAT,
         file_dspace, H5P_DEFAULT, plist, H5P_DEFAULT)) < 0) HERR(routineName);
  if (( io.in_atmos_z = H5Dcreate(ncid_atmos, "height", H5T_NATIVE_FLOAT,
         file_dspace, H5P_DEFAULT, plist, H5P_DEFAULT)) < 0) HERR(routineName);
  if (( H5Pclose(plist) ) < 0) HERR(routineName);
  if (( H5Sclose(file_dspace) ) < 0) HERR(routineName);
  /* --- Write some data that does not depend on xi, yi, ATMOS group --- */
  /* arrays of number of elements */
  eweight = (double *) malloc(atmos.Nelem * sizeof(double));
  eabund = (double *) malloc(atmos.Nelem * sizeof(double));
  for (i=0; i < atmos.Nelem; i++) {
    eweight[i] = atmos.elements[i].weight;
    eabund[i] = atmos.elements[i].abund;
  }
  dims[0] = atmos.Nelem;
  if (( H5LTmake_dataset(ncid_atmos, "element_weight", 1, dims,
                H5T_NATIVE_DOUBLE, eweight) ) < 0) HERR(routineName);
  if (( H5LTmake_dataset(ncid_atmos, "element_abundance", 1, dims,
                H5T_NATIVE_DOUBLE, eabund) ) < 0) HERR(routineName);
  /* Not writing element_id for now
  dims[1] = strlen;
  if (( H5LTmake_dataset(ncid_atmos, "element_id", 2, dims,
                H5T_C_S1, eID) ) < 0) HERR(routineName);
  */
  free(eweight);
  free(eabund);

  dims[0] = geometry.Nrays;
  if (( H5LTmake_dataset(ncid_atmos, "muz", 1, dims,
              H5T_NATIVE_DOUBLE, geometry.muz) ) < 0) HERR(routineName);
  if (( H5LTmake_dataset(ncid_atmos, "wmu", 1, dims,
              H5T_NATIVE_DOUBLE, geometry.wmu) ) < 0) HERR(routineName);

  x = (double *) malloc(mpi.nx * sizeof(double));
  y = (double *) malloc(mpi.ny * sizeof(double));
  for (i=0; i < mpi.nx; i++) x[i] = infile.x[mpi.xnum[i]];
  for (i=0; i < mpi.ny; i++) y[i] = infile.y[mpi.ynum[i]];
  dims[0] = mpi.nx;
  if (( H5LTmake_dataset(ncid_atmos, "x", 1, dims,
                         H5T_NATIVE_DOUBLE, x) ) < 0) HERR(routineName);
  dims[0] = mpi.ny;
  if (( H5LTmake_dataset(ncid_atmos, "y", 1, dims,
                         H5T_NATIVE_DOUBLE, y) ) < 0) HERR(routineName);
  free(x);
  free(y);

  /* attributes */
  if (( H5LTset_attribute_uchar(ncid_atmos, ".", "moving",
                   (unsigned char *) &atmos.moving, 1)) < 0) HERR(routineName);
  if (( H5LTset_attribute_uchar(ncid_atmos, ".", "stokes",
                   (unsigned char *) &atmos.Stokes, 1)) < 0) HERR(routineName);
  if (( H5LTset_attribute_string(ncid_atmos, "temperature", "units",
                                 "K") ) < 0) HERR(routineName);
  if (( H5LTset_attribute_string(ncid_atmos,  "velocity_z", "units",
                                 "m s^-1") ) < 0) HERR(routineName);
  if (( H5LTset_attribute_string(ncid_atmos,  "height", "units",
                                 "m") ) < 0) HERR(routineName);
  if (( H5LTset_attribute_string(ncid_atmos,  "element_weight", "units",
                                 "atomic_mass_units") ) < 0) HERR(routineName);
  if (( H5LTset_attribute_string(ncid_atmos,  "x", "units",
                                 "m") ) < 0) HERR(routineName);
  if (( H5LTset_attribute_string(ncid_atmos,  "y", "units",
                                 "m") ) < 0) HERR(routineName);

  /* --- Definitions for the MPI group --- */
  /* dimensions */
  if (( H5LTset_attribute_int(ncid_mpi, ".", "nprocesses",
                              &mpi.size, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_mpi, ".", "niterations",
                              &NMaxIter, 1) ) < 0) HERR(routineName);
  /* variables*/
  dims[0] = mpi.nx;
  if (( H5LTmake_dataset(ncid_mpi, XNUM_NAME, 1, dims,
                H5T_NATIVE_INT, mpi.xnum) ) < 0) HERR(routineName);
  dims[0] = mpi.ny;
  if (( H5LTmake_dataset(ncid_mpi, YNUM_NAME, 1, dims,
                H5T_NATIVE_INT, mpi.ynum) ) < 0) HERR(routineName);
  dims[0] = mpi.nx;
  dims[1] = mpi.ny;
  if (( file_dspace = H5Screate_simple(2, dims, NULL) ) < 0) HERR(routineName);
  if (( plist = H5Pcreate(H5P_DATASET_CREATE) ) < 0) HERR(routineName);
  if (( H5Pset_fill_value(plist, H5T_NATIVE_FLOAT, &FILLVALUE) ) < 0)
    HERR(routineName);
  if (( H5Pset_alloc_time(plist, H5D_ALLOC_TIME_EARLY) ) < 0) HERR(routineName);
  if (( H5Pset_fill_time(plist, H5D_FILL_TIME_ALLOC) ) < 0) HERR(routineName);
  if (( io.in_mpi_tm = H5Dcreate(ncid_mpi, TASK_MAP, H5T_NATIVE_LONG,
         file_dspace, H5P_DEFAULT, plist, H5P_DEFAULT)) < 0) HERR(routineName);
  if (( io.in_mpi_tn = H5Dcreate(ncid_mpi, TASK_NUMBER, H5T_NATIVE_LONG,
         file_dspace, H5P_DEFAULT, plist, H5P_DEFAULT)) < 0) HERR(routineName);
  if (( io.in_mpi_it = H5Dcreate(ncid_mpi, ITER_NAME, H5T_NATIVE_LONG,
         file_dspace, H5P_DEFAULT, plist, H5P_DEFAULT)) < 0) HERR(routineName);
  if (( io.in_mpi_conv = H5Dcreate(ncid_mpi, CONV_NAME, H5T_NATIVE_LONG,
         file_dspace, H5P_DEFAULT, plist, H5P_DEFAULT)) < 0) HERR(routineName);
  if (( io.in_mpi_dm = H5Dcreate(ncid_mpi, DM_NAME, H5T_NATIVE_FLOAT,
         file_dspace, H5P_DEFAULT, plist, H5P_DEFAULT)) < 0) HERR(routineName);
  if (( io.in_mpi_zc = H5Dcreate(ncid_mpi, ZC_NAME, H5T_NATIVE_INT,
         file_dspace, H5P_DEFAULT, plist, H5P_DEFAULT)) < 0) HERR(routineName);
  if (( H5Pclose(plist) ) < 0) HERR(routineName);
  if (( H5Sclose(file_dspace) ) < 0) HERR(routineName);

  dims[0] = mpi.nx;
  dims[1] = mpi.ny;
  dims[2] = NMaxIter;
  if (( file_dspace = H5Screate_simple(3, dims, NULL) ) < 0) HERR(routineName);
  if (( plist = H5Pcreate(H5P_DATASET_CREATE) ) < 0) HERR(routineName);
  if (( H5Pset_fill_value(plist, H5T_NATIVE_FLOAT, &FILLVALUE) ) < 0)
    HERR(routineName);
  if (( H5Pset_alloc_time(plist, H5D_ALLOC_TIME_EARLY) ) < 0) HERR(routineName);
  if (( H5Pset_fill_time(plist, H5D_FILL_TIME_ALLOC) ) < 0) HERR(routineName);
  if (( io.in_mpi_dmh = H5Dcreate(ncid_mpi, DMH_NAME, H5T_NATIVE_FLOAT,
         file_dspace, H5P_DEFAULT, plist, H5P_DEFAULT)) < 0) HERR(routineName);
  if (( H5Pclose(plist) ) < 0) HERR(routineName);
  if (( H5Sclose(file_dspace) ) < 0) HERR(routineName);

  /* attributes */
  if (( H5LTset_attribute_int(ncid_mpi, ".", "x_start",
                              &input.p15d_x0, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_mpi, ".", "x_end",
                              &input.p15d_x1, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_mpi, ".", "x_step",
                              &input.p15d_xst, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_mpi, ".", "y_start",
                              &input.p15d_y0, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_mpi, ".", "y_end",
                              &input.p15d_y1, 1) ) < 0) HERR(routineName);
  if (( H5LTset_attribute_int(ncid_mpi, ".", "y_step",
                              &input.p15d_yst, 1) ) < 0) HERR(routineName);

  /* Tiago: most of the arrays involving Ntasks or rank as index are not
            currently being written. They should eventually be migrated into
            arrays of [ix, iy] and be written for each task. This is to
            avoid causing problems with pool mode, where these quantities are
            not known from the start.
  */

  /* Flush ensures file is created in case of crash */
  if (( H5Fflush(ncid, H5F_SCOPE_LOCAL) ) < 0) HERR(routineName);
  /* --- Copy stuff to the IO data struct --- */
  io.in_ncid       = ncid;
  io.in_input_ncid = ncid_input;
  io.in_atmos_ncid = ncid_atmos;
  io.in_mpi_ncid   = ncid_mpi;
  return;
}
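
The flush near the end is there, as the comment says, so that a crash later in the run still leaves a structurally complete file on disk. A minimal sketch of that pattern (hypothetical names, error handling omitted) is:

/* Minimal sketch (hypothetical names): define the file layout up front, flush
 * once so the structure reaches disk, then keep the handle open for the long
 * write phase that follows. */
#include "hdf5.h"

static hid_t create_output_file(const char *name)
{
    hid_t fid = H5Fcreate(name, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    /* ... create groups, datasets and attributes here ... */
    H5Fflush(fid, H5F_SCOPE_LOCAL);   /* survive a crash during later writes */
    return fid;
}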
Code Example #17
File: h5mdfile_tcl.cpp  Project: Clemson-MSE/espresso
int H5mdfile::H5_Fflush(int argc, char **argv, Tcl_Interp *interp)
{
    H5Fflush(file_id, H5F_SCOPE_GLOBAL);
    return TCL_OK;
}
Code Example #18
File: material.cpp  Project: crbates/pyne
void pyne::Material::write_hdf5(std::string filename, std::string datapath, 
                                std::string nucpath, float row, int chunksize) {
  int row_num = (int) row;

  // Turn off annoying HDF5 errors
  H5Eset_auto2(H5E_DEFAULT, NULL, NULL);

  //Set file access properties so it closes cleanly
  hid_t fapl;
  fapl = H5Pcreate(H5P_FILE_ACCESS);
  H5Pset_fclose_degree(fapl,H5F_CLOSE_STRONG);
  // Create new/open datafile.
  hid_t db;
  if (pyne::file_exists(filename)) {
    bool ish5 = H5Fis_hdf5(filename.c_str());
    if (!ish5)
      throw h5wrap::FileNotHDF5(filename);
    db = H5Fopen(filename.c_str(), H5F_ACC_RDWR, fapl);
  }
  else
    db = H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, fapl);

  //
  // Read in nuclist if available, write it out if not
  //
  bool nucpath_exists = h5wrap::path_exists(db, nucpath);
  std::vector<int> nuclides;
  int nuc_size;
  hsize_t nuc_dims[1];

  if (nucpath_exists) {
    nuclides = h5wrap::h5_array_to_cpp_vector_1d<int>(db, nucpath, H5T_NATIVE_INT);
    nuc_size = nuclides.size();
    nuc_dims[0] = nuc_size;
  } else {
    nuclides = std::vector<int>();
    for (pyne::comp_iter i = comp.begin(); i != comp.end(); i++)
      nuclides.push_back(i->first);
    nuc_size = nuclides.size();

    // Create the data if it doesn't exist
    int nuc_data [nuc_size];
    for (int n = 0; n != nuc_size; n++)
      nuc_data[n] = nuclides[n];
    nuc_dims[0] = nuc_size;
    hid_t nuc_space = H5Screate_simple(1, nuc_dims, NULL);
    hid_t nuc_set = H5Dcreate2(db, nucpath.c_str(), H5T_NATIVE_INT, nuc_space, 
                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    H5Dwrite(nuc_set, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, nuc_data);
    H5Fflush(db, H5F_SCOPE_GLOBAL);
  };


  //
  // Write out the data itself to the file
  //
  hid_t data_set, data_space, data_hyperslab;
  int data_rank = 1;
  hsize_t data_dims[1] = {1};
  hsize_t data_max_dims[1] = {H5S_UNLIMITED};
  hsize_t data_offset[1] = {0};

  size_t material_struct_size = sizeof(pyne::material_struct) + sizeof(double)*nuc_size;
  hid_t desc = H5Tcreate(H5T_COMPOUND, material_struct_size);
  hid_t comp_values_array_type = H5Tarray_create2(H5T_NATIVE_DOUBLE, 1, nuc_dims);

  // make the data table type
  H5Tinsert(desc, "mass", HOFFSET(pyne::material_struct, mass), H5T_NATIVE_DOUBLE);
  H5Tinsert(desc, "density", HOFFSET(pyne::material_struct, density), 
            H5T_NATIVE_DOUBLE);
  H5Tinsert(desc, "atoms_per_molecule", HOFFSET(pyne::material_struct, atoms_per_mol), 
            H5T_NATIVE_DOUBLE);
  H5Tinsert(desc, "comp", HOFFSET(pyne::material_struct, comp), 
            comp_values_array_type);

  material_struct * mat_data  = new material_struct[material_struct_size];
  (*mat_data).mass = mass;
  (*mat_data).density = density;
  (*mat_data).atoms_per_mol = atoms_per_molecule;
  for (int n = 0; n != nuc_size; n++) {
    if (0 < comp.count(nuclides[n]))
      (*mat_data).comp[n] = comp[nuclides[n]];
    else
      (*mat_data).comp[n] = 0.0;
  };

  // get / make the data set
  bool datapath_exists = h5wrap::path_exists(db, datapath);
  if (datapath_exists) {
    data_set = H5Dopen2(db, datapath.c_str(), H5P_DEFAULT);
    data_space = H5Dget_space(data_set);
    data_rank = H5Sget_simple_extent_dims(data_space, data_dims, data_max_dims);

    // Determine the row size.
    if (std::signbit(row))
      row_num = data_dims[0] + row;  // careful, row is negative

    if (data_dims[0] <= row_num) {
      // row == -0, extend to data set so that we can append, or
      // row_num is larger than current dimension, resize to accommodate.
      data_dims[0] = row_num + 1;
      H5Dset_extent(data_set, data_dims);
    }

    data_offset[0] = row_num;
  } else {
    // Get full space
    data_space = H5Screate_simple(1, data_dims, data_max_dims);

    // Make data set properties to enable chunking
    hid_t data_set_params = H5Pcreate(H5P_DATASET_CREATE);
    hsize_t chunk_dims[1] ={chunksize}; 
    H5Pset_chunk(data_set_params, 1, chunk_dims);
    H5Pset_deflate(data_set_params, 1);

    material_struct * data_fill_value  = new material_struct[material_struct_size];
    (*data_fill_value).mass = -1.0;
    (*data_fill_value).density= -1.0;
    (*data_fill_value).atoms_per_mol = -1.0;
    for (int n = 0; n != nuc_size; n++)
      (*data_fill_value).comp[n] = 0.0;
    H5Pset_fill_value(data_set_params, desc, &data_fill_value);

    // Create the data set
    data_set = H5Dcreate2(db, datapath.c_str(), desc, data_space, H5P_DEFAULT, 
                            data_set_params, H5P_DEFAULT);
    H5Dset_extent(data_set, data_dims);

    // Add attribute pointing to nuc path
    hid_t nuc_attr_type = H5Tcopy(H5T_C_S1);
    H5Tset_size(nuc_attr_type, nucpath.length());
    hid_t nuc_attr_space = H5Screate(H5S_SCALAR);
    hid_t nuc_attr = H5Acreate2(data_set, "nucpath", nuc_attr_type, nuc_attr_space, 
                                H5P_DEFAULT, H5P_DEFAULT);
    H5Awrite(nuc_attr, nuc_attr_type, nucpath.c_str());
    H5Fflush(db, H5F_SCOPE_GLOBAL);

    // Remember to de-allocate
    delete[] data_fill_value;
  };

  // Get the data hyperslab
  data_hyperslab = H5Dget_space(data_set);
  hsize_t data_count[1] = {1};
  H5Sselect_hyperslab(data_hyperslab, H5S_SELECT_SET, data_offset, NULL, data_count, NULL);

  // Get a memory space for writing
  hid_t mem_space = H5Screate_simple(1, data_count, data_max_dims);

  // Write the row...
  H5Dwrite(data_set, desc, mem_space, data_hyperslab, H5P_DEFAULT, mat_data);

  // Close out the Dataset
  H5Fflush(db, H5F_SCOPE_GLOBAL);
  H5Dclose(data_set);
  H5Sclose(data_space);
  H5Tclose(desc);

  //
  // Write out the metadata to the file
  //
  std::string attrpath = datapath + "_metadata";
  hid_t metadatapace, attrtype, metadataet, metadatalab, attrmemspace;
  int attrrank; 

  attrtype = H5Tvlen_create(H5T_NATIVE_CHAR);

  // get / make the data set
  bool attrpath_exists = h5wrap::path_exists(db, attrpath);
  if (attrpath_exists) {
    metadataet = H5Dopen2(db, attrpath.c_str(), H5P_DEFAULT);
    metadatapace = H5Dget_space(metadataet);
    attrrank = H5Sget_simple_extent_dims(metadatapace, data_dims, data_max_dims);

    if (data_dims[0] <= row_num) {
      // row == -0, extend to data set so that we can append, or
      // row_num is larger than current dimension, resize to accommodate.
      data_dims[0] = row_num + 1;
      H5Dset_extent(metadataet, data_dims);
    }

    data_offset[0] = row_num;
  } else {
    hid_t metadataetparams;
    hsize_t attrchunkdims [1];

    // Make data set properties to enable chunking
    metadataetparams = H5Pcreate(H5P_DATASET_CREATE);
    attrchunkdims[0] = chunksize; 
    H5Pset_chunk(metadataetparams, 1, attrchunkdims);
    H5Pset_deflate(metadataetparams, 1);

    hvl_t attrfillvalue [1];
    attrfillvalue[0].len = 3;
    attrfillvalue[0].p = (char *) "{}\n";
    H5Pset_fill_value(metadataetparams, attrtype, &attrfillvalue);

    // make dataset
    metadatapace = H5Screate_simple(1, data_dims, data_max_dims);
    metadataet = H5Dcreate2(db, attrpath.c_str(), attrtype, metadatapace, 
                         H5P_DEFAULT, metadataetparams, H5P_DEFAULT);
    H5Dset_extent(metadataet, data_dims);
  };

  // set the attr string
  hvl_t attrdata [1];
  Json::FastWriter writer;
  std::string metadatatr = writer.write(metadata);
  attrdata[0].p = (char *) metadatatr.c_str();
  attrdata[0].len = metadatatr.length();

  // write the attr
  metadatalab = H5Dget_space(metadataet);
  H5Sselect_hyperslab(metadatalab, H5S_SELECT_SET, data_offset, NULL, data_count, NULL);
  attrmemspace = H5Screate_simple(1, data_count, data_max_dims);
  H5Dwrite(metadataet, attrtype, attrmemspace, metadatalab, H5P_DEFAULT, attrdata);

  // close attr data objects
  H5Fflush(db, H5F_SCOPE_GLOBAL);
  H5Dclose(metadataet);
  H5Sclose(metadatapace);
  H5Tclose(attrtype);

  // Close out the HDF5 file
  H5Fclose(db);
  // Remember the milk!  
  // ...by which I mean to deallocate
  delete[] mat_data;
};
Code Example #19
File: test_dset_opt.c  Project: CommonLibrary/hdf5
/*-------------------------------------------------------------------------
 * Function:	test_data_conv
 *
 * Purpose:	Test data conversion
 *
 * Return:	Success:	0
 *
 *		Failure:	1
 *
 * Programmer:  Raymond Lu	
 *              30 November 2012
 *
 *-------------------------------------------------------------------------
 */
static int
test_data_conv(hid_t file)
{
    typedef struct {
	int a, b, c[4], d, e;
    } src_type_t;
    typedef struct {
	int a,    c[4],    e;
    } dst_type_t;

    hid_t       dataspace = -1, dataset = -1;
    hid_t       mem_space = -1;
    hid_t       cparms = -1, dxpl = -1;
    hsize_t     dims[2]  = {NX, NY};        
    hsize_t     maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
    hsize_t     chunk_dims[2] ={CHUNK_NX, CHUNK_NY};
    herr_t      status;
    int         i, j, n;
    const hsize_t	four = 4;
    hid_t	st=-1, dt=-1;
    hid_t       array_dt;

    unsigned    filter_mask = 0;
    src_type_t  direct_buf[CHUNK_NX][CHUNK_NY];
    dst_type_t  check_chunk[CHUNK_NX][CHUNK_NY];
    hsize_t     offset[2] = {0, 0};
    size_t      buf_size = CHUNK_NX*CHUNK_NY*sizeof(src_type_t);

    hsize_t start[2];  /* Start of hyperslab */
    hsize_t stride[2]; /* Stride of hyperslab */
    hsize_t count[2];  /* Block count */
    hsize_t block[2];  /* Block sizes */

    TESTING("data conversion for H5DOwrite_chunk");

    /*
     * Create the data space with unlimited dimensions.
     */
    if((dataspace = H5Screate_simple(RANK, dims, maxdims)) < 0)
        goto error;

    if((mem_space = H5Screate_simple(RANK, chunk_dims, NULL)) < 0)
        goto error;

    /*
     * Modify dataset creation properties, i.e. enable chunking
     */
    if((cparms = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        goto error;

    if((status = H5Pset_chunk( cparms, RANK, chunk_dims)) < 0)
        goto error;

    /* Build hdf5 datatypes */
    array_dt = H5Tarray_create2(H5T_NATIVE_INT, 1, &four);
    if((st = H5Tcreate(H5T_COMPOUND, sizeof(src_type_t))) < 0 ||
            H5Tinsert(st, "a", HOFFSET(src_type_t, a), H5T_NATIVE_INT) < 0 ||
            H5Tinsert(st, "b", HOFFSET(src_type_t, b), H5T_NATIVE_INT) < 0 ||
            H5Tinsert(st, "c", HOFFSET(src_type_t, c), array_dt) < 0 ||
            H5Tinsert(st, "d", HOFFSET(src_type_t, d), H5T_NATIVE_INT) < 0 ||
            H5Tinsert(st, "e", HOFFSET(src_type_t, e), H5T_NATIVE_INT) < 0)
        goto error;

    if(H5Tclose(array_dt) < 0)
        goto error;

    array_dt = H5Tarray_create2(H5T_NATIVE_INT, 1, &four);
    if((dt = H5Tcreate(H5T_COMPOUND, sizeof(dst_type_t))) < 0 ||
            H5Tinsert(dt, "a", HOFFSET(dst_type_t, a), H5T_NATIVE_INT) < 0 ||
            H5Tinsert(dt, "c", HOFFSET(dst_type_t, c), array_dt) < 0 ||
            H5Tinsert(dt, "e", HOFFSET(dst_type_t, e), H5T_NATIVE_INT) < 0)
        goto error;

    if(H5Tclose(array_dt) < 0)
        goto error;

    /*
     * Create a new dataset within the file using cparms
     * creation properties.
     */
    if((dataset = H5Dcreate2(file, DATASETNAME4, st, dataspace, H5P_DEFAULT,
			cparms, H5P_DEFAULT)) < 0)
        goto error;

    if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0)
        goto error;

    /* Initialize data for one chunk */
    for(i = n = 0; i < CHUNK_NX; i++) {
        for(j = 0; j < CHUNK_NY; j++) {
            (direct_buf[i][j]).a    = i*j+0;
            (direct_buf[i][j]).b    = i*j+1;
            (direct_buf[i][j]).c[0] = i*j+2;
            (direct_buf[i][j]).c[1] = i*j+3;
            (direct_buf[i][j]).c[2] = i*j+4;
            (direct_buf[i][j]).c[3] = i*j+5;
            (direct_buf[i][j]).d    = i*j+6;
            (direct_buf[i][j]).e    = i*j+7;
        }
    }

    /* write the chunk data to dataset, using the direct writing function. 
     * There should be no data conversion involved. */
    offset[0] = CHUNK_NX;
    offset[1] = CHUNK_NY;

    if((status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, direct_buf)) < 0)
        goto error;

    if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0) 
        goto error;

    if(H5Dclose(dataset) < 0)
        goto error;

    if((dataset = H5Dopen2(file, DATASETNAME4, H5P_DEFAULT)) < 0)
        goto error;

    /*
     * Select hyperslab for the chunk just written in the file
     */
    start[0]  = CHUNK_NX; start[1]  = CHUNK_NY;
    stride[0] = 1; stride[1] = 1;
    count[0]  = 1; count[1]  = 1;
    block[0]  = CHUNK_NX; block[1]  = CHUNK_NY;
    if((status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, stride, count, block)) < 0)
        goto error;

    /* Read the chunk back. Data should be converted */
    if((status = H5Dread(dataset, dt, mem_space, dataspace, H5P_DEFAULT, check_chunk)) < 0)
        goto error;

    /* Check that the values read are the same as the values written */
    for(i = 0; i < CHUNK_NX; i++) {
        for(j = 0; j < CHUNK_NY; j++) {
	    if ((direct_buf[i][j]).a    != (check_chunk[i][j]).a    ||
	        (direct_buf[i][j]).c[0] != (check_chunk[i][j]).c[0] ||
	        (direct_buf[i][j]).c[1] != (check_chunk[i][j]).c[1] ||
	        (direct_buf[i][j]).c[2] != (check_chunk[i][j]).c[2] ||
	        (direct_buf[i][j]).c[3] != (check_chunk[i][j]).c[3] ||
	        (direct_buf[i][j]).e    != (check_chunk[i][j]).e) {
                printf("    1. Read different values than written.");
                printf("    At index %d,%d\n", i, j);
	        printf("    src={a=%d, b=%d, c=[%d,%d,%d,%d], d=%d, e=%d\n",
                   (direct_buf[i][j]).a, (direct_buf[i][j]).b, (direct_buf[i][j]).c[0], (direct_buf[i][j]).c[1], 
                   (direct_buf[i][j]).c[2], (direct_buf[i][j]).c[3], (direct_buf[i][j]).d, (direct_buf[i][j]).e);
	        printf("    dst={a=%d, c=[%d,%d,%d,%d], e=%d\n",
                   (check_chunk[i][j]).a, (check_chunk[i][j]).c[0], (check_chunk[i][j]).c[1], (check_chunk[i][j]).c[2], 
                   (check_chunk[i][j]).c[3], (check_chunk[i][j]).e);

	    goto error;
            }
        }
    }

    /*
     * Close/release resources.
     */
    H5Dclose(dataset);
    H5Sclose(mem_space);
    H5Sclose(dataspace);
    H5Pclose(cparms);
    H5Pclose(dxpl);
    H5Tclose(st);
    H5Tclose(dt);


    PASSED();
    return 0;

error:
    H5E_BEGIN_TRY {
        H5Dclose(dataset);
        H5Sclose(mem_space);
        H5Sclose(dataspace);
        H5Pclose(cparms);
        H5Pclose(dxpl);
        H5Tclose(st);
        H5Tclose(dt);
    } H5E_END_TRY;

    return 1;
}
Code Example #20
File: general.c  Project: 00liujj/petsc
PetscErrorCode ISView_General_HDF5(IS is, PetscViewer viewer)
{
  hid_t           filespace;  /* file dataspace identifier */
  hid_t           chunkspace; /* chunk dataset property identifier */
  hid_t           plist_id;   /* property list identifier */
  hid_t           dset_id;    /* dataset identifier */
  hid_t           memspace;   /* memory dataspace identifier */
  hid_t           inttype;    /* int type (H5T_NATIVE_INT or H5T_NATIVE_LLONG) */
  hid_t           file_id, group;
  herr_t          status;
  hsize_t         dim, maxDims[3], dims[3], chunkDims[3], count[3],offset[3];
  PetscInt        bs, N, n, timestep, low;
  const PetscInt *ind;
  const char     *isname;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
  ierr = PetscViewerHDF5OpenGroup(viewer, &file_id, &group);CHKERRQ(ierr);
  ierr = PetscViewerHDF5GetTimestep(viewer, &timestep);CHKERRQ(ierr);

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep+1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  ierr = ISGetSize(is, &N);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is, &n);CHKERRQ(ierr);
  ierr = PetscHDF5IntCast(N/bs,dims + dim);CHKERRQ(ierr);

  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (bs >= 1) {
    dims[dim]      = bs;
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  filespace = H5Screate_simple(dim, dims, maxDims);
  if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate_simple()");

#if defined(PETSC_USE_64BIT_INDICES)
  inttype = H5T_NATIVE_LLONG;
#else
  inttype = H5T_NATIVE_INT;
#endif

  /* Create the dataset with default properties and close filespace */
  ierr = PetscObjectGetName((PetscObject) is, &isname);CHKERRQ(ierr);
  if (!H5Lexists(group, isname, H5P_DEFAULT)) {
    /* Create chunk */
    chunkspace = H5Pcreate(H5P_DATASET_CREATE);
    if (chunkspace == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot H5Pcreate()");
    status = H5Pset_chunk(chunkspace, dim, chunkDims);CHKERRQ(status);

#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
    dset_id = H5Dcreate2(group, isname, inttype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT);
#else
    dset_id = H5Dcreate(group, isname, inttype, filespace, H5P_DEFAULT);
#endif
    if (dset_id == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot H5Dcreate2()");
    status = H5Pclose(chunkspace);CHKERRQ(status);
  } else {
    dset_id = H5Dopen2(group, isname, H5P_DEFAULT);
    status  = H5Dset_extent(dset_id, dims);CHKERRQ(status);
  }
  status = H5Sclose(filespace);CHKERRQ(status);

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  ierr = PetscHDF5IntCast(n/bs,count + dim);CHKERRQ(ierr);
  ++dim;
  if (bs >= 1) {
    count[dim] = bs;
    ++dim;
  }
  if (n > 0) {
    memspace = H5Screate_simple(dim, count, NULL);
    if (memspace == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot H5Screate_simple()");
  } else {
    /* Can't create dataspace with zero for any dimension, so create null dataspace. */
    memspace = H5Screate(H5S_NULL);
    if (memspace == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot H5Screate()");
  }

  /* Select hyperslab in the file */
  ierr = PetscLayoutGetRange(is->map, &low, NULL);CHKERRQ(ierr);
  dim  = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  ierr = PetscHDF5IntCast(low/bs,offset + dim);CHKERRQ(ierr);
  ++dim;
  if (bs >= 1) {
    offset[dim] = 0;
    ++dim;
  }
  if (n > 0) {
    filespace = H5Dget_space(dset_id);
    if (filespace == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot H5Dget_space()");
    status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);
  } else {
    /* Create null filespace to match null memspace. */
    filespace = H5Screate(H5S_NULL);
    if (filespace == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot H5Screate(H5S_NULL)");
  }

  /* Create property list for collective dataset write */
  plist_id = H5Pcreate(H5P_DATASET_XFER);
  if (plist_id == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot H5Pcreate()");
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
#endif
  /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  ierr   = ISGetIndices(is, &ind);CHKERRQ(ierr);
  status = H5Dwrite(dset_id, inttype, memspace, filespace, plist_id, ind);CHKERRQ(status);
  status = H5Fflush(file_id, H5F_SCOPE_GLOBAL);CHKERRQ(status);
  ierr   = ISRestoreIndices(is, &ind);CHKERRQ(ierr);

  /* Close/release resources */
  if (group != file_id) {status = H5Gclose(group);CHKERRQ(status);}
  status = H5Pclose(plist_id);CHKERRQ(status);
  status = H5Sclose(filespace);CHKERRQ(status);
  status = H5Sclose(memspace);CHKERRQ(status);
  status = H5Dclose(dset_id);CHKERRQ(status);
  ierr = PetscInfo1(is, "Wrote IS object with name %s\n", isname);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
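The writer above builds an extensible time-series layout: when a timestep is present, the leading dimension is created with H5S_UNLIMITED as its maximum, the dataset is chunked one timestep per chunk, and H5Dset_extent() grows it before the next write. Below is a minimal sketch of just that layout; the file handle, the dataset name "ts_data" and the row width `ncols` are placeholders and are not part of the PETSc code.

/* Minimal sketch, assuming a placeholder file handle and names. */
#include "hdf5.h"

static hid_t create_timeseries_dataset(hid_t file_id, hsize_t ncols)
{
    hsize_t dims[2]    = {1, ncols};              /* start with one time step        */
    hsize_t maxdims[2] = {H5S_UNLIMITED, ncols};  /* unlimited number of time steps  */
    hsize_t chunk[2]   = {1, ncols};              /* one chunk per time step         */
    hid_t   space, dcpl, dset;

    space = H5Screate_simple(2, dims, maxdims);
    dcpl  = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl, 2, chunk);                 /* chunking is required to extend  */
    dset  = H5Dcreate2(file_id, "ts_data", H5T_NATIVE_INT, space,
                       H5P_DEFAULT, dcpl, H5P_DEFAULT);
    H5Pclose(dcpl);
    H5Sclose(space);
    return dset;
}

/* Before writing time step t (0-based), grow the dataset and select its slab:
 *     hsize_t newdims[2] = {t + 1, ncols};
 *     H5Dset_extent(dset, newdims);
 * then select offset {t, 0}, count {1, ncols} on H5Dget_space(dset) and H5Dwrite. */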
コード例 #21
0
int H5mdfile::H5_Fflush(int argc, char **argv, Tcl_Interp *interp)
{
	H5Fflush(dataset_id, H5F_SCOPE_LOCAL);
	return TCL_OK;
}
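H5Fflush() accepts any object identifier that lives in the target file (a dataset id works just as well as the file id), and the second argument selects the scope: H5F_SCOPE_LOCAL flushes only that file, while H5F_SCOPE_GLOBAL also flushes any files mounted on it. A small hedged sketch contrasting the two calls; the file and dataset names are placeholders.

/* Hedged sketch; "example.h5" and the dataset name are placeholders. */
#include "hdf5.h"

int flush_demo(void)
{
    hid_t   file = H5Fcreate("example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    hsize_t dims[1] = {8};
    hid_t   space = H5Screate_simple(1, dims, NULL);
    hid_t   dset  = H5Dcreate2(file, "d", H5T_NATIVE_INT, space,
                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    /* Either identifier may be passed; both name the same underlying file. */
    if (H5Fflush(dset, H5F_SCOPE_LOCAL) < 0)  return -1;  /* flush this file only      */
    if (H5Fflush(file, H5F_SCOPE_GLOBAL) < 0) return -1;  /* include mounted files too */

    H5Dclose(dset);
    H5Sclose(space);
    H5Fclose(file);
    return 0;
}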
コード例 #22
0
ファイル: writeAux_p.c プロジェクト: tiagopereira/rh
/* ------- begin --------------------------   init_aux_new.c --   --- */
void init_aux_new(void) {
  /* Creates the HDF5 file for the auxiliary data */
  const char routineName[] = "init_aux_new";
  unsigned int *tmp;
  double   *tmp_double;
  int       i;
  hid_t     plist, ncid, file_dspace, ncid_atom, ncid_mol;
  hid_t     id_x, id_y, id_z, id_n, id_tmp;
  hsize_t   dims[4];
  char      group_name[ARR_STRLEN];
  Atom     *atom;
  Molecule *molecule;

  /* Create the file  */
  if (( plist = H5Pcreate(H5P_FILE_ACCESS) ) < 0) HERR(routineName);
  if (( H5Pset_fapl_mpio(plist, mpi.comm, mpi.info) ) < 0) HERR(routineName);
  if (( ncid = H5Fcreate(AUX_FILE, H5F_ACC_TRUNC, H5P_DEFAULT,
                         plist) ) < 0) HERR(routineName);
  if (( H5Pclose(plist) ) < 0) HERR(routineName);


  /* --- Definitions for the root group --- */
  /* dimensions as attributes */
  if (( H5LTset_attribute_int(ncid, "/", "nx", &mpi.nx, 1) ) < 0)
      HERR(routineName);
  if (( H5LTset_attribute_int(ncid, "/", "ny", &mpi.ny, 1) ) < 0)
      HERR(routineName);
  if (( H5LTset_attribute_int(ncid, "/", "nz", (int *) &infile.nz, 1 )) < 0)
      HERR(routineName);
  /* attributes */
  if (( H5LTset_attribute_string(ncid, "/", "atmosID", atmos.ID)) < 0)
    HERR(routineName);
  if (( H5LTset_attribute_string(ncid, "/", "rev_id", mpi.rev_id) ) < 0)
    HERR(routineName);

  /* Create arrays for multiple-atom/molecule output */
  io.aux_atom_ncid   = (hid_t *) malloc(atmos.Nactiveatom * sizeof(hid_t));
  io.aux_mol_ncid    = (hid_t *) malloc(atmos.Nactivemol  * sizeof(hid_t));
  if (input.p15d_wpop) {
      io.aux_atom_pop    = (hid_t *) malloc(atmos.Nactiveatom * sizeof(hid_t));
      io.aux_atom_poplte = (hid_t *) malloc(atmos.Nactiveatom * sizeof(hid_t));
      io.aux_mol_pop     = (hid_t *) malloc(atmos.Nactivemol  * sizeof(hid_t));
      io.aux_mol_poplte  = (hid_t *) malloc(atmos.Nactivemol  * sizeof(hid_t));
  }
  if (input.p15d_wrates) {
      io.aux_atom_RijL   = (hid_t *) malloc(atmos.Nactiveatom * sizeof(hid_t));
      io.aux_atom_RjiL   = (hid_t *) malloc(atmos.Nactiveatom * sizeof(hid_t));
      io.aux_atom_RijC   = (hid_t *) malloc(atmos.Nactiveatom * sizeof(hid_t));
      io.aux_atom_RjiC   = (hid_t *) malloc(atmos.Nactiveatom * sizeof(hid_t));
  }


  /* Fill value */
  if (( plist = H5Pcreate(H5P_DATASET_CREATE) ) < 0) HERR(routineName);
  if (( H5Pset_fill_value(plist, H5T_NATIVE_FLOAT, &FILLVALUE) ) < 0)
      HERR(routineName);
  if (( H5Pset_alloc_time(plist, H5D_ALLOC_TIME_EARLY) ) < 0) HERR(routineName);
  if (( H5Pset_fill_time(plist, H5D_FILL_TIME_ALLOC) ) < 0) HERR(routineName);

  /* --- Group loop over active ATOMS --- */
  for (i=0; i < atmos.Nactiveatom; i++) {
    atom = atmos.activeatoms[i];
    /* Get group name */
    sprintf(group_name, (atom->ID[1] == ' ') ? "atom_%.1s" : "atom_%.2s",
            atom->ID);
    if (( ncid_atom = H5Gcreate(ncid, group_name, H5P_DEFAULT, H5P_DEFAULT,
                                H5P_DEFAULT) ) < 0) HERR(routineName);
    io.aux_atom_ncid[i] = ncid_atom;
    /* --- dimensions as attributes --- */
    if (( H5LTset_attribute_int(ncid_atom, ".", "nlevel",
                                &atom->Nlevel, 1)) < 0) HERR(routineName);
    if (( H5LTset_attribute_int(ncid_atom, ".", "nline",
                                &atom->Nline, 1)) < 0) HERR(routineName);
    if (( H5LTset_attribute_int(ncid_atom, ".", "ncontinuum",
                                &atom->Ncont, 1)) < 0) HERR(routineName);
    /* --- dimension datasets --- */
    dims[0] = mpi.nx;
    if (( H5LTmake_dataset(ncid_atom, X_NAME, 1, dims, H5T_NATIVE_DOUBLE,
                           geometry.xscale) ) < 0)  HERR(routineName);
    if (( id_x = H5Dopen2(ncid_atom, X_NAME, H5P_DEFAULT)) < 0) HERR(routineName);
    dims[0] = mpi.ny;
    if (( H5LTmake_dataset(ncid_atom, Y_NAME, 1, dims, H5T_NATIVE_DOUBLE,
                           geometry.yscale) ) < 0)  HERR(routineName);
    if (( id_y = H5Dopen2(ncid_atom, Y_NAME, H5P_DEFAULT)) < 0) HERR(routineName);
    dims[0] = infile.nz;
    tmp_double = (double *) calloc(infile.nz , sizeof(double));
    if (( H5LTmake_dataset(ncid_atom, ZOUT_NAME, 1, dims, H5T_NATIVE_DOUBLE,
                           tmp_double) ) < 0)  HERR(routineName);
    free(tmp_double);
    if (( id_z = H5Dopen2(ncid_atom, ZOUT_NAME, H5P_DEFAULT)) < 0) HERR(routineName);
    dims[0] = atom->Nlevel;
    tmp = (unsigned int *) calloc(atom->Nlevel , sizeof(unsigned int));
    if (( H5LTmake_dataset(ncid_atom, LEVEL_NAME, 1, dims, H5T_NATIVE_UINT,
                           tmp) ) < 0)  HERR(routineName);
    free(tmp);
    dims[0] = atom->Nline;
    tmp = (unsigned int *) calloc(atom->Nline , sizeof(unsigned int));
    if (( H5LTmake_dataset(ncid_atom, LINE_NAME, 1, dims, H5T_NATIVE_UINT,
                           tmp) ) < 0)  HERR(routineName);
    free(tmp);
    if (atom->Ncont > 0) {
        dims[0] = atom->Ncont;
        tmp = (unsigned int *) calloc(atom->Ncont , sizeof(unsigned int));
        if (( H5LTmake_dataset(ncid_atom, CONT_NAME, 1, dims, H5T_NATIVE_UINT,
                               tmp) ) < 0)  HERR(routineName);
        free(tmp);
    }
    /* For compatibility with netCDF readers, only use dataset as dimension */
    if (( H5LTset_attribute_string(ncid_atom, ZOUT_NAME, "NAME",
                                   NETCDF_COMPAT) ) < 0) HERR(routineName);
    if (( H5LTset_attribute_string(ncid_atom, LEVEL_NAME, "NAME",
                                   NETCDF_COMPAT) ) < 0) HERR(routineName);
    if (( H5LTset_attribute_string(ncid_atom, LINE_NAME, "NAME",
                                   NETCDF_COMPAT) ) < 0) HERR(routineName);
    if (atom->Ncont > 0) {
        if (( H5LTset_attribute_string(ncid_atom, CONT_NAME, "NAME",
                                       NETCDF_COMPAT) ) < 0) HERR(routineName);
    }
    /* --- variables --- */
    dims[0] = atom->Nlevel;
    dims[1] = mpi.nx;
    dims[2] = mpi.ny;
    dims[3] = infile.nz;
    /* Populations */
    if (input.p15d_wpop) {
      if (( file_dspace = H5Screate_simple(4, dims, NULL) ) < 0)
        HERR(routineName);
      if (( id_n = H5Dopen2(ncid_atom, LEVEL_NAME,
                            H5P_DEFAULT)) < 0) HERR(routineName);
      if (atom->n != NULL) {
        if (( id_tmp = H5Dcreate(ncid_atom, POP_NAME, H5T_NATIVE_FLOAT,
                                 file_dspace, H5P_DEFAULT,
                                 plist, H5P_DEFAULT)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_n, 0)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_x, 1)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_y, 2)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_z, 3)) < 0) HERR(routineName);
        if (( H5LTset_attribute_float(ncid_atom, POP_NAME, "_FillValue",
                                      &FILLVALUE, 1) ) < 0) HERR(routineName);
        io.aux_atom_pop[i] = id_tmp;
      }
      if (atom->nstar != NULL) {
        if (( id_tmp = H5Dcreate(ncid_atom, POPLTE_NAME, H5T_NATIVE_FLOAT,
                                 file_dspace, H5P_DEFAULT,
                                 plist, H5P_DEFAULT)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_n, 0)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_x, 1)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_y, 2)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_z, 3)) < 0) HERR(routineName);
        if (( H5LTset_attribute_float(ncid_atom, POPLTE_NAME, "_FillValue",
                                      &FILLVALUE, 1) ) < 0) HERR(routineName);
        io.aux_atom_poplte[i] = id_tmp;
      }
      if (( H5Dclose(id_n) ) < 0) HERR(routineName);
      if (( H5Sclose(file_dspace) ) < 0) HERR(routineName);
    }
    if (input.p15d_wrates) {
      /* Radiative rates */
      dims[0] = atom->Nline;
      dims[1] = mpi.nx;
      dims[2] = mpi.ny;
      dims[3] = infile.nz;
      if (( file_dspace = H5Screate_simple(4, dims, NULL) ) < 0) HERR(routineName);
      if (( id_n = H5Dopen2(ncid_atom, LINE_NAME,
                            H5P_DEFAULT)) < 0) HERR(routineName);
      if (( id_tmp = H5Dcreate(ncid_atom, RIJ_L_NAME, H5T_NATIVE_FLOAT,
                               file_dspace, H5P_DEFAULT, plist,
                               H5P_DEFAULT)) < 0) HERR(routineName);
      if (( H5DSattach_scale(id_tmp, id_n, 0)) < 0) HERR(routineName);
      if (( H5DSattach_scale(id_tmp, id_x, 1)) < 0) HERR(routineName);
      if (( H5DSattach_scale(id_tmp, id_y, 2)) < 0) HERR(routineName);
      if (( H5DSattach_scale(id_tmp, id_z, 3)) < 0) HERR(routineName);
      if (( H5LTset_attribute_float(ncid_atom, RIJ_L_NAME, "_FillValue",
                                    &FILLVALUE, 1) ) < 0) HERR(routineName);
      io.aux_atom_RijL[i] = id_tmp;
      if (( id_tmp = H5Dcreate(ncid_atom, RJI_L_NAME, H5T_NATIVE_FLOAT,
                               file_dspace, H5P_DEFAULT, plist,
                               H5P_DEFAULT)) < 0) HERR(routineName);
      if (( H5DSattach_scale(id_tmp, id_n, 0)) < 0) HERR(routineName);
      if (( H5DSattach_scale(id_tmp, id_x, 1)) < 0) HERR(routineName);
      if (( H5DSattach_scale(id_tmp, id_y, 2)) < 0) HERR(routineName);
      if (( H5DSattach_scale(id_tmp, id_z, 3)) < 0) HERR(routineName);
      if (( H5LTset_attribute_float(ncid_atom, RJI_L_NAME, "_FillValue",
                                    &FILLVALUE, 1) ) < 0) HERR(routineName);
      io.aux_atom_RjiL[i] = id_tmp;
      if (( H5Dclose(id_n) ) < 0) HERR(routineName);
      if (( H5Sclose(file_dspace) ) < 0) HERR(routineName);
      if (atom->Ncont > 0) {
          dims[0] = atom->Ncont;
          if (( file_dspace = H5Screate_simple(4, dims, NULL) ) < 0)
            HERR(routineName);
          if (( id_n = H5Dopen2(ncid_atom, CONT_NAME,
                                H5P_DEFAULT)) < 0) HERR(routineName);
          if (( id_tmp = H5Dcreate(ncid_atom, RIJ_C_NAME, H5T_NATIVE_FLOAT,
                                   file_dspace, H5P_DEFAULT,  plist,
                                   H5P_DEFAULT)) < 0) HERR(routineName);
          if (( H5DSattach_scale(id_tmp, id_n, 0)) < 0) HERR(routineName);
          if (( H5DSattach_scale(id_tmp, id_x, 1)) < 0) HERR(routineName);
          if (( H5DSattach_scale(id_tmp, id_y, 2)) < 0) HERR(routineName);
          if (( H5DSattach_scale(id_tmp, id_z, 3)) < 0) HERR(routineName);
          if (( H5LTset_attribute_float(ncid_atom, RIJ_C_NAME, "_FillValue",
                                        &FILLVALUE, 1) ) < 0) HERR(routineName);
          io.aux_atom_RijC[i] = id_tmp;
          if (( id_tmp = H5Dcreate(ncid_atom, RJI_C_NAME, H5T_NATIVE_FLOAT,
                                   file_dspace, H5P_DEFAULT, plist,
                                   H5P_DEFAULT)) < 0) HERR(routineName);
          if (( H5DSattach_scale(id_tmp, id_n, 0)) < 0) HERR(routineName);
          if (( H5DSattach_scale(id_tmp, id_x, 1)) < 0) HERR(routineName);
          if (( H5DSattach_scale(id_tmp, id_y, 2)) < 0) HERR(routineName);
          if (( H5DSattach_scale(id_tmp, id_z, 3)) < 0) HERR(routineName);
          if (( H5LTset_attribute_float(ncid_atom, RJI_C_NAME, "_FillValue",
                                        &FILLVALUE, 1) ) < 0) HERR(routineName);
          io.aux_atom_RjiC[i] = id_tmp;
          if (( H5Dclose(id_n) ) < 0) HERR(routineName);
          if (( H5Sclose(file_dspace) ) < 0) HERR(routineName);
      }
    }
    if (( H5Dclose(id_x) ) < 0) HERR(routineName);
    if (( H5Dclose(id_y) ) < 0) HERR(routineName);
    if (( H5Dclose(id_z) ) < 0) HERR(routineName);
  }   /* end active ATOMS loop */

  /* --- Group loop over active MOLECULES --- */
  for (i=0; i < atmos.Nactivemol; i++) {
    molecule = atmos.activemols[i];
    /* Get group name */
    sprintf( group_name, "molecule_%s", molecule->ID);
    if (( ncid_mol = H5Gcreate(ncid, group_name, H5P_DEFAULT, H5P_DEFAULT,
                                H5P_DEFAULT) ) < 0) HERR(routineName);
    io.aux_mol_ncid[i] = ncid_mol;
    /* --- dimensions as attributes --- */
    if (( H5LTset_attribute_int(ncid_mol, ".", "nlevel_vibr",
                                &molecule->Nv, 1)) < 0) HERR(routineName);
    if (( H5LTset_attribute_int(ncid_mol, ".", "nline_molecule",
                                &molecule->Nrt, 1)) < 0) HERR(routineName);
    if (( H5LTset_attribute_int(ncid_mol, ".", "nJ",
                                &molecule->NJ, 1)) < 0) HERR(routineName);
    /* --- dimension datasets --- */
    dims[0] = mpi.nx;
    if (( H5LTmake_dataset(ncid_mol, X_NAME, 1, dims, H5T_NATIVE_DOUBLE,
                           geometry.xscale) ) < 0)  HERR(routineName);
    if (( id_x = H5Dopen2(ncid_mol, X_NAME, H5P_DEFAULT)) < 0) HERR(routineName);
    dims[0] = mpi.ny;
    if (( H5LTmake_dataset(ncid_mol, Y_NAME, 1, dims, H5T_NATIVE_DOUBLE,
                           geometry.yscale) ) < 0)  HERR(routineName);
    if (( id_y = H5Dopen2(ncid_mol, Y_NAME, H5P_DEFAULT)) < 0) HERR(routineName);
    dims[0] = infile.nz;
    tmp_double = (double *) calloc(infile.nz , sizeof(double));
    if (( H5LTmake_dataset(ncid_mol, ZOUT_NAME, 1, dims, H5T_NATIVE_DOUBLE,
                           tmp_double) ) < 0)  HERR(routineName);
    free(tmp_double);
    if (( id_z = H5Dopen2(ncid_mol, ZOUT_NAME, H5P_DEFAULT)) < 0) HERR(routineName);
    dims[0] = molecule->Nv;
    tmp = (unsigned int *) calloc(molecule->Nv, sizeof(unsigned int));
    if (( H5LTmake_dataset(ncid_mol, VLEVEL_NAME, 1, dims, H5T_NATIVE_UINT,
                           tmp) ) < 0)  HERR(routineName);
    free(tmp);
    dims[0] = molecule->Nrt;
    tmp = (unsigned int *) calloc(molecule->Nrt, sizeof(unsigned int));
    if (( H5LTmake_dataset(ncid_mol, VLINE_NAME, 1, dims, H5T_NATIVE_UINT,
                           tmp) ) < 0)  HERR(routineName);
    free(tmp);
    dims[0] = molecule->NJ;
    tmp = (unsigned int *) calloc(molecule->NJ, sizeof(unsigned int));
    if (( H5LTmake_dataset(ncid_mol, NJ_NAME, 1, dims, H5T_NATIVE_UINT,
                           tmp) ) < 0)  HERR(routineName);
    free(tmp);
    /* For compatibility with netCDF readers, only use dataset as dimension */
    if (( H5LTset_attribute_string(ncid_mol, ZOUT_NAME, "NAME",
                                   NETCDF_COMPAT) ) < 0) HERR(routineName);
    if (( H5LTset_attribute_string(ncid_mol, VLEVEL_NAME, "NAME",
                                   NETCDF_COMPAT) ) < 0) HERR(routineName);
    if (( H5LTset_attribute_string(ncid_mol, VLINE_NAME, "NAME",
                                   NETCDF_COMPAT) ) < 0) HERR(routineName);
    if (( H5LTset_attribute_string(ncid_mol, NJ_NAME, "NAME",
                                   NETCDF_COMPAT) ) < 0) HERR(routineName);
    /* --- variables --- */
    dims[0] = molecule->Nv;
    dims[1] = mpi.nx;
    dims[2] = mpi.ny;
    dims[3] = infile.nz;
    /* Populations */
    if (input.p15d_wpop) {
      if (( file_dspace = H5Screate_simple(4, dims, NULL) ) < 0)
        HERR(routineName);
      if (( id_n = H5Dopen2(ncid_mol, VLEVEL_NAME,
                            H5P_DEFAULT)) < 0) HERR(routineName);
      if (molecule->nv != NULL) {
        if (( id_tmp = H5Dcreate(ncid_mol, POP_NAME, H5T_NATIVE_FLOAT,
                                 file_dspace, H5P_DEFAULT, plist,
                                 H5P_DEFAULT)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_n, 0)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_x, 1)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_y, 2)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_z, 3)) < 0) HERR(routineName);
        io.aux_mol_pop[i] = id_tmp;
      }
      if (molecule->nvstar != NULL) {
        if (( id_tmp = H5Dcreate(ncid_mol, POPLTE_NAME, H5T_NATIVE_FLOAT,
                                 file_dspace, H5P_DEFAULT, plist,
                                 H5P_DEFAULT)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_n, 0)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_x, 1)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_y, 2)) < 0) HERR(routineName);
        if (( H5DSattach_scale(id_tmp, id_z, 3)) < 0) HERR(routineName);
        io.aux_mol_poplte[i] = id_tmp;
      }
      if (( H5Dclose(id_n) ) < 0) HERR(routineName);
      if (( H5Sclose(file_dspace) ) < 0) HERR(routineName);
      // TODO:  molecule->Ediss, molecule->Tmin, molecule->Tmax
    }
    if (( H5Dclose(id_x) ) < 0) HERR(routineName);
    if (( H5Dclose(id_y) ) < 0) HERR(routineName);
    if (( H5Dclose(id_z) ) < 0) HERR(routineName);
  } /* end active MOLECULES loop */
  io.aux_ncid = ncid;   /* Copy stuff to the IO data struct */
  if (( H5Pclose(plist) ) < 0) HERR(routineName);  /* Free hdf5 resources */
  /* Flush ensures file is created in case of crash */
  if (( H5Fflush(ncid, H5F_SCOPE_LOCAL) ) < 0) HERR(routineName);
  return;
}
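Before filling any data, the routine above sets up two property lists: an MPI-IO file-access list so every rank creates the same file collectively, and a dataset-creation list with a fill value, early allocation and fill-at-allocation, so datasets are fully materialized on disk; the final H5Fflush() then leaves a readable skeleton even if the run crashes later. A condensed, hedged sketch of that setup follows; the file name "aux.h5" and the fill value are placeholders, and an MPI-enabled build of HDF5 is assumed.

/* Hedged sketch of the property-list setup; names are placeholders. */
#include <mpi.h>
#include "hdf5.h"

hid_t create_parallel_file_with_fill(MPI_Comm comm, MPI_Info info, hid_t *dcpl_out)
{
    float fill = -1.0f;                            /* placeholder fill value          */
    hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(fapl, comm, info);            /* collective MPI-IO driver        */
    hid_t fid = H5Fcreate("aux.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    H5Pclose(fapl);

    hid_t dcpl = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_fill_value(dcpl, H5T_NATIVE_FLOAT, &fill);
    H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY); /* allocate space at create time   */
    H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC);   /* write the fill value on alloc   */
    *dcpl_out = dcpl;

    H5Fflush(fid, H5F_SCOPE_LOCAL);                /* leave a readable skeleton on disk */
    return fid;
}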
コード例 #23
0
ファイル: qh5file.cpp プロジェクト: NanoSim/Porto
bool QH5File :: flush()
{
  auto status = H5Fflush (fileId, H5F_SCOPE_GLOBAL);
  return (status >= 0);
}
コード例 #24
0
ファイル: t_pflush1.c プロジェクト: Starlink/hdf5
/*-------------------------------------------------------------------------
 * Function:    main
 *
 * Purpose:    Part 1 of a two-part H5Fflush() test.
 *
 * Return:    Success:    0
 *
 *        Failure:    1
 *
 * Programmer:    Robb Matzke
 *              Friday, October 23, 1998
 *
 * Modifications:
 *         Leon Arber
 *         Sept. 26, 2006, expand test to check for failure if H5Fflush is not called.
 *
 *
 *-------------------------------------------------------------------------
 */
int
main(int argc, char* argv[])
{
    hid_t file1, file2, fapl;
    MPI_File    *mpifh_p = NULL;
    char    name[1024];
    const char  *envval = NULL;
    int mpi_size, mpi_rank;
    MPI_Comm comm  = MPI_COMM_WORLD;
    MPI_Info info  = MPI_INFO_NULL;

    MPI_Init(&argc, &argv);
    MPI_Comm_size(comm, &mpi_size);
    MPI_Comm_rank(comm, &mpi_rank);

    fapl = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(fapl, comm, info);

    if(mpi_rank == 0)
    TESTING("H5Fflush (part1)");
    envval = HDgetenv("HDF5_DRIVER");
    if(envval == NULL)
        envval = "nomatch";
    if(HDstrcmp(envval, "split")) {
    /* Create the file */
    h5_fixname(FILENAME[0], fapl, name, sizeof name);
    file1 = create_file(name, fapl);
    /* Flush and exit without closing the library */
    if(H5Fflush(file1, H5F_SCOPE_GLOBAL) < 0) goto error;

    /* Create the other file which will not be flushed */
    h5_fixname(FILENAME[1], fapl, name, sizeof name);
    file2 = create_file(name, fapl);


    if(mpi_rank == 0)
        PASSED();
    fflush(stdout);
    fflush(stderr);
    } /* end if */
    else {
        SKIPPED();
        puts("    Test not compatible with current Virtual File Driver");
    } /* end else */

    /*
     * Some systems like AIX do not like files not closed when MPI_Finalize
     * is called.  So, we need to get the MPI file handles, close them by hand.
     * Then the _exit is still needed to stop at_exit from happening in some systems.
     * Note that MPIO VFD returns the address of the file-handle in the VFD struct
     * because MPI_File_close wants to modify the file-handle variable.
     */

    /* close file1 */
    if(H5Fget_vfd_handle(file1, fapl, (void **)&mpifh_p) < 0) {
    printf("H5Fget_vfd_handle for file1 failed\n");
    goto error;
    } /* end if */
    if(MPI_File_close(mpifh_p) != MPI_SUCCESS) {
    printf("MPI_File_close for file1 failed\n");
    goto error;
    } /* end if */
    /* close file2 */
    if(H5Fget_vfd_handle(file2, fapl, (void **)&mpifh_p) < 0) {
    printf("H5Fget_vfd_handle for file2 failed\n");
    goto error;
    } /* end if */
    if(MPI_File_close(mpifh_p) != MPI_SUCCESS) {
    printf("MPI_File_close for file2 failed\n");
    goto error;
    } /* end if */

    fflush(stdout);
    fflush(stderr);
    HD_exit(0);

error:
    fflush(stdout);
    fflush(stderr);
    HD_exit(1);
}
コード例 #25
0
med_err generateFieldFile( const med_size nentities, const med_size nvaluesperentity, const med_size nconstituentpervalue,
			   const med_switch_mode constituentmode,GetBlocksOfEntitiesType getBlockOfEntities, const med_int nbblocksperproc,
			   GenerateDataType generateDatas,
			   const med_storage_mode storagemode, const med_size profilearraysize,  const char * const fieldnameprefix,  COM_info * const cominfo ) {

/*     static int   _fileno=0; */
    med_err      _ret=-1;
    char         _filename   [255]="";
    char         _meshname[MED_NAME_SIZE+1]="Empty mesh";
    med_int      _meshdim=3;
    char         _meshcomponentname[3*MED_SNAME_SIZE+1] = "x               y               z               ";
    char         _meshcomponentunit[3*MED_SNAME_SIZE+1] = "cm              cm              cm              ";
    char         _fieldname  [MED_NAME_SIZE+1]="";
    char         *componentname,*componentunit;
    char         _profilename[MED_NAME_SIZE+1]=MED_NO_PROFILE;
    med_int       *_profilearray=0;
    int          _i=0,_j=0,_k=0, _lastusedrank=0;
    med_size     _blocksize=0,_lastblocksize=0,_count=0,_stride=0,_start=0,_index=0;
    med_float    *_arrayvalues;
    med_filter   filter = MED_FILTER_INIT;
    med_size     _nusedentities        = nentities;
    med_size     _io_count                = nbblocksperproc;
    med_idt      _fidseq,_fid;

    MPI_Info info     = cominfo->info;     
    MPI_Comm comm     = cominfo->comm;
    int      mpi_size = cominfo->mpi_size;
    int      mpi_rank = cominfo->mpi_rank;

    char         *_MED_MODE_SWITCH_MSG[3]={"MED_FULL_INTERLACE", "MED_NO_INTERLACE","MED_UNDEF_INTERLACE",};
    char         *_MED_STORAGE_MODE_MSG[3]={"MED_NO_STMODE","MED_GLOBAL_STMODE", "MED_COMPACT_STMODE"};

    med_geometry_type     _geotype       = MED_TRIA6;
    med_int               _geodim        = _geotype/100;
    med_int               _geonnodes     = _geotype%100;
    char       _ipointname[MED_NAME_SIZE+1];
    med_float* _ipointrefcoo = 0;
    med_int    _ipoint       = nvaluesperentity;
    med_float* _ipointcoo    = 0;
    med_float* _ipointwg     = 0;

    sprintf(_filename,"%s_CPU-%03d_@_%s_%s.med",fieldnameprefix,mpi_size,_MED_MODE_SWITCH_MSG[constituentmode],_MED_STORAGE_MODE_MSG[storagemode]);
/*     SSCRUTE(_filename); */
    /* Open the file in parallel mode */
    if ((_fid = MEDparFileOpen(_filename, MODE_ACCES ,comm, info)) < 0){
      MED_ERR_(_ret,MED_ERR_OPEN,MED_ERR_FILE,_filename);
      goto ERROR;
    }

/*     SSCRUTE(_meshname); */
    if (MEDmeshCr( _fid,_meshname,_meshdim,_meshdim, MED_UNSTRUCTURED_MESH,
		   "Un maillage pour le test parallel","s", MED_SORT_DTIT,
		   MED_CARTESIAN, _meshcomponentname, _meshcomponentunit) < 0) {
      MED_ERR_(_ret,MED_ERR_CREATE,MED_ERR_MESH,_meshname);
      goto ERROR;
    };

    componentname = (char*) malloc((nconstituentpervalue*MED_SNAME_SIZE+1)*sizeof(char));
    componentunit = (char*) malloc((nconstituentpervalue*MED_SNAME_SIZE+1)*sizeof(char));
    /* TODO: complete the name */
    strcpy(componentname,"");
    strcpy(componentunit,"");
    strcpy(_fieldname,fieldnameprefix);
    if ( MEDfieldCr(_fid,_fieldname,MED_FLOAT64,nconstituentpervalue,componentname,componentunit,"s",_meshname ) < 0) {
      MED_ERR_(_ret,MED_ERR_CREATE,MED_ERR_FIELD,_fieldname);
      goto ERROR;
    };
    free(componentname);
    free(componentunit);


    if ( _ipoint > 1 ) {

      MESSAGE("Creating a localization of integration points...");
      strcpy(_ipointname,_fieldname);
      strcat(_ipointname,"_loc");

      /* Warning: old spec */
      _ipointrefcoo = (med_float *) calloc(_geodim*_geonnodes,sizeof(med_float));
      _ipointcoo    = (med_float *) calloc(_ipoint*_geodim,sizeof(med_float));
      _ipointwg     = (med_float *) calloc(_ipoint,sizeof(med_float));

      if (MEDlocalizationWr(_fid, _ipointname, _geotype, _geotype/100, _ipointrefcoo, constituentmode,
			    _ipoint, _ipointcoo, _ipointwg, MED_NO_INTERPOLATION, MED_NO_MESH_SUPPORT ) < 0) {
	MED_ERR_(_ret,MED_ERR_CREATE,MED_ERR_LOCALIZATION,_ipointname);
	ISCRUTE_int(constituentmode);
	goto ERROR;
      }
      free(_ipointrefcoo );
      free(_ipointcoo    );
      free(_ipointwg     );

    } else {
      strcpy(_ipointname,MED_NO_LOCALIZATION);
    }

    if (profilearraysize) {
      MESSAGE("Creating a profile...");

      strcpy(_profilename,_fieldname);strcat(_profilename,"_profile");

      _profilearray = (med_int*) calloc(profilearraysize,sizeof(med_int));

      for (_i=0; _i < profilearraysize; ++_i) _profilearray[_i]=_i;
      if ( MEDprofileWr(_fid,_profilename,profilearraysize,_profilearray) < 0) {
	MED_ERR_(_ret,MED_ERR_CREATE,MED_ERR_PROFILE,_profilename);
	goto ERROR;
      };
      _nusedentities = profilearraysize;
    } else {
      strcpy(_profilename,MED_NO_PROFILE);
    }


    MESSAGE("Generating partition...");
    getBlockOfEntities ( mpi_rank , mpi_size, _nusedentities,
			 &_start, &_stride, &_io_count, &_blocksize,
			 &_lastusedrank, &_lastblocksize);

    _count=_io_count;
    MESSAGE("Generating filter...");
    if ( MEDfilterBlockOfEntityCr(_fid, nentities, nvaluesperentity, nconstituentpervalue,
				  MED_ALL_CONSTITUENT, constituentmode, storagemode, _profilename,
				  _start,_stride,_count,_blocksize,_lastblocksize,  &filter) < 0 ) {
	MED_ERR_(_ret,MED_ERR_CREATE,MED_ERR_FILTER,"");
	goto ERROR;
    }

    MESSAGE("Generating datas...");
    generateDatas(mpi_rank, _lastusedrank, sizeof(med_float),
		  storagemode, profilearraysize, _profilearray,
		  _start, _stride, _count, _blocksize, _lastblocksize,
		  nentities, nvaluesperentity, nconstituentpervalue,
		  &_arrayvalues );

    MESSAGE("Writing field...");
    if ( MEDfieldValueAdvancedWr(_fid,_fieldname,MED_NO_DT,MED_NO_IT,0.0, MED_CELL, _geotype,
				 _ipointname, &filter, (unsigned char*)_arrayvalues ) < 0) {
      MED_ERR_(_ret,MED_ERR_WRITE,MED_ERR_FIELD,_fieldname);
      ISCRUTE(mpi_rank);
      goto ERROR;
    }

    /* Read back the same file, with a simple filter, from a single process */
    /* TODO: create MEDflush */
    H5Fflush(_fid, H5F_SCOPE_GLOBAL );

    /* The flush is enough; no need to synchronize the processes: MPI_Barrier(MPI_COMM_WORLD); */
    if (mpi_rank == 0 ) {
      MESSAGE("Reading field...");


      med_int    _nentitiesarrayvalues=0;
      med_float  *_filteredarrayvalues=NULL;
      med_filter filter2=MED_FILTER_INIT;
      int        _ind=0;
      FILE *     _asciifile;
      char       _asciifilename[255]="";


      if ((_fidseq = MEDfileOpen(_filename, MED_ACC_RDONLY )) < 0){
	MED_ERR_(_ret,MED_ERR_OPEN,MED_ERR_FILE,_filename);
	goto ERROR;
      }

      sprintf(_asciifilename,"%s_CPU-%03d_@_%s_%s.ascii",fieldnameprefix,mpi_size,_MED_MODE_SWITCH_MSG[constituentmode],_MED_STORAGE_MODE_MSG[storagemode]);
      _asciifile=fopen(_asciifilename, "w");

      /* Generate a simple selection filter if it was not already generated by a previous call */
      /* TODO: move this call into main once the profile generation has been externalized */
      if (!(cominfo->filterarray))
	if ( generateFilterArray(  nentities,  nvaluesperentity, nconstituentpervalue,
				   profilearraysize, _profilearray,
				   &(cominfo->nentitiesfiltered), &(cominfo->filterarray) ) < 0 ) {
	  goto ERROR;
	}

      ISCRUTE(cominfo->nentitiesfiltered);
      /* Store the filter used in the .ascii file */
      for (_i=0; _i < cominfo->nentitiesfiltered; ++_i ) {
/* 	ISCRUTE(cominfo->filterarray[_i]); */
	fprintf(_asciifile,"%d ",cominfo->filterarray[_i]) ;
      }
      fprintf(_asciifile,"\n") ;


      /* No profile possible (profilearraysize == 0) in MED_GLOBAL_STMODE on a file handled in parallel */
      if ( profilearraysize ) {
	_nentitiesarrayvalues = profilearraysize;
      } else {
	_nentitiesarrayvalues = nentities;
      }

      /* Beware: potentially large memory allocation, since it is done only by
         process 0, which gathers the data. */
      /* This is a maximum size that does not take COMPACT+filter into account */
      /* TODO: adjust the size to the storage_mode */
      _filteredarrayvalues = (med_float*) malloc(_nentitiesarrayvalues*
						 nvaluesperentity*
						 nconstituentpervalue*sizeof(med_float));

      /* Makes it possible to detect an indexing error after the read */
      for (_i=0;_i<_nentitiesarrayvalues*nvaluesperentity*nconstituentpervalue; ++_i)
	_filteredarrayvalues[_i]=-_i;


      /* Create a simple selection filter, for a sequential read by process 0 */
      if ( MEDfilterEntityCr(_fidseq, nentities, nvaluesperentity, nconstituentpervalue,
			     MED_ALL_CONSTITUENT, constituentmode, storagemode, _profilename,
			     cominfo->nentitiesfiltered,cominfo->filterarray, &filter2) < 0 ) {
	MED_ERR_(_ret,MED_ERR_CREATE,MED_ERR_FILTER,"");
	goto ERROR;
      }

      if ( MEDfieldValueAdvancedRd(_fidseq,_fieldname,MED_NO_DT,MED_NO_IT, MED_CELL, _geotype,
				   &filter2, (unsigned char*)_filteredarrayvalues ) < 0) {
	MED_ERR_(_ret,MED_ERR_READ,MED_ERR_FIELD,_fieldname);
	ISCRUTE(mpi_rank);
	goto ERROR;
      }

      /* DISPLAY IS ALWAYS IN FULL INTERLACE, WHATEVER THE COMBINATION */
      /* TODO: externalize the display */
      if ( storagemode == MED_GLOBAL_STMODE ) {
	switch (constituentmode) {
	case MED_FULL_INTERLACE:
	  for (_i=0; _i < cominfo->nentitiesfiltered; ++_i)
	    for (_j=0; _j < nvaluesperentity; ++_j)
	      for (_k=0; _k < nconstituentpervalue; ++_k) {
		_ind = (cominfo->filterarray[_i]-1)*nvaluesperentity*nconstituentpervalue+ _j*nconstituentpervalue+_k;
/* 		fprintf(stdout,"%s%3d%s = %f\n","_filteredarrayvaluesFULLGLB[",_ind,"]",_filteredarrayvalues[_ind]) ; */
		fprintf(_asciifile,"%f\n",_filteredarrayvalues[_ind]) ;
	      }
	  break;
	case MED_NO_INTERLACE:
	  for (_j=0; _j < cominfo->nentitiesfiltered; ++_j)
	    for (_k=0; _k < nvaluesperentity; ++_k)
	      for (_i=0; _i < nconstituentpervalue; ++_i) {
		_ind =_i*nentities*nvaluesperentity+ (cominfo->filterarray[_j]-1)*nvaluesperentity +_k;
/* 		fprintf(stdout,"%s%3d%s = %f\n","_filteredarrayvaluesNOGLB[",_ind,"]",_filteredarrayvalues[_ind]); */
		fprintf(_asciifile,"%f\n",_filteredarrayvalues[_ind]);
	      }
	  break;
	}
      }  else
	switch (constituentmode) {
	case MED_FULL_INTERLACE:
	  for (_i=0; _i < cominfo->nentitiesfiltered; ++_i )
	    for (_j=0; _j < nvaluesperentity; ++_j)
	      for (_k=0; _k < nconstituentpervalue; ++_k) {
		_ind = _i*nvaluesperentity*nconstituentpervalue+_j*nconstituentpervalue+_k;
/* 		fprintf(stdout,"%s%3d%s = %f\n","_filteredarrayvaluesFULLCP[",_ind,"]",_filteredarrayvalues[_ind]) ; */
		fprintf(_asciifile,"%f\n",_filteredarrayvalues[_ind]) ;
	  }
	  break;
	case MED_NO_INTERLACE:
	  for (_j=0; _j < cominfo->nentitiesfiltered; ++_j)
	    for (_k=0; _k < nvaluesperentity; ++_k)
	      for (_i=0; _i < nconstituentpervalue; ++_i) {
		_ind =_i*cominfo->nentitiesfiltered*nvaluesperentity+ _j*nvaluesperentity +_k;
		/* _ind =_i*_nentitiesarrayvalues*nvaluesperentity+ (_filterarray[_j]-1)*nvaluesperentity +_k; */
/* 		fprintf(stdout,"%s%3d%s = %f\n","_filteredarrayvaluesNOCP[",_ind,"]",_filteredarrayvalues[_ind]); */
		fprintf(_asciifile,"%f\n",_filteredarrayvalues[_ind]);
	      }
	  break;
	}


      free(_filteredarrayvalues);

      fclose(_asciifile);

      if ( MEDfilterClose(&filter2) < 0 ) {
	MED_ERR_(_ret,MED_ERR_CLOSE,MED_ERR_FILTER,"");
	goto ERROR;
      }

    } /* end if (mpi_rank == 0) */

  if ( MEDfilterClose(&filter) < 0 ) {
    MED_ERR_(_ret,MED_ERR_CLOSE,MED_ERR_FILTER,"");
    goto ERROR;
  }


    _ret=0;
  ERROR:
    if (_arrayvalues)     free(_arrayvalues);
    if (profilearraysize) free(_profilearray);

    if (  MEDfileClose(_fid) < 0) {
      MED_ERR_(_ret,MED_ERR_CLOSE,MED_ERR_FILE,""); _ret = -1;
    }

    if (mpi_rank == 0 ) {
      if (  MEDfileClose(_fidseq) < 0) {
	MED_ERR_(_ret,MED_ERR_CLOSE,MED_ERR_FILE,""); _ret = -1;
      }
    }

    return _ret;
}
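The pattern at the end of this example — every rank writes collectively through a filter, H5Fflush() makes the data visible in the file, then rank 0 reopens the same file read-only to verify it — can be sketched with plain HDF5 calls rather than the MED API. The sketch below is a hedged illustration: the file and dataset names are placeholders, error handling and file-locking concerns are omitted, and an MPI build of HDF5 is assumed.

/* Hedged sketch: collective write, flush, then sequential read-back on rank 0. */
#include <mpi.h>
#include "hdf5.h"

void write_then_verify(MPI_Comm comm, const double *local_buf,
                       hsize_t nlocal, hsize_t offset, hsize_t ntotal)
{
    int rank;
    MPI_Comm_rank(comm, &rank);

    hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(fapl, comm, MPI_INFO_NULL);
    hid_t fid = H5Fcreate("field.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    H5Pclose(fapl);

    hsize_t dims[1] = {ntotal};
    hid_t fspace = H5Screate_simple(1, dims, NULL);
    hid_t dset   = H5Dcreate2(fid, "field", H5T_NATIVE_DOUBLE, fspace,
                              H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    hsize_t count[1] = {nlocal}, start[1] = {offset};
    hid_t mspace = H5Screate_simple(1, count, NULL);
    H5Sselect_hyperslab(fspace, H5S_SELECT_SET, start, NULL, count, NULL);

    hid_t dxpl = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);   /* collective write */
    H5Dwrite(dset, H5T_NATIVE_DOUBLE, mspace, fspace, dxpl, local_buf);

    H5Fflush(fid, H5F_SCOPE_GLOBAL);                /* data reaches the file before rank 0 reopens it */

    if (rank == 0) {
        hid_t rfid = H5Fopen("field.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
        /* ... H5Dopen2/H5Dread verification would go here ... */
        H5Fclose(rfid);
    }

    H5Pclose(dxpl); H5Sclose(mspace); H5Sclose(fspace);
    H5Dclose(dset); H5Fclose(fid);
}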
コード例 #26
0
ファイル: material.cpp プロジェクト: crbates/pyne
void pyne::Material::_load_comp_protocol1(hid_t db, std::string datapath, int row) {
  std::string nucpath;
  hid_t data_set = H5Dopen2(db, datapath.c_str(), H5P_DEFAULT);

  hsize_t data_offset[1] = {row};
  if (row < 0) {
    // Handle negative row indices
    hid_t data_space = H5Dget_space(data_set);
    hsize_t data_dims[1];
    H5Sget_simple_extent_dims(data_space, data_dims, NULL);
    data_offset[0] += data_dims[0];
  };

  // Grab the nucpath
  hid_t nuc_attr = H5Aopen(data_set, "nucpath", H5P_DEFAULT);
  H5A_info_t nuc_info;
  H5Aget_info(nuc_attr, &nuc_info);
  hsize_t nuc_attr_len = nuc_info.data_size;
  hid_t str_attr = H5Tcopy(H5T_C_S1);
  H5Tset_size(str_attr, nuc_attr_len);
  char * nucpathbuf = new char [nuc_attr_len];
  H5Aread(nuc_attr, str_attr, nucpathbuf);
  nucpath = std::string(nucpathbuf, nuc_attr_len);
  delete[] nucpathbuf;

  // Grab the nuclides
  std::vector<int> nuclides = h5wrap::h5_array_to_cpp_vector_1d<int>(db, nucpath, H5T_NATIVE_INT);
  int nuc_size = nuclides.size();
  hsize_t nuc_dims[1] = {nuc_size};

  // Get the data hyperslab
  hid_t data_hyperslab = H5Dget_space(data_set);
  hsize_t data_count[1] = {1};
  H5Sselect_hyperslab(data_hyperslab, H5S_SELECT_SET, data_offset, NULL, data_count, NULL);

  // Get memory space for writing
  hid_t mem_space = H5Screate_simple(1, data_count, NULL);

  // Get material type
  size_t material_struct_size = sizeof(pyne::material_struct) + sizeof(double)*nuc_size;
  hid_t desc = H5Tcreate(H5T_COMPOUND, material_struct_size);
  hid_t comp_values_array_type = H5Tarray_create2(H5T_NATIVE_DOUBLE, 1, nuc_dims);

  // make the data table type
  H5Tinsert(desc, "mass", HOFFSET(pyne::material_struct, mass), H5T_NATIVE_DOUBLE);
  H5Tinsert(desc, "density", HOFFSET(pyne::material_struct, density), 
            H5T_NATIVE_DOUBLE);
  H5Tinsert(desc, "atoms_per_molecule", HOFFSET(pyne::material_struct, atoms_per_mol), 
            H5T_NATIVE_DOUBLE);
  H5Tinsert(desc, "comp", HOFFSET(pyne::material_struct, comp), comp_values_array_type);

  // make the data array, have to over-allocate
  material_struct * mat_data = new material_struct [material_struct_size];

  // Finally, get data and put in on this instance
  H5Dread(data_set, desc, mem_space, data_hyperslab, H5P_DEFAULT, mat_data);

  mass = (*mat_data).mass;
  density = (*mat_data).density;
  atoms_per_molecule = (*mat_data).atoms_per_mol;
  for (int i = 0; i < nuc_size; i++)
    comp[nuclides[i]] = (double) (*mat_data).comp[i];

  delete[] mat_data;
  H5Tclose(str_attr);

  //
  // Get metadata from associated dataset, if available
  //
  std::string attrpath = datapath + "_metadata";
  bool attrpath_exists = h5wrap::path_exists(db, attrpath);
  if (!attrpath_exists)
    return;

  hid_t attrtype, metadataet, metadatalab, attrmemspace;
  int attrrank; 
  hvl_t attrdata [1];

  attrtype = H5Tvlen_create(H5T_NATIVE_CHAR);

  // Get the metadata from the file
  metadataet = H5Dopen2(db, attrpath.c_str(), H5P_DEFAULT);
  metadatalab = H5Dget_space(metadataet);
  H5Sselect_hyperslab(metadatalab, H5S_SELECT_SET, data_offset, NULL, data_count, NULL);
  attrmemspace = H5Screate_simple(1, data_count, NULL);
  H5Dread(metadataet, attrtype, attrmemspace, metadatalab, H5P_DEFAULT, attrdata);

  // convert to in-memory JSON
  Json::Reader reader;
  reader.parse((char *) attrdata[0].p, (char *) attrdata[0].p+attrdata[0].len, metadata, false);
  
  // close attr data objects
  H5Fflush(db, H5F_SCOPE_GLOBAL);
  H5Dclose(metadataet);
  H5Sclose(metadatalab);
  H5Sclose(attrmemspace);
  H5Tclose(attrtype);

  // Close out the HDF5 file
  H5Fclose(db);
};
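The read path above relies on two HDF5 building blocks: a compound memory datatype assembled field by field with H5Tinsert(), and a one-row hyperslab selection on the file dataspace so only the requested record is transferred. A hedged, self-contained sketch of that pattern follows; the struct layout and the dataset path "/material" are illustrative, not pyne's actual layout.

/* Hedged sketch of a one-row compound read; names and layout are illustrative. */
#include "hdf5.h"

typedef struct {
    double mass;
    double density;
} row_t;

int read_one_row(hid_t file, hsize_t row, row_t *out)
{
    hid_t dset = H5Dopen2(file, "/material", H5P_DEFAULT);

    /* Memory type: describe only the fields we want, by name and offset. */
    hid_t mtype = H5Tcreate(H5T_COMPOUND, sizeof(row_t));
    H5Tinsert(mtype, "mass",    HOFFSET(row_t, mass),    H5T_NATIVE_DOUBLE);
    H5Tinsert(mtype, "density", HOFFSET(row_t, density), H5T_NATIVE_DOUBLE);

    /* File selection: a single row of the 1-D table. */
    hid_t   fspace   = H5Dget_space(dset);
    hsize_t start[1] = {row}, count[1] = {1};
    H5Sselect_hyperslab(fspace, H5S_SELECT_SET, start, NULL, count, NULL);
    hid_t mspace = H5Screate_simple(1, count, NULL);

    herr_t status = H5Dread(dset, mtype, mspace, fspace, H5P_DEFAULT, out);

    H5Sclose(mspace); H5Sclose(fspace);
    H5Tclose(mtype);  H5Dclose(dset);
    return status < 0 ? -1 : 0;
}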
コード例 #27
0
ファイル: test_dset_opt.c プロジェクト: CommonLibrary/hdf5
/*-------------------------------------------------------------------------
 * Function:	test_skip_compress_write2
 *
 * Purpose:	Test skipping compression filter when there are three filters
 *              for the dataset
 *
 * Return:	Success:	0
 *
 *		Failure:	1
 *
 * Programmer:  Raymond Lu	
 *              30 November 2012
 *
 *-------------------------------------------------------------------------
 */
static int
test_skip_compress_write2(hid_t file)
{
    hid_t       dataspace = -1, dataset = -1;
    hid_t       mem_space = -1;
    hid_t       cparms = -1, dxpl = -1;
    hsize_t     dims[2]  = {NX, NY};        
    hsize_t     maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
    hsize_t     chunk_dims[2] ={CHUNK_NX, CHUNK_NY};
    herr_t      status;
    int         i, j, n;

    unsigned    filter_mask = 0;
    int         origin_direct_buf[CHUNK_NX][CHUNK_NY];
    int         direct_buf[CHUNK_NX][CHUNK_NY];
    int         check_chunk[CHUNK_NX][CHUNK_NY];
    hsize_t     offset[2] = {0, 0};
    size_t      buf_size = CHUNK_NX*CHUNK_NY*sizeof(int);
    int          aggression = 9;     /* Compression aggression setting */

    hsize_t start[2];  /* Start of hyperslab */
    hsize_t stride[2]; /* Stride of hyperslab */
    hsize_t count[2];  /* Block count */
    hsize_t block[2];  /* Block sizes */

    TESTING("skipping compression filters but keep two other filters");

    /*
     * Create the data space with unlimited dimensions.
     */
    if((dataspace = H5Screate_simple(RANK, dims, maxdims)) < 0)
        goto error;

    if((mem_space = H5Screate_simple(RANK, chunk_dims, NULL)) < 0)
        goto error;

    /*
     * Modify dataset creation properties, i.e. enable chunking and compression.
     * The order of filters is bogus 1 + deflate + bogus 2.
     */
    if((cparms = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        goto error;

    if((status = H5Pset_chunk( cparms, RANK, chunk_dims)) < 0)
        goto error;

    /* Register and enable first bogus filter */
    if(H5Zregister (H5Z_BOGUS1) < 0) 
	goto error;

    if(H5Pset_filter(cparms, H5Z_FILTER_BOGUS1, 0, (size_t)0, NULL) < 0) 
	goto error;

    /* Enable compression filter */
    if((status = H5Pset_deflate( cparms, (unsigned) aggression)) < 0)
        goto error;

    /* Register and enable second bogus filter */
    if(H5Zregister (H5Z_BOGUS2) < 0) 
	goto error;

    if(H5Pset_filter(cparms, H5Z_FILTER_BOGUS2, 0, (size_t)0, NULL) < 0) 
	goto error;

    /*
     * Create a new dataset within the file using cparms
     * creation properties.
     */
    if((dataset = H5Dcreate2(file, DATASETNAME3, H5T_NATIVE_INT, dataspace, H5P_DEFAULT,
			cparms, H5P_DEFAULT)) < 0)
        goto error;

    if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0)
        goto error;

    /* Initialize data for one chunk. Apply operations of two bogus filters to the chunk */
    for(i = n = 0; i < CHUNK_NX; i++)
        for(j = 0; j < CHUNK_NY; j++) {
	    origin_direct_buf[i][j] = n++;
	    direct_buf[i][j] = (origin_direct_buf[i][j] + ADD_ON) * FACTOR;
    }

    /* Write one chunk of uncompressed data to the dataset, using the direct writing function.
     * Indicate skipping the compression filter but keep the other two bogus filters */
    offset[0] = CHUNK_NX;
    offset[1] = CHUNK_NY;

    /* compression filter is the middle one to be skipped */
    filter_mask = 0x00000002;

    if((status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, direct_buf)) < 0)
        goto error;

    if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0) 
        goto error;

    if(H5Dclose(dataset) < 0)
        goto error;

    if((dataset = H5Dopen2(file, DATASETNAME3, H5P_DEFAULT)) < 0)
        goto error;

    /*
     * Select hyperslab for one chunk in the file
     */
    start[0]  = CHUNK_NX; start[1]  = CHUNK_NY;
    stride[0] = 1; stride[1] = 1;
    count[0]  = 1; count[1]  = 1;
    block[0]  = CHUNK_NX; block[1]  = CHUNK_NY;
    if((status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, stride, count, block)) < 0)
        goto error;

    /* Read the chunk back */
    if((status = H5Dread(dataset, H5T_NATIVE_INT, mem_space, dataspace, H5P_DEFAULT, check_chunk)) < 0)
        goto error;

    /* Check that the values read are the same as the values written */
    for(i = 0; i < CHUNK_NX; i++) {
        for(j = 0; j < CHUNK_NY; j++) {
            if(origin_direct_buf[i][j] != check_chunk[i][j]) {
                printf("    1. Read different values than written.");
                printf("    At index %d,%d\n", i, j);
                printf("    origin_direct_buf=%d, check_chunk=%d\n", origin_direct_buf[i][j], check_chunk[i][j]); 
                goto error;
            }
        }
    }

    /*
     * Close/release resources.
     */
    H5Dclose(dataset);
    H5Sclose(mem_space);
    H5Sclose(dataspace);
    H5Pclose(cparms);
    H5Pclose(dxpl);
    
    PASSED();
    return 0;

error:
    H5E_BEGIN_TRY {
        H5Dclose(dataset);
        H5Sclose(mem_space);
        H5Sclose(dataspace);
        H5Pclose(cparms);
        H5Pclose(dxpl);
    } H5E_END_TRY;

    return 1;
}
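The filter_mask passed to H5DOwrite_chunk is a bit field in which bit i set means "skip the i-th filter in the pipeline"; with the pipeline bogus1 + deflate + bogus2 used above, 0x00000002 skips only the middle (deflate) filter. A small hedged helper for building such masks; it is an illustration, not part of the HDF5 test code.

/* Hedged sketch: build a chunk filter mask from the 0-based pipeline positions to skip. */
#include <stdarg.h>

static unsigned skip_filters(int nskipped, ...)
{
    unsigned mask = 0;
    va_list  ap;
    va_start(ap, nskipped);
    for (int i = 0; i < nskipped; i++)
        mask |= 1u << va_arg(ap, int);   /* bit k set == skip filter at position k */
    va_end(ap);
    return mask;
}

/* skip_filters(1, 1) == 0x00000002: skip the second filter (deflate) only. */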
コード例 #28
0
ファイル: swmr_addrem_writer.c プロジェクト: FilipeMaia/hdf5
/*-------------------------------------------------------------------------
 * Function:    addrem_records
 *
 * Purpose:     Adds/removes a specified number of records to random datasets
 *              to the SWMR test file.
 *
 * Parameters:  hid_t fid
 *              The file ID of the SWMR HDF5 file
 *
 *              unsigned verbose
 *              Whether or not to emit verbose console messages
 *
 *              unsigned long nops
 *              # of records to read/write in the datasets
 *
 *              unsigned long flush_count
 *              # of records to write before flushing the file to disk
 *
 * Return:      Success:    0
 *              Failure:    -1
 *
 *-------------------------------------------------------------------------
 */
static int
addrem_records(hid_t fid, unsigned verbose, unsigned long nops, unsigned long flush_count)
{
    hid_t tid;                          /* Datatype ID for records */
    hid_t mem_sid;                      /* Memory dataspace ID */
    hsize_t start[2] = {0, 0}, count[2] = {1, 1}; /* Hyperslab selection values */
    hsize_t dim[2] = {1, 0};            /* Dataspace dimensions */
    symbol_t buf[MAX_SIZE_CHANGE];      /* Write buffer */
    H5AC_cache_config_t mdc_config_orig; /* Original metadata cache configuration */
    H5AC_cache_config_t mdc_config_cork; /* Corked metadata cache configuration */
    unsigned long op_to_flush;          /* # of operations before flush */
    unsigned long u, v;                 /* Local index variables */

    assert(fid > 0);

    /* Reset the buffer */
    memset(&buf, 0, sizeof(buf));

    /* Create a dataspace for the record to add */
    if((mem_sid = H5Screate_simple(2, count, NULL)) < 0)
        return -1;

    /* Create datatype for appending records */
    if((tid = create_symbol_datatype()) < 0)
        return -1;

    /* Get the current metadata cache configuration, and set up the corked
     * configuration */
    mdc_config_orig.version = H5AC__CURR_CACHE_CONFIG_VERSION;
    if(H5Fget_mdc_config(fid, &mdc_config_orig) < 0)
        return -1;
    memcpy(&mdc_config_cork, &mdc_config_orig, sizeof(mdc_config_cork));
    mdc_config_cork.evictions_enabled = FALSE;
    mdc_config_cork.incr_mode = H5C_incr__off;
    mdc_config_cork.flash_incr_mode = H5C_flash_incr__off;
    mdc_config_cork.decr_mode = H5C_decr__off;

    /* Add and remove records to random datasets, according to frequency
     * distribution */
    op_to_flush = flush_count;
    for(u=0; u<nops; u++) {
        symbol_info_t *symbol;  /* Symbol to write record to */
        hid_t file_sid;         /* Dataset's space ID */

        /* Get a random dataset, according to the symbol distribution */
        symbol = choose_dataset();

        /* Decide whether to shrink or expand, and by how much */
        count[1] = (hsize_t)random() % (MAX_SIZE_CHANGE * 2) + 1;

        if(count[1] > MAX_SIZE_CHANGE) {
            /* Add records */
            count[1] -= MAX_SIZE_CHANGE;

            /* Set the buffer's IDs (equal to its position) */
            for(v=0; v<count[1]; v++)
                buf[v].rec_id = (uint64_t)symbol->nrecords + (uint64_t)v;

            /* Set the memory space to the correct size */
            if(H5Sset_extent_simple(mem_sid, 2, count, NULL) < 0)
                return -1;

            /* Get the coordinates to write */
            start[1] = symbol->nrecords;

            /* Cork the metadata cache, to prevent the object header from being
             * flushed before the data has been written */
            /*if(H5Fset_mdc_config(fid, &mdc_config_cork) < 0)
                return(-1);*/

             /* Extend the dataset's dataspace to hold the new record */
            symbol->nrecords+= count[1];
            dim[1] = symbol->nrecords;
            if(H5Dset_extent(symbol->dsid, dim) < 0)
                return -1;

            /* Get the dataset's dataspace */
            if((file_sid = H5Dget_space(symbol->dsid)) < 0)
                return -1;

            /* Choose the last record in the dataset */
            if(H5Sselect_hyperslab(file_sid, H5S_SELECT_SET, start, NULL, count, NULL) < 0)
                return -1;

            /* Write record to the dataset */
            if(H5Dwrite(symbol->dsid, tid, mem_sid, file_sid, H5P_DEFAULT, &buf) < 0)
                return -1;

            /* Uncork the metadata cache */
            /*if(H5Fset_mdc_config(fid, &mdc_config_orig) < 0)
                return -1;*/

            /* Close the dataset's dataspace */
            if(H5Sclose(file_sid) < 0)
                return -1;
        } /* end if */
        else {
            /* Shrink the dataset's dataspace */
            if(count[1] > symbol->nrecords)
                symbol->nrecords = 0;
            else
                symbol->nrecords -= count[1];
            dim[1] = symbol->nrecords;
            if(H5Dset_extent(symbol->dsid, dim) < 0)
                return -1;
        } /* end else */

        /* Check for flushing file */
        if(flush_count > 0) {
            /* Decrement count of records to write before flushing */
            op_to_flush--;

            /* Check for counter being reached */
            if(0 == op_to_flush) {
                /* Flush contents of file */
                if(H5Fflush(fid, H5F_SCOPE_GLOBAL) < 0)
                    return -1;

                /* Reset flush counter */
                op_to_flush = flush_count;
            } /* end if */
        } /* end if */
    } /* end for */

    /* Emit informational message */
    if(verbose)
        fprintf(stderr, "Closing datasets\n");

    /* Close the datasets */
    for(u = 0; u < NLEVELS; u++)
        for(v = 0; v < symbol_count[u]; v++)
            if(H5Dclose(symbol_info[u][v].dsid) < 0)
                return -1;

    return 0;
}
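The writer above calls H5Fflush(fid, H5F_SCOPE_GLOBAL) every flush_count operations so that concurrent SWMR readers see progress at a bounded lag. A stripped-down sketch of that counter logic follows; do_one_operation() is a placeholder and not part of the test.

/* Hedged sketch of the flush-every-N-operations loop. */
#include "hdf5.h"

int run_ops(hid_t fid, unsigned long nops, unsigned long flush_count)
{
    unsigned long to_flush = flush_count;

    for (unsigned long u = 0; u < nops; u++) {
        /* do_one_operation(fid);  placeholder for adding/removing records */

        if (flush_count > 0 && --to_flush == 0) {
            if (H5Fflush(fid, H5F_SCOPE_GLOBAL) < 0)  /* let readers see progress */
                return -1;
            to_flush = flush_count;                   /* reset the counter        */
        }
    }
    return 0;
}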
コード例 #29
0
ファイル: test_dset_opt.c プロジェクト: CommonLibrary/hdf5
static int
test_direct_chunk_write (hid_t file)
{
    hid_t       dataspace = -1, dataset = -1;
    hid_t       mem_space = -1;
    hid_t       cparms = -1, dxpl = -1;
    hsize_t     dims[2]  = {NX, NY};        
    hsize_t     maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
    hsize_t     chunk_dims[2] ={CHUNK_NX, CHUNK_NY};
    herr_t      status;
    int         ret;
    int         data[NX][NY];
    int         i, j, n;

    unsigned    filter_mask = 0;
    int         direct_buf[CHUNK_NX][CHUNK_NY];
    int         check_chunk[CHUNK_NX][CHUNK_NY];
    hsize_t     offset[2] = {0, 0};
    size_t      buf_size = CHUNK_NX*CHUNK_NY*sizeof(int);

    const Bytef *z_src = (const Bytef*)(direct_buf);
    Bytef	    *z_dst;		/*destination buffer		*/
    uLongf	     z_dst_nbytes = (uLongf)DEFLATE_SIZE_ADJUST(buf_size);
    uLong	     z_src_nbytes = (uLong)buf_size;
    int          aggression = 9;     /* Compression aggression setting */
    void	*outbuf = NULL;         /* Pointer to new buffer */

    hsize_t start[2];  /* Start of hyperslab */
    hsize_t stride[2]; /* Stride of hyperslab */
    hsize_t count[2];  /* Block count */
    hsize_t block[2];  /* Block sizes */

    TESTING("basic functionality of H5DOwrite_chunk");

    /*
     * Create the data space with unlimited dimensions.
     */
    if((dataspace = H5Screate_simple(RANK, dims, maxdims)) < 0)
        goto error;

    if((mem_space = H5Screate_simple(RANK, chunk_dims, NULL)) < 0)
        goto error;

    /*
     * Modify dataset creation properties, i.e. enable chunking and compression
     */
    if((cparms = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        goto error;

    if((status = H5Pset_chunk( cparms, RANK, chunk_dims)) < 0)
        goto error;

    if((status = H5Pset_deflate( cparms, (unsigned) aggression)) < 0)
        goto error;

    /*
     * Create a new dataset within the file using cparms
     * creation properties.
     */
    if((dataset = H5Dcreate2(file, DATASETNAME1, H5T_NATIVE_INT, dataspace, H5P_DEFAULT,
			cparms, H5P_DEFAULT)) < 0)
        goto error;

    /* Initialize the dataset */
    for(i = n = 0; i < NX; i++)
        for(j = 0; j < NY; j++)
	    data[i][j] = n++;

    if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0)
        goto error;

    /*
     * Write the data for the dataset.  It should stay in the chunk cache.
     * It will be evicted from the cache by the H5DOwrite_chunk calls. 
     */
    if((status = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
		      dxpl, data)) < 0)
        goto error;

    /* Initialize data for one chunk */
    for(i = n = 0; i < CHUNK_NX; i++)
        for(j = 0; j < CHUNK_NY; j++)
	    direct_buf[i][j] = n++;

    /* Allocate output (compressed) buffer */
    outbuf = HDmalloc(z_dst_nbytes);
    z_dst = (Bytef *)outbuf;

    /* Perform compression from the source to the destination buffer */
    ret = compress2(z_dst, &z_dst_nbytes, z_src, z_src_nbytes, aggression);

    /* Check for various zlib errors */
    if(Z_BUF_ERROR == ret) {
        fprintf(stderr, "overflow");
        goto error;
    } else if(Z_MEM_ERROR == ret) {
	fprintf(stderr, "deflate memory error");
        goto error;
    } else if(Z_OK != ret) {
	fprintf(stderr, "other deflate error");
        goto error;
    }

    /* Write the compressed chunk data repeatedly to cover all the chunks in the 
     * dataset, using the direct writing function.     */ 
    for(i=0; i<NX/CHUNK_NX; i++) {
        for(j=0; j<NY/CHUNK_NY; j++) {
            status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, z_dst_nbytes, outbuf);
            offset[1] += CHUNK_NY;
        }
        offset[0] += CHUNK_NX;
        offset[1] = 0;
    }

    if(outbuf)
        HDfree(outbuf);

    if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0) 
        goto error;

    if(H5Dclose(dataset) < 0)
        goto error;

    if((dataset = H5Dopen2(file, DATASETNAME1, H5P_DEFAULT)) < 0)
        goto error;

    /*
     * Select hyperslab for one chunk in the file
     */
    start[0]  = CHUNK_NX; start[1]  = CHUNK_NY;
    stride[0] = 1; stride[1] = 1;
    count[0]  = 1; count[1]  = 1;
    block[0]  = CHUNK_NX; block[1]  = CHUNK_NY;
    if((status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, stride, count, block)) < 0)
        goto error;

    /* Read the chunk back */
    if((status = H5Dread(dataset, H5T_NATIVE_INT, mem_space, dataspace, H5P_DEFAULT, check_chunk)) < 0)
        goto error;

    /* Check that the values read are the same as the values written */
    for(i = 0; i < CHUNK_NX; i++) {
        for(j = 0; j < CHUNK_NY; j++) {
            if(direct_buf[i][j] != check_chunk[i][j]) {
                printf("    1. Read different values than written.");
                printf("    At index %d,%d\n", i, j);
                printf("    direct_buf=%d, check_chunk=%d\n", direct_buf[i][j], check_chunk[i][j]); 
                goto error;
            }
        }
    }

    /* Reinitialize different data for one chunk */
    for(i = 0; i < CHUNK_NX; i++)
        for(j = 0; j < CHUNK_NY; j++)
	    direct_buf[i][j] = i + j;

    /* Allocate output (compressed) buffer */
    outbuf = HDmalloc(z_dst_nbytes);
    z_dst = (Bytef *)outbuf;

    /* Perform compression from the source to the destination buffer */
    ret = compress2(z_dst, &z_dst_nbytes, z_src, z_src_nbytes, aggression);

    /* Check for various zlib errors */
    if(Z_BUF_ERROR == ret) {
        fprintf(stderr, "overflow");
        goto error;
    } else if(Z_MEM_ERROR == ret) {
	fprintf(stderr, "deflate memory error");
        goto error;
    } else if(Z_OK != ret) {
	fprintf(stderr, "other deflate error");
        goto error;
    }

    /* Rewrite the compressed chunk data repeatedly to cover all the chunks in the 
     * dataset, using the direct writing function.     */ 
    offset[0] = offset[1] = 0;
    for(i=0; i<NX/CHUNK_NX; i++) {
        for(j=0; j<NY/CHUNK_NY; j++) {
            status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, z_dst_nbytes, outbuf);
            offset[1] += CHUNK_NY;
        }
        offset[0] += CHUNK_NX;
        offset[1] = 0;
    }

    if(outbuf)
        HDfree(outbuf);

    if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0) 
        goto error;

    if(H5Dclose(dataset) < 0)
        goto error;

    if((dataset = H5Dopen2(file, DATASETNAME1, H5P_DEFAULT)) < 0)
        goto error;

    /* Read the chunk back */
    if((status = H5Dread(dataset, H5T_NATIVE_INT, mem_space, dataspace, H5P_DEFAULT, check_chunk)) < 0)
        goto error;

    /* Check that the values read are the same as the values written */
    for(i = 0; i < CHUNK_NX; i++) {
        for(j = 0; j < CHUNK_NY; j++) {
            if(direct_buf[i][j] != check_chunk[i][j]) {
                printf("    2. Read different values than written.");
                printf("    At index %d,%d\n", i, j);
                printf("    direct_buf=%d, check_chunk=%d\n", direct_buf[i][j], check_chunk[i][j]); 
                goto error;
            }
        }
    }

    /*
     * Close/release resources.
     */
    H5Dclose(dataset);
    H5Sclose(mem_space);
    H5Sclose(dataspace);
    H5Pclose(cparms);
    H5Pclose(dxpl);
    
    PASSED();
    return 0;

error:
    H5E_BEGIN_TRY {
        H5Dclose(dataset);
        H5Sclose(mem_space);
        H5Sclose(dataspace);
        H5Pclose(cparms);
        H5Pclose(dxpl);
    } H5E_END_TRY;

    if(outbuf)
        HDfree(outbuf);

    return 1;
}
コード例 #30
0
ファイル: FileHDF5.cpp プロジェクト: achilleas-k/nix
bool FileHDF5::flush() {
    HErr err = H5Fflush(hid, H5F_SCOPE_GLOBAL);
    return !err.isError();
}