Code Example #1
File: external.c  Project: EgoIncarnate/appleseed
/*-------------------------------------------------------------------------
 * Function:	test_2
 *
 * Purpose:	Tests reading from an external file set.
 *
 * Return:	Success:	0
 *
 * 		Failure:	number of errors
 *
 * Programmer:	Robb Matzke
 *              Wednesday, March  4, 1998
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static int
test_2 (hid_t fapl)
{
    hid_t	file=-1;		/*file to write to		*/
    hid_t	dcpl=-1;		/*dataset creation properties	*/
    hid_t	space=-1;		/*data space			*/
    hid_t	dset=-1;		/*dataset			*/
    hid_t	grp=-1;			/*group to emit diagnostics	*/
    int		fd;			/*external file descriptors	*/
    size_t	i, j;			/*miscellaneous counters	*/
    hssize_t	n;			/*bytes of I/O			*/
    char	filename[1024];		/*file names			*/
    int		part[25], whole[100];	/*raw data buffers		*/
    hsize_t	cur_size;		/*current data space size	*/
    hid_t	hs_space;		/*hyperslab data space		*/
    hsize_t	hs_start = 30;		/*hyperslab starting offset	*/
    hsize_t	hs_count = 25;		/*hyperslab size		*/
	int temparray[10] = {0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f};

    TESTING("read external dataset");

    /* Write the data to external files directly */
    for (i=0; i<4; i++) {
	for (j=0; j<25; j++) {
	    part[j] = (int)(i*25+j);
	}
	sprintf (filename, "extern_%lua.raw", (unsigned long)i+1);
	fd = HDopen(filename, O_RDWR|O_CREAT|O_TRUNC, 0666);
	assert (fd>=0);
	/* n = lseek (fd, (off_t)(i*10), SEEK_SET); */
	n = HDwrite(fd,temparray,(size_t)i*10);
	assert (n>=0 && (size_t)n==i*10);
	n = HDwrite(fd, part, sizeof(part));
	assert (n==sizeof(part));
	HDclose(fd);
    }

    /*
     * Create the file and an initial group.  This causes messages about
     * debugging to be emitted before we start playing games with what the
     * output looks like.
     */
    h5_fixname(FILENAME[1], fapl, filename, sizeof filename);
    if((file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) FAIL_STACK_ERROR
    if((grp = H5Gcreate2(file, "emit-diagnostics", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
    if(H5Gclose(grp) < 0) FAIL_STACK_ERROR

    /* Create the dataset */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_external(dcpl, "extern_1a.raw",  (off_t)0, (hsize_t)sizeof part) < 0 ||
            H5Pset_external(dcpl, "extern_2a.raw", (off_t)10, (hsize_t)sizeof part) < 0 ||
            H5Pset_external(dcpl, "extern_3a.raw", (off_t)20, (hsize_t)sizeof part) < 0 ||
            H5Pset_external(dcpl, "extern_4a.raw", (off_t)30, (hsize_t)sizeof part) < 0)
	goto error;
    cur_size = 100;
    if((space = H5Screate_simple(1, &cur_size, NULL)) < 0) goto error;
    if((dset = H5Dcreate2(file, "dset1", H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) goto error;

    /*
     * Read the entire dataset and compare with the original
     */
    memset(whole, 0, sizeof(whole));
    if(H5Dread(dset, H5T_NATIVE_INT, space, space, H5P_DEFAULT, whole) < 0) goto error;
    for(i = 0; i < 100; i++)
	if(whole[i] != (signed)i) {
	    H5_FAILED();
	    puts("    Incorrect value(s) read.");
	    goto error;
	} /* end if */

    /*
     * Read the middle of the dataset
     */
    if((hs_space = H5Scopy(space)) < 0) goto error;
    if(H5Sselect_hyperslab(hs_space, H5S_SELECT_SET, &hs_start, NULL,
			    &hs_count, NULL) < 0) goto error;
    HDmemset(whole, 0, sizeof(whole));
    if(H5Dread(dset, H5T_NATIVE_INT, hs_space, hs_space, H5P_DEFAULT,
		 whole) < 0) goto error;
    if(H5Sclose(hs_space) < 0) goto error;
    for(i = hs_start; i<hs_start+hs_count; i++) {
	if(whole[i] != (signed)i) {
	    H5_FAILED();
	    puts("    Incorrect value(s) read.");
	    goto error;
	}
    }

    if (H5Dclose(dset) < 0) goto error;
    if (H5Pclose(dcpl) < 0) goto error;
    if (H5Sclose(space) < 0) goto error;
    if (H5Fclose(file) < 0) goto error;
    PASSED();
    return 0;

 error:
    H5E_BEGIN_TRY {
	H5Dclose(dset);
	H5Pclose(dcpl);
	H5Sclose(space);
	H5Fclose(file);
    } H5E_END_TRY;
    return 1;
}
Code Example #2
File: h5ex_t_vlstring.c  Project: LaHaine/ohpc
int
main (void)
{
    hid_t       file, filetype, memtype, space, dset;
                                            /* Handles */
    herr_t      status;
    hsize_t     dims[1] = {DIM0};
    char        *wdata[DIM0] = {"Parting", "is such", "sweet", "sorrow."},
                                            /* Write buffer */
                **rdata;                    /* Read buffer */
    int         ndims,
                i;

    /*
     * Create a new file using the default properties.
     */
    file = H5Fcreate (FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    /*
     * Create file and memory datatypes.  For this example we will save
     * the strings as FORTRAN strings.
     */
    filetype = H5Tcopy (H5T_FORTRAN_S1);
    status = H5Tset_size (filetype, H5T_VARIABLE);
    memtype = H5Tcopy (H5T_C_S1);
    status = H5Tset_size (memtype, H5T_VARIABLE);

    /*
     * Create dataspace.  Setting maximum size to NULL sets the maximum
     * size to be the current size.
     */
    space = H5Screate_simple (1, dims, NULL);

    /*
     * Create the dataset and write the variable-length string data to
     * it.
     */
    dset = H5Dcreate (file, DATASET, filetype, space, H5P_DEFAULT, H5P_DEFAULT,
                H5P_DEFAULT);
    status = H5Dwrite (dset, memtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);

    /*
     * Close and release resources.
     */
    status = H5Dclose (dset);
    status = H5Sclose (space);
    status = H5Tclose (filetype);
    status = H5Tclose (memtype);
    status = H5Fclose (file);


    /*
     * Now we begin the read section of this example.  Here we assume
     * the dataset has the same name and rank, but can have any size.
     * Therefore we must allocate a new array to read in data using
     * malloc().
     */

    /*
     * Open file and dataset.
     */
    file = H5Fopen (FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
    dset = H5Dopen (file, DATASET, H5P_DEFAULT);

    /*
     * Get the datatype.
     */
    filetype = H5Dget_type (dset);

    /*
     * Get dataspace and allocate memory for read buffer.
     */
    space = H5Dget_space (dset);
    ndims = H5Sget_simple_extent_dims (space, dims, NULL);
    rdata = (char **) malloc (dims[0] * sizeof (char *));

    /*
     * Create the memory datatype.
     */
    memtype = H5Tcopy (H5T_C_S1);
    status = H5Tset_size (memtype, H5T_VARIABLE);

    /*
     * Read the data.
     */
    status = H5Dread (dset, memtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);

    /*
     * Output the data to the screen.
     */
    for (i=0; i<dims[0]; i++)
        printf ("%s[%d]: %s\n", DATASET, i, rdata[i]);

    /*
     * Close and release resources.  Note that H5Dvlen_reclaim works
     * for variable-length strings as well as variable-length arrays.
     * Also note that we must still free the array of pointers stored
     * in rdata, as H5Dvlen_reclaim only frees the data these point to.
     */
    status = H5Dvlen_reclaim (memtype, space, H5P_DEFAULT, rdata);
    free (rdata);
    status = H5Dclose (dset);
    status = H5Sclose (space);
    status = H5Tclose (filetype);
    status = H5Tclose (memtype);
    status = H5Fclose (file);

    return 0;
}
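The cleanup comment in the example above is worth encapsulating. Below is a minimal sketch (not part of the original example) of a helper that pairs the two frees; it assumes the same memtype and space handles that were passed to H5Dread:

/* Hypothetical cleanup helper for variable-length string reads.
 * H5Dvlen_reclaim frees the strings the library allocated during H5Dread;
 * the caller-allocated pointer array must still be freed separately. */
static herr_t
free_vlstring_buffer (hid_t memtype, hid_t space, char **rdata)
{
    herr_t status = H5Dvlen_reclaim (memtype, space, H5P_DEFAULT, rdata);
    free (rdata);
    return status;
}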
Code Example #3
File: test_lite.c  Project: svn2github/hdf5
static int test_attr(void)
{
    hid_t   file_id;
    hid_t   dataset_id;
    hid_t   group_id;
    hid_t   space_id;
    hsize_t dims[1] = { 5 };

    /* Create a new file using default properties. */
    file_id = H5Fcreate(FILE_NAME2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    /*-------------------------------------------------------------------------
    * Create a dataset named "dset" on the root group
    *-------------------------------------------------------------------------
    */

    /* Create the data space  */
    if((space_id = H5Screate_simple(1, dims, NULL)) < 0) goto out;

    /* Create the dataset */
    if((dataset_id = H5Dcreate2(file_id , "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto out;

    /* Close */
    H5Dclose(dataset_id);

    /*-------------------------------------------------------------------------
    * Create a group named "grp" on the root group
    *-------------------------------------------------------------------------
    */

    /* Create a group. */
    if((group_id = H5Gcreate2(file_id, "grp", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto out;

    /* Close */
    H5Gclose(group_id);

    /*-------------------------------------------------------------------------
    *
    * Create attributes in the root group
    * Note that we are calling the H5LTset_attribute functions with the name "."
    *
    *-------------------------------------------------------------------------
    */
    if(make_attributes(file_id, ".") < 0) goto out;

    /*-------------------------------------------------------------------------
    *
    * Create attributes in the dataset "dset"
    *
    *-------------------------------------------------------------------------
    */
    if(make_attributes(file_id, "dset") < 0) goto out;

    /*-------------------------------------------------------------------------
    *
    * Create attributes in the group "grp"
    *
    *-------------------------------------------------------------------------
    */
    if(make_attributes(file_id, "grp") < 0) goto out;

    /*-------------------------------------------------------------------------
    * end
    *-------------------------------------------------------------------------
    */
    /* Close the file. */
    H5Fclose(file_id);

    return 0;

out:
    /* Close the file. */
    H5Fclose(file_id);

    H5_FAILED();
    return -1;
}
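The make_attributes helper called above is not included in this excerpt. As a rough illustration only, a minimal version using the H5LT attribute API could look like the sketch below; the attribute names and values are placeholders, not the ones used by the real test:

/* Hypothetical stand-in for the make_attributes helper referenced above
 * (requires hdf5_hl.h). Passing "." as obj_name targets the location itself. */
static herr_t make_attributes(hid_t loc_id, const char *obj_name)
{
    int    ival = 1;
    double dval = 2.5;

    if (H5LTset_attribute_int(loc_id, obj_name, "attr_int", &ival, 1) < 0) return -1;
    if (H5LTset_attribute_double(loc_id, obj_name, "attr_double", &dval, 1) < 0) return -1;
    return H5LTset_attribute_string(loc_id, obj_name, "attr_string", "sample");
}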
Code Example #4
File: bagdataset.cpp  Project: imincik/pkg-gdal
GDALDataset *BAGDataset::Open( GDALOpenInfo * poOpenInfo )

{
/* -------------------------------------------------------------------- */
/*      Confirm that this appears to be a BAG file.                     */
/* -------------------------------------------------------------------- */
    if( !Identify( poOpenInfo ) )
        return NULL;

/* -------------------------------------------------------------------- */
/*      Confirm the requested access is supported.                      */
/* -------------------------------------------------------------------- */
    if( poOpenInfo->eAccess == GA_Update )
    {
        CPLError( CE_Failure, CPLE_NotSupported, 
                  "The BAG driver does not support update access." );
        return NULL;
    }
    
/* -------------------------------------------------------------------- */
/*      Open the file as an HDF5 file.                                  */
/* -------------------------------------------------------------------- */
    hid_t hHDF5 = H5Fopen( poOpenInfo->pszFilename, 
                           H5F_ACC_RDONLY, H5P_DEFAULT );

    if( hHDF5 < 0 )  
        return NULL;

/* -------------------------------------------------------------------- */
/*      Confirm it is a BAG dataset by checking for the                 */
/*      BAG_Root/Bag Version attribute.                                 */
/* -------------------------------------------------------------------- */
    hid_t hBagRoot = H5Gopen( hHDF5, "/BAG_root" );
    hid_t hVersion = -1;

    if( hBagRoot >= 0 )
        hVersion = H5Aopen_name( hBagRoot, "Bag Version" );

    if( hVersion < 0 )
    {
        if( hBagRoot >= 0 )
            H5Gclose( hBagRoot );
        H5Fclose( hHDF5 );
        return NULL;
    }
    H5Aclose( hVersion );

/* -------------------------------------------------------------------- */
/*      Create a corresponding dataset.                                 */
/* -------------------------------------------------------------------- */
    BAGDataset *poDS = new BAGDataset();

    poDS->hHDF5 = hHDF5;

/* -------------------------------------------------------------------- */
/*      Extract version as metadata.                                    */
/* -------------------------------------------------------------------- */
    CPLString osVersion;

    if( GH5_FetchAttribute( hBagRoot, "Bag Version", osVersion ) )
        poDS->SetMetadataItem( "BagVersion", osVersion );

    H5Gclose( hBagRoot );

/* -------------------------------------------------------------------- */
/*      Fetch the elevation dataset and attach as a band.               */
/* -------------------------------------------------------------------- */
    int nNextBand = 1;
    hid_t hElevation = H5Dopen( hHDF5, "/BAG_root/elevation" );
    if( hElevation < 0 )
    {
        delete poDS;
        return NULL;
    }

    BAGRasterBand *poElevBand = new BAGRasterBand( poDS, nNextBand );

    if( !poElevBand->Initialize( hElevation, "elevation" ) )
    {
        delete poElevBand;
        delete poDS;
        return NULL;
    }

    poDS->nRasterXSize = poElevBand->nRasterXSize;
    poDS->nRasterYSize = poElevBand->nRasterYSize;

    poDS->SetBand( nNextBand++, poElevBand );

/* -------------------------------------------------------------------- */
/*      Try to do the same for the uncertainty band.                    */
/* -------------------------------------------------------------------- */
    hid_t hUncertainty = H5Dopen( hHDF5, "/BAG_root/uncertainty" );
    BAGRasterBand *poUBand = new BAGRasterBand( poDS, nNextBand );

    if( hUncertainty >= 0 && poUBand->Initialize( hUncertainty, "uncertainty") )
    {
        poDS->SetBand( nNextBand++, poUBand );
    }
    else
        delete poUBand;

/* -------------------------------------------------------------------- */
/*      Try to do the same for the nominal_elevation band.              */
/* -------------------------------------------------------------------- */
    hid_t hNominal = -1;

    H5E_BEGIN_TRY {
        hNominal = H5Dopen( hHDF5, "/BAG_root/nominal_elevation" );
    } H5E_END_TRY;

    BAGRasterBand *poNBand = new BAGRasterBand( poDS, nNextBand );
    if( hNominal >= 0 && poNBand->Initialize( hNominal,
                                              "nominal_elevation" ) )
    {
        poDS->SetBand( nNextBand++, poNBand );
    }
    else
        delete poNBand;
        
/* -------------------------------------------------------------------- */
/*      Load the XML metadata.                                          */
/* -------------------------------------------------------------------- */
    poDS->LoadMetadata();

/* -------------------------------------------------------------------- */
/*      Setup/check for pam .aux.xml.                                   */
/* -------------------------------------------------------------------- */
    poDS->SetDescription( poOpenInfo->pszFilename );
    poDS->TryLoadXML();

/* -------------------------------------------------------------------- */
/*      Setup overviews.                                                */
/* -------------------------------------------------------------------- */
    poDS->oOvManager.Initialize( poDS, poOpenInfo->pszFilename );

    return( poDS );
}
Code Example #5
void close_file(hid_t h5file) {
  if (h5file >= 0) {
    assert(H5Fclose(h5file) >= 0);
  }
}
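For symmetry, a matching open helper in the same defensive style might look like the sketch below; it is illustrative, not code taken from the original project:

/* Hypothetical counterpart to close_file above: opens a file read-only and
 * asserts that the returned handle is valid (requires assert.h and hdf5.h). */
hid_t open_file(const char *path) {
  hid_t h5file = H5Fopen(path, H5F_ACC_RDONLY, H5P_DEFAULT);
  assert(h5file >= 0);
  return h5file;
}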
Code Example #6
File: tst_h_atts4.c  Project: U-238/gempak
int
main()
{
   printf("\n*** Checking HDF5 attribute functions for memory leaks.\n");
#ifdef EXTRA_TESTS
   printf("*** Checking vlen of compound file...");
   {
#define NUM_OBJ_2 2
#define ATT_NAME "Poseidon"
      hid_t fapl_id, fcpl_id;
      size_t chunk_cache_size = MY_CHUNK_CACHE_SIZE;
      size_t chunk_cache_nelems = CHUNK_CACHE_NELEMS;
      float chunk_cache_preemption = CHUNK_CACHE_PREEMPTION;
      hid_t fileid, grpid, attid, spaceid;
      hid_t s1_typeid, vlen_typeid;
      hid_t file_typeid1[NUM_OBJ_2], native_typeid1[NUM_OBJ_2];
      hid_t file_typeid2, native_typeid2;
      hsize_t num_obj;
      H5O_info_t obj_info;
      char obj_name[STR_LEN + 1];
      hsize_t dims[1] = {ATT_LEN}; /* netcdf attributes always 1-D. */
      struct s1
      {
	 float x;
	 double y;
      };

      /* vc stands for "Vlen of Compound." */
      hvl_t *vc_out;
      int i, k;

      /* Create some output data: an array of vlen (length ATT_LEN) of
       * struct s1. */
      if (!(vc_out = calloc(sizeof(hvl_t), ATT_LEN))) ERR;
      for (i = 0; i < ATT_LEN; i++)
      {
	 vc_out[i].len = i + 1; 
	 if (!(vc_out[i].p = calloc(sizeof(struct s1), vc_out[i].len))) ERR;
	 for (k = 0; k < vc_out[i].len; k++)
	 {
	    ((struct s1 *)vc_out[i].p)[k].x = 42.42;
	    ((struct s1 *)vc_out[i].p)[k].y = 2.0;
	 }
      }
      
      /* Create the HDF5 file, with cache control, creation order, and
       * all the trimmings. */
      if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR;
      if (H5Pset_fclose_degree(fapl_id, H5F_CLOSE_STRONG)) ERR;
      if (H5Pset_cache(fapl_id, 0, chunk_cache_nelems, chunk_cache_size, 
		       chunk_cache_preemption) < 0) ERR;
      if ((fcpl_id = H5Pcreate(H5P_FILE_CREATE)) < 0) ERR;
      if (H5Pset_link_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | 
					       H5P_CRT_ORDER_INDEXED)) < 0) ERR;
      if (H5Pset_attr_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | 
					       H5P_CRT_ORDER_INDEXED)) < 0) ERR;
      if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, fcpl_id, fapl_id)) < 0) ERR;
      if (H5Pclose(fapl_id) < 0) ERR;
      if (H5Pclose(fcpl_id) < 0) ERR;
      
      /* Open the root group. */
      if ((grpid = H5Gopen2(fileid, "/", H5P_DEFAULT)) < 0) ERR;
      
      /* Create the compound type for struct s1. */
      if ((s1_typeid = H5Tcreate(H5T_COMPOUND, sizeof(struct s1))) < 0) ERR;
      if (H5Tinsert(s1_typeid, X_NAME, offsetof(struct s1, x), 
		    H5T_NATIVE_FLOAT) < 0) ERR;
      if (H5Tinsert(s1_typeid, Y_NAME, offsetof(struct s1, y), 
		    H5T_NATIVE_DOUBLE) < 0) ERR;
      if (H5Tcommit(grpid, S1_TYPE_NAME, s1_typeid) < 0) ERR;
      
      /* Create a vlen type. It's a vlen of struct s1. */
      if ((vlen_typeid = H5Tvlen_create(s1_typeid)) < 0) ERR;
      if (H5Tcommit(grpid, VLEN_TYPE_NAME, vlen_typeid) < 0) ERR;
      
      /* Create an attribute of this new type. */
      if ((spaceid = H5Screate_simple(1, dims, NULL)) < 0) ERR;
      if ((attid = H5Acreate(grpid, ATT_NAME, vlen_typeid, spaceid, 
			     H5P_DEFAULT)) < 0) ERR;
      if (H5Awrite(attid, vlen_typeid, vc_out) < 0) ERR;
      
      /* Close the types. */
      if (H5Tclose(s1_typeid) < 0 ||
	  H5Tclose(vlen_typeid) < 0) ERR;
	  
      /* Close the att. */
      if (H5Aclose(attid) < 0) ERR;
      
      /* Close the space. */
      if (H5Sclose(spaceid) < 0) ERR;

      /* Close the group and file. */
      if (H5Gclose(grpid) < 0 ||
	  H5Fclose(fileid) < 0) ERR;

      /* Reopen the file. */
      if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) ERR;
      if ((grpid = H5Gopen(fileid, "/")) < 0) ERR;

      /* How many objects in this group? (There should be 2, the
       * types. Atts don't count as objects to HDF5.) */
      if (H5Gget_num_objs(grpid, &num_obj) < 0) ERR;
      if (num_obj != NUM_OBJ_2) ERR;

      /* For each object in the group... */
      for (i = 0; i < num_obj; i++)
      {
	 /* Get the name, and make sure this is a type. */
	 if (H5Oget_info_by_idx(grpid, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC,
				i, &obj_info, H5P_DEFAULT) < 0) ERR;
	 if (H5Lget_name_by_idx(grpid, ".", H5_INDEX_NAME, H5_ITER_INC, i,
				obj_name, STR_LEN + 1, H5P_DEFAULT) < 0) ERR;
	 if (obj_info.type != H5O_TYPE_NAMED_DATATYPE) ERR;

	 /* Get the typeid and native typeid. */
	 if ((file_typeid1[i] = H5Topen2(grpid, obj_name, H5P_DEFAULT)) < 0) ERR;
	 if ((native_typeid1[i] = H5Tget_native_type(file_typeid1[i],
						     H5T_DIR_DEFAULT)) < 0) ERR;
      }

      /* There is one att: open it by index. */
      if ((attid = H5Aopen_idx(grpid, 0)) < 0) ERR;

      /* Get file and native typeids of the att. */
      if ((file_typeid2 = H5Aget_type(attid)) < 0) ERR;
      if ((native_typeid2 = H5Tget_native_type(file_typeid2, H5T_DIR_DEFAULT)) < 0) ERR;

      /* Close the attribute. */
      if (H5Aclose(attid) < 0) ERR;

      /* Close the typeids. */
      for (i = 0; i < NUM_OBJ_2; i++)
      {
	 if (H5Tclose(file_typeid1[i]) < 0) ERR;
	 if (H5Tclose(native_typeid1[i]) < 0) ERR;
      }
      if (H5Tclose(file_typeid2) < 0) ERR;
      if (H5Tclose(native_typeid2) < 0) ERR;

      /* Close the group and file. */
      if (H5Gclose(grpid) < 0 ||
	  H5Fclose(fileid) < 0) ERR;

      /* Deallocate our vlens. */
      for (i = 0; i < ATT_LEN; i++)
	 free(vc_out[i].p);
      free(vc_out);
   }
   SUMMARIZE_ERR;
#endif /* EXTRA_TESTS */
   FINAL_RESULTS;
}
Code Example #7
File: Citcoms_Hdf2Vtk.c  Project: QuLogic/citcoms
int main(int argc, char *argv[])
{
    char *datafile;

    hid_t h5file;
    hid_t input;
    herr_t status;

    int caps;
    int capid;
    cap_t *cap;

    int step;
    int n, i, j, k;
    int nodex, nodey, nodez;
	float radius_inner;
	float radius_outer;
	
	
	int nodex_redu=0;
	int nodey_redu=0;
	int nodez_redu=0;
	int initial=0;
	int timestep=-1;
	
    int current_t=0;
    int timesteps=0;
	
	int cell_counter=0;
	int cell_counter_surface=0;
	
    int *steps;
    char *endptr;

    field_t *coord;
    field_t *velocity;
    field_t *temperature;
    field_t *viscosity;
	
	//Bottom
	field_t *botm_coord;
	field_t *botm_hflux;
	field_t *botm_velo;
	field_t *botm_topo;
	
	//Surface
	field_t *surf_coord;
	field_t *surf_hflux;
	field_t *surf_velo;
	field_t *surf_topo;
	
	///////////////////////////////////////////////
	int bottom=false;
	int surface=false;
	int topo=false;
	int ascii=false;
	
    /************************************************************************
     * Parse command-line parameters.                                       *
     ************************************************************************/

    /*
     * HDF5 file must be specified as first argument.
     */
    
    if (argc < 2)
    {
        fprintf(stderr, "Usage: run with -h to get help\n", argv[0]);
        return EXIT_FAILURE;
    }

    int c;    /* getopt() returns int; -1 may not be representable in a plain char */
    char *hdf_filepath;
    char *output_filepath;
    extern char *optarg;
    extern int optind, optopt;
	int errflg=0;
    while ((c = getopt(argc, argv, ":p:o:i:t:x:y:z:bscah?")) != -1) {
        switch(c) {
        		case 'p':
					hdf_filepath = optarg;
					break;
        
				case 'o':
            		output_filepath = optarg;
                	printf("Got output filepath\n");
            		break;
								
        		case 'i':
					initial = atoi(optarg);
					printf("Initial: %d\n",initial);
					break;
				
				case 't':
					timesteps = atoi(optarg);
					printf("Timesteps: %d\n", timesteps);
					//Inclusive
					timesteps++;
					break;
				
				case 'x':
					nodex_redu=atoi(optarg);
					break;
				
				case 'y':
					nodey_redu=atoi(optarg);
					break;
				
				case 'z':
					nodez_redu=atoi(optarg);
					break;
				
				
				case ':':       /* missing operand */
                    fprintf(stderr,
                            "Option -%c requires an operand\n", optopt);
                    errflg++;
                	break;
				////////////////////
				
				case 'b':
					bottom=true;
					printf("Create Bottom");
					break;
				
				case 's':
					surface=true;
					printf("Create Surface");
					break;
				
				case 'c':
					topo=true;
					printf("Create Topography");	
					break;
				
				case 'a':
					ascii=true;
					break;
				
				case '?':
            		errflg++;
					print_help();
				break;
        	}
    }
		
    for ( ; optind < argc; optind++) {
        if (access(argv[optind], R_OK)) {
            printf("Cannot read file: %s\n", argv[optind]);
        }
    }

	
	printf("Opening HDF file...\n");
	
    h5file = H5Fopen(hdf_filepath, H5F_ACC_RDONLY, H5P_DEFAULT);
    if (h5file < 0)
    {
        fprintf(stderr, "Could not open HDF5 file \"%s\"\n", argv[1]);
        return EXIT_FAILURE;
    }

    


    /************************************************************************
     * Get mesh parameters.                                                 *
     ************************************************************************/

    /* Read input group */
    input = H5Gopen(h5file, "input");
    if (input < 0)
    {
        fprintf(stderr, "Could not open /input group in \"%s\"\n", argv[1]);
        status = H5Fclose(h5file);
        return EXIT_FAILURE;
    }
	
	
    status = get_attribute_str(input, "datafile", &datafile);
    status = get_attribute_int(input, "nproc_surf", &caps);
    status = get_attribute_int(input, "nodex", &nodex);
    status = get_attribute_int(input, "nodey", &nodey);
    status = get_attribute_int(input, "nodez", &nodez);
	status = get_attribute_float(input,"radius_inner",&radius_inner);
	status = get_attribute_float(input,"radius_outer",&radius_outer);
	
	//Bound input params against the mesh sizes stored in the HDF file
	if(nodex_redu < nodex && nodex_redu > 0)
	{
		nodex = nodex_redu;
	}
	if(nodey_redu < nodey && nodey_redu > 0)
	{
		nodey = nodey_redu;
	}
	if(nodez_redu < nodez && nodez_redu > 0)
	{
		nodez = nodez_redu;
	}
	
	
	printf("Nodex: %d\n",nodex);
	printf("Nodey: %d\n",nodey);
	printf("Nodez: %d\n",nodez);
	printf("Caps: %d\n",caps);
    /* Release input group */
    status = H5Gclose(input);
	

    /************************************************************************
     * Create fields using cap00 datasets as a template.                    *
     ************************************************************************/

    cap         = open_cap(h5file, 0);
    coord       = open_field(cap, "coord");
    velocity    = open_field(cap, "velocity");
    temperature = open_field(cap, "temperature");
    viscosity   = open_field(cap, "viscosity");
    
	
	/*Create fields bottom and surface*/
	botm_coord = open_field(cap,"botm/coord");
	botm_hflux = open_field(cap,"botm/heatflux");
	botm_velo = open_field(cap,"botm/velocity");
	botm_topo = open_field(cap,"botm/topography");
	
	surf_coord = open_field(cap,"surf/coord");
	surf_hflux = open_field(cap,"surf/heatflux");
	surf_velo = open_field(cap,"surf/velocity");
	surf_topo = open_field(cap,"surf/topography");
	
	status      = close_cap(cap);

	
    /************************************************************************
     * Output requested data.                                               *
     ************************************************************************/
	int iterations=0;
    /* Iterate over timesteps */
    for(current_t = initial; current_t < timesteps; current_t++)
    {
		
		printf("\nProcessing timestep: %d\n",current_t);
      
		
		coordinates_t ordered_coordinates[((nodex*nodey*nodez)*caps)];
		coordinates_t ordered_velocity[((nodex*nodey*nodez)*caps)*3];		
		float ordered_temperature[(nodex*nodey*nodez)*caps];
		float ordered_viscosity[(nodex*nodey*nodez)*caps];
		hexahedron_t connectivity[((nodex-1)*(nodey-1)*(nodez-1))*caps];
		
		
		coordinates_t ordered_botm_coords[(nodex*nodey*caps)];
		float ordered_botm_hflux[(nodex*nodey*caps)];
		coordinates_t ordered_botm_velocity[(nodex*nodey*caps)];
		
		
		coordinates_t ordered_surf_coords[(nodex*nodey*caps)];
		float ordered_surf_hflux[(nodex*nodey*caps)];
		coordinates_t ordered_surf_velocity[(nodex*nodey*caps)];
		
		vtk_pixel_t connectivity_surface[(nodex*nodey*caps)];
		
		//Holds single coordinate		
		coordinates_t coordinate;
		
		//Holds single vector
		coordinates_t velocity_vector;
				
        /* Iterate over caps */
		
		for(capid = 0; capid < caps; capid++)
        {
            cap = open_cap(h5file, capid);
			printf("Processing cap %d of %d\n",capid+1,caps);
            //snprintf(filename, (size_t)99, "%s.cap%02d.%d", datafile, capid, step);
            //fprintf(stderr, "Writing %s\n", filename);

            //file = fopen(filename, "w");
            //fprintf(file, "%d x %d x %d\n", nodex, nodey, nodez);

            /* Read data from HDF5 file. */
            read_field(cap, coord, 0);
            read_field(cap, velocity, current_t);
            read_field(cap, temperature, current_t);
            read_field(cap, viscosity, current_t);
			
			
			
    		/*Counts iterations*/
    		n = 0;
			
			//Number of nodes per cap
			int nodes=nodex*nodey*nodez;
			
	        /* Traverse data in Citcom order */
            for(j = 0; j < nodey; j++)
            {	
                for(i = 0; i < nodex; i++)
                {
			       for(k = 0; k < nodez; k++)
                    {       
								//Coordinates						
								coordinate.x = coord->data[3*n+0];
                                coordinate.y = coord->data[3*n+1];
                                coordinate.z = coord->data[3*n+2];
								coordinate = rtf_to_xyz(coordinate);
								ordered_coordinates[n+(capid*nodes)].x = coordinate.x;
								ordered_coordinates[n+(capid*nodes)].y = coordinate.y;
								ordered_coordinates[n+(capid*nodes)].z = coordinate.z;
								
								//Velocity
                        	    velocity_vector.x = velocity->data[3*n+0];
                                velocity_vector.y = velocity->data[3*n+1];
                                velocity_vector.z = velocity->data[3*n+2];
						
								velocity_vector = velocity_to_cart(velocity_vector,coordinate);
								
								ordered_velocity[n+(capid*nodes)].x = velocity_vector.x;
								ordered_velocity[n+(capid*nodes)].y = velocity_vector.y;
								ordered_velocity[n+(capid*nodes)].z = velocity_vector.z;
						
								//Temperature
                                ordered_temperature[n+(capid*nodes)] = temperature->data[n];
						
								//Viscosity
                                ordered_viscosity[n+(capid*nodes)] = viscosity->data[n];
								
								n++;
                    }
                }
            }

			//Create connectivity
			if(iterations==0)
			{
				//For 3d Data 
            	int i=1;    //Counts X Direction
            	int j=1;    //Counts Y Direction
            	int k=1;    //Counts Z Direction
    		
            	for(n=0; n<((nodex*nodey*nodez)-(nodex*nodez));n++)
					{
						
                		if ((i%nodez)==0)   //X-Values
							{
                    		j++;                 //Count Y-Values
        					}
                		if ((j%nodex)==0)
							{
                    		k++;                 //Count Z-Values
                  			}
							
                		if (((i%nodez) != 0) && ((j%nodex) != 0))            //Check if Box can be created
							{
							//Create Connectivity
                    		connectivity[cell_counter].c1 = n+(capid*(nodes));
							connectivity[cell_counter].c2 = connectivity[cell_counter].c1+nodez;
                    		connectivity[cell_counter].c3 = connectivity[cell_counter].c2+nodez*nodex;
                    		connectivity[cell_counter].c4 = connectivity[cell_counter].c1+nodez*nodex;
                    		connectivity[cell_counter].c5 = connectivity[cell_counter].c1+1;
                    		connectivity[cell_counter].c6 = connectivity[cell_counter].c5+nodez;
                    		connectivity[cell_counter].c7 = connectivity[cell_counter].c6+nodez*nodex;
                    		connectivity[cell_counter].c8 = connectivity[cell_counter].c5+nodez*nodex;
							cell_counter++;
							}                   	
                i++;
				
      			}
			}
			

			
			
			//Bottom and Surface
			
			if(bottom==true){
				
				/*Read Bottom data from HDF5 file.*/
				read_field(cap,botm_coord,0);
				read_field(cap,botm_hflux,current_t);
				read_field(cap,botm_velo,current_t);
				read_field(cap,botm_topo,current_t);
				float botm_mean=0.0;	
				if(topo==true)
					{
					botm_mean = calc_mean(botm_topo,nodex,nodey);
					}					
				for(n=0;n<nodex*nodey;n++)
				{
				//Coordinates						
				coordinate.x = botm_coord->data[2*n+0];
    			coordinate.y = botm_coord->data[2*n+1];
    				if(topo==true)
					{
					coordinate.z = radius_inner+(botm_topo->data[n]-botm_mean)*
						(pow(10.0,21.0)/(pow(6371000,2)/pow(10,-6))/3300*10)/1000;
					//printf("Z: %f\n",coordinate.z);
					}
					else
					{
					coordinate.z = radius_inner;
					}
					
				coordinate = rtf_to_xyz(coordinate);
				ordered_botm_coords[n+(capid*nodex*nodey)].x = coordinate.x;
				ordered_botm_coords[n+(capid*nodex*nodey)].y = coordinate.y;
				ordered_botm_coords[n+(capid*nodex*nodey)].z = coordinate.z;
								
				ordered_botm_hflux[n+((capid)*nodex*nodey)] = botm_hflux->data[n];
					
				velocity_vector.x = botm_velo->data[3*n+0];
				velocity_vector.y = botm_velo->data[3*n+1];
				velocity_vector.z = botm_velo->data[3*n+2];
					
				velocity_vector = velocity_to_cart(velocity_vector,coordinate);
					
				ordered_botm_velocity[n+(capid*nodex*nodey)].x = velocity_vector.x;
				ordered_botm_velocity[n+(capid*nodex*nodey)].y = velocity_vector.y;
				ordered_botm_velocity[n+(capid*nodex*nodey)].z = velocity_vector.z;
								
				}		
	
	
			}
			
			if(surface==true)
			{
		
				/*Read Surface data from HDF5 file.*/
				read_field(cap,surf_coord,0);
				read_field(cap,surf_hflux,current_t);
				read_field(cap,surf_velo,current_t);
				read_field(cap,surf_topo,current_t);
				float surf_mean=0.0;
				if(topo==true)
					{
						
					surf_mean = calc_mean(surf_topo,nodex,nodey);
					}					
				for(n=0;n<nodex*nodey;n++)
				{
				//Coordinates						
				coordinate.x = surf_coord->data[2*n+0];
    			coordinate.y = surf_coord->data[2*n+1];
    				if(topo==true)
					{
					coordinate.z = radius_outer+(surf_topo->data[n]-surf_mean)*
						(pow(10.0,21.0)/(pow(6371000,2)/pow(10,-6))/3300*10)/1000;
					//printf("Z: %f\n",coordinate.z);
					}
					else
					{
					coordinate.z = radius_outer;
					}
					
				coordinate = rtf_to_xyz(coordinate);
				ordered_surf_coords[n+(capid*nodex*nodey)].x = coordinate.x;
				ordered_surf_coords[n+(capid*nodex*nodey)].y = coordinate.y;
				ordered_surf_coords[n+(capid*nodex*nodey)].z = coordinate.z;
								
				ordered_surf_hflux[n+((capid)*nodex*nodey)] = surf_hflux->data[n];
					
				velocity_vector.x = surf_velo->data[3*n+0];
				velocity_vector.y = surf_velo->data[3*n+1];
				velocity_vector.z = surf_velo->data[3*n+2];
					
				velocity_vector = velocity_to_cart(velocity_vector,coordinate);
					
				ordered_surf_velocity[n+(capid*nodex*nodey)].x = velocity_vector.x;
				ordered_surf_velocity[n+(capid*nodex*nodey)].y = velocity_vector.y;
				ordered_surf_velocity[n+(capid*nodex*nodey)].z = velocity_vector.z;
								
				}		
				
			}

			
			//Create connectivity information 2d
			
			
			if(iterations==0){
				if(surface==true || bottom==true)
					{
					
            		for(n=0;n<(nodex*nodey)-nodey;n++)
						{
                    	if ((n+1)%nodey!=0){
							connectivity_surface[cell_counter_surface].c1 = n+(capid*((nodex)*(nodey)));
                        	connectivity_surface[cell_counter_surface].c2 = connectivity_surface[cell_counter_surface].c1+1;
							connectivity_surface[cell_counter_surface].c3 = connectivity_surface[cell_counter_surface].c1+nodey;
							connectivity_surface[cell_counter_surface].c4 = connectivity_surface[cell_counter_surface].c3+1;          
							cell_counter_surface++;
							}
						}
					}
				}
			
			close_cap(cap);
        }
		
    iterations++;
		
	//Write data to file
	write_vtk_shell(ordered_coordinates, connectivity, ordered_temperature, 
					   ordered_viscosity, ordered_velocity, nodex, nodey, nodez,
					   current_t, radius_inner,caps,ascii);
	
	if(bottom==true){
		write_vtk_surface(ordered_botm_coords,connectivity_surface,ordered_botm_hflux,
						  ordered_botm_velocity,current_t,nodex,nodey,nodez,caps,"bottom");
	}
	
	
	if(surface==true){
		write_vtk_surface(ordered_surf_coords,connectivity_surface,ordered_surf_hflux,
						  ordered_surf_velocity,current_t,nodex,nodey,nodez,caps,"surface");
	}
		
	}//end timesteps loop

    /* Release resources. */

    status = close_field(coord);
    status = close_field(velocity);
    status = close_field(temperature);
    status = close_field(viscosity);
    status = H5Fclose(h5file);

    return EXIT_SUCCESS;
}
Code Example #8
File: ham-mom.cpp  Project: wpoely86/Hubbard-GPU
/**
 * This methods calculates the eigenvalues for a range of U values. The interval is [Ubegin, Uend]
 * with a stepsize of step. The resulting data is written to a file in the HDF5 file format.
 * @param Ubegin the startpoint for U
 * @param Uend the endpoint for U. We demand Ubegin < Uend
 * @param step the stepsize to use for U
 * @param filename the name of the file to write the eigenvalues to
 */
void MomHamiltonian::GenerateData(double Ubegin, double Uend, double step, std::string filename)
{
    double Ucur = Ubegin;

    if( !baseUp.size() || !baseDown.size() )
        BuildBase();

    std::vector<double> eigenvalues(dim);

    hid_t       file_id, group_id, dataset_id, dataspace_id, attribute_id;
    herr_t      status;

    file_id = H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    HDF5_STATUS_CHECK(file_id);

    group_id = H5Gcreate(file_id, "run", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    HDF5_STATUS_CHECK(group_id);

    dataspace_id = H5Screate(H5S_SCALAR);

    attribute_id = H5Acreate (group_id, "L", H5T_STD_I32LE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Awrite (attribute_id, H5T_NATIVE_INT, &L );
    HDF5_STATUS_CHECK(status);
    status = H5Aclose(attribute_id);
    HDF5_STATUS_CHECK(status);

    attribute_id = H5Acreate (group_id, "Nu", H5T_STD_I32LE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Awrite (attribute_id, H5T_NATIVE_INT, &Nu );
    HDF5_STATUS_CHECK(status);
    status = H5Aclose(attribute_id);
    HDF5_STATUS_CHECK(status);

    attribute_id = H5Acreate (group_id, "Nd", H5T_STD_I32LE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Awrite (attribute_id, H5T_NATIVE_INT, &Nd );
    HDF5_STATUS_CHECK(status);
    status = H5Aclose(attribute_id);
    HDF5_STATUS_CHECK(status);

    attribute_id = H5Acreate (group_id, "J", H5T_IEEE_F64LE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Awrite (attribute_id, H5T_NATIVE_DOUBLE, &J );
    HDF5_STATUS_CHECK(status);
    status = H5Aclose(attribute_id);
    HDF5_STATUS_CHECK(status);

    attribute_id = H5Acreate (group_id, "Ubegin", H5T_IEEE_F64LE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Awrite (attribute_id, H5T_NATIVE_DOUBLE, &Ubegin );
    HDF5_STATUS_CHECK(status);
    status = H5Aclose(attribute_id);
    HDF5_STATUS_CHECK(status);

    attribute_id = H5Acreate (group_id, "Uend", H5T_IEEE_F64LE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Awrite (attribute_id, H5T_NATIVE_DOUBLE, &Uend );
    HDF5_STATUS_CHECK(status);
    status = H5Aclose(attribute_id);
    HDF5_STATUS_CHECK(status);

    attribute_id = H5Acreate (group_id, "Ustep", H5T_IEEE_F64LE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Awrite (attribute_id, H5T_NATIVE_DOUBLE, &step );
    HDF5_STATUS_CHECK(status);
    status = H5Aclose(attribute_id);
    HDF5_STATUS_CHECK(status);

    status = H5Sclose(dataspace_id);
    HDF5_STATUS_CHECK(status);

    status = H5Gclose(group_id);
    HDF5_STATUS_CHECK(status);

    status = H5Fclose(file_id);
    HDF5_STATUS_CHECK(status);

    std::vector<double> diagonalelements(dim);

    std::vector< std::unique_ptr<double []> > offdiag;
    offdiag.resize(L);

    // make sure that we don't rebuild the whole hamiltonian every time:
    // store the hopping and interaction parts separately so we can just
    // add them in every step
    #pragma omp parallel for
    for(int B=0; B<L; B++)
    {
        int cur_dim = mombase[B].size();
        int offset = 0;

        for(int tmp=0; tmp<B; tmp++)
            offset += mombase[tmp].size();

        offdiag[B].reset(new double [cur_dim*cur_dim]);

        for(int i=0; i<cur_dim; i++)
        {
            int a = mombase[B][i].first;
            int b = mombase[B][i].second;

            diagonalelements[offset+i] = hopping(baseUp[a]) + hopping(baseDown[b]);

            for(int j=i; j<cur_dim; j++)
            {
                int c = mombase[B][j].first;
                int d = mombase[B][j].second;

                offdiag[B][j+cur_dim*i] = 1.0/L*interaction(a,b,c,d);
                offdiag[B][i+cur_dim*j] = offdiag[B][j+cur_dim*i];
            }
        }
    }


    while(Ucur <= Uend)
    {
        std::cout << "U = " << Ucur << std::endl;
        setU(Ucur);

        // make hamiltonian
        #pragma omp parallel for
        for(int B=0; B<L; B++)
        {
            int cur_dim = mombase[B].size();
            int offset = 0;

            for(int tmp=0; tmp<B; tmp++)
                offset += mombase[tmp].size();

            std::memcpy(blockmat[B].get(),offdiag[B].get(),cur_dim*cur_dim*sizeof(double));

            int tmp = cur_dim*cur_dim;
            int inc = 1;
            dscal_(&tmp,&Ucur,blockmat[B].get(),&inc);

            for(int i=0; i<cur_dim; i++)
                blockmat[B][i+cur_dim*i] += diagonalelements[offset+i];
        }


        #pragma omp parallel for
        for(int B=0; B<L; B++)
        {
            int dim = mombase[B].size();
            int offset = 0;

            for(int tmp=0; tmp<B; tmp++)
                offset += mombase[tmp].size();

            Diagonalize(dim, blockmat[B].get(), &eigenvalues[offset], false);
        }

        hid_t U_id;
        std::stringstream name;
        name << std::setprecision(5) << std::fixed << "/run/" << Ucur;

        file_id = H5Fopen(filename.c_str(), H5F_ACC_RDWR, H5P_DEFAULT);
        HDF5_STATUS_CHECK(file_id);

        U_id = H5Gcreate(file_id, name.str().c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        HDF5_STATUS_CHECK(U_id);

        for(int B=0; B<L; B++)
        {
            int dim = mombase[B].size();
            int offset = 0;

            for(int tmp=0; tmp<B; tmp++)
                offset += mombase[tmp].size();

            hsize_t dimarr = dim;

            dataspace_id = H5Screate_simple(1, &dimarr, NULL);

            std::stringstream cur_block;
            cur_block << B;
            dataset_id = H5Dcreate(U_id, cur_block.str().c_str(), H5T_IEEE_F64LE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

            status = H5Dwrite(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &eigenvalues[offset] );
            HDF5_STATUS_CHECK(status);

            status = H5Sclose(dataspace_id);
            HDF5_STATUS_CHECK(status);

            status = H5Dclose(dataset_id);
            HDF5_STATUS_CHECK(status);
        }

        status = H5Gclose(U_id);
        HDF5_STATUS_CHECK(status);

        status = H5Fclose(file_id);
        HDF5_STATUS_CHECK(status);

        Ucur += step;
    }
}
Code Example #9
File: io.c  Project: alexgittens/mpi_pcavariants
// localRowChunk will be allocated inside the function
double loadMatrix(char * infilename, char * datasetname, distMatrixInfo
    *matInfo, double ** localRowChunk, MPI_Comm *comm, MPI_Info *info) {
    double startTime = MPI_Wtime();

    // assuming double inputs, check that the chunks aren't too big to be read
    // by HDF5 in parallel mode
    if (matInfo->bigPartitionSize*matInfo->numcols >= 268435456 &&
        matInfo->mpi_rank == 0) {
        fprintf(stderr, "MPIIO-based HDF5 is limited to reading 2GiB at most in "
            "each call to read; try increasing the number of processors\n");
        exit(-1); }

    *localRowChunk = (double *) malloc( matInfo->localrows * matInfo->numcols *sizeof(double) );
    if (*localRowChunk == NULL) {
        fprintf(stderr, "Could not allocate enough memory for the local chunk "
            "of rows for %s in process %d\n", datasetname, matInfo->mpi_rank);
        exit(-1);
    }

    hid_t plist_id = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(plist_id, *comm, *info);
    hid_t file_id = H5Fopen(infilename, H5F_ACC_RDONLY, plist_id);
    if (file_id < 0) {
        fprintf(stderr, "Error opening %s\n", infilename);
        exit(-1);
    }

    hid_t dataset_id = H5Dopen(file_id, datasetname, H5P_DEFAULT);
    if (dataset_id < 0) {
        fprintf(stderr, "Error opening %s in %s : are you sure this dataset "
            "exists?\n", datasetname, infilename);
        exit(-1);
    }

    hsize_t offset[2], count[2], offset_out[2];
    count[0] = matInfo->localrows;
    count[1] = matInfo->numcols;
    offset[0] = matInfo->mpi_rank < matInfo->numBigPartitions ? 
                (matInfo->mpi_rank * matInfo->bigPartitionSize) : 
                (matInfo->numBigPartitions * matInfo->bigPartitionSize + 
                    (matInfo->mpi_rank - matInfo->numBigPartitions) * 
                    matInfo->littlePartitionSize );
    offset[1] = 0;

    hid_t filespace = H5Dget_space(dataset_id);
    if ( H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL) < 0 ) {
        fprintf(stderr, "Error selecting input file hyperslab in process %d\n",
            matInfo->mpi_rank);
        exit(-1);
    }

    hid_t memspace = H5Screate_simple(2, count, NULL);
    offset_out[0] = 0;
    offset_out[1] = 0;
    if ( H5Sselect_hyperslab(memspace, H5S_SELECT_SET, offset_out, NULL, count, NULL) < 0 ) {
        fprintf(stderr, "Error selecting memory hyperslab in process %d\n",
            matInfo->mpi_rank);
        exit(-1);
    }

    hid_t daccess_id = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_dxpl_mpio(daccess_id, H5FD_MPIO_INDEPENDENT); // collective io seems slow for this

    if (matInfo->mpi_rank == 0) {
        printf("Loading matrix from dataset %s in file %s\n", datasetname,
            infilename);
    }
    if( H5Dread(dataset_id, H5T_NATIVE_DOUBLE, memspace, filespace, daccess_id, *localRowChunk) < 0) {
        fprintf(stderr, "Error reading dataset in process %d\n", matInfo->mpi_rank);
        exit(-1);
    }

    H5Pclose(daccess_id);
    H5Sclose(memspace);
    H5Sclose(filespace);
    H5Dclose(dataset_id);
    H5Fclose(file_id);
    H5Pclose(plist_id);

    return MPI_Wtime() - startTime;
}
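The transfer property list above is created with independent MPI-IO because, as the inline comment notes, collective I/O was slower for this workload. For comparison, a minimal sketch of building the collective variant (an illustration, not part of the original file):

/* Hypothetical helper: collective counterpart of the independent transfer
 * property list used in loadMatrix. The caller passes the returned id to
 * H5Dread and releases it with H5Pclose. */
static hid_t make_collective_dxpl(void)
{
    hid_t dxpl_id = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_dxpl_mpio(dxpl_id, H5FD_MPIO_COLLECTIVE);
    return dxpl_id;
}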
Code Example #10
File: error_test.c  Project: adasworks/hdf5
/*-------------------------------------------------------------------------
 * Function:	main
 *
 * Purpose:	Test error API.
 *
 * Programmer:	Raymond Lu
 *		July 10, 2003
 *
 *-------------------------------------------------------------------------
 */
int
main(void)
{
    hid_t		file, fapl;
    hid_t               estack_id;
    char		filename[1024];
    const char          *FUNC_main = "main";

    HDfprintf(stderr, "   This program tests the Error API.  There're supposed to be some error messages\n");

    /* Initialize errors */
    if(init_error() < 0)
        TEST_ERROR;

    fapl = h5_fileaccess();

    h5_fixname(FILENAME[0], fapl, filename, sizeof filename);
    if((file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
	TEST_ERROR;

    /* Test error stack */
    if(error_stack() < 0) {
        /* Push an error onto error stack */
        if(H5Epush(ERR_STACK, __FILE__, FUNC_main, __LINE__, ERR_CLS, ERR_MAJ_TEST, ERR_MIN_ERRSTACK,
                "Error stack test failed") < 0) TEST_ERROR;

        /* Delete an error from the top of error stack */
        H5Epop(ERR_STACK, 1);

        /* Make sure we can use other class's major or minor errors. */
        H5Epush(ERR_STACK, __FILE__, FUNC_main, __LINE__, ERR_CLS2, ERR_MAJ_TEST, ERR_MIN_ERRSTACK,
                "Error stack test failed");

        /* Print out the errors on stack */
        dump_error(ERR_STACK);

        /* Empty error stack */
        H5Eclear2(ERR_STACK);

        /* Close error stack */
        H5Eclose_stack(ERR_STACK);
    } /* end if */

    /* Test error API */
    if(test_error(file) < 0) {
        H5Epush(H5E_DEFAULT, __FILE__, FUNC_main, __LINE__, ERR_CLS, ERR_MAJ_TEST, ERR_MIN_SUBROUTINE,
                "Error test failed, %s", "it's wrong");
        estack_id = H5Eget_current_stack();
        H5Eprint2(estack_id, stderr);
        H5Eclose_stack(estack_id);
    } /* end if */

    /* Test pushing a very long error description */
    if(test_long_desc() < 0) TEST_ERROR;

    /* Test creating a new error stack */
    if(test_create() < 0) TEST_ERROR;

    /* Test copying a new error stack */
    if(test_copy() < 0) TEST_ERROR;

    if(H5Fclose(file) < 0) TEST_ERROR;

    /* Close error information */
    if(close_error() < 0)
        TEST_ERROR;

    /* Test error message during data reading when filter isn't registered 
     * Use default FAPL to avoid some VFD drivers by the check-vfd test because
     * the test file was pre-generated. */
    h5_fixname(DATAFILE, H5P_DEFAULT, filename, sizeof filename);
    if(test_filter_error(filename) < 0)
        TEST_ERROR;

    h5_clean_files(FILENAME, fapl);

    HDfprintf(stderr, "\nAll error API tests passed.\n");
    return 0;

error:
    HDfprintf(stderr, "\n***** ERROR TEST FAILED (real problem)! *****\n");
    return 1;
}
Code Example #11
File: h5efc.c  Project: LaHaine/ohpc
int
main(void)
{
    hid_t       file1, file2, group, fapl;      /* Handles */
    herr_t      status;

    /*
     * Create file access property list and set it to allow caching of open
     * files visited through external links.
     */
    fapl = H5Pcreate (H5P_FILE_ACCESS);
    status = H5Pset_elink_file_cache_size (fapl, 8);

    /*
     * Create a new file using the file access property list.
     */
    file1 = H5Fcreate (FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);

    /*
     * Create files to serve as targets for external links.
     */
    file2 = H5Fcreate (EXT_FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Fclose (file2);
    file2 = H5Fcreate (EXT_FILE2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Fclose (file2);
    file2 = H5Fcreate (EXT_FILE3, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Fclose (file2);

    /*
     * Create external links to the target files.
     */
    status = H5Lcreate_external (EXT_FILE1, "/", file1, "link_to_1",
            H5P_DEFAULT, H5P_DEFAULT);
    status = H5Lcreate_external (EXT_FILE2, "/", file1, "link_to_2",
            H5P_DEFAULT, H5P_DEFAULT);
    status = H5Lcreate_external (EXT_FILE3, "/", file1, "link_to_3",
            H5P_DEFAULT, H5P_DEFAULT);

    /*
     * Open and close the targets of all three external links (these will be the
     * root groups of the target files).  The target files should be held open
     * by the root file's external file cache after traversal.
     */
    group = H5Gopen (file1, "/link_to_1", H5P_DEFAULT);
    status = H5Gclose(group);
    group = H5Gopen (file1, "/link_to_2", H5P_DEFAULT);
    status = H5Gclose(group);
    group = H5Gopen (file1, "/link_to_3", H5P_DEFAULT);
    status = H5Gclose(group);

    /*
     * Open and close the targets of all three external links again.  The target
     * files should already be held open by the root file's external file cache,
     * so the library will not actually have to issue an "open" system call.
     */
    group = H5Gopen (file1, "/link_to_1", H5P_DEFAULT);
    status = H5Gclose(group);
    group = H5Gopen (file1, "/link_to_2", H5P_DEFAULT);
    status = H5Gclose(group);
    group = H5Gopen (file1, "/link_to_3", H5P_DEFAULT);
    status = H5Gclose(group);

    /*
     * Release the root file's external file cache.  This will close all the
     * external link target files.
     */
    status = H5Fclear_elink_file_cache (file1);

    /*
     * Close and release resources.
     */
    status = H5Pclose (fapl);
    status = H5Fclose (file1);

    return 0;
}
Code Example #12
File: tst_h_par.c  Project: ArtisticCoding/libmesh
int
main(int argc, char **argv)
{
   int p, my_rank;

#ifdef USE_MPE
   int s_init, e_init, s_define, e_define, s_write, e_write, s_close, e_close;
#endif /* USE_MPE */

   MPI_Init(&argc, &argv);
   MPI_Comm_rank(MPI_COMM_WORLD, &my_rank);
   MPI_Comm_size(MPI_COMM_WORLD, &p);

#ifdef USE_MPE
   MPE_Init_log();
   s_init = MPE_Log_get_event_number();
   e_init = MPE_Log_get_event_number();
   s_define = MPE_Log_get_event_number();
   e_define = MPE_Log_get_event_number();
   s_write = MPE_Log_get_event_number();
   e_write = MPE_Log_get_event_number();
   s_close = MPE_Log_get_event_number();
   e_close = MPE_Log_get_event_number();
   MPE_Describe_state(s_init, e_init, "Init", "red");
   MPE_Describe_state(s_define, e_define, "Define", "yellow");
   MPE_Describe_state(s_write, e_write, "Write", "green");
   MPE_Describe_state(s_close, e_close, "Close", "purple");
   MPE_Start_log();
   MPE_Log_event(s_init, 0, "start init");
#endif /* USE_MPE */

   if (!my_rank)
      printf("*** Creating file for parallel I/O read, and rereading it...");
   {
      hid_t fapl_id, fileid, whole_spaceid, dsid, slice_spaceid, whole_spaceid1, xferid;
      hsize_t start[NDIMS], count[NDIMS];
      hsize_t dims[1];
      int data[SC1], data_in[SC1];
      int num_steps;
      double ftime;
      int write_us, read_us;
      int max_write_us, max_read_us;
      float write_rate, read_rate;
      int i, s;

      /* We will write the same slice of random data over and over to
       * fill the file. */
      for (i = 0; i < SC1; i++)
	 data[i] = rand();

#ifdef USE_MPE
      MPE_Log_event(e_init, 0, "end init");
      MPE_Log_event(s_define, 0, "start define file");
#endif /* USE_MPE */

      /* Create file. */
      if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR;
      if (H5Pset_fapl_mpio(fapl_id, MPI_COMM_WORLD, MPI_INFO_NULL) < 0) ERR;
      if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT,
			      fapl_id)) < 0) ERR;

      /* Create a space to deal with one slice in memory. */
      dims[0] = SC1;
      if ((slice_spaceid = H5Screate_simple(NDIMS, dims, NULL)) < 0) ERR;

      /* Create a space to write all slices. */
      dims[0] = DIM2_LEN;
      if ((whole_spaceid = H5Screate_simple(NDIMS, dims, NULL)) < 0) ERR;

      /* Create dataset. */
      if ((dsid = H5Dcreate1(fileid, VAR_NAME, H5T_NATIVE_INT,
      whole_spaceid, H5P_DEFAULT)) < 0) ERR;

      /* Use collective write operations. */
      if ((xferid = H5Pcreate(H5P_DATASET_XFER)) < 0) ERR;
      if (H5Pset_dxpl_mpio(xferid, H5FD_MPIO_COLLECTIVE) < 0) ERR;

#ifdef USE_MPE
      MPE_Log_event(e_define, 0, "end define file");
      if (my_rank)
	 sleep(my_rank);
#endif /* USE_MPE */

      /* Write the data in num_step steps. */
      ftime = MPI_Wtime();
      num_steps = (DIM2_LEN/SC1) / p;
      for (s = 0; s < num_steps; s++)
      {
#ifdef USE_MPE
	 MPE_Log_event(s_write, 0, "start write slab");
#endif /* USE_MPE */

	 /* Select hyperslab for write of one slice. */
	 start[0] = s * SC1 * p + my_rank * SC1;
	 count[0] = SC1;
	 if (H5Sselect_hyperslab(whole_spaceid, H5S_SELECT_SET,
	 start, NULL, count, NULL) < 0) ERR;

	 if (H5Dwrite(dsid, H5T_NATIVE_INT, slice_spaceid, whole_spaceid,
	 xferid, data) < 0) ERR;

#ifdef USE_MPE
	 MPE_Log_event(e_write, 0, "end write file");
#endif /* USE_MPE */
      }
      write_us = (MPI_Wtime() - ftime) * MILLION;
      MPI_Reduce(&write_us, &max_write_us, 1, MPI_INT, MPI_MAX, 0, MPI_COMM_WORLD);
      if (!my_rank)
      {
	 write_rate = (float)(DIM2_LEN * sizeof(int))/(float)max_write_us;
	 printf("\np=%d, write_rate=%g", p, write_rate);
      }

#ifdef USE_MPE
      MPE_Log_event(s_close, 0, "start close file");
#endif /* USE_MPE */

      /* Close. These collective operations will allow every process
       * to catch up. */
      if (H5Dclose(dsid) < 0 ||
      H5Sclose(whole_spaceid) < 0 ||
      H5Sclose(slice_spaceid) < 0 ||
      H5Pclose(fapl_id) < 0 ||
      H5Fclose(fileid) < 0)
	 ERR;

#ifdef USE_MPE
      MPE_Log_event(e_close, 0, "end close file");
#endif /* USE_MPE */

      /* Open the file. */
      if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR;
      if (H5Pset_fapl_mpio(fapl_id, MPI_COMM_WORLD, MPI_INFO_NULL) < 0) ERR;


      if (H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) ERR;
      if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDONLY, fapl_id)) < 0) ERR;

      /* Create a space to deal with one slice in memory. */
      dims[0] = SC1;
      if ((slice_spaceid = H5Screate_simple(NDIMS, dims, NULL)) < 0) ERR;

      /* Open the dataset. */
      if ((dsid = H5Dopen(fileid, VAR_NAME)) < 0) ERR;
      if ((whole_spaceid1 = H5Dget_space(dsid)) < 0) ERR;

      ftime = MPI_Wtime();

      /* Read the data, a slice at a time. */
      for (s = 0; s < num_steps; s++)
      {
	 /* Select hyperslab for read of one slice. */
	 start[0] = s * SC1 * p + my_rank * SC1;
	 count[0] = SC1;
	 if (H5Sselect_hyperslab(whole_spaceid1, H5S_SELECT_SET,
	 start, NULL, count, NULL) < 0)
	 {
	    ERR;
	    return 2;
	 }

	 if (H5Dread(dsid, H5T_NATIVE_INT, slice_spaceid, whole_spaceid1,
	 H5P_DEFAULT, data_in) < 0)
	 {
	    ERR;
	    return 2;
	 }

#if 0
	 /* Check the slice of data. */
	 for (i = 0; i < SC1; i++)
	    if (data[i] != data_in[i])
	    {
	       ERR;
	       return 2;
	    }
#endif
      }
      read_us = (MPI_Wtime() - ftime) * MILLION;
      MPI_Reduce(&read_us, &max_read_us, 1, MPI_INT, MPI_MAX, 0, MPI_COMM_WORLD);
      if (!my_rank)
      {
	 read_rate = (float)(DIM2_LEN * sizeof(int))/(float)max_read_us;
	 printf(", read_rate=%g\n", read_rate);
      }

      /* Close down. */
      if (H5Dclose(dsid) < 0 ||
      H5Sclose(slice_spaceid) < 0 ||
      H5Sclose(whole_spaceid1) < 0 ||
      H5Pclose(fapl_id) < 0 ||
      H5Fclose(fileid) < 0)
	 ERR;
   }
   if (!my_rank)
      SUMMARIZE_ERR;

   MPI_Finalize();

   if (!my_rank)
      FINAL_RESULTS;
   return 0;
}
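A note on the hyperslab arithmetic used in both loops above: at step s, rank r selects start[0] = s*SC1*p + r*SC1 with count[0] = SC1, so the p ranks cover p disjoint, contiguous slices per step. Assuming, for illustration, SC1 = 16 and p = 4 (the real values come from macros defined earlier in this file), rank 3 at step s = 2 covers elements 2*16*4 + 3*16 = 176 through 191, ranks 0-3 together cover 128-191 at that step, and the num_steps = (DIM2_LEN/SC1)/p steps tile the whole dataset exactly when DIM2_LEN is a multiple of SC1*p.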
Code Example #13
File: external.c  Project: EgoIncarnate/appleseed
/*-------------------------------------------------------------------------
 * Function:	test_4
 *
 * Purpose:	Tests opening an external link twice.  It exposed a bug
 *              in the library.  This function tests the fix.  This test
 *              doesn't work with MULTI driver.
 *
 * Return:	Success:	0
 *
 * 		Failure:	number of errors
 *
 * Programmer:	Raymond Lu
 *              5 November 2007
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static int
test_4 (hid_t fapl)
{
    hid_t fid = -1;
    hid_t gid = -1;
    hid_t xid = -1;
    hid_t xid2 = -1;
    char  filename[1024];		/*file name			*/
    char  pathname[1024];
    char *srcdir = getenv("srcdir"); /*where the src code is located*/

    TESTING("opening external link twice");

    /* Make a copy of the FAPL, in order to switch to the sec2 driver */
    /* (useful when running test with another VFD) */
    if((fapl = H5Pcopy(fapl)) < 0) FAIL_STACK_ERROR;

    /* Switch local copy of the fapl to the sec2 driver */
    if(H5Pset_fapl_sec2(fapl) < 0) FAIL_STACK_ERROR;

    h5_fixname(FILENAME[3], fapl, filename, sizeof filename);

    if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
        goto error;

    if((gid = H5Gopen2(fid, "/", H5P_DEFAULT)) < 0)
        goto error;

    pathname[0] = '\0';
    /* Generate correct name for test file by prepending the source path */
    if(srcdir && ((HDstrlen(srcdir) + HDstrlen(LINKED_FILE) + 1) < sizeof(pathname))) {
        HDstrcpy(pathname, srcdir);
        HDstrcat(pathname, "/");
    }
    HDstrcat(pathname, LINKED_FILE);

    /* Create an external link to an existing file*/
    if(H5Lcreate_external(pathname, "/group", gid, " link", H5P_DEFAULT, H5P_DEFAULT) < 0)
        goto error;

    if(H5Gclose(gid) < 0)
        goto error;

    if(H5Fclose(fid) < 0)
        goto error;

    /* Reopen the file */
    if((fid = H5Fopen(filename, H5F_ACC_RDONLY, fapl)) < 0)
        goto error;

    /* Open the external link which is "/ link" as created previously via H5Lcreate_external() */
    if((xid = H5Gopen2(fid, "/ link", H5P_DEFAULT)) < 0)
        goto error;

    /* Open the external link twice */
    if((xid2 = H5Gopen2(xid, ".", H5P_DEFAULT)) < 0)
        goto error;

    if(H5Gclose(xid2) < 0)
        goto error;

    if(H5Gclose(xid) < 0)
        goto error;

    if(H5Fclose(fid) < 0)
        goto error;

    if(H5Pclose(fapl) < 0)
        TEST_ERROR

    PASSED();

    return 0;

 error:
    H5E_BEGIN_TRY {
        H5Gclose(gid);
        H5Gclose(xid);
        H5Gclose(xid2);
        H5Fclose(fid);
    } H5E_END_TRY;
    return 1;
}
Code Example #14
File: external.c  Project: EgoIncarnate/appleseed
/*-------------------------------------------------------------------------
 * Function:	test_3
 *
 * Purpose:	Tests writing to an external file set.
 *
 * Return:	Success:	0
 *
 * 		Failure:	number of errors
 *
 * Programmer:	Robb Matzke
 *              Wednesday, March  4, 1998
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static int
test_3 (hid_t fapl)
{
    hid_t	file=-1;		/*file to which to write	*/
    hid_t	dcpl=-1;		/*dataset creation properties	*/
    hid_t	mem_space=-1;		/*memory data space		*/
    hid_t	file_space=-1;		/*file data space		*/
    hid_t	dset=-1;		/*dataset			*/
    unsigned	i;			/*miscellaneous counters	*/
    int		fd;			/*external file descriptor	*/
    int	    part[25],whole[100];	/*raw data buffers		*/
    hsize_t	cur_size=100;		/*current data space size	*/
    hsize_t	max_size=200;		/*maximum data space size	*/
    hsize_t	hs_start=100;		/*hyperslab starting offset	*/
    hsize_t	hs_count=100;		/*hyperslab size		*/
    char	filename[1024];		/*file name			*/
	int temparray[10] = {0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f,0x0f0f0f0f};

    TESTING("write external dataset");

    /* Create another file */
    h5_fixname(FILENAME[2], fapl, filename, sizeof filename);
    if ((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) {
	goto error;
    }

    /* Create the external file list */
    if((dcpl=H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if (H5Pset_external(dcpl, "extern_1b.raw", (off_t)0, (hsize_t)sizeof part) < 0 ||
	H5Pset_external(dcpl, "extern_2b.raw", (off_t)10, (hsize_t)sizeof part) < 0 ||
	H5Pset_external(dcpl, "extern_3b.raw", (off_t)20, (hsize_t)sizeof part) < 0 ||
	H5Pset_external(dcpl, "extern_4b.raw", (off_t)30, H5F_UNLIMITED) < 0)
	goto error;

    /* Make sure the output files are fresh*/
    for (i=1; i<=4; i++) {
	sprintf(filename, "extern_%db.raw", i);
	if ((fd= HDopen(filename, O_RDWR|O_CREAT|O_TRUNC, 0666)) < 0) {
	    H5_FAILED();
	    printf("    cannot open %s: %s\n", filename, strerror(errno));
	    goto error;
	}

	HDwrite(fd, temparray, (i-1)*10);
	HDclose(fd);
    }

    /* Create the dataset */
    if((mem_space = H5Screate_simple(1, &cur_size, &max_size)) < 0) goto error;
    if((file_space = H5Scopy(mem_space)) < 0) goto error;
    if((dset = H5Dcreate2(file, "dset1", H5T_NATIVE_INT, file_space, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
	goto error;

    /* Write the entire dataset and compare with the original */
    for(i = 0; i < cur_size; i++)
        whole[i] = i;
    if(H5Dwrite(dset, H5T_NATIVE_INT, mem_space, file_space, H5P_DEFAULT, whole) < 0) goto error;
    for(i = 0; i < 4; i++) {
	char name1[64], name2[64];

	sprintf(name1, "extern_%da.raw", i + 1);
	sprintf(name2, "extern_%db.raw", i + 1);
	if(!same_contents(name1, name2)) {
	    H5_FAILED();
	    puts ("   Output differs from expected value.");
	    goto error;
	} /* end if */
    } /* end for */

    /* Extend the dataset by another 100 elements */
    if(H5Dset_extent(dset, &max_size) < 0) goto error;
    if(H5Sclose(file_space) < 0) goto error;
    if((file_space = H5Dget_space(dset)) < 0) goto error;

    /* Write second half of dataset */
    for(i = 0; i < hs_count; i++)
        whole[i] = 100 + i;
    if(H5Sselect_hyperslab(file_space, H5S_SELECT_SET, &hs_start, NULL, &hs_count, NULL) < 0) goto error;
    if(H5Dwrite(dset, H5T_NATIVE_INT, mem_space, file_space, H5P_DEFAULT, whole) < 0) goto error;


    if(H5Dclose(dset) < 0) goto error;
    if(H5Pclose(dcpl) < 0) goto error;
    if(H5Sclose(mem_space) < 0) goto error;
    if(H5Sclose(file_space) < 0) goto error;
    if(H5Fclose(file) < 0) goto error;

    PASSED();
    return 0;

 error:
    H5E_BEGIN_TRY {
	H5Dclose(dset);
	H5Pclose(dcpl);
	H5Sclose(mem_space);
	H5Sclose(file_space);
	H5Fclose(file);
    } H5E_END_TRY;
    return 1;
}
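The same_contents() helper used in test_3 above is defined elsewhere in external.c; the following is only a plausible sketch of it (a byte-wise comparison of two files), not the original implementation.

/* Sketch only: return 1 if the two files have identical contents, else 0.
 * The real same_contents() in external.c may differ in detail. */
static int
same_contents(const char *name1, const char *name2)
{
    int     fd1 = HDopen(name1, O_RDONLY, 0666);
    int     fd2 = HDopen(name2, O_RDONLY, 0666);
    char    buf1[1024], buf2[1024];
    ssize_t n1, n2;
    int     same = (fd1 >= 0 && fd2 >= 0);

    while (same) {
        n1 = HDread(fd1, buf1, sizeof(buf1));
        n2 = HDread(fd2, buf2, sizeof(buf2));
        if (n1 < 0 || n1 != n2 || (n1 > 0 && HDmemcmp(buf1, buf2, (size_t)n1) != 0))
            same = 0;
        if (n1 <= 0)
            break;
    }
    if (fd1 >= 0) HDclose(fd1);
    if (fd2 >= 0) HDclose(fd2);
    return same;
}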
Code Example #15
File: titerate.c  Project: adasworks/hdf5
/****************************************************************
**
**  test_grp_memb_funcs(): Test group member information
**                         functionality
**
****************************************************************/
static void test_grp_memb_funcs(hid_t fapl)
{
    hid_t file;             /* File ID */
    hid_t dataset;          /* Dataset ID */
    hid_t datatype;         /* Common datatype ID */
    hid_t filespace;        /* Common dataspace ID */
    hid_t root_group,grp;   /* Root group ID */
    int i;                  /* counting variable */
    char name[NAMELEN];     /* temporary name buffer */
    char *dnames[NDATASETS+2];/* Names of the datasets created */
    char *obj_names[NDATASETS+2];/* Names of the objects in group */
    char dataset_name[NAMELEN];  /* dataset name */
    ssize_t name_len;       /* Length of object's name */
    H5G_info_t ginfo;       /* Buffer for querying object's info */
    herr_t ret = SUCCEED;	/* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Group Member Information Functionality\n"));

    /* Create the test file with the datasets */
    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    CHECK(file, FAIL, "H5Fcreate");

    datatype = H5Tcopy(H5T_NATIVE_INT);
    CHECK(datatype, FAIL, "H5Tcopy");

    filespace = H5Screate(H5S_SCALAR);
    CHECK(filespace, FAIL, "H5Screate");

    for(i = 0; i < NDATASETS; i++) {
        sprintf(name, "Dataset %d", i);
        dataset = H5Dcreate2(file, name, datatype, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        CHECK(dataset, FAIL, "H5Dcreate2");

        /* Keep a copy of the dataset names around for later */
        dnames[i] = HDstrdup(name);
        CHECK(dnames[i], NULL, "strdup");

        ret = H5Dclose(dataset);
        CHECK(ret, FAIL, "H5Dclose");
    } /* end for */

    /* Create a group and named datatype under root group for testing */
    grp = H5Gcreate2(file, "grp", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Gcreate2");

    dnames[NDATASETS] = HDstrdup("grp");
    CHECK(dnames[NDATASETS], NULL, "strdup");

    ret = H5Tcommit2(file, "dtype", datatype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Tcommit2");

    dnames[NDATASETS + 1] = HDstrdup("dtype");
    CHECK(dnames[NDATASETS + 1], NULL, "strdup");

    /* Close everything up */
    ret = H5Tclose(datatype);
    CHECK(ret, FAIL, "H5Tclose");

    ret = H5Gclose(grp);
    CHECK(ret, FAIL, "H5Gclose");

    ret = H5Sclose(filespace);
    CHECK(ret, FAIL, "H5Sclose");

    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");

    /* Sort the dataset names */
    HDqsort(dnames, (size_t)(NDATASETS + 2), sizeof(char *), iter_strcmp);

    /* Iterate through the datasets in the root group in various ways */
    file = H5Fopen(DATAFILE, H5F_ACC_RDONLY, fapl);
    CHECK(file, FAIL, "H5Fopen");

    /* These two functions, H5Oget_info_by_idx and H5Lget_name_by_idx, actually
     * iterate through B-tree for group members in internal library design.
     */
    root_group = H5Gopen2(file, "/", H5P_DEFAULT);
    CHECK(root_group, FAIL, "H5Gopen2");

    ret = H5Gget_info(root_group, &ginfo);
    CHECK(ret, FAIL, "H5Gget_info");
    VERIFY(ginfo.nlinks, (NDATASETS + 2), "H5Gget_info");

    for(i = 0; i < (int)ginfo.nlinks; i++) {
        H5O_info_t oinfo;               /* Object info */

        /* Test with NULL for name, to query length */
        name_len = H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, NULL, (size_t)NAMELEN, H5P_DEFAULT);
        CHECK(name_len, FAIL, "H5Lget_name_by_idx");

        ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, dataset_name, (size_t)(name_len + 1), H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Lget_name_by_idx");

        /* Double-check that the length is the same */
        VERIFY(ret, name_len, "H5Lget_name_by_idx");

        /* Keep a copy of the dataset names around for later */
        obj_names[i] = HDstrdup(dataset_name);
        CHECK(obj_names[i], NULL, "strdup");

        ret = H5Oget_info_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Oget_info_by_idx");

        if(!HDstrcmp(dataset_name, "grp"))
            VERIFY(oinfo.type, H5O_TYPE_GROUP, "H5Lget_name_by_idx");
        if(!HDstrcmp(dataset_name, "dtype"))
            VERIFY(oinfo.type, H5O_TYPE_NAMED_DATATYPE, "H5Lget_name_by_idx");
        if(!HDstrncmp(dataset_name, "Dataset", (size_t)7))
            VERIFY(oinfo.type, H5O_TYPE_DATASET, "H5Lget_name_by_idx");
    } /* end for */

    H5E_BEGIN_TRY {
        ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)(NDATASETS+3), dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Lget_name_by_idx");

    /* Sort the dataset names */
    HDqsort(obj_names, (size_t)(NDATASETS + 2), sizeof(char *), iter_strcmp);

    /* Compare object names */
    for(i = 0; i< (int)ginfo.nlinks; i++) {
        ret = HDstrcmp(dnames[i], obj_names[i]);
        VERIFY(ret, 0, "HDstrcmp");
    } /* end for */

    ret = H5Gclose(root_group);
    CHECK(ret, FAIL, "H5Gclose");


    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");

    /* Free the dataset names */
    for(i = 0; i< (NDATASETS + 2); i++) {
        HDfree(dnames[i]);
        HDfree(obj_names[i]);
    } /* end for */
} /* test_grp_memb_funcs() */
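iter_strcmp(), the comparator handed to HDqsort() above, is not included in this excerpt; a minimal sketch consistent with sorting an array of char * names would be:

/* Sketch of the qsort comparator: the array elements are char * pointers,
 * so each void * argument points at a char *. */
static int
iter_strcmp(const void *s1, const void *s2)
{
    return HDstrcmp(*(const char * const *)s1, *(const char * const *)s2);
}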
Code Example #16
File: io.c  Project: alexgittens/mpi_pcavariants
void writeSVD(char * outfname, distGatherInfo *eigInfo, double * U, double * V, double * singvals, double * meanVec, double * rowWeights) {

    hid_t file_id = H5Fcreate(outfname, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    if (file_id < 0) {
        fprintf(stderr, "Could not create output file %s\n", outfname);
        exit(-1);
    }

    hsize_t dims[2];
    dims[0] = eigInfo->numrows;
    dims[1] = eigInfo->numeigs;
    hid_t dataspace_id = H5Screate_simple(2, dims, NULL);
    hid_t dataset_id = H5Dcreate2(file_id, "/U", H5T_NATIVE_DOUBLE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    hid_t plist_id = H5Pcreate(H5P_DATASET_XFER);
    if (dataset_id < 0) {
        fprintf(stderr, "Error creating dataset U in %s\n", outfname);
        exit(-1);
    }
    if( H5Dwrite(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, dataspace_id, plist_id, U) < 0) {
        fprintf(stderr, "Error writing U to %s\n", outfname);
        exit(-1);
    }
    H5Dclose(dataset_id);
    H5Sclose(dataspace_id);

    dims[0] = eigInfo->numcols;
    dims[1] = eigInfo->numeigs;
    dataspace_id = H5Screate_simple(2, dims, NULL);
    dataset_id = H5Dcreate2(file_id, "/V", H5T_NATIVE_DOUBLE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    if (dataset_id < 0) {
        fprintf(stderr, "Error creating dataset V in %s\n", outfname);
        exit(-1);
    }
    if ( H5Dwrite(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, dataspace_id, plist_id, V) < 0 ) {
        fprintf(stderr, "Error writing V to %s\n", outfname);
        exit(-1);
    }
    H5Dclose(dataset_id);
    H5Sclose(dataspace_id);

    dims[0] = eigInfo->numeigs;
    dataspace_id = H5Screate_simple(1, dims, NULL);
    dataset_id = H5Dcreate2(file_id, "/S", H5T_NATIVE_DOUBLE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    if (dataset_id < 0) {
        fprintf(stderr, "Error creating dataset S in %s\n", outfname);
        exit(-1);
    }
    if ( H5Dwrite(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, dataspace_id, plist_id, singvals) < 0) {
        fprintf(stderr, "Error writing S to %s\n", outfname);
        exit(-1);
    }
    H5Dclose(dataset_id);
    H5Sclose(dataspace_id);

    dims[0] = eigInfo->numrows;
    dataspace_id = H5Screate_simple(1, dims, NULL);
    dataset_id = H5Dcreate2(file_id, "/rowMeans", H5T_NATIVE_DOUBLE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    if (dataset_id < 0) {
        fprintf(stderr, "Error creating dataset rowMeans in %s\n", outfname);
        exit(-1);
    }
    if ( H5Dwrite(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, dataspace_id, plist_id, meanVec) < 0) {
        fprintf(stderr, "Error writing rowMeans to %s\n", outfname);
        exit(-1);
    }
    H5Dclose(dataset_id);
    H5Sclose(dataspace_id);

    dims[0] = eigInfo->numrows;
    dataspace_id = H5Screate_simple(1, dims, NULL);
    dataset_id = H5Dcreate2(file_id, "/rowWeights", H5T_NATIVE_DOUBLE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    if (dataset_id < 0) {
        fprintf(stderr, "Error creating dataset rowWeights in %s\n", outfname);
        exit(-1);
    }
    if ( H5Dwrite(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, dataspace_id, plist_id, rowWeights) < 0) {
        fprintf(stderr, "Error writing rowWeights to %s\n", outfname);
        exit(-1);
    }
    H5Dclose(dataset_id);
    H5Sclose(dataspace_id);

    H5Pclose(plist_id);
    H5Fclose(file_id);
}
Code Example #17
File: titerate.c  Project: adasworks/hdf5
/****************************************************************
**
**  test_links(): Test soft and hard link iteration
**
****************************************************************/
static void test_links(hid_t fapl)
{
    hid_t file;             /* File ID */
    char obj_name[NAMELEN]; /* Names of the object in group */
    ssize_t name_len;       /* Length of object's name */
    hid_t    gid, gid1;
    H5G_info_t ginfo;       /* Buffer for querying object's info */
    hsize_t i;
    herr_t ret;		    /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Soft and Hard Link Iteration Functionality\n"));

    /* Create the test file with the datasets */
    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    CHECK(file, FAIL, "H5Fcreate");

    /* create groups */
    gid = H5Gcreate2(file, "/g1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(gid, FAIL, "H5Gcreate2");

    gid1 = H5Gcreate2(file, "/g1/g1.1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(gid1, FAIL, "H5Gcreate2");

    /* create soft and hard links to the group "/g1". */
    ret = H5Lcreate_soft("something", gid, "softlink", H5P_DEFAULT, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Lcreate_soft");

    ret = H5Lcreate_hard(gid, "/g1", H5L_SAME_LOC, "hardlink", H5P_DEFAULT, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Lcreate_hard");

    ret = H5Gget_info(gid, &ginfo);
    CHECK(ret, FAIL, "H5Gget_info");
    VERIFY(ginfo.nlinks, 3, "H5Gget_info");

    /* Test these two functions, H5Oget_info_by_idx and H5Lget_name_by_idx */
    for(i = 0; i < ginfo.nlinks; i++) {
        H5O_info_t oinfo;               /* Object info */
        H5L_info_t linfo;               /* Link info */

        /* Get link name */
        name_len = H5Lget_name_by_idx(gid, ".", H5_INDEX_NAME, H5_ITER_INC, i, obj_name, (size_t)NAMELEN, H5P_DEFAULT);
        CHECK(name_len, FAIL, "H5Lget_name_by_idx");

        /* Get link type */
        ret = H5Lget_info_by_idx(gid, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &linfo, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Lget_info_by_idx");

        /* Get object type */
        if(linfo.type == H5L_TYPE_HARD) {
            ret = H5Oget_info_by_idx(gid, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo, H5P_DEFAULT);
            CHECK(ret, FAIL, "H5Oget_info_by_idx");
        } /* end if */

        if(!HDstrcmp(obj_name, "g1.1"))
            VERIFY(oinfo.type, H5O_TYPE_GROUP, "H5Lget_name_by_idx");
        else if(!HDstrcmp(obj_name, "hardlink"))
            VERIFY(oinfo.type, H5O_TYPE_GROUP, "H5Lget_name_by_idx");
        else if(!HDstrcmp(obj_name, "softlink"))
            VERIFY(linfo.type, H5L_TYPE_SOFT, "H5Lget_name_by_idx");
        else
            CHECK(0, 0, "unknown object name");
    } /* end for */

    ret = H5Gclose(gid);
    CHECK(ret, FAIL, "H5Gclose");

    ret = H5Gclose(gid1);
    CHECK(ret, FAIL, "H5Gclose");

    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");
} /* test_links() */
Code Example #18
File: io.c  Project: alexgittens/mpi_pcavariants
// assumes matInfo was preallocated
double getMatrixInfo(char * infilename, char * datasetname, MPI_Comm *comm, MPI_Info *info, distMatrixInfo *matInfo) {
    double startTime = MPI_Wtime();

    int mpi_size, mpi_rank;
    MPI_Comm_size(*comm, &mpi_size);
    MPI_Comm_rank(*comm, &mpi_rank);

    hid_t plist_id = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(plist_id, *comm, *info);

    hid_t file_id = H5Fopen(infilename, H5F_ACC_RDONLY, plist_id);
    if (file_id < 0) {
        fprintf(stderr, "Error opening %s\n", infilename);
        exit(-1);
    }

    hid_t dataset_id = H5Dopen(file_id, datasetname, H5P_DEFAULT);
    if (dataset_id < 0) {
        fprintf(stderr, "Error opening %s in %s : are you sure this dataset exists?\n", datasetname, infilename);
        exit(-1);
    }

    hid_t dataset_space = H5Dget_space(dataset_id);
    hsize_t dims[2];
    herr_t status = H5Sget_simple_extent_dims(dataset_space, dims, NULL);
    if (status < 0 || status != 2) {
       fprintf(stderr, "Error reading %s from %s : remember it should be a 2d matrix\n", datasetname, infilename); 
       exit(-1);
    }

    H5Sclose(dataset_space);
    H5Dclose(dataset_id);
    H5Fclose(file_id);
    H5Pclose(plist_id);

    int numrows = dims[0];
    int numcols = dims[1];
    
    int littlePartitionSize = numrows/mpi_size;
    int bigPartitionSize = littlePartitionSize + 1;
    int numLittlePartitions = mpi_size - numrows % mpi_size;
    int numBigPartitions = numrows % mpi_size;
    int localrows, startingrow;

    if (mpi_rank < numBigPartitions) {
        localrows = bigPartitionSize;
        startingrow = bigPartitionSize*mpi_rank;
    } else {
        localrows = littlePartitionSize;
        startingrow = bigPartitionSize*numBigPartitions + 
                      littlePartitionSize*(mpi_rank - numBigPartitions);
    }

    matInfo->mpi_size = mpi_size;
    matInfo->mpi_rank = mpi_rank;
    matInfo->numrows = numrows;
    matInfo->numcols = numcols;
    matInfo->localrows = localrows;
    matInfo->startingrow = startingrow;
    matInfo->littlePartitionSize = littlePartitionSize;
    matInfo->bigPartitionSize = bigPartitionSize;
    matInfo->numLittlePartitions = numLittlePartitions;
    matInfo->numBigPartitions = numBigPartitions;

    matInfo->comm = comm;
    
    matInfo->rowcounts = (int *) malloc ( mpi_size * sizeof(int) );
    matInfo->rowoffsets = (int *) malloc ( mpi_size * sizeof(int) );
    if (matInfo->rowcounts == NULL || matInfo->rowoffsets == NULL) {
        fprintf(stderr, "Could not allocate memory for the matrix chunk offset information\n");
        exit(-1);
    }
    for(int idx = 0; idx < numBigPartitions; idx = idx + 1) {
        matInfo->rowcounts[idx] = bigPartitionSize;
        matInfo->rowoffsets[idx] = bigPartitionSize * idx;
    }
    for(int idx = numBigPartitions; idx < mpi_size; idx = idx + 1) {
        matInfo->rowcounts[idx] = littlePartitionSize;
        matInfo->rowoffsets[idx] = bigPartitionSize * numBigPartitions + littlePartitionSize * (idx - numBigPartitions);
    }

    return MPI_Wtime() - startTime;
}
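getMatrixInfo() only computes the per-rank partitioning; the matrix itself is read elsewhere in io.c. As a hedged illustration (the function name, parameters, and error handling here are invented, not taken from the original source), the metadata would typically drive a collective hyperslab read like this:

/* Illustrative sketch: read this rank's block of rows using the offsets
 * computed in getMatrixInfo(). Not part of the original io.c. */
void readLocalRows(char *infilename, char *datasetname,
                   distMatrixInfo *matInfo, double *localbuf)
{
    hsize_t offset[2] = { (hsize_t)matInfo->startingrow, 0 };
    hsize_t count[2]  = { (hsize_t)matInfo->localrows, (hsize_t)matInfo->numcols };

    hid_t plist_id = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(plist_id, *(matInfo->comm), MPI_INFO_NULL);

    hid_t file_id    = H5Fopen(infilename, H5F_ACC_RDONLY, plist_id);
    hid_t dataset_id = H5Dopen(file_id, datasetname, H5P_DEFAULT);

    /* Select this rank's rows in the file and a matching block in memory. */
    hid_t filespace = H5Dget_space(dataset_id);
    H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);
    hid_t memspace = H5Screate_simple(2, count, NULL);

    /* Collective read, mirroring the collective transfers used for writing. */
    hid_t xfer_id = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_dxpl_mpio(xfer_id, H5FD_MPIO_COLLECTIVE);
    H5Dread(dataset_id, H5T_NATIVE_DOUBLE, memspace, filespace, xfer_id, localbuf);

    H5Pclose(xfer_id);
    H5Sclose(memspace);
    H5Sclose(filespace);
    H5Dclose(dataset_id);
    H5Fclose(file_id);
    H5Pclose(plist_id);
}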
Code Example #19
File: ex_table_02.c  Project: MattNapsAlot/rHDF5
int main( void )
{
 typedef struct Particle 
 {
  char   name[16];
  int    lati;
  int    longi;
  float  pressure;
  double temperature; 
 } Particle;

 Particle  dst_buf[NRECORDS+NRECORDS_ADD];

/* Define an array of Particles */
 Particle  p_data[NRECORDS] = { 
 {"zero",0,0, 0.0f, 0.0},
 {"one",10,10, 1.0f, 10.0},
 {"two",  20,20, 2.0f, 20.0},
 {"three",30,30, 3.0f, 30.0},
 {"four", 40,40, 4.0f, 40.0},
 {"five", 50,50, 5.0f, 50.0},
 {"six",  60,60, 6.0f, 60.0},
 {"seven",70,70, 7.0f, 70.0}
  };

 /* Calculate the size and the offsets of our struct members in memory */
 size_t dst_size =  sizeof( Particle );
 size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
                                HOFFSET( Particle, lati ),
                                HOFFSET( Particle, longi ),
                                HOFFSET( Particle, pressure ),
                                HOFFSET( Particle, temperature )};

 size_t dst_sizes[NFIELDS] = { sizeof( p_data[0].name),
                               sizeof( p_data[0].lati),
                               sizeof( p_data[0].longi),
                               sizeof( p_data[0].pressure),
                               sizeof( p_data[0].temperature)};
 
 /* Define field information */
 const char *field_names[NFIELDS] = 
 { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
 hid_t      field_type[NFIELDS];
 hid_t      string_type;
 hid_t      file_id;
 hsize_t    chunk_size = 10;
 int        *fill_data = NULL;
 int        compress  = 0;
 herr_t     status; 
 int        i;

  /* Particles to be appended later */
 Particle particle_in[ NRECORDS_ADD ] = 
 {{ "eight",80,80, 8.0f, 80.0},
 {"nine",90,90, 9.0f, 90.0} };

 /* Initialize field_type */
 string_type = H5Tcopy( H5T_C_S1 );
 H5Tset_size( string_type, 16 );
 field_type[0] = string_type;
 field_type[1] = H5T_NATIVE_INT;
 field_type[2] = H5T_NATIVE_INT;
 field_type[3] = H5T_NATIVE_FLOAT;
 field_type[4] = H5T_NATIVE_DOUBLE;
   
 /* Create a new file using default properties. */
 file_id = H5Fcreate( "ex_table_02.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );

 /* make a table */
 status=H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS, 
                        dst_size, field_names, dst_offset, field_type, 
                        chunk_size, fill_data, compress, p_data  );

 /* append two records */
 status=H5TBappend_records(file_id, TABLE_NAME,NRECORDS_ADD, dst_size, dst_offset, dst_sizes, 
  &particle_in );

 /* read the table */
 status=H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );

 /* print it by rows */
 for (i=0; i<NRECORDS+NRECORDS_ADD; i++) {
  printf ("%-5s %-5d %-5d %-5f %-5f", 
   dst_buf[i].name,
   dst_buf[i].lati,
   dst_buf[i].longi,
   dst_buf[i].pressure,
   dst_buf[i].temperature);
  printf ("\n");
 }
 
 /* close type */
 H5Tclose( string_type );
 
 /* close the file */
 H5Fclose( file_id );

 return 0;
}
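A natural follow-up to the full-table read above is H5TBread_records() from the same Table API, which reads a subset of records. Shown here only as a sketch: placed just before H5Fclose() in the example, it would read back only the two appended rows.

 /* Sketch: read records NRECORDS..NRECORDS+NRECORDS_ADD-1 (the appended ones)
    into a small buffer, reusing the offset/size arrays from above. */
 Particle tail_buf[NRECORDS_ADD];
 status = H5TBread_records(file_id, TABLE_NAME,
                           (hsize_t)NRECORDS, (hsize_t)NRECORDS_ADD,
                           dst_size, dst_offset, dst_sizes, tail_buf);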
Code Example #20
int main(int argc, char **argv) {
    struct rlimit rlim;
    getrlimit(RLIMIT_STACK, &rlim);
    rlim.rlim_cur = 1024 * 1024 * 1024;
    setrlimit(RLIMIT_STACK, &rlim);

    hid_t file_id, dataset_id, dataspace_id, status, property_id;
    hsize_t dims[2];

    if(argc < 4) {
        usage(stderr, argv[0]);
        exit(1);
    }

    file_id = H5Fopen(argv[1], H5F_ACC_RDONLY, H5P_DEFAULT);
    dataset_id = H5Dopen2(file_id, "x", H5P_DEFAULT);
    dataspace_id = H5Dget_space(dataset_id);

    status = H5Sget_simple_extent_dims(dataspace_id, dims, NULL);

    hsize_t a_rows = dims[0], a_cols = dims[1];
    double a[a_rows][a_cols];
    status = H5Dread(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, a);

    status = H5Sclose(dataspace_id);
    status = H5Dclose(dataset_id);
    status = H5Fclose(file_id);

    file_id = H5Fopen(argv[2], H5F_ACC_RDONLY, H5P_DEFAULT);
    dataset_id = H5Dopen2(file_id, "x", H5P_DEFAULT);
    dataspace_id = H5Dget_space(dataset_id);

    status = H5Sget_simple_extent_dims(dataspace_id, dims, NULL);

    hsize_t b_rows = dims[0], b_cols = dims[1];
    double b[b_rows][b_cols];
    status = H5Dread(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, b);

    status = H5Sclose(dataspace_id);
    status = H5Dclose(dataset_id);
    status = H5Fclose(file_id);


    if(a_cols != b_rows) {
        fprintf(stderr, "Error: matrix dimension mismatch.\n");
        exit(1);
    }

    size_t m = a_rows;
    size_t n = a_cols;
    size_t p = b_cols;

    double c[m][p];
    matrix_multiply_block(&a[0][0], &b[0][0], &c[0][0], m, n, p, BLOCKSIZE);

    // Write out the output
    dims[0] = m;
    dims[1] = p;

    file_id = H5Fcreate(argv[3], H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    dataspace_id = H5Screate_simple(2, dims, NULL);
    property_id = H5Pcreate(H5P_DATASET_CREATE);
    status = H5Pset_layout(property_id, H5D_CONTIGUOUS);

    dataset_id = H5Dcreate(file_id, "x", H5T_NATIVE_DOUBLE, dataspace_id, H5P_DEFAULT, property_id, H5P_DEFAULT);

    status = H5Dwrite(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &c[0][0]);

    status = H5Sclose(dataspace_id);
    status = H5Dclose(dataset_id);
    status = H5Fclose(file_id);
    status = H5Pclose(property_id);
    return 0;
}
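matrix_multiply_block(), usage(), and BLOCKSIZE are defined outside this excerpt. A minimal cache-blocked implementation consistent with the call above could look like the following sketch (an assumption, not the original code):

/* Hypothetical blocked multiply: C (m x p) = A (m x n) * B (n x p),
 * all stored row-major as flat arrays; bs is the block size. */
void matrix_multiply_block(double *a, double *b, double *c,
                           size_t m, size_t n, size_t p, size_t bs)
{
    for (size_t i = 0; i < m * p; i++)
        c[i] = 0.0;

    for (size_t ii = 0; ii < m; ii += bs)
        for (size_t kk = 0; kk < n; kk += bs)
            for (size_t jj = 0; jj < p; jj += bs)
                for (size_t i = ii; i < ii + bs && i < m; i++)
                    for (size_t k = kk; k < kk + bs && k < n; k++) {
                        double aik = a[i * n + k];
                        for (size_t j = jj; j < jj + bs && j < p; j++)
                            c[i * p + j] += aik * b[k * p + j];
                    }
}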
Code Example #21
File: h5copy.c  Project: Len3d/appleseed
int
main (int argc, const char *argv[])
{
    hid_t        fid_src=-1;
    hid_t        fid_dst=-1;
    char         *fname_src=NULL;
    char         *fname_dst=NULL;
    char         *oname_src=NULL;
    char         *oname_dst=NULL;
    unsigned     flag=0;
    unsigned     verbose=0;
    unsigned     parents=0;
    hid_t        ocpl_id = (-1);          /* Object copy property list */
    hid_t        lcpl_id = (-1);          /* Link creation property list */
    char         str_flag[20];
    int          opt;
    int          li_ret;
    h5tool_link_info_t linkinfo;
    int          i, len;
    char         *str_ptr=NULL;

    h5tools_setprogname(PROGRAMNAME);
    h5tools_setstatus(EXIT_SUCCESS);
    /* initialize h5tools lib */
    h5tools_init();

    /* init linkinfo struct */
    HDmemset(&linkinfo, 0, sizeof(h5tool_link_info_t));

    /* Check for no command line parameters */
    if(argc == 1) 
    {
        usage();
        leave(EXIT_FAILURE);
    } /* end if */

    /* parse command line options */
    while ((opt = get_option(argc, argv, s_opts, l_opts)) != EOF)
    {
        switch ((char)opt)
        {
        case 'd':
            oname_dst = HDstrdup(opt_arg);
            break;

        case 'f':
            /* validate flag */
            if (parse_flag(opt_arg,&flag)<0)
            {
                usage();
                leave(EXIT_FAILURE);
            }
            HDstrcpy(str_flag,opt_arg);
            break;

        case 'h':
            usage();
            leave(EXIT_SUCCESS);
            break;

        case 'i':
            fname_src = HDstrdup(opt_arg);
            break;

        case 'o':
            fname_dst = HDstrdup(opt_arg);
            break;

        case 'p':
            parents = 1;
            break;

        case 's':
            oname_src = HDstrdup(opt_arg);
            break;

        case 'V':
            print_version(h5tools_getprogname());
            leave(EXIT_SUCCESS);
            break;

        case 'v':
            verbose = 1;
            break;

        default:
            usage();
            leave(EXIT_FAILURE);
        }
    } /* end of while */

/*-------------------------------------------------------------------------
 * check for missing file/object names
 *-------------------------------------------------------------------------*/

    if (fname_src==NULL)
    {
        error_msg("Input file name missing\n");
        usage();
        leave(EXIT_FAILURE);
    }

    if (fname_dst==NULL)
    {
        error_msg("Output file name missing\n");
        usage();
        leave(EXIT_FAILURE);
    }

    if (oname_src==NULL)
    {
        error_msg("Source object name missing\n");
        usage();
        leave(EXIT_FAILURE);
    }

    if (oname_dst==NULL)
    {
        error_msg("Destination object name missing\n");
        usage();
        leave(EXIT_FAILURE);
    }

   /*-------------------------------------------------------------------------
    * open output file
    *-------------------------------------------------------------------------*/

    /* Attempt to open an existing HDF5 file first. The dst file needs to be
       opened before the src file in case the dst and src are the same file.
     */
    fid_dst = h5tools_fopen(fname_dst, H5F_ACC_RDWR, H5P_DEFAULT, NULL, NULL, 0);

   /*-------------------------------------------------------------------------
    * open input file
    *-------------------------------------------------------------------------*/

    fid_src = h5tools_fopen(fname_src, H5F_ACC_RDONLY, H5P_DEFAULT, NULL, NULL, 0);

   /*-------------------------------------------------------------------------
    * test for error in opening input file
    *-------------------------------------------------------------------------*/
    if (fid_src==-1)
    {
        error_msg("Could not open input file <%s>...Exiting\n", fname_src);
        if (fname_src)
            HDfree(fname_src);
        leave(EXIT_FAILURE);
    }


   /*-------------------------------------------------------------------------
    * create an output file when failed to open it
    *-------------------------------------------------------------------------*/

    /* If we couldn't open an existing file, try creating file */
    /* (use "EXCL" instead of "TRUNC", so we don't blow away existing non-HDF5 file) */
    if(fid_dst < 0)
        fid_dst = H5Fcreate(fname_dst, H5F_ACC_EXCL, H5P_DEFAULT, H5P_DEFAULT);

   /*-------------------------------------------------------------------------
    * test for error in opening output file
    *-------------------------------------------------------------------------*/
    if (fid_dst==-1)
    {
        error_msg("Could not open output file <%s>...Exiting\n", fname_dst);
        if (fname_src)
            HDfree(fname_src);
        if (fname_dst)
            HDfree(fname_dst);
        leave(EXIT_FAILURE);
    }

   /*-------------------------------------------------------------------------
    * print some info
    *-------------------------------------------------------------------------*/

    if (verbose)
    {
        printf("Copying file <%s> and object <%s> to file <%s> and object <%s>\n",
        fname_src, oname_src, fname_dst, oname_dst);
        if (flag)
            printf("Using %s flag\n", str_flag);
    }


   /*-------------------------------------------------------------------------
    * create property lists for copy
    *-------------------------------------------------------------------------*/

    /* create property to pass copy options */
    if ( (ocpl_id = H5Pcreate(H5P_OBJECT_COPY)) < 0)
        goto error;

    /* set options for object copy */
    if (flag)
    {
        if ( H5Pset_copy_object(ocpl_id, flag) < 0)
            goto error;
    }

    /* Create link creation property list */
    if((lcpl_id = H5Pcreate(H5P_LINK_CREATE)) < 0) {
        error_msg("Could not create link creation property list\n");
        goto error;
    } /* end if */

    /* Check for creating intermediate groups */
    if(parents) {
        /* Set the intermediate group creation property */
        if(H5Pset_create_intermediate_group(lcpl_id, 1) < 0) {
            error_msg("Could not set property for creating parent groups\n");
            goto error;
        } /* end if */

        /* Display some output if requested */
        if(verbose)
            printf("%s: Creating parent groups\n", h5tools_getprogname());
    } /* end if */
    else /* error if the parent groups don't already exist in the destination file */
    {
        len = HDstrlen(oname_dst);
        /* check that all the parent groups exist; skip the root group */
        for (i = 1; i < len; i++)
        {
            if ('/'==oname_dst[i])
            {
                str_ptr = (char*)HDcalloc((size_t)i+1, sizeof(char));
                HDstrncpy (str_ptr, oname_dst, (size_t)i);
                str_ptr[i]='\0';
                if (H5Lexists(fid_dst, str_ptr, H5P_DEFAULT) <= 0)
                {
                    error_msg("group <%s> doesn't exist. Use -p to create parent groups.\n", str_ptr);
                    HDfree(str_ptr);
                    goto error;
                }
                HDfree(str_ptr);
            }
        }
    }

   /*-------------------------------------------------------------------------
    * do the copy
    *-------------------------------------------------------------------------*/
 
    if(verbose)
        linkinfo.opt.msg_mode = 1;
 
    li_ret = H5tools_get_symlink_info(fid_src, oname_src, &linkinfo, 1);
    if (li_ret == 0) /* dangling link */
    {
        if(H5Lcopy(fid_src, oname_src, 
                   fid_dst, oname_dst,
                   H5P_DEFAULT, H5P_DEFAULT) < 0)
            goto error;
    }
    else /* valid link */
    {
        if (H5Ocopy(fid_src,          /* Source file or group identifier */
                  oname_src,        /* Name of the source object to be copied */
                  fid_dst,          /* Destination file or group identifier  */
                  oname_dst,        /* Name of the destination object  */
                  ocpl_id,          /* Object copy property list */
                  lcpl_id)<0)       /* Link creation property list */
            goto error;
    }

    /* free link info path */
    if (linkinfo.trg_path)
        HDfree(linkinfo.trg_path);

    /* close property lists */
    if(H5Pclose(ocpl_id)<0)
        goto error;
    if(H5Pclose(lcpl_id)<0)
        goto error;

    /* close files */
    if (H5Fclose(fid_src)<0)
        goto error;
    if (H5Fclose(fid_dst)<0)
        goto error;

    if (fname_src)
        HDfree(fname_src);
    if (fname_dst)
        HDfree(fname_dst);
    if (oname_dst)
        HDfree(oname_dst);
    if (oname_src)
        HDfree(oname_src);

    h5tools_close();

    return EXIT_SUCCESS;

error:
    printf("Error in copy...Exiting\n");

    /* free link info path */
    if (linkinfo.trg_path)
        HDfree(linkinfo.trg_path);

 H5E_BEGIN_TRY {
    H5Pclose(ocpl_id);
    H5Pclose(lcpl_id);
    H5Fclose(fid_src);
    H5Fclose(fid_dst);
 } H5E_END_TRY;
    if (fname_src)
        HDfree(fname_src);
    if (fname_dst)
        HDfree(fname_dst);
    if (oname_dst)
        HDfree(oname_dst);
    if (oname_src)
        HDfree(oname_src);

    h5tools_close();

    return EXIT_FAILURE;
}
Code Example #22
File: h5_vds-simpleIO.c  Project: Starlink/hdf5
int
main (void)
{
    hid_t        file, space, src_space, vspace, dset; /* Handles */ 
    hid_t        dcpl;
    herr_t       status;
    hsize_t      vdsdims[2] = {DIM0, DIM1},     /* Virtual dataset dimension */
                 dims[2] = {DIM0, DIM1};        /* Source dataset dimensions */
    int          wdata[DIM0][DIM1],             /* Write buffer for source dataset */
                 rdata[DIM0][DIM1],             /* Read buffer for virtual dataset */
                 i, j;  
    H5D_layout_t layout;                        /* Storage layout */
    size_t       num_map;                       /* Number of mappings */
    ssize_t      len;                           /* Length of the string; also a return value */
    char         *filename;                  
    char         *dsetname;
    /*
     * Initialize data.
     */
        for (i = 0; i < DIM0; i++) 
            for (j = 0; j < DIM1; j++) wdata[i][j] = i+1;
         
     /*
      * Create the source file and the dataset. Write data to the source dataset 
      * and close all resources.
      */

     file = H5Fcreate (SRC_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
     space = H5Screate_simple (RANK, dims, NULL);
     dset = H5Dcreate2 (file, SRC_DATASET, H5T_NATIVE_INT, space, H5P_DEFAULT,
                 H5P_DEFAULT, H5P_DEFAULT);
     status = H5Dwrite (dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                 wdata[0]);
     status = H5Sclose (space);
     status = H5Dclose (dset);
     status = H5Fclose (file);

    /* Create file in which virtual dataset will be stored. */
    file = H5Fcreate (FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    /* Create VDS dataspace.  */
    vspace = H5Screate_simple (RANK, vdsdims, NULL);

    /* Set VDS creation property. */
    dcpl = H5Pcreate (H5P_DATASET_CREATE);
     
    /*
     * Build the mapping.
     * In this simple example the selections in both the source and the
     * virtual dataspaces are the default "all" selections, so the single
     * source dataset is mapped onto the entire virtual dataset.
     */
    src_space = H5Screate_simple (RANK, dims, NULL);
    status = H5Pset_virtual (dcpl, vspace, SRC_FILE, SRC_DATASET, src_space);

    /* Create a virtual dataset. */
    dset = H5Dcreate2 (file, DATASET, H5T_NATIVE_INT, vspace, H5P_DEFAULT,
                dcpl, H5P_DEFAULT);
    status = H5Sclose (vspace);
    status = H5Sclose (src_space);
    status = H5Dclose (dset);
    status = H5Fclose (file);    
     
    /*
     * Now we begin the read section of this example.
     */

    /*
     * Open the file and virtual dataset.
     */
    file = H5Fopen (FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
    dset = H5Dopen2 (file, DATASET, H5P_DEFAULT);
    /*
     * Get creation property list and mapping properties.
     */
    dcpl = H5Dget_create_plist (dset);

    /*
     * Get storage layout.
     */
    layout = H5Pget_layout (dcpl);
    if (H5D_VIRTUAL == layout) 
        printf(" Dataset has a virtual layout \n");
    else
        printf(" Wrong layout found \n");

     /*
      * Find the number of mappings.
      */
     status = H5Pget_virtual_count (dcpl, &num_map);
     printf(" Number of mappings is %lu\n", (unsigned long)num_map);

     /* 
      * Get mapping parameters for each mapping.
      */
    for (i = 0; i < (int)num_map; i++) {   
        printf(" Mapping %d \n", i);
        printf("         Selection in the virtual dataset ");
        /* Get selection in the virtual dataset */
        vspace = H5Pget_virtual_vspace (dcpl, (size_t)i);

        /* Make sure it is ALL selection and then print selection. */
        if(H5Sget_select_type(vspace) == H5S_SEL_ALL) {
                printf("Selection is H5S_ALL \n");
        }
        /* Get source file name. */
        len = H5Pget_virtual_filename (dcpl, (size_t)i, NULL, 0);
        filename = (char *)malloc((size_t)len*sizeof(char)+1);
        H5Pget_virtual_filename (dcpl, (size_t)i, filename, len+1);
        printf("         Source filename %s\n", filename);

        /* Get source dataset name. */
        len = H5Pget_virtual_dsetname (dcpl, (size_t)i, NULL, 0);
        dsetname = (char *)malloc((size_t)len*sizeof(char)+1);
        H5Pget_virtual_dsetname (dcpl, (size_t)i, dsetname, len+1);
        printf("         Source dataset name %s\n", dsetname);

        /* Get selection in the source dataset. */
        printf("         Selection in the source dataset ");
        src_space = H5Pget_virtual_srcspace (dcpl, (size_t)i);

        /* Make sure it is ALL selection and then print selection. */
        if(H5Sget_select_type(src_space) == H5S_SEL_ALL) {
                printf("Selection is H5S_ALL \n");
        }
        H5Sclose(vspace);
        H5Sclose(src_space);
        free(filename);
        free(dsetname);
    }
    /*
     * Read the data using the default properties.
     */
    status = H5Dread (dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                rdata[0]);

    /*
     * Output the data to the screen.
     */
    printf (" VDS Data:\n");
    for (i=0; i<DIM0; i++) {
        printf (" [");
        for (j=0; j<DIM1; j++)
            printf (" %3d", rdata[i][j]);
        printf ("]\n");
    }
    /*
     * Close and release resources.
     */
    status = H5Pclose (dcpl);
    status = H5Dclose (dset);
    status = H5Fclose (file);

    return 0;
}
Code Example #23
File: gen_bad_offset.c  Project: Starlink/hdf5
/*-------------------------------------------------------------------------
 * Function:	main
 *
 *              Generate an HDF5 file with groups, datasets and symbolic links. 
 *              After the file is generated, write bad offset values to 
 *              the heap at 3 locations in the file:
 *              (A) Open the file:
 *                  fd = HDopen(TESTFILE, O_RDWR, 0663);
 *              (B) Position the file at:
 *                  (1) HDlseek(fd, (HDoff_t)880, SEEK_SET);
 *                      "/group1/group2": replace heap offset "8" by bad offset
 *                  (2) HDlseek(fd, (HDoff_t)1512, SEEK_SET);
 *                      "/dsetA": replace name offset into private heap "72" by bad offset 
 *                  (3) HDlseek(fd, (HDoff_t)1616, SEEK_SET);
 *                      /soft_one: replace link value offset in the scratch pad "32" by bad offset
 *              (C) Write the bad offset value to the file for (1), (2) and (3):
 *                  write(fd, &val, sizeof(val));
 *
 *              Note: if the groups/datasets/symbolic links are changed in the file,
 *              the above locations need to be adjusted accordingly.
 *
 * Return:      EXIT_SUCCESS/EXIT_FAILURE
 *
 *-------------------------------------------------------------------------
 */
int
main(void)
{
    hid_t       fid = -1, gid1 = -1, gid2 = -1; /* File and group IDs */
    hid_t       did = -1, sid = -1;             /* Dataset and dataspace IDs */
    int         fd = -1;                        /* File descriptor */
    int64_t     val = 999;                      /* Bad offset value */

    /* Create the test file */
    if((fid = H5Fcreate(TESTFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        FAIL_STACK_ERROR

    /* Create two groups */
    if((gid1 = H5Gcreate2(fid, GRP1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        FAIL_STACK_ERROR
    if((gid2 = H5Gcreate2(gid1, GRP2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        FAIL_STACK_ERROR

    /* Close the groups */
    if(H5Gclose(gid1) < 0)
        FAIL_STACK_ERROR
    if(H5Gclose(gid2) < 0)
        FAIL_STACK_ERROR

    /* Create soft links to the groups */
    if(H5Lcreate_soft("/group1", fid, SOFT1, H5P_DEFAULT, H5P_DEFAULT) < 0)
        FAIL_STACK_ERROR
    if(H5Lcreate_soft("/group1/group2", fid, SOFT2, H5P_DEFAULT, H5P_DEFAULT) < 0)
        FAIL_STACK_ERROR

    /* Create a dataset */
    if((sid = H5Screate(H5S_SCALAR)) < 0)
        FAIL_STACK_ERROR
    if((did = H5Dcreate2(fid, DSET, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) <  0)
        FAIL_STACK_ERROR

    /* Close the dataset */
    if(H5Dclose(did) < 0)
        FAIL_STACK_ERROR

    /* Close the dataspace */
    if(H5Sclose(sid) < 0)
        FAIL_STACK_ERROR

    /* Close the file */
    if(H5Fclose(fid) < 0)
        FAIL_STACK_ERROR

    /* 
     * Write bad offset values at 3 locations in the file
     */

    /* Open the file */
    if((fd = HDopen(TESTFILE, O_RDWR, 0663)) < 0)
        FAIL_STACK_ERROR

    /* Position the file for /group1/group2: replace heap offset "8" by bad offset */
    if(HDlseek(fd, (HDoff_t)880, SEEK_SET) < 0)
        FAIL_STACK_ERROR
    /* Write the bad offset value to the file */
    if(HDwrite(fd, &val, sizeof(val)) < 0)
        FAIL_STACK_ERROR

    /* Position the file for /dsetA: replace name offset into private heap "72" by bad offset */
    if(HDlseek(fd, (HDoff_t)1512, SEEK_SET) < 0)
        FAIL_STACK_ERROR
    /* Write the bad offset value to the file */
    if(HDwrite(fd, &val, sizeof(val)) < 0)
        FAIL_STACK_ERROR

    /* Position the file for /soft_one: replace link value offset in the scratch pad "32" by bad offset */
    if(HDlseek(fd, (HDoff_t)1616, SEEK_SET) < 0)
        FAIL_STACK_ERROR
    /* Write the bad offset value to the file */
    if(HDwrite(fd, &val, sizeof(val)) < 0)
        FAIL_STACK_ERROR

    /* Close the file */
    if(HDclose(fd) < 0)
        FAIL_STACK_ERROR

    return EXIT_SUCCESS;

error:
    H5E_BEGIN_TRY {
        H5Gclose(gid1);
        H5Gclose(gid2);
        H5Dclose(did);
        H5Sclose(sid);
        H5Fclose(fid);
    } H5E_END_TRY;

    return EXIT_FAILURE;
} /* end main() */
Code Example #24
File: titerate.c  Project: adasworks/hdf5
/****************************************************************
**
**  test_iter_group(): Test group iteration functionality
**
****************************************************************/
static void
test_iter_group(hid_t fapl, hbool_t new_format)
{
    hid_t file;             /* File ID */
    hid_t dataset;          /* Dataset ID */
    hid_t datatype;         /* Common datatype ID */
    hid_t filespace;        /* Common dataspace ID */
    hid_t root_group,grp;   /* Root group ID */
    int i;                  /* counting variable */
    hsize_t idx;            /* Index in the group */
    char name[NAMELEN];     /* temporary name buffer */
    char *lnames[NDATASETS + 2];/* Names of the links created */
    char dataset_name[NAMELEN];  /* dataset name */
    iter_info info;         /* Custom iteration information */
    H5G_info_t ginfo;       /* Buffer for querying object's info */
    herr_t ret;		    /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Group Iteration Functionality\n"));

    /* Create the test file with the datasets */
    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    CHECK(file, FAIL, "H5Fcreate");

    /* Test iterating over empty group */
    info.command = RET_ZERO;
    idx = 0;
    ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
    VERIFY(ret, SUCCEED, "H5Literate");

    datatype = H5Tcopy(H5T_NATIVE_INT);
    CHECK(datatype, FAIL, "H5Tcopy");

    filespace=H5Screate(H5S_SCALAR);
    CHECK(filespace, FAIL, "H5Screate");

    for(i=0; i< NDATASETS; i++) {
        sprintf(name,"Dataset %d",i);
        dataset = H5Dcreate2(file, name, datatype, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        CHECK(dataset, FAIL, "H5Dcreate2");

        /* Keep a copy of the dataset names around for later */
        lnames[i] = HDstrdup(name);
        CHECK(lnames[i], NULL, "strdup");

        ret = H5Dclose(dataset);
        CHECK(ret, FAIL, "H5Dclose");
    } /* end for */

    /* Create a group and named datatype under root group for testing */
    grp = H5Gcreate2(file, "grp", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Gcreate2");

    lnames[NDATASETS] = HDstrdup("grp");
    CHECK(lnames[NDATASETS], NULL, "strdup");

    ret = H5Tcommit2(file, "dtype", datatype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Tcommit2");

    lnames[NDATASETS + 1] = HDstrdup("dtype");
    CHECK(lnames[NDATASETS + 1], NULL, "strdup");

    /* Close everything up */
    ret = H5Tclose(datatype);
    CHECK(ret, FAIL, "H5Tclose");

    ret = H5Gclose(grp);
    CHECK(ret, FAIL, "H5Gclose");

    ret = H5Sclose(filespace);
    CHECK(ret, FAIL, "H5Sclose");

    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");

    /* Sort the dataset names */
    HDqsort(lnames, (size_t)(NDATASETS + 2), sizeof(char *), iter_strcmp);


    /* Iterate through the datasets in the root group in various ways */
    file = H5Fopen(DATAFILE, H5F_ACC_RDONLY, fapl);
    CHECK(file, FAIL, "H5Fopen");

    /* These two functions, H5Oget_info_by_idx and H5Lget_name_by_idx, actually
     * iterate through B-tree for group members in internal library design.
     */
    root_group = H5Gopen2(file, "/", H5P_DEFAULT);
    CHECK(root_group, FAIL, "H5Gopen2");

    ret = H5Gget_info(root_group, &ginfo);
    CHECK(ret, FAIL, "H5Gget_info");
    VERIFY(ginfo.nlinks, (NDATASETS + 2), "H5Gget_info");

    for(i = 0; i< (int)ginfo.nlinks; i++) {
        H5O_info_t oinfo;               /* Object info */

        ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Lget_name_by_idx");

        ret = H5Oget_info_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Oget_info_by_idx");
    } /* end for */

    H5E_BEGIN_TRY {
        ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)(NDATASETS+3), dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Lget_name_by_idx");

    ret = H5Gclose(root_group);
    CHECK(ret, FAIL, "H5Gclose");

    /* These two functions, H5Oget_info_by_idx and H5Lget_name_by_idx, actually
     * iterate through B-tree for group members in internal library design.
     *  (Same as test above, but with the file ID instead of opening the root group)
     */
    ret = H5Gget_info(file, &ginfo);
    CHECK(ret, FAIL, "H5Gget_info");
    VERIFY(ginfo.nlinks, NDATASETS + 2, "H5Gget_info");

    for(i = 0; i< (int)ginfo.nlinks; i++) {
        H5O_info_t oinfo;               /* Object info */

        ret = (herr_t)H5Lget_name_by_idx(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Lget_name_by_idx");

        ret = H5Oget_info_by_idx(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Oget_info_by_idx");
    } /* end for */

    H5E_BEGIN_TRY {
        ret = (herr_t)H5Lget_name_by_idx(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)(NDATASETS + 3), dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Lget_name_by_idx");

    /* Test invalid indices for starting iteration */
    info.command = RET_ZERO;
    idx = (hsize_t)-1;
    H5E_BEGIN_TRY {
        ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Literate");

    /* Test skipping exactly as many entries as in the group */
    idx = NDATASETS + 2;
    H5E_BEGIN_TRY {
        ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Literate");

    /* Test skipping more entries than are in the group */
    idx = NDATASETS + 3;
    H5E_BEGIN_TRY {
        ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Literate");

    /* Test all objects in group, when callback always returns 0 */
    info.command = RET_ZERO;
    idx = 0;
    if((ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) > 0)
        TestErrPrintf("Group iteration function didn't return zero correctly!\n");

    /* Test all objects in group, when callback always returns 2 */
    /* This also tests the "restarting" ability, because the index changes */
    info.command = RET_TWO;
    idx = i = 0;
    while((ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) > 0) {
        /* Verify return value from iterator gets propagated correctly */
        VERIFY(ret, 2, "H5Literate");

        /* Increment the number of times "2" is returned */
        i++;

        /* Verify that the index is the correct value */
        VERIFY(idx, (hsize_t)i, "H5Literate");
        if(idx > (NDATASETS + 2))
            TestErrPrintf("Group iteration function walked too far!\n");

        /* Verify that the correct name is retrieved */
        if(HDstrcmp(info.name, lnames[(size_t)(idx - 1)]) != 0)
            TestErrPrintf("Group iteration function didn't return name correctly for link - lnames[%u] = '%s'!\n", (unsigned)(idx - 1), lnames[(size_t)(idx - 1)]);
    } /* end while */
    VERIFY(ret, -1, "H5Literate");

    if(i != (NDATASETS + 2))
        TestErrPrintf("%u: Group iteration function didn't perform multiple iterations correctly!\n", __LINE__);

    /* Test all objects in group, when callback changes return value */
    /* This also tests the "restarting" ability, because the index changes */
    info.command = new_format ? RET_CHANGE2 : RET_CHANGE;
    idx = i = 0;
    while((ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) >= 0) {
        /* Verify return value from iterator gets propagated correctly */
        VERIFY(ret, 1, "H5Literate");

        /* Increment the number of times "1" is returned */
        i++;

        /* Verify that the index is the correct value */
        VERIFY(idx, (hsize_t)(i + 10), "H5Literate");
        if(idx > (NDATASETS + 2))
            TestErrPrintf("Group iteration function walked too far!\n");

        /* Verify that the correct name is retrieved */
        if(HDstrcmp(info.name, lnames[(size_t)(idx - 1)]) != 0)
            TestErrPrintf("Group iteration function didn't return name correctly for link - lnames[%u] = '%s'!\n", (unsigned)(idx - 1), lnames[(size_t)(idx - 1)]);
    } /* end while */
    VERIFY(ret, -1, "H5Literate");

    if(i != 42 || idx != 52)
        TestErrPrintf("%u: Group iteration function didn't perform multiple iterations correctly!\n", __LINE__);

    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");

    /* Free the dataset names */
    for(i = 0; i< (NDATASETS + 2); i++)
        HDfree(lnames[i]);
} /* test_iter_group() */
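liter_cb() and the RET_* command values carried in iter_info are defined elsewhere in titerate.c; the following is a simplified, reconstructed sketch of what such an H5Literate() callback looks like, not the original code:

/* Simplified sketch: record the link name and return a value selected by
 * the caller through op_data. Positive return values stop the iteration
 * and are propagated back by H5Literate(). */
static herr_t
liter_cb(hid_t group, const char *name, const H5L_info_t *link_info, void *op_data)
{
    iter_info *info = (iter_info *)op_data;

    (void)group;
    (void)link_info;
    HDstrcpy(info->name, name);

    switch (info->command) {
        case RET_ZERO:
            return 0;          /* keep iterating */
        case RET_TWO:
            return 2;          /* stop and propagate 2 */
        default:
            return 1;          /* stop and propagate 1 */
    }
}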
Code Example #25
int main(int argc, char *argv[]) {
    
//    test_dstegr();

    if (argc != 4 + 1) {
        printf("Usage: %s N W K output.h5\n", argv[0]);
        return -1;
    }
    
    lapack_int  N = atoi(argv[1]);
    double      W = atof(argv[2]);
    lapack_int  K = atoi(argv[3]);
    char *outname = argv[4];
    
//    printf("N = %d\nW = %g\nK = %d\noutname = %s\n", N, W, K, outname);

    if (!(0 < K && K <= N && 0 < W && W < 0.5)) {
        printf("The arguments must satisfy 0 < K <= N and 0 < W < 0.5\n");
        return -1;
    }

    double *d = calloc(N, sizeof(double));
    double *e = calloc(N, sizeof(double));
    double *w = calloc(N, sizeof(double));
    double *z = calloc(K * N, sizeof(double));
    lapack_int m   = 0;
    lapack_int ldz = N;
    lapack_int *isuppz = calloc(2 * K, sizeof(lapack_int));

    assert(d && e && w && z && isuppz);

    double cos_two_pi_W = cos(2 * 3.14159265358979323846 * W);
    double x;
    for (int i = 0; i < N; i++) {
        x = (0.5 * (N - 1) - i);
        d[i] = x * x * cos_two_pi_W;
    }

    for (int i = 0; i < N - 1; i++)
        e[i] = 0.5 * (i + 1) * (N - i - 1);

    
/* lapack_int LAPACKE_dstegr( int matrix_order, char jobz, char range, */
/*                            lapack_int n, double* d, double* e, double vl, */
/*                            double vu, lapack_int il, lapack_int iu, */
/*                            double abstol, lapack_int* m, double* w, double* z, */
/*                            lapack_int ldz, lapack_int* isuppz ); */

    printf("Before DSTEGR\n");
    lapack_int info = LAPACKE_dstegr(LAPACK_COL_MAJOR, 'V', 'I',
                                     N, d, e,
                                     0, 0,            /* vl, vu: unused when range = 'I' */
                                     N - K + 1, N,    /* il, iu: indices of the K largest eigenvalues */
                                     0,               /* abstol */
                                     &m, w, z,
                                     ldz, isuppz);
    printf("After DSTEGR\n");
    printf("K = %d\nm = %d\n", K, m);
    
    if (info) {
        printf("Some error occurred in DSTEGR, info = %d\n", info);
        return -1;
    }

    hid_t   file_id;
    hsize_t dims_w[1] = {K};
    hsize_t dims_z[2] = {K, N};
    hsize_t dims_scalar[1] = {1};
    herr_t  status;
    
    file_id = H5Fcreate(outname, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    assert(file_id >= 0);
    status  = H5LTmake_dataset_double(file_id, "/eigenvalues",  1, dims_w, w);
    assert(status >= 0);
    status  = H5LTmake_dataset_double(file_id, "/eigenvectors", 2, dims_z, z);
    assert(status >= 0);
    status  = H5LTmake_dataset_double(file_id, "/W", 1, dims_scalar, &W);
    assert(status >= 0);
    status  = H5LTmake_dataset_int(file_id, "/K", 1, dims_scalar, &K);
    assert(status >= 0);
    status  = H5LTmake_dataset_int(file_id, "/N", 1, dims_scalar, &N);
    assert(status >= 0);
    status  = H5Fclose (file_id);
    assert(status >= 0);

    /* free(d);  */
    /* free(e); */
    /* free(w); */
    /* free(z); */
    /* free(isuppz); */

    return 0;
    
}
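
Assuming the file layout produced above (the /eigenvalues, /eigenvectors, /K and /N datasets written by this program), a minimal sketch of reading the results back with the same H5LT convenience API might look like the following; read_back() is a hypothetical helper, not part of the program above:

#include <stdio.h>
#include <stdlib.h>
#include "hdf5.h"
#include "hdf5_hl.h"

int read_back(const char *outname)   /* hypothetical helper */
{
    int K = 0, N = 0;
    hid_t file_id = H5Fopen(outname, H5F_ACC_RDONLY, H5P_DEFAULT);
    if (file_id < 0) return -1;

    /* The scalars were written above as length-1 datasets */
    if (H5LTread_dataset_int(file_id, "/K", &K) < 0) return -1;
    if (H5LTread_dataset_int(file_id, "/N", &N) < 0) return -1;

    double *w = calloc((size_t)K, sizeof(double));
    double *z = calloc((size_t)K * (size_t)N, sizeof(double));
    if (!w || !z) return -1;

    if (H5LTread_dataset_double(file_id, "/eigenvalues",  w) < 0) return -1;
    if (H5LTread_dataset_double(file_id, "/eigenvectors", z) < 0) return -1;

    printf("read %d eigenvalues (sequence length %d) from %s\n", K, N, outname);

    free(w);
    free(z);
    H5Fclose(file_id);
    return 0;
}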
Code example #26
File: titerate.c  Project: adasworks/hdf5
/****************************************************************
**
**  test_iter_attr(): Test attribute iteration functionality
**
****************************************************************/
static void test_iter_attr(hid_t fapl, hbool_t new_format)
{
    hid_t file;             /* File ID */
    hid_t dataset;          /* Common Dataset ID */
    hid_t filespace;        /* Common dataspace ID */
    hid_t attribute;        /* Attribute ID */
    int i;                  /* counting variable */
    hsize_t idx;            /* Index in the attribute list */
    char name[NAMELEN];     /* temporary name buffer */
    char *anames[NATTR];    /* Names of the attributes created */
    iter_info info;         /* Custom iteration information */
    herr_t		ret;		/* Generic return value		*/

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Attribute Iteration Functionality\n"));

    /* Create the test file with the datasets */
    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    CHECK(file, FAIL, "H5Fcreate");

    filespace = H5Screate(H5S_SCALAR);
    CHECK(filespace, FAIL, "H5Screate");

    dataset = H5Dcreate2(file, "Dataset", H5T_NATIVE_INT, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dcreate2");

    for(i = 0; i < NATTR; i++) {
        sprintf(name, "Attribute %02d", i);
        attribute = H5Acreate2(dataset, name, H5T_NATIVE_INT, filespace, H5P_DEFAULT, H5P_DEFAULT);
        CHECK(attribute, FAIL, "H5Acreate2");

        /* Keep a copy of the attribute names around for later */
        anames[i] = HDstrdup(name);
        CHECK(anames[i], NULL, "strdup");

        ret = H5Aclose(attribute);
        CHECK(ret, FAIL, "H5Aclose");
    } /* end for */

    /* Close everything up */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    ret = H5Sclose(filespace);
    CHECK(ret, FAIL, "H5Sclose");

    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");


    /* Iterate through the attributes on the dataset in various ways */
    file = H5Fopen(DATAFILE, H5F_ACC_RDONLY, fapl);
    CHECK(file, FAIL, "H5Fopen");

    dataset = H5Dopen2(file, "Dataset", H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dopen2");

    /* Test invalid indices for starting iteration */
    info.command = RET_ZERO;

    /* Test skipping exactly as many attributes as there are */
    idx = NATTR;
    H5E_BEGIN_TRY {
        ret = H5Aiterate2(dataset, H5_INDEX_NAME, H5_ITER_INC, &idx, aiter_cb, &info);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Aiterate2");

    /* Test skipping more attributes than there are */
    idx = NATTR + 1;
    H5E_BEGIN_TRY {
        ret = H5Aiterate2(dataset, H5_INDEX_NAME, H5_ITER_INC, &idx, aiter_cb, &info);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Aiterate2");

    /* Test all attributes on dataset, when callback always returns 0 */
    info.command = RET_ZERO;
    idx = 0;
    if((ret = H5Aiterate2(dataset, H5_INDEX_NAME, H5_ITER_INC, &idx, aiter_cb, &info)) > 0)
        TestErrPrintf("Attribute iteration function didn't return zero correctly!\n");

    /* Test all attributes on dataset, when callback always returns 2 */
    /* This also tests the "restarting" ability, because the index changes */
    info.command = RET_TWO;
    idx = i = 0;
    while((ret = H5Aiterate2(dataset, H5_INDEX_NAME, H5_ITER_INC, &idx, aiter_cb, &info)) > 0) {
        /* Verify return value from iterator gets propagated correctly */
        VERIFY(ret, 2, "H5Aiterate2");

        /* Increment the number of times "2" is returned */
        i++;

        /* Verify that the index is the correct value */
        VERIFY(idx, (unsigned)i, "H5Aiterate2");

        /* Don't check name when new format is used */
        if(!new_format) {
            /* Verify that the correct name is retrieved */
            if(HDstrcmp(info.name, anames[(size_t)idx - 1]) != 0)
                TestErrPrintf("%u: Attribute iteration function didn't set names correctly, info.name = '%s', anames[%u] = '%s'!\n", __LINE__, info.name, (unsigned)(idx - 1), anames[(size_t)idx - 1]);
        } /* end if */
    } /* end while */
    VERIFY(ret, -1, "H5Aiterate2");
    if(i != 50 || idx != 50)
        TestErrPrintf("%u: Attribute iteration function didn't perform multiple iterations correctly!\n", __LINE__);


    /* Test all attributes on dataset, when callback changes return value */
    /* This also tests the "restarting" ability, because the index changes */
    info.command = new_format ? RET_CHANGE2 : RET_CHANGE;
    idx = i = 0;
    while((ret = H5Aiterate2(dataset, H5_INDEX_NAME, H5_ITER_INC, &idx, aiter_cb, &info)) > 0) {
        /* Verify return value from iterator gets propagated correctly */
        VERIFY(ret, 1, "H5Aiterate2");

        /* Increment the number of times "1" is returned */
        i++;

        /* Verify that the index is the correct value */
        VERIFY(idx, (unsigned)i + 10, "H5Aiterate2");

        /* Don't check name when new format is used */
        if(!new_format) {
            /* Verify that the correct name is retrieved */
            if(HDstrcmp(info.name, anames[(size_t)idx - 1]) != 0)
                TestErrPrintf("%u: Attribute iteration function didn't set names correctly, info.name = '%s', anames[%u] = '%s'!\n", __LINE__, info.name, (unsigned)(idx - 1), anames[(size_t)idx - 1]);
        } /* end if */
    } /* end while */
    VERIFY(ret, -1, "H5Aiterate2");
    if(i != 40 || idx != 50)
        TestErrPrintf("%u: Attribute iteration function didn't perform multiple iterations correctly!\n", __LINE__);

    ret=H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");

    ret=H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Free the attribute names */
    for(i=0; i< NATTR; i++)
        HDfree(anames[i]);

} /* test_iter_attr() */
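
aiter_cb plays the same role for attributes that liter_cb plays for links. In the 1.8 API the H5Aiterate2 callback has the H5A_operator2_t signature; a minimal sketch, reusing the assumed iter_info_sketch struct from the link-iteration note above:

/* H5Aiterate2 callback (H5A_operator2_t signature); field names are assumptions. */
static herr_t
aiter_cb_sketch(hid_t loc_id, const char *attr_name, const H5A_info_t *ainfo,
                void *op_data)
{
    iter_info_sketch *info = (iter_info_sketch *)op_data;

    (void)loc_id; (void)ainfo;
    strncpy(info->name, attr_name, sizeof(info->name) - 1);
    info->name[sizeof(info->name) - 1] = '\0';

    /* As with H5Literate, a non-zero return stops the iteration and is
     * propagated back to the caller; idx is left pointing at the next attribute. */
    return (info->command == 0) ? 0 : 2;
}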
Code example #27
File: spectrum.cpp  Project: biochem-fan/cheetah
void readSpectrumEnergyScale(cGlobal *global, char *filename) {
	
	char        groupname[1024];
	char        fieldname[1024];
	hid_t       file_id;
	hid_t       datagroup_id;
	hid_t       dataset_id;
	hid_t       dataspace_id;
	hid_t       datatype_id;
	H5T_class_t dataclass;
	size_t      size;
	
	int ndims;
	
	sprintf(groupname, "energySpectrum");
	sprintf(fieldname, "runIntegratedEnergyScale");
	// Check if an energy scale calibration file has been specified
	if ( strcmp(filename,"") == 0 ){
		printf("spectrum energy scale calibration file path was not specified\n");
		printf("spectra will be output with default (0) energy scale\n");
		return;
	}
	
	// Check whether file exists!
	FILE* fp = fopen(filename, "r");
	if (fp) 	// file exists
		fclose(fp);
	else {		// file doesn't exist
		printf("specified energy scale calibration file does not exist: %s\n",filename);
		printf("spectra will be output with default (0) energy scale\n");
		return;
	}
	
	printf("Reading energy spectrum scale calibration file:\n");
	printf("\t%s\n",filename);
	
	// Open the file
	file_id = H5Fopen(filename,H5F_ACC_RDONLY,H5P_DEFAULT);
	if(file_id < 0){
		printf("ERROR: Could not open file %s\n",filename);
		printf("spectra will be output with default (0) energy scale\n");
		return;
	}
	
	// Open the dataset
	datagroup_id = H5Gopen1(file_id, groupname);
	dataset_id = H5Dopen1(datagroup_id, fieldname);
	dataspace_id = H5Dget_space(dataset_id);
	
	// Test if correct dimensions / size
	ndims = H5Sget_simple_extent_ndims(dataspace_id);
	if(ndims != 1) {
		printf("the specified file does not have the correct dimensions for energy scale calibration, ndims=%i\n",ndims);
		printf("spectra will be output with default (0) energy scale\n");
		return;
	}
	hsize_t dims[1];
	H5Sget_simple_extent_dims(dataspace_id, dims, NULL);
	if (dims[0] != (hsize_t)global->espectrumLength) {
		printf("the specified file does not have the correct dimensions for energy scale calibration\n");
		printf("spectra will be output with default (0) energy scale\n");
		return;
	}
	
	datatype_id =  H5Dget_type(dataset_id);
	dataclass = H5Tget_class(datatype_id);
	size = H5Tget_size(datatype_id);
		
	double*     energyscale = (double *) calloc(global->espectrumLength, sizeof(double));
	H5Dread(dataset_id, datatype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, energyscale);
	for(int i=0; i<global->espectrumLength; i++) {
		global->espectrumScale[i] = energyscale[i];
	}
	free(energyscale);
	
	// Close and cleanup
	H5Dclose(dataset_id);
	H5Gclose(datagroup_id);
	
	// Cleanup stale IDs
	hid_t ids[256];
	int n_ids = H5Fget_obj_ids(file_id, H5F_OBJ_ALL, 256, ids);
	for (long i=0; i<n_ids; i++ ) {
		
		hid_t id;
		H5I_type_t type;
		id = ids[i];
		type = H5Iget_type(id);
		if ( type == H5I_GROUP )
			H5Gclose(id);
		if ( type == H5I_DATASET )
			H5Dclose(id);
		if ( type == H5I_DATASPACE )
			H5Sclose(id);
		//if ( type == H5I_DATATYPE )
		//	H5Dclose(id);
	}
	
	H5Fclose(file_id);
	printf("energy spectrum scale calibration file read successful:\n");
	return;
}
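
For a fixed-length 1-D double dataset like this one, the high-level H5LT API (hdf5_hl) can replace most of the open/read/close boilerplate above. A hedged sketch, with the dataset path taken from the groupname/fieldname strings used in the function, readEnergyScaleLite() a hypothetical helper, and error handling kept minimal:

#include "hdf5.h"
#include "hdf5_hl.h"

static int readEnergyScaleLite(const char *filename, double *scale, hsize_t expectedLength)
{
	int         ndims = 0;
	hsize_t     dims[1] = {0};
	H5T_class_t cls;
	size_t      size;
	hid_t       file_id = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);

	if (file_id < 0)
		return -1;

	// Verify the dataset is 1-D with the expected length before reading it
	if (H5LTget_dataset_ndims(file_id, "energySpectrum/runIntegratedEnergyScale", &ndims) < 0
	    || ndims != 1) {
		H5Fclose(file_id);
		return -1;
	}
	if (H5LTget_dataset_info(file_id, "energySpectrum/runIntegratedEnergyScale",
	                         dims, &cls, &size) < 0
	    || dims[0] != expectedLength) {
		H5Fclose(file_id);
		return -1;
	}
	if (H5LTread_dataset_double(file_id, "energySpectrum/runIntegratedEnergyScale", scale) < 0) {
		H5Fclose(file_id);
		return -1;
	}
	H5Fclose(file_id);
	return 0;
}

The caller would pass global->espectrumScale and global->espectrumLength, mirroring the copy loop in the original function.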
Code example #28
File: titerate.c  Project: adasworks/hdf5
/****************************************************************
**
**  test_iter_group_large(): Test group iteration functionality
**          for groups with large #'s of objects
**
****************************************************************/
static void
test_iter_group_large(hid_t fapl)
{
    hid_t		file;		/* HDF5 File IDs		*/
    hid_t		dataset;	/* Dataset ID			*/
    hid_t		group;      /* Group ID             */
    hid_t		sid;       /* Dataspace ID			*/
    hid_t		tid;       /* Datatype ID			*/
    hsize_t		dims[] = {SPACE1_DIM1};
    herr_t		ret;		/* Generic return value		*/
    char gname[20];         /* Temporary group name */
    iter_info names[ITER_NGROUPS+2]; /* Names of objects in the root group */
    iter_info *curr_name;        /* Pointer to the current name in the root group */
    int                 i;

    /* Compound datatype */
    typedef struct s1_t {
        unsigned int a;
        unsigned int b;
        float c;
    } s1_t;

    HDmemset(names, 0, sizeof names);

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Large Group Iteration Functionality\n"));

    /* Create file */
    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    CHECK(file, FAIL, "H5Fcreate");

    /* Create dataspace for datasets */
    sid = H5Screate_simple(SPACE1_RANK, dims, NULL);
    CHECK(sid, FAIL, "H5Screate_simple");

    /* Create a bunch of groups */
    for(i = 0; i < ITER_NGROUPS; i++) {
        sprintf(gname, "Group_%d", i);

        /* Add the name to the list of objects in the root group */
        HDstrcpy(names[i].name, gname);
        names[i].type = H5O_TYPE_GROUP;

        /* Create a group */
        group = H5Gcreate2(file, gname, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        CHECK(group, FAIL, "H5Gcreate2");

        /* Close a group */
        ret = H5Gclose(group);
        CHECK(ret, FAIL, "H5Gclose");
    } /* end for */

    /* Create a dataset  */
    dataset = H5Dcreate2(file, "Dataset1", H5T_STD_U32LE, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dcreate2");

    /* Add the name to the list of objects in the root group */
    HDstrcpy(names[ITER_NGROUPS].name, "Dataset1");
    names[ITER_NGROUPS].type = H5O_TYPE_DATASET;

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close Dataspace */
    ret = H5Sclose(sid);
    CHECK(ret, FAIL, "H5Sclose");

    /* Create a datatype */
    tid = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
    CHECK(tid, FAIL, "H5Tcreate");

    /* Insert fields */
    ret = H5Tinsert(tid, "a", HOFFSET(s1_t, a), H5T_NATIVE_INT);
    CHECK(ret, FAIL, "H5Tinsert");

    ret = H5Tinsert(tid, "b", HOFFSET(s1_t, b), H5T_NATIVE_INT);
    CHECK(ret, FAIL, "H5Tinsert");

    ret = H5Tinsert(tid, "c", HOFFSET(s1_t, c), H5T_NATIVE_FLOAT);
    CHECK(ret, FAIL, "H5Tinsert");

    /* Save datatype for later */
    ret = H5Tcommit2(file, "Datatype1", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Tcommit2");

    /* Add the name to the list of objects in the root group */
    HDstrcpy(names[ITER_NGROUPS + 1].name, "Datatype1");
    names[ITER_NGROUPS + 1].type = H5O_TYPE_NAMED_DATATYPE;

    /* Close datatype */
    ret = H5Tclose(tid);
    CHECK(ret, FAIL, "H5Tclose");

    /* Need to sort the names in the root group, cause that's what the library does */
    HDqsort(names, (size_t)(ITER_NGROUPS + 2), sizeof(iter_info), iter_strcmp2);

    /* Iterate through the file to see members of the root group */
    curr_name = &names[0];
    ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, NULL, liter_cb2, curr_name);
    CHECK(ret, FAIL, "H5Literate");
    for(i = 1; i < 100; i++) {
        hsize_t idx = i;

        curr_name = &names[i];
        ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb2, curr_name);
        CHECK(ret, FAIL, "H5Literate");
    } /* end for */

    /* Close file */
    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");
} /* test_iter_group_large() */
Code example #29
File: test_lite.c  Project: svn2github/hdf5
static int test_dsets( void )
{
    int     rank     = 2;
    hsize_t dims[2]  = {2,3};
    hid_t   file_id;
    hid_t   dataset_id;
    char    data_char_in[DIM]    = {1,2,3,4,5,6};
    char    data_char_out[DIM];
    short   data_short_in[DIM]   = {1,2,3,4,5,6};
    short   data_short_out[DIM];
    int     data_int_in[DIM]     = {1,2,3,4,5,6};
    int     data_int_out[DIM];
    long    data_long_in[DIM]    = {1,2,3,4,5,6};
    long    data_long_out[DIM];
    float   data_float_in[DIM]   = {1,2,3,4,5,6};
    float   data_float_out[DIM];
    double  data_double_in[DIM]  = {1,2,3,4,5,6};
    double  data_double_out[DIM];
    const char    *data_string_in = "This is a string";
    char    data_string_out[20];
    int     i;


    /* Create a new file using default properties. */
    file_id = H5Fcreate( FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );

    /*-------------------------------------------------------------------------
    * H5LTmake_dataset test
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset");

    /* Make dataset */
    if ( H5LTmake_dataset( file_id, DSET0_NAME, rank, dims, H5T_NATIVE_INT, data_int_in ) < 0 )
        goto out;

    /* Read dataset using the basic HDF5 API */

    if ( ( dataset_id = H5Dopen2(file_id, DSET0_NAME, H5P_DEFAULT) ) < 0 )
        goto out;

    if ( H5Dread ( dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data_int_out ) < 0 )
        goto out;

    if ( H5Dclose( dataset_id ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_int_in[i] != data_int_out[i] ) {
            goto out;
        }
    }

    PASSED();

    /*-------------------------------------------------------------------------
    * read using the LT function H5LTread_dataset
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTread_dataset");

    if ( H5LTread_dataset( file_id, DSET0_NAME, H5T_NATIVE_INT, data_int_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_int_in[i] != data_int_out[i] ) {
            goto out;
        }
    }

    PASSED();

    /*-------------------------------------------------------------------------
    * test the H5LTmake_dataset_ functions
    *-------------------------------------------------------------------------
    */


    /*-------------------------------------------------------------------------
    * H5LTmake_dataset_char
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset_char");

    /* Make dataset char */
    if ( H5LTmake_dataset_char( file_id, DSET1_NAME, rank, dims, data_char_in ) < 0 )
        goto out;

    /* Read dataset */
    if ( H5LTread_dataset( file_id, DSET1_NAME, H5T_NATIVE_CHAR, data_char_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_char_in[i] != data_char_out[i] ) {
            goto out;
        }
    }

    /* Read dataset */
    if ( H5LTread_dataset_char( file_id, DSET1_NAME, data_char_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_char_in[i] != data_char_out[i] ) {
            goto out;
        }
    }

    PASSED();


    /*-------------------------------------------------------------------------
    * H5LTmake_dataset_short
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset_short");

    /* Make dataset short */
    if ( H5LTmake_dataset_short( file_id, DSET2_NAME, rank, dims, data_short_in ) < 0 )
        goto out;

    /* Read dataset */
    if ( H5LTread_dataset( file_id, DSET2_NAME, H5T_NATIVE_SHORT, data_short_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_short_in[i] != data_short_out[i] ) {
            goto out;
        }
    }

    /* Read dataset */
    if ( H5LTread_dataset_short( file_id, DSET2_NAME, data_short_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_short_in[i] != data_short_out[i] ) {
            goto out;
        }
    }

    PASSED();

    /*-------------------------------------------------------------------------
    * H5LTmake_dataset_int
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset_int");

    /* Make dataset int */
    if ( H5LTmake_dataset_int( file_id, DSET3_NAME, rank, dims, data_int_in ) < 0 )
        goto out;

    /* Read dataset */
    if ( H5LTread_dataset( file_id, DSET3_NAME, H5T_NATIVE_INT, data_int_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_int_in[i] != data_int_out[i] ) {
            goto out;
        }
    }

    /* Read dataset */
    if ( H5LTread_dataset_int( file_id, DSET3_NAME, data_int_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_int_in[i] != data_int_out[i] ) {
            goto out;
        }
    }

    PASSED();


    /*-------------------------------------------------------------------------
    * H5LTmake_dataset_long
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset_long");

    /* Make dataset long */
    if ( H5LTmake_dataset_long( file_id, DSET4_NAME, rank, dims, data_long_in ) < 0 )
        goto out;

    /* Read dataset */
    if ( H5LTread_dataset( file_id, DSET4_NAME, H5T_NATIVE_LONG, data_long_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_long_in[i] != data_long_out[i] ) {
            goto out;
        }
    }

    /* Read dataset */
    if ( H5LTread_dataset_long( file_id, DSET4_NAME, data_long_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_long_in[i] != data_long_out[i] ) {
            goto out;
        }
    }

    PASSED();


    /*-------------------------------------------------------------------------
    * H5LTmake_dataset_float
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset_float");

    /* Make dataset float */
    if ( H5LTmake_dataset_float( file_id, DSET5_NAME, rank, dims, data_float_in ) < 0 )
        goto out;

    /* Read dataset */
    if ( H5LTread_dataset( file_id, DSET5_NAME, H5T_NATIVE_FLOAT, data_float_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_float_in[i] != data_float_out[i] ) {
            goto out;
        }
    }

    /* Read dataset */
    if ( H5LTread_dataset_float( file_id, DSET5_NAME, data_float_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_float_in[i] != data_float_out[i] ) {
            goto out;
        }
    }

    PASSED();


    /*-------------------------------------------------------------------------
    * H5LTmake_dataset_double
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset_double");

    /* Make dataset double */
    if ( H5LTmake_dataset_double( file_id, DSET6_NAME, rank, dims, data_double_in ) < 0 )
        goto out;

    /* Read dataset */
    if ( H5LTread_dataset( file_id, DSET6_NAME, H5T_NATIVE_DOUBLE, data_double_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_double_in[i] != data_double_out[i] ) {
            goto out;
        }
    }

    /* Read dataset */
    if ( H5LTread_dataset_double( file_id, DSET6_NAME, data_double_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_double_in[i] != data_double_out[i] ) {
            goto out;
        }
    }

    PASSED();


    /*-------------------------------------------------------------------------
    * H5LTmake_dataset_string
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset_string");

    /* Make dataset string */
    if ( H5LTmake_dataset_string(file_id,DSET7_NAME,data_string_in) < 0 )
        goto out;

    /* Read dataset */
    if ( H5LTread_dataset_string(file_id,DSET7_NAME,data_string_out) < 0 )
        goto out;

    if ( strcmp(data_string_in,data_string_out) != 0 )
        goto out;



    /*-------------------------------------------------------------------------
    * end tests
    *-------------------------------------------------------------------------
    */

    /* Close the file. */
    H5Fclose( file_id );

    PASSED();


    return 0;

out:
    /* Close the file. */
    H5Fclose( file_id );
    H5_FAILED();
    return -1;
}
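
The reads above rely on the rank and extents the datasets were written with. A small hedged sketch of how the same H5LT API can confirm that before reading; FILE_NAME and DSET0_NAME are the macros already used by test_dsets(), while check_dset0_shape() is a hypothetical helper:

static int check_dset0_shape( void )
{
    hid_t       file_id;
    int         rank = 0;
    hsize_t     dims[2] = {0, 0};
    H5T_class_t class_id;
    size_t      type_size;

    if ( ( file_id = H5Fopen( FILE_NAME, H5F_ACC_RDONLY, H5P_DEFAULT ) ) < 0 )
        return -1;

    /* Rank first, then the extents and element type */
    if ( H5LTget_dataset_ndims( file_id, DSET0_NAME, &rank ) < 0 || rank != 2 )
        goto out;
    if ( H5LTget_dataset_info( file_id, DSET0_NAME, dims, &class_id, &type_size ) < 0 )
        goto out;
    if ( dims[0] != 2 || dims[1] != 3 )   /* the {2,3} extent used when the datasets were made */
        goto out;

    H5Fclose( file_id );
    return 0;

out:
    H5Fclose( file_id );
    return -1;
}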
Code example #30
// -----------------------------------------------------------------------------
//
// -----------------------------------------------------------------------------
void FilterParametersRWTest()
{
  // Create our Pipeline object
  FilterPipeline::Pointer pipeline = FilterPipeline::New();


  GenericFilter::Pointer filt = GenericFilter::New();

  // Set something for each and every property so you have something to compare against.
  // You may want to make some constants for these values
  filt->setStlFilePrefix(StlFilePrefixTestValue);
  filt->setMaxIterations(MaxIterationsTestValue);
  filt->setMisorientationTolerance(MisorientationToleranceTestValue);
  filt->setInputFile(InputFileTestValue);
  filt->setInputPath(InputPathTestValue);
  filt->setOutputFile(OutputFileTestValue);
  filt->setOutputPath(OutputPathTestValue);
  filt->setWriteAlignmentShifts(WriteAlignmentShiftsTestValue);
  filt->setConversionType(ConversionTypeTestValue);
  filt->setSelectedCellArrayName(SelectedCellArrayNameTestValue);
  filt->setSelectedFieldArrayName(SelectedFieldArrayNameTestValue);
  filt->setSelectedEnsembleArrayName(SelectedEnsembleArrayNameTestValue);
  filt->setSurfaceMeshPointArrayName(SurfaceMeshPointArrayNameTestValue);
  filt->setSurfaceMeshFaceArrayName(SurfaceMeshFaceArrayNameTestValue);
  filt->setSurfaceMeshEdgeArrayName(SurfaceMeshEdgeArrayNameTestValue);
  filt->setSolidMeshPointArrayName(SolidMeshPointArrayNameTestValue);
  filt->setSolidMeshFaceArrayName(SolidMeshFaceArrayNameTestValue);
  filt->setSolidMeshEdgeArrayName(SolidMeshEdgeArrayNameTestValue);

  ComparisonInput_t comparison1;
  comparison1.arrayName = Comparison1InputArrayNameTestValue;
  comparison1.compOperator = Comparison1CompOperatorTestValue;
  comparison1.compValue = Comparison1CompValueTestValue;

  ComparisonInput_t comparison2;
  comparison2.arrayName = Comparison2InputArrayNameTestValue;
  comparison2.compOperator = Comparison2CompOperatorTestValue;
  comparison2.compValue = Comparison2CompValueTestValue;

  std::vector<ComparisonInput_t> comparisonVector;
  comparisonVector.push_back(comparison1);
  comparisonVector.push_back(comparison2);
  filt->setCellComparisonInputs(comparisonVector);

  AxisAngleInput_t axisAngles1;
  axisAngles1.angle = AxisAngles1AngleTestValue;
  axisAngles1.h = AxisAngles1HTestValue;
  axisAngles1.k = AxisAngles1KTestValue;
  axisAngles1.l = AxisAngles1LTestValue;

  AxisAngleInput_t axisAngles2;
  axisAngles2.angle = AxisAngles2AngleTestValue;
  axisAngles2.h = AxisAngles2HTestValue;
  axisAngles2.k = AxisAngles2KTestValue;
  axisAngles2.l = AxisAngles2LTestValue;

  std::vector<AxisAngleInput_t> axisAngleInputsVector;
  axisAngleInputsVector.push_back(axisAngles1);
  axisAngleInputsVector.push_back(axisAngles2);
  filt->setAxisAngleRotations(axisAngleInputsVector);

  DataContainerWriter::Pointer writer = DataContainerWriter::New();
  writer->setOutputFile(UnitTest::FilterParametersRWTest::OutputFile);

  pipeline->pushBack(filt);
  pipeline->pushBack(writer);

  pipeline->execute();
  int err = pipeline->getErrorCondition();
  DREAM3D_REQUIRED(err, >= , 0)


  // We are done writing a file, now we need to read the file using raw HDF5 codes

  hid_t fid = H5Utilities::openFile(UnitTest::FilterParametersRWTest::OutputFile);
  DREAM3D_REQUIRED(fid, >, 0)

  H5FilterParametersReader::Pointer reader = H5FilterParametersReader::New();

  hid_t pipelineGroupId = H5Gopen(fid, DREAM3D::HDF5::PipelineGroupName.c_str(), H5P_DEFAULT);
  reader->setGroupId(pipelineGroupId);

  err = reader->openFilterGroup( filt.get(), 0); // Open the HDF5 Group for this filter
  DREAM3D_REQUIRED(err, >=, 0)

  // This next line should read all the filter parameters into the filter.
  filt->readFilterParameters( reader.get(), 0);

  // Now one by one, compare each of the filter parameters that you have stored in some constant somewhere to the values that are now in the filt variable.
  // Use DREAM3D_REQUIRED() to make sure each one is what you think it is.
  DREAM3D_REQUIRED(StlFilePrefixTestValue, ==, filt->getStlFilePrefix() )
  DREAM3D_REQUIRED(MaxIterationsTestValue, ==, filt->getMaxIterations() )
  DREAM3D_REQUIRED(MisorientationToleranceTestValue, ==, filt->getMisorientationTolerance() )
  DREAM3D_REQUIRED(InputFileTestValue, ==, filt->getInputFile() )
  DREAM3D_REQUIRED(InputPathTestValue, ==, filt->getInputPath() )
  DREAM3D_REQUIRED(OutputFileTestValue, ==, filt->getOutputFile() )
  DREAM3D_REQUIRED(OutputPathTestValue, ==, filt->getOutputPath() )
  DREAM3D_REQUIRED(WriteAlignmentShiftsTestValue, ==, filt->getWriteAlignmentShifts() )
  DREAM3D_REQUIRED(ConversionTypeTestValue, ==, filt->getConversionType() )
  DREAM3D_REQUIRED(SelectedCellArrayNameTestValue, ==, filt->getSelectedCellArrayName() )
  DREAM3D_REQUIRED(SelectedFieldArrayNameTestValue, ==, filt->getSelectedFieldArrayName() )
  DREAM3D_REQUIRED(SelectedEnsembleArrayNameTestValue, ==, filt->getSelectedEnsembleArrayName() )
  DREAM3D_REQUIRED(SurfaceMeshPointArrayNameTestValue, ==, filt->getSurfaceMeshPointArrayName() )
  DREAM3D_REQUIRED(SurfaceMeshFaceArrayNameTestValue, ==, filt->getSurfaceMeshFaceArrayName() )
  DREAM3D_REQUIRED(SurfaceMeshEdgeArrayNameTestValue, ==, filt->getSurfaceMeshEdgeArrayName() )
  DREAM3D_REQUIRED(SolidMeshPointArrayNameTestValue, ==, filt->getSolidMeshPointArrayName() )
  DREAM3D_REQUIRED(SolidMeshFaceArrayNameTestValue, ==, filt->getSolidMeshFaceArrayName() )
  DREAM3D_REQUIRED(SolidMeshEdgeArrayNameTestValue, ==, filt->getSolidMeshEdgeArrayName() )

  // Test the CellComparisonInputs widget
  std::vector<ComparisonInput_t> comparisonVectorRead = filt->getCellComparisonInputs();
  ComparisonInput_t comparison1Read = comparisonVectorRead[0];
  ComparisonInput_t comparison2Read = comparisonVectorRead[1];

  DREAM3D_REQUIRED(comparison1.arrayName, ==, comparison1Read.arrayName)
  DREAM3D_REQUIRED(comparison1.compOperator, ==, comparison1Read.compOperator)
  DREAM3D_REQUIRED(comparison1.compValue, ==, comparison1Read.compValue)
  DREAM3D_REQUIRED(comparison2.arrayName, ==, comparison2Read.arrayName)
  DREAM3D_REQUIRED(comparison2.compOperator, ==, comparison2Read.compOperator)
  DREAM3D_REQUIRED(comparison2.compValue, ==, comparison2Read.compValue)

  // Test the AxisAngleInput widget
  std::vector<AxisAngleInput_t> axisAngleVectorRead = filt->getAxisAngleRotations();
  AxisAngleInput_t axisAngles1Read = axisAngleVectorRead[0];
  AxisAngleInput_t axisAngles2Read = axisAngleVectorRead[1];

  DREAM3D_REQUIRED(axisAngles1.angle, ==, axisAngles1Read.angle)
  DREAM3D_REQUIRED(axisAngles1.h, ==, axisAngles1Read.h)
  DREAM3D_REQUIRED(axisAngles1.k, ==, axisAngles1Read.k)
  DREAM3D_REQUIRED(axisAngles1.l, ==, axisAngles1Read.l)
  DREAM3D_REQUIRED(axisAngles2.angle, ==, axisAngles2Read.angle)
  DREAM3D_REQUIRED(axisAngles2.h, ==, axisAngles2Read.h)
  DREAM3D_REQUIRED(axisAngles2.k, ==, axisAngles2Read.k)
  DREAM3D_REQUIRED(axisAngles2.l, ==, axisAngles2Read.l)

  err = reader->closeFilterGroup(); // Close the HDF5 group for this filter
  DREAM3D_REQUIRED(err, >=, 0)


  H5Gclose(pipelineGroupId); // Closes the "Pipeline" group
  H5Fclose(fid); // Closes the file
}