Example no. 1
int_f
nh5tbget_table_info_c(hid_t_f *loc_id,
                        int_f *namelen,
                        _fcd name,
                        hsize_t_f *nfields,
                        hsize_t_f *nrecords)
{
 int     ret_value = -1;
 herr_t  ret;
 char    *c_name;
 int     c_namelen;
 hid_t   c_loc_id = *loc_id;
 hsize_t c_nfields;
 hsize_t c_nrecords;

/*
 * Convert FORTRAN name to C name
 */
 c_namelen = *namelen;
 c_name = (char *)HD5f2cstring(name, c_namelen);
 if (c_name == NULL) return ret_value;

/*
 * Call H5TBget_table_info function.
 */

 ret = H5TBget_table_info(c_loc_id, c_name, &c_nfields, &c_nrecords);

 *nfields  = (hsize_t_f) c_nfields;
 *nrecords = (hsize_t_f) c_nrecords;

 HDfree(c_name);  /* release the name buffer allocated by HD5f2cstring */

 if (ret < 0) return ret_value;
 ret_value = 0;
 return ret_value;
}
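The wrapper in Example no. 1 just forwards to the C high-level Table API call H5TBget_table_info(loc_id, table_name, &nfields, &nrecords). For reference, here is a minimal stand-alone sketch of that direct C call; the file name "example.h5" and the table path "/MyTable" are placeholders, not names used by any of the examples.

#include <stdio.h>
#include "hdf5.h"
#include "hdf5_hl.h"

int main(void)
{
    hid_t   file_id;
    hsize_t nfields  = 0;
    hsize_t nrecords = 0;

    /* Open an existing file read-only (hypothetical name) */
    file_id = H5Fopen("example.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    if (file_id < 0) return 1;

    /* Query the number of fields and records of a table (hypothetical path) */
    if (H5TBget_table_info(file_id, "/MyTable", &nfields, &nrecords) < 0) {
        H5Fclose(file_id);
        return 1;
    }

    printf("Table has %d fields and %d records\n", (int)nfields, (int)nrecords);

    H5Fclose(file_id);
    return 0;
}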
Example no. 2
/*+++++++++++++++++++++++++ Main Program or Function +++++++++++++++*/
unsigned int SCIA_RD_H5_LADS( struct param_record param, 
			      struct lads_scia *lads )
     /*@globals lads_size, lads_offs@*/
{
     hid_t   ads_id;
     hsize_t nfields, num_lads;

     const size_t lads_sizes[NFIELDS] = {
          sizeof( lads->mjd ),
          sizeof( lads->flag_mds ),
          sizeof( lads->corner )
     };
/*
 * create/open group /ADS/LADS
 */
     ads_id = NADC_OPEN_HDF5_Group( param.hdf_file_id, "/ADS" );
     if ( ads_id < 0 ) NADC_GOTO_ERROR( NADC_ERR_HDF_GRP, "/ADS" );
/*
 * read LADS table records
 */
     (void) H5TBget_table_info( ads_id, TBL_NAME, &nfields, &num_lads );
     (void) H5TBread_table( ads_id, TBL_NAME, lads_size, lads_offs, 
                            lads_sizes, lads );
     (void) H5Gclose( ads_id );

     return (unsigned int) num_lads;
 done:
     return 0u;
}
Example no. 3
/*+++++++++++++++++++++++++
.IDENTifer   SDMF_rd_simudarkTable
.PURPOSE     read metaTable records from SDMF simultaneous dark signal parameter database
.INPUT/OUTPUT
  call as    SDMF_rd_simudarkTable( locID, &numIndx, metaIndx, &mtbl );
     input:
           hid_t locID            :  HDF5 identifier of file or group
           int   *metaIndx        :  array with indices to requested records
 in/output:
           int   *numIndx         :  input: dimension metaIndx (or zero)
                                     output: number records read
    output:
	   struct mtbl_simudark_rec **mtbl :  State meta-data records to read

.RETURNS     nothing, error status passed by global variable ``nadc_stat''
.COMMENTS    none
-------------------------*/
void SDMF_rd_simudarkTable( hid_t locID, int *numIndx, int *metaIndx,
			  struct mtbl_simudark_rec **mtbl_out )
{
     hsize_t nfields, nrecords;
     herr_t  stat;

     struct mtbl_simudark_rec *mtbl;
/*
 * initialize return values
 */
     *mtbl_out = NULL;
/*
 * does the table already exist?
 */
     H5E_BEGIN_TRY {
          hid_t dataID = H5Dopen( locID, tableName, H5P_DEFAULT );
	  if ( dataID < 0 ) return;
	  (void) H5Dclose( dataID );
     } H5E_END_TRY;
/*
 * obtain table info
 */
     stat = H5TBget_table_info(locID, tableName, &nfields, &nrecords );
     if ( *numIndx == 0 ) *numIndx = (int) nrecords;
     if ( stat < 0 || nrecords == 0 ) return;

/*
 * allocate memory to store metaTable records
 */
     mtbl = (struct mtbl_simudark_rec *) 
	  malloc( (size_t) (*numIndx) * mtbl_size );
     if ( mtbl == NULL )
	  NADC_RETURN_ERROR( NADC_ERR_ALLOC, "mtbl" );
/*
 * read table
 */
     if ( (*numIndx) == (int) nrecords ) {
	  stat = H5TBread_table( locID, tableName, mtbl_size, mtbl_offs, 
				 mtbl_simudark_sizes, mtbl );
	  if ( stat < 0 ) {
	       free( mtbl );
	       NADC_RETURN_ERROR( NADC_ERR_HDF_RD, tableName );
	  } 
     } else {
	  register int nm;

	  for ( nm = 0; nm < (*numIndx); nm++ ) {
	       stat = H5TBread_records( locID, tableName, metaIndx[nm], 1,
					mtbl_size, mtbl_offs, 
					mtbl_simudark_sizes, mtbl+nm );
	       if ( stat < 0 ) {
		    free( mtbl );
		    NADC_RETURN_ERROR( NADC_ERR_HDF_RD, tableName );
	       } 
	  }
     }

     *mtbl_out = mtbl;
}
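The selective path in Example no. 3 reads one record at a time with H5TBread_records, using the same record size, offset array, and field-size array as a full H5TBread_table call. Below is a minimal sketch of that call pattern, assuming a hypothetical file "example.h5" holding a table "/MyTable" whose on-disk record layout matches the Rec struct.

#include <stdio.h>
#include "hdf5.h"
#include "hdf5_hl.h"

typedef struct {            /* must match the layout of the on-disk table */
    int    value;
    double weight;
} Rec;

int main(void)
{
    Rec    buf[1];
    size_t offsets[2] = { HOFFSET(Rec, value), HOFFSET(Rec, weight) };
    size_t sizes[2]   = { sizeof(buf[0].value), sizeof(buf[0].weight) };

    hid_t file_id = H5Fopen("example.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    if (file_id < 0) return 1;

    /* Read one record, starting at record index 3 */
    if (H5TBread_records(file_id, "/MyTable", 3, 1,
                         sizeof(Rec), offsets, sizes, buf) < 0) {
        H5Fclose(file_id);
        return 1;
    }
    printf("value=%d weight=%f\n", buf[0].value, buf[0].weight);

    H5Fclose(file_id);
    return 0;
}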
Example no. 4
int main( void )
{
 typedef struct Particle1 
 {
  char   name[16];
  int    lati;
  int    longi;
  float  pressure;
  double temperature; 
 } Particle1;
 
/* Define an array of Particles */
 Particle1  p_data[NRECORDS] = { 
 {"zero",0,0, 0.0f, 0.0},
 {"one",10,10, 1.0f, 10.0},
 {"two",  20,20, 2.0f, 20.0},
 {"three",30,30, 3.0f, 30.0},
 {"four", 40,40, 4.0f, 40.0},
 {"five", 50,50, 5.0f, 50.0},
 {"six",  60,60, 6.0f, 60.0},
 {"seven",70,70, 7.0f, 70.0}
  };
 
 /* Calculate the size and the offsets of our struct members in memory */
 size_t dst_size1 =  sizeof( Particle1 );
 size_t dst_offset1[NFIELDS] = { HOFFSET( Particle1, name ),
  HOFFSET( Particle1, lati ),
  HOFFSET( Particle1, longi ),
  HOFFSET( Particle1, pressure ),
  HOFFSET( Particle1, temperature )};
 
 /* Define field information */
 const char *field_names[NFIELDS]  = 
 { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
 hid_t      field_type[NFIELDS];
 hid_t      string_type;
 hid_t      file_id;
 hsize_t    chunk_size = 10;
 int        compress  = 0;
 Particle1  fill_data[1] = { "no data",-1,-1, -99.0f, -99.0 };
 int        fill_data_new[1] = { -100 };
 hsize_t    position;
 herr_t     status; 
 hsize_t    nfields_out;
 hsize_t    nrecords_out;
 
 /* Define the inserted field information */
 hid_t      field_type_new = H5T_NATIVE_INT;
 int        data[NRECORDS] = { 0,1,2,3,4,5,6,7 };
 
 /* Initialize the field type */
 string_type = H5Tcopy( H5T_C_S1 );
 H5Tset_size( string_type, 16 );
 field_type[0] = string_type;
 field_type[1] = H5T_NATIVE_INT;
 field_type[2] = H5T_NATIVE_INT;
 field_type[3] = H5T_NATIVE_FLOAT;
 field_type[4] = H5T_NATIVE_DOUBLE;
 
 /* Create a new file using default properties. */
 file_id = H5Fcreate( "ex_table_11.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
 
 /* Make the table */
 status=H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS, 
                         dst_size1,field_names, dst_offset1, field_type, 
                         chunk_size, fill_data, compress, p_data  );
 
 /* Insert the new field at the end of the field list */
 position = NFIELDS;
 status=H5TBinsert_field( file_id, TABLE_NAME, "New Field", field_type_new, position, 
  fill_data_new, data );

 /* Get table info  */
 status=H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );

 /* print */
 printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
 
 /* Close the file. */
 H5Fclose( file_id );
 
 return 0;
}
Example no. 5
int main( void )
{
 typedef struct Particle 
 {
  char   name[16];
  int    lati;
  int    longi;
  float  pressure;
  double temperature; 
 } Particle;

 Particle  dst_buf[ NRECORDS + NRECORDS_INS ];

 /* Calculate the size and the offsets of our struct members in memory */
 size_t dst_size =  sizeof( Particle );
 size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
                                HOFFSET( Particle, lati ),
                                HOFFSET( Particle, longi ),
                                HOFFSET( Particle, pressure ),
                                HOFFSET( Particle, temperature )};
 size_t dst_sizes[NFIELDS] = { sizeof( dst_buf[0].name),
                               sizeof( dst_buf[0].lati),
                               sizeof( dst_buf[0].longi),
                               sizeof( dst_buf[0].pressure),
                               sizeof( dst_buf[0].temperature)};

 /* Define an array of Particles */
 Particle  p_data[NRECORDS] = { 
 {"zero",0,0, 0.0f, 0.0},
 {"one",10,10, 1.0f, 10.0},
 {"two",  20,20, 2.0f, 20.0},
 {"three",30,30, 3.0f, 30.0},
 {"four", 40,40, 4.0f, 40.0},
 {"five", 50,50, 5.0f, 50.0},
 {"six",  60,60, 6.0f, 60.0},
 {"seven",70,70, 7.0f, 70.0}
  };

 /* Define field information */
 const char *field_names[NFIELDS]  = 
 { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
 hid_t      field_type[NFIELDS];
 hid_t      string_type;
 hid_t      file_id;
 hsize_t    chunk_size = 10;
 int        compress  = 0;
 Particle   fill_data[1] = 
 { {"no data",-1,-1, -99.0f, -99.0} };   /* Fill value particle */ 
 hsize_t    start1;                      /* Record to start reading from 1st table */
 hsize_t    nrecords;                    /* Number of records to insert */
 hsize_t    start2;                      /* Record to start writing in 2nd table */
 herr_t     status;
 int        i;
 hsize_t    nfields_out;
 hsize_t    nrecords_out;
 
 /* Initialize the field field_type */
 string_type = H5Tcopy( H5T_C_S1 );
 H5Tset_size( string_type, 16 );
 field_type[0] = string_type;
 field_type[1] = H5T_NATIVE_INT;
 field_type[2] = H5T_NATIVE_INT;
 field_type[3] = H5T_NATIVE_FLOAT;
 field_type[4] = H5T_NATIVE_DOUBLE;
 
 /* Create a new file using default properties. */
 file_id = H5Fcreate( "ex_table_09.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
  
 /* Make 2 tables: TABLE2_NAME is empty  */
 status=H5TBmake_table( "Table Title",file_id,TABLE1_NAME,NFIELDS,NRECORDS, 
                         dst_size,field_names, dst_offset, field_type, 
                         chunk_size, fill_data, compress, p_data  );
 
 status=H5TBmake_table( "Table Title",file_id,TABLE2_NAME,NFIELDS,NRECORDS, 
                         dst_size,field_names, dst_offset, field_type, 
                         chunk_size, fill_data, compress, NULL  );
 
 
 /* Add 2 records from TABLE1_NAME to TABLE2_NAME  */
 start1    = 3;      
 nrecords  = NRECORDS_INS; 
 start2    = 6;      
 status=H5TBadd_records_from( file_id, TABLE1_NAME, start1, nrecords, TABLE2_NAME, start2 );

 /* read TABLE2_NAME: it should have 2 more records now */
 status=H5TBread_table( file_id, TABLE2_NAME, dst_size, dst_offset, dst_sizes, dst_buf );

 /* Get table info  */
 status=H5TBget_table_info (file_id,TABLE2_NAME, &nfields_out, &nrecords_out );

 /* print */
 printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
  
 /* print it by rows */
 for (i=0; i<nrecords_out; i++) {
  printf ("%-5s %-5d %-5d %-5f %-5f", 
   dst_buf[i].name,
   dst_buf[i].lati,
   dst_buf[i].longi,
   dst_buf[i].pressure,
   dst_buf[i].temperature);
  printf ("\n");
 }
 
 /* Close the file. */
 H5Fclose( file_id );
 
 return 0;
}
Example no. 6
IAS_RLUT_LINEARIZATION_PARAMS *ias_rlut_read_linearization_params
(
    const IAS_RLUT_IO *rlut,         /* I: Open RLUT file */
    int band_number,                 /* I: Current RLUT band number */
    int sca_number,                  /* I: Current SCA number */
    int num_detectors                /* I: Number of detectors in the
                                        current band/SCA */
)
{
    IAS_RLUT_LINEARIZATION_PARAMS *linearization_params = NULL;
                                               /* Pointer to an array of
                                                  data structures containing
                                                  the linearization
                                                  parameters for all detectors
                                                  in the current band/SCA */
    char bandsca_parameter_name[IAS_RLUT_BANDSCA_GROUP_NAME_LENGTH + 1];
                                               /* Linearization parameter
                                                  group name for the current
                                                  band/SCA */
    const char *field_names[IAS_RLUT_PARAM_NFIELDS];
                                               /* Name of each linearization
                                                  parameter */
    size_t offsets[IAS_RLUT_PARAM_NFIELDS];    /* Data offsets in
                                                  LINEARIZATION_PARAMS
                                                  data structure for each
                                                  field*/
    size_t field_sizes[IAS_RLUT_PARAM_NFIELDS];/* Size of each field */
    hid_t field_types[IAS_RLUT_PARAM_NFIELDS]; /* Data type for each field */
    hid_t fields_to_close[IAS_RLUT_PARAM_NFIELDS];
                                               /* Flags indicating open
                                                  fields needing to be
                                                  closed */
    hid_t linearization_param_group_id;        /* Root
                                                  LINEARIZATION_PARAMETERS
                                                  group */
    hid_t bandsca_group_id;                    /* SCA group handle */
    hsize_t nfields = 0;                       /* Number of fields in the
                                                  table description */
    hsize_t nrecords = 0;                      /* Number of records in the
                                                  table description (should
                                                  equal the number of
                                                  detectors) */
    hsize_t type_size;                         /* Size of entire data
                                                  structure to be read into */
    herr_t hdf_status;                         /* HDF5 error status flag */
    int status;                                /* IAS status */


    /* Make sure the RLUT file is actually open */
    if ((rlut == NULL) || (rlut->file_id < 0))
    {
        IAS_LOG_ERROR("NULL pointer to IAS_RLUT_IO data block, or no RLUT "
            "file has been opened");
        return NULL; 
    }

    /* Construct the group name for the current band/SCA */
    status = snprintf(bandsca_parameter_name, sizeof(bandsca_parameter_name),
        "%s/Band%02d_SCA%02d", LINEARIZATION_PARAMS_GROUP_NAME, band_number,
        sca_number);
    if ((status < 0) || (status >= sizeof(bandsca_parameter_name)))
    {
        IAS_LOG_ERROR("Creating group name for band %d SCA %d "
            "linearization parameters", band_number, sca_number);
        return NULL;
    }

    /* Open the root group*/
    linearization_param_group_id = H5Gopen(rlut->file_id,
        LINEARIZATION_PARAMS_GROUP_NAME, H5P_DEFAULT);
    if (linearization_param_group_id < 0)
    {
        IAS_LOG_ERROR("Opening root linearization parameters group");
        return NULL;
    }

    /* Try to open the group for the current band/SCA */
    bandsca_group_id = H5Gopen(linearization_param_group_id,
        bandsca_parameter_name, H5P_DEFAULT);
    if (bandsca_group_id < 0)
    {
        IAS_LOG_ERROR("Opening band %d SCA %d linearization parameter group",
            band_number, sca_number);
        H5Gclose(linearization_param_group_id);
        return NULL;
    }

    /* Build the table definition */
    status = ias_rlut_build_linearization_params_table_description(offsets,
        field_names, field_types, fields_to_close, field_sizes);
    if (status != SUCCESS)
    {
        IAS_LOG_ERROR("Building linearization parameter table description");
        H5Gclose(bandsca_group_id);
        H5Gclose(linearization_param_group_id);
        return NULL;
    }

    /* Get the number of fields and records */
    hdf_status = H5TBget_table_info(bandsca_group_id,
        LINEARIZATION_PARAMS_DATASET_NAME, &nfields, &nrecords);
    if (hdf_status < 0)
    {
        IAS_LOG_ERROR("Getting parameter table information for band %d SCA "
            "%d", band_number, sca_number);
        ias_rlut_cleanup_table_description(fields_to_close,
            IAS_RLUT_PARAM_NFIELDS);
        H5Gclose(bandsca_group_id);
        H5Gclose(linearization_param_group_id);
        return NULL;
    }
    else if (nfields != IAS_RLUT_PARAM_NFIELDS)
    {
        IAS_LOG_ERROR("Number of defined fields %d not equal to number of "
            "returned fields %d", IAS_RLUT_PARAM_NFIELDS, (int)nfields);
        ias_rlut_cleanup_table_description(fields_to_close,
            IAS_RLUT_PARAM_NFIELDS);
        H5Gclose(bandsca_group_id);
        H5Gclose(linearization_param_group_id);
        return NULL;
    }
    else if (nrecords != num_detectors)
    {
        IAS_LOG_ERROR("Band %d SCA %d parameter table should have %d "
            "records, found %d records instead", band_number, sca_number,
            num_detectors, (int)nrecords);
        ias_rlut_cleanup_table_description(fields_to_close,
            IAS_RLUT_PARAM_NFIELDS);
        H5Gclose(bandsca_group_id);
        H5Gclose(linearization_param_group_id);
        return NULL;
    }

    /* Allocate the parameter data buffer for the current band/SCA */
    linearization_params = malloc(
        num_detectors * sizeof(IAS_RLUT_LINEARIZATION_PARAMS));
    if (linearization_params == NULL)
    {
        IAS_LOG_ERROR("Allocating linearization parameter data buffer for "
            "band %d SCA %d", band_number, sca_number);
        ias_rlut_cleanup_table_description(fields_to_close,
            IAS_RLUT_PARAM_NFIELDS);
        H5Gclose(bandsca_group_id);
        H5Gclose(linearization_param_group_id);
        return NULL;
    }

    /* Read the parameter set for the current band/SCA */
    type_size = sizeof(*linearization_params);
    hdf_status = H5TBread_table(bandsca_group_id,
        LINEARIZATION_PARAMS_DATASET_NAME, type_size, offsets,
        field_sizes, linearization_params);

    /* Cleanup the table description */
    ias_rlut_cleanup_table_description(fields_to_close,
        IAS_RLUT_PARAM_NFIELDS);

    /* Check the return status from the read */
    if (hdf_status < 0)
    {
        IAS_LOG_ERROR("Reading parameters for band %d SCA %d", band_number,
            sca_number);
        free(linearization_params);
        linearization_params = NULL;
    }

    /* Close the group for the current band/SCA */
    hdf_status = H5Gclose(bandsca_group_id);
    if (hdf_status < 0)
    {
        IAS_LOG_ERROR("Closing band %d SCA %d linearization parameter group",
            band_number, sca_number);
        if (linearization_params)
        {
            free(linearization_params);
            linearization_params = NULL;
        }
    }

    /* Close the main linearization parameter group */
    hdf_status = H5Gclose(linearization_param_group_id);
    if (hdf_status < 0)
    {
        IAS_LOG_ERROR("Closing root linearization parameters group");
        if (linearization_params)
        {
            free(linearization_params);
            linearization_params = NULL;
        }
    }

    return linearization_params;
}   /* END ias_rlut_read_linearization_params */
Example no. 7
void bright::Reactor1G::loadlib(std::string lib)
{
    // Loads appropriate libraries for the reactor and makes them into burnup parameters [F, pi(F), di(F), BUi(F), Tij(F)].

    // HDF5 types
    hid_t  rlib;
    herr_t rstat;

    rlib = H5Fopen (lib.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);		// Reactor Library

    // Initializes Burnup Parameters...
    hsize_t dimFromIso[1];
    hsize_t dimToIso[1];

    rstat = H5LTget_dataset_info(rlib, "/FromIso_zz", dimFromIso, NULL, NULL);
    rstat = H5LTget_dataset_info(rlib, "/ToIso_zz",   dimToIso,   NULL, NULL);

#ifdef _WIN32
    int * FromIso;
    int * ToIso;

    FromIso = new int [dimFromIso[0]];
    ToIso   = new int [dimToIso[0]];
#else
    int FromIso [dimFromIso[0]];
    int ToIso   [dimToIso[0]];
#endif

    rstat = H5LTread_dataset_int(rlib, "/FromIso_zz", FromIso);
    rstat = H5LTread_dataset_int(rlib, "/ToIso_zz",   ToIso);

    I.clear();
    I.insert(&FromIso[0], &FromIso[dimFromIso[0]]);
    J.clear();
    J.insert(&ToIso[0],   &ToIso[dimToIso[0]]);

    // Get Fluence Vector
    hsize_t dimsF[1]; // Read in number of data points
    rstat = H5LTget_dataset_info(rlib, "/Fluence", dimsF, NULL, NULL);
    int lenF = dimsF[0];

    // Make temp array
#ifdef _WIN32
    float * tempF;
    tempF = new float [lenF];
#else
    float tempF [lenF];
#endif

    rstat = H5LTread_dataset_float(rlib, "/Fluence", tempF);
    F.assign(&tempF[0], &tempF[lenF]);  // Fluence in [n/kb]

    for (nuc_iter i = I.begin(); i != I.end(); i++ )
    {
        std::string iso = pyne::nucname::name(*i);

        // Build BUi_F_
#ifdef _WIN32
        float * tempBUi;
        tempBUi = new float [lenF];
#else
        float tempBUi [lenF];
#endif
        rstat = H5LTread_dataset_float(rlib, ("/Burnup/" + iso).c_str(), tempBUi);
        BUi_F_[*i].assign(&tempBUi[0], &tempBUi[lenF]);

        // Build pi_F_
#ifdef _WIN32
        float * temppi;
        temppi = new float [lenF];
#else
        float temppi [lenF];
#endif
        rstat = H5LTread_dataset_float(rlib, ("/Production/" + iso).c_str(), temppi);
        pi_F_[*i].assign(&temppi[0], &temppi[lenF]);
        pi_F_[*i][0] = pyne::solve_line(0.0, F[2], pi_F_[*i][2], F[1], pi_F_[*i][1]);

        // Build di_F_
#ifdef _WIN32
        float * tempdi;
        tempdi = new float [lenF];
#else
        float tempdi [lenF];
#endif
        rstat = H5LTread_dataset_float(rlib, ("/Destruction/" + iso).c_str(), tempdi);
        di_F_[*i].assign(&tempdi[0], &tempdi[lenF]);
        di_F_[*i][0] = pyne::solve_line(0.0, F[2], di_F_[*i][2], F[1], di_F_[*i][1]);

        // Build Tij_F_
        for (int jn = 0; jn < dimToIso[0] ; jn++)
        {
            int j = ToIso[jn];
            std::string jso = pyne::nucname::name(j);

#ifdef _WIN32
            float * tempTij;
            tempTij = new float [lenF];
#else
            float tempTij [lenF];
#endif
            rstat = H5LTread_dataset_float(rlib, ("/Transmutation/" + iso + "/" + jso).c_str(), tempTij);
            Tij_F_[*i][j].assign(&tempTij[0], &tempTij[lenF]);
        };
    };
    rstat = H5Fclose(rlib);

    // Now get microscopic XS data from KAERI...
    // ...But only if the disadvantage factor is used.
    if (!use_zeta)
        return;

    // HDF5 types
    hid_t  kdblib;			// KaeriData.h5 file reference
    herr_t kdbstat;			// File status

    hsize_t xs_nfields, xs_nrows; // Number of rows and fields (named columns) in XS table

    // open the file
    kdblib = H5Fopen ( (bright::BRIGHT_DATA + "/KaeriData.h5").c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);	// KAERI Data Library

    // Get Thermal Maxwell Average Table & Field Data Dimensions
    kdbstat = H5TBget_table_info(kdblib, "/XS/ThermalMaxwellAve", &xs_nfields, &xs_nrows);

    // Creating an empty array of character strings is tricky,
    // because character strings are arrays themselves!
    char ** xs_field_names = new char * [xs_nfields];
    for (int n = 0; n < xs_nfields; n++)
        xs_field_names[n] = new char [50];

#ifdef _WIN32
    size_t * xs_field_sizes;
    size_t * xs_field_offsets;

    xs_field_sizes   = new size_t [xs_nfields];
    xs_field_offsets = new size_t [xs_nfields];
#else
    size_t xs_field_sizes   [xs_nfields];
    size_t xs_field_offsets [xs_nfields];
#endif

    size_t xs_type_size;
    kdbstat = H5TBget_field_info(kdblib, "/XS/ThermalMaxwellAve", xs_field_names, xs_field_sizes, xs_field_offsets, &xs_type_size);

    // Read the "isozz" column so that we can intelligently pick out our data
    int isozz_n = bright::find_index_char( (char *) "isozz", xs_field_names, xs_nfields);
    int * isozz = new int [xs_nrows];
    const size_t temp_xs_field_sizes_isozz_n [1] = {xs_field_sizes[isozz_n]};
    kdbstat = H5TBread_fields_name(kdblib, "/XS/ThermalMaxwellAve", xs_field_names[isozz_n], 0, xs_nrows, sizeof(int), 0, temp_xs_field_sizes_isozz_n, isozz);

    // Now, load the XS that we need.
    // NOTE: This maps metastable isotopes to their stable versions if they can't be found!
    int sigma_a_n = bright::find_index_char( (char *) "sigma_a", xs_field_names, xs_nfields);
    int sigma_s_n = bright::find_index_char( (char *) "sigma_s", xs_field_names, xs_nfields);

    for (std::set<int>::iterator i = bright::track_nucs.begin(); i != bright::track_nucs.end(); i++)
    {
        int iso_n = bright::find_index<int>(*i, isozz, xs_nrows);

        if (iso_n < 0)
            iso_n = bright::find_index<int>(10*((*i)/10), isozz);

        if (iso_n < 0)
        {
            sigma_a_therm[*i] = 0.0;
            sigma_s_therm[*i] = 0.0;
            continue;
        };

        double * iso_sig_a = new double [1];
        double * iso_sig_s = new double [1];
        const size_t temp_xs_field_sizes_sigma_a_n [1] = {xs_field_sizes[sigma_a_n]};
        const size_t temp_xs_field_sizes_sigma_s_n [1] = {xs_field_sizes[sigma_s_n]};

        kdbstat = H5TBread_fields_name(kdblib, "/XS/ThermalMaxwellAve", xs_field_names[sigma_a_n], iso_n, 1, sizeof(double), 0, temp_xs_field_sizes_sigma_a_n, iso_sig_a);
        kdbstat = H5TBread_fields_name(kdblib, "/XS/ThermalMaxwellAve", xs_field_names[sigma_s_n], iso_n, 1, sizeof(double), 0, temp_xs_field_sizes_sigma_s_n, iso_sig_s);
        sigma_a_therm[*i] = iso_sig_a[0];
        sigma_s_therm[*i] = iso_sig_s[0];
    };

    kdbstat = H5Fclose(kdblib);

    return;
};
Example no. 8
int main( void )
{
 typedef struct Particle
 {
  char   name[16];
  int    lati;
  int    longi;
  float  pressure;
  double temperature;
 } Particle;

 /* Define an array of Particles */
 Particle  p_data[NRECORDS] = {
 {"zero",0,0, 0.0f, 0.0},
 {"one",10,10, 1.0f, 10.0},
 {"two",  20,20, 2.0f, 20.0},
 {"three",30,30, 3.0f, 30.0},
 {"four", 40,40, 4.0f, 40.0},
 {"five", 50,50, 5.0f, 50.0},
 {"six",  60,60, 6.0f, 60.0},
 {"seven",70,70, 7.0f, 70.0}
  };

 Particle  dst_buf[ 2 * NRECORDS ];
 /* Calculate the size and the offsets of our struct members in memory */
 size_t dst_size =  sizeof( Particle );
 size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
                                HOFFSET( Particle, lati ),
                                HOFFSET( Particle, longi ),
                                HOFFSET( Particle, pressure ),
                                HOFFSET( Particle, temperature )};
 size_t dst_sizes[NFIELDS] = { sizeof( dst_buf[0].name),
                               sizeof( dst_buf[0].lati),
                               sizeof( dst_buf[0].longi),
                               sizeof( dst_buf[0].pressure),
                               sizeof( dst_buf[0].temperature)};


 /* Define field information */
 const char *field_names[NFIELDS]  =
 { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
 hid_t      field_type[NFIELDS];
 hid_t      string_type;
 hid_t      file_id;
 hsize_t    chunk_size = 10;
 int        compress  = 0;
 int        *fill_data = NULL;
 herr_t     status;
 hsize_t    nfields_out;
 hsize_t    nrecords_out;
 int        i;

 /* Initialize the field field_type */
 string_type = H5Tcopy( H5T_C_S1 );
 H5Tset_size( string_type, 16 );
 field_type[0] = string_type;
 field_type[1] = H5T_NATIVE_INT;
 field_type[2] = H5T_NATIVE_INT;
 field_type[3] = H5T_NATIVE_FLOAT;
 field_type[4] = H5T_NATIVE_DOUBLE;

 /* Create a new file using default properties. */
 file_id = H5Fcreate( "ex_table_10.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );

 /* Make two tables */
 status=H5TBmake_table( "Table Title",file_id,TABLE1_NAME,NFIELDS,NRECORDS,
                         dst_size,field_names, dst_offset, field_type,
                         chunk_size, fill_data, compress, p_data  );

 status=H5TBmake_table( "Table Title",file_id,TABLE2_NAME,NFIELDS,NRECORDS,
                         dst_size,field_names, dst_offset, field_type,
                         chunk_size, fill_data, compress, p_data  );

 /* Combine the two tables into a third in the same file  */
 status=H5TBcombine_tables( file_id, TABLE1_NAME, file_id, TABLE2_NAME, TABLE3_NAME );

 /* read the combined table */
 status=H5TBread_table( file_id, TABLE3_NAME, dst_size, dst_offset, dst_sizes, dst_buf );

 /* Get table info  */
 status=H5TBget_table_info (file_id,TABLE3_NAME, &nfields_out, &nrecords_out );

 /* print */
 printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);

 /* print it by rows */
 for (i=0; i<nrecords_out; i++) {
  printf ("%-5s %-5d %-5d %-5f %-5f",
   dst_buf[i].name,
   dst_buf[i].lati,
   dst_buf[i].longi,
   dst_buf[i].pressure,
   dst_buf[i].temperature);
  printf ("\n");
 }

  /* close type */
 H5Tclose( string_type );

 /* close the file */
 H5Fclose( file_id );

 return 0;

}
Example no. 9
/*+++++++++++++++++++++++++ SDMF version 2.4 ++++++++++++++++++++++++++++++*/
float SDMF_orbitPhaseDiff( int orbit )
{
     register hsize_t ni;

     char   rsp_file[MAX_STRING_LENGTH];

     hsize_t nfields, num_roe;
     hid_t   file_id;
     int     *orbitList = NULL;
     float   orbitPhaseDiff = 0.092f;

     const char   roe_flname[]  = "ROE_EXC_all.h5";
     const char   roe_tblname[] = "roe_entry";
/*
 * open ROE HDF5-file (read-only)
 */
     (void) snprintf( rsp_file, MAX_STRING_LENGTH, "./%s", roe_flname );
     H5E_BEGIN_TRY {
          file_id = H5Fopen( rsp_file, H5F_ACC_RDONLY, H5P_DEFAULT );
     } H5E_END_TRY;
     if ( file_id < 0 ) {
          (void) snprintf( rsp_file, MAX_STRING_LENGTH, 
                           "%s/%s", DATA_DIR, roe_flname );
          file_id = H5Fopen( rsp_file, H5F_ACC_RDONLY, H5P_DEFAULT );
          if ( file_id < 0 )
               NADC_GOTO_ERROR( NADC_ERR_HDF_FILE, rsp_file );
     }
/*
 * read ROE table info
 */
     (void) H5TBget_table_info( file_id, roe_tblname, &nfields, &num_roe );
/*
 * read Orbit column
 */
     if ( num_roe == 0 ) return orbitPhaseDiff;

     orbitList = (int *) malloc( num_roe * sizeof( int ));
     if ( orbitList == NULL ) 
	  NADC_GOTO_ERROR( NADC_ERR_ALLOC, "orbitList" );
     if ( H5LTread_dataset_int( file_id, "orbitList", orbitList ) < 0 )
	  NADC_GOTO_ERROR( NADC_ERR_HDF_RD, "orbitList" );

     for ( ni = 0; ni < num_roe; ni++ ) {
	  if ( orbitList[ni] == orbit ) break;
     }

     if ( ni < num_roe ) {
	  double ECL_Exit, ECL_Entry, Period;

	  const size_t field_offset = 0;
	  const size_t dst_sizes    = sizeof( double );

	  H5TBread_fields_name( file_id, roe_tblname, "ECL_EXIT", ni, 1, 
				sizeof(double), &field_offset, &dst_sizes, 
				&ECL_Exit );
	  H5TBread_fields_name( file_id, roe_tblname, "ECL_ENTRY", ni, 1,
				sizeof(double), &field_offset, &dst_sizes, 
				&ECL_Entry );
	  H5TBread_fields_name( file_id, roe_tblname, "PERIOD", ni, 1,
				sizeof(double), &field_offset, &dst_sizes, 
				&Period );
	  orbitPhaseDiff = (float) ((ECL_Entry - ECL_Exit) / Period - 0.5) / 2;
     }
     (void) H5Fclose( file_id );
done:
     if ( orbitList != NULL ) free( orbitList );
     return orbitPhaseDiff;
}
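Examples no. 7 and no. 9 both read single columns with H5TBread_fields_name; when one field is read into a scalar, the destination offset is simply 0 and the size array holds that field's in-memory size. A minimal sketch of that pattern, again with hypothetical names ("example.h5", "/MyTable", field "Value"):

#include <stdio.h>
#include "hdf5.h"
#include "hdf5_hl.h"

int main(void)
{
    double value = 0.0;
    const size_t offset = 0;              /* single field read into a scalar */
    const size_t size   = sizeof(double); /* in-memory size of that field    */

    hid_t file_id = H5Fopen("example.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    if (file_id < 0) return 1;

    /* Read the field "Value" of record 0 only */
    if (H5TBread_fields_name(file_id, "/MyTable", "Value", 0, 1,
                             sizeof(double), &offset, &size, &value) < 0) {
        H5Fclose(file_id);
        return 1;
    }
    printf("Value = %f\n", value);

    H5Fclose(file_id);
    return 0;
}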
Example no. 10
/*----------------------------------------------------------------------
 NAME:                      read_ephemeris_data

 PURPOSE:  Reads ephemeris data from HDF5-formatted tables in the
           ancillary data file

 RETURNS:  Pointer to an allocated/populated IAS_ANC_EPHEMERIS_DATA
           structure if successful, NULL pointer if allocation fails
           or data read fails

------------------------------------------------------------------------*/
static IAS_ANC_EPHEMERIS_DATA *read_ephemeris_data
(
    hid_t hdf_file_id,              /* I: HDF5 ancillary data file handle */
    int file_format_version         /* I: current file format version */
)
{
    const char *field_names[EPHEMERIS_NFIELDS];

    double epoch_time[3] = {0.0, 0.0, 0.0};
                                    /* Temporary buffer for epoch time data */

    int status = SUCCESS;           /* Function return status code  */

    size_t field_offsets[EPHEMERIS_NFIELDS];
    size_t field_sizes[EPHEMERIS_NFIELDS];

    herr_t hdf_error_status = -1;   /* HDF5 I/O error status   */

    hid_t field_types[EPHEMERIS_NFIELDS];
    hid_t fields_to_close[EPHEMERIS_NFIELDS];

    hsize_t nfields = 0;            /* Number of table fields per record */
    hsize_t nrecords = 0;           /* Number of records in table */
    hsize_t type_size = 0;          /* Size of data structure to read */

    IAS_ANC_EPHEMERIS_DATA *data = NULL;
                                    /* Pointer to ephemeris data buffer */


    /* Read the ephemeris epoch time from the ancillary data file. */
    status = read_epoch_time(hdf_file_id,
        EPHEMERIS_EPOCH_TIME_ATTRIBUTE_NAME, epoch_time);
    if (status != SUCCESS)
    {
        IAS_LOG_ERROR("Reading ephemeris epoch time attribute");
        return NULL;
    }

    /* Build the ephemeris table definition. */
    status = ias_ancillary_build_ephemeris_table_definition(field_names,
        field_offsets, field_types, field_sizes, fields_to_close);  
    if (status != SUCCESS)
    {
        IAS_LOG_ERROR("Building ephemeris table definition");
        return NULL;
    }

    /* Get the number of records in the ephemeris data table.  We need
       this before we can allocate the proper-sized IAS_ANC_EPHEMERIS_DATA
       buffer.   If the table doesn't exist or there's a table defined
       with 0 records, consider it an error. */
    hdf_error_status = H5TBget_table_info(hdf_file_id,
        EPHEMERIS_DATA_DATASET_NAME, &nfields, &nrecords);
    if (hdf_error_status < 0)
    {
        IAS_LOG_ERROR("Obtaining ephemeris table information");
        ias_ancillary_cleanup_table_definition(fields_to_close,
            EPHEMERIS_NFIELDS);
        return NULL;
    }
    else if (nrecords < 1)
    {
        IAS_LOG_ERROR("No records found in ephemeris data table");
        ias_ancillary_cleanup_table_definition(fields_to_close,
            EPHEMERIS_NFIELDS);
        return NULL;
    }

    /* Allocate the ephemeris data buffer. */
    data = ias_ancillary_allocate_ephemeris(nrecords);
    if (data == NULL)
    {
        IAS_LOG_ERROR("Allocating ephemeris data buffer");
        ias_ancillary_cleanup_table_definition(fields_to_close,
            EPHEMERIS_NFIELDS);
        return NULL;
    }
    else
    {
        /* Copy the ephemeris epoch time info to the data structure. */
        memcpy(data->utc_epoch_time, epoch_time, sizeof(epoch_time));

        /* Read the table contents into the data structure. */
        type_size = sizeof(data->records);
        hdf_error_status = H5TBread_table(hdf_file_id,
            EPHEMERIS_DATA_DATASET_NAME, type_size,
            field_offsets, field_sizes, data->records);
        if (hdf_error_status < 0)
        {
            IAS_LOG_ERROR("Reading ephemeris data table");
            ias_ancillary_free_ephemeris(data);
            ias_ancillary_cleanup_table_definition(fields_to_close,
                EPHEMERIS_NFIELDS);
            return NULL;
        }
    }

    /* Close any "open" datatype field objects. */
    ias_ancillary_cleanup_table_definition(fields_to_close,
        EPHEMERIS_NFIELDS);

    /* Done */
    return data;
}
Example no. 11
int main( void )
{
 typedef struct Particle
 {
  char   name[16];
  int    lati;
  int    longi;
  float  pressure;
  double temperature;
 } Particle;

 /* Calculate the size and the offsets of our struct members in memory */
 size_t dst_size =  sizeof( Particle );
 size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
                                HOFFSET( Particle, lati ),
                                HOFFSET( Particle, longi ),
                                HOFFSET( Particle, pressure ),
                                HOFFSET( Particle, temperature )};

  /* Define field information */
  const char *field_names[NFIELDS]  =
  { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
  hid_t      field_type[NFIELDS];
  hid_t      string_type;
  hid_t      file_id;
  hsize_t    chunk_size = 10;
  Particle   fill_data[1] =
  { {"no data",-1,-1, -99.0f, -99.0} };   /* Fill value particle */
  int        compress  = 0;
  hsize_t    nfields_out;
  hsize_t    nrecords_out;

  /* Initialize field_type */
  string_type = H5Tcopy( H5T_C_S1 );
  H5Tset_size( string_type, 16 );
  field_type[0] = string_type;
  field_type[1] = H5T_NATIVE_INT;
  field_type[2] = H5T_NATIVE_INT;
  field_type[3] = H5T_NATIVE_FLOAT;
  field_type[4] = H5T_NATIVE_DOUBLE;

 /* Create a new file using default properties. */
 file_id = H5Fcreate( "ex_table_06.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );

 /* Make a table */
 H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,dst_size,
                       field_names, dst_offset, field_type,
                       chunk_size, fill_data, compress, NULL);

 /* Get table info  */
 H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );

 /* print */
 printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);

  /* close type */
 H5Tclose( string_type );

 /* close the file */
 H5Fclose( file_id );

 return 0;
}
Example no. 12
hsize_t IbHdf5::numFields() const
{
    // H5TBget_table_info fills the field count first, then the record count
    H5TBget_table_info(m_fid, m_tableName.toLatin1().data(), m_numFields, m_numRecords);
    return *m_numFields;
}