Example No. 1
int mcmc_set_table(mcmc_configuration config, const char* field_names[])
{
    int n_param = config.n_param;
    hid_t field_types[n_param];

    size_t dst_offsets[n_param];
    size_t dst_sizes[n_param];

    size_t dst_size = n_param*sizeof(double);

    int i;
    for (i=0; i<n_param; i++)
    {
	field_types[i] = H5T_NATIVE_DOUBLE;	    
	dst_sizes[i] = sizeof(double);
	dst_offsets[i] = i*sizeof(double);
    }

    hsize_t chunk_size = 10;
    int *fill_data = NULL;
    int compress = 0;

    herr_t status;
    status = H5TBmake_table("Traces", config.file_id, "Traces", n_param, 1,
                            dst_size, field_names, dst_offsets, field_types,
                            chunk_size, fill_data, compress, config.parameters);
    if (status < 0)
        return -1;

    status = H5TBmake_table("Proposed", config.file_id, "Proposed", n_param, 0,
                            dst_size, field_names, dst_offsets, field_types,
                            chunk_size, fill_data, compress, config.parameters);
    if (status < 0)
        return -1;

    return 0;
}
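Both tables above use a record layout of n_param contiguous doubles (field i at offset i*sizeof(double)). As a minimal sketch, assuming that same layout, a single sample could later be appended to the "Traces" table with H5TBappend_records; the helper name append_trace below is illustrative and not part of the original mcmc code.

#include <hdf5.h>
#include <hdf5_hl.h>

/* Illustrative helper (not from the original code): append one sample of
 * n_param doubles to the "Traces" table created by mcmc_set_table(). */
int append_trace(hid_t file_id, int n_param, const double *sample)
{
    size_t dst_offsets[n_param];
    size_t dst_sizes[n_param];
    size_t dst_size = n_param * sizeof(double);
    int i;

    for (i = 0; i < n_param; i++)
    {
        dst_offsets[i] = i * sizeof(double);   /* field i starts at i*sizeof(double) */
        dst_sizes[i] = sizeof(double);
    }

    /* Append a single record; the chunked table is extended as needed. */
    herr_t status = H5TBappend_records(file_id, "Traces", 1, dst_size,
                                       dst_offsets, dst_sizes, sample);
    return (status < 0) ? -1 : 0;
}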
Example No. 2
void SCIA_WR_H5_LADS( struct param_record param, unsigned int nr_lads,
		      const struct lads_scia *lads )
     /*@globals lads_size, lads_offs@*/
{
     hid_t   ads_id;
     hsize_t adim;
     hid_t   lads_type[NFIELDS];
     hid_t   type_id, temp_type00, temp_type01;

     const hbool_t compress = (param.flag_deflate == PARAM_SET) ? TRUE : FALSE;
     const char *lads_names[NFIELDS] = {
	  "dsr_time", "attach_flag", "corner_grd"
     };
/*
 * check number of LADS records
 */
     if ( nr_lads == 0 ) return;
/*
 * create/open group /ADS
 */
     ads_id = NADC_OPEN_HDF5_Group( param.hdf_file_id, "/ADS" );
     if ( ads_id < 0 ) NADC_RETURN_ERROR( NADC_ERR_HDF_GRP, "/ADS" );
/*
 * define user-defined data types of the Table-fields
 */
     temp_type00 = H5Topen( param.hdf_file_id, "mjd", H5P_DEFAULT );
     adim = NUM_CORNERS;
     type_id = H5Topen( param.hdf_file_id, "coord", H5P_DEFAULT );
     temp_type01 = H5Tarray_create( type_id, 1, &adim );

     lads_type[0] = temp_type00;
     lads_type[1] = H5T_NATIVE_UCHAR;
     lads_type[2] = temp_type01;
/*
 * create table
 */
     (void) H5TBmake_table( "lads", ads_id, TBL_NAME, NFIELDS,
			    nr_lads, lads_size, lads_names, lads_offs,
			    lads_type, nr_lads, NULL, compress, lads );
/*
 * close interface
 */
     (void) H5Tclose( type_id );
     (void) H5Tclose( temp_type00 );
     (void) H5Tclose( temp_type01 );
     (void) H5Gclose( ads_id );
}
Example No. 3
/* Create a normal HL table just like the HL examples do */
static int create_hl_table(hid_t fid)
{
    /* Calculate the offsets of the particle struct members in memory */
    size_t part_offset[NFIELDS] = { HOFFSET( particle_t, name ),
                                    HOFFSET( particle_t, lati ),
                                    HOFFSET( particle_t, longi ),
                                    HOFFSET( particle_t, pressure ),
                                    HOFFSET( particle_t, temperature )
                                  };

    /* Define field information */
    const char *field_names[NFIELDS]  =
    { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
    hid_t      field_type[NFIELDS];
    hid_t      string_type;
    hsize_t    chunk_size = 10;
    int        *fill_data = NULL;
    int        compress  = 0;
    herr_t     status;

    /* Initialize the field_type array */
    string_type = H5Tcopy( H5T_C_S1 );
    H5Tset_size( string_type, (size_t)16 );
    field_type[0] = string_type;
    field_type[1] = H5T_NATIVE_INT;
    field_type[2] = H5T_NATIVE_INT;
    field_type[3] = H5T_NATIVE_FLOAT;
    field_type[4] = H5T_NATIVE_DOUBLE;


    /*------------------------------------------------------------------------
    * H5TBmake_table
    *-------------------------------------------------------------------------
    */

    status=H5TBmake_table( "Table Title", fid, H5TB_TABLE_NAME, (hsize_t)NFIELDS,
                           (hsize_t)NRECORDS, sizeof(particle_t),
                           field_names, part_offset, field_type,
                           chunk_size, fill_data, compress, testPart  );

    if(status<0)
        return -1;
    else
        return 0;
}
Example No. 4
bool IbHdf5::writeRecords(Record2* recArray, int nRecords)
{
    qDebug() << "In IbHdf5::writeRecords()";
    qDebug() << "    m_filePath:" << m_filePath;
//    if (QFileInfo::exists(m_filePath)) {
//        QFile::remove(m_filePath);
//    }
//    if (H5Lexists(m_fid, m_tableName.toLatin1().data(), H5P_DEFAULT) == TRUE) {
//        qDebug() << "NUM HDF5 RECORDS IS:" << numRecords();
//        H5TBdelete_record(m_fid, m_tableName.toLatin1().data(), 0, numRecords());
//    }

    herr_t status;
    if (H5Lexists(m_fid, m_tableName.toLatin1().data(), H5P_DEFAULT) == FALSE) {
        qDebug() << "    creating new HDF5 table for:" << m_tableName;
        status = H5TBmake_table(m_tableName.toLatin1().data(), m_fid, m_tableName.toLatin1().data(),
                   m_nFields, nRecords, sizeof(Record2), m_fieldNames, m_dst_offset,
                   m_fieldType, m_chunkSize, m_fillData, m_compress, recArray);
    }
    else {
        status = H5TBappend_records(m_fid, m_tableName.toLatin1().data(), (hsize_t)nRecords,
                                    sizeof(Record2), m_dst_offset, m_dst_sizes, recArray);
    }
    return status >= 0;
}
Example No. 5
/*+++++++++++++++++++++++++ Main Program or Function +++++++++++++++*/
void SCIA_LV1_WR_H5_SRSN( struct param_record param, unsigned int nr_srsn,
			  const struct srsn_scia *srsn )
{
     hid_t   ads_id;
     hsize_t adim;
     herr_t  stat;

     hid_t   srsn_type[NFIELDS];

     const hbool_t compress = (param.flag_deflate == PARAM_SET) ? TRUE : FALSE;
     const size_t srsn_size = sizeof( struct srsn_scia );
     const char *srsn_names[NFIELDS] = { 
	  "mjd", "flag_mds", "flag_neu", "sun_spec_id", "avg_asm", "avg_esm", 
	  "avg_elev_sun", "dopp_shift", "wvlen_sun", "mean_sun", 
	  "precision_sun", "accuracy_sun", "etalon", "pmd_mean", "pmd_out"
     };
     const size_t srsn_offs[NFIELDS] = {
	  HOFFSET( struct srsn_scia, mjd ),
	  HOFFSET( struct srsn_scia, flag_mds ),
	  HOFFSET( struct srsn_scia, flag_neu ),
	  HOFFSET( struct srsn_scia, sun_spec_id ),
	  HOFFSET( struct srsn_scia, avg_asm ),
	  HOFFSET( struct srsn_scia, avg_esm ),
	  HOFFSET( struct srsn_scia, avg_elev_sun ),
	  HOFFSET( struct srsn_scia, dopp_shift ),
	  HOFFSET( struct srsn_scia, wvlen_sun ),
	  HOFFSET( struct srsn_scia, mean_sun ),
	  HOFFSET( struct srsn_scia, precision_sun ),
	  HOFFSET( struct srsn_scia, accuracy_sun ),
	  HOFFSET( struct srsn_scia, etalon ),
	  HOFFSET( struct srsn_scia, pmd_mean ),
	  HOFFSET( struct srsn_scia, pmd_out )
     };
/*
 * check number of SRSN records
 */
     if ( nr_srsn == 0 ) return;
/*
 * open/create group /ADS
 */
     ads_id = NADC_OPEN_HDF5_Group( param.hdf_file_id, "/ADS" );
     if ( ads_id < 0 ) NADC_RETURN_ERROR( NADC_ERR_HDF_GRP, "/ADS" );
/*
 * write SRSN data sets
 */
     adim = SCIENCE_PIXELS;
     srsn_type[0] = H5Topen( param.hdf_file_id, "mjd", H5P_DEFAULT );
     srsn_type[1] = H5Tcopy( H5T_NATIVE_UCHAR );
     srsn_type[2] = H5Tcopy( H5T_NATIVE_UCHAR );
     srsn_type[3] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( srsn_type[3], (size_t) 3 );
     srsn_type[4] = H5Tcopy( H5T_NATIVE_FLOAT );
     srsn_type[5] = H5Tcopy( H5T_NATIVE_FLOAT );
     srsn_type[6] = H5Tcopy( H5T_NATIVE_FLOAT );
     srsn_type[7] = H5Tcopy( H5T_NATIVE_FLOAT );
     adim = SCIENCE_PIXELS;
     srsn_type[8] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );
     srsn_type[9] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );
     srsn_type[10] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );
     srsn_type[11] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );
     srsn_type[12] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );
     adim = PMD_NUMBER;
     srsn_type[13] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );
     srsn_type[14] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );

     stat = H5TBmake_table( "srsn", ads_id, "NEW_SUN_REFERENCE", 
                            NFIELDS, 1, srsn_size, srsn_names,
                            srsn_offs, srsn_type, 1,
                            NULL, compress, srsn );
     if ( stat < 0 ) NADC_GOTO_ERROR( NADC_ERR_HDF_DATA, "srsn" );
/*
 * close interface
 */
 done:
     (void) H5Tclose( srsn_type[0] );
     (void) H5Tclose( srsn_type[1] );
     (void) H5Tclose( srsn_type[2] );
     (void) H5Tclose( srsn_type[3] );
     (void) H5Tclose( srsn_type[4] );
     (void) H5Tclose( srsn_type[5] );
     (void) H5Tclose( srsn_type[6] );
     (void) H5Tclose( srsn_type[7] );
     (void) H5Tclose( srsn_type[8] );
     (void) H5Tclose( srsn_type[9] );
     (void) H5Tclose( srsn_type[10] );
     (void) H5Tclose( srsn_type[11] );
     (void) H5Tclose( srsn_type[12] );
     (void) H5Tclose( srsn_type[13] );
     (void) H5Tclose( srsn_type[14] );
     (void) H5Gclose( ads_id );
}
Example No. 6
/*+++++++++++++++++++++++++ Main Program or Function +++++++++++++++*/
void SCIA_OL2_WR_H5_CLD( struct param_record param, unsigned int nr_cld,
			 const struct cld_sci_ol *cld )
{
     register unsigned int  nr;

     hid_t   grp_id;
     hbool_t compress;
     hsize_t adim;
     hvl_t   *vdata;
     hid_t   cld_type[NFIELDS];

     const char *cld_names[NFIELDS] = {
          "dsr_time", "quality_flag", "integr_time", "pmd_read", 
	  "cl_type_flags", "cloud_flags", "flag_output_flags", 
	  "num_aero_param", "pmd_read_cl", "dsr_length",
	  "surface_pres", "cl_frac", "cl_frac_err", "cl_top_pres", 
	  "cl_top_pres_err", "cl_opt_depth", "cl_opt_depth_err", 
	  "cl_reflectance", "cl_reflectance_err", 
	  "surface_reflectance", "surface_reflectance_err", 
	  "aero_abso_ind", "aero_ind_diag"
     };
/*
 * check number of CLD records
 */
     if ( nr_cld == 0 ) return;
/*
 * set HDF5 boolean variable for compression
 */
     if ( param.flag_deflate == PARAM_SET )
          compress = TRUE;
     else
          compress = FALSE;
/*
 * create group /MDS
 */
     grp_id = NADC_OPEN_HDF5_Group( param.hdf_file_id, "/MDS" );
     if ( grp_id < 0 ) NADC_RETURN_ERROR( NADC_ERR_HDF_GRP, "/MDS" );
/*
 * define user-defined data types of the Table-fields
 */
     cld_type[0] = H5Topen( param.hdf_file_id, "mjd", H5P_DEFAULT );
     cld_type[1] = H5Tcopy( H5T_NATIVE_CHAR );
     cld_type[2] = H5Tcopy( H5T_NATIVE_USHORT );
     cld_type[3] = H5Tcopy( H5T_NATIVE_USHORT );
     cld_type[4] = H5Tcopy( H5T_NATIVE_USHORT );
     cld_type[5] = H5Tcopy( H5T_NATIVE_USHORT );
     cld_type[6] = H5Tcopy( H5T_NATIVE_USHORT );
     cld_type[7] = H5Tcopy( H5T_NATIVE_USHORT );
     adim = 2;
     cld_type[8] = H5Tarray_create( H5T_NATIVE_USHORT, 1, &adim );
     cld_type[9] = H5Tcopy( H5T_NATIVE_UINT );
     cld_type[10] = H5Tcopy( H5T_NATIVE_FLOAT );
     cld_type[11] = H5Tcopy( H5T_NATIVE_FLOAT );
     cld_type[12] = H5Tcopy( H5T_NATIVE_FLOAT );
     cld_type[13] = H5Tcopy( H5T_NATIVE_FLOAT );
     cld_type[14] = H5Tcopy( H5T_NATIVE_FLOAT );
     cld_type[15] = H5Tcopy( H5T_NATIVE_FLOAT );
     cld_type[16] = H5Tcopy( H5T_NATIVE_FLOAT );
     cld_type[17] = H5Tcopy( H5T_NATIVE_FLOAT );
     cld_type[18] = H5Tcopy( H5T_NATIVE_FLOAT );
     cld_type[19] = H5Tcopy( H5T_NATIVE_FLOAT );
     cld_type[20] = H5Tcopy( H5T_NATIVE_FLOAT );
     cld_type[21] = H5Tcopy( H5T_NATIVE_FLOAT );
     cld_type[22] = H5Tcopy( H5T_NATIVE_FLOAT );
/*
 * create table
 */
     (void) H5TBmake_table( "Cloud end Aerosol MDS", grp_id, "cld", NFIELDS, 
			    nr_cld, cld_size, cld_names, cld_offs, 
			    cld_type, nr_cld, NULL, compress, cld );
/*
 * close interface
 */
     for ( nr = 0; nr < NFIELDS; nr++ ) (void) H5Tclose( cld_type[nr] );
/*
 * +++++ create/write variable part of the CLOUDS_AEROSOL record
 */
     adim = (hsize_t) nr_cld;
/*
 * Additional aerosol parameters
 */
     vdata = (hvl_t *) malloc( nr_cld * sizeof(hvl_t) );
     if ( vdata == NULL ) NADC_RETURN_ERROR( NADC_ERR_ALLOC, "vdata" );
     nr = 0;
     do {
	  vdata[nr].len = (size_t) cld[nr].numaeropars;
	  if ( cld[nr].numaeropars > 0 ) {
	       vdata[nr].p = malloc( vdata[nr].len * sizeof(float) );
	       if ( vdata[nr].p == NULL ) {
		    free( vdata );
		    NADC_RETURN_ERROR( NADC_ERR_ALLOC, "vdata.p" );
	       }
	       (void) memcpy( vdata[nr].p , cld[nr].aeropars,
			      vdata[nr].len * sizeof(float) );
	  } else
	       vdata[nr].p = NULL;
     } while ( ++nr < nr_cld );
     NADC_WR_HDF5_Vlen_Dataset( compress, grp_id, "aeropars",
				H5T_NATIVE_FLOAT, 1, &adim, vdata );
/*
 * close interface
 */
     (void) H5Gclose( grp_id );
}
Example No. 7
int main( void )
{
 typedef struct Particle1 
 {
  char   name[16];
  int    lati;
  int    longi;
  float  pressure;
  double temperature; 
 } Particle1;
 
/* Define an array of Particles */
 Particle1  p_data[NRECORDS] = { 
 {"zero",0,0, 0.0f, 0.0},
 {"one",10,10, 1.0f, 10.0},
 {"two",  20,20, 2.0f, 20.0},
 {"three",30,30, 3.0f, 30.0},
 {"four", 40,40, 4.0f, 40.0},
 {"five", 50,50, 5.0f, 50.0},
 {"six",  60,60, 6.0f, 60.0},
 {"seven",70,70, 7.0f, 70.0}
  };
 
 /* Calculate the size and the offsets of our struct members in memory */
 size_t dst_size1 =  sizeof( Particle1 );
 size_t dst_offset1[NFIELDS] = { HOFFSET( Particle1, name ),
  HOFFSET( Particle1, lati ),
  HOFFSET( Particle1, longi ),
  HOFFSET( Particle1, pressure ),
  HOFFSET( Particle1, temperature )};
 
 /* Define field information */
 const char *field_names[NFIELDS]  = 
 { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
 hid_t      field_type[NFIELDS];
 hid_t      string_type;
 hid_t      file_id;
 hsize_t    chunk_size = 10;
 int        compress  = 0;
 Particle1  fill_data[1] = { {"no data",-1,-1, -99.0f, -99.0} };
 int        fill_data_new[1] = { -100 };
 hsize_t    position;
 herr_t     status; 
 hsize_t    nfields_out;
 hsize_t    nrecords_out;
 
 /* Define the inserted field information */
 hid_t      field_type_new = H5T_NATIVE_INT;
 int        data[NRECORDS] = { 0,1,2,3,4,5,6,7 };
 
 /* Initialize the field type */
 string_type = H5Tcopy( H5T_C_S1 );
 H5Tset_size( string_type, 16 );
 field_type[0] = string_type;
 field_type[1] = H5T_NATIVE_INT;
 field_type[2] = H5T_NATIVE_INT;
 field_type[3] = H5T_NATIVE_FLOAT;
 field_type[4] = H5T_NATIVE_DOUBLE;
 
 /* Create a new file using default properties. */
 file_id = H5Fcreate( "ex_table_11.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
 
 /* Make the table */
 status=H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS, 
                         dst_size1,field_names, dst_offset1, field_type, 
                         chunk_size, fill_data, compress, p_data  );
 
 /* Insert the new field at the end of the field list */
 position = NFIELDS;
 status=H5TBinsert_field( file_id, TABLE_NAME, "New Field", field_type_new, position, 
  fill_data_new, data );

 /* Get table info  */
 status=H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );

 /* print */
 printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
 
 /* Close the file. */
 H5Fclose( file_id );
 
 return 0;
 
 
}
Example No. 8
int main( void )
{
 typedef struct Particle 
 {
  char   name[16];
  int    lati;
  int    longi;
  float  pressure;
  double temperature; 
 } Particle;

 Particle  dst_buf[ NRECORDS + NRECORDS_INS ];

 /* Calculate the size and the offsets of our struct members in memory */
 size_t dst_size =  sizeof( Particle );
 size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
                                HOFFSET( Particle, lati ),
                                HOFFSET( Particle, longi ),
                                HOFFSET( Particle, pressure ),
                                HOFFSET( Particle, temperature )};
 size_t dst_sizes[NFIELDS] = { sizeof( dst_buf[0].name),
                               sizeof( dst_buf[0].lati),
                               sizeof( dst_buf[0].longi),
                               sizeof( dst_buf[0].pressure),
                               sizeof( dst_buf[0].temperature)};

 /* Define an array of Particles */
 Particle  p_data[NRECORDS] = { 
 {"zero",0,0, 0.0f, 0.0},
 {"one",10,10, 1.0f, 10.0},
 {"two",  20,20, 2.0f, 20.0},
 {"three",30,30, 3.0f, 30.0},
 {"four", 40,40, 4.0f, 40.0},
 {"five", 50,50, 5.0f, 50.0},
 {"six",  60,60, 6.0f, 60.0},
 {"seven",70,70, 7.0f, 70.0}
  };

 /* Define field information */
 const char *field_names[NFIELDS]  = 
 { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
 hid_t      field_type[NFIELDS];
 hid_t      string_type;
 hid_t      file_id;
 hsize_t    chunk_size = 10;
 int        compress  = 0;
 Particle   fill_data[1] = 
 { {"no data",-1,-1, -99.0f, -99.0} };   /* Fill value particle */ 
 hsize_t    start1;                      /* Record to start reading from 1st table */
 hsize_t    nrecords;                    /* Number of records to insert */
 hsize_t    start2;                      /* Record to start writing in 2nd table */
 herr_t     status;
 int        i;
 hsize_t    nfields_out;
 hsize_t    nrecords_out;
 
 /* Initialize the field_type array */
 string_type = H5Tcopy( H5T_C_S1 );
 H5Tset_size( string_type, 16 );
 field_type[0] = string_type;
 field_type[1] = H5T_NATIVE_INT;
 field_type[2] = H5T_NATIVE_INT;
 field_type[3] = H5T_NATIVE_FLOAT;
 field_type[4] = H5T_NATIVE_DOUBLE;
 
 /* Create a new file using default properties. */
 file_id = H5Fcreate( "ex_table_09.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
  
 /* Make 2 tables: TABLE2_NAME is empty  */
 status=H5TBmake_table( "Table Title",file_id,TABLE1_NAME,NFIELDS,NRECORDS, 
                         dst_size,field_names, dst_offset, field_type, 
                         chunk_size, fill_data, compress, p_data  );
 
 status=H5TBmake_table( "Table Title",file_id,TABLE2_NAME,NFIELDS,NRECORDS, 
                         dst_size,field_names, dst_offset, field_type, 
                         chunk_size, fill_data, compress, NULL  );
 
 
 /* Add 2 records from TABLE1_NAME to TABLE2_NAME  */
 start1    = 3;      
 nrecords  = NRECORDS_INS; 
 start2    = 6;      
 status=H5TBadd_records_from( file_id, TABLE1_NAME, start1, nrecords, TABLE2_NAME, start2 );

 /* read TABLE2_NAME: it should have 2 more records now */
 status=H5TBread_table( file_id, TABLE2_NAME, dst_size, dst_offset, dst_sizes, dst_buf );

 /* Get table info  */
 status=H5TBget_table_info (file_id,TABLE2_NAME, &nfields_out, &nrecords_out );

 /* print */
 printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
  
 /* print it by rows */
 for (i=0; i<nrecords_out; i++) {
  printf ("%-5s %-5d %-5d %-5f %-5f", 
   dst_buf[i].name,
   dst_buf[i].lati,
   dst_buf[i].longi,
   dst_buf[i].pressure,
   dst_buf[i].temperature);
  printf ("\n");
 }
 
 /* Close the file. */
 H5Fclose( file_id );
 
 return 0;
}
Example No. 9
int main( void )
{
 typedef struct Particle 
 {
  char   name[16];
  int    lati;
  int    longi;
  float  pressure;
  double temperature; 
 } Particle;

 Particle  dst_buf[NRECORDS+NRECORDS_ADD];

/* Define an array of Particles */
 Particle  p_data[NRECORDS] = { 
 {"zero",0,0, 0.0f, 0.0},
 {"one",10,10, 1.0f, 10.0},
 {"two",  20,20, 2.0f, 20.0},
 {"three",30,30, 3.0f, 30.0},
 {"four", 40,40, 4.0f, 40.0},
 {"five", 50,50, 5.0f, 50.0},
 {"six",  60,60, 6.0f, 60.0},
 {"seven",70,70, 7.0f, 70.0}
  };

 /* Calculate the size and the offsets of our struct members in memory */
 size_t dst_size =  sizeof( Particle );
 size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
                                HOFFSET( Particle, lati ),
                                HOFFSET( Particle, longi ),
                                HOFFSET( Particle, pressure ),
                                HOFFSET( Particle, temperature )};

 size_t dst_sizes[NFIELDS] = { sizeof( p_data[0].name),
                               sizeof( p_data[0].lati),
                               sizeof( p_data[0].longi),
                               sizeof( p_data[0].pressure),
                               sizeof( p_data[0].temperature)};
 
 /* Define field information */
 const char *field_names[NFIELDS] = 
 { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
 hid_t      field_type[NFIELDS];
 hid_t      string_type;
 hid_t      file_id;
 hsize_t    chunk_size = 10;
 int        *fill_data = NULL;
 int        compress  = 0;
 herr_t     status; 
 int        i;

  /* Append particles */ 
 Particle particle_in[ NRECORDS_ADD ] = 
 {{ "eight",80,80, 8.0f, 80.0},
 {"nine",90,90, 9.0f, 90.0} };

 /* Initialize the field_type array */
 string_type = H5Tcopy( H5T_C_S1 );
 H5Tset_size( string_type, 16 );
 field_type[0] = string_type;
 field_type[1] = H5T_NATIVE_INT;
 field_type[2] = H5T_NATIVE_INT;
 field_type[3] = H5T_NATIVE_FLOAT;
 field_type[4] = H5T_NATIVE_DOUBLE;
   
 /* Create a new file using default properties. */
 file_id = H5Fcreate( "ex_table_02.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );

 /* make a table */
 status=H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS, 
                        dst_size, field_names, dst_offset, field_type, 
                        chunk_size, fill_data, compress, p_data  );

 /* append two records */
 status=H5TBappend_records(file_id, TABLE_NAME,NRECORDS_ADD, dst_size, dst_offset, dst_sizes, 
  &particle_in );

 /* read the table */
 status=H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );

 /* print it by rows */
 for (i=0; i<NRECORDS+NRECORDS_ADD; i++) {
  printf ("%-5s %-5d %-5d %-5f %-5f", 
   dst_buf[i].name,
   dst_buf[i].lati,
   dst_buf[i].longi,
   dst_buf[i].pressure,
   dst_buf[i].temperature);
  printf ("\n");
 }
 
 /* Close the file. */
 H5Fclose( file_id );

 return 0;
}
Example No. 10
int main( void )
{
 typedef struct Particle
 {
  char   name[16];
  int    lati;
  int    longi;
  float  pressure;
  double temperature;
 } Particle;

 /* Calculate the size and the offsets of our struct members in memory */
 size_t dst_size =  sizeof( Particle );
 size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
                                HOFFSET( Particle, lati ),
                                HOFFSET( Particle, longi ),
                                HOFFSET( Particle, pressure ),
                                HOFFSET( Particle, temperature )};

  /* Define field information */
  const char *field_names[NFIELDS]  =
  { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
  hid_t      field_type[NFIELDS];
  hid_t      string_type;
  hid_t      file_id;
  hsize_t    chunk_size = 10;
  Particle   fill_data[1] =
  { {"no data",-1,-1, -99.0f, -99.0} };   /* Fill value particle */
  int        compress  = 0;
  hsize_t    nfields_out;
  hsize_t    nrecords_out;

  /* Initialize field_type */
  string_type = H5Tcopy( H5T_C_S1 );
  H5Tset_size( string_type, 16 );
  field_type[0] = string_type;
  field_type[1] = H5T_NATIVE_INT;
  field_type[2] = H5T_NATIVE_INT;
  field_type[3] = H5T_NATIVE_FLOAT;
  field_type[4] = H5T_NATIVE_DOUBLE;

 /* Create a new file using default properties. */
 file_id = H5Fcreate( "ex_table_06.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );

 /* Make a table */
 H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,dst_size,
                       field_names, dst_offset, field_type,
                       chunk_size, fill_data, compress, NULL);

 /* Get table info  */
 H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );

 /* print */
 printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);

  /* close type */
 H5Tclose( string_type );

 /* close the file */
 H5Fclose( file_id );

 return 0;


}
Example No. 11
/**
 * Allocates memory for and initializes data structure
 * that describes the HDF5 table used to store SNP data.
 * A single SNPTab data structure should be created for 
 * each chromosome, and be freed after use.
 *
 */
SNPTab *snp_tab_new(hid_t h5file, const char *chrom_name,
		    size_t max_record) {
  herr_t status;
  SNPTab *tab;
  SNP snp_desc;

  const char *field_names[] =
    {"name", "pos", "allele1", "allele2"};

  tab = my_malloc(sizeof(SNPTab));
  
  tab->h5file = h5file;  
  
  /* set datatypes for each field */
  tab->name_type = H5Tcopy(H5T_C_S1);
  H5Tset_size(tab->name_type, SNP_MAX_NAME);
  /* no longer store chromosome as each chromosome
   * gets its own table 
   */
  /* tab->chrom_type = H5Tcopy(H5T_C_S1);
   * H5Tset_size(tab->chrom_type, SNP_MAX_CHROM);
   */
  tab->allele_type = H5Tcopy(H5T_C_S1);
  H5Tset_size(tab->allele_type, SNP_MAX_ALLELE);
  tab->field_type[0] = tab->name_type; /* name */
  /* tab->field_type[1] = tab->chrom_type; */ /* chromosome */
  tab->field_type[1] = H5T_NATIVE_LONG; /* pos */
  tab->field_type[2] = tab->allele_type; /* allele1 */
  tab->field_type[3] = tab->allele_type; /* allele2 */

  /* sizes of record and each field */
  tab->record_size = sizeof(SNP);
  tab->field_size[0] = sizeof(snp_desc.name);
  /* tab->field_size[1] = sizeof(snp_desc.chrom); */
  tab->field_size[1] = sizeof(snp_desc.pos);
  tab->field_size[2] = sizeof(snp_desc.allele1);
  tab->field_size[3] = sizeof(snp_desc.allele2);
    
  /* offsets of each field */
  tab->field_offset[0] = HOFFSET(SNP, name);
  /* tab->field_offset[1] = HOFFSET(SNP, chrom); */
  tab->field_offset[1] = HOFFSET(SNP, pos);
  tab->field_offset[2] = HOFFSET(SNP, allele1);
  tab->field_offset[3] = HOFFSET(SNP, allele2);
    
  /* title and name of table */
  tab->title = util_str_concat(chrom_name, " SNPs", NULL);  
  tab->name = util_str_dup(chrom_name);

  /* set chunk size and compression */
  tab->chunk_size = SNPTAB_CHUNK_SIZE;
  tab->compress = 1;

  tab->n_record = 0;
  tab->max_record = max_record;
  
  status = H5TBmake_table(tab->title, tab->h5file, tab->name,
			  SNPTAB_N_FIELDS, tab->max_record,
			  tab->record_size, field_names,
			  tab->field_offset, tab->field_type,
			  tab->chunk_size, NULL, tab->compress, NULL);
  
  if(status < 0) {
    my_err("%s:%d: could not create SNP table\n", __FILE__, __LINE__);
  }

  
  return tab;
}
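As the comment on snp_tab_new() notes, one SNPTab is meant to be created per chromosome and freed after use. A minimal usage sketch of that pattern follows, assuming an already-open HDF5 file id; the chromosome names and record counts are made-up placeholders, and filling the table and freeing each SNPTab are done with the library's corresponding routines, which are not shown in this snippet.

/* Illustrative usage sketch (assumptions: open HDF5 file id, hypothetical
 * chromosome list and record counts). */
static void write_all_chromosomes(hid_t h5file)
{
    const char *chrom_names[] = { "chr1", "chr2", "chrX" };
    size_t max_records[] = { 1000000, 900000, 500000 };
    size_t n_chrom = sizeof(chrom_names) / sizeof(chrom_names[0]);
    size_t i;

    for (i = 0; i < n_chrom; i++) {
        SNPTab *tab = snp_tab_new(h5file, chrom_names[i], max_records[i]);

        /* ... append SNP records for this chromosome to tab ... */
        /* ... free tab after use (cleanup routine not shown here) ... */
        (void)tab;
    }
}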
Example No. 12
File: FileIO.cpp Project: xyuan/gkc
    int FileIO::create(Setup *setup) {
     
        hid_t file_plist = H5Pcreate(H5P_FILE_ACCESS);
#ifdef GKC_PARALLEL_MPI
   //       pass some information onto the underlying MPI_File_open call 
          MPI_Info file_info;
          check(MPI_Info_create(&file_info), DMESG("File info"));
          /* 
          H5Pset_sieve_buf_size(file_plist, 262144); 
          H5Pset_alignment(file_plist, 524288, 262144);
                
          MPI_Info_set(file_info, (char *) "access_style"        , (char *) "write_once");
          MPI_Info_set(file_info, (char *) "collective_buffering", (char *) "true");
          MPI_Info_set(file_info, (char *) "cb_block_size"       , (char *) "1048576");
          MPI_Info_set(file_info, (char *) "cb_buffer_size"      , (char *) "4194304");
           * */

          check( H5Pset_fapl_mpio(file_plist, parallel->Comm[DIR_ALL], file_info), DMESG("Set MPI Property"));
#endif
        file = check(H5Fcreate(outputFileName.c_str(), (overwriteFile ? H5F_ACC_TRUNC : H5F_ACC_EXCL),
                        H5P_DEFAULT, file_plist ), DMESG("H5FCreate : HDF5 File (File already exists ? use -f to overwrite) : " + outputFileName));
        check( H5Pclose(file_plist),   DMESG("H5Pclose"));

#ifdef GKC_PARALLEL_MPI
        MPI_Info_free(&file_info);
#endif
        
         //////////////////////////////////////////////////////////////// Info Group ////////////////////////////////////////////////////////

          hid_t infoGroup = check(H5Gcreate(file, "/Info",H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT), DMESG("Error creating group file for Phasespace : H5Gcreate"));

         check(H5LTset_attribute_string(infoGroup, ".", "Output", outputFileName.c_str()), DMESG("H5LTset_attribute"));
         check(H5LTset_attribute_string(infoGroup, ".", "Input",  inputFileName.c_str()), DMESG("H5LTset_attribute"));
         
         
         check(H5LTset_attribute_string(infoGroup, ".", "Version", PACKAGE_VERSION), DMESG("H5LTset_attribute"));
         // Some Simulation specific stuff
         //check(H5LTset_attribute_string(infoGroup, ".", "Solver", ((setup->Solver & VL_LIN) ? "Linear" : "Non-Linear")), DMESG("H5LTset_attribute"));
         //heck(H5LTset_attribute_string(infoGroup, ".", "Type",   ((setup->VlasovType   & VLASOV_LOCAL ) ? "Local"  : "Global"    )), DMESG("H5LTset_attribute"));
         //heck(H5LTset_attribute_string(infoGroup, ".", "FFTSolverS",   ((setup->VlasovType   & VLASOV_LOCAL ) ? "Local"  : "Global"    )), DMESG("H5LTset_attribute"));
         //check(H5LTset_attribute_string(infoGroup, ".", "Initial Condition", setup->PerturbationMethod.c_str()), DMESG("H5LTset_attribute"));
         check(H5LTset_attribute_string(infoGroup, ".", "Info", info.c_str()), DMESG("H5LTset_attribute"));
         
         check(H5LTset_attribute_string(infoGroup, ".", "Config", setup->configFileString.c_str()), DMESG("H5LTset_attribute"));

         H5Gclose(infoGroup);
         
         
         /// Write setup constants, ugly here ////
         hid_t constantsGroup = check(H5Gcreate(file, "/Constants",H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT), DMESG("Error creating group file for Phasespace : H5Gcreate"));
         //
         if (!setup->parser_constants.empty()) { 
            
           std::vector<std::string> const_vec = Setup::split(setup->parser_constants, ",");

            for(int s = 0; s < const_vec.size(); s++) { 
                std::vector<std::string> key_value = Setup::split(const_vec[s],"=");
                double value = Setup::string_to_double(key_value[1]);
                int dim[] = { 1 };
   //           check(H5LTmake_dataset_double(constantsGroup, Setup::trimLower(key_value[0], false).c_str(), 1, dim, &value ), DMESG("Write Constants Attributes"));
                check(H5LTset_attribute_double(constantsGroup, ".", Setup::trimLower(key_value[0], false).c_str(), &value, 1), DMESG("H5LTset_attribute"));
                //check(H5LTset_attribute_double(constantsGroup, ".", Setup::trimLower(key_value[0], false).c_str(), &(Setup::string_to_double(key_value[1])), 1), DMESG("H5LTset_attribute"));
            };
         
         }
         
          H5Gclose(constantsGroup);

         
         // ********************* setup Table for CFL *********************
         cfl_table = new CFLTable();
         
         cfl_offset[0] =  HOFFSET( CFLTable, timeStep );
         cfl_offset[1] =  HOFFSET( CFLTable, time );
         cfl_offset[2] =  HOFFSET( CFLTable, Fx );
         cfl_offset[3] =  HOFFSET( CFLTable, Fy );
         cfl_offset[4] =  HOFFSET( CFLTable, Fz  );
         cfl_offset[5] =  HOFFSET( CFLTable, Fv );
         cfl_offset[6] =  HOFFSET( CFLTable, total );
          

         cfl_sizes[0] = sizeof(int);
         for(int i = 1; i < 7; i++) cfl_sizes[i] = sizeof(double);

         hid_t cfl_type[7];
         cfl_type[0] = H5T_NATIVE_INT;
         for(int i = 1; i < 7; i++) cfl_type[i] = H5T_NATIVE_DOUBLE;

         const char *cfl_names[7] = {
              "timeStep", "time", "Fx", "Fy", "Fz", "Fv", "Total"
         };

          check(H5TBmake_table("cflTable", file, "cfl", (hsize_t) 7, (hsize_t) 0, sizeof(CFLTable), (const char**) cfl_names,
                               cfl_offset, cfl_type, 32, NULL, 0, cfl_table ), DMESG("H5Tmake_table : cfl"));
         

         return HELIOS_SUCCESS;
    }
Example No. 13
/*+++++++++++++++++++++++++ Main Program or Function +++++++++++++++*/
void SCIA_WR_H5_MPH( struct param_record param, 
		     const struct mph_envi *mph )
{
     register unsigned short ni = 0;

     hid_t   mph_type[NFIELDS];

     const int compress = 0;
     const char *mph_names[NFIELDS] = {
	  "product_name", "proc_stage", "ref_doc", 
	  "acquisition_station", "proc_center", "proc_time", 
	  "software_version", 
	  "sensing_start", "sensing_stop", 
	  "phase", "cycle", "rel_orbit", "abs_orbit", "state_vector_time", 
	  "delta_ut1", 
	  "x_position", "y_position", "z_position", 
	  "x_velocity", "y_velocity", "z_velocity",
	  "vector_source", "utc_sbt_time", "sat_binary_time", "clock_step", 
	  "leap_utc", "leap_sign", "leap_err", 
	  "product_err", "tot_size", "sph_size", "num_dsd", "dsd_size", 
	  "num_data_sets"
     };
/*
 * define user-defined data types of the Table-fields
 */
     mph_type[0] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[0], (size_t) ENVI_FILENAME_SIZE );
     mph_type[1] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[1], (size_t) 2 );
     mph_type[2] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[2], (size_t) 24 );

     mph_type[3] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[3], (size_t) 21 );
     mph_type[4] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[4], (size_t) 7 );
     mph_type[5] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[5], (size_t) UTC_STRING_LENGTH );
     mph_type[6] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[6], (size_t) 15 );

     mph_type[7] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[7], (size_t) UTC_STRING_LENGTH );
     mph_type[8] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[8], (size_t) UTC_STRING_LENGTH );

     mph_type[9] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[9], (size_t) 2 );
     mph_type[10] = H5Tcopy( H5T_NATIVE_SHORT );
     mph_type[11] = H5Tcopy( H5T_NATIVE_INT );
     mph_type[12] = H5Tcopy( H5T_NATIVE_INT );
     mph_type[13] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[13], (size_t) UTC_STRING_LENGTH );
     mph_type[14] = H5Tcopy( H5T_NATIVE_DOUBLE );
     mph_type[15] = H5Tcopy( H5T_NATIVE_DOUBLE );
     mph_type[16] = H5Tcopy( H5T_NATIVE_DOUBLE );
     mph_type[17] = H5Tcopy( H5T_NATIVE_DOUBLE );
     mph_type[18] = H5Tcopy( H5T_NATIVE_DOUBLE );
     mph_type[19] = H5Tcopy( H5T_NATIVE_DOUBLE );
     mph_type[20] = H5Tcopy( H5T_NATIVE_DOUBLE );

     mph_type[21] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[21], (size_t) 3 );
     mph_type[22] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[22], (size_t) UTC_STRING_LENGTH );
     mph_type[23] = H5Tcopy( H5T_NATIVE_UINT );
     mph_type[24] = H5Tcopy( H5T_NATIVE_UINT );

     mph_type[25] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[25], (size_t) UTC_STRING_LENGTH );
     mph_type[26] = H5Tcopy( H5T_NATIVE_SHORT );
     mph_type[27] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[27], (size_t) 2 );

     mph_type[28] = H5Tcopy( H5T_C_S1 );
     (void) H5Tset_size( mph_type[28], (size_t) 2 );
     mph_type[29] = H5Tcopy( H5T_NATIVE_UINT );
     mph_type[30] = H5Tcopy( H5T_NATIVE_UINT );
     mph_type[31] = H5Tcopy( H5T_NATIVE_UINT );
     mph_type[32] = H5Tcopy( H5T_NATIVE_UINT );
     mph_type[33] = H5Tcopy( H5T_NATIVE_UINT );
/*
 * create table
 */
     (void) H5TBmake_table( "Main Product Header", param.hdf_file_id, "MPH", 
			    NFIELDS, 1, mph_size, mph_names, mph_offs, 
			    mph_type, 1, NULL, compress, mph );
/*
 * create some attributes for quick access
 */
     (void) H5LTset_attribute_int( param.hdf_file_id, "/", "abs_orbit", 
				   &mph->abs_orbit, 1 );
/*
 * close interface
 */
     do {
	  (void) H5Tclose( mph_type[ni] );
     } while ( ++ni < NFIELDS );
}
Example No. 14
int SpIO_H5WriteTau(hid_t h5f_id, const Zone *zone)
{
	SpPhys *pp = zone->data;

	/* Just in case the programmer did something stupid */
	Deb_ASSERT(pp->mol != NULL);
	Deb_ASSERT(pp->tau != NULL);

	herr_t hstatus = 0;
	int status = 0;
	size_t
		i, j,
		nrad = pp->mol->nrad,
		record_size =  sizeof(double) * nrad,
		field_offset[nrad];
	const char **field_names = Mem_CALLOC(nrad, field_names);
	char **level_names = Mem_CALLOC(nrad, level_names);
	hid_t field_type[nrad];
	hsize_t chunk_size = 10;
	int *fill_data = NULL, compress  = 0;
	double *tau = Mem_CALLOC(zone->nchildren * nrad,  tau);

	/* Init fields */
	for(i = 0; i < nrad; i++) {
		field_offset[i] = i * sizeof(double);
		field_type[i] = H5T_NATIVE_DOUBLE;
		level_names[i] = Mem_Sprintf("line%lu", (unsigned long)i);
		field_names[i] = level_names[i];
	}

	/* Load data */
	#define TAU(i, j)\
		tau[(j) + nrad * (i)]

	for(i = 0; i < zone->nchildren; i++) {
		pp = zone->children[i]->data;
		for(j = 0; j < nrad; j++) {
			TAU(i, j) = pp->tau[j];
		}
	}

	#undef TAU

	/* Write table */
	hstatus = H5TBmake_table(
		"Level populations",
		h5f_id,
		"TAU",
		(hsize_t)nrad,
		(hsize_t)zone->nchildren,
		record_size,
		field_names,
		field_offset,
		field_type,
		chunk_size,
		fill_data,
		compress,
		tau
	);

	/* Cleanup */
	for(i = 0; i < nrad; i++)
		free(level_names[i]);
	free(level_names);
	free(field_names);
	free(tau);

	if(hstatus < 0)
		status = Err_SETSTRING("Error writing `TAU' table");

	return status;
}
Example No. 15
int SpIO_H5WritePops(hid_t h5f_id, const Zone *zone)
{
	SpPhys *pp = zone->data;

	/* Just in case the programmer did something stupid */
	Deb_ASSERT(pp->mol != NULL);
	Deb_ASSERT(pp->pops[0] != NULL);

	herr_t hstatus = 0;
	int status = 0;
	size_t
		i, j,
		nlev = pp->mol->nlev,
		record_size =  sizeof(double) * nlev,
		field_offset[nlev];
	const char **field_names = Mem_CALLOC(nlev, field_names);
	char **level_names = Mem_CALLOC(nlev, level_names);
	hid_t field_type[nlev];
	hsize_t chunk_size = 10;
	int *fill_data = NULL, compress  = 0;
	double *pops = Mem_CALLOC(zone->nchildren * nlev, pops);

	/* Init fields */
	for(i = 0; i < nlev; i++) {
		field_offset[i] = i * sizeof(double);
		field_type[i] = H5T_NATIVE_DOUBLE;
		level_names[i] = Mem_Sprintf("lev%lu", (unsigned long)i);
		field_names[i] = level_names[i];
	}

	/* Load data */
	#define POPS(i, j)\
		pops[(j) + nlev * (i)]

	for(i = 0; i < zone->nchildren; i++) {
		pp = zone->children[i]->data;
		for(j = 0; j < nlev; j++) {
			POPS(i, j) = pp->pops[0][j];
		}
	}

	#undef POPS

	/* Write table */
	hstatus = H5TBmake_table(
		"Level populations",
		h5f_id,
		"POPS",
		(hsize_t)nlev,
		(hsize_t)zone->nchildren,
		record_size,
		field_names,
		field_offset,
		field_type,
		chunk_size,
		fill_data,
		compress,
		pops
	);

	/* Cleanup */
	for(i = 0; i < nlev; i++)
		free(level_names[i]);
	free(level_names);
	free(field_names);
	free(pops);

	if(hstatus < 0)
		status = Err_SETSTRING("Error writing `POPS' table");

	return status;
}
Example No. 16
int main( void )
{
 typedef struct Particle
 {
  char   name[16];
  int    lati;
  int    longi;
  float  pressure;
  double temperature;
 } Particle;

 /* Define an array of Particles */
 Particle  p_data[NRECORDS] = {
 {"zero",0,0, 0.0f, 0.0},
 {"one",10,10, 1.0f, 10.0},
 {"two",  20,20, 2.0f, 20.0},
 {"three",30,30, 3.0f, 30.0},
 {"four", 40,40, 4.0f, 40.0},
 {"five", 50,50, 5.0f, 50.0},
 {"six",  60,60, 6.0f, 60.0},
 {"seven",70,70, 7.0f, 70.0}
  };

 Particle  dst_buf[ 2 * NRECORDS ];
 /* Calculate the size and the offsets of our struct members in memory */
 size_t dst_size =  sizeof( Particle );
 size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
                                HOFFSET( Particle, lati ),
                                HOFFSET( Particle, longi ),
                                HOFFSET( Particle, pressure ),
                                HOFFSET( Particle, temperature )};
 size_t dst_sizes[NFIELDS] = { sizeof( dst_buf[0].name),
                               sizeof( dst_buf[0].lati),
                               sizeof( dst_buf[0].longi),
                               sizeof( dst_buf[0].pressure),
                               sizeof( dst_buf[0].temperature)};


 /* Define field information */
 const char *field_names[NFIELDS]  =
 { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
 hid_t      field_type[NFIELDS];
 hid_t      string_type;
 hid_t      file_id;
 hsize_t    chunk_size = 10;
 int        compress  = 0;
 int        *fill_data = NULL;
 herr_t     status;
 hsize_t    nfields_out;
 hsize_t    nrecords_out;
 int        i;

 /* Initialize the field_type array */
 string_type = H5Tcopy( H5T_C_S1 );
 H5Tset_size( string_type, 16 );
 field_type[0] = string_type;
 field_type[1] = H5T_NATIVE_INT;
 field_type[2] = H5T_NATIVE_INT;
 field_type[3] = H5T_NATIVE_FLOAT;
 field_type[4] = H5T_NATIVE_DOUBLE;

 /* Create a new file using default properties. */
 file_id = H5Fcreate( "ex_table_10.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );

 /* Make two tables */
 status=H5TBmake_table( "Table Title",file_id,TABLE1_NAME,NFIELDS,NRECORDS,
                         dst_size,field_names, dst_offset, field_type,
                         chunk_size, fill_data, compress, p_data  );

 status=H5TBmake_table( "Table Title",file_id,TABLE2_NAME,NFIELDS,NRECORDS,
                         dst_size,field_names, dst_offset, field_type,
                         chunk_size, fill_data, compress, p_data  );

 /* Combine the two tables into a third in the same file  */
 status=H5TBcombine_tables( file_id, TABLE1_NAME, file_id, TABLE2_NAME, TABLE3_NAME );

 /* read the combined table */
 status=H5TBread_table( file_id, TABLE3_NAME, dst_size, dst_offset, dst_sizes, dst_buf );

 /* Get table info  */
 status=H5TBget_table_info (file_id,TABLE3_NAME, &nfields_out, &nrecords_out );

 /* print */
 printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);

 /* print it by rows */
 for (i=0; i<nrecords_out; i++) {
  printf ("%-5s %-5d %-5d %-5f %-5f",
   dst_buf[i].name,
   dst_buf[i].lati,
   dst_buf[i].longi,
   dst_buf[i].pressure,
   dst_buf[i].temperature);
  printf ("\n");
 }

  /* close type */
 H5Tclose( string_type );

 /* close the file */
 H5Fclose( file_id );

 return 0;

}
Example No. 17
/*-------------------------------------------------------------------------
 * Function: h5tbmake_table_c
 *
 * Purpose: Call H5TBmake_table
 *
 * Return: Success: 0, Failure: -1
 *
 * Programmer: [email protected]
 *
 * Date: October 06, 2004
 *
 * Comments:
 *
 * Modifications:
 *
 *
 *-------------------------------------------------------------------------
 */
int_f
nh5tbmake_table_c(int_f *namelen1,
                  _fcd name1,
                  hid_t_f *loc_id,
                  int_f *namelen,
                  _fcd name,
                  hsize_t_f *nfields,
                  hsize_t_f *nrecords,
                  size_t_f *type_size,
                  size_t_f *field_offset,
                  hid_t_f *field_types,
                  hsize_t_f *chunk_size,
                  int_f *compress,
                  int_f *namelen2,       /* field_names lengths */
                  _fcd field_names)      /* field_names */
{
 int     ret_value = -1;
 herr_t  ret;
 char    *c_name;
 int     c_namelen;
 char    *c_name1;
 int     c_namelen1;
 hsize_t num_elem;
 hsize_t i;
 int     max_len=1;
 hid_t   c_loc_id     = *loc_id;
 hsize_t c_nfields    = *nfields;
 hsize_t c_nrecords   = *nrecords;
 hsize_t c_chunk_size = *chunk_size;
 size_t  c_type_size  = *type_size;
 size_t  *c_field_offset;
 hid_t   *c_field_types;
 char    **c_field_names;
 char    *tmp, *tmp_p;

 num_elem = *nfields;

 for (i=0; i < num_elem; i++) {
  if (namelen2[i] > max_len) max_len = namelen2[i];
 }

/*
 * Convert FORTRAN name to C name
 */
 c_namelen = *namelen;
 c_name = (char *)HD5f2cstring(name, c_namelen);
 if (c_name == NULL) return ret_value;

 c_namelen1 = *namelen1;
 c_name1 = (char *)HD5f2cstring(name1, c_namelen1);
 if (c_name1 == NULL) return ret_value;

 c_field_offset =  (size_t*)malloc(sizeof(size_t) * (size_t)c_nfields);
 if (!c_field_offset) return ret_value;

 c_field_types =  (hid_t*)malloc(sizeof(hid_t) * (size_t)c_nfields);
 if (!c_field_types) return ret_value;

 for (i=0; i < num_elem; i++) {
  c_field_offset[i] = field_offset[i];
  c_field_types[i]  = field_types[i];
 }

/*
 * Allocate array of character pointers
 */
 c_field_names = (char **)malloc((size_t)num_elem * sizeof(char *));
 if (c_field_names == NULL) return ret_value;

 /* Copy data to long C string */
 tmp = (char *)HD5f2cstring(field_names, (int)(max_len*num_elem));
 if (tmp == NULL) {
  free(c_field_names);
  return ret_value;
 }

/*
 * Move data from temporary buffer
 */
 tmp_p = tmp;
 for (i=0; i < num_elem; i++) {
  c_field_names[i] = (char *) malloc((size_t)namelen2[i]+1);
  memcpy(c_field_names[i], tmp_p, (size_t)namelen2[i]);
  c_field_names[i][namelen2[i]] = '\0';
  tmp_p = tmp_p + max_len;
 }

/*
 * Call H5TBmake_table function.
 */

 ret = H5TBmake_table(c_name1,c_loc_id,c_name,c_nfields,c_nrecords,c_type_size,
   c_field_names,c_field_offset,c_field_types,c_chunk_size,NULL,*compress,NULL);

 for (i=0; i < num_elem; i++) {
  free (c_field_names[i]);
 }
 free(c_field_names);
 free(tmp);
 free(c_field_offset);
 free(c_field_types);

 if (ret < 0) return ret_value;
 ret_value = 0;
 return ret_value;
}
Example No. 18
int main( void )
{
 typedef struct Particle 
 {
  char   name[16];
  int    lati;
  int    longi;
  float  pressure;
  double temperature; 
 } Particle;

 /* Define a subset of Particle, with latitude and longitude fields */
 typedef struct Position 
 {
  int    lati;
  int    longi;
 } Position;

 /* Define a subset of Particle, with name and pressure fields */
 typedef struct NamePressure 
 {
  char   name[16];
  float  pressure;
 } NamePressure;
 
 /* Calculate the type_size and the offsets of our struct members */
 Particle  dst_buf[NRECORDS];
 size_t dst_size =  sizeof( Particle );
 size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
                                HOFFSET( Particle, lati ),
                                HOFFSET( Particle, longi ),
                                HOFFSET( Particle, pressure ),
                                HOFFSET( Particle, temperature )};
 size_t dst_sizes[NFIELDS] = { sizeof( dst_buf[0].name),
                               sizeof( dst_buf[0].lati),
                               sizeof( dst_buf[0].longi),
                               sizeof( dst_buf[0].pressure),
                               sizeof( dst_buf[0].temperature)};

 size_t field_offset_pos[2] = { HOFFSET( Position, lati ),
                                HOFFSET( Position, longi )};
 
 /* Initially no data */
 Particle  *p_data = NULL;

 /* Define field information */
 const char *field_names[NFIELDS]  = 
 { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
 hid_t      field_type[NFIELDS];
 hid_t      string_type;
 hid_t      file_id;
 hsize_t    chunk_size = 10;
  Particle   fill_data[1] = 
 { {"no data",-1,-1, -99.0f, -99.0} };   /* Fill value particle */ 
 int        compress  = 0;
 hsize_t    nfields;
 hsize_t    start;                       /* Record to start reading/writing */
 hsize_t    nrecords;                    /* Number of records to read/write */
 herr_t     status; 
 int        i;

 /* Define new values for the field "Pressure"  */
 float      pressure_in  [NRECORDS_ADD] =
 { 0.0f,1.0f,2.0f};
 int        field_index_pre[1]     = { 3 };
 int        field_index_pos[2]     = { 1,2 };

 /* Define new values for the fields "Latitude,Longitude"  */
 Position   position_in[NRECORDS_ADD] = { {0,0},
 {10,10},
 {20,20} };

 size_t field_sizes_pos[2]=
 {
  sizeof(position_in[0].longi),
  sizeof(position_in[0].lati)
 };
 
 size_t field_sizes_pre[1]=
 { 
  sizeof(float)
 };

 /* Initialize the field_type array */
 string_type = H5Tcopy( H5T_C_S1 );
 H5Tset_size( string_type, 16 );
 field_type[0] = string_type;
 field_type[1] = H5T_NATIVE_INT;
 field_type[2] = H5T_NATIVE_INT;
 field_type[3] = H5T_NATIVE_FLOAT;
 field_type[4] = H5T_NATIVE_DOUBLE;
 
 /* Create a new file using default properties. */
 file_id = H5Fcreate( "ex_table_05.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );

 /* Make the table */
 status=H5TBmake_table( "Table Title", file_id, TABLE_NAME,NFIELDS,NRECORDS, 
                         dst_size,field_names, dst_offset, field_type, 
                         chunk_size, fill_data, compress, p_data  );

 /* Write the pressure field starting at record 2 */
 nfields  = 1;
 start    = 2;      
 nrecords = NRECORDS_ADD; 
 status=H5TBwrite_fields_index( file_id, TABLE_NAME, nfields, field_index_pre, start, nrecords, 
   sizeof( float ), 0, field_sizes_pre, pressure_in  );

 /* Write the new longitude and latitude information starting at record 2  */
 nfields  = 2;
 start    = 2;      
 nrecords = NRECORDS_ADD; 
 status=H5TBwrite_fields_index( file_id, TABLE_NAME, nfields, field_index_pos, start, nrecords, 
   sizeof( Position ), field_offset_pos, field_sizes_pos, position_in  );

 /* read the table */
 status=H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );

 /* print it by rows */
 for (i=0; i<NRECORDS; i++) {
  printf ("%-5s %-5d %-5d %-5f %-5f", 
   dst_buf[i].name,
   dst_buf[i].lati,
   dst_buf[i].longi,
   dst_buf[i].pressure,
   dst_buf[i].temperature);
  printf ("\n");
 }
 
 /* close type */
 H5Tclose( string_type );
 
 /* close the file */
 H5Fclose( file_id );

 return 0;

}
Example No. 19
int main( void )
{

 Particle  dst_buf[NRECORDS];

 /* Calculate the size and the offsets of our struct members in memory */
 size_t dst_size =  sizeof( Particle );
 size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
                                HOFFSET( Particle, lati ),
                                HOFFSET( Particle, longi ),
                                HOFFSET( Particle, pressure ),
                                HOFFSET( Particle, temperature )};

 Particle  p = {"zero",0,0, 0.0f, 0.0};
 size_t dst_sizes[NFIELDS] = { sizeof( p.name),
                               sizeof( p.lati),
                               sizeof( p.longi),
                               sizeof( p.pressure),
                               sizeof( p.temperature)};

 /* Fill value particle */
 Particle   fill_data[1] =
	{ {"no data",-1,-1, -99.0f, -99.0} };
 hid_t      field_type[NFIELDS];
 hid_t      string_type;
 hid_t      file_id;
 hsize_t    chunk_size = 10;
 hsize_t    start;      /* Record to start reading/writing */
 hsize_t    nrecords;   /* Number of records to read/write */
 herr_t     status;
 int        i;

 /* Define 2 new particles to write */
 Particle  particle_in[NRECORDS_WRITE] =
 { {"zero",0,0, 0.0f, 0.0},
 {"one",10,10, 1.0f, 10.0} };

 /* Initialize the field_type array */
 string_type = H5Tcopy( H5T_C_S1 );
 H5Tset_size( string_type, 16 );
 field_type[0] = string_type;
 field_type[1] = H5T_NATIVE_INT;
 field_type[2] = H5T_NATIVE_INT;
 field_type[3] = H5T_NATIVE_FLOAT;
 field_type[4] = H5T_NATIVE_DOUBLE;

/* Create a new file using default properties. */
 file_id = H5Fcreate( "h5_table_03.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );

 /* Make the table */
 status=H5TBmake_table( "Table Title",
  file_id,
  TABLE_NAME,
  NFIELDS,
  NRECORDS,
  dst_size,
  field_names,
  dst_offset,
  field_type,
  chunk_size,
  fill_data,
  0,           /* no compression */
  NULL );      /* no data written */


 /* Overwrite 2 records starting at record 0 */
 start    = 0;
 nrecords = NRECORDS_WRITE;
 status   = H5TBwrite_records (file_id,
			       TABLE_NAME,
			       start,
			       nrecords,
			       dst_size, 
			       dst_offset,
			       dst_sizes,
			       particle_in);

 /* read the table */
 status = H5TBread_table (file_id,
			  TABLE_NAME,
			  dst_size,
			  dst_offset,
			  dst_sizes,
			  dst_buf);

 /* print it by rows */
 for (i=0; i<NRECORDS; i++) {
  printf ("%-5s %-5d %-5d %-5f %-5f",
   dst_buf[i].name,
   dst_buf[i].lati,
   dst_buf[i].longi,
   dst_buf[i].pressure,
   dst_buf[i].temperature);
  printf ("\n");
 }

  /* close type */
 H5Tclose( string_type );
 
 /* close the file */
 H5Fclose( file_id );

 return 0;

}
Example No. 20
int main( void )
{
  Particle dst_buf[NRECORDS];
  /* Calculate the size and the offsets of our struct members in memory */
  size_t dst_size =  sizeof( Particle );
  size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
				 HOFFSET( Particle, lati ),
				 HOFFSET( Particle, longi ),
				 HOFFSET( Particle, pressure ),
				 HOFFSET( Particle, temperature )};
  size_t dst_sizes[NFIELDS] = { sizeof( dst_buf[0].name),
				sizeof( dst_buf[0].lati),
				sizeof( dst_buf[0].longi),
				sizeof( dst_buf[0].pressure),
				sizeof( dst_buf[0].temperature)};
  size_t field_offset_pos[2] = { HOFFSET( Position, lati ),
				 HOFFSET( Position, longi )};
  hid_t      field_type[NFIELDS];
  hid_t      string_type;
  hid_t      fileID;
  hsize_t    chunk_size = 10;
  Particle   fill_data[1] =
    { {"no data",-1,-1, -99.0f, -99.0} };  /* Fill value particle */
  hsize_t    start;                        /* Record to start reading/writing */
  hsize_t    nrecords;                     /* Number of records to read/write */
  int        compress  = 0;
  int        n;
  herr_t     status;
  Particle  *p_data = NULL;               /* Initially no data */
  float      pressure_in [NRECORDS_ADD] = /* Define new values for the field "Pressure"  */
    { 0.0f,1.0f,2.0f};
  Position   position_in[NRECORDS_ADD] = {/* Define new values for "Latitude,Longitude"  */
    {0,0},
    {10,10},
    {20,20}};
  NamePressure   namepre_in[NRECORDS_ADD] =/* Define new values for "Name,Pressure"  */
    { {"zero",0.0f},
      {"one",   1.0f},
      {"two",   2.0f},
    };
  size_t field_sizes_pos[2]=
    {
      sizeof(position_in[0].longi),
      sizeof(position_in[0].lati)
    };
  size_t field_sizes_pre[1]=
    {
      sizeof(namepre_in[0].pressure)
    };
  
  /* Initialize the field_type array */
  string_type = H5Tcopy( H5T_C_S1 );
  H5Tset_size( string_type, 16 );
  field_type[0] = string_type;
  field_type[1] = H5T_NATIVE_INT;
  field_type[2] = H5T_NATIVE_INT;
  field_type[3] = H5T_NATIVE_FLOAT;
  field_type[4] = H5T_NATIVE_DOUBLE;
  
  /*__________________________________________________________________
    Create a new file using default properties.
  */

  fileID = H5Fcreate ("h5_table_04.h5",
		       H5F_ACC_TRUNC,
		       H5P_DEFAULT,
		       H5P_DEFAULT);
  
  /*__________________________________________________________________
    Make the table
  */

  status = H5TBmake_table ("Table Title",
			   fileID,
			   TABLE_NAME,
			   NFIELDS,
			   NRECORDS,
			   dst_size,
			   field_names,
			   dst_offset,
			   field_type,
			   chunk_size,
			   fill_data,
			   compress,
			   p_data);

  /*__________________________________________________________________
    Write the pressure field starting at record 2.
  */

  start    = 2;
  nrecords = NRECORDS_ADD;
  status   = H5TBwrite_fields_name (fileID,
				    TABLE_NAME,
				    "Pressure",
				    start,
				    nrecords,
				    sizeof( float ),
				    0,
				    field_sizes_pre,
				    pressure_in);
  
  /*__________________________________________________________________
    Write the new longitude and latitude information starting at
    record 2.
  */
  start    = 2;
  nrecords = NRECORDS_ADD;
  status = H5TBwrite_fields_name (fileID,
				  TABLE_NAME,
				  "Latitude,Longitude",
				  start,
				  nrecords,
				  sizeof( Position ),
				  field_offset_pos,
				  field_sizes_pos,
				  position_in);
  
  /*__________________________________________________________________
    Read the table
  */

  status = H5TBread_table (fileID,
			   TABLE_NAME,
			   dst_size,
			   dst_offset,
			   dst_sizes,
			   dst_buf);
  
  /* print it by rows */
  for (n=0; n<NRECORDS; n++) {
    printf ("%-5s %-5d %-5d %-5f %-5f",
	    dst_buf[n].name,
	    dst_buf[n].lati,
	    dst_buf[n].longi,
	    dst_buf[n].pressure,
	    dst_buf[n].temperature);
    printf ("\n");
  }
  
  /*-------------------------------------------------------------------------
   * end
   *-------------------------------------------------------------------------
   */
  
  /* close type */
  H5Tclose( string_type );
  
  /* close the file */
  H5Fclose (fileID);
  
  return 0;
  
  
}
Example No. 21
bool stfio::exportHDF5File(const std::string& fName, const Recording& WData, ProgressInfo& progDlg) {
    
    hid_t file_id = H5Fcreate(fName.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    
    const int NRECORDS = 1;
    const int NFIELDS = 3;

    /* Calculate the size and the offsets of our struct members in memory */
    size_t rt_offset[NFIELDS] = {  HOFFSET( rt, channels ),
                                   HOFFSET( rt, date ),
                                   HOFFSET( rt, time )};

    /* Define an array of root tables */
    rt p_data;
    p_data.channels = WData.size();
    struct tm t = WData.GetDateTime();
    std::size_t date_length = snprintf(p_data.date, DATELEN, "%04i-%02i-%02i", t.tm_year+1900, t.tm_mon+1, t.tm_mday);
    std::size_t time_length = snprintf(p_data.time, TIMELEN, "%02i:%02i:%02i", t.tm_hour, t.tm_min, t.tm_sec);
    // ensure that an undefined string is set to "\0", and that the terminating \0 is counted in the string length
    p_data.date[date_length++] = 0;
    p_data.time[time_length++] = 0;

    /* Define field information */
    const char *field_names[NFIELDS]  =  { "channels", "date", "time" };
    hid_t      field_type[NFIELDS];

    /* Initialize the field_type array */
    hid_t string_type1 = H5Tcopy( H5T_C_S1 );
    hid_t string_type2 = H5Tcopy( H5T_C_S1 );
    H5Tset_size( string_type1,  date_length);
    H5Tset_size( string_type2,  time_length);
    field_type[0] = H5T_NATIVE_INT;
    field_type[1] = string_type1;
    field_type[2] = string_type2;
    
    std::ostringstream desc;
    desc << "Description of " << fName;
    
    herr_t status = H5TBmake_table( desc.str().c_str(), file_id, "description", (hsize_t)NFIELDS, (hsize_t)NRECORDS, sizeof(rt),
                                    field_names, rt_offset, field_type, 10, NULL, 0, &p_data  );

    if (status < 0) {
        std::string errorMsg("Exception while writing description in stfio::exportHDF5File");
        H5Fclose(file_id);
        H5close();
        throw std::runtime_error(errorMsg);
    }

    hid_t comment_group = H5Gcreate2( file_id,"/comment", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    /* File comment. */
    std::string description(WData.GetFileDescription());
    if (description.length() <= 0) {
        description = "No description";
    }

    status = H5LTmake_dataset_string(file_id, "/comment/description", description.c_str());
    if (status < 0) {
        std::string errorMsg("Exception while writing description in stfio::exportHDF5File");
        H5Fclose(file_id);
        H5close();
        throw std::runtime_error(errorMsg);
    }

    std::string comment(WData.GetComment());
    if (comment.length() <= 0) {
        comment = "No comment";
    }

    status = H5LTmake_dataset_string(file_id, "/comment/comment", comment.c_str());
    if (status < 0) {
        std::string errorMsg("Exception while writing comment in stfio::exportHDF5File");
        H5Fclose(file_id);
        H5close();
        throw std::runtime_error(errorMsg);
    }
    H5Gclose(comment_group);

    std::vector<std::string> channel_name(WData.size());

    hid_t channels_group = H5Gcreate2( file_id,"/channels", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    for ( std::size_t n_c=0; n_c < WData.size(); ++n_c) {
        /* Channel descriptions. */
        std::ostringstream ossname;
        ossname << WData[n_c].GetChannelName();
        if ( ossname.str() == "" ) {
            ossname << "ch" << (n_c);
        }
        channel_name[n_c] = ossname.str();
        hsize_t dimsc[1] = { 1 };
        hid_t string_typec = H5Tcopy( H5T_C_S1 );
        std::size_t cn_length = channel_name[n_c].length();
        if (cn_length <= 0) cn_length = 1;
        H5Tset_size( string_typec, cn_length );

        std::vector<char> datac(cn_length, '\0'); /* sized by cn_length so the buffer is never empty */
        std::copy(channel_name[n_c].begin(),channel_name[n_c].end(), datac.begin());
        std::ostringstream desc_path; desc_path << "/channels/ch" << (n_c);
        status = H5LTmake_dataset(file_id, desc_path.str().c_str(), 1, dimsc, string_typec, &datac[0]);
        if (status < 0) {
            std::string errorMsg("Exception while writing channel name in stfio::exportHDF5File");
            H5Fclose(file_id);
            H5close();
            throw std::runtime_error(errorMsg);
        }

        std::ostringstream channel_path; channel_path << "/" << channel_name[n_c];
        hid_t channel_group = H5Gcreate2( file_id, channel_path.str().c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        if (channel_group < 0) {
            std::ostringstream errorMsg;
            errorMsg << "Exception while creating channel group for "
                     << channel_path.str().c_str();
            H5Fclose(file_id);
            H5close();
            throw std::runtime_error(errorMsg.str());
        }

        /* Calculate the size and the offsets of our struct members in memory */
        size_t ct_size =  sizeof( ct );
        size_t ct_offset[1] = { HOFFSET( rt, channels ) };
        /* Define an array of channel tables */
        ct c_data = { (int)WData[n_c].size() };

        /* Define field information */
        const char *cfield_names[1]  =  { "n_sections" };
        hid_t      cfield_type[1] = {H5T_NATIVE_INT};
        std::ostringstream c_desc;
        c_desc << "Description of channel " << n_c;
        status = H5TBmake_table( c_desc.str().c_str(), channel_group, "description", (hsize_t)1, (hsize_t)1, ct_size,
                                 cfield_names, ct_offset, cfield_type, 10, NULL, 0, &c_data  );
        if (status < 0) {
            std::string errorMsg("Exception while writing channel description in stfio::exportHDF5File");
            H5Fclose(file_id);
            H5close();
            throw std::runtime_error(errorMsg);
        }

        int max_log10 = 0;
        if (WData[n_c].size() > 1) {
            max_log10 = int(log10((double)WData[n_c].size()-1.0));
        }

        for (std::size_t n_s=0; n_s < WData[n_c].size(); ++n_s) {
            int progbar = 
                // Channel contribution:
                (int)(((double)n_c/(double)WData.size())*100.0+
                      // Section contribution:
                      (double)(n_s)/(double)WData[n_c].size()*(100.0/WData.size()));
            std::ostringstream progStr;
            progStr << "Writing channel #" << n_c + 1 << " of " << WData.size()
                    << ", Section #" << n_s << " of " << WData[n_c].size();
            progDlg.Update(progbar, progStr.str());
            
            // construct a number with leading zeros:
            int n10 = 0;
            if (n_s > 0) {
                n10 = int(log10((double)n_s));
            }
            std::ostringstream strZero; strZero << "";
            for (int n_z=n10; n_z < max_log10; ++n_z) {
                strZero << "0";
            }

            // construct a section name:
            std::ostringstream section_name; section_name << WData[n_c][n_s].GetSectionDescription();
            if ( section_name.str() == "" ) {
                section_name << "sec" << n_s;
            }

            // create a child group in the channel:
            std::ostringstream section_path;
            section_path << channel_path.str() << "/" << "section_" << strZero.str() << n_s;
            hid_t section_group = H5Gcreate2( file_id, section_path.str().c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

            // add data and description, store as 32 bit little endian independent of machine:
            hsize_t dims[1] = { WData[n_c][n_s].size() };
            std::ostringstream data_path;
            data_path << section_path.str() << "/data";
            Vector_float data_cp(WData[n_c][n_s].get().size()); /* 32 bit */
            for (std::size_t n_cp = 0; n_cp < WData[n_c][n_s].get().size(); ++n_cp) {
                data_cp[n_cp] = float(WData[n_c][n_s][n_cp]);
            }
            status = H5LTmake_dataset(file_id, data_path.str().c_str(), 1, dims, H5T_IEEE_F32LE, &data_cp[0]);
            if (status < 0) {
                std::string errorMsg("Exception while writing data in stfio::exportHDF5File");
                H5Fclose(file_id);
                H5close();
                throw std::runtime_error(errorMsg);
            }

            const int NSRECORDS = 1;
            const int NSFIELDS = 3;

            /* Calculate the size and the offsets of our struct members in memory */
            size_t st_size =  sizeof( st );
            size_t st_offset[NSFIELDS] = {  HOFFSET( st, dt ),
                                            HOFFSET( st, xunits ),
                                            HOFFSET( st, yunits )};

            /* Define a section description record; zero-initialize it so the
               unit strings stay empty if they do not fit below */
            st s_data = st();
            s_data.dt = WData.GetXScale();
            if (WData.GetXUnits().length() < UNITLEN)
                strcpy( s_data.xunits, WData.GetXUnits().c_str() );
            if (WData[n_c].GetYUnits().length() < UNITLEN)
                strcpy( s_data.yunits, WData[n_c].GetYUnits().c_str() );

            /* Define field information */
            const char *sfield_names[NSFIELDS]  =  { "dt", "xunits", "yunits" };
            hid_t      sfield_type[NSFIELDS];

            /* Initialize the field_type array */
            hid_t string_type4 = H5Tcopy( H5T_C_S1 );
            hid_t string_type5 = H5Tcopy( H5T_C_S1 );
            H5Tset_size( string_type4,  2);
            std::size_t yu_length = WData[n_c].GetYUnits().length();
            if (yu_length <= 0) yu_length = 1;

            H5Tset_size( string_type5, yu_length );
            sfield_type[0] = H5T_NATIVE_DOUBLE;
            sfield_type[1] = string_type4;
            sfield_type[2] = string_type5;

            std::ostringstream sdesc;
            sdesc << "Description of " << section_name.str();
            status = H5TBmake_table( sdesc.str().c_str(), section_group, "description", (hsize_t)NSFIELDS, (hsize_t)NSRECORDS, st_size,
                                     sfield_names, st_offset, sfield_type, 10, NULL, 0, &s_data  );
            if (status < 0) {
                std::string errorMsg("Exception while writing section description in stfio::exportHDF5File");
                H5Fclose(file_id);
                H5close();
                throw std::runtime_error(errorMsg);
            }
            H5Gclose(section_group);
        }
        H5Gclose(channel_group);
    }
    H5Gclose(channels_group);

    /* Terminate access to the file. */
    status = H5Fclose(file_id);
    if (status < 0) {
        std::string errorMsg("Exception while closing file in stfio::exportHDF5File");
        throw std::runtime_error(errorMsg);
    }

    /* Release all hdf5 resources */
    status = H5close();
    if (status < 0) {
        std::string errorMsg("Exception while closing file in stfio::exportHDF5File");
        throw std::runtime_error(errorMsg);
    }
    
    return (status >= 0);
}
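
The rt, ct and st record structs (and the DATELEN, TIMELEN and UNITLEN constants) used by the HOFFSET(), strcpy() and H5Tset_size() calls above belong to the surrounding stfio code and are not part of this excerpt. A minimal sketch of layouts that would be consistent with those calls; the buffer widths and the ct member name are assumptions made for illustration:

/* Sketch only: plausible record layouts for the description tables above. */
#define DATELEN 128   /* assumed width */
#define TIMELEN 128   /* assumed width */
#define UNITLEN 16    /* assumed width */

typedef struct rt {        /* file-level description record */
    int  channels;
    char date[DATELEN];
    char time[TIMELEN];
} rt;

typedef struct ct {        /* channel-level description record */
    int n_sections;        /* member name assumed */
} ct;

typedef struct st {        /* section-level description record */
    double dt;             /* sampling interval */
    char   xunits[UNITLEN];
    char   yunits[UNITLEN];
} st;

Note that the channel table offset above is computed as HOFFSET( rt, channels ) rather than from ct itself; that only works because both structs begin with a single int at offset zero.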
Exemplo n.º 22
0
/*+++++++++++++++++++++++++ Main Program or Function +++++++++++++++*/
void SCIA_LV1_WR_H5_SCPN( struct param_record param, unsigned int nr_scpn,
			  const struct scpn_scia *scpn )
{
     hid_t   ads_id;
     hsize_t adim;
     herr_t  stat;

     hid_t   scpn_type[NFIELDS];

     const hbool_t compress = (param.flag_deflate == PARAM_SET) ? TRUE : FALSE;
     const size_t scpn_size = sizeof( struct scpn_scia );
     const char *scpn_names[NFIELDS] = { 
	  "mjd", "flag_mds", "orbit_phase", "srs_param", "num_lines", 
	  "wv_error_calib", "sol_spec", "line_pos", "coeffs"
     };
     const size_t scpn_offs[NFIELDS] = {
	  HOFFSET( struct scpn_scia, mjd ),
	  HOFFSET( struct scpn_scia, flag_mds ),
	  HOFFSET( struct scpn_scia, orbit_phase ),
	  HOFFSET( struct scpn_scia, srs_param ),
	  HOFFSET( struct scpn_scia, num_lines ),
	  HOFFSET( struct scpn_scia, wv_error_calib ),
	  HOFFSET( struct scpn_scia, sol_spec ),
	  HOFFSET( struct scpn_scia, line_pos ),
	  HOFFSET( struct scpn_scia, coeffs )
     };
/*
 * check number of SCPN records
 */
     if ( nr_scpn == 0 ) return;
/*
 * open/create group /ADS
 */
     ads_id = NADC_OPEN_HDF5_Group( param.hdf_file_id, "/ADS" );
     if ( ads_id < 0 ) NADC_RETURN_ERROR( NADC_ERR_HDF_GRP, "/ADS" );
/*
 * write SCPN data sets
 */
     adim = SCIENCE_PIXELS;
     scpn_type[0] = H5Topen( param.hdf_file_id, "mjd", H5P_DEFAULT );
     scpn_type[1] = H5Tcopy( H5T_NATIVE_UCHAR );
     scpn_type[2] = H5Tcopy( H5T_NATIVE_FLOAT );
     adim = SCIENCE_CHANNELS;
     scpn_type[3] = H5Tarray_create( H5T_NATIVE_UCHAR, 1, &adim );
     scpn_type[4] = H5Tarray_create( H5T_NATIVE_USHORT, 1, &adim );
     scpn_type[5] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );
     adim = SCIENCE_PIXELS;
     scpn_type[6] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );
     adim = 3 * SCIENCE_CHANNELS;
     scpn_type[7] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );
     adim = NUM_SPEC_COEFFS * SCIENCE_CHANNELS;
     scpn_type[8] = H5Tarray_create( H5T_NATIVE_DOUBLE, 1, &adim );

     stat = H5TBmake_table( "scpn", ads_id, "NEW_SPECTRAL_CALIBRATION", 
                            NFIELDS, 1, scpn_size, scpn_names,
                            scpn_offs, scpn_type, 1,
                            NULL, compress, scpn );
     if ( stat < 0 ) NADC_GOTO_ERROR( NADC_ERR_HDF_DATA, "scpn" );
/*
 * close interface
 */
 done:
     (void) H5Tclose( scpn_type[0] );
     (void) H5Tclose( scpn_type[1] );
     (void) H5Tclose( scpn_type[2] );
     (void) H5Tclose( scpn_type[3] );
     (void) H5Tclose( scpn_type[4] );
     (void) H5Tclose( scpn_type[5] );
     (void) H5Tclose( scpn_type[6] );
     (void) H5Tclose( scpn_type[7] );
     (void) H5Tclose( scpn_type[8] );
     (void) H5Gclose( ads_id );
}
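
The SCPN writer above opens a compound datatype named "mjd" that was committed to the HDF5 file beforehand; that setup is not part of this excerpt. A minimal sketch of committing such a reusable named type so that H5Topen( ..., "mjd", ... ) can find it later; the three-field layout below is an assumption made for illustration, not the actual mjd record definition:

/* Sketch only: commit a compound type under the name "mjd" at file level. */
struct mjd_sketch {
     int          days;       /* assumed field */
     unsigned int secnd;      /* assumed field */
     unsigned int musec;      /* assumed field */
};

static herr_t commit_mjd_type( hid_t file_id )
{
     hid_t type_id = H5Tcreate( H5T_COMPOUND, sizeof(struct mjd_sketch) );

     (void) H5Tinsert( type_id, "days", HOFFSET(struct mjd_sketch, days),
                       H5T_NATIVE_INT );
     (void) H5Tinsert( type_id, "secnd", HOFFSET(struct mjd_sketch, secnd),
                       H5T_NATIVE_UINT );
     (void) H5Tinsert( type_id, "musec", HOFFSET(struct mjd_sketch, musec),
                       H5T_NATIVE_UINT );

     /* commit the named datatype so later writers can H5Topen() it */
     if ( H5Tcommit2( file_id, "mjd", type_id,
                      H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT ) < 0 ) {
          (void) H5Tclose( type_id );
          return -1;
     }
     return H5Tclose( type_id );
}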
Exemplo n.º 23
0
int ias_rlut_write_linearization_params
(
    const IAS_RLUT_IO *rlut_file,    /* I: Open RLUT file */
    const IAS_RLUT_LINEARIZATION_PARAMS *linearization_params,
                                     /* I: Pointer to an array of data
                                        structures containing the
                                        linearization parameters for all
                                        detectors in the current band/SCA */
    int band_number,                 /* I: Current band number */
    int sca_number,                  /* I: Current SCA number*/
    int num_detectors                /* I: Number of detectors in the
                                        current band/SCA */
)
{
    char bandsca_parameter_name[IAS_RLUT_BANDSCA_GROUP_NAME_LENGTH + 1];
                                           /* Linearization parameter group
                                              name for the current band/SCA */
    const char *field_names[IAS_RLUT_PARAM_NFIELDS];
                                           /* Name of each linearization
                                              parameter */
    size_t offsets[IAS_RLUT_PARAM_NFIELDS];/* Data offsets in
                                              LINEARIZATION_PARAMS
                                              data structure for each
                                              field*/
    size_t field_sizes[IAS_RLUT_PARAM_NFIELDS];
                                           /* Size of each field */
    hid_t field_types[IAS_RLUT_PARAM_NFIELDS];
                                           /* Data type for each field */
    hid_t fields_to_close[IAS_RLUT_PARAM_NFIELDS];
                                           /* Flags indicating open
                                              fields needing to be closed */
    hid_t linearization_param_group_id;    /* Root LINEARIZATION_PARAMETERS
                                              group ID */
    hid_t bandsca_group_id;                /* SCA group ID */
    hsize_t type_size;                     /* Size of base
                                              LINEARIZATION_PARAMS
                                              data structure */
    herr_t hdf_status;                     /* HDF5 error status flag */
    int status;                            /* IAS status flags */
    int return_status = SUCCESS;


    /* Make sure the RLUT file is actually open */
    if ((rlut_file == NULL) || (rlut_file->file_id < 0))
    {
        IAS_LOG_ERROR("NULL pointer to IAS_RLUT_IO data block, or no RLUT "
            "file has been opened");
        return ERROR;
    }

    /* Construct the group name for the current band/SCA */
    status = snprintf(bandsca_parameter_name, sizeof(bandsca_parameter_name),
        "%s/Band%02d_SCA%02d", LINEARIZATION_PARAMS_GROUP_NAME, band_number,
        sca_number);
    if ((status < 0) || (status >= sizeof(bandsca_parameter_name)))
    {
        IAS_LOG_ERROR("Creating group name for band %d SCA %d "
            "linearization parameters", band_number, sca_number);
        return ERROR;
    }

    /* Open the root group */
    linearization_param_group_id = H5Gopen(rlut_file->file_id,
            LINEARIZATION_PARAMS_GROUP_NAME, H5P_DEFAULT);
    if (linearization_param_group_id < 0)
    {
        IAS_LOG_ERROR("Opening root linearization parameters group");
        return ERROR;
    }

    /* Create a new group for the current band/SCA within this group */
    bandsca_group_id = H5Gcreate(linearization_param_group_id,
       bandsca_parameter_name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    if (bandsca_group_id < 0)
    {
        IAS_LOG_ERROR("Creating group for band %d SCA %d linearization "
            "parameters", band_number, sca_number);
        H5Gclose(linearization_param_group_id);
        return ERROR;
    }

    /* Build the table definition */
    status = ias_rlut_build_linearization_params_table_description(offsets,
        field_names, field_types, fields_to_close, field_sizes);
    if (status != SUCCESS)
    {
        IAS_LOG_ERROR("Building linearization parameter table description");
        H5Gclose(bandsca_group_id);
        H5Gclose(linearization_param_group_id);
        return ERROR;
    }

    /* Get the size of the data structure */
    type_size = sizeof(*linearization_params);

    /* Write the parameter set for the current band/SCA */
    hdf_status = H5TBmake_table(LINEARIZATION_PARAMS_TABLE_NAME,
        bandsca_group_id, LINEARIZATION_PARAMS_DATASET_NAME,
        IAS_RLUT_PARAM_NFIELDS, num_detectors, type_size, field_names,
        offsets, field_types, sizeof(IAS_RLUT_LINEARIZATION_PARAMS), NULL, 0, 
        linearization_params);

    /* Cleanup the table description */
    ias_rlut_cleanup_table_description(fields_to_close,
        IAS_RLUT_PARAM_NFIELDS);

    /* Check the return status from the write */
    if (hdf_status < 0)
    {
        IAS_LOG_ERROR("Writing band %d SCA %d linearization parameter "
            "table to RLUT file %s", band_number, sca_number,
            rlut_file->filename);
        return_status = ERROR;
    }

    /* Close the local SCA group */
    hdf_status = H5Gclose(bandsca_group_id);
    if (hdf_status < 0)
    {
        IAS_LOG_ERROR("Closing band %d SCA %d linearization parameter "
            "group", band_number, sca_number);
        return_status = ERROR;
    }

    /* Close the main linearization parameter group */
    hdf_status = H5Gclose(linearization_param_group_id);
    if (hdf_status < 0)
    {
        IAS_LOG_ERROR("Closing root LINEARIZATION_PARAMETERS group");
        return_status = ERROR;
    }

    return return_status;
}   /* END ias_rlut_write_linearization_params */ 
Exemplo n.º 24
0
/*+++++++++++++++++++++++++ Main Program or Function +++++++++++++++*/
void SCIA_LV1_WR_H5_VLCP( struct param_record param, unsigned int nr_vlcp,
			  const struct vlcp_scia *vlcp )
{
     hid_t   gads_id;
     hsize_t adim;
     herr_t  stat;

     hid_t   vlcp_type[NFIELDS];

     const hbool_t compress = (param.flag_deflate == PARAM_SET) ? TRUE : FALSE;

     const char *vlcp_names[NFIELDS] = { 
	  "orbit_phase", "obm_pmd", "var_lc", "var_lc_error", "solar_stray", 
	  "solar_stray_error", "pmd_stray", "pmd_stray_error", "pmd_dark", 
	  "pmd_dark_error"
     };
     const size_t vlcp_size = sizeof( struct vlcp_scia );
     const size_t vlcp_offs[NFIELDS] = {
	  HOFFSET( struct vlcp_scia, orbit_phase ),
	  HOFFSET( struct vlcp_scia, obm_pmd ),
	  HOFFSET( struct vlcp_scia, var_lc ),
	  HOFFSET( struct vlcp_scia, var_lc_error ),
	  HOFFSET( struct vlcp_scia, solar_stray ),
	  HOFFSET( struct vlcp_scia, solar_stray_error ),
	  HOFFSET( struct vlcp_scia, pmd_stray ),
	  HOFFSET( struct vlcp_scia, pmd_stray_error ),
	  HOFFSET( struct vlcp_scia, pmd_dark ),
	  HOFFSET( struct vlcp_scia, pmd_dark_error )
     };
/*
 * check number of VLCP records
 */
     if ( nr_vlcp == 0 ) return;
/*
 * open/create group /GADS
 */
     gads_id = NADC_OPEN_HDF5_Group( param.hdf_file_id, "/GADS" );
     if ( gads_id < 0 ) NADC_RETURN_ERROR( NADC_ERR_HDF_GRP, "/GADS" );
/*
 * write VLCP data sets
 */
     vlcp_type[0] = H5Tcopy( H5T_NATIVE_FLOAT );
     adim = IR_CHANNELS + PMD_NUMBER;
     vlcp_type[1] = H5Tcopy( H5T_NATIVE_FLOAT );
     adim = IR_CHANNELS * CHANNEL_SIZE;
     vlcp_type[2] = H5Tcopy( H5T_NATIVE_FLOAT );
     vlcp_type[3] = H5Tcopy( H5T_NATIVE_FLOAT );
     adim = SCIENCE_PIXELS;
     vlcp_type[4] = H5Tcopy( H5T_NATIVE_FLOAT );
     vlcp_type[5] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );
     adim = PMD_NUMBER;
     vlcp_type[6] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );
     vlcp_type[7] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );
     adim = IR_PMD_NUMBER;
     vlcp_type[8] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );
     vlcp_type[9] = H5Tarray_create( H5T_NATIVE_FLOAT, 1, &adim );

     stat = H5TBmake_table( "vlcp", gads_id, "LEAKAGE_VARIABLE",
                            NFIELDS, nr_vlcp, vlcp_size, vlcp_names,
                            vlcp_offs, vlcp_type, 1,
                            NULL, compress, vlcp );
     if ( stat < 0 ) NADC_GOTO_ERROR( NADC_ERR_HDF_DATA, "vlcp" );
/*
 * close interface
 */
 done:
     (void) H5Tclose( vlcp_type[0] );
     (void) H5Tclose( vlcp_type[1] );
     (void) H5Tclose( vlcp_type[2] );
     (void) H5Tclose( vlcp_type[3] );
     (void) H5Tclose( vlcp_type[4] );
     (void) H5Tclose( vlcp_type[5] );
     (void) H5Tclose( vlcp_type[6] );
     (void) H5Tclose( vlcp_type[7] );
     (void) H5Tclose( vlcp_type[8] );
     (void) H5Tclose( vlcp_type[9] );
     (void) H5Gclose( gads_id );
}
Exemplo n.º 25
0
int main(int argc, char *argv[])
{
    (void)argc;
    (void)argv;

typedef struct rt {
    int channels;
    char date[DATELEN];
    char time[TIMELEN];
} rt;

//    H5Fis_hdf5("/dev/null");

/*
* Create a new file using H5F_ACC_TRUNC access,
* default file creation properties, and default file
* access properties.
* Then close the file.
*/

    const int NRECORDS = 1;
    const int NFIELDS = 3;
    char fName[] = "tmp.h5";

    /* Calculate the size and the offsets of our struct members in memory */
    size_t rt_offset[NFIELDS] = {  HOFFSET( rt, channels ),
                                   HOFFSET( rt, date ),
                                   HOFFSET( rt, time )};

    rt p_data;
    p_data.channels = 1;
    strcpy( p_data.date, "1234-Dec-31");
    strcpy( p_data.time, "12:34:56");


    hid_t file_id = H5Fcreate(fName, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);


    /* Define field information */
    const char *field_names[NFIELDS]  =  { "channels", "date", "time" };
    hid_t      field_type[NFIELDS];

    /* Initialize the field_type array */
    hid_t string_type1 = H5Tcopy( H5T_C_S1 );
    hid_t string_type2 = H5Tcopy( H5T_C_S1 );
    H5Tset_size( string_type1,  strlen(p_data.date));
    H5Tset_size( string_type2,  strlen(p_data.time));
    field_type[0] = H5T_NATIVE_INT;
    field_type[1] = string_type1;
    field_type[2] = string_type2;

    std::ostringstream desc;
    desc << "Description of " << fName;

    herr_t status = H5TBmake_table( desc.str().c_str(), file_id, "description", (hsize_t)NFIELDS, (hsize_t)NRECORDS, sizeof(rt),
                                    field_names, rt_offset, field_type, 10, NULL, 0, &p_data  );

    if (status < 0) {
        perror("Exception while writing description in stfio::exportHDF5File");
        H5Fclose(file_id);
        H5close();
        exit(-1);
    }

    H5Fclose(file_id);

    return(0);
}
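
A quick sanity check for the table written above is to reopen the file and query its shape with H5TBget_table_info. A minimal sketch that could sit just before the return in the main() above, reusing its fName; nfields_out, nrecords_out and check_id are new names introduced here:

    /* Sketch only: reopen tmp.h5 and report the shape of "description". */
    hsize_t nfields_out = 0, nrecords_out = 0;
    hid_t   check_id = H5Fopen(fName, H5F_ACC_RDONLY, H5P_DEFAULT);
    H5TBget_table_info(check_id, "description", &nfields_out, &nrecords_out);
    printf("description table: %d fields, %d records\n",
           (int)nfields_out, (int)nrecords_out);
    H5Fclose(check_id);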
Exemplo n.º 26
0
int main( void )
{
 typedef struct Particle
 {
  char   name[16];
  int    lati;
  int    longi;
  float  pressure;
  double temperature;
 } Particle;

 Particle  dst_buf[NRECORDS];

 /* Calculate the size and the offsets of our struct members in memory */
 size_t dst_size =  sizeof( Particle );
 size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
                                HOFFSET( Particle, lati ),
                                HOFFSET( Particle, longi ),
                                HOFFSET( Particle, pressure ),
                                HOFFSET( Particle, temperature )};

 size_t dst_sizes[NFIELDS] = { sizeof( dst_buf[0].name),
                               sizeof( dst_buf[0].lati),
                               sizeof( dst_buf[0].longi),
                               sizeof( dst_buf[0].pressure),
                               sizeof( dst_buf[0].temperature)};


 /* Define an array of Particles */
 Particle  p_data[NRECORDS] = {
 {"zero",0,0, 0.0f, 0.0},
 {"one",10,10, 1.0f, 10.0},
 {"two",  20,20, 2.0f, 20.0},
 {"three",30,30, 3.0f, 30.0},
 {"four", 40,40, 4.0f, 40.0},
 {"five", 50,50, 5.0f, 50.0},
 {"six",  60,60, 6.0f, 60.0},
 {"seven",70,70, 7.0f, 70.0}
  };

  /* Define field information */
  const char *field_names[NFIELDS]  =
  { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
  hid_t      field_type[NFIELDS];
  hid_t      string_type;
  hid_t      file_id;
  hsize_t    chunk_size = 10;
  int        *fill_data = NULL;
  int        compress  = 0;
  herr_t     status;
  int        i;

  /* Initialize field_type */
  string_type = H5Tcopy( H5T_C_S1 );
  H5Tset_size( string_type, 16 );
  field_type[0] = string_type;
  field_type[1] = H5T_NATIVE_INT;
  field_type[2] = H5T_NATIVE_INT;
  field_type[3] = H5T_NATIVE_FLOAT;
  field_type[4] = H5T_NATIVE_DOUBLE;

  /* Create a new file using default properties. */
  file_id = H5Fcreate( "h5_table_01.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
  
  /*-------------------------------------------------------------------------
   * H5TBmake_table
   *-------------------------------------------------------------------------
   */
  
  status=H5TBmake_table ("Table Title",   /* Title of the table                  */
			 file_id,         /* HDF5 object identifier to the file  */
			 TABLE_NAME,      /* Name of the table                   */
			 NFIELDS,         /* Number of fields/columns            */
			 NRECORDS,        /* Number of records to write          */
			 dst_size,        /* Size of the struct/table row        */
			 field_names,     /* Names of the table fields           */
			 dst_offset,      /* Offsets of the struct members       */
			 field_type,      /* Type of the individual table fields */
                         chunk_size,      /* Chunking size                       */
			 fill_data,       /* Fill values (none here)             */
			 compress,        /* Enable/disable compression          */
			 p_data           /* Array with the table data           */
			 );
  
  /*-------------------------------------------------------------------------
   * H5TBread_table
   *-------------------------------------------------------------------------
   */
  
  status = H5TBread_table (file_id,      /* HDF5 object identifier        */
			   TABLE_NAME,   /* Name of the table             */
			   dst_size,     /* Size of one table record      */
			   dst_offset,   /* Offsets of the struct members */
			   dst_sizes,    /* Sizes of the struct members   */
			   dst_buf);     /* Buffer to return the data     */
  
  /* print it by rows */
  for (i=0; i<NRECORDS; i++) {
    printf ("%-5s %-5d %-5d %-5f %-5f",
	    dst_buf[i].name,
	    dst_buf[i].lati,
	    dst_buf[i].longi,
	    dst_buf[i].pressure,
	    dst_buf[i].temperature);
    printf ("\n");
  }
  
  /*-------------------------------------------------------------------------
   * end
   *-------------------------------------------------------------------------
   */

  /* Release object identifiers */
  H5Tclose (string_type);
  H5Fclose (file_id);
  
  return 0;
}
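
If more rows were needed after the initial write, the same offset and size arrays could be reused with H5TBappend_records. A minimal sketch that could be placed before H5Fclose() in the example above; the two extra Particle rows are invented for illustration:

  /* Sketch only: append two made-up records to the existing table,
     reusing dst_size, dst_offset and dst_sizes from above. */
  Particle extra[2] = { {"eight", 80, 80, 8.0f, 80.0},
                        {"nine",  90, 90, 9.0f, 90.0} };

  status = H5TBappend_records (file_id, TABLE_NAME, 2, dst_size,
                               dst_offset, dst_sizes, extra);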