Example #1
/** read solution sizes */
static int read_nvnunrnl (hid_t file_id, int *nv, int *nr, int *nl)
{
  if (H5Lexists (file_id, "/fclib_global", H5P_DEFAULT))
  {
    IO (H5LTread_dataset_int (file_id, "/fclib_global/M/n", nv));
    IO (H5LTread_dataset_int (file_id, "/fclib_global/H/n", nr));
    if (H5Lexists (file_id, "/fclib_global/G", H5P_DEFAULT))
    {
      IO (H5LTread_dataset_int (file_id, "/fclib_global/G/n", nl));
    }
    else *nl = 0;
  }
  else if (H5Lexists (file_id, "/fclib_local", H5P_DEFAULT))
  {
    *nv = 0;
    IO (H5LTread_dataset_int (file_id, "/fclib_local/W/n", nr));
    if (H5Lexists (file_id, "/fclib_local/R", H5P_DEFAULT))
    {
      IO (H5LTread_dataset_int (file_id, "/fclib_local/R/n", nl));
    }
    else *nl = 0;
  }
  else
  {
    fprintf (stderr, "ERROR: neither global nor local problem has been stored. Global or local have to be stored before solutions or guesses\n");
    return 0;
  }

  return 1;
}
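The IO and MM macros used in this and the other fclib examples are not part of the snippets. A minimal sketch of what such wrappers could look like, given purely as an assumption (the actual fclib macros may be defined differently):

/* Hypothetical error-checking wrappers in the spirit of the fclib snippets;
 * assumption only -- the real fclib macros may differ. */
#include <stdio.h>
#include <stdlib.h>

#define IO(call)                                                   \
  do {                                                             \
    if ((call) < 0)                                                \
    {                                                              \
      fprintf (stderr, "ERROR: HDF5 call failed: %s\n", #call);    \
      exit (EXIT_FAILURE);                                         \
    }                                                              \
  } while (0)

#define MM(alloc)                                                  \
  do {                                                             \
    if (!(alloc))                                                  \
    {                                                              \
      fprintf (stderr, "ERROR: out of memory: %s\n", #alloc);      \
      exit (EXIT_FAILURE);                                         \
    }                                                              \
  } while (0)

#define ASSERT(cond, ...)                                          \
  do {                                                             \
    if (!(cond))                                                   \
    {                                                              \
      fprintf (stderr, __VA_ARGS__);                               \
      exit (EXIT_FAILURE);                                         \
    }                                                              \
  } while (0)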
Example #2
int read_input_config(char *input_file, struct input_config *cfg) {
    herr_t   err = 0;
    size_t   type_size;
    hid_t    input_file_id;
    void    *p = NULL;

    cfg->rng_family            = NULL;
    cfg->levy_seed_family      = NULL;
    cfg->levy_seed_parameters  = NULL;
    cfg->levy_basis_dimension  = NULL;
    cfg->levy_basis_resolution = NULL;
    
    input_file_id = H5Fopen(input_file, H5F_ACC_RDONLY, H5P_DEFAULT);
    if (input_file_id < 0) {
        printf("Error: could not open input file %s.\n", input_file);
        return -1;
    }

    err = H5LTget_dataset_info(input_file_id, "/rng_family", NULL, NULL, &type_size);
    cfg->rng_family = malloc(type_size * sizeof(char));
    err = H5LTread_dataset_string(input_file_id, "/rng_family", cfg->rng_family);
    
    err = H5LTread_dataset_int(input_file_id, "/rng_seed", &cfg->rng_seed);
    
    err = H5LTget_dataset_info(input_file_id, "/levy_seed_family", NULL, NULL, &type_size);
    cfg->levy_seed_family = malloc(type_size * sizeof(char));
    err = H5LTread_dataset_string(input_file_id, "/levy_seed_family", cfg->levy_seed_family);

    err = H5LTread_dataset_int(input_file_id, "/levy_seed_dimension", &cfg->levy_seed_dimension);
    
    if (strcmp(cfg->levy_seed_family, "normal") == 0) {
        p = (struct normal_seed_parameters *)malloc(sizeof(struct normal_seed_parameters));
        err = read_normal_seed_parameters(input_file_id, p);
        cfg->levy_seed_parameters = (void *)p;
    }
    else if (strcmp(cfg->levy_seed_family, "generalised hyperbolic") == 0) {
        printf("Error: GH not yet supported.\n");
        err = -1;
        goto cleanup;
    }
    else {
        printf("Error: Unsupported Levy seed.\n");
        err = -1;
        goto cleanup;
    }
    
    err = H5LTread_dataset_int(input_file_id, "/levy_basis_rank", &cfg->levy_basis_rank);
    cfg->levy_basis_dimension = malloc(cfg->levy_basis_rank * sizeof(int));
    cfg->levy_basis_resolution = malloc(cfg->levy_basis_rank * sizeof(double));
    err = H5LTread_dataset_int(input_file_id, "/levy_basis_dimension", cfg->levy_basis_dimension);
    err = H5LTread_dataset_double(input_file_id, "/levy_basis_resolution", cfg->levy_basis_resolution);
    
  cleanup:
    if (input_file_id >= 0) H5Fclose(input_file_id);
    return (int)err;
}
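The struct input_config filled in above is not shown in the example. Judging only from the fields the function accesses, a plausible declaration would be roughly the following sketch (an assumption, not the project's actual header):

/* Hypothetical declaration inferred from the accesses in read_input_config;
 * the real project may declare it differently. */
struct input_config {
    char   *rng_family;             /* name of the random number generator   */
    int     rng_seed;
    char   *levy_seed_family;       /* "normal" or "generalised hyperbolic"  */
    int     levy_seed_dimension;
    void   *levy_seed_parameters;   /* family-specific parameter struct      */
    int     levy_basis_rank;
    int    *levy_basis_dimension;   /* levy_basis_rank entries               */
    double *levy_basis_resolution;  /* levy_basis_rank entries               */
};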
Example #3
int hdf5_load_int(hid_t loc_id, const string& dataset_name) {
  int val;
  herr_t status = H5LTread_dataset_int(loc_id, dataset_name.c_str(), &val);
  CHECK_GE(status, 0)
    << "Failed to load int dataset with name " << dataset_name;
  return val;
}
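A companion helper for string datasets could follow the same pattern. This is only a sketch: hdf5_load_string is not part of the original example, and it assumes the same includes, "using std::string" and CHECK macros as the snippet above.

// Hypothetical companion to hdf5_load_int; assumes the same includes,
// "using std::string" and CHECK macros as the original example.
#include <vector>

string hdf5_load_string(hid_t loc_id, const string& dataset_name) {
  size_t size = 0;
  herr_t status =
      H5LTget_dataset_info(loc_id, dataset_name.c_str(), NULL, NULL, &size);
  CHECK_GE(status, 0)
      << "Failed to query string dataset with name " << dataset_name;
  std::vector<char> buf(size + 1, '\0');  // +1 in case size excludes the NUL
  status = H5LTread_dataset_string(loc_id, dataset_name.c_str(), buf.data());
  CHECK_GE(status, 0)
      << "Failed to load string dataset with name " << dataset_name;
  return string(buf.data());
}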
Example #4
/* ------- begin -------------------------- readConvergence.c  --- */
void readConvergence(void) {
  /* This is a self-contained function to read the convergence matrix,
     written by RH. */
  const char routineName[] = "readConvergence";
  char *atmosID;
  int ncid, ncid_mpi, nx, ny;
  size_t attr_size;
  hid_t plist;
  H5T_class_t type_class;

  mpi.rh_converged = matrix_int(mpi.nx, mpi.ny);

  /* --- Open the inputdata file --- */
  if (( plist = H5Pcreate(H5P_FILE_ACCESS )) < 0) HERR(routineName);
  if (( H5Pset_fapl_mpio(plist, mpi.comm, mpi.info) ) < 0) HERR(routineName);
  if (( ncid = H5Fopen(INPUTDATA_FILE, H5F_ACC_RDWR, plist) ) < 0)
    HERR(routineName);
  if (( H5Pclose(plist) ) < 0) HERR(routineName);
  /* Get ncid of the MPI group */
  if (( ncid_mpi = H5Gopen(ncid, "mpi", H5P_DEFAULT) ) < 0) HERR(routineName);

  /* --- Consistency checks --- */
  /* Check that atmosID is the same */
  if (( H5LTget_attribute_info(ncid, "/", "atmosID", NULL, &type_class,
                               &attr_size) ) < 0) HERR(routineName);
  atmosID = (char *) malloc(attr_size + 1);
  if (( H5LTget_attribute_string(ncid, "/", "atmosID", atmosID) ) < 0)
    HERR(routineName);
  if (!strstr(atmosID, atmos.ID)) {
    sprintf(messageStr,
       "Indata file was calculated for different atmosphere (%s) than current",
	     atmosID);
    Error(WARNING, routineName, messageStr);
    }
  free(atmosID);
  /* Check that dimension sizes match */
  if (( H5LTget_attribute_int(ncid, "/", "nx", &nx) ) < 0) HERR(routineName);
  if (nx != mpi.nx) {
    sprintf(messageStr,
	    "Number of x points mismatch: expected %d, found %d.",
	    mpi.nx, (int)nx);
    Error(WARNING, routineName, messageStr);
  }
  if (( H5LTget_attribute_int(ncid, "/", "ny", &ny) ) < 0) HERR(routineName);
  if (ny != mpi.ny) {
    sprintf(messageStr,
	    "Number of y points mismatch: expected %d, found %d.",
	    mpi.ny, (int)ny);
    Error(WARNING, routineName, messageStr);
  }
  /* --- Read variable --- */
  if (( H5LTread_dataset_int(ncid_mpi, CONV_NAME,
                             mpi.rh_converged[0]) ) < 0) HERR(routineName);
  /* --- Close inputdata file --- */
  if (( H5Gclose(ncid_mpi) ) < 0) HERR(routineName);
  if (( H5Fclose(ncid) ) < 0) HERR(routineName);
  return;
}
Example #5
herr_t read_normal_seed_parameters(hid_t loc_id, struct normal_seed_parameters *p) {
    herr_t err = 0;
    err = H5LTread_dataset_int(loc_id, "/levy_seed_dimension", &p->dimension);
    p->mean       = malloc(p->dimension * sizeof(double));
    p->covariance = malloc(p->dimension * p->dimension * sizeof(double));
    err = H5LTread_dataset_double(loc_id, "/levy_seed_parameters/mean",       p->mean);
    err = H5LTread_dataset_double(loc_id, "/levy_seed_parameters/covariance", p->covariance);
    return err;
}
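As with struct input_config above, the struct normal_seed_parameters type is not shown. From the accesses in this function, a plausible sketch (an assumption, not the project's actual header):

/* Hypothetical declaration inferred from read_normal_seed_parameters;
 * the real project may declare it differently. */
struct normal_seed_parameters {
    int     dimension;     /* read from /levy_seed_dimension   */
    double *mean;          /* dimension entries                */
    double *covariance;    /* dimension * dimension entries    */
};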
Example #6
/** read local problem;
 * return problem on success; NULL on failure */
struct fclib_local* fclib_read_local (const char *path)
{
  struct fclib_local *problem;
  hid_t  file_id, main_id, id;

  if ((file_id = H5Fopen (path, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
  {
    fprintf (stderr, "ERROR: opening file failed\n");
    return NULL;
  }

  if (!H5Lexists (file_id, "/fclib_local", H5P_DEFAULT))
  {
    fprintf (stderr, "ERROR: spurious input file %s :: fclib_local group does not exists", path);
    return NULL;
  }

  MM (problem = calloc (1, sizeof (struct fclib_local)));

  IO (main_id = H5Gopen (file_id, "/fclib_local", H5P_DEFAULT));
  IO (H5LTread_dataset_int (file_id, "/fclib_local/spacedim", &problem->spacedim));

  IO (id = H5Gopen (file_id, "/fclib_local/W", H5P_DEFAULT));
  problem->W = read_matrix (id);
  IO (H5Gclose (id));

  if (H5Lexists (file_id, "/fclib_local/V", H5P_DEFAULT))
  {
    IO (id = H5Gopen (file_id, "/fclib_local/V", H5P_DEFAULT));
    problem->V = read_matrix (id);
    IO (H5Gclose (id));

    IO (id = H5Gopen (file_id, "/fclib_local/R", H5P_DEFAULT));
    problem->R = read_matrix (id);
    IO (H5Gclose (id));
  }

  IO (id = H5Gopen (file_id, "/fclib_local/vectors", H5P_DEFAULT));
  read_local_vectors (id, problem);
  IO (H5Gclose (id));

  if (H5Lexists (file_id, "/fclib_local/info", H5P_DEFAULT))
  {
    IO (id = H5Gopen (file_id, "/fclib_local/info", H5P_DEFAULT));
    problem->info = read_problem_info (id);
    IO (H5Gclose (id));
  }

  IO (H5Gclose (main_id));
  IO (H5Fclose (file_id));

  return problem;
}
Example #7
/** read global problem;
 * return problem on success; NULL on failure */
struct fclib_global* fclib_read_global (const char *path)
{
  struct fclib_global *problem;
  hid_t  file_id, main_id, id;

  if ((file_id = H5Fopen (path, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
  {
    fprintf (stderr, "ERROR: opening file failed\n");
    return NULL;
  }

  MM (problem = calloc (1, sizeof (struct fclib_global)));

  IO (main_id = H5Gopen (file_id, "/fclib_global", H5P_DEFAULT));
  IO (H5LTread_dataset_int (file_id, "/fclib_global/spacedim", &problem->spacedim));

  IO (id = H5Gopen (file_id, "/fclib_global/M", H5P_DEFAULT));
  problem->M = read_matrix (id);
  IO (H5Gclose (id));

  IO (id = H5Gopen (file_id, "/fclib_global/H", H5P_DEFAULT));
  problem->H = read_matrix (id);
  IO (H5Gclose (id));

  if (H5Lexists (file_id, "/fclib_global/G", H5P_DEFAULT))
  {
    IO (id = H5Gopen (file_id, "/fclib_global/G", H5P_DEFAULT));
    problem->G = read_matrix (id);
    IO (H5Gclose (id));
  }

  IO (id = H5Gopen (file_id, "/fclib_global/vectors", H5P_DEFAULT));
  read_global_vectors (id, problem);
  IO (H5Gclose (id));

  if (H5Lexists (file_id, "/fclib_global/info", H5P_DEFAULT))
  {
    IO (id = H5Gopen (file_id, "/fclib_global/info", H5P_DEFAULT));
    problem->info = read_problem_info (id);
    IO (H5Gclose (id));
  }

  IO (H5Gclose (main_id));
  IO (H5Fclose (file_id));

  return problem;
}
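A minimal caller for the readers above might look like the following sketch; the file name is hypothetical and cleanup of the returned structures is omitted here.

/* Usage sketch only: the file name is hypothetical and the returned
 * structures are not freed here. Assumes the fclib declarations are
 * visible (e.g. via fclib.h). */
#include <stdio.h>

int main (void)
{
  struct fclib_global *problem = fclib_read_global ("problem.hdf5");

  if (!problem)
  {
    fprintf (stderr, "ERROR: could not read global problem\n");
    return 1;
  }

  printf ("spacedim = %d, M is %d x %d\n",
          problem->spacedim, problem->M->m, problem->M->n);
  return 0;
}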
Example #8
void PHDF5fileClass::ReadPHDF5param(){

  herr_t  status;
  string  dname;
  int     datadims[3];
  double  L[3];

  dname   = "/Parameters/ncell";
  status = H5LTread_dataset_int(file_id, dname.c_str(), datadims);

  dname   = "/Parameters/LxLyLz";
  status = H5LTread_dataset_double(file_id, dname.c_str(), L);

  ndim = 3;
  if (datadims[0]<=1 || datadims[1]<=1 || datadims[2]<=1) ndim = 2;

  for (int i=0; i<ndim; i++){
    dim[i]    = datadims[i];
    LxLyLz[i] = L[i];
  }

}
Example #9
/** read initial guesses;
 * return vector of guesses on success; NULL on failure;
 * output the number of guesses in the variable pointed to by 'number_of_guesses' */
struct fclib_solution* fclib_read_guesses (const char *path, int *number_of_guesses)
{
  struct fclib_solution *guesses = NULL;
  hid_t  file_id, main_id, id;
  int nv, nr, nl, i;
  char num [128];

  if ((file_id = H5Fopen (path, H5F_ACC_RDWR, H5P_DEFAULT)) < 0)  /* read/write access */
  {
    fprintf (stderr, "ERROR: opening file failed\n");
    return 0;
  }

  if (! read_nvnunrnl (file_id, &nv, &nr, &nl)) return 0;

  if (H5Lexists (file_id, "/guesses", H5P_DEFAULT))
  {
    IO (main_id = H5Gopen (file_id, "/guesses", H5P_DEFAULT));

    IO (H5LTread_dataset_int (file_id, "/guesses/number_of_guesses", number_of_guesses));

    MM (guesses = malloc ((*number_of_guesses) * sizeof (struct fclib_solution)));

    for (i = 0; i < *number_of_guesses; i ++)
    {
      snprintf (num, 128, "%d", i+1);
      IO (id = H5Gopen (main_id, num, H5P_DEFAULT));
      read_solution (id, nv, nr, nl, &guesses [i]);
      IO (H5Gclose (id));
    }

    IO (H5Gclose (main_id));
  }

  IO (H5Fclose (file_id));

  return guesses;
}
Example #10
void bright::Reactor1G::loadlib(std::string lib)
{
    // Loads appropriate libraries for the reactor and makes them into burnup parameters [F, pi(F), di(F), BUi(F), Tij(F)].

    // HDF5 types
    hid_t  rlib;
    herr_t rstat;

    rlib = H5Fopen (lib.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);		// Reactor library

    // Initializes Burnup Parameters...
    hsize_t dimFromIso[1];
    hsize_t dimToIso[1];

    rstat = H5LTget_dataset_info(rlib, "/FromIso_zz", dimFromIso, NULL, NULL);
    rstat = H5LTget_dataset_info(rlib, "/ToIso_zz",   dimToIso,   NULL, NULL);

#ifdef _WIN32
    int * FromIso;
    int * ToIso;

    FromIso = new int [dimFromIso[0]];
    ToIso   = new int [dimToIso[0]];
#else
    int FromIso [dimFromIso[0]];
    int ToIso   [dimToIso[0]];
#endif

    rstat = H5LTread_dataset_int(rlib, "/FromIso_zz", FromIso);
    rstat = H5LTread_dataset_int(rlib, "/ToIso_zz",   ToIso);

    I.clear();
    I.insert(&FromIso[0], &FromIso[dimFromIso[0]]);
    J.clear();
    J.insert(&ToIso[0],   &ToIso[dimToIso[0]]);

    // Get Fluence Vector
    hsize_t dimsF[1]; // Read in number of data points
    rstat = H5LTget_dataset_info(rlib, "/Fluence", dimsF, NULL, NULL);
    int lenF = dimsF[0];

    // Make temp array
#ifdef _WIN32
    float * tempF;
    tempF = new float [lenF];
#else
    float tempF [lenF];
#endif

    rstat = H5LTread_dataset_float(rlib, "/Fluence", tempF);
    F.assign(&tempF[0], &tempF[lenF]);  // Fluence in [n/kb]

    for (nuc_iter i = I.begin(); i != I.end(); i++ )
    {
        std::string iso = pyne::nucname::name(*i);

        // Build BUi_F_
#ifdef _WIN32
        float * tempBUi;
        tempBUi = new float [lenF];
#else
        float tempBUi [lenF];
#endif
        rstat = H5LTread_dataset_float(rlib, ("/Burnup/" + iso).c_str(), tempBUi);
        BUi_F_[*i].assign(&tempBUi[0], &tempBUi[lenF]);

        // Build pi_F_
#ifdef _WIN32
        float * temppi;
        temppi = new float [lenF];
#else
        float temppi [lenF];
#endif
        rstat = H5LTread_dataset_float(rlib, ("/Production/" + iso).c_str(), temppi);
        pi_F_[*i].assign(&temppi[0], &temppi[lenF]);
        pi_F_[*i][0] = pyne::solve_line(0.0, F[2], pi_F_[*i][2], F[1], pi_F_[*i][1]);

        // Build di_F_
#ifdef _WIN32
        float * tempdi;
        tempdi = new float [lenF];
#else
        float tempdi [lenF];
#endif
        rstat = H5LTread_dataset_float(rlib, ("/Destruction/" + iso).c_str(), tempdi);
        di_F_[*i].assign(&tempdi[0], &tempdi[lenF]);
        di_F_[*i][0] = pyne::solve_line(0.0, F[2], di_F_[*i][2], F[1], di_F_[*i][1]);

        // Build Tij_F_
        for (int jn = 0; jn < dimToIso[0] ; jn++)
        {
            int j = ToIso[jn];
            std::string jso = pyne::nucname::name(j);

#ifdef _WIN32
            float * tempTij;
            tempTij = new float [lenF];
#else
            float tempTij [lenF];
#endif
            rstat = H5LTread_dataset_float(rlib, ("/Transmutation/" + iso + "/" + jso).c_str(), tempTij);
            Tij_F_[*i][j].assign(&tempTij[0], &tempTij[lenF]);
        };
    };
    rstat = H5Fclose(rlib);

    // Now get microscopic XS data from KAERI...
    // ...But only if the disadvantage factor is used.
    if (!use_zeta)
        return;

    // HDF5 types
    hid_t  kdblib;			// KaeriData.h5 file reference
    herr_t kdbstat;			// File status

    hsize_t xs_nfields, xs_nrows; // Number of rows and fields (named columns) in XS table

    // open the file
    kdblib = H5Fopen ( (bright::BRIGHT_DATA + "/KaeriData.h5").c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);	// KAERI Data Library

    // Get Thermal Maxwell Average Table & Field Data Dimensions
    kdbstat = H5TBget_table_info(kdblib, "/XS/ThermalMaxwellAve", &xs_nfields, &xs_nrows);

    // Creating an empty array of character strings is tricky,
    // because character strings are arrays themselves!
    char ** xs_field_names = new char * [xs_nfields];
    for (int n = 0; n < xs_nfields; n++)
        xs_field_names[n] = new char [50];

#ifdef _WIN32
    size_t * xs_field_sizes;
    size_t * xs_field_offsets;

    xs_field_sizes   = new size_t [xs_nfields];
    xs_field_offsets = new size_t [xs_nfields];
#else
    size_t xs_field_sizes   [xs_nfields];
    size_t xs_field_offsets [xs_nfields];
#endif

    size_t xs_type_size;
    kdbstat = H5TBget_field_info(kdblib, "/XS/ThermalMaxwellAve", xs_field_names, xs_field_sizes, xs_field_offsets, &xs_type_size);

    // Read the "isozz" column so that we can inteligently pick out our data
    int isozz_n = bright::find_index_char( (char *) "isozz", xs_field_names, xs_nfields);
    int * isozz = new int [xs_nrows];
    const size_t temp_xs_field_sizes_isozz_n [1] = {xs_field_sizes[isozz_n]};
    kdbstat = H5TBread_fields_name(kdblib, "/XS/ThermalMaxwellAve", xs_field_names[isozz_n], 0, xs_nrows, sizeof(int), 0, temp_xs_field_sizes_isozz_n, isozz);

    // Now, load the XS that we need.
    // NOTE: This maps metastable isotopes to their stable versions if they can't be found!
    int sigma_a_n = bright::find_index_char( (char *) "sigma_a", xs_field_names, xs_nfields);
    int sigma_s_n = bright::find_index_char( (char *) "sigma_s", xs_field_names, xs_nfields);

    for (std::set<int>::iterator i = bright::track_nucs.begin(); i != bright::track_nucs.end(); i++)
    {
        int iso_n = bright::find_index<int>(*i, isozz, xs_nrows);

        if (iso_n < 0)
            iso_n = bright::find_index<int>(10*((*i)/10), isozz);

        if (iso_n < 0)
        {
            sigma_a_therm[*i] = 0.0;
            sigma_s_therm[*i] = 0.0;
            continue;
        };

        double * iso_sig_a = new double [1];
        double * iso_sig_s = new double [1];
        const size_t temp_xs_field_sizes_sigma_a_n [1] = {xs_field_sizes[sigma_a_n]};
        const size_t temp_xs_field_sizes_sigma_s_n [1] = {xs_field_sizes[sigma_s_n]};

        kdbstat = H5TBread_fields_name(kdblib, "/XS/ThermalMaxwellAve", xs_field_names[sigma_a_n], iso_n, 1, sizeof(double), 0, temp_xs_field_sizes_sigma_a_n, iso_sig_a);
        kdbstat = H5TBread_fields_name(kdblib, "/XS/ThermalMaxwellAve", xs_field_names[sigma_s_n], iso_n, 1, sizeof(double), 0, temp_xs_field_sizes_sigma_s_n, iso_sig_s);
        sigma_a_therm[*i] = iso_sig_a[0];
        sigma_s_therm[*i] = iso_sig_s[0];
    };

    kdbstat = H5Fclose(kdblib);

    return;
};
Example #11
static int test_dsets( void )
{
    int     rank     = 2;
    hsize_t dims[2]  = {2,3};
    hid_t   file_id;
    hid_t   dataset_id;
    char    data_char_in[DIM]    = {1,2,3,4,5,6};
    char    data_char_out[DIM];
    short   data_short_in[DIM]   = {1,2,3,4,5,6};
    short   data_short_out[DIM];
    int     data_int_in[DIM]     = {1,2,3,4,5,6};
    int     data_int_out[DIM];
    long    data_long_in[DIM]    = {1,2,3,4,5,6};
    long    data_long_out[DIM];
    float   data_float_in[DIM]   = {1,2,3,4,5,6};
    float   data_float_out[DIM];
    double  data_double_in[DIM]  = {1,2,3,4,5,6};
    double  data_double_out[DIM];
    const char    *data_string_in = "This is a string";
    char    data_string_out[20];
    int     i;


    /* Create a new file using default properties. */
    file_id = H5Fcreate( FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );

    /*-------------------------------------------------------------------------
    * H5LTmake_dataset test
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset");

    /* Make dataset */
    if ( H5LTmake_dataset( file_id, DSET0_NAME, rank, dims, H5T_NATIVE_INT, data_int_in ) < 0 )
        goto out;

    /* Read dataset using the basic HDF5 API */

    if ( ( dataset_id = H5Dopen2(file_id, DSET0_NAME, H5P_DEFAULT) ) < 0 )
        goto out;

    if ( H5Dread ( dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data_int_out ) < 0 )
        goto out;

    if ( H5Dclose( dataset_id ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_int_in[i] != data_int_out[i] ) {
            goto out;
        }
    }

    PASSED();

    /*-------------------------------------------------------------------------
    * read using the LT function H5LTread_dataset
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTread_dataset");

    if ( H5LTread_dataset( file_id, DSET0_NAME, H5T_NATIVE_INT, data_int_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_int_in[i] != data_int_out[i] ) {
            goto out;
        }
    }

    PASSED();

    /*-------------------------------------------------------------------------
    * test the H5LTmake_dataset_ functions
    *-------------------------------------------------------------------------
    */


    /*-------------------------------------------------------------------------
    * H5LTmake_dataset_char
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset_char");

    /* Make dataset char */
    if ( H5LTmake_dataset_char( file_id, DSET1_NAME, rank, dims, data_char_in ) < 0 )
        goto out;

    /* Read dataset */
    if ( H5LTread_dataset( file_id, DSET1_NAME, H5T_NATIVE_CHAR, data_char_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_char_in[i] != data_char_out[i] ) {
            goto out;
        }
    }

    /* Read dataset */
    if ( H5LTread_dataset_char( file_id, DSET1_NAME, data_char_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_char_in[i] != data_char_out[i] ) {
            goto out;
        }
    }

    PASSED();


    /*-------------------------------------------------------------------------
    * H5LTmake_dataset_short
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset_short");

    /* Make dataset short */
    if ( H5LTmake_dataset_short( file_id, DSET2_NAME, rank, dims, data_short_in ) < 0 )
        goto out;

    /* Read dataset */
    if ( H5LTread_dataset( file_id, DSET2_NAME, H5T_NATIVE_SHORT, data_short_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_short_in[i] != data_short_out[i] ) {
            goto out;
        }
    }

    /* Read dataset */
    if ( H5LTread_dataset_short( file_id, DSET2_NAME, data_short_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_short_in[i] != data_short_out[i] ) {
            goto out;
        }
    }

    PASSED();

    /*-------------------------------------------------------------------------
    * H5LTmake_dataset_int
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset_int");

    /* Make dataset int */
    if ( H5LTmake_dataset_int( file_id, DSET3_NAME, rank, dims, data_int_in ) < 0 )
        goto out;

    /* Read dataset */
    if ( H5LTread_dataset( file_id, DSET3_NAME, H5T_NATIVE_INT, data_int_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_int_in[i] != data_int_out[i] ) {
            goto out;
        }
    }

    /* Read dataset */
    if ( H5LTread_dataset_int( file_id, DSET3_NAME, data_int_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_int_in[i] != data_int_out[i] ) {
            goto out;
        }
    }

    PASSED();


    /*-------------------------------------------------------------------------
    * H5LTmake_dataset_long
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset_long");

    /* Make dataset long */
    if ( H5LTmake_dataset_long( file_id, DSET4_NAME, rank, dims, data_long_in ) < 0 )
        goto out;

    /* Read dataset */
    if ( H5LTread_dataset( file_id, DSET4_NAME, H5T_NATIVE_LONG, data_long_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_long_in[i] != data_long_out[i] ) {
            goto out;
        }
    }

    /* Read dataset */
    if ( H5LTread_dataset_long( file_id, DSET4_NAME, data_long_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_long_in[i] != data_long_out[i] ) {
            goto out;
        }
    }

    PASSED();


    /*-------------------------------------------------------------------------
    * H5LTmake_dataset_float
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset_float");

    /* Make dataset float */
    if ( H5LTmake_dataset_float( file_id, DSET5_NAME, rank, dims, data_float_in ) < 0 )
        goto out;

    /* Read dataset */
    if ( H5LTread_dataset( file_id, DSET5_NAME, H5T_NATIVE_FLOAT, data_float_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_float_in[i] != data_float_out[i] ) {
            goto out;
        }
    }

    /* Read dataset */
    if ( H5LTread_dataset_float( file_id, DSET5_NAME, data_float_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_float_in[i] != data_float_out[i] ) {
            goto out;
        }
    }

    PASSED();


    /*-------------------------------------------------------------------------
    * H5LTmake_dataset_double
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset_double");

    /* Make dataset double */
    if ( H5LTmake_dataset_double( file_id, DSET6_NAME, rank, dims, data_double_in ) < 0 )
        goto out;

    /* Read dataset */
    if ( H5LTread_dataset( file_id, DSET6_NAME, H5T_NATIVE_DOUBLE, data_double_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_double_in[i] != data_double_out[i] ) {
            goto out;
        }
    }

    /* Read dataset */
    if ( H5LTread_dataset_double( file_id, DSET6_NAME, data_double_out ) < 0 )
        goto out;

    for (i = 0; i < DIM; i++)
    {
        if ( data_double_in[i] != data_double_out[i] ) {
            goto out;
        }
    }

    PASSED();


    /*-------------------------------------------------------------------------
    * H5LTmake_dataset_string
    *-------------------------------------------------------------------------
    */

    TESTING("H5LTmake_dataset_string");

    /* Make dataset string */
    if ( H5LTmake_dataset_string(file_id,DSET7_NAME,data_string_in) < 0 )
        goto out;

    /* Read dataset */
    if ( H5LTread_dataset_string(file_id,DSET7_NAME,data_string_out) < 0 )
        goto out;

    if ( strcmp(data_string_in,data_string_out) != 0 )
        goto out;



    /*-------------------------------------------------------------------------
    * end tests
    *-------------------------------------------------------------------------
    */

    /* Close the file. */
    H5Fclose( file_id );

    PASSED();


    return 0;

out:
    /* Close the file. */
    H5_FAILED();
    return -1;
}
Example #12
template <typename Dtype>
void HDF5GeneralDataLayer<Dtype>::LoadGeneralHDF5FileData(const char* filename) {
  DLOG(INFO) << "Loading the general HDF5 file " << filename;
  hid_t file_id = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
  if (file_id < 0) {
    LOG(ERROR) << "Failed opening HDF5 file " << filename;
  }

  HDF5GeneralDataParameter data_param = this->layer_param_.hdf5_general_data_param();
  int fieldNum = data_param.field_size();
  hdf_blobs_.resize(fieldNum);

  const int MIN_DATA_DIM = 1;
  const int MAX_DATA_DIM = 4;
  for(int i = 0; i < fieldNum; ++i){
	  //LOG(INFO) << "Data type: " << data_param.datatype(i).data();
	  if(i < data_param.datatype_size() && 
      strcmp(data_param.datatype(i).data(), "int8") == 0){
		  
      // We take out the io functions here
      const char* dataset_name_ = data_param.field(i).data();
      hdf_blobs_[i] = shared_ptr<Blob<Dtype> >(new Blob<Dtype>());

		  CHECK(H5LTfind_dataset(file_id, dataset_name_))
        << "Failed to find HDF5 dataset " << dataset_name_;
      // Verify that the number of dimensions is in the accepted range.
      herr_t status;
      int ndims;
      status = H5LTget_dataset_ndims(file_id, dataset_name_, &ndims);
      CHECK_GE(status, 0) << "Failed to get dataset ndims for " << dataset_name_;
      CHECK_GE(ndims, MIN_DATA_DIM);
      CHECK_LE(ndims, MAX_DATA_DIM);
      
      // Verify that the data format is what we expect: int8
      std::vector<hsize_t> dims(ndims);
      H5T_class_t class_;
      status = H5LTget_dataset_info(file_id, dataset_name_, dims.data(), &class_, NULL);
      CHECK_GE(status, 0) << "Failed to get dataset info for " << dataset_name_;
      CHECK_EQ(class_, H5T_INTEGER) << "Expected integer data";

      vector<int> blob_dims(dims.size());
      for (int j = 0; j < dims.size(); ++j) {
        blob_dims[j] = dims[j];
      }
      hdf_blobs_[i]->Reshape(blob_dims);
      std::cout<<"Trying to allocate memories!\n";
		  int* buffer_data = new int[hdf_blobs_[i]->count()];
      std::cout<<"Memories loaded!!!\n";
		  status = H5LTread_dataset_int(file_id, dataset_name_, buffer_data);
		  CHECK_GE(status, 0) << "Failed to read int8 dataset " << dataset_name_;

		  Dtype* target_data = hdf_blobs_[i]->mutable_cpu_data();
		  for(int j = 0; j < hdf_blobs_[i]->count(); j++){
			  //LOG(INFO) << Dtype(buffer_data[j]);
			  target_data[j] = Dtype(buffer_data[j]);
		  }
		  delete[] buffer_data;

	  }else{
      // The dataset is still the float32 datatype
      hdf_blobs_[i] = shared_ptr<Blob<Dtype> >(new Blob<Dtype>());
		  hdf5_load_nd_dataset(file_id, data_param.field(i).data(),
        MIN_DATA_DIM, MAX_DATA_DIM, hdf_blobs_[i].get());
	  }
  }

  herr_t status = H5Fclose(file_id);
  CHECK_GE(status, 0) << "Failed to close HDF5 file " << filename;

  for(int i = 1; i < fieldNum; ++i){
	  CHECK_EQ(hdf_blobs_[0]->num(), hdf_blobs_[i]->num());
  }
  data_permutation_.clear();
  data_permutation_.resize(hdf_blobs_[0]->shape(0));
  for (int i = 0; i < hdf_blobs_[0]->shape(0); i++)
    data_permutation_[i] = i;
  //TODO: DATA SHUFFLE
  //LOG(INFO) << "Successully loaded " << data_blob_.num() << " rows";
}
Example #13
/*+++++++++++++++++++++++++ SDMF version 2.4 ++++++++++++++++++++++++++++++*/
float SDMF_orbitPhaseDiff( int orbit )
{
     register hsize_t ni;

     char   rsp_file[MAX_STRING_LENGTH];

     hsize_t nfields, num_roe;
     hid_t   file_id;
     int     *orbitList = NULL;
     float   orbitPhaseDiff = 0.092f;

     const char   roe_flname[]  = "ROE_EXC_all.h5";
     const char   roe_tblname[] = "roe_entry";
/*
 * open output HDF5-file
 */
     (void) snprintf( rsp_file, MAX_STRING_LENGTH, "./%s", roe_flname );
     H5E_BEGIN_TRY {
          file_id = H5Fopen( rsp_file, H5F_ACC_RDONLY, H5P_DEFAULT );
     } H5E_END_TRY;
     if ( file_id < 0 ) {
          (void) snprintf( rsp_file, MAX_STRING_LENGTH, 
                           "%s/%s", DATA_DIR, roe_flname );
          file_id = H5Fopen( rsp_file, H5F_ACC_RDONLY, H5P_DEFAULT );
          if ( file_id < 0 )
               NADC_GOTO_ERROR( NADC_ERR_HDF_FILE, rsp_file );
     }
/*
 * read info_h5 records
 */
     (void) H5TBget_table_info( file_id, roe_tblname, &nfields, &num_roe );
/*
 * read Orbit column
 */
     if ( num_roe == 0 ) return orbitPhaseDiff;

     orbitList = (int *) malloc( num_roe * sizeof( int ));
     if ( orbitList == NULL ) 
	  NADC_GOTO_ERROR( NADC_ERR_ALLOC, "orbitList" );
     if ( H5LTread_dataset_int( file_id, "orbitList", orbitList ) < 0 )
	  NADC_GOTO_ERROR( NADC_ERR_HDF_RD, "orbitList" );

     for ( ni = 0; ni < num_roe; ni++ ) {
	  if ( orbitList[ni] == orbit ) break;
     }

     if ( ni < num_roe ) {
	  double ECL_Exit, ECL_Entry, Period;

	  const size_t field_offset = 0;
	  const size_t dst_sizes    = sizeof( double );

	  H5TBread_fields_name( file_id, roe_tblname, "ECL_EXIT", ni, 1, 
				sizeof(double), &field_offset, &dst_sizes, 
				&ECL_Exit );
	  H5TBread_fields_name( file_id, roe_tblname, "ECL_ENTRY", ni, 1,
				sizeof(double), &field_offset, &dst_sizes, 
				&ECL_Entry );
	  H5TBread_fields_name( file_id, roe_tblname, "PERIOD", ni, 1,
				sizeof(double), &field_offset, &dst_sizes, 
				&Period );
	  orbitPhaseDiff = (float) ((ECL_Entry - ECL_Exit) / Period - 0.5) / 2;
     }
     (void) H5Fclose( file_id );
done:
     if ( orbitList != NULL ) free( orbitList );
     return orbitPhaseDiff;
}
Example #14
/** read matrix */
struct fclib_matrix* read_matrix (hid_t id)
{
  struct fclib_matrix *mat;

  MM (mat = malloc (sizeof (struct fclib_matrix)));
 
  IO (H5LTread_dataset_int (id, "nzmax", &mat->nzmax));
  IO (H5LTread_dataset_int (id, "m", &mat->m));
  IO (H5LTread_dataset_int (id, "n", &mat->n));
  IO (H5LTread_dataset_int (id, "nz", &mat->nz));

  if (mat->nz >= 0) /* triplet */
  {
    MM (mat->p = malloc (sizeof (int [mat->nz])));
    MM (mat->i = malloc (sizeof (int [mat->nz])));
    IO (H5LTread_dataset_int (id, "p", mat->p));
    IO (H5LTread_dataset_int (id, "i", mat->i));
  }
  else if (mat->nz == -1) /* csc */
  {
    MM (mat->p = malloc (sizeof (int [mat->n+1])));
    MM (mat->i = malloc (sizeof (int [mat->nzmax])));
    IO (H5LTread_dataset_int (id, "p", mat->p));
    IO (H5LTread_dataset_int (id, "i", mat->i));
  }
  else if (mat->nz == -2) /* csr */
  {
    MM (mat->p = malloc (sizeof (int [mat->m+1])));
    MM (mat->i = malloc (sizeof (int [mat->nzmax])));
    IO (H5LTread_dataset_int (id, "p", mat->p));
    IO (H5LTread_dataset_int (id, "i", mat->i));
  }
  else ASSERT (0, "ERROR: unkown sparse matrix type => fclib_matrix->nz = %d\n", mat->nz);

  MM (mat->x = malloc (sizeof (double [mat->nzmax])));
  IO (H5LTread_dataset_double (id, "x", mat->x));

  if (H5LTfind_dataset (id, "conditioning"))
  {
    H5T_class_t class_id;
    hsize_t dim;
    size_t size;

    MM (mat->info = malloc (sizeof (struct fclib_matrix_info)));
    if (H5LTfind_dataset (id, "comment"))
    {
      IO (H5LTget_dataset_info  (id, "comment", &dim, &class_id, &size));
      MM (mat->info->comment = malloc (sizeof (char [size])));
      IO (H5LTread_dataset_string (id, "comment", mat->info->comment));
    }
    else mat->info->comment = NULL;
    IO (H5LTread_dataset_double (id, "conditioning", &mat->info->conditioning));
    IO (H5LTread_dataset_double (id, "determinant", &mat->info->determinant));
    IO (H5LTread_dataset_int (id, "rank", &mat->info->rank));
  }
  else
  {
    mat->info = NULL;
  }

  return mat;
}
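For reference, the fields that read_matrix fills imply matrix structures roughly like the following sketch; consult fclib.h for the authoritative definitions.

/* Sketch of the matrix structures implied by read_matrix; see fclib.h
 * for the authoritative definitions. */
struct fclib_matrix_info
{
  char   *comment;          /* optional free-text comment                     */
  double  conditioning;
  double  determinant;
  int     rank;
};

struct fclib_matrix
{
  int     nzmax;            /* maximum number of entries                      */
  int     m;                /* number of rows                                 */
  int     n;                /* number of columns                              */
  int    *p;                /* CSC column / CSR row pointers, or triplet rows */
  int    *i;                /* row or column indices                          */
  double *x;                /* numerical values                               */
  int     nz;               /* >= 0: triplet, -1: CSC, -2: CSR                */
  struct fclib_matrix_info *info;   /* optional extra information             */
};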