Example No. 1
	typename F::return_value
	iterate(F & functor,
                H5_index_t index_type=H5_INDEX_CRT_ORDER,
                H5_iter_order_t order=H5_ITER_INC,
                hsize_t *idx=0) const {
		h5e::check_error(H5Literate(_self, index_type, order, idx,
					    &F::iterate,
					    static_cast<void*>(&functor)));
		return functor.value;
	}
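In the wrapper above, F is presumably a template parameter of the enclosing class or method: it must expose a return_value typedef, a static iterate member with the H5L_iterate_t signature (which is forwarded straight to H5Literate), and a value member that the wrapper returns. A minimal sketch of such a functor, written here as an assumption rather than code from the original project, might look like:

#include <hdf5.h>
#include <string>
#include <vector>

// Hypothetical functor that collects link names; it satisfies what the
// iterate() wrapper above expects from F.
struct name_collector {
    typedef std::vector<std::string> return_value;
    return_value value;

    // Same signature as H5L_iterate_t, so &name_collector::iterate can be
    // handed directly to H5Literate.
    static herr_t iterate(hid_t /*group*/, const char *name,
                          const H5L_info_t * /*info*/, void *op_data) {
        static_cast<name_collector *>(op_data)->value.push_back(name);
        return 0;   // 0 = continue iterating over the remaining links
    }
};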
Example No. 2
//-*****************************************************************************
OrData::OrData( ObjectHeaderPtr iHeader,
                H5Node & iParentGroup,
                int32_t iArchiveVersion )
    : m_children( NULL )
{
    ABCA_ASSERT( iHeader, "Invalid header" );
    ABCA_ASSERT( iParentGroup.isValidObject(), "Invalid group" );

    m_group = OpenGroup( iParentGroup, iHeader->getName().c_str() );
    ABCA_ASSERT( m_group.isValidObject(),
        "Could not open object group: "
        << iHeader->getFullName() );

    std::vector<std::string> objNames;

    herr_t status = H5Literate( m_group.getObject(),
                                H5_INDEX_CRT_ORDER,
                                H5_ITER_INC,
                                NULL,
                                VisitAllLinksCB,
                                ( void * )&objNames );

    ABCA_ASSERT( status >= 0,
                 "OrData::OrData: H5Literate failed" );

    std::vector < std::string >::iterator namesIt;
    uint32_t i = 0;
    if ( !objNames.empty() )
    {
        m_children = new Child[ objNames.size() ];
    }

    std::string parentFullName = iHeader->getFullName();
    if ( parentFullName != "/" )
    {
        parentFullName += "/";
    }

    for ( namesIt = objNames.begin(); namesIt != objNames.end();
          ++namesIt, ++i )
    {
        m_childrenMap[ *namesIt ] = i;

        m_children[i].header.reset( new AbcA::ObjectHeader( *namesIt,
            parentFullName + *namesIt, AbcA::MetaData() ) );
        m_children[i].loadedMetaData = false;
    }

    m_oldGroup = m_group;


    m_data = Alembic::Util::shared_ptr<CprData>(
        new CprData( m_group, iArchiveVersion, ".prop" ) );
}
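VisitAllLinksCB is defined elsewhere in Alembic; given that op_data here is the address of a std::vector<std::string>, a callback of roughly the following shape would produce the list of child names used below (a sketch under that assumption, not the Alembic source):

#include <hdf5.h>
#include <string>
#include <vector>

static herr_t VisitAllLinksCB( hid_t, const char *iName,
                               const H5L_info_t *, void *iOpData )
{
    std::vector<std::string> *names =
        static_cast<std::vector<std::string> *>( iOpData );
    names->push_back( iName );   // record this child's link name
    return 0;                    // keep iterating
}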
Example No. 3
static char* name(ndio_hdf5_t self)
{ static const char name_[]="data";
  if(self->name) return self->name;
  if(!self->isr)
  { TRY(self->name=(char*)malloc(1+countof(name_)));
    strcpy(self->name,name_);
  }
  else
  { HTRY(H5Literate(self->file,H5_INDEX_NAME,H5_ITER_NATIVE,NULL,query_first_name,&self->name));
  }
  return self->name;
Error:
  return 0;
}
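Here op_data is &self->name, i.e. a char **, and only the first link name is wanted. A plausible query_first_name (an assumption, not the original ndio-hdf5 source) copies that name and returns a positive value, which makes H5Literate stop after the first link:

#include <hdf5.h>
#include <string.h>

static herr_t query_first_name(hid_t group, const char *name,
                               const H5L_info_t *info, void *op_data)
{ char **out=(char**)op_data;
  (void)group; (void)info;
  *out=strdup(name);   /* caller owns and frees the copy */
  return 1;            /* >0 stops the iteration after the first link */
}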
Example No. 4
int getDataSetId_v1(int _iFile)
{
    herr_t status = 0;
    int iDatasetId = 0;
    hsize_t idx = 0;

    /*
    * Begin iteration.
    */
    status = H5Literate(_iFile, H5_INDEX_NAME, H5_ITER_NATIVE, &idx, op_func_v1, &iDatasetId);
    if (status < 0)
    {
        return -1;
    }

    return iDatasetId;
}
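op_func_v1 is not shown here; since op_data is an int * that receives a dataset id, a hedged reconstruction (assuming the HDF5 1.8-style H5Oget_info_by_name and H5Dopen calls used elsewhere on this page) would stop at the first dataset-typed link:

#include <hdf5.h>

static herr_t op_func_v1(hid_t loc_id, const char *name, const H5L_info_t *info, void *op_data)
{
    H5O_info_t oinfo;
    (void)info;

    if (H5Oget_info_by_name(loc_id, name, &oinfo, H5P_DEFAULT) < 0)
    {
        return -1;                  /* abort the iteration with an error */
    }

    if (oinfo.type == H5O_TYPE_DATASET)
    {
        *(int *)op_data = H5Dopen(loc_id, name, H5P_DEFAULT);
        return 1;                   /* found a dataset, stop iterating */
    }

    return 0;                       /* not a dataset, keep going */
}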
Example No. 5
std::string H5Group::ls() const
{
    std::ostringstream os;
    herr_t err;
    OpDataPrintLs opdata;
    opdata.parent = const_cast<H5Group *>(this);
    opdata.os = &os;
    hsize_t idx = 0;

    err = H5Literate(group, H5_INDEX_NAME, H5_ITER_INC, &idx, printLsInfo, &opdata);
    if (err < 0)
    {
        throw H5Exception(__LINE__, __FILE__, _("Cannot list group contents"));
    }

    return os.str();
}
Example No. 6
int VsFilter::visitGroup(hid_t locId, const char* name, void* opdata) {
  RECURSION_DATA* data = static_cast< RECURSION_DATA* >(opdata);
  VsGroup* parent = static_cast< VsGroup*> (data->parent);
  VsRegistry* registry = data->registry;
  
  VsLog::debugLog() << "VsFilter::visitGroup: node '" << name
    << "' is a group." << std::endl;

  if (std::string(name) == "..")
  {
    VsLog::debugLog() << "VsFilter::visitGroup: skipping group '..'" << std::endl;
    return 0;
  }

  hid_t groupId = H5Gopen(locId, name, H5P_DEFAULT);
  
  //If unable to get a handle to the hdf5 object, we just drop the object
  //But return 0 to continue iterating over objects
  if (groupId < 0) {
    VsLog::errorLog() <<"VsFilter::visitGroup() - Unable to open group with name " <<name <<std::endl;
    VsLog::errorLog() <<"VsFilter::visitGroup() - This object and all children will be dropped." <<std::endl;
    return 0;
  }
  
  VsGroup* newGroup = new VsGroup(registry, parent, name, groupId);
  
  RECURSION_DATA nextLevelData;
  nextLevelData.registry = registry;
  nextLevelData.parent = newGroup;
  
  // Recurse over all attributes of the group
  VsLog::debugLog() <<"VsFilter::visitGroup(): Recursing on attributes of group " <<newGroup->getFullName() <<std::endl;
  H5Aiterate(groupId, H5_INDEX_NAME, H5_ITER_INC, NULL, visitAttrib, &nextLevelData);

  // Recurse to examine child groups
  VsLog::debugLog() <<"VsFilter::visitGroup(): Recursing on children of group " <<newGroup->getFullName() <<std::endl;
  H5Literate(groupId, H5_INDEX_NAME, H5_ITER_INC, NULL, visitLinks, &nextLevelData);

  // Not needed because the newly declared VsGroup takes ownership of the id
  // and will call H5Gclose when it is deleted
  // H5Gclose(groupId);

  VsLog::debugLog() <<"VsFilter::visitGroup(): Returning." <<std::endl;
  return 0;
}
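visitLinks is the companion callback passed to H5Literate above. A hypothetical dispatcher in the same spirit (not the actual VizSchema source), meant to sit alongside visitGroup, would look up each link's object type and forward groups to visitGroup, with datasets handled by a sibling visitDataset in the same way:

static herr_t visitLinks(hid_t locId, const char* name,
                         const H5L_info_t* linfo, void* opdata) {
  (void)linfo;
  H5O_info_t oinfo;
  // If the object behind the link cannot be inspected, drop it but keep iterating.
  if (H5Oget_info_by_name(locId, name, &oinfo, H5P_DEFAULT) < 0)
    return 0;
  if (oinfo.type == H5O_TYPE_GROUP)
    return VsFilter::visitGroup(locId, name, opdata);
  // Other object types (datasets, named datatypes) would be dispatched here.
  return 0;
}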
Example No. 7
void AbstractHdf5Converter<ELEMENT_DIM,SPACE_DIM>::GenerateListOfDatasets(const FileFinder& rH5Folder,
                                                                          const std::string& rFileName)
{
    /*
     * Open file.
     */
    std::string file_name = rH5Folder.GetAbsolutePath() + rFileName + ".h5";
    hid_t file = H5Fopen(file_name.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);

    /*
     * Begin HDF5 iteration, calls a method that populates mDatasetNames.
     */
#if H5_VERS_MAJOR >= 1 && H5_VERS_MINOR >=8
    //std::cout << "HDF5 1.8.x or above detected.\n";
    H5Literate(file, H5_INDEX_NAME, H5_ITER_NATIVE, NULL, op_func, &mDatasetNames);
#else
    //std::cout << "HDF5 1.6.x  detected.\n";
    H5Giterate(file, "/", NULL, op_func, &mDatasetNames);
#endif

    H5Fclose(file);

    // Remove datasets that end in "_Unlimited", as these are paired up with other ones!
    std::string ending = "_Unlimited";

    // Strip off the independent variables from the list
    std::vector<std::string>::iterator iter;
    for (iter = mDatasetNames.begin(); iter != mDatasetNames.end(); )
    {
        // If the dataset name is "Time" OR ...
        // it is longer than the ending we are looking for ("_Unlimited") ...
        // ... AND it ends with the string we are looking for,
        // then erase it.
        if ( (*(iter) == "Time") ||
             ( ( iter->length() > ending.length() ) &&
               ( 0 == iter->compare(iter->length() - ending.length(), ending.length(), ending) ) ) )
        {
            iter = mDatasetNames.erase(iter);
        }
        else
        {
            ++iter;
        }
    }
}
Example No. 8
void H5Group::ls(std::vector<std::string> & name, std::vector<std::string> & type) const
{
    herr_t err;
    OpDataGetLs opdata(const_cast<H5Group *>(this), &name, &type);
    hsize_t idx = 0;

    err = H5Literate(group, H5_INDEX_NAME, H5_ITER_INC, &idx, getLsInfo, &opdata);
    if (err < 0)
    {
        throw H5Exception(__LINE__, __FILE__, _("Cannot list group links."));
    }

    idx = 0;
    err = H5Aiterate(group, H5_INDEX_NAME, H5_ITER_INC, &idx, H5Object::getLsAttributes, &opdata);
    if (err < 0)
    {
        throw H5Exception(__LINE__, __FILE__, _("Cannot list group attributes."));
    }
}
Example No. 9
VsFile* VsFilter::readFile(VsRegistry* registry, std::string fileName) {
  hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
  H5Pset_fclose_degree(fapl, H5F_CLOSE_SEMI);
  hid_t fileId = H5Fopen(fileName.c_str(), H5F_ACC_RDONLY, fapl);
  H5Pclose(fapl);
  if (fileId < 0) {
    VsLog::errorLog() << "VsFile::readFile(): HDF5 error opening the file '"
      << fileName << "'." << std::endl;
    return NULL;
  }
  
  VsFile* file = new VsFile(registry, fileName, fileId);
 
  RECURSION_DATA data;
  data.registry = registry;
  data.parent = NULL;
  H5Literate(fileId, H5_INDEX_NAME, H5_ITER_INC, 0, visitLinks, &data);
  
  return file;
}
Example No. 10
HDF5Err
HDF5Group::deleteSubtree(HDF5Id group_id, const char *group_name,
                         const H5L_info_t *NK_UNUSED_PARAM(group_info), 
                         void *NK_UNUSED_PARAM(op_data))
{
    // open the child group
    HDF5Group child_group;
    child_group.open(group_id, String(group_name));

    // iterate over the group's children
    HDF5Size iter_index = 0;
    H5Literate(child_group.id(), H5_INDEX_CRT_ORDER, H5_ITER_NATIVE, 
               &iter_index, deleteSubtree, NULL);

    // close the child group
    child_group.close();

    // delete the link
    H5Ldelete(group_id, group_name, H5P_DEFAULT);

    return 0;
}
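Since deleteSubtree itself has the link-iteration shape (assuming HDF5Err and HDF5Id alias herr_t and hid_t, and that the method is static), a caller would presumably kick off the recursive removal of a group's contents with something like the following usage sketch, where parent_id is a hypothetical id for the group to empty:

hsize_t idx = 0;
/* parent_id is hypothetical: the group whose entire contents should be removed */
H5Literate(parent_id, H5_INDEX_CRT_ORDER, H5_ITER_NATIVE, &idx,
           HDF5Group::deleteSubtree, NULL);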
Example No. 11
/*
 * Class:     hdf_hdf5lib_H5
 * Method:    H5Literate
 * Signature: (JIIJLjava/lang/Object;Ljava/lang/Object;)I
 */
JNIEXPORT jint JNICALL
Java_hdf_hdf5lib_H5_H5Literate
    (JNIEnv *env, jclass clss, jlong grp_id, jint idx_type, jint order,
        jlong idx, jobject callback_op, jobject op_data)
{
    hsize_t       start_idx = (hsize_t)idx;
    herr_t        status = -1;
    cb_wrapper wrapper = {callback_op, op_data};

    ENVPTR->GetJavaVM(ENVPAR &jvm);

    if ((op_data == NULL) || (callback_op == NULL)) {
        h5nullArgument(env,  "H5Literate:  op_data or callback_op is NULL");
    } /* end if */
    else {
        status = H5Literate((hid_t)grp_id, (H5_index_t)idx_type, (H5_iter_order_t)order, (hsize_t*)&start_idx, (H5L_iterate_t)H5L_iterate_cb, (void*)&wrapper);

        if (status < 0)
            h5libraryError(env);
    } /* end else */

    return status;
} /* end Java_hdf_hdf5lib_H5_H5Literate */
Example No. 12
value hdf5_h5l_iterate(value group_v, value index_type_v, value order_v, value idx_v,
  value op_v, value op_data_v)
{
  CAMLparam5(group_v, index_type_v, order_v, idx_v, op_v);
  CAMLxparam1(op_data_v);
  CAMLlocal1(exception);

  struct operator_data op_data;
  hsize_t idx, ret;
  op_data.callback      = &op_v;
  op_data.operator_data = &op_data_v;
  op_data.exception     = &exception;
  idx = Is_block(idx_v) ? Int_val(Field(Field(idx_v, 0), 0)) : 0;
  exception = Val_unit;

  ret = H5Literate(Hid_val(group_v), H5_index_val(index_type_v),
    H5_iter_order_val(order_v), Is_block(idx_v) ? &idx : NULL, hdf5_h5l_operator,
    &op_data);
  if (Is_block(idx_v))
    Store_field(Field(idx_v, 0), 0, Val_int(idx));
  if (exception != Val_unit)
    caml_raise(exception);
  CAMLreturn(Val_h5_iter(ret));
}
Example No. 13
int main (void)
{
  hid_t    file;
  hid_t    grp;
  hid_t    dataset, dataspace;
  hid_t    plist;
  
  herr_t   status;
  hsize_t  dims[2];
  hsize_t  cdims[2];
  
  int      idx_f, idx_g;
  
  /*
   * Create a file.
   */
  file = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
  
  /*
   * Create a group in the file.
   */
  grp = H5Gcreate(file, "/Data", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  
  /*
   * Create dataset "Compressed Data" in the group using absolute
   * name. Dataset creation property list is modified to use
   * GZIP compression with the compression effort set to 6.
   * Note that compression can be used only when dataset is chunked.
   */
  dims[0] = 1000;
  dims[1] = 20;
  cdims[0] = 20;
  cdims[1] = 20;
  dataspace = H5Screate_simple(RANK, dims, NULL);
  plist     = H5Pcreate(H5P_DATASET_CREATE);
  H5Pset_chunk(plist, 2, cdims);
  H5Pset_deflate( plist, 6);
  dataset = H5Dcreate(file, "/Data/Compressed_Data", H5T_NATIVE_INT,
		       dataspace, H5P_DEFAULT, plist, H5P_DEFAULT);
  /*
   * Close the first dataset.
   */
  H5Sclose(dataspace);
  H5Dclose(dataset);
  
  /*
   * Create the second dataset.
   */
  dims[0] = 500;
  dims[1] = 20;
  dataspace = H5Screate_simple(RANK, dims, NULL);
  dataset = H5Dcreate(file, "/Data/Float_Data", H5T_NATIVE_FLOAT,
		       dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  
  /*
   * Close the second dataset and file.
   */
  H5Sclose(dataspace);
  H5Dclose(dataset);
  H5Pclose(plist);
  H5Gclose(grp);
  H5Fclose(file);
  
  /*
   * Now reopen the file and group in the file.
   */
  file = H5Fopen(H5FILE_NAME, H5F_ACC_RDWR, H5P_DEFAULT);
  grp  = H5Gopen(file, "Data", H5P_DEFAULT);
  
  /*
   * Access "Compressed_Data" dataset in the group.
   */
  dataset = H5Dopen(grp, "Compressed_Data", H5P_DEFAULT);
  if( dataset < 0) printf(" Dataset 'Compressed_Data' is not found. \n");
  printf("\"/Data/Compressed_Data\" dataset is open \n");
  
  /*
   * Close the dataset.
   */
  status = H5Dclose(dataset);
  
  /*
   * Create hard link to the Data group.
   */
  status = H5Lcreate_hard(file, "Data", H5L_SAME_LOC, "Data_new", H5P_DEFAULT, H5P_DEFAULT);
  
  /*
   * We can access "Compressed_Data" dataset using created
   * hard link "Data_new".
   */
  dataset = H5Dopen(file, "/Data_new/Compressed_Data", H5P_DEFAULT);
  if( dataset < 0) printf(" Dataset is not found. \n");
  printf("\"/Data_new/Compressed_Data\" dataset is open \n");
  
  /*
   * Close the dataset.
   */
  status = H5Dclose(dataset);
  
  
  /*
   * Use iterator to see the names of the objects in the root group.
   */
  idx_f = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, NULL, file_info, NULL);
  
  /*
   * Unlink the name "Data" and use the iterator to see the names
   * of the objects in the file root directory.
   */
  if(H5Ldelete(file, "Data", H5P_DEFAULT) < 0)
    printf(" H5Ldelete failed \n");
  else
    printf("\"Data\" is unlinked \n");
  
  idx_f = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, NULL, file_info, NULL);
  
  /*
   * Use iterator to see the names of the objects in the group
   * /Data_new.
   */
  idx_g = H5Literate_by_name(grp, "/Data_new", H5_INDEX_NAME, H5_ITER_INC, NULL, group_info, NULL, H5P_DEFAULT);
  
  /*
   * Close the file.
   */
  
  H5Gclose(grp);
  H5Fclose(file);
  
  return 0;
}
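The two callbacks file_info and group_info are not reproduced above; in the HDF5 tutorial this program follows, they simply report each link the iterator visits and return 0 so iteration continues. A minimal version consistent with that (an assumption, not necessarily the exact tutorial code) is:

#include <stdio.h>
#include <hdf5.h>

static herr_t file_info(hid_t loc_id, const char *name, const H5L_info_t *linfo, void *opdata)
{
  /* loc_id, linfo and opdata are unused in this minimal version */
  (void)loc_id; (void)linfo; (void)opdata;
  printf(" Name : %s\n", name);
  return 0;    /* continue iteration */
}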
Example No. 14
u::logic HDF5FormatLib::GetHDF5StructFile(QString filePathJson1, QString filePathHdf)
{
    hid_t           file;
    herr_t          status;
    H5O_info_t      infobuf;
    struct opdata   od;
    filePathJson = filePathJson1;
    try
    {
    FILE *pfile1;
    pfile1 = fopen(filePathJson1.toStdString().c_str(), "w");
    if (!pfile1)
    {
       throw GenericExc(QObject::tr("Error opening the json file"));

    }
    fprintf(pfile1, "%s", "{\n");
    fclose(pfile1);

    file = H5Fopen (filePathHdf.toStdString().c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);
    if (file < 0)
    {
        throw GenericExc(QObject::tr("Error opening the file"));
    }
    status = H5Oget_info (file, &infobuf);
    if (status < 0)
    {
        throw GenericExc(QObject::tr("Error getting file information"));
    }
    od.recurs = 0;
    od.prev = NULL;
    od.addr = infobuf.addr;

    status = H5Literate (file, H5_INDEX_NAME, H5_ITER_NATIVE, NULL, op_func,
                (void *) &od);
    if (status < 0)
    {
        throw GenericExc(QObject::tr("Error getting the file structure"));
    }
    status = H5Fclose (file);
    if (status < 0)
    {
        throw GenericExc(QObject::tr("Error closing the file"));
    }
    pfile1 = fopen(filePathJson.toStdString().c_str(), "a");
    if (!pfile1)
    {
       throw GenericExc(QObject::tr("Error opening the json file"));

    }
    fprintf(pfile1, "%s", "}");
    fclose(pfile1);
    return true;
    } catch (const GenericExc& exc)
    {
        m_errDescription = exc.GetWhat();
        return false;
    } catch (...)
    {
        m_errDescription = QObject::tr("Unknown error while getting the data structure");
        return false;
    }
}
Example No. 15
/****************************************************************
**
**  test_iter_group_large(): Test group iteration functionality
**          for groups with large #'s of objects
**
****************************************************************/
static void
test_iter_group_large(hid_t fapl)
{
    hid_t		file;		/* HDF5 File IDs		*/
    hid_t		dataset;	/* Dataset ID			*/
    hid_t		group;      /* Group ID             */
    hid_t		sid;       /* Dataspace ID			*/
    hid_t		tid;       /* Datatype ID			*/
    hsize_t		dims[] = {SPACE1_DIM1};
    herr_t		ret;		/* Generic return value		*/
    char gname[20];         /* Temporary group name */
    iter_info names[ITER_NGROUPS+2]; /* Names of objects in the root group */
    iter_info *curr_name;        /* Pointer to the current name in the root group */
    int                 i;

    /* Compound datatype */
    typedef struct s1_t {
        unsigned int a;
        unsigned int b;
        float c;
    } s1_t;

    HDmemset(names, 0, sizeof names);

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Large Group Iteration Functionality\n"));

    /* Create file */
    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    CHECK(file, FAIL, "H5Fcreate");

    /* Create dataspace for datasets */
    sid = H5Screate_simple(SPACE1_RANK, dims, NULL);
    CHECK(sid, FAIL, "H5Screate_simple");

    /* Create a bunch of groups */
    for(i = 0; i < ITER_NGROUPS; i++) {
        sprintf(gname, "Group_%d", i);

        /* Add the name to the list of objects in the root group */
        HDstrcpy(names[i].name, gname);
        names[i].type = H5O_TYPE_GROUP;

        /* Create a group */
        group = H5Gcreate2(file, gname, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        CHECK(group, FAIL, "H5Gcreate2");

        /* Close a group */
        ret = H5Gclose(group);
        CHECK(ret, FAIL, "H5Gclose");
    } /* end for */

    /* Create a dataset  */
    dataset = H5Dcreate2(file, "Dataset1", H5T_STD_U32LE, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(dataset, FAIL, "H5Dcreate2");

    /* Add the name to the list of objects in the root group */
    HDstrcpy(names[ITER_NGROUPS].name, "Dataset1");
    names[ITER_NGROUPS].type = H5O_TYPE_DATASET;

    /* Close Dataset */
    ret = H5Dclose(dataset);
    CHECK(ret, FAIL, "H5Dclose");

    /* Close Dataspace */
    ret = H5Sclose(sid);
    CHECK(ret, FAIL, "H5Sclose");

    /* Create a datatype */
    tid = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
    CHECK(tid, FAIL, "H5Tcreate");

    /* Insert fields */
    ret = H5Tinsert(tid, "a", HOFFSET(s1_t, a), H5T_NATIVE_INT);
    CHECK(ret, FAIL, "H5Tinsert");

    ret = H5Tinsert(tid, "b", HOFFSET(s1_t, b), H5T_NATIVE_INT);
    CHECK(ret, FAIL, "H5Tinsert");

    ret = H5Tinsert(tid, "c", HOFFSET(s1_t, c), H5T_NATIVE_FLOAT);
    CHECK(ret, FAIL, "H5Tinsert");

    /* Save datatype for later */
    ret = H5Tcommit2(file, "Datatype1", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Tcommit2");

    /* Add the name to the list of objects in the root group */
    HDstrcpy(names[ITER_NGROUPS + 1].name, "Datatype1");
    names[ITER_NGROUPS + 1].type = H5O_TYPE_NAMED_DATATYPE;

    /* Close datatype */
    ret = H5Tclose(tid);
    CHECK(ret, FAIL, "H5Tclose");

    /* Need to sort the names in the root group, cause that's what the library does */
    HDqsort(names, (size_t)(ITER_NGROUPS + 2), sizeof(iter_info), iter_strcmp2);

    /* Iterate through the file to see members of the root group */
    curr_name = &names[0];
    ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, NULL, liter_cb2, curr_name);
    CHECK(ret, FAIL, "H5Literate");
    for(i = 1; i < 100; i++) {
        hsize_t idx = i;

        curr_name = &names[i];
        ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb2, curr_name);
        CHECK(ret, FAIL, "H5Literate");
    } /* end for */

    /* Close file */
    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");
} /* test_iter_group_large() */
Example No. 16
/****************************************************************
**
**  test_iter_group(): Test group iteration functionality
**
****************************************************************/
static void
test_iter_group(hid_t fapl, hbool_t new_format)
{
    hid_t file;             /* File ID */
    hid_t dataset;          /* Dataset ID */
    hid_t datatype;         /* Common datatype ID */
    hid_t filespace;        /* Common dataspace ID */
    hid_t root_group,grp;   /* Root group ID */
    int i;                  /* counting variable */
    hsize_t idx;            /* Index in the group */
    char name[NAMELEN];     /* temporary name buffer */
    char *lnames[NDATASETS + 2];/* Names of the links created */
    char dataset_name[NAMELEN];  /* dataset name */
    iter_info info;         /* Custom iteration information */
    H5G_info_t ginfo;       /* Buffer for querying object's info */
    herr_t ret;		    /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Group Iteration Functionality\n"));

    /* Create the test file with the datasets */
    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    CHECK(file, FAIL, "H5Fcreate");

    /* Test iterating over empty group */
    info.command = RET_ZERO;
    idx = 0;
    ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
    VERIFY(ret, SUCCEED, "H5Literate");

    datatype = H5Tcopy(H5T_NATIVE_INT);
    CHECK(datatype, FAIL, "H5Tcopy");

    filespace=H5Screate(H5S_SCALAR);
    CHECK(filespace, FAIL, "H5Screate");

    for(i=0; i< NDATASETS; i++) {
        sprintf(name,"Dataset %d",i);
        dataset = H5Dcreate2(file, name, datatype, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        CHECK(dataset, FAIL, "H5Dcreate2");

        /* Keep a copy of the dataset names around for later */
        lnames[i] = HDstrdup(name);
        CHECK(lnames[i], NULL, "strdup");

        ret = H5Dclose(dataset);
        CHECK(ret, FAIL, "H5Dclose");
    } /* end for */

    /* Create a group and named datatype under root group for testing */
    grp = H5Gcreate2(file, "grp", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(grp, FAIL, "H5Gcreate2");

    lnames[NDATASETS] = HDstrdup("grp");
    CHECK(lnames[NDATASETS], NULL, "strdup");

    ret = H5Tcommit2(file, "dtype", datatype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Tcommit2");

    lnames[NDATASETS + 1] = HDstrdup("dtype");
    CHECK(lnames[NDATASETS + 1], NULL, "strdup");

    /* Close everything up */
    ret = H5Tclose(datatype);
    CHECK(ret, FAIL, "H5Tclose");

    ret = H5Gclose(grp);
    CHECK(ret, FAIL, "H5Gclose");

    ret = H5Sclose(filespace);
    CHECK(ret, FAIL, "H5Sclose");

    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");

    /* Sort the dataset names */
    HDqsort(lnames, (size_t)(NDATASETS + 2), sizeof(char *), iter_strcmp);


    /* Iterate through the datasets in the root group in various ways */
    file = H5Fopen(DATAFILE, H5F_ACC_RDONLY, fapl);
    CHECK(file, FAIL, "H5Fopen");

    /* These two functions, H5Oget_info_by_idx and H5Lget_name_by_idx, actually
     * iterate through B-tree for group members in internal library design.
     */
    root_group = H5Gopen2(file, "/", H5P_DEFAULT);
    CHECK(root_group, FAIL, "H5Gopen2");

    ret = H5Gget_info(root_group, &ginfo);
    CHECK(ret, FAIL, "H5Gget_info");
    VERIFY(ginfo.nlinks, (NDATASETS + 2), "H5Gget_info");

    for(i = 0; i< (int)ginfo.nlinks; i++) {
        H5O_info_t oinfo;               /* Object info */

        ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Lget_name_by_idx");

        ret = H5Oget_info_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Oget_info_by_idx");
    } /* end for */

    H5E_BEGIN_TRY {
        ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)(NDATASETS+3), dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Lget_name_by_idx");

    ret = H5Gclose(root_group);
    CHECK(ret, FAIL, "H5Gclose");

    /* These two functions, H5Oget_info_by_idx and H5Lget_name_by_idx, actually
     * iterate through B-tree for group members in internal library design.
     *  (Same as test above, but with the file ID instead of opening the root group)
     */
    ret = H5Gget_info(file, &ginfo);
    CHECK(ret, FAIL, "H5Gget_info");
    VERIFY(ginfo.nlinks, NDATASETS + 2, "H5Gget_info");

    for(i = 0; i< (int)ginfo.nlinks; i++) {
        H5O_info_t oinfo;               /* Object info */

        ret = (herr_t)H5Lget_name_by_idx(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Lget_name_by_idx");

        ret = H5Oget_info_by_idx(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Oget_info_by_idx");
    } /* end for */

    H5E_BEGIN_TRY {
        ret = (herr_t)H5Lget_name_by_idx(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)(NDATASETS + 3), dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Lget_name_by_idx");

    /* Test invalid indices for starting iteration */
    info.command = RET_ZERO;
    idx = (hsize_t)-1;
    H5E_BEGIN_TRY {
        ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Literate");

    /* Test skipping exactly as many entries as in the group */
    idx = NDATASETS + 2;
    H5E_BEGIN_TRY {
        ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Literate");

    /* Test skipping more entries than are in the group */
    idx = NDATASETS + 3;
    H5E_BEGIN_TRY {
        ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Literate");

    /* Test all objects in group, when callback always returns 0 */
    info.command = RET_ZERO;
    idx = 0;
    if((ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) > 0)
        TestErrPrintf("Group iteration function didn't return zero correctly!\n");

    /* Test all objects in group, when callback always returns 2 */
    /* This also tests the "restarting" ability, because the index changes */
    info.command = RET_TWO;
    idx = i = 0;
    while((ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) > 0) {
        /* Verify return value from iterator gets propagated correctly */
        VERIFY(ret, 2, "H5Literate");

        /* Increment the number of times "2" is returned */
        i++;

        /* Verify that the index is the correct value */
        VERIFY(idx, (hsize_t)i, "H5Literate");
        if(idx > (NDATASETS + 2))
            TestErrPrintf("Group iteration function walked too far!\n");

        /* Verify that the correct name is retrieved */
        if(HDstrcmp(info.name, lnames[(size_t)(idx - 1)]) != 0)
            TestErrPrintf("Group iteration function didn't return name correctly for link - lnames[%u] = '%s'!\n", (unsigned)(idx - 1), lnames[(size_t)(idx - 1)]);
    } /* end while */
    VERIFY(ret, -1, "H5Literate");

    if(i != (NDATASETS + 2))
        TestErrPrintf("%u: Group iteration function didn't perform multiple iterations correctly!\n", __LINE__);

    /* Test all objects in group, when callback changes return value */
    /* This also tests the "restarting" ability, because the index changes */
    info.command = new_format ? RET_CHANGE2 : RET_CHANGE;
    idx = i = 0;
    while((ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) >= 0) {
        /* Verify return value from iterator gets propagated correctly */
        VERIFY(ret, 1, "H5Literate");

        /* Increment the number of times "1" is returned */
        i++;

        /* Verify that the index is the correct value */
        VERIFY(idx, (hsize_t)(i + 10), "H5Literate");
        if(idx > (NDATASETS + 2))
            TestErrPrintf("Group iteration function walked too far!\n");

        /* Verify that the correct name is retrieved */
        if(HDstrcmp(info.name, lnames[(size_t)(idx - 1)]) != 0)
            TestErrPrintf("Group iteration function didn't return name correctly for link - lnames[%u] = '%s'!\n", (unsigned)(idx - 1), lnames[(size_t)(idx - 1)]);
    } /* end while */
    VERIFY(ret, -1, "H5Literate");

    if(i != 42 || idx != 52)
        TestErrPrintf("%u: Group iteration function didn't perform multiple iterations correctly!\n", __LINE__);

    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");

    /* Free the dataset names */
    for(i = 0; i< (NDATASETS + 2); i++)
        HDfree(lnames[i]);
} /* test_iter_group() */
Example No. 17
int seek_chromosome(char *chrom, genome_t *genome,
                    chromosome_t *chromosome, bool verbose) {
  hid_t h5file = -1;
  hid_t h5group = -1;
  H5G_info_t h5group_info;
  char *where = NULL;

  /* must be specified to H5Literate; allows interruption and
     resumption, but I don't use it */
  hsize_t idx = 0;

  err_state_t err_state;

  if (verbose) {
    fprintf(stderr, "%s\n", chrom);
  }

  assert(is_valid_genome(genome));

  /* close old chromosome and start creating the new one */
  close_chromosome(chromosome);
  chromosome->chrom = chrom;

  if (genome->dirname) {
    /* if genome is a directory, compute path and open h5file */
    char *h5filename = NULL;
    char *h5filename_suffix;

    /* allocate space for h5filename, including 2 bytes for '/' and '\0' */
    h5filename = xmalloc((strlen(genome->dirname) + strlen(chrom) +
                          strlen(SUFFIX_H5) + 2) * sizeof(char));
    assert(h5filename);

    /* set h5filename */
    h5filename_suffix = stpcpy(h5filename, genome->dirname);
    h5filename_suffix = stpcpy(h5filename_suffix, "/");
    h5filename_suffix = stpcpy(h5filename_suffix, chrom);
    strcpy(h5filename_suffix, SUFFIX_H5);

    /* open the chromosome file */
    disable_h5_errors(&err_state);
    h5file = H5Fopen(h5filename, H5F_ACC_RDWR, H5P_DEFAULT);
    enable_h5_errors(&err_state);

    /* read chromosome from the root group */
    chromosome->h5file = h5file;

    /* allocate space for where = "/\0" */
    where = strdup("/");
    assert(where);

    /* free no-longer-used filename */
    free(h5filename);
  } else {
    /* if genome is a file, compute internal path and open group */
    if (genome->h5file >= 0) {
      h5file = genome->h5file;
      char *where_suffix;

      /* allocate space for where, including 2 bytes for '/' and '\0' */
      where = xmalloc((strlen(chrom) + 2) * sizeof(char));
      assert(where);

      /* read chromosome from subgroup of h5file */
      where_suffix = stpcpy(where, "/");
      strcpy(where_suffix, chrom);
    }
  }

  if (h5file >= 0) {
    /* Open the chromosome group, regardless of dir/file implementation */
    disable_h5_errors(&err_state);
    h5group = H5Gopen(h5file, where, H5P_DEFAULT);
    enable_h5_errors(&err_state);
  }
  chromosome->h5group = h5group;

  /* clean up memory before returning */
  free(where);

  /* if opening failed, then return -1 with h5file set bad */
  if (is_valid_chromosome(chromosome)) {
    /* allocate supercontig metadata array */
    assert(H5Gget_info(chromosome->h5group, &h5group_info) >= 0);
    init_supercontig_array(h5group_info.nlinks, chromosome);

    /* populate supercontig metadata array */
    assert(H5Literate(chromosome->h5group, H5_INDEX_NAME, H5_ITER_INC, &idx,
                      supercontig_visitor, chromosome) == 0);
    return 0;
  } else {
    if (verbose) {
      fprintf(stderr, " can't open chromosome: %s\n", chromosome->chrom);
    }
    return -1;
  }
}
Example No. 18
/*--------------------------------------------------------------*/
int NXVvalidateGroup(pNXVcontext self, hid_t groupID,
	xmlNodePtr groupNode)
{
		hash_table namesSeen, baseNames;
		xmlNodePtr cur = NULL;
		xmlChar *name = NULL, *myClass = NULL;
		xmlChar *target = NULL;
		hid_t childID;
		char fName[256], childName[512], nxdlChildPath[512], childPath[512];
		char mynxdlPath[512];
		char *savedNXDLPath, *pPtr;
		SecondPassData spd;
		hsize_t idx = 0;

		/*
			manage nxdlPath, xmlGetNodePath does not work
		*/
		savedNXDLPath = self->nxdlPath;
		myClass = xmlGetProp(groupNode,(xmlChar *)"type");
		if(self->nxdlPath == NULL) {
			snprintf(mynxdlPath,sizeof(mynxdlPath),"/%s", (char *) myClass);
		} else {
			snprintf(mynxdlPath,sizeof(mynxdlPath),"%s/%s",
				self->nxdlPath, (char *) myClass);
		}
		self->nxdlPath = mynxdlPath;

		/*
			tell what we are doing
		*/
		H5Iget_name(groupID,fName,sizeof(fName));
		NXVsetLog(self,"sev","debug");
		NXVsetLog(self,"message","Validating group");
		NXVsetLog(self,"nxdlPath",self->nxdlPath);
		NXVsetLog(self,"dataPath",fName);
		NXVlog(self);


		validateGroupAttributes(self, groupID, groupNode);
		hash_construct_table(&namesSeen,100);

		/* first pass */
		cur = groupNode->xmlChildrenNode;
		while(cur != NULL){
			if(xmlStrcmp(cur->name,(xmlChar *) "group") == 0){
					childID = findGroup(self, groupID, cur);
					if(childID >= 0){
							H5Iget_name(childID, childName,sizeof(childName));
							/*
								we have to get at the HDF5 name. There may be no
								name in the NXDL, but a suitable group has been found
								by NXclass.
							*/
							pPtr = strrchr(childName,'/');
							if(pPtr != NULL){
								hash_insert(pPtr+1,strdup(""),&namesSeen);
							} else {
								hash_insert(childName,strdup(""),&namesSeen);
							}
							NXVvalidateGroup(self,childID,cur);
					} else {
						name = xmlGetProp(cur,(xmlChar *)"type");
						snprintf(nxdlChildPath,sizeof(nxdlChildPath),"%s/%s",
							self->nxdlPath, (char *)name);
						xmlFree(name);
						NXVsetLog(self,"dataPath",fName);
						NXVsetLog(self,"nxdlPath", nxdlChildPath);
						if(!isOptional(cur)){
							NXVsetLog(self,"sev","error");
							NXVsetLog(self,"message","Required group missing");
							NXVlog(self);
							self->errCount++;
						} else {
							NXVsetLog(self,"sev","warnopt");
							NXVsetLog(self,"message","Optional group missing");
							NXVlog(self);
						}
					}
			}
			if(xmlStrcmp(cur->name,(xmlChar *) "field") == 0){
					name = xmlGetProp(cur,(xmlChar *)"name");
					if(H5LTfind_dataset(groupID,(char *)name) ) {
						childID = H5Dopen(groupID,(char *)name,H5P_DEFAULT);
					} else {
						childID = -1;
					}
					snprintf(childPath,sizeof(childPath),"%s/%s",
						fName,name);
					if(childID < 0){
						NXVsetLog(self,"dataPath",childPath);
						snprintf(nxdlChildPath,sizeof(nxdlChildPath),
							"%s/%s", self->nxdlPath, name);
						NXVsetLog(self,"nxdlPath", nxdlChildPath);
						if(!isOptional(cur)){
									NXVsetLog(self,"sev","error");
									NXVsetLog(self,"message","Required field missing");
									NXVlog(self);
									self->errCount++;
						} else {
							NXVsetLog(self,"sev","warnopt");
							NXVsetLog(self,"message","Optional field missing");
							NXVlog(self);
						}
					} else {
						if(xmlStrcmp(name,(xmlChar *)"depends_on") == 0){
							/*
								This must be validated from the field level, as
								it might point to fields which are not in the
								application definition
							*/
							validateDependsOn(self,groupID,childID);
						} else {
							NXVvalidateField(self,childID, cur);
						}
						hash_insert((char *)name,strdup(""),&namesSeen);
					}
					xmlFree(name);
			}
			if(xmlStrcmp(cur->name,(xmlChar *) "link") == 0){
				name = xmlGetProp(cur,(xmlChar *)"name");
				target = xmlGetProp(cur,(xmlChar *)"target");
				hash_insert((char *)name,strdup(""),&namesSeen);
				validateLink(self,groupID,name, target);
				xmlFree(name);
				xmlFree(target);
			}
			cur = cur->next;
		}

		/*
			Second pass: search the HDF5 group for additional
			items which we have not checked yet. Most of the hard work
			is in the SecondPassIterator.
		*/
		hash_construct_table(&baseNames,100);
		NXVloadBaseClass(self,&baseNames,(char *)myClass);
		spd.baseNames = &baseNames;
		spd.namesSeen = &namesSeen;
		spd.self = self;
		NXVsetLog(self,"nxdlPath", mynxdlPath);
		H5Literate(groupID, H5_INDEX_NAME, H5_ITER_INC, &idx,
			SecondPassIterator, &spd);

		/*
			clean up
		*/
		hash_free_table(&namesSeen,free);
		hash_free_table(&baseNames,free);
		xmlFree(myClass);
		/*
			restore my paths...
		*/
		self->nxdlPath = savedNXDLPath;
		return 0;
	}
Example No. 19
H5RandomReader::H5RandomReader(const std::string fileName, const std::string groupPath) throw (InvalidFileException) {

    try {
        file.openFile(fileName, H5F_ACC_RDONLY);}
    catch ( H5::FileIException ) {
        throw InvalidFileException("Cannot access file");}
    try {
        group = file.openGroup(groupPath);}
    catch ( H5::GroupIException ) {
        file.close();
        throw InvalidFileException("Cannot access group");}
    /*
     * extract timeline. This is also necessary to get the nbSteps.
     */
    try {
        timeline = group.openDataSet("timeline");
    	nSteps = timeline.getSpace().getSimpleExtentNpoints();}
    catch ( H5::DataSetIException error ) {
        //error.printError();
        group.close();
        file.close();
        throw InvalidFileException("Cannot access timeline dataset");}
    if (logging::info)
        std::cerr << "Opened group \"" <<  fileName << groupPath << "\" which has " << nSteps << " steps.\n";
    /*
     * extract objects names in the xpGroup
     */

    std::vector<std::string>  names;
    H5Literate(group.getId(), H5_INDEX_NAME, H5_ITER_INC, NULL, iterInGroup, &names);
    /*
     * extract data from object in xpGroup
     * these data can be of 3 types: matrix, translate or wrench
     * each data are saved in related map
     */
    for (unsigned int i=0; i<names.size(); i++){ //TODO: skip timeline
        H5::DataSet dSet = group.openDataSet(names[i]);
        if (H5Aexists(dSet.getId(), "ArborisViewerType")) {
            H5::Attribute att = dSet.openAttribute("ArborisViewerType");
            std::string type;
            att.read(att.getDataType(), type);
            if (type == "matrix"){
                H5::DataSpace dSpace = dSet.getSpace();
                bool dimension_ok = false;
                if (dSpace.getSimpleExtentNdims()==3) {
                    hsize_t dims[3];
                    dSpace.getSimpleExtentDims (dims);
                    if (dims[0] == nSteps && dims[1] == 4 && dims[2] == 4)
                        dimension_ok = true;}
                if (dimension_ok)
                    matrices[names[i]] = dSet;
                else {
                    if (logging::warning)
                        std::cerr << "Skipping dataset \"" << names[i] << "\" which has wrong dimensions. I was expecting (" << nSteps << ",4,4).\n";
                    dSet.close();}}
            else if (type == "translate"){
                H5::DataSpace dSpace = dSet.getSpace();
                bool dimension_ok = false;
                if (dSpace.getSimpleExtentNdims()==2) {
                    hsize_t dims[2];
                    dSpace.getSimpleExtentDims (dims);
                    if (dims[0] == nSteps && dims[1] == 3)
                        dimension_ok = true;}
                if (dimension_ok)
                    translates[names[i]] = dSet;
                else {
                    if (logging::warning)
                        std::cerr << "Skipping dataset \"" << names[i] << "\" which has wrong dimensions. I was expecting (" << nSteps << ",3).\n";
                    dSet.close();}}
            else if (type == "wrench") {
                H5::DataSpace dSpace = dSet.getSpace();
                bool dimension_ok = false;
                if (dSpace.getSimpleExtentNdims()==2) {
                    hsize_t dims[2];
                    dSpace.getSimpleExtentDims (dims);
                    if (dims[0] == nSteps && dims[1] == 6)
                        dimension_ok = true;}
                if (dimension_ok)
                    wrenches[names[i]] = dSet;
                else {
                    if (logging::warning)
                        std::cerr << "Skipping dataset \"" << names[i] << "\" which has wrong dimensions. I was expecting (" << nSteps << ",6).\n";
                    dSet.close();}}
            else {
                if (logging::warning)
                    std::cerr << "Skipping dataset \"" << names[i] << "\" whose ArborisViewerType attribute has unknown value \"" << type << "\".\n";
                dSet.close();}
            att.close();
        }
        else {
            if (logging::info)
                std::cerr << "Skipping dataset \"" << names[i] << "\" which has no ArborisViewerType attribute.\n";
            dSet.close();
        }
    }
};
Example No. 20
int
main()
{
   printf("\n*** Checking HDF5 file functions.\n");
   printf("*** Creating HDF5 file in the canonical netCDF-4 way...");
   {
      hid_t fapl_id, fcpl_id, fileid, grpid, fileid2;
      hsize_t num_obj;

      /* Create file access and create property lists. */
      if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR;
      if ((fcpl_id = H5Pcreate(H5P_FILE_CREATE)) < 0) ERR;
      
      /* Set latest_format in access property list. This ensures that
       * the latest, greatest, HDF5 versions are used in the file. */ 
/*      if (H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST, 
	H5F_LIBVER_LATEST) < 0) ERR;*/

      /* Set H5P_CRT_ORDER_TRACKED in the creation property list. This
       * turns on HDF5 creation ordering in the file. */
      if (H5Pset_link_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED |
					       H5P_CRT_ORDER_INDEXED)) < 0) ERR;
      if (H5Pset_attr_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED |
					       H5P_CRT_ORDER_INDEXED)) < 0) ERR;

      /* Set close degree. */
      if (H5Pset_fclose_degree(fapl_id, H5F_CLOSE_STRONG)) ERR;

      /* Create the file. */
      if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, fcpl_id, fapl_id)) < 0) ERR;

      /* Open the root group. */
      if ((grpid = H5Gopen2(fileid, "/", H5P_DEFAULT)) < 0) ERR;

      /* Close up. */
      if (H5Pclose(fapl_id) < 0 ||
	  H5Pclose(fcpl_id) < 0 ||
	  H5Gclose(grpid) < 0 ||
	  H5Fclose(fileid) < 0)
	 ERR;

      /* Reopen the file and check it. */
      if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR;
      if (H5Pset_fclose_degree(fapl_id, H5F_CLOSE_STRONG)) ERR;

      if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDWR, fapl_id)) < 0) ERR;
      if (H5Gget_num_objs(fileid, &num_obj) < 0) ERR;
      if (num_obj) ERR;

      /* Open another copy of the same file. Must use the same file
       * access degree or HDF5 will not open the file. */
      if ((fileid2 = H5Fopen(FILE_NAME, H5F_ACC_RDWR, fapl_id)) < 0) ERR;

      if (H5Fclose(fileid) < 0) ERR;
      if (H5Fclose(fileid2) < 0) ERR;
      if (H5Pclose(fapl_id) < 0) ERR;
   }
   SUMMARIZE_ERR;
   printf("*** Opening a HDF5 file with H5Literate...");
   {
      hid_t fapl_id, fileid, grpid;
      hsize_t idx = 0;
      char obj_name[STR_LEN + 1];
      hsize_t num_obj;
      int i;

      if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR;
      if (H5Pset_fclose_degree(fapl_id, H5F_CLOSE_STRONG)) ERR;
      if (H5Pset_cache(fapl_id, 0, CHUNK_CACHE_NELEMS, CHUNK_CACHE_SIZE, 
		       CHUNK_CACHE_PREEMPTION) < 0) ERR;
      if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDONLY, fapl_id)) < 0) ERR;
      if ((grpid = H5Gopen2(fileid, "/", H5P_DEFAULT)) < 0) ERR;

      if (H5Gget_num_objs(grpid, &num_obj) < 0) ERR;
      for (i = 0; i < num_obj; i++)
      {
	 if (H5Literate(grpid, H5_INDEX_CRT_ORDER, H5_ITER_INC, &idx, op_func, 
			(void *)obj_name) != 1) ERR;
	 printf("encountered object %s\n", obj_name);
      }

      if (H5Gclose(grpid) < 0) ERR;
      if (H5Fclose(fileid) < 0) ERR;
      if (H5Pclose(fapl_id) < 0) ERR;
   }
   SUMMARIZE_ERR;
   printf("*** Opening a HDF5 file in the canonical netCDF-4 way...");
   {
      hid_t fapl_id, fileid, grpid;
      H5_index_t idx_field = H5_INDEX_CRT_ORDER;
      H5O_info_t obj_info;
      hsize_t num_obj;
      ssize_t size;
      char obj_name[STR_LEN + 1];
      int i;

      if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR;
      if (H5Pset_fclose_degree(fapl_id, H5F_CLOSE_STRONG)) ERR;
      if (H5Pset_cache(fapl_id, 0, CHUNK_CACHE_NELEMS, CHUNK_CACHE_SIZE, 
		       CHUNK_CACHE_PREEMPTION) < 0) ERR;
      if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDONLY, fapl_id)) < 0) ERR;
      if ((grpid = H5Gopen2(fileid, "/", H5P_DEFAULT)) < 0) ERR;

      /* How many objects in this group? */
      if (H5Gget_num_objs(grpid, &num_obj) < 0) ERR;
      for (i = 0; i < num_obj; i++)
      {
	 if (H5Oget_info_by_idx(grpid, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, 
				i, &obj_info, H5P_DEFAULT)) ERR;
	 if ((size = H5Lget_name_by_idx(grpid, ".", idx_field, H5_ITER_INC, i,
					NULL, 0, H5P_DEFAULT)) < 0) ERR;
	 if (H5Lget_name_by_idx(grpid, ".", idx_field, H5_ITER_INC, i,
				obj_name, size+1, H5P_DEFAULT) < 0) ERR;
      }

      if (H5Gclose(grpid) < 0) ERR;
      if (H5Fclose(fileid) < 0) ERR;
      if (H5Pclose(fapl_id) < 0) ERR;
   }
   SUMMARIZE_ERR;
   FINAL_RESULTS;
}
Example No. 21
    void GalacticusReader::getOutputsMeta(long &numOutputs) {
        char line[1000];
        double aexp;
        int snapnum;

        string s("Outputs");
        Group group(fp->openGroup(s));
        hsize_t size = group.getNumObjs();
        cout << "Number of outputs stored in this file is " << size << endl;

        outputNames.clear();
        int idx2  = H5Literate(group.getId(), H5_INDEX_NAME, H5_ITER_INC, NULL, file_info, &outputNames);

        // should close the group now
        group.close();

        OutputMeta o;
        for (int i=0; i<size; i++) {
            string sg = s + string("/") + outputNames[i];
            Group group(fp->openGroup(sg));
            Attribute att = group.openAttribute("outputExpansionFactor");

            // check type
            H5T_class_t type_class = att.getTypeClass();
            if (type_class != H5T_FLOAT) {
                cout << "Float attribute does not have correct type!" << endl;
                abort();
            }

            // check byte order
            FloatType intype = att.getFloatType();
            H5std_string order_string;
            H5T_order_t order = intype.getOrder(order_string);

            // check again data sizes
            if (sizeof(double) != intype.getSize()) {
                cout << "Mismatch of double data type." << endl;
                abort();
            }

            // read the attribute
            att.read(intype, &aexp);

            // assign values
            o.outputName = s + string("/") + outputNames[i] + string("/") + string("nodeData"); // complete name to access the data block
            o.outputExpansionFactor = (float) aexp;

            // get snapshot number = number at the end of "Output%d"
            string prefix = "Output"; // Is this always the case??? Could also search for a number using boost regex
            string numstr = outputNames[i].substr(prefix.length(),outputNames[i].length());
            o.ioutput = (int) atoi(numstr.c_str());
            snapnum = getSnapnum(o.ioutput);
            o.snapnum = snapnum;

            // store in map
            outputMetaMap[snapnum] = o;

            // close the group
            group.close();

        }

        numOutputs = size;
        return;
    }
Example No. 22
    int GalacticusReader::readNextBlock(string outputName) {
        // read one complete Output* block from Galacticus HDF5-file
        // should fit into memory ... if not, need to adjust this
        // and provide the number of values to be read each time

        long nvalues;
        //char outputname[1000];

        //performance output stuff
        boost::posix_time::ptime startTime;
        boost::posix_time::ptime endTime;
        string newtext = "";
        boost::regex re(":z[0-9.]*");
        

        startTime = boost::posix_time::microsec_clock::universal_time();

        // first get names of all DataSets in nodeData group and their item size
        //cout << "outputName: " << outputName<< endl;

        Group group(fp->openGroup(outputName));
        // maybe check here that it worked?

        hsize_t len = group.getNumObjs();

        //cout << "Iterating over Datasets in the group ... " << endl;
        //H5L_iterate_t
        //vector<string> dsnames;
        dataSetNames.clear(); // actually, the names should be exactly the same as for the group before!! --> check this???
        int idx2  = H5Literate(group.getId(), H5_INDEX_NAME, H5_ITER_INC, NULL, file_info, &dataSetNames);

        string s;
        string dsname;
        string matchname;
        int numDataSets = dataSetNames.size();
        //cout << "numDataSets: " << numDataSets << endl;
        
        // create a key-value map for the dataset names, do it from scratch for each block,
        // and remove redshifts from the dataset names (where necessary)    
        dataSetMap.clear();
        for (int k=0; k<numDataSets; k++) {
            dsname = dataSetNames[k];
            // convert to matchname, i.e. remove possibly given redshift from the name:
            string matchname = boost::regex_replace(dsname, re, newtext);
            dataSetMap[matchname] = k;
            //dataSetMatchNames.push_back(matchname);
        }

        // clear datablocks from previous block, before reading new ones:
        datablocks.clear();

        // read each desired data set, use corresponding read routine for different types
        for (int k=0; k<numDataSets; k++) {

            dsname = dataSetNames[k];
            s = string(outputName) + string("/") + dsname;

            DataSet *dptr = new DataSet(fp->openDataSet(s));
            DataSet dataset = *dptr; // for convenience

            // check class type
            H5T_class_t type_class = dataset.getTypeClass();
            if (type_class == H5T_INTEGER) {
                //cout << "DataSet has long type!" << endl;
                long *data = readLongDataSet(s, nvalues);
            } else if (type_class == H5T_FLOAT) {
                //cout << "DataSet has double type!" << endl;
                double *data2 = readDoubleDataSet(s, nvalues);
            }
            //cout << nvalues << " values read." << endl;
        }
        // How to proceed from here onwards??
        // Could read all data into data[0] to data[104] or so,
        // but I need to keep the information which is which!
        // Alternatively create one big structure to hold it all?

        // => use a small class that contains
        // 1) name of dataset
        // 2) array of values, number of values
        // use vector<newclass> to create a vector of these datasets.
        // maybe can use datasets themselves, so no need to define own class?
        // => assigning to the new class has already happened now inside the read-class.

        endTime = boost::posix_time::microsec_clock::universal_time();
        printf("Time for reading output %s (%ld rows): %lld ms\n", outputName.c_str(), nvalues, (long long int) (endTime-startTime).total_milliseconds());
        fflush(stdout);
            
        return nvalues; //assume that nvalues is the same for each dataset (datablock) inside one Output-group (same redshift)
    }
Example No. 23
/*--------------------------------------------------------------
 Finding groups is hideous:
 * They may be specified by name. This seems easy but is complicated
   by the fact that the group name can either be a name attribute or
	 an attribute field.  A design flaw of NXDL, IMHO.
 * They may be specified by type and I need to search by NX_class.
---------------------------------------------------------------*/
static hid_t findGroup(pNXVcontext self, hid_t parentGroup, xmlNodePtr groupNode)
{
	xmlChar *name = NULL, *nxClass = NULL, *nodePath = NULL;
	xmlNodePtr cur = NULL;
	findByClassData fbcd;
	hid_t gid, status;
	hsize_t idx = 0;

  name = xmlGetProp(groupNode,(xmlChar *)"name");
	if(name == NULL){
		cur = groupNode->xmlChildrenNode;
		while(cur != NULL){
			if(xmlStrcmp(cur->name,(xmlChar *)"attribute") == 0){
				name = xmlGetProp(cur,(xmlChar *)"name");
				if(name != NULL){
					break;
				}
			}
			cur = cur->next;
		}
	}
	if(name != NULL){
		if(H5LTpath_valid(parentGroup,(char *)name, 1)){
			status = H5Gopen(parentGroup,(char *)name,H5P_DEFAULT);
		} else {
			status =  -1;
		}
		xmlFree(name);
		return status;
	}

	/*
		no name to be found: search by type
	*/
	nxClass = xmlGetProp(groupNode,(xmlChar *)"type");
	if(nxClass == NULL){
		NXVsetLog(self,"sev","error");
		nodePath = xmlGetNodePath(cur);
		NXVsetLog(self,"nxdlPath", (char *)nodePath);
		NXVsetLog(self,"message","Malformed group entry, type missing");
		NXVlog(self);
		xmlFree(nodePath);
		self->errCount++;
		return -1;
	}

	fbcd.nxClass = (char *)nxClass;
	fbcd.name = NULL;
	H5Literate(parentGroup, H5_INDEX_NAME, H5_ITER_INC, &idx,
		FindByClassIterator, &fbcd);
	if(fbcd.name != NULL){
		gid = H5Gopen(parentGroup,fbcd.name,H5P_DEFAULT);
		free(fbcd.name);
		xmlFree(fbcd.nxClass);
		return gid;
	}
	xmlFree(fbcd.nxClass);


	return -1;
}