/*
 * Populate the file-scope series_names[] array from the links of an
 * HDF5 group.  Sets num_series to the group's link count and xstrdup's
 * each link name into series_names.
 *
 * Fix: buf is now cleared before each lookup; previously a failed
 * H5Lget_name_by_idx left buf uninitialized and xstrdup() copied garbage.
 */
static void _get_series_names(hid_t group)
{
	int i, len;
	char buf[MAX_GROUP_NAME+1];
	H5G_info_t group_info;

	H5Gget_info(group, &group_info);
	num_series = (int)group_info.nlinks;
	if (num_series < 0) {
		debug("No Data Series in group");
		return;
	}

	series_names = xmalloc(sizeof(char*)*num_series);
	for (i = 0; (num_series>0) && (i<num_series); i++) {
		/* Clear buf so a failed lookup dups "" instead of garbage. */
		buf[0] = '\0';
		len = H5Lget_name_by_idx(group, ".", H5_INDEX_NAME,
					 H5_ITER_INC, i, buf, MAX_GROUP_NAME,
					 H5P_DEFAULT);
		if ((len < 0) || (len > MAX_GROUP_NAME)) {
			info("Invalid series name=%s", buf);
			// put into list anyway so list doesn't have a null.
		}
		series_names[i] = xstrdup(buf);
	}
}
/*
 * Look up and open an immediate child group of `parent` by name.
 *
 * Scans the parent's links in increasing name order; when a link whose
 * name equals `name` is found, that group is opened and its id returned.
 * Returns -1 when parent is invalid or no matching child exists; logs an
 * error (and returns the negative id) if the open itself fails.
 */
extern hid_t get_group(hid_t parent, const char *name)
{
	char buf[MAX_GROUP_NAME];
	hsize_t nobj;
	hid_t gid;
	int i, len;
	H5G_info_t group_info;

	if (parent < 0) {
		debug3("PROFILE: parent is not HDF5 object");
		return -1;
	}

	H5Gget_info(parent, &group_info);
	nobj = group_info.nlinks;

	for (i = 0; (nobj > 0) && (i < nobj); i++) {
		/* Fetch the i-th link name; skip unusable names. */
		len = H5Lget_name_by_idx(parent, ".", H5_INDEX_NAME,
					 H5_ITER_INC, i, buf, MAX_GROUP_NAME,
					 H5P_DEFAULT);
		if ((len <= 0) || (len >= MAX_GROUP_NAME))
			continue;
		if (strcmp(buf, name) != 0)
			continue;

		gid = H5Gopen(parent, name, H5P_DEFAULT);
		if (gid < 0)
			error("PROFILE: Failed to open %s", name);
		return gid;
	}
	return -1;
}
/*
 * Load a material composition (protocol 0 layout) from the HDF5 group at
 * `datapath`: every child dataset is read at index `row`; the dataset
 * named "Mass"/"MASS"/"mass" sets this->mass, all others become nuclide
 * entries in this->comp keyed by pyne::nucname::id().
 *
 * Fix: the group handle returned by H5Gopen2 was previously never
 * closed, leaking one HDF5 id per call.
 */
void pyne::Material::_load_comp_protocol0(hid_t db, std::string datapath, int row) {
  hid_t matgroup = H5Gopen2(db, datapath.c_str(), H5P_DEFAULT);
  hid_t nucset;
  double nucvalue;
  ssize_t nuckeylen;
  std::string nuckey;

  // get the number of members in the material group
  H5G_info_t group_info;
  H5Gget_info(matgroup, &group_info);
  hsize_t matG = group_info.nlinks;

  // Iterate over datasets in the group.
  for (int matg = 0; matg < matG; matg++) {
    // First call sizes the name (plus 1 for '\0'), second call fetches it.
    nuckeylen = 1 + H5Lget_name_by_idx(matgroup, ".", H5_INDEX_NAME, H5_ITER_INC, matg,
                                       NULL, 0, H5P_DEFAULT);
    char * nkey = new char[nuckeylen];
    nuckeylen = H5Lget_name_by_idx(matgroup, ".", H5_INDEX_NAME, H5_ITER_INC, matg,
                                   nkey, nuckeylen, H5P_DEFAULT);
    nuckey = nkey;
    nucset = H5Dopen2(matgroup, nkey, H5P_DEFAULT);
    nucvalue = h5wrap::get_array_index<double>(nucset, row);

    if (nuckey == "Mass" || nuckey == "MASS" || nuckey == "mass")
      mass = nucvalue;
    else
      comp[pyne::nucname::id(nuckey)] = nucvalue;

    H5Dclose(nucset);
    delete[] nkey;
  };

  // Release the group handle (was leaked before).
  H5Gclose(matgroup);

  // Set meta data
  atoms_per_molecule = -1.0;
};
/*
 * Smoke test for the memvol VOL plugin: registers the plugin, creates a
 * file through it, exercises group create/open/info calls, then tears
 * everything down.  Always returns 0; `status` only records the result
 * of one H5Gclose and is printed at the end.
 */
int main(){
  hid_t fprop;
  hid_t fid;
  hid_t vol_id = H5VL_memvol_init();
  herr_t status;
  hid_t g1, g2;
  hid_t plist;
  char name[1024];

  /* Route all access to this file through the memvol VOL plugin. */
  fprop = H5Pcreate(H5P_FILE_ACCESS);
  /* NOTE(review): passing &fprop as the vol_info argument looks odd —
   * confirm the memvol plugin ignores its vol_info pointer. */
  H5Pset_vol(fprop, vol_id, &fprop);
  fid = H5Fcreate("test", H5F_ACC_TRUNC, H5P_DEFAULT, fprop);
  H5VLget_plugin_name(fid, name, 1024);
  printf ("Using VOL %s\n", name);

  /* Create /g1 and /g2/g1 — same leaf name under two different parents. */
  g1 = H5Gcreate2(fid, "g1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  H5Gclose(g1);
  g2 = H5Gcreate2(fid, "g2", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  g1 = H5Gcreate2(g2, "g1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  H5Gclose(g1);
  H5Gclose(g2);

  // is this allowed?
  //g3 = H5Gcreate2(fid, "g1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  //H5Gclose(g3);

  printf("Testing additional functions\n");

  /* Query the creation plist and group info via several lookup paths. */
  g1 = H5Gopen2(fid, "g1", H5P_DEFAULT );
  plist = H5Gget_create_plist(g1);
  H5G_info_t group_info;
  H5Gget_info(g1, & group_info );
  H5Gget_info_by_idx(fid, "g1", H5_INDEX_CRT_ORDER, H5_ITER_NATIVE, 0, & group_info, H5P_DEFAULT ) ;
  H5Gget_info_by_idx(fid, "g1", H5_INDEX_NAME, H5_ITER_NATIVE, 0, & group_info, H5P_DEFAULT ) ;
  H5Gget_info_by_name(fid, "g1", & group_info, H5P_DEFAULT);
  H5Pclose(plist);
  status = H5Gclose(g1);

  g1 = H5Gopen2(fid, "g2", H5P_DEFAULT );
  H5Gclose(g1);

  //g1 = H5Gopen2(fid, "INVALID", H5P_DEFAULT );
  //H5Gclose(g1);

  /* Anonymous group: created but never linked into the file hierarchy. */
  g1 = H5Gcreate_anon( fid, H5P_DEFAULT, H5P_DEFAULT );
  H5Gclose(g1);

  H5Fclose(fid);
  H5VL_memvol_finalize();

  printf("Status: %d\n", status);
  return 0;
}
// Read children names of an object AH5_children_t AH5_read_children_name(hid_t file_id, const char* path) { H5G_info_t ginfo; AH5_children_t children; hsize_t i, j = 0; ssize_t size; hid_t group_id; char temp[AH5_ELEMENT_NAME_LENGTH], *temp2; children.childnames = NULL; /* - path must exist - number of children must be greater than zero - element name - must be readable - must be shorter than AH5_ELEMENT_NAME_LENGTH - must NOT be same as "_param" */ if (AH5_path_valid(file_id, path) || strcmp(path, "/") == 0) { group_id = H5Gopen1(file_id, path); H5Gget_info(group_id, &ginfo); if (ginfo.nlinks > 0) { children.childnames = (char **) malloc((size_t) ginfo.nlinks * sizeof(char *)); for (i = 0; i < ginfo.nlinks; i++) { size = H5Lget_name_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, i, NULL, 0, H5P_DEFAULT); if (size < 0) printf("***** ERROR: Cannot read all children of \"%s\". *****\n\n", path); else if (size >= AH5_ELEMENT_NAME_LENGTH) { temp2 = (char *) malloc ((size + 1) * sizeof(char)); H5Lget_name_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, i, temp2, size + 1, H5P_DEFAULT); printf("***** ERROR: Maximum name length (%i) exceeded in \"%s/%s\". *****\n\n", AH5_ELEMENT_NAME_LENGTH - 1, path, temp2); free(temp2); } else { H5Lget_name_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, i, temp, size + 1, H5P_DEFAULT); if (strcmp(temp, "_param") != 0) // exclude parameterized attributes { children.childnames[j] = (char *) malloc((size + 2) * sizeof(char)); strcpy(children.childnames[j], "/"); strcat(children.childnames[j++], temp); } } } if (j == 0) free(children.childnames); } H5Gclose(group_id); } children.nb_children = j; return children; }
//-------------------------------------------------------------------------- // Function: CommonFG::getNumObjs ///\brief Returns the number of objects in this group. ///\return Number of objects ///\exception H5::FileIException or H5::GroupIException // Programmer Binh-Minh Ribler - January, 2003 //-------------------------------------------------------------------------- hsize_t CommonFG::getNumObjs() const { H5G_info_t ginfo; /* Group information */ herr_t ret_value = H5Gget_info(getLocId(), &ginfo); if(ret_value < 0) { throwException("getNumObjs", "H5Gget_info failed"); } return (ginfo.nlinks); }
/*
 * Class:     hdf_hdf5lib_H5
 * Method:    H5Gget_info
 * Signature: (J)Lhdf/hdf5lib/structs/H5G_info_t;
 */
JNIEXPORT jobject JNICALL
Java_hdf_hdf5lib_H5_H5Gget_1info
    (JNIEnv *env, jclass cls, jlong loc_id)
{
    H5G_info_t group_info;
    herr_t     retVal = H5Gget_info((hid_t)loc_id, &group_info);

    /* Raise a Java-side HDF5 library exception on failure; otherwise wrap
     * the C struct in a Java H5G_info_t object. */
    if (retVal < 0) {
        h5libraryError(env);
        return NULL;
    }

    return create_H5G_info_t(env, group_info);
} /* end Java_hdf_hdf5lib_H5_H5Gget_1info */
const unsigned int H5LinksList::getSize() const { H5G_info_t info; herr_t err = H5Gget_info(getParent().getH5Id(), &info); if (err < 0) { throw H5Exception(__LINE__, __FILE__, _("Cannot get the number of links.")); } return (unsigned int)info.nlinks; }
/*
 * Walk every task group under the step's GRP_TASKS group and extract its
 * sample series to fp.  For each task: read its task-id and node-name
 * attributes, open the node's GRP_SAMPLES group, and emit the series via
 * _extract_series (header row only for the first task).  Any failure is
 * fatal().
 *
 * Fix: task_name is now built with snprintf so a pathological task_id
 * cannot overflow the fixed-size buffer.
 */
static void _extract_all_tasks(FILE *fp, hid_t gid_step, hid_t gid_nodes,
			       int nnodes, int stepx)
{
	hid_t	gid_tasks, gid_task = 0, gid_node = -1, gid_level = -1;
	H5G_info_t group_info;
	int	ntasks, itx, len, task_id;
	char	task_name[MAX_GROUP_NAME+1];
	char*	node_name;
	char	buf[MAX_GROUP_NAME+1];
	bool	hd = true;

	gid_tasks = get_group(gid_step, GRP_TASKS);
	if (gid_tasks < 0)
		fatal("No tasks in step %d", stepx);
	H5Gget_info(gid_tasks, &group_info);
	ntasks = (int) group_info.nlinks;
	if (ntasks <= 0)
		fatal("No tasks in step %d", stepx);

	for (itx = 0; itx<ntasks; itx++) {
		// Get the name of the group.
		len = H5Lget_name_by_idx(gid_tasks, ".", H5_INDEX_NAME,
					 H5_ITER_INC, itx, buf,
					 MAX_GROUP_NAME, H5P_DEFAULT);
		if ((len > 0) && (len < MAX_GROUP_NAME)) {
			gid_task = H5Gopen(gid_tasks, buf, H5P_DEFAULT);
			if (gid_task < 0)
				fatal("Failed to open %s", buf);
		} else
			fatal("Illegal task name %s",buf);
		task_id = get_int_attribute(gid_task, ATTR_TASKID);
		node_name = get_string_attribute(gid_task, ATTR_NODENAME);
		/* snprintf (was sprintf): cannot overflow task_name */
		snprintf(task_name, sizeof(task_name), "%s_%d",
			 GRP_TASK, task_id);
		gid_node = H5Gopen(gid_nodes, node_name, H5P_DEFAULT);
		if (gid_node < 0)
			fatal("Failed to open %s for Task_%d",
			      node_name, task_id);
		gid_level = get_group(gid_node, GRP_SAMPLES);
		if (gid_level < 0)
			fatal("Failed to open group %s for node=%s task=%d",
			      GRP_SAMPLES,node_name, task_id);
		_extract_series(fp, stepx, hd, gid_level, node_name,
				task_name);

		hd = false;	/* only the first task emits the header */
		xfree(node_name);
		H5Gclose(gid_level);
		H5Gclose(gid_node);
		H5Gclose(gid_task);
	}
	H5Gclose(gid_tasks);
}
/**
 * Returns how many links this group contains.
 * Throws an H5Exception if querying the group info fails.
 */
const unsigned int H5Group::getLinksSize() const
{
    H5G_info_t info;

    if (H5Gget_info(group, &info) < 0)
    {
        throw H5Exception(__LINE__, __FILE__, _("Cannot get the links number"));
    }

    return (unsigned int)info.nlinks;
}
/*
 * Store the number of links of `dataset` into *items.
 * Returns 0 on success; on failure *items is set to 0 and -1 is returned.
 */
int getListDims6(int dataset, int* items)
{
    H5G_info_t group_info;

    if (H5Gget_info(dataset, &group_info) < 0)
    {
        *items = 0;
        return -1;
    }

    *items = (int)group_info.nlinks;
    return 0;
}
/// Number of links directly below this group.
/// Throws Iex::IoExc if the HDF5 info query fails.
i32 HDF5Group::numChildren() const
{
    H5G_info_t info;

    // ask HDF5 for the group's metadata
    const HDF5Err status = H5Gget_info(m_id, &info);
    if (status < 0)
    {
        THROW(Iex::IoExc, "Could not get info for group");
    }

    // the link count is the child count
    return info.nlinks;
}
// Read children names of an object AH5_children_t AH5_read_children_name(hid_t file_id, const char *path) { H5G_info_t ginfo; AH5_children_t children; hsize_t i, j = 0; ssize_t size; hid_t group_id; char *temp; children.childnames = NULL; /* - path must exist - number of children must be greater than zero - element name - must be readable - must NOT be same as "_param" */ if (AH5_path_valid(file_id, path) || strcmp(path, "/") == 0) { group_id = H5Gopen(file_id, path, H5P_DEFAULT); H5Gget_info(group_id, &ginfo); if (ginfo.nlinks > 0) { temp = malloc(sizeof(*temp)); children.childnames = (char **) malloc((size_t) ginfo.nlinks * sizeof(char *)); for (i = 0; i < ginfo.nlinks; i++) { size = H5Lget_name_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, i, NULL, 0, H5P_DEFAULT); if (size < 0) AH5_log_error("Cannot read all children of \"%s\". *****\n\n", path); else { temp = realloc(temp, (size + 1) * sizeof(*temp)); H5Lget_name_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, i, temp, size + 1, H5P_DEFAULT); if (strcmp(temp, "_param") != 0) // exclude parameterized attributes { children.childnames[j] = (char *) malloc((size + 2) * sizeof(char)); strcpy(children.childnames[j], "/"); strcat(children.childnames[j++], temp); } } } free(temp); if (j == 0) free(children.childnames); } H5Gclose(group_id); } children.nb_children = j; return children; }
/*
 * Build a list of the names of objects directly inside `group`, keeping
 * only objects whose type matches `filter` (pass -1 for "any").
 * Returns the list built so far on any HDF5 error (possibly NULL).
 *
 * Fix: `size` was declared size_t, so both `size < 0` error checks were
 * always false (H5Lget_name_by_idx returns a signed ssize_t); it is now
 * ssize_t and the checks actually fire.
 */
listOfPaths_t* listOfObjectsInGroup( hid_t group, H5G_obj_t filter)
{
  ERROR_SWITCH_INIT

  herr_t get_err;
  H5G_info_t groupInfo;
  H5G_obj_t objType;
  hsize_t i, numberOfLinks;
  ssize_t size;   /* fix: was size_t — error checks below never triggered */
  char* name = NULL;
  listOfPaths_t* listOfPaths = NULL;

  // get number of links in group
  ERROR_SWITCH_OFF
  get_err = H5Gget_info(group, &groupInfo);
  ERROR_SWITCH_ON
  if (get_err < 0) return listOfPaths;
  numberOfLinks = groupInfo.nlinks;

  // loop on all links in group
  for (i=0; i<numberOfLinks; i++)
  {
    // get type of current link
    objType = H5Gget_objtype_by_idx(group, i);

    // only add requested type of objects to the list
    if ( objType == filter || filter == -1)
    {
      // get length of object name
      ERROR_SWITCH_OFF
      size = H5Lget_name_by_idx(group, ".", H5_INDEX_NAME, H5_ITER_INC, i, NULL, -1, H5P_DEFAULT);
      ERROR_SWITCH_ON
      if (size < 0) return listOfPaths;

      // get object name
      // allocate with extra character for '\0'
      name = (char *) malloc((size+1)*sizeof(char));
      ERROR_SWITCH_OFF
      size = H5Lget_name_by_idx(group, ".", H5_INDEX_NAME, H5_ITER_INC, i, name, size+1, H5P_DEFAULT);
      ERROR_SWITCH_ON
      if (size < 0)
      {
        free(name);
        return listOfPaths;
      }

      // add it into the list
      listOfPaths = addCopyToList(listOfPaths, name);
      free(name);
      name = NULL;
    }
  }

  return listOfPaths;
}
/**
 * Recursively walks the links below @p base in increasing-name order.
 *
 * For each link the object header is inspected:
 *  - groups are opened and descended into recursively, with their name
 *    (plus a trailing "/") appended to the running @p baseName;
 *  - datasets increment @p param->count and, when @p param->entries is
 *    non-NULL, have their entry name recorded at the current count slot.
 *
 * A first pass with @p param->entries == NULL therefore counts datasets
 * so the caller can size the entries array before a second pass.
 * NOTE(review): the len_name returned by H5Lget_name_by_idx is used
 * unchecked — presumably assumed non-negative; confirm with callers.
 */
void DCGroup::getEntriesInternal(H5Handle base, const std::string baseGroup,
        std::string baseName, VisitObjCBType *param)
throw (DCException)
{
    H5G_info_t group_info;
    H5Gget_info(base, &group_info);

    for (size_t i = 0; i < group_info.nlinks; ++i)
    {
        std::string currentBaseName = baseName;
        std::string currentEntryName = "";

        // Classify the i-th object (group vs dataset).
        H5O_info_t obj_info;
        H5Oget_info_by_idx(base, ".", H5_INDEX_NAME, H5_ITER_INC, i, &obj_info, H5P_DEFAULT);

        if (param->entries)
        {
            // Two-step name fetch: first call sizes, second call copies.
            ssize_t len_name = H5Lget_name_by_idx(base, ".",
                    H5_INDEX_NAME, H5_ITER_INC, i, NULL, 0, H5P_LINK_ACCESS_DEFAULT);
            char *link_name_c = new char[len_name + 1];
            H5Lget_name_by_idx(base, ".",
                    H5_INDEX_NAME, H5_ITER_INC, i, link_name_c, len_name + 1, H5P_LINK_ACCESS_DEFAULT);
            currentEntryName = std::string(link_name_c) + std::string("/");
            currentBaseName += currentEntryName;
            delete[] link_name_c;
        }

        if (obj_info.type == H5O_TYPE_GROUP)
        {
            // Recurse into subgroup, then release its handle.
            hid_t group_id = H5Oopen_by_idx(base, ".", H5_INDEX_NAME, H5_ITER_INC, i, H5P_DEFAULT);
            getEntriesInternal(group_id, baseGroup, currentBaseName, param);
            H5Oclose(group_id);
        }

        if (obj_info.type == H5O_TYPE_DATASET)
        {
            if (param->entries)
                param->entries[param->count].name = currentEntryName;

            param->count++;
        }
    }
}
int getVariableNames_v1(int _iFile, char **pstNameList) { hsize_t i = 0; hsize_t iCount = 0; herr_t status = 0; int iNbItem = 0; H5O_info_t oinfo; H5G_info_t ginfo; status = H5Gget_info(_iFile, &ginfo); if (status != 0) { return 0; } iCount = ginfo.nlinks; for (i = 0; i < iCount; i++) { status = H5Oget_info_by_idx(_iFile, "/", H5_INDEX_NAME, H5_ITER_NATIVE, i, &oinfo, H5P_DEFAULT); if (status < 0) { return 0; } if (oinfo.type == H5O_TYPE_DATASET) { if (pstNameList != NULL) { ssize_t iLen = H5Lget_name_by_idx(_iFile, ".", H5_INDEX_NAME, H5_ITER_INC, i, 0, 0, H5P_DEFAULT) + 1; pstNameList[iNbItem] = (char*)MALLOC(sizeof(char) * iLen); H5Lget_name_by_idx(_iFile, ".", H5_INDEX_NAME, H5_ITER_INC, i, pstNameList[iNbItem], iLen, H5P_DEFAULT); } iNbItem++; } } return iNbItem; }
/*
 * Open the HDF5 group for chromosome `chrom` of `genome` into
 * `chromosome`, replacing whatever chromosome was previously open.
 *
 * Two layouts are supported: a genome directory (one "<chrom>SUFFIX_H5"
 * file per chromosome, group "/") or a single genome file (one subgroup
 * "/<chrom>" per chromosome).  On success the supercontig metadata array
 * is populated and 0 returned; on failure -1 is returned with the
 * chromosome left invalid.
 *
 * Fix: H5Gget_info and H5Literate were previously called INSIDE assert(),
 * so compiling with NDEBUG silently removed them; the calls are now
 * unconditional and only their results are asserted.
 */
int seek_chromosome(char *chrom, genome_t *genome, chromosome_t *chromosome,
                    bool verbose)
{
  hid_t h5file = -1;
  hid_t h5group = -1;
  H5G_info_t h5group_info;
  char *where = NULL;

  /* must be specified to H5Literate; allows interruption and resumption,
     but I don't use it */
  hsize_t idx = 0;

  err_state_t err_state;

  if (verbose) {
    fprintf(stderr, "%s\n", chrom);
  }

  assert(is_valid_genome(genome));

  /* close old chromosome and start creating the new one */
  close_chromosome(chromosome);
  chromosome->chrom = chrom;

  if (genome->dirname) {
    /* if genome is a directory, compute path and open h5file */
    char *h5filename = NULL;
    char *h5filename_suffix;

    /* allocate space for h5filename, including 2 bytes for '/' and '\0' */
    h5filename = xmalloc((strlen(genome->dirname) + strlen(chrom)
                          + strlen(SUFFIX_H5) + 2) * sizeof(char));
    assert(h5filename);

    /* set h5filename */
    h5filename_suffix = stpcpy(h5filename, genome->dirname);
    h5filename_suffix = stpcpy(h5filename_suffix, "/");
    h5filename_suffix = stpcpy(h5filename_suffix, chrom);
    strcpy(h5filename_suffix, SUFFIX_H5);

    /* open the chromosome file */
    disable_h5_errors(&err_state);
    h5file = H5Fopen(h5filename, H5F_ACC_RDWR, H5P_DEFAULT);
    enable_h5_errors(&err_state);

    /* read chromosome from the root group */
    chromosome->h5file = h5file;

    /* allocate space for where = "/\0" */
    where = strdup("/");
    assert(where);

    /* free no-longer-used filename */
    free(h5filename);
  } else {
    /* if genome is a file, compute internal path and open group */
    if (genome->h5file >= 0) {
      h5file = genome->h5file;
      char *where_suffix;

      /* allocate space for where, including 2 bytes for '/' and '\0' */
      where = xmalloc((strlen(chrom) + 2) * sizeof(char));
      assert(where);

      /* read chromosome from subgroup of h5file */
      where_suffix = stpcpy(where, "/");
      strcpy(where_suffix, chrom);
    }
  }

  if (h5file >= 0) {
    /* Open the chromosome group, regardless of dir/file implementation */
    disable_h5_errors(&err_state);
    h5group = H5Gopen(h5file, where, H5P_DEFAULT);
    enable_h5_errors(&err_state);
  }

  chromosome->h5group = h5group;

  /* clean up memory before returning */
  free(where);

  /* if opening failed, then return -1 with h5file set bad */
  if (is_valid_chromosome(chromosome)) {
    herr_t h5status;

    /* allocate supercontig metadata array
     * (calls hoisted out of assert so they survive NDEBUG builds) */
    h5status = H5Gget_info(chromosome->h5group, &h5group_info);
    assert(h5status >= 0);
    init_supercontig_array(h5group_info.nlinks, chromosome);

    /* populate supercontig metadata array */
    h5status = H5Literate(chromosome->h5group, H5_INDEX_NAME, H5_ITER_INC,
                          &idx, supercontig_visitor, chromosome);
    assert(h5status == 0);
    return 0;
  } else {
    if (verbose) {
      fprintf(stderr, " can't open chromosome: %s\n", chromosome->chrom);
    }
    return -1;
  }
}
/****************************************************************
**
**  test_links(): Test soft and hard link iteration
**
****************************************************************/
static void test_links(hid_t fapl)
{
    hid_t file;                 /* File ID */
    char obj_name[NAMELEN];     /* Names of the object in group */
    ssize_t name_len;           /* Length of object's name */
    hid_t gid, gid1;
    H5G_info_t ginfo;           /* Buffer for querying object's info */
    hsize_t i;
    herr_t ret;                 /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Soft and Hard Link Iteration Functionality\n"));

    /* Create the test file with the datasets */
    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    CHECK(file, FAIL, "H5Fcreate");

    /* create groups */
    gid = H5Gcreate2(file, "/g1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(gid, FAIL, "H5Gcreate2");

    gid1 = H5Gcreate2(file, "/g1/g1.1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(gid1, FAIL, "H5Gcreate2");

    /* create soft and hard links to the group "/g1". */
    ret = H5Lcreate_soft("something", gid, "softlink", H5P_DEFAULT, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Lcreate_soft");

    ret = H5Lcreate_hard(gid, "/g1", H5L_SAME_LOC, "hardlink", H5P_DEFAULT, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Lcreate_hard");

    /* /g1 now holds three links: g1.1, hardlink, softlink */
    ret = H5Gget_info(gid, &ginfo);
    CHECK(ret, FAIL, "H5Gget_info");
    VERIFY(ginfo.nlinks, 3, "H5Gget_info");

    /* Test these two functions, H5Oget_info_by_idx and H5Lget_name_by_idx */
    for(i = 0; i < ginfo.nlinks; i++) {
        H5O_info_t oinfo;       /* Object info */
        H5L_info_t linfo;       /* Link info */

        /* Get link name */
        name_len = H5Lget_name_by_idx(gid, ".", H5_INDEX_NAME, H5_ITER_INC, i, obj_name, (size_t)NAMELEN, H5P_DEFAULT);
        CHECK(name_len, FAIL, "H5Lget_name_by_idx");

        /* Get link type */
        ret = H5Lget_info_by_idx(gid, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &linfo, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Lget_info_by_idx");

        /* Get object type (hard links only — a dangling soft link has no
         * object header to query) */
        if(linfo.type == H5L_TYPE_HARD) {
            ret = H5Oget_info_by_idx(gid, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo, H5P_DEFAULT);
            CHECK(ret, FAIL, "H5Oget_info_by_idx");
        } /* end if */

        /* Verify each expected link by name; in H5_ITER_INC order the hard
         * links ("g1.1", "hardlink") precede "softlink", so oinfo is set
         * before it is read here. */
        if(!HDstrcmp(obj_name, "g1.1"))
            VERIFY(oinfo.type, H5O_TYPE_GROUP, "H5Lget_name_by_idx");
        else if(!HDstrcmp(obj_name, "hardlink"))
            VERIFY(oinfo.type, H5O_TYPE_GROUP, "H5Lget_name_by_idx");
        else if(!HDstrcmp(obj_name, "softlink"))
            VERIFY(linfo.type, H5L_TYPE_SOFT, "H5Lget_name_by_idx");
        else
            CHECK(0, 0, "unknown object name");
    } /* end for */

    ret = H5Gclose(gid);
    CHECK(ret, FAIL, "H5Gclose");

    ret = H5Gclose(gid1);
    CHECK(ret, FAIL, "H5Gclose");

    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");
} /* test_links() */
/*
 * Open an HDF5 file produced by the detector, scan its "/entry" group for
 * datasets named "data*", cache a handle plus geometry/type info for each
 * in mpDatasets, and publish the aggregate parameters (frame counts,
 * max dimensions).  Optionally kicks off an acquisition when the
 * auto-load parameter is set.  Returns asynError on any failure.
 */
asynStatus hdf5Driver::openFile (const char *path)
{
    asynStatus status = asynSuccess;
    const char *functionName = "loadFile";
    hid_t fileId, groupId;
    H5G_info_t groupInfo;
    size_t totalFrames = 0;
    size_t maxWidth = 0, maxHeight = 0;

    // Reset some parameters
    setIntegerParam(HDF5DatasetsCount, 0);
    setIntegerParam(HDF5TotalFrames, 0);
    setIntegerParam(HDF5FirstFrame, 0);
    setIntegerParam(HDF5LastFrame, 0);
    setIntegerParam(HDF5CurrentFrame, 0);
    setIntegerParam(ADMaxSizeX, 0);
    setIntegerParam(ADMaxSizeY, 0);
    callParamCallbacks();

    // Get a file handle
    if((fileId = H5Fopen(path, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
    {
        asynPrint(pasynUserSelf, ASYN_TRACE_ERROR,
                "%s:%s couldn't open file '%s'\n",
                driverName, functionName, path);
        status = asynError;
        goto end;
    }

    // Get a handle to the '/entry' group
    if((groupId = H5Gopen2(fileId, "/entry", H5P_DEFAULT)) < 0)
    {
        asynPrint(pasynUserSelf, ASYN_TRACE_ERROR,
                "%s:%s couldn't open 'entry' group\n",
                driverName, functionName);
        status = asynError;
        goto closeFile;
    }

    // Need groupInfo to obtain number of links
    if(H5Gget_info(groupId, &groupInfo))
    {
        asynPrint(pasynUserSelf, ASYN_TRACE_ERROR,
                "%s:%s couldn't get group info\n",
                driverName, functionName);
        status = asynError;
        goto closeGroup;
    }

    // Deallocate information of previous file
    if(mpDatasets)
    {
        for(size_t i = 0; i < mDatasetsCount; ++i)
            H5Dclose(mpDatasets[i].id);
        free(mpDatasets);
    }

    // Allocate memory to store dataset information
    // (nlinks is an upper bound; only "data*" links become entries)
    mpDatasets = (struct dsetInfo*) calloc(groupInfo.nlinks, sizeof(*mpDatasets));
    mDatasetsCount = 0;

    // Iterate over '/entry' objects
    for(size_t i = 0; i < groupInfo.nlinks; ++i)
    {
        // Get object name
        char dsetName[256];
        H5Lget_name_by_idx(groupId, ".", H5_INDEX_NAME, H5_ITER_INC, i,
                dsetName, sizeof(dsetName), H5P_DEFAULT);

        // If it doesn't start with 'data' it isn't a dataset. Ignore it.
        if(strncmp(dsetName, "data", 4))
            continue;

        // Get a handle to the dataset info structure
        struct dsetInfo *pDSet = &mpDatasets[mDatasetsCount++];
        pDSet->id = H5Dopen2(groupId, dsetName, H5P_DEFAULT);

        // Read dataset attributes
        H5LTget_attribute_int(pDSet->id, ".", "image_nr_low", &pDSet->imageNrLow);
        H5LTget_attribute_int(pDSet->id, ".", "image_nr_high", &pDSet->imageNrHigh);

        // Read dimensions (assume a 3D dataset)
        hsize_t dims[3] = {0,0,0};
        H5LTget_dataset_info(pDSet->id, ".", dims, NULL, NULL);
        totalFrames += dims[0];
        pDSet->height = dims[1];
        pDSet->width = dims[2];

        // Calculate maxHeight and maxWidth
        if(dims[1] > maxHeight)
            maxHeight = dims[1];
        if(dims[2] > maxWidth)
            maxWidth = dims[2];

        // Read type
        if(parseType(pDSet->id, &pDSet->type))
        {
            asynPrint(pasynUserSelf, ASYN_TRACE_ERROR,
                    "%s:%s couldn't parse dataset type\n",
                    driverName, functionName);
            status = asynError;
        }
    }

    // Update parameters
    setIntegerParam(HDF5DatasetsCount, (int) mDatasetsCount);
    setIntegerParam(HDF5TotalFrames, (int) totalFrames);

    if(mDatasetsCount > 0)
    {
        // Frame numbering spans first dataset's low to last dataset's high
        // (assumes datasets arrive in increasing frame order — H5_ITER_INC)
        int firstFrame = mpDatasets[0].imageNrLow;
        int lastFrame  = mpDatasets[mDatasetsCount-1].imageNrHigh;
        setIntegerParam(HDF5FirstFrame, firstFrame);
        setIntegerParam(HDF5LastFrame, lastFrame);
        setIntegerParam(HDF5CurrentFrame, firstFrame);
        setIntegerParam(ADMaxSizeX, maxWidth);
        setIntegerParam(ADMaxSizeY, maxHeight);

        // Optionally trigger a single-frame acquisition right away
        int autoLoad;
        getIntegerParam(HDF5AutoLoadFrame, &autoLoad);
        if(autoLoad)
        {
            setIntegerParam(ADImageMode, ADImageSingle);
            setIntegerParam(ADNumImages, 1);
            setIntegerParam(ADAcquire, 1);
            epicsEventSignal(mStartEventId);
        }
    }

    callParamCallbacks();

closeGroup:
    H5Gclose(groupId);
closeFile:
    H5Fclose(fileId);
end:
    return status;
}
/****************************************************************
**
**  test_iter_group(): Test group iteration functionality
**
**  Creates NDATASETS datasets plus one group ("grp") and one named
**  datatype ("dtype") in the root group, then exercises H5Literate,
**  H5Lget_name_by_idx and H5Oget_info_by_idx over them, including
**  error cases (invalid start indices) and iterator-restart behavior.
**
**  Fixes: the H5Gcreate2("grp") result is now checked via `grp`
**  (previously CHECK'd the stale `ret`), and the strdup of "dtype" is
**  checked at lnames[NDATASETS + 1] (previously re-checked slot
**  NDATASETS).
**
****************************************************************/
static void test_iter_group(hid_t fapl, hbool_t new_format)
{
    hid_t file;                 /* File ID */
    hid_t dataset;              /* Dataset ID */
    hid_t datatype;             /* Common datatype ID */
    hid_t filespace;            /* Common dataspace ID */
    hid_t root_group,grp;       /* Root group ID */
    int i;                      /* counting variable */
    hsize_t idx;                /* Index in the group */
    char name[NAMELEN];         /* temporary name buffer */
    char *lnames[NDATASETS + 2];/* Names of the links created */
    char dataset_name[NAMELEN]; /* dataset name */
    iter_info info;             /* Custom iteration information */
    H5G_info_t ginfo;           /* Buffer for querying object's info */
    herr_t ret;                 /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Group Iteration Functionality\n"));

    /* Create the test file with the datasets */
    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    CHECK(file, FAIL, "H5Fcreate");

    /* Test iterating over empty group */
    info.command = RET_ZERO;
    idx = 0;
    ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
    VERIFY(ret, SUCCEED, "H5Literate");

    datatype = H5Tcopy(H5T_NATIVE_INT);
    CHECK(datatype, FAIL, "H5Tcopy");

    filespace=H5Screate(H5S_SCALAR);
    CHECK(filespace, FAIL, "H5Screate");

    for(i=0; i< NDATASETS; i++) {
        sprintf(name,"Dataset %d",i);
        dataset = H5Dcreate2(file, name, datatype, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        CHECK(dataset, FAIL, "H5Dcreate2");

        /* Keep a copy of the dataset names around for later */
        lnames[i] = HDstrdup(name);
        CHECK(lnames[i], NULL, "strdup");

        ret = H5Dclose(dataset);
        CHECK(ret, FAIL, "H5Dclose");
    } /* end for */

    /* Create a group and named datatype under root group for testing */
    grp = H5Gcreate2(file, "grp", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(grp, FAIL, "H5Gcreate2");     /* fixed: was CHECK(ret, ...) */

    lnames[NDATASETS] = HDstrdup("grp");
    CHECK(lnames[NDATASETS], NULL, "strdup");

    ret = H5Tcommit2(file, "dtype", datatype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Tcommit2");

    lnames[NDATASETS + 1] = HDstrdup("dtype");
    CHECK(lnames[NDATASETS + 1], NULL, "strdup");   /* fixed: wrong slot */

    /* Close everything up */
    ret = H5Tclose(datatype);
    CHECK(ret, FAIL, "H5Tclose");

    ret = H5Gclose(grp);
    CHECK(ret, FAIL, "H5Gclose");

    ret = H5Sclose(filespace);
    CHECK(ret, FAIL, "H5Sclose");

    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");

    /* Sort the dataset names */
    HDqsort(lnames, (size_t)(NDATASETS + 2), sizeof(char *), iter_strcmp);

    /* Iterate through the datasets in the root group in various ways */
    file = H5Fopen(DATAFILE, H5F_ACC_RDONLY, fapl);
    CHECK(file, FAIL, "H5Fopen");

    /* These two functions, H5Oget_info_by_idx and H5Lget_name_by_idx, actually
     * iterate through B-tree for group members in internal library design.
     */
    root_group = H5Gopen2(file, "/", H5P_DEFAULT);
    CHECK(root_group, FAIL, "H5Gopen2");

    ret = H5Gget_info(root_group, &ginfo);
    CHECK(ret, FAIL, "H5Gget_info");
    VERIFY(ginfo.nlinks, (NDATASETS + 2), "H5Gget_info");

    for(i = 0; i< (int)ginfo.nlinks; i++) {
        H5O_info_t oinfo;               /* Object info */

        ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Lget_name_by_idx");

        ret = H5Oget_info_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Oget_info_by_idx");
    } /* end for */

    /* Out-of-range index must fail */
    H5E_BEGIN_TRY {
        ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)(NDATASETS+3), dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Lget_name_by_idx");

    ret = H5Gclose(root_group);
    CHECK(ret, FAIL, "H5Gclose");

    /* These two functions, H5Oget_info_by_idx and H5Lget_name_by_idx, actually
     * iterate through B-tree for group members in internal library design.
     * (Same as test above, but with the file ID instead of opening the root group)
     */
    ret = H5Gget_info(file, &ginfo);
    CHECK(ret, FAIL, "H5Gget_info");
    VERIFY(ginfo.nlinks, NDATASETS + 2, "H5Gget_info");

    for(i = 0; i< (int)ginfo.nlinks; i++) {
        H5O_info_t oinfo;               /* Object info */

        ret = (herr_t)H5Lget_name_by_idx(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Lget_name_by_idx");

        ret = H5Oget_info_by_idx(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Oget_info_by_idx");
    } /* end for */

    H5E_BEGIN_TRY {
        ret = (herr_t)H5Lget_name_by_idx(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)(NDATASETS + 3), dataset_name, (size_t)NAMELEN, H5P_DEFAULT);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Lget_name_by_idx");

    /* Test invalid indices for starting iteration */
    info.command = RET_ZERO;
    idx = (hsize_t)-1;
    H5E_BEGIN_TRY {
        ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Literate");

    /* Test skipping exactly as many entries as in the group */
    idx = NDATASETS + 2;
    H5E_BEGIN_TRY {
        ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Literate");

    /* Test skipping more entries than are in the group */
    idx = NDATASETS + 3;
    H5E_BEGIN_TRY {
        ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Literate");

    /* Test all objects in group, when callback always returns 0 */
    info.command = RET_ZERO;
    idx = 0;
    if((ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) > 0)
        TestErrPrintf("Group iteration function didn't return zero correctly!\n");

    /* Test all objects in group, when callback always returns 1 */
    /* This also tests the "restarting" ability, because the index changes */
    info.command = RET_TWO;
    idx = i = 0;
    while((ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) > 0) {
        /* Verify return value from iterator gets propagated correctly */
        VERIFY(ret, 2, "H5Literate");

        /* Increment the number of times "2" is returned */
        i++;

        /* Verify that the index is the correct value */
        VERIFY(idx, (hsize_t)i, "H5Literate");
        if(idx > (NDATASETS + 2))
            TestErrPrintf("Group iteration function walked too far!\n");

        /* Verify that the correct name is retrieved */
        if(HDstrcmp(info.name, lnames[(size_t)(idx - 1)]) != 0)
            TestErrPrintf("Group iteration function didn't return name correctly for link - lnames[%u] = '%s'!\n", (unsigned)(idx - 1), lnames[(size_t)(idx - 1)]);
    } /* end while */
    VERIFY(ret, -1, "H5Literate");

    if(i != (NDATASETS + 2))
        TestErrPrintf("%u: Group iteration function didn't perform multiple iterations correctly!\n", __LINE__);

    /* Test all objects in group, when callback changes return value */
    /* This also tests the "restarting" ability, because the index changes */
    info.command = new_format ? RET_CHANGE2 : RET_CHANGE;
    idx = i = 0;
    while((ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) >= 0) {
        /* Verify return value from iterator gets propagated correctly */
        VERIFY(ret, 1, "H5Literate");

        /* Increment the number of times "1" is returned */
        i++;

        /* Verify that the index is the correct value */
        VERIFY(idx, (hsize_t)(i + 10), "H5Literate");
        if(idx > (NDATASETS + 2))
            TestErrPrintf("Group iteration function walked too far!\n");

        /* Verify that the correct name is retrieved */
        if(HDstrcmp(info.name, lnames[(size_t)(idx - 1)]) != 0)
            TestErrPrintf("Group iteration function didn't return name correctly for link - lnames[%u] = '%s'!\n", (unsigned)(idx - 1), lnames[(size_t)(idx - 1)]);
    } /* end while */
    VERIFY(ret, -1, "H5Literate");

    if(i != 42 || idx != 52)
        TestErrPrintf("%u: Group iteration function didn't perform multiple iterations correctly!\n", __LINE__);

    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");

    /* Free the dataset names */
    for(i = 0; i< (NDATASETS + 2); i++)
        HDfree(lnames[i]);
} /* test_iter_group() */
/*
 * Merge the time-series Samples groups from a node-step file into the
 * job-merged file.  For each series group found under the node-step
 * Samples group, the raw samples are copied under the job node's Samples
 * group and, when the series operations provide one, a summary total is
 * written under the job node's Totals group.
 *
 * jgid_tasks - job tasks group (unused here; kept for interface parity)
 * jg_node    - job node group in the merged file (destination)
 * nsg_node   - node-step group in the source file
 */
static void _merge_series_data(hid_t jgid_tasks, hid_t jg_node,
			       hid_t nsg_node)
{
	hid_t jg_samples, nsg_samples;
	hid_t g_series, g_series_total = -1;
	hsize_t num_samples, n_series, idsx;
	int len;
	void *data = NULL, *series_total = NULL;
	uint32_t type;
	char *data_type;
	char nam_series[MAX_GROUP_NAME+1];
	hdf5_api_ops_t *ops = NULL;
	H5G_info_t group_info;
	H5O_info_t object_info;

	if (jg_node < 0) {
		info("Job Node is not HDF5 object");
		return;
	}
	if (nsg_node < 0) {
		info("Node-Step is not HDF5 object");
		return;
	}

	jg_samples = H5Gcreate(jg_node, GRP_SAMPLES,
			       H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
	if (jg_samples < 0) {
		info("Failed to create job node Samples");
		return;
	}
	nsg_samples = get_group(nsg_node, GRP_SAMPLES);
	if (nsg_samples < 0) {
		H5Gclose(jg_samples);
		debug("Failed to get node-step Samples");
		return;
	}
	H5Gget_info(nsg_samples, &group_info);
	n_series = group_info.nlinks;
	if (n_series < 1) {
		// No series?
		H5Gclose(jg_samples);
		H5Gclose(nsg_samples);
		info("No Samples");
		return;
	}
	for (idsx = 0; idsx < n_series; idsx++) {
		H5Oget_info_by_idx(nsg_samples, ".", H5_INDEX_NAME,
				   H5_ITER_INC, idsx, &object_info,
				   H5P_DEFAULT);
		/* Only sub-groups are series; skip datasets/attributes. */
		if (object_info.type != H5O_TYPE_GROUP)
			continue;

		len = H5Lget_name_by_idx(nsg_samples, ".", H5_INDEX_NAME,
					 H5_ITER_INC, idsx, nam_series,
					 MAX_GROUP_NAME, H5P_DEFAULT);
		if (len < 1 || len > MAX_GROUP_NAME) {
			info("Invalid group name %s", nam_series);
			continue;
		}
		g_series = H5Gopen(nsg_samples, nam_series, H5P_DEFAULT);
		if (g_series < 0) {
			info("Failed to open %s", nam_series);
			continue;
		}
		H5Gget_info(g_series, &group_info);
		num_samples = group_info.nlinks;
		if (num_samples <= 0) {
			H5Gclose(g_series);
			info("_series %s has no samples", nam_series);
			continue;
		}
		// Get first sample in series to find out how big the data is.
		data_type = get_string_attribute(g_series, ATTR_DATATYPE);
		if (!data_type) {
			H5Gclose(g_series);
			info("Failed to get datatype for Time Series Dataset");
			continue;
		}
		type = acct_gather_profile_type_from_string(data_type);
		xfree(data_type);
		data = _get_all_samples(g_series, nam_series, type,
					num_samples);
		if (data == NULL) {
			H5Gclose(g_series);
			info("Failed to get memory for Time Series Dataset");
			continue;
		}
		put_hdf5_data(jg_samples, type, SUBDATA_SERIES, nam_series,
			      data, num_samples);
		ops = profile_factory(type);
		if (ops == NULL) {
			xfree(data);
			H5Gclose(g_series);
			info("Failed to create operations for %s",
			     acct_gather_profile_type_to_string(type));
			continue;
		}
		series_total = (*(ops->series_total))(num_samples, data);
		if (series_total != NULL) {
			// Totals for series attaches to node
			g_series_total = make_group(jg_node, GRP_TOTALS);
			if (g_series_total < 0) {
				H5Gclose(g_series);
				xfree(series_total);
				xfree(data);
				xfree(ops);
				info("Failed to make Totals for Node");
				continue;
			}
			put_hdf5_data(g_series_total, type, SUBDATA_SUMMARY,
				      nam_series, series_total, 1);
			H5Gclose(g_series_total);
		}
		xfree(series_total);
		xfree(ops);
		xfree(data);
		H5Gclose(g_series);
	}
	/*
	 * BUG FIX: both sample-group handles were previously leaked on the
	 * successful path (only the early-return paths closed them).
	 */
	H5Gclose(nsg_samples);
	H5Gclose(jg_samples);
	return;
}
/****************************************************************
**
**  test_grp_memb_funcs(): Test group member information
**                         functionality
**
****************************************************************/
static void
test_grp_memb_funcs(hid_t fapl)
{
    hid_t file;                 /* File ID */
    hid_t dataset;              /* Dataset ID */
    hid_t datatype;             /* Common datatype ID */
    hid_t filespace;            /* Common dataspace ID */
    hid_t root_group, grp;      /* Root group ID */
    int i;                      /* counting variable */
    char name[NAMELEN];         /* temporary name buffer */
    char *dnames[NDATASETS + 2];    /* Names of the datasets created */
    char *obj_names[NDATASETS + 2]; /* Names of the objects in group */
    char dataset_name[NAMELEN]; /* dataset name */
    ssize_t name_len;           /* Length of object's name */
    H5G_info_t ginfo;           /* Buffer for querying object's info */
    herr_t ret = SUCCEED;       /* Generic return value */

    /* Output message about test being performed */
    MESSAGE(5, ("Testing Group Member Information Functionality\n"));

    /* Create the test file with the datasets */
    file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    CHECK(file, FAIL, "H5Fcreate");

    datatype = H5Tcopy(H5T_NATIVE_INT);
    CHECK(datatype, FAIL, "H5Tcopy");

    filespace = H5Screate(H5S_SCALAR);
    CHECK(filespace, FAIL, "H5Screate");

    for(i = 0; i < NDATASETS; i++) {
        sprintf(name, "Dataset %d", i);
        dataset = H5Dcreate2(file, name, datatype, filespace, H5P_DEFAULT,
                             H5P_DEFAULT, H5P_DEFAULT);
        CHECK(dataset, FAIL, "H5Dcreate2");

        /* Keep a copy of the dataset names around for later */
        dnames[i] = HDstrdup(name);
        CHECK(dnames[i], NULL, "strdup");

        ret = H5Dclose(dataset);
        CHECK(ret, FAIL, "H5Dclose");
    } /* end for */

    /* Create a group and named datatype under root group for testing */
    grp = H5Gcreate2(file, "grp", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    /* BUG FIX: verify the new group ID, not the stale 'ret' value */
    CHECK(grp, FAIL, "H5Gcreate2");

    dnames[NDATASETS] = HDstrdup("grp");
    CHECK(dnames[NDATASETS], NULL, "strdup");

    ret = H5Tcommit2(file, "dtype", datatype, H5P_DEFAULT, H5P_DEFAULT,
                     H5P_DEFAULT);
    CHECK(ret, FAIL, "H5Tcommit2");

    dnames[NDATASETS + 1] = HDstrdup("dtype");
    /* BUG FIX: check the entry just assigned (NDATASETS + 1) */
    CHECK(dnames[NDATASETS + 1], NULL, "strdup");

    /* Close everything up */
    ret = H5Tclose(datatype);
    CHECK(ret, FAIL, "H5Tclose");

    ret = H5Gclose(grp);
    CHECK(ret, FAIL, "H5Gclose");

    ret = H5Sclose(filespace);
    CHECK(ret, FAIL, "H5Sclose");

    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");

    /* Sort the dataset names */
    HDqsort(dnames, (size_t)(NDATASETS + 2), sizeof(char *), iter_strcmp);

    /* Iterate through the datasets in the root group in various ways */
    file = H5Fopen(DATAFILE, H5F_ACC_RDONLY, fapl);
    CHECK(file, FAIL, "H5Fopen");

    /* These two functions, H5Oget_info_by_idx and H5Lget_name_by_idx, actually
     * iterate through B-tree for group members in internal library design.
     */
    root_group = H5Gopen2(file, "/", H5P_DEFAULT);
    CHECK(root_group, FAIL, "H5Gopen2");

    ret = H5Gget_info(root_group, &ginfo);
    CHECK(ret, FAIL, "H5Gget_info");
    VERIFY(ginfo.nlinks, (NDATASETS + 2), "H5Gget_info");

    for(i = 0; i < (int)ginfo.nlinks; i++) {
        H5O_info_t oinfo;               /* Object info */

        /* Test with NULL for name, to query length */
        name_len = H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME,
                                      H5_ITER_INC, (hsize_t)i, NULL,
                                      (size_t)NAMELEN, H5P_DEFAULT);
        CHECK(name_len, FAIL, "H5Lget_name_by_idx");

        ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME,
                                         H5_ITER_INC, (hsize_t)i,
                                         dataset_name,
                                         (size_t)(name_len + 1), H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Lget_name_by_idx");

        /* Double-check that the length is the same */
        VERIFY(ret, name_len, "H5Lget_name_by_idx");

        /* Keep a copy of the dataset names around for later */
        obj_names[i] = HDstrdup(dataset_name);
        CHECK(obj_names[i], NULL, "strdup");

        ret = H5Oget_info_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC,
                                 (hsize_t)i, &oinfo, H5P_DEFAULT);
        CHECK(ret, FAIL, "H5Oget_info_by_idx");

        if(!HDstrcmp(dataset_name, "grp"))
            VERIFY(oinfo.type, H5O_TYPE_GROUP, "H5Lget_name_by_idx");
        if(!HDstrcmp(dataset_name, "dtype"))
            VERIFY(oinfo.type, H5O_TYPE_NAMED_DATATYPE, "H5Lget_name_by_idx");
        if(!HDstrncmp(dataset_name, "Dataset", (size_t)7))
            VERIFY(oinfo.type, H5O_TYPE_DATASET, "H5Lget_name_by_idx");
    } /* end for */

    /* Querying past the last index must fail cleanly */
    H5E_BEGIN_TRY {
        ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME,
                                         H5_ITER_INC,
                                         (hsize_t)(NDATASETS + 3),
                                         dataset_name, (size_t)NAMELEN,
                                         H5P_DEFAULT);
    } H5E_END_TRY;
    VERIFY(ret, FAIL, "H5Lget_name_by_idx");

    /* Sort the object names */
    HDqsort(obj_names, (size_t)(NDATASETS + 2), sizeof(char *), iter_strcmp);

    /* Compare object names */
    for(i = 0; i < (int)ginfo.nlinks; i++) {
        ret = HDstrcmp(dnames[i], obj_names[i]);
        VERIFY(ret, 0, "HDstrcmp");
    } /* end for */

    ret = H5Gclose(root_group);
    CHECK(ret, FAIL, "H5Gclose");

    ret = H5Fclose(file);
    CHECK(ret, FAIL, "H5Fclose");

    /* Free the dataset names */
    for(i = 0; i < (NDATASETS + 2); i++) {
        HDfree(dnames[i]);
        HDfree(obj_names[i]);
    } /* end for */
} /* test_grp_memb_funcs() */
/*
 * Merge per-task Totals from a node-step file into the job-merged file.
 * For every task group in the node-step file a corresponding job task
 * group is created (tagged with node name, task id and cpus-per-task),
 * and each dataset under the task's Totals group is copied across.
 *
 * jg_tasks  - job tasks group in the merged file (destination)
 * nsg_node  - node-step group in the source file
 * node_name - name of the node this step ran on
 */
static void _merge_task_totals(hid_t jg_tasks, hid_t nsg_node,
			       char *node_name)
{
	hid_t jg_task, jg_totals, nsg_totals,
	      g_total, nsg_tasks, nsg_task = -1;
	hsize_t nobj, ntasks, i, taskx;
	int len, taskid, taskcpus, size_data;
	void *data;
	uint32_t type;
	char buf[MAX_GROUP_NAME+1];
	char group_name[MAX_GROUP_NAME+1];
	H5G_info_t group_info;

	if (jg_tasks < 0) {
		info("Job Tasks is not HDF5 object");
		return;
	}
	if (nsg_node < 0) {
		info("Node-Step is not HDF5 object");
		return;
	}

	nsg_tasks = get_group(nsg_node, GRP_TASKS);
	if (nsg_tasks < 0) {
		debug("No Tasks group in node-step file");
		return;
	}
	H5Gget_info(nsg_tasks, &group_info);
	ntasks = group_info.nlinks;
	for (taskx = 0; taskx < ntasks; taskx++) {
		// Get the name of the group.
		len = H5Lget_name_by_idx(nsg_tasks, ".", H5_INDEX_NAME,
					 H5_ITER_INC, taskx, buf,
					 MAX_GROUP_NAME, H5P_DEFAULT);
		if (len < 1 || len > MAX_GROUP_NAME) {
			info("Invalid group name %s", buf);
			continue;
		}
		nsg_task = H5Gopen(nsg_tasks, buf, H5P_DEFAULT);
		if (nsg_task < 0) {
			debug("Failed to open %s", buf);
			continue;
		}
		taskid = get_int_attribute(nsg_task, ATTR_TASKID);
		/* BUG FIX: snprintf guards against overflowing group_name */
		snprintf(group_name, sizeof(group_name), "%s_%d",
			 GRP_TASK, taskid);
		jg_task = H5Gcreate(jg_tasks, group_name,
				    H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
		if (jg_task < 0) {
			H5Gclose(nsg_task);
			info("Failed to create job task group");
			continue;
		}
		put_string_attribute(jg_task, ATTR_NODENAME, node_name);
		put_int_attribute(jg_task, ATTR_TASKID, taskid);
		taskcpus = get_int_attribute(nsg_task, ATTR_CPUPERTASK);
		put_int_attribute(jg_task, ATTR_CPUPERTASK, taskcpus);
		nsg_totals = get_group(nsg_task, GRP_TOTALS);
		if (nsg_totals < 0) {
			/* No totals for this task; nothing to copy. */
			H5Gclose(jg_task);
			H5Gclose(nsg_task);
			continue;
		}
		jg_totals = H5Gcreate(jg_task, GRP_TOTALS,
				      H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
		if (jg_totals < 0) {
			H5Gclose(jg_task);
			H5Gclose(nsg_task);
			info("Failed to create job task totals");
			continue;
		}
		H5Gget_info(nsg_totals, &group_info);
		nobj = group_info.nlinks;
		for (i = 0; i < nobj; i++) {
			// Get the name of the group.
			len = H5Lget_name_by_idx(nsg_totals, ".",
						 H5_INDEX_NAME, H5_ITER_INC,
						 i, buf, MAX_GROUP_NAME,
						 H5P_DEFAULT);
			if (len < 1 || len > MAX_GROUP_NAME) {
				info("Invalid group name %s", buf);
				continue;
			}
			g_total = H5Gopen(nsg_totals, buf, H5P_DEFAULT);
			if (g_total < 0) {
				info("Failed to open %s", buf);
				continue;
			}
			type = get_uint32_attribute(g_total, ATTR_DATATYPE);
			if (!type) {
				H5Gclose(g_total);
				info("No %s attribute", ATTR_DATATYPE);
				continue;
			}
			data = get_hdf5_data(g_total, type, buf, &size_data);
			if (data == NULL) {
				H5Gclose(g_total);
				info("Failed to get group %s type %s data",
				     buf,
				     acct_gather_profile_type_to_string(type));
				continue;
			}
			put_hdf5_data(jg_totals, type, SUBDATA_DATA,
				      buf, data, 1);
			xfree(data);
			H5Gclose(g_total);
		}
		H5Gclose(nsg_totals);
		H5Gclose(nsg_task);
		H5Gclose(jg_totals);
		H5Gclose(jg_task);
	}
	H5Gclose(nsg_tasks);
}
int hdf5_get_num_links(hid_t loc_id) { H5G_info_t info; herr_t status = H5Gget_info(loc_id, &info); CHECK_GE(status, 0) << "Error while counting HDF5 links."; return info.nlinks; }
static void _merge_node_totals(hid_t jg_node, hid_t nsg_node) { hid_t jg_totals, nsg_totals, g_total; hsize_t nobj; int i, len, size_data; void *data; uint32_t type; char buf[MAX_GROUP_NAME+1]; H5G_info_t group_info; if (jg_node < 0) { info("Job Node is not HDF5 object"); return; } if (nsg_node < 0) { info("Node-Step is not HDF5 object"); return; } jg_totals = H5Gcreate(jg_node, GRP_TOTALS, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); if (jg_totals < 0) { info("Failed to create job node totals"); return; } nsg_totals = get_group(nsg_node, GRP_TOTALS); if (nsg_totals < 0) { H5Gclose(jg_totals); return; } H5Gget_info(nsg_totals, &group_info); nobj = group_info.nlinks; for (i = 0; (nobj>0) && (i<nobj); i++) { // Get the name of the group. len = H5Lget_name_by_idx(nsg_totals, ".", H5_INDEX_NAME, H5_ITER_INC, i, buf, MAX_GROUP_NAME, H5P_DEFAULT); if (len<1 || len>MAX_GROUP_NAME) { info("invalid group name %s", buf); continue; } g_total = H5Gopen(nsg_totals, buf, H5P_DEFAULT); if (g_total < 0) { info("Failed to open %s", buf); continue; } type = get_uint32_attribute(g_total, ATTR_DATATYPE); if (!type) { H5Gclose(g_total); info("No %s attribute", ATTR_DATATYPE); continue; } data = get_hdf5_data(g_total, type, buf, &size_data); if (data == NULL) { H5Gclose(g_total); info("Failed to get group %s type %s data", buf, acct_gather_profile_type_to_string(type)); continue; } put_hdf5_data(jg_totals, type, SUBDATA_DATA, buf, data, 1); xfree(data); H5Gclose(g_total); } H5Gclose(nsg_totals); H5Gclose(jg_totals); return; }
hsize_t nchildren() const { H5G_info_t inf; h5e::check_error(H5Gget_info(_self, &inf)); return inf.nlinks; }
/*
 * Gather the sampled data series for every task in a step and hand the
 * collection to _series_analysis for reporting.
 *
 * fp        - output file for the analysis report
 * hd        - true if a header should be written
 * jgid_step - HDF5 group of the step being examined
 * stepx     - step number (for messages)
 */
static void _get_all_task_series(FILE *fp, bool hd, hid_t jgid_step,
				 int stepx)
{
	hid_t jgid_tasks, jgid_task = 0, jgid_nodes, jgid_node;
	H5G_info_t group_info;
	int ntasks, itx, tid;
	uint64_t *task_id;
	char **task_node_name;	/* Node Name for each task */
	char **tod = NULL;	/* Date time at each sample */
	char **series_name;	/* Node Names */
	double **all_series;	/* Pointers to all sampled for each node */
	double *et = NULL;	/* Elapsed time at each sample */
	uint64_t *series_smp;	/* Number of samples in this series */
	int nnodes, ndx, len, nsmp = 0, nitem = -1;
	char jgrp_node_name[MAX_GROUP_NAME+1];
	char jgrp_task_name[MAX_GROUP_NAME+1];
	char buf[MAX_GROUP_NAME+1];
	void *series_data = NULL;
	hdf5_api_ops_t *ops;

	jgid_nodes = get_group(jgid_step, GRP_NODES);
	if (jgid_nodes < 0)
		fatal("Failed to open group %s", GRP_NODES);
	jgid_tasks = get_group(jgid_step, GRP_TASKS);
	if (jgid_tasks < 0)
		fatal("No tasks in step %d", stepx);
	H5Gget_info(jgid_tasks, &group_info);
	ntasks = (int) group_info.nlinks;
	if (ntasks <= 0)
		fatal("No tasks in step %d", stepx);
	task_id = xmalloc(ntasks*sizeof(uint64_t));
	if (task_id == NULL)
		fatal("Failed to get memory for task_ids");
	task_node_name = xmalloc(ntasks*sizeof(char*));
	if (task_node_name == NULL)
		fatal("Failed to get memory for task_node_names");

	/* Record each task's id and the node it ran on. */
	for (itx = 0; itx < ntasks; itx++) {
		// Get the name of the group.
		len = H5Lget_name_by_idx(jgid_tasks, ".", H5_INDEX_NAME,
					 H5_ITER_INC, itx, buf,
					 MAX_GROUP_NAME, H5P_DEFAULT);
		if ((len > 0) && (len < MAX_GROUP_NAME)) {
			jgid_task = H5Gopen(jgid_tasks, buf, H5P_DEFAULT);
			if (jgid_task < 0)
				fatal("Failed to open %s", buf);
		} else
			fatal("Illegal task name %s", buf);
		task_id[itx] = get_int_attribute(jgid_task, ATTR_TASKID);
		task_node_name[itx] = get_string_attribute(jgid_task,
							   ATTR_NODENAME);
		H5Gclose(jgid_task);
	}
	H5Gclose(jgid_tasks);
	nnodes = get_int_attribute(jgid_step, ATTR_NNODES);
	// allocate node arrays
	series_smp = (uint64_t*) xmalloc(ntasks*(sizeof(uint64_t)));
	if (series_smp == NULL) {
		fatal("Failed to get memory for node_samples");
		return;		/* Fix for CLANG false positive */
	}
	series_name = (char**) xmalloc(ntasks*(sizeof(char*)));
	if (series_name == NULL) {
		fatal("Failed to get memory for series_name");
		return;		/* Fix for CLANG false positive */
	}
	all_series = (double**) xmalloc(ntasks*(sizeof(double*)));
	if (all_series == NULL) {
		fatal("Failed to get memory for all_series");
		return;		/* Fix for CLANG false positive */
	}

	/* Walk the nodes; pull the series for each task placed there. */
	for (ndx = 0; ndx < nnodes; ndx++) {
		len = H5Lget_name_by_idx(jgid_nodes, ".", H5_INDEX_NAME,
					 H5_ITER_INC, ndx, jgrp_node_name,
					 MAX_GROUP_NAME, H5P_DEFAULT);
		if ((len < 0) || (len > MAX_GROUP_NAME))
			fatal("Invalid node name=%s", jgrp_node_name);
		jgid_node = get_group(jgid_nodes, jgrp_node_name);
		if (jgid_node < 0)
			fatal("Failed to open group %s", jgrp_node_name);
		for (itx = 0; itx < ntasks; itx++) {
			if (strcmp(jgrp_node_name, task_node_name[itx]) != 0)
				continue;
			tid = task_id[itx];
			series_name[itx] = xstrdup_printf("%s_%d %s",
							  GRP_TASK, tid,
							  jgrp_node_name);
			sprintf(jgrp_task_name, "%s_%d", GRP_TASK, tid);
			ops = NULL;
			nitem = 0;
			series_data = _get_series_data(jgid_node,
						       jgrp_task_name,
						       &ops, &nitem);
			if (series_data == NULL || nitem == 0 ||
			    ops == NULL) {
				if (ops != NULL)
					xfree(ops);
				continue;
			}
			all_series[itx] = ops->get_series_values(
				params.data_item, series_data, nitem);
			/*
			 * BUG FIX: was all_series[ndx] — the node index —
			 * which tested the wrong element (and could read
			 * past the array when nnodes > ntasks).
			 */
			if (!all_series[itx])
				fatal("No data item %s", params.data_item);
			series_smp[itx] = nitem;
			if (nsmp == 0) {
				nsmp = nitem;
				tod = ops->get_series_tod(series_data,
							  nitem);
				et = ops->get_series_values("time",
							    series_data,
							    nitem);
			} else {
				if (nitem > nsmp) {
					// new largest number of samples
					_delete_string_list(tod, nsmp);
					xfree(et);
					nsmp = nitem;
					tod = ops->get_series_tod(
						series_data, nitem);
					et = ops->get_series_values(
						"time", series_data, nitem);
				}
			}
			xfree(ops);
			xfree(series_data);
		}
		H5Gclose(jgid_node);
	}
	if (nsmp == 0) {
		// May be bad series name
		info("No values %s for series %s found in step %d",
		     params.data_item, params.series, stepx);
	} else {
		_series_analysis(fp, hd, stepx, ntasks, nsmp, series_name,
				 tod, et, all_series, series_smp);
	}
	for (itx = 0; itx < ntasks; itx++) {
		/*
		 * BUG FIX: the xstrdup_printf'd names were leaked; unset
		 * entries are NULL (xmalloc zero-fills, as the existing
		 * xfree(all_series[itx]) already relies on).
		 */
		xfree(series_name[itx]);
		xfree(all_series[itx]);
	}
	xfree(series_name);
	xfree(all_series);
	xfree(series_smp);
	_delete_string_list(tod, nsmp);
	xfree(et);
	_delete_string_list(task_node_name, ntasks);
	xfree(task_id);
	H5Gclose(jgid_nodes);
}