inline void Volume<T>::read(HDF5Id file_id, u32 index)
{
    HDF5Group volume_root_group;
    HDF5Group::getRootGroup(file_id, kVolumeRootGroup, volume_root_group);

    // get the size of the volume group name
    u32 size = H5Lget_name_by_idx(volume_root_group.id(), ".",
                                  H5_INDEX_CRT_ORDER, H5_ITER_INC, index,
                                  NULL, 0, H5P_DEFAULT);

    // H5Lget_name_by_idx returns the name length excluding the terminating
    // NUL, so add one byte for it.
    ++size;

    // get the name of the link at index
    char buf[size];
    H5Lget_name_by_idx(volume_root_group.id(), ".", H5_INDEX_CRT_ORDER,
                       H5_ITER_INC, index, buf, size, H5P_DEFAULT);
    String volume_name(buf);

    // now read in this volume by name
    readVolume(volume_root_group.id(), volume_name);
}
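/*
 * A minimal, self-contained sketch of the same two-call pattern against the
 * plain HDF5 C API: first query the name length with a NULL buffer, then
 * allocate length + 1 bytes and fetch the name.  The file name "example.h5"
 * and the group path "/" are placeholders for illustration, not taken from
 * any of the snippets in this collection.
 */
#include <hdf5.h>
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
    hid_t file_id = H5Fopen("example.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    if (file_id < 0)
        return 1;

    /* first call: NULL buffer, returns the name length without the NUL */
    ssize_t len = H5Lget_name_by_idx(file_id, "/", H5_INDEX_NAME, H5_ITER_INC,
                                     0, NULL, 0, H5P_DEFAULT);
    if (len >= 0) {
        char *name = malloc((size_t)len + 1);
        if (name != NULL) {
            /* second call: pass the buffer and its full size, NUL included */
            H5Lget_name_by_idx(file_id, "/", H5_INDEX_NAME, H5_ITER_INC,
                               0, name, (size_t)len + 1, H5P_DEFAULT);
            printf("first link: %s\n", name);
            free(name);
        }
    }

    H5Fclose(file_id);
    return 0;
}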
value hdf5_h5l_get_name_by_idx(value loc_v, value group_name_v, value index_field_v, value order_v, value lapl_v, value n_v) { CAMLparam5(loc_v, group_name_v, index_field_v, order_v, lapl_v); CAMLxparam1(n_v); CAMLlocal1(name_v); hid_t loc_id = Hid_val(loc_v), lapl_id = H5P_opt_val(lapl_v); const char *group_name = String_val(group_name_v); H5_index_t index_field = H5_index_val(index_field_v); H5_iter_order_t order = H5_iter_order_val(order_v); hsize_t n = Int_val(n_v); char *name; ssize_t size; size = H5Lget_name_by_idx(loc_id, group_name, index_field, order, n, NULL, 0, lapl_id); if (size < 0) fail(); size++; name = malloc(size); if (name == NULL) caml_raise_out_of_memory(); size = H5Lget_name_by_idx(loc_id, group_name, index_field, order, n, name, size, lapl_id); if (size < 0) { free(name); fail(); } name_v = caml_copy_string(name); free(name); CAMLreturn(name_v); }
void pyne::Material::_load_comp_protocol0(hid_t db, std::string datapath, int row) { hid_t matgroup = H5Gopen2(db, datapath.c_str(), H5P_DEFAULT); hid_t nucset; double nucvalue; ssize_t nuckeylen; std::string nuckey; // get the number of members in the material group H5G_info_t group_info; H5Gget_info(matgroup, &group_info); hsize_t matG = group_info.nlinks; // Iterate over datasets in the group. for (int matg = 0; matg < matG; matg++) { nuckeylen = 1 + H5Lget_name_by_idx(matgroup, ".", H5_INDEX_NAME, H5_ITER_INC, matg, NULL, 0, H5P_DEFAULT); char * nkey = new char[nuckeylen]; nuckeylen = H5Lget_name_by_idx(matgroup, ".", H5_INDEX_NAME, H5_ITER_INC, matg, nkey, nuckeylen, H5P_DEFAULT); nuckey = nkey; nucset = H5Dopen2(matgroup, nkey, H5P_DEFAULT); nucvalue = h5wrap::get_array_index<double>(nucset, row); if (nuckey == "Mass" || nuckey == "MASS" || nuckey == "mass") mass = nucvalue; else comp[pyne::nucname::id(nuckey)] = nucvalue; H5Dclose(nucset); delete[] nkey; }; // Set meta data atoms_per_molecule = -1.0; };
// Read children names of an object AH5_children_t AH5_read_children_name(hid_t file_id, const char* path) { H5G_info_t ginfo; AH5_children_t children; hsize_t i, j = 0; ssize_t size; hid_t group_id; char temp[AH5_ELEMENT_NAME_LENGTH], *temp2; children.childnames = NULL; /* - path must exist - number of children must be greater than zero - element name - must be readable - must be shorter than AH5_ELEMENT_NAME_LENGTH - must NOT be same as "_param" */ if (AH5_path_valid(file_id, path) || strcmp(path, "/") == 0) { group_id = H5Gopen1(file_id, path); H5Gget_info(group_id, &ginfo); if (ginfo.nlinks > 0) { children.childnames = (char **) malloc((size_t) ginfo.nlinks * sizeof(char *)); for (i = 0; i < ginfo.nlinks; i++) { size = H5Lget_name_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, i, NULL, 0, H5P_DEFAULT); if (size < 0) printf("***** ERROR: Cannot read all children of \"%s\". *****\n\n", path); else if (size >= AH5_ELEMENT_NAME_LENGTH) { temp2 = (char *) malloc ((size + 1) * sizeof(char)); H5Lget_name_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, i, temp2, size + 1, H5P_DEFAULT); printf("***** ERROR: Maximum name length (%i) exceeded in \"%s/%s\". *****\n\n", AH5_ELEMENT_NAME_LENGTH - 1, path, temp2); free(temp2); } else { H5Lget_name_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, i, temp, size + 1, H5P_DEFAULT); if (strcmp(temp, "_param") != 0) // exclude parameterized attributes { children.childnames[j] = (char *) malloc((size + 2) * sizeof(char)); strcpy(children.childnames[j], "/"); strcat(children.childnames[j++], temp); } } } if (j == 0) free(children.childnames); } H5Gclose(group_id); } children.nb_children = j; return children; }
string hdf5_get_name_by_idx(hid_t loc_id, int idx) {
  ssize_t str_size = H5Lget_name_by_idx(
      loc_id, ".", H5_INDEX_NAME, H5_ITER_NATIVE, idx, NULL, 0, H5P_DEFAULT);
  CHECK_GE(str_size, 0) << "Error retrieving HDF5 dataset at index " << idx;
  char *c_str = new char[str_size+1];
  ssize_t status = H5Lget_name_by_idx(
      loc_id, ".", H5_INDEX_NAME, H5_ITER_NATIVE, idx, c_str, str_size+1,
      H5P_DEFAULT);
  CHECK_GE(status, 0) << "Error retrieving HDF5 dataset at index " << idx;
  string result(c_str);
  delete[] c_str;
  return result;
}
// Read children names of an object AH5_children_t AH5_read_children_name(hid_t file_id, const char *path) { H5G_info_t ginfo; AH5_children_t children; hsize_t i, j = 0; ssize_t size; hid_t group_id; char *temp; children.childnames = NULL; /* - path must exist - number of children must be greater than zero - element name - must be readable - must NOT be same as "_param" */ if (AH5_path_valid(file_id, path) || strcmp(path, "/") == 0) { group_id = H5Gopen(file_id, path, H5P_DEFAULT); H5Gget_info(group_id, &ginfo); if (ginfo.nlinks > 0) { temp = malloc(sizeof(*temp)); children.childnames = (char **) malloc((size_t) ginfo.nlinks * sizeof(char *)); for (i = 0; i < ginfo.nlinks; i++) { size = H5Lget_name_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, i, NULL, 0, H5P_DEFAULT); if (size < 0) AH5_log_error("Cannot read all children of \"%s\". *****\n\n", path); else { temp = realloc(temp, (size + 1) * sizeof(*temp)); H5Lget_name_by_idx(group_id, ".", H5_INDEX_NAME, H5_ITER_INC, i, temp, size + 1, H5P_DEFAULT); if (strcmp(temp, "_param") != 0) // exclude parameterized attributes { children.childnames[j] = (char *) malloc((size + 2) * sizeof(char)); strcpy(children.childnames[j], "/"); strcat(children.childnames[j++], temp); } } } free(temp); if (j == 0) free(children.childnames); } H5Gclose(group_id); } children.nb_children = j; return children; }
listOfPaths_t* listOfObjectsInGroup(hid_t group, H5G_obj_t filter)
{
    ERROR_SWITCH_INIT
    herr_t get_err;
    H5G_info_t groupInfo;
    H5G_obj_t objType;
    hsize_t i, numberOfLinks;
    ssize_t size;   /* signed: H5Lget_name_by_idx returns a negative value on failure */
    char* name = NULL;
    listOfPaths_t* listOfPaths = NULL;

    // get number of links in group
    ERROR_SWITCH_OFF
    get_err = H5Gget_info(group, &groupInfo);
    ERROR_SWITCH_ON
    if (get_err < 0) return listOfPaths;
    numberOfLinks = groupInfo.nlinks;

    // loop on all links in group
    for (i = 0; i < numberOfLinks; i++)
    {
        // get type of current link
        objType = H5Gget_objtype_by_idx(group, i);

        // only add requested type of objects to the list
        if (objType == filter || filter == -1)
        {
            // get length of object name
            ERROR_SWITCH_OFF
            size = H5Lget_name_by_idx(group, ".", H5_INDEX_NAME, H5_ITER_INC,
                                      i, NULL, -1, H5P_DEFAULT);
            ERROR_SWITCH_ON
            if (size < 0) return listOfPaths;

            // get object name
            // allocate with extra character for '\0'
            name = (char *) malloc((size+1)*sizeof(char));
            ERROR_SWITCH_OFF
            size = H5Lget_name_by_idx(group, ".", H5_INDEX_NAME, H5_ITER_INC,
                                      i, name, size+1, H5P_DEFAULT);
            ERROR_SWITCH_ON
            if (size < 0) { free(name); return listOfPaths; }

            // add it into the list
            listOfPaths = addCopyToList(listOfPaths, name);
            free(name);
            name = NULL;
        }
    }

    return listOfPaths;
}
extern hid_t get_group(hid_t parent, const char *name) { char buf[MAX_GROUP_NAME]; hsize_t nobj; hid_t gid; int i, len; H5G_info_t group_info; if (parent < 0) { debug3("PROFILE: parent is not HDF5 object"); return -1; } H5Gget_info(parent, &group_info); nobj = group_info.nlinks; for (i = 0; (nobj>0) && (i<nobj); i++) { // Get the name of the group. len = H5Lget_name_by_idx(parent, ".", H5_INDEX_NAME, H5_ITER_INC, i, buf, MAX_GROUP_NAME, H5P_DEFAULT); if ((len > 0) && (len < MAX_GROUP_NAME)) { if (strcmp(buf, name) == 0) { gid = H5Gopen(parent, name, H5P_DEFAULT); if (gid < 0) error("PROFILE: Failed to open %s", name); return gid; } } } return -1; }
static void _get_series_names(hid_t group) { int i, len; char buf[MAX_GROUP_NAME+1]; H5G_info_t group_info; H5Gget_info(group, &group_info); num_series = (int)group_info.nlinks; if (num_series < 0) { debug("No Data Series in group"); return; } series_names = xmalloc(sizeof(char*)*num_series); for (i = 0; (num_series>0) && (i<num_series); i++) { len = H5Lget_name_by_idx(group, ".", H5_INDEX_NAME, H5_ITER_INC, i, buf, MAX_GROUP_NAME, H5P_DEFAULT); if ((len < 0) || (len > MAX_GROUP_NAME)) { info("Invalid series name=%s", buf); // put into list anyway so list doesn't have a null. } series_names[i] = xstrdup(buf); } }
/* * Class: hdf_hdf5lib_H5 * Method: H5Lget_name_by_idx * Signature: (JLjava/lang/String;IIJJ)Ljava/lang/String; */ JNIEXPORT jstring JNICALL Java_hdf_hdf5lib_H5_H5Lget_1name_1by_1idx (JNIEnv *env, jclass clss, jlong loc_id, jstring name, jint index_field, jint order, jlong link_n, jlong access_id) { jlong status_size; jstring str = NULL; size_t buf_size; const char *lName; char *lValue; PIN_JAVA_STRING(name, lName); if (lName != NULL) { /* get the length of the link name */ status_size = H5Lget_name_by_idx((hid_t)loc_id, lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, (char*)NULL, (size_t)0, H5P_DEFAULT); if(status_size < 0) { h5libraryError(env); } /* end if */ else { buf_size = (size_t)status_size + 1;/* add extra space for the null terminator */ lValue = (char*)HDmalloc(sizeof(char) * buf_size); if (lValue == NULL) { h5outOfMemory(env, "H5Lget_name_by_idx: malloc failed "); } /* end if */ else { status_size = H5Lget_name_by_idx((hid_t)loc_id, lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, (char*)lValue, (size_t)buf_size, (hid_t)access_id); if (status_size < 0) { HDfree(lValue); h5libraryError(env); } /* end if */ else { str = ENVPTR->NewStringUTF(ENVPAR lValue); HDfree(lValue); if (str == NULL) h5JNIFatalError(env, "H5Lget_name_by_idx: return string not created"); } /* end else */ } /* end else */ } /* end else */ UNPIN_JAVA_STRING(name, lName); } return str; } /* end Java_hdf_hdf5lib_H5_H5Lget_1name_1by_1idx */
//--------------------------------------------------------------------------
// Function:    CommonFG::getObjnameByIdx
///\brief       Returns the name of an object in this group, given the
///             object's index.
///\param       idx - IN: Transient index of the object
///\return      Object name
///\exception   H5::FileIException or H5::GroupIException
///\par Description
///             The value of idx can be any nonnegative number less than the
///             total number of objects in the group, which is returned by
///             the function \c CommonFG::getNumObjs.  Note that this is a
///             transient index; thus, an object may have a different index
///             each time the group is opened.
// Programmer   Binh-Minh Ribler - Mar, 2005
//--------------------------------------------------------------------------
H5std_string CommonFG::getObjnameByIdx(hsize_t idx) const
{
    // call H5Lget_name_by_idx with name as NULL to get its length
    ssize_t name_len = H5Lget_name_by_idx(getLocId(), ".", H5_INDEX_NAME,
                                          H5_ITER_INC, idx, NULL, 0, H5P_DEFAULT);
    if(name_len < 0)
    {
        throwException("getObjnameByIdx", "H5Lget_name_by_idx failed");
    }

    // now, allocate C buffer to get the name
    char* name_C = new char[name_len+1];
    name_len = H5Lget_name_by_idx(getLocId(), ".", H5_INDEX_NAME, H5_ITER_INC,
                                  idx, name_C, name_len+1, H5P_DEFAULT);

    // clean up and return the string
    H5std_string name = H5std_string(name_C);
    delete []name_C;
    return (name);
}
//--------------------------------------------------------------------------
// Function:    CommonFG::getObjnameByIdx
///\brief       Retrieves the name of an object in this group, given the
///             object's index.
///\param       idx  -  IN: Transient index of the object
///\param       name - IN/OUT: Retrieved name of the object
///\param       size -  IN: Length to retrieve
///\return      Actual size of the object name or 0, if object has no name
///\exception   H5::FileIException or H5::GroupIException
///\par Description
///             The value of idx can be any nonnegative number less than the
///             total number of objects in the group, which is returned by
///             the function \c CommonFG::getNumObjs.  Note that this is a
///             transient index; thus, an object may have a different index
///             each time the group is opened.
// Programmer   Binh-Minh Ribler - January, 2003
//--------------------------------------------------------------------------
ssize_t CommonFG::getObjnameByIdx(hsize_t idx, char* name, size_t size) const
{
    ssize_t name_len = H5Lget_name_by_idx(getLocId(), ".", H5_INDEX_NAME,
                                          H5_ITER_INC, idx, name, size, H5P_DEFAULT);
    if(name_len < 0)
    {
        throwException("getObjnameByIdx", "H5Lget_name_by_idx failed");
    }
    return (name_len);
}
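// A short usage sketch for the getObjnameByIdx overloads documented above,
// assuming the HDF5 C++ bindings (H5Cpp.h) and a file named "example.h5";
// both names are placeholders for illustration, not taken from the source.
#include <H5Cpp.h>
#include <iostream>

void list_root_objects()
{
    H5::H5File file("example.h5", H5F_ACC_RDONLY);
    H5::Group root = file.openGroup("/");

    // getNumObjs() gives the number of objects; the loop index below is the
    // transient index described in the documentation above.
    for (hsize_t i = 0; i < root.getNumObjs(); ++i)
    {
        // overload that returns the name as an H5std_string
        H5std_string name = root.getObjnameByIdx(i);
        std::cout << name << std::endl;
    }
}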
void DCGroup::getEntriesInternal(H5Handle base, const std::string baseGroup, std::string baseName, VisitObjCBType *param) throw (DCException) { H5G_info_t group_info; H5Gget_info(base, &group_info); for (size_t i = 0; i < group_info.nlinks; ++i) { std::string currentBaseName = baseName; std::string currentEntryName = ""; H5O_info_t obj_info; H5Oget_info_by_idx(base, ".", H5_INDEX_NAME, H5_ITER_INC, i, &obj_info, H5P_DEFAULT); if (param->entries) { ssize_t len_name = H5Lget_name_by_idx(base, ".", H5_INDEX_NAME, H5_ITER_INC, i, NULL, 0, H5P_LINK_ACCESS_DEFAULT); char *link_name_c = new char[len_name + 1]; H5Lget_name_by_idx(base, ".", H5_INDEX_NAME, H5_ITER_INC, i, link_name_c, len_name + 1, H5P_LINK_ACCESS_DEFAULT); currentEntryName = std::string(link_name_c) + std::string("/"); currentBaseName += currentEntryName; delete[] link_name_c; } if (obj_info.type == H5O_TYPE_GROUP) { hid_t group_id = H5Oopen_by_idx(base, ".", H5_INDEX_NAME, H5_ITER_INC, i, H5P_DEFAULT); getEntriesInternal(group_id, baseGroup, currentBaseName, param); H5Oclose(group_id); } if (obj_info.type == H5O_TYPE_DATASET) { if (param->entries) param->entries[param->count].name = currentEntryName; param->count++; } } }
std::vector<std::string> fast5_get_multi_read_groups(fast5_file& fh)
{
    std::vector<std::string> out;
    size_t buffer_size = 0;
    char* buffer = NULL;

    // get the number of groups in the root group
    H5G_info_t group_info;
    int ret = H5Gget_info_by_name(fh.hdf5_file, "/", &group_info, H5P_DEFAULT);
    if(ret < 0) {
        fprintf(stderr, "error getting group info\n");
        exit(EXIT_FAILURE);
    }

    for(size_t group_idx = 0; group_idx < group_info.nlinks; ++group_idx) {

        // retrieve the size of this group name
        ssize_t size = H5Lget_name_by_idx(fh.hdf5_file, "/", H5_INDEX_NAME,
                                          H5_ITER_INC, group_idx, NULL, 0, H5P_DEFAULT);

        if(size < 0) {
            fprintf(stderr, "error getting group name size\n");
            exit(EXIT_FAILURE);
        }
        size += 1; // for null terminator

        if(size > buffer_size) {
            buffer = (char*)realloc(buffer, size);
            buffer_size = size;
        }

        // copy the group name; terminate at the last byte of the name,
        // not one past the end of the buffer
        H5Lget_name_by_idx(fh.hdf5_file, "/", H5_INDEX_NAME, H5_ITER_INC,
                           group_idx, buffer, buffer_size, H5P_DEFAULT);
        buffer[size - 1] = '\0';
        out.push_back(buffer);
    }

    free(buffer);
    buffer = NULL;
    buffer_size = 0;
    return out;
}
std::string fast5_get_raw_read_internal_name(fast5_file& fh)
{
    // This code is from scrappie's fast5_interface

    // retrieve the size of the read name
    ssize_t size = H5Lget_name_by_idx(fh.hdf5_file, LEGACY_FAST5_RAW_ROOT,
                                      H5_INDEX_NAME, H5_ITER_INC, 0, NULL, 0, H5P_DEFAULT);

    if (size < 0) {
        return "";
    }

    // copy the read name out of the fast5
    char* name = (char*)calloc(1 + size, sizeof(char));
    H5Lget_name_by_idx(fh.hdf5_file, LEGACY_FAST5_RAW_ROOT, H5_INDEX_NAME,
                       H5_ITER_INC, 0, name, 1 + size, H5P_DEFAULT);

    // cleanup
    std::string out(name);
    free(name);
    return out;
}
static void _extract_all_tasks(FILE *fp, hid_t gid_step, hid_t gid_nodes, int nnodes, int stepx) { hid_t gid_tasks, gid_task = 0, gid_node = -1, gid_level = -1; H5G_info_t group_info; int ntasks, itx, len, task_id; char task_name[MAX_GROUP_NAME+1]; char* node_name; char buf[MAX_GROUP_NAME+1]; bool hd = true; gid_tasks = get_group(gid_step, GRP_TASKS); if (gid_tasks < 0) fatal("No tasks in step %d", stepx); H5Gget_info(gid_tasks, &group_info); ntasks = (int) group_info.nlinks; if (ntasks <= 0) fatal("No tasks in step %d", stepx); for (itx = 0; itx<ntasks; itx++) { // Get the name of the group. len = H5Lget_name_by_idx(gid_tasks, ".", H5_INDEX_NAME, H5_ITER_INC, itx, buf, MAX_GROUP_NAME, H5P_DEFAULT); if ((len > 0) && (len < MAX_GROUP_NAME)) { gid_task = H5Gopen(gid_tasks, buf, H5P_DEFAULT); if (gid_task < 0) fatal("Failed to open %s", buf); } else fatal("Illegal task name %s",buf); task_id = get_int_attribute(gid_task, ATTR_TASKID); node_name = get_string_attribute(gid_task, ATTR_NODENAME); sprintf(task_name,"%s_%d", GRP_TASK, task_id); gid_node = H5Gopen(gid_nodes, node_name, H5P_DEFAULT); if (gid_node < 0) fatal("Failed to open %s for Task_%d", node_name, task_id); gid_level = get_group(gid_node, GRP_SAMPLES); if (gid_level < 0) fatal("Failed to open group %s for node=%s task=%d", GRP_SAMPLES,node_name, task_id); _extract_series(fp, stepx, hd, gid_level, node_name, task_name); hd = false; xfree(node_name); H5Gclose(gid_level); H5Gclose(gid_node); H5Gclose(gid_task); } H5Gclose(gid_tasks); }
int getVariableNames_v1(int _iFile, char **pstNameList) { hsize_t i = 0; hsize_t iCount = 0; herr_t status = 0; int iNbItem = 0; H5O_info_t oinfo; H5G_info_t ginfo; status = H5Gget_info(_iFile, &ginfo); if (status != 0) { return 0; } iCount = ginfo.nlinks; for (i = 0; i < iCount; i++) { status = H5Oget_info_by_idx(_iFile, "/", H5_INDEX_NAME, H5_ITER_NATIVE, i, &oinfo, H5P_DEFAULT); if (status < 0) { return 0; } if (oinfo.type == H5O_TYPE_DATASET) { if (pstNameList != NULL) { ssize_t iLen = H5Lget_name_by_idx(_iFile, ".", H5_INDEX_NAME, H5_ITER_INC, i, 0, 0, H5P_DEFAULT) + 1; pstNameList[iNbItem] = (char*)MALLOC(sizeof(char) * iLen); H5Lget_name_by_idx(_iFile, ".", H5_INDEX_NAME, H5_ITER_INC, i, pstNameList[iNbItem], iLen, H5P_DEFAULT); } iNbItem++; } } return iNbItem; }
static void _extract_node_level(FILE* fp, int stepx, hid_t jgid_nodes, int nnodes, char* data_set_name) { hid_t jgid_node, gid_level; int nodex, len; char jgrp_node_name[MAX_GROUP_NAME+1]; bool header = true; for (nodex=0; nodex<nnodes; nodex++) { len = H5Lget_name_by_idx(jgid_nodes, ".", H5_INDEX_NAME, H5_ITER_INC, nodex, jgrp_node_name, MAX_GROUP_NAME, H5P_DEFAULT); if ((len < 0) || (len > MAX_GROUP_NAME)) { info("Invalid node name=%s", jgrp_node_name); continue; } jgid_node = get_group(jgid_nodes, jgrp_node_name); if (jgid_node < 0) { info("Failed to open group %s", jgrp_node_name); continue; } if (params.node && strcmp(params.node, "*") && strcmp(params.node, jgrp_node_name)) continue; gid_level = _get_series_parent(jgid_node); if (gid_level == -1) { H5Gclose(jgid_node); continue; } _extract_series(fp, stepx, header, gid_level, jgrp_node_name, data_set_name); header = false; H5Gclose(gid_level); H5Gclose(jgid_node); } }
static void* _get_all_samples(hid_t gid_series, char* nam_series, uint32_t type, int nsamples) { void* data = NULL; hid_t id_data_set, dtyp_memory, g_sample, sz_dest; herr_t ec; int smpx ,len; void *data_prior = NULL, *data_cur = NULL; char name_sample[MAX_GROUP_NAME+1]; hdf5_api_ops_t* ops; ops = profile_factory(type); if (ops == NULL) { error("Failed to create operations for %s", acct_gather_profile_type_to_string(type)); return NULL; } data = (*(ops->init_job_series))(nsamples); if (data == NULL) { xfree(ops); error("Failed to get memory for combined data"); return NULL; } dtyp_memory = (*(ops->create_memory_datatype))(); if (dtyp_memory < 0) { xfree(ops); xfree(data); error("Failed to create %s memory datatype", acct_gather_profile_type_to_string(type)); return NULL; } for (smpx=0; smpx<nsamples; smpx++) { len = H5Lget_name_by_idx(gid_series, ".", H5_INDEX_NAME, H5_ITER_INC, smpx, name_sample, MAX_GROUP_NAME, H5P_DEFAULT); if (len<1 || len>MAX_GROUP_NAME) { error("Invalid group name %s", name_sample); continue; } g_sample = H5Gopen(gid_series, name_sample, H5P_DEFAULT); if (g_sample < 0) { info("Failed to open %s", name_sample); } id_data_set = H5Dopen(g_sample, get_data_set_name(name_sample), H5P_DEFAULT); if (id_data_set < 0) { H5Gclose(g_sample); error("Failed to open %s dataset", acct_gather_profile_type_to_string(type)); continue; } sz_dest = (*(ops->dataset_size))(); data_cur = xmalloc(sz_dest); if (data_cur == NULL) { H5Dclose(id_data_set); H5Gclose(g_sample); error("Failed to get memory for prior data"); continue; } ec = H5Dread(id_data_set, dtyp_memory, H5S_ALL, H5S_ALL, H5P_DEFAULT, data_cur); if (ec < 0) { xfree(data_cur); H5Dclose(id_data_set); H5Gclose(g_sample); error("Failed to read %s data", acct_gather_profile_type_to_string(type)); continue; } (*(ops->merge_step_series))(g_sample, data_prior, data_cur, data+(smpx)*sz_dest); xfree(data_prior); data_prior = data_cur; H5Dclose(id_data_set); H5Gclose(g_sample); } xfree(data_cur); H5Tclose(dtyp_memory); xfree(ops); return data; }
static void _merge_series_data(hid_t jgid_tasks, hid_t jg_node, hid_t nsg_node) { hid_t jg_samples, nsg_samples; hid_t g_series, g_series_total = -1; hsize_t num_samples, n_series; int idsx, len; void *data = NULL, *series_total = NULL; uint32_t type; char *data_type; char nam_series[MAX_GROUP_NAME+1]; hdf5_api_ops_t* ops = NULL; H5G_info_t group_info; H5O_info_t object_info; if (jg_node < 0) { info("Job Node is not HDF5 object"); return; } if (nsg_node < 0) { info("Node-Step is not HDF5 object"); return; } jg_samples = H5Gcreate(jg_node, GRP_SAMPLES, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); if (jg_samples < 0) { info("Failed to create job node Samples"); return; } nsg_samples = get_group(nsg_node, GRP_SAMPLES); if (nsg_samples < 0) { H5Gclose(jg_samples); debug("Failed to get node-step Samples"); return; } H5Gget_info(nsg_samples, &group_info); n_series = group_info.nlinks; if (n_series < 1) { // No series? H5Gclose(jg_samples); H5Gclose(nsg_samples); info("No Samples"); return; } for (idsx = 0; idsx < n_series; idsx++) { H5Oget_info_by_idx(nsg_samples, ".", H5_INDEX_NAME, H5_ITER_INC, idsx, &object_info, H5P_DEFAULT); if (object_info.type != H5O_TYPE_GROUP) continue; len = H5Lget_name_by_idx(nsg_samples, ".", H5_INDEX_NAME, H5_ITER_INC, idsx, nam_series, MAX_GROUP_NAME, H5P_DEFAULT); if (len<1 || len>MAX_GROUP_NAME) { info("Invalid group name %s", nam_series); continue; } g_series = H5Gopen(nsg_samples, nam_series, H5P_DEFAULT); if (g_series < 0) { info("Failed to open %s", nam_series); continue; } H5Gget_info(g_series, &group_info); num_samples = group_info.nlinks; if (num_samples <= 0) { H5Gclose(g_series); info("_series %s has no samples", nam_series); continue; } // Get first sample in series to find out how big the data is. data_type = get_string_attribute(g_series, ATTR_DATATYPE); if (!data_type) { H5Gclose(g_series); info("Failed to get datatype for Time Series Dataset"); continue; } type = acct_gather_profile_type_from_string(data_type); xfree(data_type); data = _get_all_samples(g_series, nam_series, type, num_samples); if (data == NULL) { H5Gclose(g_series); info("Failed to get memory for Time Series Dataset"); continue; } put_hdf5_data(jg_samples, type, SUBDATA_SERIES, nam_series, data, num_samples); ops = profile_factory(type); if (ops == NULL) { xfree(data); H5Gclose(g_series); info("Failed to create operations for %s", acct_gather_profile_type_to_string(type)); continue; } series_total = (*(ops->series_total))(num_samples, data); if (series_total != NULL) { // Totals for series attaches to node g_series_total = make_group(jg_node, GRP_TOTALS); if (g_series_total < 0) { H5Gclose(g_series); xfree(series_total); xfree(data); xfree(ops); info("Failed to make Totals for Node"); continue; } put_hdf5_data(g_series_total, type, SUBDATA_SUMMARY, nam_series, series_total, 1); H5Gclose(g_series_total); } xfree(series_total); xfree(ops); xfree(data); H5Gclose(g_series); } return; }
static bool read_struct(int dataset, VarInfo6& info) { int complex = 0; int size = getDimsNode(dataset, &complex, info.pdims); info.dims = static_cast<int>(info.pdims.size()); info.size = 0; if (size == 0) { generateInfo(info); closeList6(dataset); return true; } int fieldCount = 0; int ret = getListDims6(dataset, &fieldCount); if (ret < 0) { closeList6(dataset); return false; } //open __refs__ node int refs = getDataSetIdFromName(dataset, "__refs__"); H5O_info_t oinfo; for (int i = 0; i < fieldCount; ++i) { H5Oget_info_by_idx(dataset, ".", H5_INDEX_NAME, H5_ITER_NATIVE, i, &oinfo, H5P_DEFAULT); ssize_t len = H5Lget_name_by_idx(dataset, ".", H5_INDEX_NAME, H5_ITER_INC, i, 0, 0, H5P_DEFAULT) + 1; char* name = (char*)MALLOC(sizeof(char) * len); H5Lget_name_by_idx(dataset, ".", H5_INDEX_NAME, H5_ITER_INC, i, name, len, H5P_DEFAULT); std::string cname(name); FREE(name); if (cname != "__dims__" && cname != "__refs__") { int dataref = getDataSetIdFromName(dataset, cname.data()); if (dataref < 0) { closeList6(dataset); return false; } int refdim = 0; getDatasetInfo(dataref, &complex, &refdim, NULL); std::vector<int> refdims(refdim); int refcount = getDatasetInfo(dataref, &complex, &refdim, refdims.data()); std::vector<hobj_ref_t> vrefs(refcount); ret = H5Dread(dataref, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, vrefs.data()); if (ret < 0) { return false; } //import field for (int j = 0; j < refcount; ++j) { int data = H5Rdereference(refs, H5R_OBJECT, &vrefs[j]); if (data < 0) { return false; } VarInfo6 info2; ret = read_data(data, info2); if (ret == false) { return false; } info.size += info2.size; } closeDataSet(dataref); } } generateInfo(info); closeList6(refs); closeList6(dataset); return true; }
/**************************************************************** ** ** test_grp_memb_funcs(): Test group member information ** functionality ** ****************************************************************/ static void test_grp_memb_funcs(hid_t fapl) { hid_t file; /* File ID */ hid_t dataset; /* Dataset ID */ hid_t datatype; /* Common datatype ID */ hid_t filespace; /* Common dataspace ID */ hid_t root_group,grp; /* Root group ID */ int i; /* counting variable */ char name[NAMELEN]; /* temporary name buffer */ char *dnames[NDATASETS+2];/* Names of the datasets created */ char *obj_names[NDATASETS+2];/* Names of the objects in group */ char dataset_name[NAMELEN]; /* dataset name */ ssize_t name_len; /* Length of object's name */ H5G_info_t ginfo; /* Buffer for querying object's info */ herr_t ret = SUCCEED; /* Generic return value */ /* Output message about test being performed */ MESSAGE(5, ("Testing Group Member Information Functionality\n")); /* Create the test file with the datasets */ file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl); CHECK(file, FAIL, "H5Fcreate"); datatype = H5Tcopy(H5T_NATIVE_INT); CHECK(datatype, FAIL, "H5Tcopy"); filespace = H5Screate(H5S_SCALAR); CHECK(filespace, FAIL, "H5Screate"); for(i = 0; i < NDATASETS; i++) { sprintf(name, "Dataset %d", i); dataset = H5Dcreate2(file, name, datatype, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, FAIL, "H5Dcreate2"); /* Keep a copy of the dataset names around for later */ dnames[i] = HDstrdup(name); CHECK(dnames[i], NULL, "strdup"); ret = H5Dclose(dataset); CHECK(ret, FAIL, "H5Dclose"); } /* end for */ /* Create a group and named datatype under root group for testing */ grp = H5Gcreate2(file, "grp", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Gcreate2"); dnames[NDATASETS] = HDstrdup("grp"); CHECK(dnames[NDATASETS], NULL, "strdup"); ret = H5Tcommit2(file, "dtype", datatype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Tcommit2"); dnames[NDATASETS + 1] = HDstrdup("dtype"); CHECK(dnames[NDATASETS], NULL, "strdup"); /* Close everything up */ ret = H5Tclose(datatype); CHECK(ret, FAIL, "H5Tclose"); ret = H5Gclose(grp); CHECK(ret, FAIL, "H5Gclose"); ret = H5Sclose(filespace); CHECK(ret, FAIL, "H5Sclose"); ret = H5Fclose(file); CHECK(ret, FAIL, "H5Fclose"); /* Sort the dataset names */ HDqsort(dnames, (size_t)(NDATASETS + 2), sizeof(char *), iter_strcmp); /* Iterate through the datasets in the root group in various ways */ file = H5Fopen(DATAFILE, H5F_ACC_RDONLY, fapl); CHECK(file, FAIL, "H5Fopen"); /* These two functions, H5Oget_info_by_idx and H5Lget_name_by_idx, actually * iterate through B-tree for group members in internal library design. 
*/ root_group = H5Gopen2(file, "/", H5P_DEFAULT); CHECK(root_group, FAIL, "H5Gopen2"); ret = H5Gget_info(root_group, &ginfo); CHECK(ret, FAIL, "H5Gget_info"); VERIFY(ginfo.nlinks, (NDATASETS + 2), "H5Gget_info"); for(i = 0; i < (int)ginfo.nlinks; i++) { H5O_info_t oinfo; /* Object info */ /* Test with NULL for name, to query length */ name_len = H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, NULL, (size_t)NAMELEN, H5P_DEFAULT); CHECK(name_len, FAIL, "H5Lget_name_by_idx"); ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, dataset_name, (size_t)(name_len + 1), H5P_DEFAULT); CHECK(ret, FAIL, "H5Lget_name_by_idx"); /* Double-check that the length is the same */ VERIFY(ret, name_len, "H5Lget_name_by_idx"); /* Keep a copy of the dataset names around for later */ obj_names[i] = HDstrdup(dataset_name); CHECK(obj_names[i], NULL, "strdup"); ret = H5Oget_info_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo, H5P_DEFAULT); CHECK(ret, FAIL, "H5Oget_info_by_idx"); if(!HDstrcmp(dataset_name, "grp")) VERIFY(oinfo.type, H5O_TYPE_GROUP, "H5Lget_name_by_idx"); if(!HDstrcmp(dataset_name, "dtype")) VERIFY(oinfo.type, H5O_TYPE_NAMED_DATATYPE, "H5Lget_name_by_idx"); if(!HDstrncmp(dataset_name, "Dataset", (size_t)7)) VERIFY(oinfo.type, H5O_TYPE_DATASET, "H5Lget_name_by_idx"); } /* end for */ H5E_BEGIN_TRY { ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)(NDATASETS+3), dataset_name, (size_t)NAMELEN, H5P_DEFAULT); } H5E_END_TRY; VERIFY(ret, FAIL, "H5Lget_name_by_idx"); /* Sort the dataset names */ HDqsort(obj_names, (size_t)(NDATASETS + 2), sizeof(char *), iter_strcmp); /* Compare object names */ for(i = 0; i< (int)ginfo.nlinks; i++) { ret = HDstrcmp(dnames[i], obj_names[i]); VERIFY(ret, 0, "HDstrcmp"); } /* end for */ ret = H5Gclose(root_group); CHECK(ret, FAIL, "H5Gclose"); ret = H5Fclose(file); CHECK(ret, FAIL, "H5Fclose"); /* Free the dataset names */ for(i = 0; i< (NDATASETS + 2); i++) { HDfree(dnames[i]); HDfree(obj_names[i]); } /* end for */ } /* test_grp_memb_funcs() */
/**************************************************************** ** ** test_links(): Test soft and hard link iteration ** ****************************************************************/ static void test_links(hid_t fapl) { hid_t file; /* File ID */ char obj_name[NAMELEN]; /* Names of the object in group */ ssize_t name_len; /* Length of object's name */ hid_t gid, gid1; H5G_info_t ginfo; /* Buffer for querying object's info */ hsize_t i; herr_t ret; /* Generic return value */ /* Output message about test being performed */ MESSAGE(5, ("Testing Soft and Hard Link Iteration Functionality\n")); /* Create the test file with the datasets */ file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl); CHECK(file, FAIL, "H5Fcreate"); /* create groups */ gid = H5Gcreate2(file, "/g1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(gid, FAIL, "H5Gcreate2"); gid1 = H5Gcreate2(file, "/g1/g1.1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(gid1, FAIL, "H5Gcreate2"); /* create soft and hard links to the group "/g1". */ ret = H5Lcreate_soft("something", gid, "softlink", H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Lcreate_soft"); ret = H5Lcreate_hard(gid, "/g1", H5L_SAME_LOC, "hardlink", H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Lcreate_hard"); ret = H5Gget_info(gid, &ginfo); CHECK(ret, FAIL, "H5Gget_info"); VERIFY(ginfo.nlinks, 3, "H5Gget_info"); /* Test these two functions, H5Oget_info_by_idx and H5Lget_name_by_idx */ for(i = 0; i < ginfo.nlinks; i++) { H5O_info_t oinfo; /* Object info */ H5L_info_t linfo; /* Link info */ /* Get link name */ name_len = H5Lget_name_by_idx(gid, ".", H5_INDEX_NAME, H5_ITER_INC, i, obj_name, (size_t)NAMELEN, H5P_DEFAULT); CHECK(name_len, FAIL, "H5Lget_name_by_idx"); /* Get link type */ ret = H5Lget_info_by_idx(gid, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &linfo, H5P_DEFAULT); CHECK(ret, FAIL, "H5Lget_info_by_idx"); /* Get object type */ if(linfo.type == H5L_TYPE_HARD) { ret = H5Oget_info_by_idx(gid, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo, H5P_DEFAULT); CHECK(ret, FAIL, "H5Oget_info_by_idx"); } /* end if */ if(!HDstrcmp(obj_name, "g1.1")) VERIFY(oinfo.type, H5O_TYPE_GROUP, "H5Lget_name_by_idx"); else if(!HDstrcmp(obj_name, "hardlink")) VERIFY(oinfo.type, H5O_TYPE_GROUP, "H5Lget_name_by_idx"); else if(!HDstrcmp(obj_name, "softlink")) VERIFY(linfo.type, H5L_TYPE_SOFT, "H5Lget_name_by_idx"); else CHECK(0, 0, "unknown object name"); } /* end for */ ret = H5Gclose(gid); CHECK(ret, FAIL, "H5Gclose"); ret = H5Gclose(gid1); CHECK(ret, FAIL, "H5Gclose"); ret = H5Fclose(file); CHECK(ret, FAIL, "H5Fclose"); } /* test_links() */
void SalomeIO::ReadFE( hid_t file_id, std::vector<std::string> & fe_type_vec, hsize_t n_fem_types, const std::string my_mesh_name_dir ) { Mesh& mesh = GetMesh(); // Get the element name char **el_fem_type = new char*[n_fem_types]; std::vector<int> index(n_fem_types); const uint fe_name_nchars = 4; for(int i=0; i<(int)n_fem_types; i++) { el_fem_type[i] = new char[fe_name_nchars]; H5Lget_name_by_idx(file_id,my_mesh_name_dir.c_str(), H5_INDEX_NAME, H5_ITER_INC,i, el_fem_type[i], fe_name_nchars, H5P_DEFAULT); std::string temp_i(el_fem_type[i]); if (mesh.GetDimension() == 3) { if ( temp_i.compare("HE8") == 0 || temp_i.compare("H20") == 0 || temp_i.compare("H27") == 0 || temp_i.compare("TE4") == 0 || temp_i.compare("T10") == 0 ) { index[mesh.GetDimension() -1] = i; fe_type_vec[mesh.GetDimension() -1] = el_fem_type[i];} else if ( temp_i.compare("QU4") == 0 || temp_i.compare("QU8") == 0 || temp_i.compare("QU9") == 0 || temp_i.compare("TR3") == 0 || temp_i.compare("TR6") == 0 ) { index[mesh.GetDimension() -1 -1] = i; fe_type_vec[mesh.GetDimension() -1 -1] = el_fem_type[i]; } else if ( temp_i.compare("SE2") == 0 || temp_i.compare("SE3") == 0 ) { index[mesh.GetDimension() -1 -1 -1] = i; fe_type_vec[mesh.GetDimension() -1 -1 -1] = el_fem_type[i]; } } else if (mesh.GetDimension() == 2) { if ( temp_i.compare("QU4") == 0 || temp_i.compare("QU8") == 0 || temp_i.compare("QU9") == 0 || temp_i.compare("TR3") == 0 || temp_i.compare("TR6") == 0 ) { index[mesh.GetDimension() -1] = i; fe_type_vec[mesh.GetDimension() -1] = el_fem_type[i]; } else if ( temp_i.compare("SE2") == 0 || temp_i.compare("SE3") == 0 ) { index[mesh.GetDimension() -1 -1] = i; fe_type_vec[mesh.GetDimension() -1 -1] = el_fem_type[i]; } } else if (mesh.GetDimension() == 1) { if ( temp_i.compare("SE2") == 0 || temp_i.compare("SE3") == 0 ) { index[mesh.GetDimension() -1] = i; fe_type_vec[mesh.GetDimension() -1] = el_fem_type[i];} } } // clean for(int i=0; i<(int)n_fem_types; i++) delete[] el_fem_type[i]; delete[] el_fem_type; return; }
/**************************************************************** ** ** test_iter_group(): Test group iteration functionality ** ****************************************************************/ static void test_iter_group(hid_t fapl, hbool_t new_format) { hid_t file; /* File ID */ hid_t dataset; /* Dataset ID */ hid_t datatype; /* Common datatype ID */ hid_t filespace; /* Common dataspace ID */ hid_t root_group,grp; /* Root group ID */ int i; /* counting variable */ hsize_t idx; /* Index in the group */ char name[NAMELEN]; /* temporary name buffer */ char *lnames[NDATASETS + 2];/* Names of the links created */ char dataset_name[NAMELEN]; /* dataset name */ iter_info info; /* Custom iteration information */ H5G_info_t ginfo; /* Buffer for querying object's info */ herr_t ret; /* Generic return value */ /* Output message about test being performed */ MESSAGE(5, ("Testing Group Iteration Functionality\n")); /* Create the test file with the datasets */ file = H5Fcreate(DATAFILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl); CHECK(file, FAIL, "H5Fcreate"); /* Test iterating over empty group */ info.command = RET_ZERO; idx = 0; ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info); VERIFY(ret, SUCCEED, "H5Literate"); datatype = H5Tcopy(H5T_NATIVE_INT); CHECK(datatype, FAIL, "H5Tcopy"); filespace=H5Screate(H5S_SCALAR); CHECK(filespace, FAIL, "H5Screate"); for(i=0; i< NDATASETS; i++) { sprintf(name,"Dataset %d",i); dataset = H5Dcreate2(file, name, datatype, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, FAIL, "H5Dcreate2"); /* Keep a copy of the dataset names around for later */ lnames[i] = HDstrdup(name); CHECK(lnames[i], NULL, "strdup"); ret = H5Dclose(dataset); CHECK(ret, FAIL, "H5Dclose"); } /* end for */ /* Create a group and named datatype under root group for testing */ grp = H5Gcreate2(file, "grp", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Gcreate2"); lnames[NDATASETS] = HDstrdup("grp"); CHECK(lnames[NDATASETS], NULL, "strdup"); ret = H5Tcommit2(file, "dtype", datatype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Tcommit2"); lnames[NDATASETS + 1] = HDstrdup("dtype"); CHECK(lnames[NDATASETS], NULL, "strdup"); /* Close everything up */ ret = H5Tclose(datatype); CHECK(ret, FAIL, "H5Tclose"); ret = H5Gclose(grp); CHECK(ret, FAIL, "H5Gclose"); ret = H5Sclose(filespace); CHECK(ret, FAIL, "H5Sclose"); ret = H5Fclose(file); CHECK(ret, FAIL, "H5Fclose"); /* Sort the dataset names */ HDqsort(lnames, (size_t)(NDATASETS + 2), sizeof(char *), iter_strcmp); /* Iterate through the datasets in the root group in various ways */ file = H5Fopen(DATAFILE, H5F_ACC_RDONLY, fapl); CHECK(file, FAIL, "H5Fopen"); /* These two functions, H5Oget_info_by_idx and H5Lget_name_by_idx, actually * iterate through B-tree for group members in internal library design. 
*/ root_group = H5Gopen2(file, "/", H5P_DEFAULT); CHECK(root_group, FAIL, "H5Gopen2"); ret = H5Gget_info(root_group, &ginfo); CHECK(ret, FAIL, "H5Gget_info"); VERIFY(ginfo.nlinks, (NDATASETS + 2), "H5Gget_info"); for(i = 0; i< (int)ginfo.nlinks; i++) { H5O_info_t oinfo; /* Object info */ ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, dataset_name, (size_t)NAMELEN, H5P_DEFAULT); CHECK(ret, FAIL, "H5Lget_name_by_idx"); ret = H5Oget_info_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo, H5P_DEFAULT); CHECK(ret, FAIL, "H5Oget_info_by_idx"); } /* end for */ H5E_BEGIN_TRY { ret = (herr_t)H5Lget_name_by_idx(root_group, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)(NDATASETS+3), dataset_name, (size_t)NAMELEN, H5P_DEFAULT); } H5E_END_TRY; VERIFY(ret, FAIL, "H5Lget_name_by_idx"); ret = H5Gclose(root_group); CHECK(ret, FAIL, "H5Gclose"); /* These two functions, H5Oget_info_by_idx and H5Lget_name_by_idx, actually * iterate through B-tree for group members in internal library design. * (Same as test above, but with the file ID instead of opening the root group) */ ret = H5Gget_info(file, &ginfo); CHECK(ret, FAIL, "H5Gget_info"); VERIFY(ginfo.nlinks, NDATASETS + 2, "H5Gget_info"); for(i = 0; i< (int)ginfo.nlinks; i++) { H5O_info_t oinfo; /* Object info */ ret = (herr_t)H5Lget_name_by_idx(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, dataset_name, (size_t)NAMELEN, H5P_DEFAULT); CHECK(ret, FAIL, "H5Lget_name_by_idx"); ret = H5Oget_info_by_idx(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)i, &oinfo, H5P_DEFAULT); CHECK(ret, FAIL, "H5Oget_info_by_idx"); } /* end for */ H5E_BEGIN_TRY { ret = (herr_t)H5Lget_name_by_idx(file, ".", H5_INDEX_NAME, H5_ITER_INC, (hsize_t)(NDATASETS + 3), dataset_name, (size_t)NAMELEN, H5P_DEFAULT); } H5E_END_TRY; VERIFY(ret, FAIL, "H5Lget_name_by_idx"); /* Test invalid indices for starting iteration */ info.command = RET_ZERO; idx = (hsize_t)-1; H5E_BEGIN_TRY { ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info); } H5E_END_TRY; VERIFY(ret, FAIL, "H5Literate"); /* Test skipping exactly as many entries as in the group */ idx = NDATASETS + 2; H5E_BEGIN_TRY { ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info); } H5E_END_TRY; VERIFY(ret, FAIL, "H5Literate"); /* Test skipping more entries than are in the group */ idx = NDATASETS + 3; H5E_BEGIN_TRY { ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info); } H5E_END_TRY; VERIFY(ret, FAIL, "H5Literate"); /* Test all objects in group, when callback always returns 0 */ info.command = RET_ZERO; idx = 0; if((ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) > 0) TestErrPrintf("Group iteration function didn't return zero correctly!\n"); /* Test all objects in group, when callback always returns 1 */ /* This also tests the "restarting" ability, because the index changes */ info.command = RET_TWO; idx = i = 0; while((ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) > 0) { /* Verify return value from iterator gets propagated correctly */ VERIFY(ret, 2, "H5Literate"); /* Increment the number of times "2" is returned */ i++; /* Verify that the index is the correct value */ VERIFY(idx, (hsize_t)i, "H5Literate"); if(idx > (NDATASETS + 2)) TestErrPrintf("Group iteration function walked too far!\n"); /* Verify that the correct name is retrieved */ if(HDstrcmp(info.name, lnames[(size_t)(idx - 1)]) != 0) TestErrPrintf("Group iteration function didn't return name correctly 
for link - lnames[%u] = '%s'!\n", (unsigned)(idx - 1), lnames[(size_t)(idx - 1)]); } /* end while */ VERIFY(ret, -1, "H5Literate"); if(i != (NDATASETS + 2)) TestErrPrintf("%u: Group iteration function didn't perform multiple iterations correctly!\n", __LINE__); /* Test all objects in group, when callback changes return value */ /* This also tests the "restarting" ability, because the index changes */ info.command = new_format ? RET_CHANGE2 : RET_CHANGE; idx = i = 0; while((ret = H5Literate(file, H5_INDEX_NAME, H5_ITER_INC, &idx, liter_cb, &info)) >= 0) { /* Verify return value from iterator gets propagated correctly */ VERIFY(ret, 1, "H5Literate"); /* Increment the number of times "1" is returned */ i++; /* Verify that the index is the correct value */ VERIFY(idx, (hsize_t)(i + 10), "H5Literate"); if(idx > (NDATASETS + 2)) TestErrPrintf("Group iteration function walked too far!\n"); /* Verify that the correct name is retrieved */ if(HDstrcmp(info.name, lnames[(size_t)(idx - 1)]) != 0) TestErrPrintf("Group iteration function didn't return name correctly for link - lnames[%u] = '%s'!\n", (unsigned)(idx - 1), lnames[(size_t)(idx - 1)]); } /* end while */ VERIFY(ret, -1, "H5Literate"); if(i != 42 || idx != 52) TestErrPrintf("%u: Group iteration function didn't perform multiple iterations correctly!\n", __LINE__); ret = H5Fclose(file); CHECK(ret, FAIL, "H5Fclose"); /* Free the dataset names */ for(i = 0; i< (NDATASETS + 2); i++) HDfree(lnames[i]); } /* test_iter_group() */
static void _merge_task_totals(hid_t jg_tasks, hid_t nsg_node, char* node_name) { hid_t jg_task, jg_totals, nsg_totals, g_total, nsg_tasks, nsg_task = -1; hsize_t nobj, ntasks = -1; int i, len, taskx, taskid, taskcpus, size_data; void *data; uint32_t type; char buf[MAX_GROUP_NAME+1]; char group_name[MAX_GROUP_NAME+1]; H5G_info_t group_info; if (jg_tasks < 0) { info("Job Tasks is not HDF5 object"); return; } if (nsg_node < 0) { info("Node-Step is not HDF5 object"); return; } nsg_tasks = get_group(nsg_node, GRP_TASKS); if (nsg_tasks < 0) { debug("No Tasks group in node-step file"); return; } H5Gget_info(nsg_tasks, &group_info); ntasks = group_info.nlinks; for (taskx = 0; ((int)ntasks>0) && (taskx<((int)ntasks)); taskx++) { // Get the name of the group. len = H5Lget_name_by_idx(nsg_tasks, ".", H5_INDEX_NAME, H5_ITER_INC, taskx, buf, MAX_GROUP_NAME, H5P_DEFAULT); if (len<1 || len>MAX_GROUP_NAME) { info("Invalid group name %s", buf); continue; } nsg_task = H5Gopen(nsg_tasks, buf, H5P_DEFAULT); if (nsg_task < 0) { debug("Failed to open %s", buf); continue; } taskid = get_int_attribute(nsg_task, ATTR_TASKID); sprintf(group_name, "%s_%d", GRP_TASK, taskid); jg_task = H5Gcreate(jg_tasks, group_name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); if (jg_task < 0) { H5Gclose(nsg_task); info("Failed to create job task group"); continue; } put_string_attribute(jg_task, ATTR_NODENAME, node_name); put_int_attribute(jg_task, ATTR_TASKID, taskid); taskcpus = get_int_attribute(nsg_task, ATTR_CPUPERTASK); put_int_attribute(jg_task, ATTR_CPUPERTASK, taskcpus); nsg_totals = get_group(nsg_task, GRP_TOTALS); if (nsg_totals < 0) { H5Gclose(jg_task); H5Gclose(nsg_task); continue; } jg_totals = H5Gcreate(jg_task, GRP_TOTALS, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); if (jg_totals < 0) { H5Gclose(jg_task); H5Gclose(nsg_task); info("Failed to create job task totals"); continue; } H5Gget_info(nsg_totals, &group_info); nobj = group_info.nlinks; for (i = 0; (nobj>0) && (i<nobj); i++) { // Get the name of the group. len = H5Lget_name_by_idx(nsg_totals, ".", H5_INDEX_NAME, H5_ITER_INC, i, buf, MAX_GROUP_NAME, H5P_DEFAULT); if (len<1 || len>MAX_GROUP_NAME) { info("Invalid group name %s", buf); continue; } g_total = H5Gopen(nsg_totals, buf, H5P_DEFAULT); if (g_total < 0) { info("Failed to open %s", buf); continue; } type = get_uint32_attribute(g_total, ATTR_DATATYPE); if (!type) { H5Gclose(g_total); info("No %s attribute", ATTR_DATATYPE); continue; } data = get_hdf5_data(g_total, type, buf, &size_data); if (data == NULL) { H5Gclose(g_total); info("Failed to get group %s type %s data", buf, acct_gather_profile_type_to_string(type)); continue; } put_hdf5_data(jg_totals, type, SUBDATA_DATA, buf, data, 1); xfree(data); H5Gclose(g_total); } H5Gclose(nsg_totals); H5Gclose(nsg_task); H5Gclose(jg_totals); H5Gclose(jg_task); } H5Gclose(nsg_tasks); }
static void _extract_data() { hid_t fid_job, jgid_root, jgid_step, jgid_nodes, jgid_node, jgid_level; int nsteps, nnodes, stepx, isx, len; char jgrp_step_name[MAX_GROUP_NAME+1]; char jgrp_node_name[MAX_GROUP_NAME+1]; bool header; FILE* fp = fopen(params.output, "w"); if (fp == NULL) { error("Failed to create output file %s -- %m", params.output); } fid_job = H5Fopen(params.input, H5F_ACC_RDONLY, H5P_DEFAULT); if (fid_job < 0) { error("Failed to open %s", params.input); return; } jgid_root = H5Gopen(fid_job, "/", H5P_DEFAULT); if (jgid_root < 0) { H5Fclose(fid_job); error("Failed to open root"); return; } nsteps = get_int_attribute(jgid_root, ATTR_NSTEPS); for (stepx=0; stepx<nsteps; stepx++) { if ((params.step_id != -1) && (stepx != params.step_id)) continue; sprintf(jgrp_step_name, "%s_%d", GRP_STEP, stepx); jgid_step = get_group(jgid_root, jgrp_step_name); if (jgid_step < 0) { error("Failed to open group %s", jgrp_step_name); continue; } if (params.level && !strncasecmp(params.level, "Node:", 5)) { nnodes = get_int_attribute(jgid_step, ATTR_NNODES); jgid_nodes = get_group(jgid_step, GRP_NODES); if (jgid_nodes < 0) { H5Gclose(jgid_step); error("Failed to open group %s", GRP_NODES); continue; } len = H5Lget_name_by_idx(jgid_nodes, ".", H5_INDEX_NAME, H5_ITER_INC, 0, jgrp_node_name, MAX_GROUP_NAME, H5P_DEFAULT); if ((len < 0) || (len > MAX_GROUP_NAME)) { H5Gclose(jgid_nodes); H5Gclose(jgid_step); error("Invalid node name %s", jgrp_node_name); continue; } jgid_node = get_group(jgid_nodes, jgrp_node_name); if (jgid_node < 0) { H5Gclose(jgid_nodes); H5Gclose(jgid_step); info("Failed to open group %s", jgrp_node_name); continue; } jgid_level = _get_series_parent(jgid_node); if (jgid_level == -1) { H5Gclose(jgid_node); H5Gclose(jgid_nodes); H5Gclose(jgid_step); continue; } _get_series_names(jgid_level); H5Gclose(jgid_level); H5Gclose(jgid_node); if (!params.series || !strcmp(params.series, "*")) { for (isx=0; isx<num_series; isx++) { _extract_node_level( fp, stepx, jgid_nodes, nnodes, true, series_names[isx]); } } else if (!strcmp(params.series, GRP_TASKS)) { header = true; for (isx=0; isx<num_series; isx++) { if (strstr(series_names[isx], GRP_TASK)) { _extract_node_level( fp, stepx, jgid_nodes, nnodes, header, series_names[isx]); header = false; } } } else { _extract_node_level(fp, stepx, jgid_nodes, nnodes, true, params.series); } _delete_string_list(series_names, num_series); series_names = NULL; num_series = 0; H5Gclose(jgid_nodes); } else { error("%s is an illegal level", params.level); } H5Gclose(jgid_step); } H5Gclose(jgid_root); H5Fclose(fid_job); fclose(fp); }
int main() { printf("\n*** Checking HDF5 attribute functions some more.\n"); printf("*** Creating tst_xplatform2_3.nc with HDF only..."); { hid_t fapl_id, fcpl_id; size_t chunk_cache_size = MY_CHUNK_CACHE_SIZE; size_t chunk_cache_nelems = CHUNK_CACHE_NELEMS; float chunk_cache_preemption = CHUNK_CACHE_PREEMPTION; hid_t fileid, grpid, attid, spaceid; hid_t s1_typeid, vlen_typeid, s3_typeid; hid_t file_typeid1[NUM_OBJ], native_typeid1[NUM_OBJ]; hid_t file_typeid2, native_typeid2; hsize_t num_obj; H5O_info_t obj_info; char obj_name[STR_LEN + 1]; hsize_t dims[1] = {ATT_LEN}; /* netcdf attributes always 1-D. */ struct s1 { float x; double y; }; struct s3 { hvl_t data[NUM_VL]; }; /* cvc stands for "Compound with Vlen of Compound." */ struct s3 cvc_out[ATT_LEN]; int i, j, k; /* Create some output data: a struct s3 array (length ATT_LEN) * which holds an array of vlen (length NUM_VL) of struct s1. */ for (i = 0; i < ATT_LEN; i++) for (j = 0; j < NUM_VL; j++) { cvc_out[i].data[j].len = i + 1; if (!(cvc_out[i].data[j].p = calloc(sizeof(struct s1), cvc_out[i].data[j].len))) ERR; for (k = 0; k < cvc_out[i].data[j].len; k++) { ((struct s1 *)cvc_out[i].data[j].p)[k].x = 42.42; ((struct s1 *)cvc_out[i].data[j].p)[k].y = 2.0; } } /* Create the HDF5 file, with cache control, creation order, and * all the timmings. */ if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR; if (H5Pset_fclose_degree(fapl_id, H5F_CLOSE_STRONG)) ERR; if (H5Pset_cache(fapl_id, 0, chunk_cache_nelems, chunk_cache_size, chunk_cache_preemption) < 0) ERR; if (H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) ERR; if ((fcpl_id = H5Pcreate(H5P_FILE_CREATE)) < 0) ERR; if (H5Pset_link_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED)) < 0) ERR; if (H5Pset_attr_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED)) < 0) ERR; if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, fcpl_id, fapl_id)) < 0) ERR; if (H5Pclose(fapl_id) < 0) ERR; if (H5Pclose(fcpl_id) < 0) ERR; /* Open the root group. */ if ((grpid = H5Gopen2(fileid, "/", H5P_DEFAULT)) < 0) ERR; /* Create the compound type for struct s1. */ if ((s1_typeid = H5Tcreate(H5T_COMPOUND, sizeof(struct s1))) < 0) ERR; if (H5Tinsert(s1_typeid, X_NAME, offsetof(struct s1, x), H5T_NATIVE_FLOAT) < 0) ERR; if (H5Tinsert(s1_typeid, Y_NAME, offsetof(struct s1, y), H5T_NATIVE_DOUBLE) < 0) ERR; if (H5Tcommit(grpid, S1_TYPE_NAME, s1_typeid) < 0) ERR; /* Create a vlen type. Its a vlen of struct s1. */ if ((vlen_typeid = H5Tvlen_create(s1_typeid)) < 0) ERR; if (H5Tcommit(grpid, VLEN_TYPE_NAME, vlen_typeid) < 0) ERR; /* Create the struct s3 type, which contains the vlen. */ if ((s3_typeid = H5Tcreate(H5T_COMPOUND, sizeof(struct s3))) < 0) ERR; if (H5Tinsert(s3_typeid, VL_NAME, offsetof(struct s3, data), vlen_typeid) < 0) ERR; if (H5Tcommit(grpid, S3_TYPE_NAME, s3_typeid) < 0) ERR; /* Create an attribute of this new type. */ if ((spaceid = H5Screate_simple(1, dims, NULL)) < 0) ERR; if ((attid = H5Acreate(grpid, S3_ATT_NAME, s3_typeid, spaceid, H5P_DEFAULT)) < 0) ERR; if (H5Awrite(attid, s3_typeid, cvc_out) < 0) ERR; /* Close the types. */ if (H5Tclose(s1_typeid) < 0 || H5Tclose(vlen_typeid) < 0 || H5Tclose(s3_typeid) < 0) ERR; /* Close the att. */ if (H5Aclose(attid) < 0) ERR; /* Close the space. */ if (H5Sclose(spaceid) < 0) ERR; /* Close the group and file. */ if (H5Gclose(grpid) < 0 || H5Fclose(fileid) < 0) ERR; /* Reopen the file. 
*/ if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) ERR; if ((grpid = H5Gopen(fileid, "/")) < 0) ERR; /* How many objects in this group? (There should be 3, the * types. Atts don't count as objects to HDF5.) */ if (H5Gget_num_objs(grpid, &num_obj) < 0) ERR; if (num_obj != NUM_OBJ) ERR; /* For each object in the group... */ for (i = 0; i < num_obj; i++) { /* Get the name, and make sure this is a type. */ if (H5Oget_info_by_idx(grpid, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, i, &obj_info, H5P_DEFAULT) < 0) ERR; if (H5Lget_name_by_idx(grpid, ".", H5_INDEX_NAME, H5_ITER_INC, i, obj_name, STR_LEN + 1, H5P_DEFAULT) < 0) ERR; if (obj_info.type != H5O_TYPE_NAMED_DATATYPE) ERR; /* Get the typeid and native typeid. */ if ((file_typeid1[i] = H5Topen2(grpid, obj_name, H5P_DEFAULT)) < 0) ERR; if ((native_typeid1[i] = H5Tget_native_type(file_typeid1[i], H5T_DIR_DEFAULT)) < 0) ERR; } /* There is one att: open it by index. */ if ((attid = H5Aopen_idx(grpid, 0)) < 0) ERR; /* Get file and native typeids of the att. */ if ((file_typeid2 = H5Aget_type(attid)) < 0) ERR; if ((native_typeid2 = H5Tget_native_type(file_typeid2, H5T_DIR_DEFAULT)) < 0) ERR; /* Close the attribute. */ if (H5Aclose(attid) < 0) ERR; /* Close the typeids. */ for (i = 0; i < NUM_OBJ; i++) { if (H5Tclose(file_typeid1[i]) < 0) ERR; if (H5Tclose(native_typeid1[i]) < 0) ERR; } if (H5Tclose(file_typeid2) < 0) ERR; if (H5Tclose(native_typeid2) < 0) ERR; /* Close the group and file. */ if (H5Gclose(grpid) < 0 || H5Fclose(fileid) < 0) ERR; /* Deallocate our vlens. */ for (i = 0; i < ATT_LEN; i++) for (j = 0; j < NUM_VL; j++) free(cvc_out[i].data[j].p); } SUMMARIZE_ERR; printf("*** Checking vlen of compound file..."); { #define NUM_OBJ_1 1 #define ATT_NAME "Poseidon" hid_t fapl_id, fcpl_id; hid_t fileid, grpid, attid, spaceid; hid_t vlen_typeid; hid_t file_typeid1[NUM_OBJ_1], native_typeid1[NUM_OBJ_1]; hid_t file_typeid2, native_typeid2; hsize_t num_obj; H5O_info_t obj_info; char obj_name[STR_LEN + 1]; hsize_t dims[1] = {ATT_LEN}; /* netcdf attributes always 1-D. */ /* vc stands for "Vlen of Compound." */ hvl_t vc_out[ATT_LEN]; int i, k; /* Create some output data: an array of vlen (length ATT_LEN) of * int. */ for (i = 0; i < ATT_LEN; i++) { vc_out[i].len = i + 1; if (!(vc_out[i].p = calloc(sizeof(int), vc_out[i].len))) ERR; for (k = 0; k < vc_out[i].len; k++) ((int *)vc_out[i].p)[k] = 42; } /* Create the HDF5 file with creation order. */ if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR; if ((fcpl_id = H5Pcreate(H5P_FILE_CREATE)) < 0) ERR; if (H5Pset_link_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED)) < 0) ERR; if (H5Pset_attr_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED)) < 0) ERR; if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, fcpl_id, fapl_id)) < 0) ERR; if (H5Pclose(fapl_id) < 0) ERR; if (H5Pclose(fcpl_id) < 0) ERR; /* Open the root group. */ if ((grpid = H5Gopen2(fileid, "/", H5P_DEFAULT)) < 0) ERR; /* Create a vlen type. Its a vlen of int. */ if ((vlen_typeid = H5Tvlen_create(H5T_NATIVE_INT)) < 0) ERR; if (H5Tcommit(grpid, VLEN_TYPE_NAME, vlen_typeid) < 0) ERR; /* Create an attribute of this new type. */ if ((spaceid = H5Screate_simple(1, dims, NULL)) < 0) ERR; if ((attid = H5Acreate(grpid, ATT_NAME, vlen_typeid, spaceid, H5P_DEFAULT)) < 0) ERR; if (H5Awrite(attid, vlen_typeid, vc_out) < 0) ERR; /* Close the type. */ if (H5Tclose(vlen_typeid) < 0) ERR; /* Close the att. */ if (H5Aclose(attid) < 0) ERR; /* Close the space. 
*/ if (H5Sclose(spaceid) < 0) ERR; /* Close the group and file. */ if (H5Gclose(grpid) < 0 || H5Fclose(fileid) < 0) ERR; /* Reopen the file. */ if ((fileid = H5Fopen(FILE_NAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) ERR; if ((grpid = H5Gopen(fileid, "/")) < 0) ERR; /* How many objects in this group? (There should be 2, the * types. Atts don't count as objects to HDF5.) */ if (H5Gget_num_objs(grpid, &num_obj) < 0) ERR; if (num_obj != NUM_OBJ_1) ERR; /* For each object in the group... */ for (i = 0; i < num_obj; i++) { /* Get the name, and make sure this is a type. */ if (H5Oget_info_by_idx(grpid, ".", H5_INDEX_CRT_ORDER, H5_ITER_INC, i, &obj_info, H5P_DEFAULT) < 0) ERR; if (H5Lget_name_by_idx(grpid, ".", H5_INDEX_NAME, H5_ITER_INC, i, obj_name, STR_LEN + 1, H5P_DEFAULT) < 0) ERR; if (obj_info.type != H5O_TYPE_NAMED_DATATYPE) ERR; /* Get the typeid and native typeid. */ if ((file_typeid1[i] = H5Topen2(grpid, obj_name, H5P_DEFAULT)) < 0) ERR; if ((native_typeid1[i] = H5Tget_native_type(file_typeid1[i], H5T_DIR_DEFAULT)) < 0) ERR; } /* There is one att: open it by index. */ if ((attid = H5Aopen_idx(grpid, 0)) < 0) ERR; /* Get file and native typeids of the att. */ if ((file_typeid2 = H5Aget_type(attid)) < 0) ERR; if ((native_typeid2 = H5Tget_native_type(file_typeid2, H5T_DIR_DEFAULT)) < 0) ERR; /* Close the attribute. */ if (H5Aclose(attid) < 0) ERR; /* Close the typeids. */ for (i = 0; i < NUM_OBJ_1; i++) { if (H5Tclose(file_typeid1[i]) < 0) ERR; if (H5Tclose(native_typeid1[i]) < 0) ERR; } if (H5Tclose(file_typeid2) < 0) ERR; if (H5Tclose(native_typeid2) < 0) ERR; /* Close the group and file. */ if (H5Gclose(grpid) < 0 || H5Fclose(fileid) < 0) ERR; /* Deallocate our vlens. */ for (i = 0; i < ATT_LEN; i++) free(vc_out[i].p); } SUMMARIZE_ERR; FINAL_RESULTS; }
static void _merge_node_totals(hid_t jg_node, hid_t nsg_node) { hid_t jg_totals, nsg_totals, g_total; hsize_t nobj; int i, len, size_data; void *data; uint32_t type; char buf[MAX_GROUP_NAME+1]; H5G_info_t group_info; if (jg_node < 0) { info("Job Node is not HDF5 object"); return; } if (nsg_node < 0) { info("Node-Step is not HDF5 object"); return; } jg_totals = H5Gcreate(jg_node, GRP_TOTALS, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); if (jg_totals < 0) { info("Failed to create job node totals"); return; } nsg_totals = get_group(nsg_node, GRP_TOTALS); if (nsg_totals < 0) { H5Gclose(jg_totals); return; } H5Gget_info(nsg_totals, &group_info); nobj = group_info.nlinks; for (i = 0; (nobj>0) && (i<nobj); i++) { // Get the name of the group. len = H5Lget_name_by_idx(nsg_totals, ".", H5_INDEX_NAME, H5_ITER_INC, i, buf, MAX_GROUP_NAME, H5P_DEFAULT); if (len<1 || len>MAX_GROUP_NAME) { info("invalid group name %s", buf); continue; } g_total = H5Gopen(nsg_totals, buf, H5P_DEFAULT); if (g_total < 0) { info("Failed to open %s", buf); continue; } type = get_uint32_attribute(g_total, ATTR_DATATYPE); if (!type) { H5Gclose(g_total); info("No %s attribute", ATTR_DATATYPE); continue; } data = get_hdf5_data(g_total, type, buf, &size_data); if (data == NULL) { H5Gclose(g_total); info("Failed to get group %s type %s data", buf, acct_gather_profile_type_to_string(type)); continue; } put_hdf5_data(jg_totals, type, SUBDATA_DATA, buf, data, 1); xfree(data); H5Gclose(g_total); } H5Gclose(nsg_totals); H5Gclose(jg_totals); return; }
static void _extract_node_level(FILE* fp, int stepx, hid_t jgid_nodes, int nnodes, bool header, char* data_set_name) { hid_t jgid_node, gid_level, gid_series; int nodex, len, size_data; void *data; uint32_t type; char *data_type, *subtype; char jgrp_node_name[MAX_GROUP_NAME+1]; hdf5_api_ops_t* ops; for (nodex=0; nodex<nnodes; nodex++) { len = H5Lget_name_by_idx(jgid_nodes, ".", H5_INDEX_NAME, H5_ITER_INC, nodex, jgrp_node_name, MAX_GROUP_NAME, H5P_DEFAULT); if ((len < 0) || (len > MAX_GROUP_NAME)) { info("Invalid node name=%s", jgrp_node_name); continue; } jgid_node = get_group(jgid_nodes, jgrp_node_name); if (jgid_node < 0) { info("Failed to open group %s", jgrp_node_name); continue; } if (params.node && strcmp(params.node, "*") && strcmp(params.node, jgrp_node_name)) continue; gid_level = _get_series_parent(jgid_node); if (gid_level == -1) { H5Gclose(jgid_node); continue; } gid_series = get_group(gid_level, data_set_name); if (gid_series < 0) { // This is okay, may not have ran long enough for // a sample (hostname????) H5Gclose(gid_level); H5Gclose(jgid_node); continue; } data_type = get_string_attribute(gid_series, ATTR_DATATYPE); if (!data_type) { H5Gclose(gid_series); H5Gclose(gid_level); H5Gclose(jgid_node); info("No datatype in %s", data_set_name); continue; } type = acct_gather_profile_type_from_string(data_type); xfree(data_type); subtype = get_string_attribute(gid_series, ATTR_SUBDATATYPE); if (subtype == NULL) { H5Gclose(gid_series); H5Gclose(gid_level); H5Gclose(jgid_node); info("No %s attribute", ATTR_SUBDATATYPE); continue; } ops = profile_factory(type); if (ops == NULL) { xfree(subtype); H5Gclose(gid_series); H5Gclose(gid_level); H5Gclose(jgid_node); info("Failed to create operations for %s", acct_gather_profile_type_to_string(type)); continue; } data = get_hdf5_data( gid_series, type, data_set_name, &size_data); if (data) { if (strcmp(subtype,SUBDATA_SUMMARY) != 0) (*(ops->extract_series)) (fp, header, params.job_id, stepx, jgrp_node_name, data_set_name, data, size_data); else (*(ops->extract_total)) (fp, header, params.job_id, stepx, jgrp_node_name, data_set_name, data, size_data); header = false; xfree(data); } else { fprintf(fp, "%d,%d,%s,No %s Data\n", params.job_id, stepx, jgrp_node_name, data_set_name); } xfree(ops); H5Gclose(gid_series); H5Gclose(gid_level); H5Gclose(jgid_node); } }