/*
 * Debug helper: pretty-print a NULL-name-terminated list of grid datasets
 * to stdout. When 'group' is non-NULL the output is wrapped in a
 * "GROUP '<name>' { ... }" envelope and indented one extra tab stop.
 *
 * Only E5_TYPE_FLOAT and E5_TYPE_DOUBLE payloads are rendered; elements of
 * any other type are silently skipped (only the index headers are printed).
 */
static void
e5_dump_grid_list(const char* group, e5_grid_dataset* grid_list)
{
    long d;
    long element_count;
    int i;

    if(group)
        printf("GROUP '%s' {\n\t", group);

    for(i = 0; grid_list && grid_list[i].name != 0; i++)
    {
        e5_grid_dataset* grid = &grid_list[i];
        const char* data_type_name = e5_typename(grid->type);

        printf("(%03d) DATASET '%s' [type '%s' scale '%s'] {",
               i, grid->name, data_type_name,
               grid->scale == E5_VALUE_SCALE_LOG10 ? "log10" : "linear");

        /* Widen each dimension BEFORE multiplying: the original code
         * multiplied in the dims' native type and only cast the product,
         * which can overflow for large grids. */
        element_count = (long)grid->dim[0] * (long)grid->dim[1] * (long)grid->dim[2];

        for(d = 0; d < element_count; d++)
        {
            /* Start a new (indented) row every 8 elements. */
            if((d % 8) == 0)
                printf("%s[%03li] ", group ? "\n\t\t" : "\n\t", d);

            if(grid->type == E5_TYPE_FLOAT)
                printf("%8.3f ", ((float*)(grid->data))[d]);
            else if(grid->type == E5_TYPE_DOUBLE)
                printf("%8.3f ", ((double*)(grid->data))[d]);
        }
        printf("\n%s}%s", group ? "\n\t" : "\n", group ? "\n\t" : "\n");
    }

    if(group)
        printf("\n}\n");
    return;
}
/*
 * Read a NULL-name-terminated list of attributes from the HDF5 group
 * 'e5_group_id' into the caller-supplied 'e5_attr_list' entries.
 *
 * For each entry the attribute is opened by name, its on-disk type is
 * converted to an e5 type (stored in attr->type), and its value is read
 * into attr->value (caller-owned storage). Entries with a missing or
 * invalid name/type are skipped; the last such error code is returned,
 * or E5_SUCCESS if every attribute was read.
 */
estatus_t
e5_read_attr_list(hid_t e5_group_id, e5_attr* e5_attr_list)
{
    int i;
    estatus_t status = E5_SUCCESS;
    hid_t h5_type;
    hid_t h5_space;
    hid_t h5_native_type;
    hid_t e5_attribute_id;

    for(i = 0; e5_attr_list && e5_attr_list[i].name != 0; i++)
    {
        e5_attr* attr = &e5_attr_list[i];
        if(attr->name == 0 || strlen(attr->name) < 1)
            continue;

        e5_info(e5_group_id, "Reading attribute [name='%s']\n", attr->name);

        if(H5Aexists(e5_group_id, attr->name) <= 0)
        {
            status = E5_INVALID_ATTRIBUTE;
            e5_error(e5_group_id, status,
                "Invalid name requested for attribute [type='%d', name='%s', value='%p']\n",
                attr->type, attr->name, attr->value);
            continue;
        }

        e5_attribute_id = H5Aopen(e5_group_id, attr->name, H5P_DEFAULT);
        h5_type = H5Aget_type(e5_attribute_id);
        h5_space = H5Aget_space(e5_attribute_id);
        /* NOTE(review): if e5_get_native_h5_type returns a freshly-created
         * datatype id (as H5Tget_native_type does), it should be closed as
         * well -- TODO confirm ownership of the returned id. */
        h5_native_type = e5_get_native_h5_type(h5_type);

        attr->type = e5_convert_hdf_type(h5_type);
        if(e5_is_valid_type(attr->type) != E5_TRUE)
        {
            status = E5_INVALID_TYPE;
            e5_error(e5_group_id, status,
                "Invalid type requested for attribute [type='%d', name='%s', value='%p']\n",
                attr->type, attr->name, attr->value);
            /* Release handles before skipping this entry; the original code
             * leaked the attribute id (and type/space ids) here. */
            H5Sclose(h5_space);
            H5Tclose(h5_type);
            H5Aclose(e5_attribute_id);
            continue;
        }

        H5Aread(e5_attribute_id, h5_native_type, attr->value);

        /* Close everything we opened for this attribute (the original code
         * never closed h5_type / h5_space). */
        H5Sclose(h5_space);
        H5Tclose(h5_type);
        H5Aclose(e5_attribute_id);

        e5_info(e5_group_id, "Read attribute [type='%s', name='%s', value='%p']\n",
            e5_typename(attr->type), attr->name, attr->value);
    }
    return status;
}
/*
 * Read type/dimension/scale metadata for a NULL-name-terminated list of
 * datasets. When 'list_name' is a non-empty string, the datasets are
 * looked up inside that (created-or-opened) subgroup of 'e5_group_id';
 * otherwise they are looked up in 'e5_group_id' directly.
 *
 * For each entry: info->type receives the converted e5 type, info->dim[0..2]
 * the extent of each dimension (clamped to at least 1), and info->scale is
 * set from the dataset's optional "log10" integer attribute.
 *
 * Returns E5_SUCCESS, or E5_INVALID_DATASET on the first dataset that
 * cannot be opened (remaining entries are not processed).
 */
estatus_t
e5_read_data_info_list(eid_t e5_group_id, const char* list_name, e5_data_info* info_list)
{
    int i;
    int d;
    int log_scale;
    int close_group;
    estatus_t status;

    hsize_t h5_min_dim[3];
    hsize_t h5_max_dim[3];

    eid_t e5_list_group_id;
    eid_t e5_type_id;
    eid_t e5_dataset_id;
    eid_t e5_dataspace_id;

    status = E5_SUCCESS;

    if(list_name && strlen(list_name))
    {
        e5_list_group_id = e5_create_group(e5_group_id, list_name);
        close_group = 1;
    }
    else
    {
        e5_list_group_id = e5_group_id;
        close_group = 0;
    }

    for(i = 0; info_list && info_list[i].name != 0; i++)
    {
        e5_data_info* info = &info_list[i];

        e5_dataset_id = H5Dopen(e5_list_group_id, info->name);
        if(e5_dataset_id < 0)
        {
            status = E5_INVALID_DATASET;
            e5_error(e5_list_group_id, status,
                "Failed to open info for dataset '%s'\n", info->name);
            /* Don't leak the subgroup we created/opened above (the
             * original code returned here without closing it). */
            if(close_group)
                e5_close_group(e5_list_group_id);
            return status;
        }

        e5_dataspace_id = H5Dget_space(e5_dataset_id);
        e5_type_id = H5Dget_type(e5_dataset_id);

        H5Sget_simple_extent_dims(e5_dataspace_id, h5_min_dim, h5_max_dim);
        info->type = e5_convert_hdf_type(e5_type_id);

        /* Take the larger of current/max extent per axis, clamped to >= 1. */
        for(d = 0; d < 3; d++)
        {
            info->dim[d] = h5_min_dim[d] >= h5_max_dim[d] ? h5_min_dim[d] : h5_max_dim[d];
            info->dim[d] = info->dim[d] < 1 ? 1 : info->dim[d];
        }

        /* The "log10" attribute is read from the dataset, so check for it
         * on the dataset too (the original code probed e5_group_id, which
         * is a different object and could mask or fake the attribute). */
        log_scale = 0;
        if(e5_is_valid_attr(e5_dataset_id, "log10"))
            e5_read_attr_int(e5_dataset_id, "log10", &log_scale);
        info->scale = log_scale ? E5_VALUE_SCALE_LOG10 : E5_VALUE_SCALE_LINEAR;

        e5_info(e5_group_id, "Read data info [type='%s', name='%s', dim='%u %u %u']\n",
            e5_typename(info->type), info->name, info->dim[0], info->dim[1], info->dim[2]);

        H5Sclose(e5_dataspace_id);
        H5Dclose(e5_dataset_id);
        H5Tclose(e5_type_id);
    }

    if(close_group)
        e5_close_group(e5_list_group_id);

    /* Return the accumulated status (always E5_SUCCESS here, since errors
     * return early) for consistency with e5_read_attr_list. */
    return status;
}