// Map net NID within TYPE to the index of the scalar field containing it,
// counting leaf fields in declaration order through nested records/arrays.
// Returns 0 for scalar types.
static int group_net_to_field(type_t type, netid_t nid)
{
   int count = 0;
   if (type_is_record(type)) {
      const int nfields = type_fields(type);
      netid_t first = 0;
      for (int i = 0; i < nfields; i++) {
         tree_t field = type_field(type, i);
         type_t ftype = tree_type(field);
         // Hoisted: the original recomputed type_width(tree_type(field))
         // here and type_width(ftype) below for the same field
         const int fwidth = type_width(ftype);
         const netid_t next = first + fwidth;
         if (nid >= first && nid < next) {
            if (type_is_array(ftype) || type_is_record(ftype))
               return count + group_net_to_field(ftype, nid - first);
            else
               return count;
         }
         first = next;
         count += fwidth;
      }
      fatal_trace("group_net_to_field failed to find field for nid=%d type=%s",
                  nid, type_pp(type));
   }
   else if (type_is_array(type)) {
      type_t elem = type_elem(type);
      const int width = type_width(elem);
      if (type_is_record(elem))
         // Record elements contribute one field index per scalar sub-field
         return (nid / width) * width + group_net_to_field(elem, nid % width);
      else
         return group_net_to_field(elem, nid % width);
   }
   else
      return 0;
}
// Return the smallest element of the heap without removing it.
// Aborts via fatal_trace if the heap is empty.
void *heap_min(heap_t h)
{
   if (unlikely(h->size < 1))
      fatal_trace("heap underflow");

   // The minimum of a binary min-heap is always at index 1
   return USER(h, 1);
}
// Diagnostic helper: report which (single) item bit in MASK has no
// associated type, then abort.  MASK must be non-zero or the scan below
// would never terminate.
void item_without_type(imask_t mask)
{
   int item = 0;
   while ((mask & (1ull << item)) == 0)
      item++;

   assert(item < ARRAY_LEN(item_text_map));
   fatal_trace("item %s does not have a type", item_text_map[item]);
}
// Remove and return the smallest element of the heap.
// Aborts via fatal_trace if the heap is empty.
void *heap_extract_min(heap_t h)
{
   if (unlikely(h->size < 1))
      fatal_trace("heap underflow");

   void *const result = USER(h, 1);

   // Move the last node to the root, shrink, then restore the heap property
   NODE(h, 1) = NODE(h, h->size);
   (h->size)--;
   min_heapify(h, 1);

   return result;
}
// Diagnostic helper: report that object NAME of the given KIND does not
// have the (single) item bit set in MASK, then abort.  MASK must be
// non-zero or the scan below would never terminate.
void object_lookup_failed(const char *name, const char **kind_text_map,
                          int kind, imask_t mask)
{
   int item = 0;
   while ((mask & (1ull << item)) == 0)
      item++;

   assert(item < ARRAY_LEN(item_text_map));
   fatal_trace("%s kind %s does not have item %s", name,
               kind_text_map[kind], item_text_map[item]);
}
// Detach WHERE from the context's singly-linked group list and mark its
// group id invalid.  Aborts if WHERE is not on the list.
static void group_unlink(group_nets_ctx_t *ctx, group_t *where)
{
   where->gid = GROUPID_INVALID;

   // Walk the list via a pointer-to-link so the head needs no special case
   for (group_t **pp = &(ctx->groups); *pp != NULL; pp = &((*pp)->next)) {
      if (*pp == where) {
         *pp = where->next;
         return;
      }
   }

   fatal_trace("unlink group not in list");
}
// Dissolve any grouping for the signal referenced by NAME, peeling off
// array/record selections until the underlying reference is reached.
static void ungroup_name(tree_t name, group_nets_ctx_t *ctx)
{
   switch (tree_kind(name)) {
   case T_ARRAY_REF:
   case T_ARRAY_SLICE:
   case T_RECORD_REF:
      // Selected or indexed name: recurse on the prefix
      ungroup_name(tree_value(name), ctx);
      break;

   case T_REF:
      // Plain reference: ungroup the referenced object itself
      ungroup_ref(name, ctx);
      break;

   default:
      fatal_trace("cannot handle tree type %s in ungroup_name",
                  tree_kind_str(tree_kind(name)));
   }
}
// Return a pointer to the storage slot for the item identified by the
// single-bit MASK within type object T.  Aborts if T's kind does not
// carry that item.
static item_t *lookup_item(type_t t, imask_t mask)
{
   assert(t != NULL);
   assert((mask & (mask - 1)) == 0);   // exactly one bit may be set

   const imask_t has = has_map[t->kind];

   // BUG FIX: __builtin_ctz takes unsigned int, silently truncating the
   // 64-bit imask_t — any mask bit >= 32 gave an undefined/garbage index.
   // Use the long long variant which covers the full mask width.
   const int tzc = __builtin_ctzll(mask);
   const int n   = item_lookup[t->kind][tzc];

   if (unlikely((has & mask) == 0)) {
      // The requested item's index is just the bit position of MASK;
      // this replaces the previous rescan loop which shifted a plain
      // int (1 << item) — undefined behavior for bit indices >= 31
      assert(tzc < ARRAY_LEN(item_text_map));
      fatal_trace("type kind %s does not have item %s",
                  kind_text_map[t->kind], item_text_map[tzc]);
   }

   return &(t->items[n]);
}
// Assign signal nets reached by assignment target T to groups, recursing
// through aggregates and falling back to ungrouping when a name's width
// cannot be determined or grouping fails.
static void group_target(tree_t t, group_nets_ctx_t *ctx)
{
   switch (tree_kind(t)) {
   case T_REF:
      group_ref(t, ctx, 0, -1);
      break;

   case T_ARRAY_REF:
   case T_ARRAY_SLICE:
   case T_RECORD_REF:
      {
         type_t type = tree_type(t);
         // If the width is unknown, or grouping the name fails, dissolve
         // any existing groups instead (short-circuit skips group_name
         // exactly as the original if/else-if chain did)
         if (!type_known_width(type)
             || !group_name(t, ctx, 0, type_width(type)))
            ungroup_name(t, ctx);
      }
      break;

   case T_LITERAL:
   case T_OPEN:
      // Constant folding can cause this to appear
      break;

   case T_AGGREGATE:
      {
         // Aggregate target: group each association's value in turn
         const int nassocs = tree_assocs(t);
         for (int n = 0; n < nassocs; n++)
            group_target(tree_value(tree_assoc(t, n)), ctx);
      }
      break;

   default:
      fmt_loc(stdout, tree_loc(t));
      fatal_trace("Cannot handle tree kind %s in group_target",
                  tree_kind_str(tree_kind(t)));
   }
}
// Build an AST node for the implicit default initial value of TYPE:
// the left bound for scalar types, nested (others => ...) aggregates for
// arrays, a positional aggregate of field defaults for records, and a
// null literal for access types.  Returns NULL for unresolved types.
// NOTE(review): presumably follows the VHDL LRM default-value rules —
// confirm against the language reference.
tree_t make_default_value(type_t type, const loc_t *loc)
{
   type_t base = type_base_recur(type);
   switch (type_kind(base)) {
   case T_UARRAY:
      // An unconstrained array can only get a default through a
      // constrained subtype
      assert(type_kind(type) == T_SUBTYPE);
      // Fall-through
   case T_CARRAY:
      {
         // Build nested aggregates from the innermost dimension outwards
         // so the element default ends up at the core
         tree_t def = NULL;
         const int ndims = type_dims(type);
         for (int i = ndims - 1; i >= 0; i--) {
            tree_t val = (def ? def : make_default_value(type_elem(base), loc));
            def = tree_new(T_AGGREGATE);
            tree_set_type(def, array_aggregate_type(type, i));
            tree_t a = tree_new(T_ASSOC);
            tree_set_subkind(a, A_OTHERS);
            tree_set_value(a, val);
            tree_add_assoc(def, a);
         }
         // Outermost aggregate carries the full (sub)type
         tree_set_type(def, type);
         tree_set_loc(def, loc);
         return def;
      }

   case T_INTEGER:
   case T_PHYSICAL:
   case T_REAL:
      // Default is the left bound of the type's first range
      return type_dim(type, 0).left;

   case T_ENUM:
      {
         // Prefer a direct reference to the enumeration literal when the
         // left bound folds to a constant; otherwise reuse the expression
         int64_t val = 0;
         const bool folded = folded_int(type_dim(type, 0).left, &val);
         if (folded)
            return make_ref(type_enum_literal(base, (unsigned) val));
         else
            return type_dim(type, 0).left;
      }

   case T_RECORD:
      {
         // Positional aggregate: one default value per field, in order
         tree_t def = tree_new(T_AGGREGATE);
         tree_set_loc(def, loc);
         const int nfields = type_fields(base);
         for (int i = 0; i < nfields; i++) {
            tree_t field = type_field(base, i);
            tree_t a = tree_new(T_ASSOC);
            tree_set_subkind(a, A_POS);
            tree_set_value(a, make_default_value(tree_type(field),
                                                 tree_loc(field)));
            tree_add_assoc(def, a);
         }
         tree_set_type(def, type);
         return def;
      }

   case T_ACCESS:
      {
         // Access (pointer) types default to null
         tree_t null = tree_new(T_LITERAL);
         tree_set_loc(null, loc);
         tree_set_subkind(null, L_NULL);
         tree_set_type(null, type);
         return null;
      }

   case T_UNRESOLVED:
      return NULL;

   default:
      fatal_trace("cannot handle type %s in %s",
                  type_kind_str(type_kind(base)), __func__);
   }
}