static void macro_define_object_internal (struct macro_source_file *source, int line, const char *name, const char *replacement, enum macro_special_kind kind) { struct macro_table *t = source->table; struct macro_key *k = NULL; struct macro_definition *d; if (! t->redef_ok) k = check_for_redefinition (source, line, name, macro_object_like, 0, 0, replacement); /* If we're redefining a symbol, and the existing key would be identical to our new key, then the splay_tree_insert function will try to delete the old definition. When the definition is living on an obstack, this isn't a happy thing. Since this only happens in the presence of questionable debug info, we just ignore all definitions after the first. The only case I know of where this arises is in GCC's output for predefined macros, and all the definitions are the same in that case. */ if (k && ! key_compare (k, name, source, line)) return; k = new_macro_key (t, name, source, line); d = new_macro_definition (t, macro_object_like, kind, 0, replacement); splay_tree_insert (t->definitions, (splay_tree_key) k, (splay_tree_value) d); }
void macro_define_function (struct macro_source_file *source, int line, const char *name, int argc, const char **argv, const char *replacement) { struct macro_table *t = source->table; struct macro_key *k = NULL; struct macro_definition *d; if (! t->redef_ok) k = check_for_redefinition (source, line, name, macro_function_like, argc, argv, replacement); /* See comments about duplicate keys in macro_define_object. */ if (k && ! key_compare (k, name, source, line)) return; /* We should also check here that all the argument names in ARGV are distinct. */ k = new_macro_key (t, name, source, line); d = new_macro_definition (t, macro_function_like, argc, argv, replacement); splay_tree_insert (t->definitions, (splay_tree_key) k, (splay_tree_value) d); }
/* Insert VALUE into MAP's underlying splay tree under KEY.  The key is
   first turned into a persistent tree key via allocate_key.  */
static void
addrmap_splay_tree_insert (struct addrmap_mutable *map,
                           CORE_ADDR key, void *value)
{
  splay_tree_key tree_key = allocate_key (map, key);

  splay_tree_insert (map->tree, tree_key, (splay_tree_value) value);
}
/* Note static variable VAR for later consideration, registering it in
   reference_vars_to_consider keyed by its DECL_UID.  Each variable is
   recorded at most once; all_module_statics tracks the UIDs seen.  */
static inline void
add_static_var (tree var)
{
  int uid = DECL_UID (var);

  /* Already recorded?  Nothing to do.  */
  if (bitmap_bit_p (all_module_statics, uid))
    return;

  splay_tree_insert (reference_vars_to_consider,
                     uid, (splay_tree_value) var);
  bitmap_set_bit (all_module_statics, uid);
}
/* Note a use of DECL inside the OpenMP task/parallel context OMP_CTX.
   If DECL is not yet recorded in OMP_CTX->variables: first recurse to
   note the use in the outer context (if any); then, unless the context
   has default(shared) semantics, scan the enclosing contexts up to and
   including the nearest parallel — if any of them records DECL with a
   non-shared default, or if none is found and DECL is a PARM_DECL or a
   non-static local of the current function, DECL becomes implicitly
   firstprivate here.  For the implicit-firstprivate case the copy
   ctor and dtor of DECL's (element) type are instantiated immediately,
   because during gimplification it would already be too late.  Finally
   DECL is recorded in OMP_CTX->variables with the computed default
   kind.  */
static void omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl) { splay_tree_node n = splay_tree_lookup (omp_ctx->variables, (splay_tree_key) decl); if (n == NULL) { int flags = OMP_CLAUSE_DEFAULT_SHARED; if (omp_ctx->outer) omp_cxx_notice_variable (omp_ctx->outer, decl); if (!omp_ctx->default_shared) { struct cp_genericize_omp_taskreg *octx; for (octx = omp_ctx->outer; octx; octx = octx->outer) { n = splay_tree_lookup (octx->variables, (splay_tree_key) decl); if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED) { flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE; break; } if (octx->is_parallel) break; } if (octx == NULL && (TREE_CODE (decl) == PARM_DECL || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl)) && DECL_CONTEXT (decl) == current_function_decl))) flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE; if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE) { /* DECL is implicitly determined firstprivate in the current task construct. Ensure copy ctor and dtor are instantiated, because during gimplification it will be already too late. */ tree type = TREE_TYPE (decl); if (is_invisiref_parm (decl)) type = TREE_TYPE (type); while (TREE_CODE (type) == ARRAY_TYPE) type = TREE_TYPE (type); get_copy_ctor (type, tf_none); get_dtor (type, tf_none); } } splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags); } }
/* Insert constructor C at offset N into the constructor tree rooted at
   *BASE, creating the tree on first use.  C's base and offset fields
   are updated to match.  Returns the constructor stored at offset N
   (C itself after a fresh insertion).  */
gfc_constructor *
gfc_constructor_insert (gfc_constructor_base *base, gfc_constructor *c, int n)
{
  splay_tree_node slot;

  /* Lazily create the underlying splay tree.  */
  if (*base == NULL)
    *base = splay_tree_new (splay_tree_compare_ints, NULL, node_free);

  c->base = *base;
  mpz_set_si (c->offset, n);

  slot = splay_tree_insert (*base, (splay_tree_key) n,
                            (splay_tree_value) c);
  gcc_assert (slot);

  return (gfc_constructor *) slot->value;
}
/* Obtain a cache block for address ADDR in DCACHE.  If the cache is
   full (size >= DCACHE_SIZE), the least recently used block —
   dcache->oldest — is evicted: it is unlinked from the head of the age
   list and removed from the splay tree, then reused.  Otherwise a
   block is taken from the free list, or freshly xmalloc'd if the free
   list is empty, and the cache size is bumped.  The block is keyed by
   the masked address, appended at the newest end of the age list
   (maintaining oldest/newest pointers), and inserted into the splay
   tree.  Returns the block; its data contents are not initialized
   here.  */
static struct dcache_block * dcache_alloc (DCACHE *dcache, CORE_ADDR addr) { struct dcache_block *db; if (dcache->size >= DCACHE_SIZE) { /* Evict the least recently used line. */ db = dcache->oldest; dcache->oldest = db->newer; splay_tree_remove (dcache->tree, (splay_tree_key) db->addr); } else { db = dcache->freelist; if (db) dcache->freelist = db->newer; else db = xmalloc (sizeof (struct dcache_block)); dcache->size++; } db->addr = MASK (addr); db->newer = NULL; db->refs = 0; if (dcache->newest) dcache->newest->newer = db; dcache->newest = db; if (!dcache->oldest) dcache->oldest = db; splay_tree_insert (dcache->tree, (splay_tree_key) db->addr, (splay_tree_value) db); return db; }
/* Set up and seed a (reduced, when REDUCE) post-order walk over the
   call graph, writing the result into ORDER.  ALLOW_OVERWRITABLE
   additionally admits functions whose body availability is exactly
   AVAIL_OVERWRITABLE; IGNORE_EDGE, when non-NULL, filters edges.  The
   code visible here builds the search environment (stack, counters,
   the NODES_MARKED_NEW splay tree keyed by node uid) and, for every
   defined function of sufficient availability, attaches a fresh or
   reused ipa_dfs_info to node->aux (marked new, not on stack) and
   registers the node in nodes_marked_new; other nodes get aux = NULL.
   NOTE(review): this function appears truncated in this view — the
   actual DFS over nodes_marked_new and the computation of the return
   value are not visible here.  */
int ipa_reduced_postorder (struct cgraph_node **order, bool reduce, bool allow_overwritable, bool (*ignore_edge) (struct cgraph_edge *)) { struct cgraph_node *node; struct searchc_env env; splay_tree_node result; env.stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes); env.stack_size = 0; env.result = order; env.order_pos = 0; env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0); env.count = 1; env.reduce = reduce; env.allow_overwritable = allow_overwritable; FOR_EACH_DEFINED_FUNCTION (node) { enum availability avail = cgraph_function_body_availability (node); if (avail > AVAIL_OVERWRITABLE || (allow_overwritable && (avail == AVAIL_OVERWRITABLE))) { /* Reuse the info if it is already there. */ struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux; if (!info) info = XCNEW (struct ipa_dfs_info); info->new_node = true; info->on_stack = false; info->next_cycle = NULL; node->aux = info; splay_tree_insert (env.nodes_marked_new, (splay_tree_key)node->uid, (splay_tree_value)node); } else node->aux = NULL; }
struct c_fileinfo * get_fileinfo (const char *name) { splay_tree_node n; struct c_fileinfo *fi; if (!file_info_tree) file_info_tree = splay_tree_new ((splay_tree_compare_fn) strcmp, 0, (splay_tree_delete_value_fn) free); n = splay_tree_lookup (file_info_tree, (splay_tree_key) name); if (n) return (struct c_fileinfo *) n->value; fi = XNEW (struct c_fileinfo); fi->time = 0; fi->interface_only = 0; fi->interface_unknown = 1; splay_tree_insert (file_info_tree, (splay_tree_key) name, (splay_tree_value) fi); return fi; }
/* Add tree T to DI's dump queue, assigning it the next available
   index and recording it in DI->nodes so later references can find
   that index.  FLAGS carries DUMP_BINFO when T is a BINFO.  Returns
   the index assigned to T.  */
static unsigned int
queue (dump_info_p di, const_tree t, int flags)
{
  dump_queue_p entry;
  dump_node_info_p info;
  unsigned int index = ++di->index;

  /* Obtain a queue node, recycling one from the free list when
     possible.  */
  if (di->free_list)
    {
      entry = di->free_list;
      di->free_list = entry->next;
    }
  else
    entry = XNEW (struct dump_queue);

  /* Record T in the splay tree together with its index and BINFO
     flag.  */
  info = XNEW (struct dump_node_info);
  info->index = index;
  info->binfo_p = ((flags & DUMP_BINFO) != 0);
  entry->node = splay_tree_insert (di->nodes, (splay_tree_key) t,
                                   (splay_tree_value) info);

  /* Append the entry at the tail of the queue.  */
  entry->next = 0;
  if (!di->queue_end)
    di->queue = entry;
  else
    di->queue_end->next = entry;
  di->queue_end = entry;

  return index;
}
/* Obtain a cache block for address ADDR in DCACHE.  If the cache is
   full (size >= dcache_size), the least recently allocated block —
   dcache->oldest — is evicted: unlinked from the age list and removed
   from the splay tree, then reused.  Otherwise a block is taken from
   the free list, or freshly xmalloc'd with room for line_size bytes of
   trailing data, and the cache size is bumped.  The block is keyed by
   the masked address, appended at the newest end of the age list, and
   inserted into the splay tree.  Returns the block; its data contents
   are not initialized here.  */
static struct dcache_block * dcache_alloc (DCACHE *dcache, CORE_ADDR addr) { struct dcache_block *db; if (dcache->size >= dcache_size) { /* Evict the least recently allocated line. */ db = dcache->oldest; remove_block (&dcache->oldest, db); splay_tree_remove (dcache->tree, (splay_tree_key) db->addr); } else { db = dcache->freelist; if (db) remove_block (&dcache->freelist, db); else db = xmalloc (offsetof (struct dcache_block, data) + dcache->line_size); dcache->size++; } db->addr = MASK (dcache, addr); db->refs = 0; /* Put DB at the end of the list, it's the newest. */ append_block (&dcache->oldest, db); splay_tree_insert (dcache->tree, (splay_tree_key) db->addr, (splay_tree_value) db); return db; }
/* Assign RVALUE to a range of elements of LVALUE: REPEAT consecutive
   elements starting at flattened array offset INDEX (DATA statements
   with repeat counts).  Walks LVALUE's reference chain, creating
   array/structure constructor expressions where none exist and
   asserting consistency where they do.  For array references the
   element offset is taken from the reference (AR_ELEMENT) or from
   INDEX (full array at the bottom of the chain); each element's
   constructor is tracked both in a per-expression splay tree keyed by
   offset (con_by_offset) and in an offset-ordered linked list, which
   is fixed up after each insertion by consulting the tree
   predecessor.  The repeat count is stored on the bottom-most
   constructor.  Component references locate or create a constructor
   for the component; substrings are not allowed here.  Finally the
   character or copied/converted RVALUE expression is attached at the
   innermost constructor (or directly as the symbol's value).  */
void gfc_assign_data_value_range (gfc_expr *lvalue, gfc_expr *rvalue, mpz_t index, mpz_t repeat) { gfc_ref *ref; gfc_expr *init, *expr; gfc_constructor *con, *last_con; gfc_constructor *pred; gfc_symbol *symbol; gfc_typespec *last_ts; mpz_t offset; splay_tree spt; splay_tree_node sptn; symbol = lvalue->symtree->n.sym; init = symbol->value; last_ts = &symbol->ts; last_con = NULL; mpz_init_set_si (offset, 0); /* Find/create the parent expressions for subobject references. */ for (ref = lvalue->ref; ref; ref = ref->next) { /* Use the existing initializer expression if it exists. Otherwise create a new one. */ if (init == NULL) expr = gfc_get_expr (); else expr = init; /* Find or create this element. */ switch (ref->type) { case REF_ARRAY: if (init == NULL) { /* The element typespec will be the same as the array typespec. */ expr->ts = *last_ts; /* Setup the expression to hold the constructor. */ expr->expr_type = EXPR_ARRAY; expr->rank = ref->u.ar.as->rank; } else gcc_assert (expr->expr_type == EXPR_ARRAY); if (ref->u.ar.type == AR_ELEMENT) { get_array_index (&ref->u.ar, &offset); /* This had better not be the bottom of the reference. We can still get to a full array via a component. */ gcc_assert (ref->next != NULL); } else { mpz_set (offset, index); /* We're at a full array or an array section. This means that we've better have found a full array, and that we're at the bottom of the reference. */ gcc_assert (ref->u.ar.type == AR_FULL); gcc_assert (ref->next == NULL); } /* Find the same element in the existing constructor. */ /* Splay tree containing offset and gfc_constructor. */ spt = expr->con_by_offset; if (spt == NULL) { spt = splay_tree_new (splay_tree_compare_ints, NULL, NULL); expr->con_by_offset = spt; con = NULL; } else con = find_con_by_offset (spt, offset); if (con == NULL) { splay_tree_key j; /* Create a new constructor. 
*/ con = gfc_get_constructor (); mpz_set (con->n.offset, offset); j = (splay_tree_key) mpz_get_si (offset); if (ref->next == NULL) mpz_set (con->repeat, repeat); sptn = splay_tree_insert (spt, j, (splay_tree_value) con); /* Fix up the linked list. */ sptn = splay_tree_predecessor (spt, j); if (sptn == NULL) { /* Insert at the head. */ con->next = expr->value.constructor; expr->value.constructor = con; } else { /* Insert in the chain. */ pred = (gfc_constructor*) sptn->value; con->next = pred->next; pred->next = con; } } else gcc_assert (ref->next != NULL); break; case REF_COMPONENT: if (init == NULL) { /* Setup the expression to hold the constructor. */ expr->expr_type = EXPR_STRUCTURE; expr->ts.type = BT_DERIVED; expr->ts.derived = ref->u.c.sym; } else gcc_assert (expr->expr_type == EXPR_STRUCTURE); last_ts = &ref->u.c.component->ts; /* Find the same element in the existing constructor. */ con = expr->value.constructor; con = find_con_by_component (ref->u.c.component, con); if (con == NULL) { /* Create a new constructor. */ con = gfc_get_constructor (); con->n.component = ref->u.c.component; con->next = expr->value.constructor; expr->value.constructor = con; } /* Since we're only intending to initialize arrays here, there better be an inner reference. */ gcc_assert (ref->next != NULL); break; case REF_SUBSTRING: default: gcc_unreachable (); } if (init == NULL) { /* Point the container at the new expression. */ if (last_con == NULL) symbol->value = expr; else last_con->expr = expr; } init = con->expr; last_con = con; } if (last_ts->type == BT_CHARACTER) expr = create_character_intializer (init, last_ts, NULL, rvalue); else { /* We should never be overwriting an existing initializer. */ gcc_assert (!init); expr = gfc_copy_expr (rvalue); if (!gfc_compare_types (&lvalue->ts, &expr->ts)) gfc_convert_type (expr, &lvalue->ts, 0); } if (last_con == NULL) symbol->value = expr; else last_con->expr = expr; }
/* Walk callback (for cp_walk_tree) that lowers C++-specific trees in
   *STMT_P toward GENERIC.  DATA is a cp_genericize_data carrying the
   visited-node set (p_set), the BIND_EXPR stack, and the current
   OpenMP task/parallel context.  The cases handled here:
   - variable/parm/result uses are noted in the enclosing OpenMP
     context via omp_cxx_notice_variable;
   - invisible-reference parms are replaced by dereferences (except
     inside thunks), and block-scope extern decls are remapped to the
     visible outer declaration via extern_decl_map;
   - trees already in p_set are not walked twice; ADDR_EXPR and
     RETURN_EXPR of invisiref parms, and the various OMP_CLAUSE_*
     cases, get special treatment so invisiref decls are not
     dereferenced inside clauses while their INIT/MERGE/STMT
     sub-trees still get genericized;
   - CLEANUP_STMT is rewritten to TRY_CATCH_EXPR/TRY_FINALLY_EXPR
     (before voidify_wrapper_expr gets a chance to scan it), IF_STMT
     is genericized and re-walked, COND_EXPR arms with lowered
     bitfield types are re-converted to compatible types;
   - BIND_EXPR pushes onto the bind-expr stack, records tracked block
     vars in the OpenMP context, and walks its body; USING_STMT
     attaches an IMPORTED_DECL to the innermost enclosing BLOCK and
     then disappears; DECL_EXPRs of USING_DECLs are dropped;
   - OMP_PARALLEL/OMP_TASK builds a fresh cp_genericize_omp_taskreg
     context, seeds it from the data-sharing clauses, walks the body
     with it, then tears it down;
   - FOR/WHILE/DO/SWITCH/CONTINUE/BREAK statements and OMP looping
     constructs are genericized by their helpers; SIZEOF_EXPR is
     folded to the appropriate sizeof/alignof value.
   Each resulting tree is inserted into p_set before returning.  */
static tree cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data) { tree stmt = *stmt_p; struct cp_genericize_data *wtd = (struct cp_genericize_data *) data; struct pointer_set_t *p_set = wtd->p_set; /* If in an OpenMP context, note var uses. */ if (__builtin_expect (wtd->omp_ctx != NULL, 0) && (VAR_P (stmt) || TREE_CODE (stmt) == PARM_DECL || TREE_CODE (stmt) == RESULT_DECL) && omp_var_to_track (stmt)) omp_cxx_notice_variable (wtd->omp_ctx, stmt); if (is_invisiref_parm (stmt) /* Don't dereference parms in a thunk, pass the references through. */ && !(DECL_THUNK_P (current_function_decl) && TREE_CODE (stmt) == PARM_DECL)) { *stmt_p = convert_from_reference (stmt); *walk_subtrees = 0; return NULL; } /* Map block scope extern declarations to visible declarations with the same name and type in outer scopes if any. */ if (cp_function_chain->extern_decl_map && VAR_OR_FUNCTION_DECL_P (stmt) && DECL_EXTERNAL (stmt)) { struct cxx_int_tree_map *h, in; in.uid = DECL_UID (stmt); h = (struct cxx_int_tree_map *) htab_find_with_hash (cp_function_chain->extern_decl_map, &in, in.uid); if (h) { *stmt_p = h->to; *walk_subtrees = 0; return NULL; } } /* Other than invisiref parms, don't walk the same tree twice. */ if (pointer_set_contains (p_set, stmt)) { *walk_subtrees = 0; return NULL_TREE; } if (TREE_CODE (stmt) == ADDR_EXPR && is_invisiref_parm (TREE_OPERAND (stmt, 0))) { /* If in an OpenMP context, note var uses. */ if (__builtin_expect (wtd->omp_ctx != NULL, 0) && omp_var_to_track (TREE_OPERAND (stmt, 0))) omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0)); *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0)); *walk_subtrees = 0; } else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0))) /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. 
*/ *walk_subtrees = 0; else if (TREE_CODE (stmt) == OMP_CLAUSE) switch (OMP_CLAUSE_CODE (stmt)) { case OMP_CLAUSE_LASTPRIVATE: /* Don't dereference an invisiref in OpenMP clauses. */ if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt))) { *walk_subtrees = 0; if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt)) cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt), cp_genericize_r, data, NULL); } break; case OMP_CLAUSE_PRIVATE: /* Don't dereference an invisiref in OpenMP clauses. */ if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt))) *walk_subtrees = 0; else if (wtd->omp_ctx != NULL) { /* Private clause doesn't cause any references to the var in outer contexts, avoid calling omp_cxx_notice_variable for it. */ struct cp_genericize_omp_taskreg *old = wtd->omp_ctx; wtd->omp_ctx = NULL; cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r, data, NULL); wtd->omp_ctx = old; *walk_subtrees = 0; } break; case OMP_CLAUSE_SHARED: case OMP_CLAUSE_FIRSTPRIVATE: case OMP_CLAUSE_COPYIN: case OMP_CLAUSE_COPYPRIVATE: /* Don't dereference an invisiref in OpenMP clauses. */ if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt))) *walk_subtrees = 0; break; case OMP_CLAUSE_REDUCTION: /* Don't dereference an invisiref in reduction clause's OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE} still needs to be genericized. */ if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt))) { *walk_subtrees = 0; if (OMP_CLAUSE_REDUCTION_INIT (stmt)) cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt), cp_genericize_r, data, NULL); if (OMP_CLAUSE_REDUCTION_MERGE (stmt)) cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt), cp_genericize_r, data, NULL); } break; default: break; } else if (IS_TYPE_OR_DECL_P (stmt)) *walk_subtrees = 0; /* Due to the way voidify_wrapper_expr is written, we don't get a chance to lower this construct before scanning it, so we need to lower these before doing anything else. */ else if (TREE_CODE (stmt) == CLEANUP_STMT) *stmt_p = build2_loc (EXPR_LOCATION (stmt), CLEANUP_EH_ONLY (stmt) ? 
TRY_CATCH_EXPR : TRY_FINALLY_EXPR, void_type_node, CLEANUP_BODY (stmt), CLEANUP_EXPR (stmt)); else if (TREE_CODE (stmt) == IF_STMT) { genericize_if_stmt (stmt_p); /* *stmt_p has changed, tail recurse to handle it again. */ return cp_genericize_r (stmt_p, walk_subtrees, data); } /* COND_EXPR might have incompatible types in branches if one or both arms are bitfields. Fix it up now. */ else if (TREE_CODE (stmt) == COND_EXPR) { tree type_left = (TREE_OPERAND (stmt, 1) ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1)) : NULL_TREE); tree type_right = (TREE_OPERAND (stmt, 2) ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2)) : NULL_TREE); if (type_left && !useless_type_conversion_p (TREE_TYPE (stmt), TREE_TYPE (TREE_OPERAND (stmt, 1)))) { TREE_OPERAND (stmt, 1) = fold_convert (type_left, TREE_OPERAND (stmt, 1)); gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt), type_left)); } if (type_right && !useless_type_conversion_p (TREE_TYPE (stmt), TREE_TYPE (TREE_OPERAND (stmt, 2)))) { TREE_OPERAND (stmt, 2) = fold_convert (type_right, TREE_OPERAND (stmt, 2)); gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt), type_right)); } } else if (TREE_CODE (stmt) == BIND_EXPR) { if (__builtin_expect (wtd->omp_ctx != NULL, 0)) { tree decl; for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl)) if (VAR_P (decl) && !DECL_EXTERNAL (decl) && omp_var_to_track (decl)) { splay_tree_node n = splay_tree_lookup (wtd->omp_ctx->variables, (splay_tree_key) decl); if (n == NULL) splay_tree_insert (wtd->omp_ctx->variables, (splay_tree_key) decl, TREE_STATIC (decl) ? OMP_CLAUSE_DEFAULT_SHARED : OMP_CLAUSE_DEFAULT_PRIVATE); } } wtd->bind_expr_stack.safe_push (stmt); cp_walk_tree (&BIND_EXPR_BODY (stmt), cp_genericize_r, data, NULL); wtd->bind_expr_stack.pop (); } else if (TREE_CODE (stmt) == USING_STMT) { tree block = NULL_TREE; /* Get the innermost inclosing GIMPLE_BIND that has a non NULL BLOCK, and append an IMPORTED_DECL to its BLOCK_VARS chained list. 
*/ if (wtd->bind_expr_stack.exists ()) { int i; for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--) if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i]))) break; } if (block) { tree using_directive; gcc_assert (TREE_OPERAND (stmt, 0)); using_directive = make_node (IMPORTED_DECL); TREE_TYPE (using_directive) = void_type_node; IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = TREE_OPERAND (stmt, 0); DECL_CHAIN (using_directive) = BLOCK_VARS (block); BLOCK_VARS (block) = using_directive; } /* The USING_STMT won't appear in GENERIC. */ *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node); *walk_subtrees = 0; } else if (TREE_CODE (stmt) == DECL_EXPR && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL) { /* Using decls inside DECL_EXPRs are just dropped on the floor. */ *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node); *walk_subtrees = 0; } else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK) { struct cp_genericize_omp_taskreg omp_ctx; tree c, decl; splay_tree_node n; *walk_subtrees = 0; cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL); omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL; omp_ctx.default_shared = omp_ctx.is_parallel; omp_ctx.outer = wtd->omp_ctx; omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0); wtd->omp_ctx = &omp_ctx; for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c)) switch (OMP_CLAUSE_CODE (c)) { case OMP_CLAUSE_SHARED: case OMP_CLAUSE_PRIVATE: case OMP_CLAUSE_FIRSTPRIVATE: case OMP_CLAUSE_LASTPRIVATE: decl = OMP_CLAUSE_DECL (c); if (decl == error_mark_node || !omp_var_to_track (decl)) break; n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl); if (n != NULL) break; splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl, OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED ? 
OMP_CLAUSE_DEFAULT_SHARED : OMP_CLAUSE_DEFAULT_PRIVATE); if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer) omp_cxx_notice_variable (omp_ctx.outer, decl); break; case OMP_CLAUSE_DEFAULT: if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED) omp_ctx.default_shared = true; default: break; } cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL); wtd->omp_ctx = omp_ctx.outer; splay_tree_delete (omp_ctx.variables); } else if (TREE_CODE (stmt) == CONVERT_EXPR) gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt)); else if (TREE_CODE (stmt) == FOR_STMT) genericize_for_stmt (stmt_p, walk_subtrees, data); else if (TREE_CODE (stmt) == WHILE_STMT) genericize_while_stmt (stmt_p, walk_subtrees, data); else if (TREE_CODE (stmt) == DO_STMT) genericize_do_stmt (stmt_p, walk_subtrees, data); else if (TREE_CODE (stmt) == SWITCH_STMT) genericize_switch_stmt (stmt_p, walk_subtrees, data); else if (TREE_CODE (stmt) == CONTINUE_STMT) genericize_continue_stmt (stmt_p); else if (TREE_CODE (stmt) == BREAK_STMT) genericize_break_stmt (stmt_p); else if (TREE_CODE (stmt) == OMP_FOR || TREE_CODE (stmt) == OMP_SIMD || TREE_CODE (stmt) == OMP_DISTRIBUTE) genericize_omp_for_stmt (stmt_p, walk_subtrees, data); else if (TREE_CODE (stmt) == SIZEOF_EXPR) { if (SIZEOF_EXPR_TYPE_P (stmt)) *stmt_p = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)), SIZEOF_EXPR, false); else if (TYPE_P (TREE_OPERAND (stmt, 0))) *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0), SIZEOF_EXPR, false); else *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0), SIZEOF_EXPR, false); if (*stmt_p == error_mark_node) *stmt_p = size_one_node; return NULL; } pointer_set_insert (p_set, *stmt_p); return NULL; }
/* Remove the range [START, END) from interval TREE.  The cases
   handled: an interval keyed exactly at START is shortened (its start
   moved to END) when it extends past END, otherwise deleted; when no
   interval starts at START, a predecessor overlapping START is either
   split in two (a new interval [END, old_end) is inserted and the
   predecessor's end moved to START) or simply truncated at START.
   Any remaining intervals that begin inside [START, END) are then
   deleted outright, or have their start moved to END when they extend
   past it.  TREE->deleted is set whenever anything was removed or
   trimmed, and TREE->size tracks the interval count.  The tree mutex
   must be held by the caller.  */
void interval_tree_delete(interval_tree tree, uint64_t start, uint64_t end) { splay_tree_node node, prev, next; CHECK_MUTEX_LOCKED(tree->mutex); if ((node = splay_tree_lookup(tree->splay, start)) != NULL) { tree->deleted = true; if (INTERVAL_END(node) > end) { /* We are shortening the interval NODE. */ INTERVAL_START(node) = end; return; } else { splay_tree_delete(tree->splay, start); tree->size--; } } else { prev = splay_tree_predecessor(tree->splay, start); if (prev && start < INTERVAL_END(prev)) { tree->deleted = true; if (INTERVAL_END(prev) > end) { /* We are cutting a subinterval from interval PREV. */ splay_tree_insert(tree->splay, end, INTERVAL_END(prev)); tree->size++; INTERVAL_END(prev) = start; return; } else { /* We are shortening the interval PREV. */ INTERVAL_END(prev) = start; } } } /* Delete rest intervals which intersect [START, END). */ while (1) { next = splay_tree_successor(tree->splay, start); if (!next || INTERVAL_START(next) >= end) break; tree->deleted = true; if (INTERVAL_END(next) <= end) { splay_tree_delete(tree->splay, INTERVAL_START(next)); tree->size--; } else { INTERVAL_START(next) = end; return; } } }
/* Insert the range [START, END) into interval TREE, coalescing with
   existing intervals.  If an interval already starts at START it is
   kept (when it already covers END) or extended.  Otherwise the
   predecessor/successor of START are consulted: a predecessor whose
   end reaches START is extended forward, a successor starting no
   later than END is extended backward/forward as needed, and only
   when neither overlaps is a genuinely new node inserted (bumping
   TREE->size).  Finally, successors completely or partially covered
   by the resulting interval are merged into it and deleted.  Returns
   the node that covers [START, END).  The tree mutex must be held by
   the caller.  */
interval_tree_node interval_tree_insert(interval_tree tree, uint64_t start, uint64_t end) { splay_tree_node node, prev, next; CHECK_MUTEX_LOCKED(tree->mutex); if ((node = splay_tree_lookup(tree->splay, start)) != NULL) { /* The START of interval is already in the tree. */ if (INTERVAL_END(node) >= end) { /* There already is a larger interval starting in START so we have nothing to do. */ return node; } INTERVAL_END(node) = end; } else { /* Lookup the predecessor and successor of key START. */ prev = splay_tree_predecessor(tree->splay, start); next = splay_tree_successor(tree->splay, start); if (prev && INTERVAL_END(prev) >= start) { /* We are extending PREV. */ node = prev; if (INTERVAL_END(node) < end) INTERVAL_END(node) = end; } else if (next && INTERVAL_START(next) <= end) { /* We are extending NEXT. */ node = next; if (INTERVAL_START(node) > start) INTERVAL_START(node) = start; if (INTERVAL_END(node) < end) INTERVAL_END(node) = end; } else { /* We are really inserting a new node. */ node = splay_tree_insert(tree->splay, start, end); tree->size++; } } /* Merge the successors if they are covered by [START, END). */ while ((next = splay_tree_successor(tree->splay, INTERVAL_START(node))) != NULL) { if (INTERVAL_START(next) <= INTERVAL_END(node)) { if (INTERVAL_END(node) < INTERVAL_END(next)) INTERVAL_END(node) = INTERVAL_END(next); splay_tree_delete(tree->splay, INTERVAL_START(next)); tree->size--; } else break; } return node; }
/* For a maybe-in-charge constructor or destructor FN, generate the
   bodies of its clones.  Returns 0 immediately for anything else.
   For each clone: copies source position, inlining, linkage and
   visibility flags from FN; adjusts parameter names/locations;
   starts the clone's function body; and builds a decl_map splay tree
   remapping FN's parameters to the clone's — the in-charge parameter
   maps to the appropriate constant, the VTT parameter maps to the
   clone's own VTT parm (or null_pointer_node when the clone has
   none), everything else maps one-to-one, and with
   cdtor_returns_this the result decl is remapped too.  The body is
   then either cloned via clone_body or, under the APPLE ARM
   structor-thunk scheme, replaced by a call to an identical earlier
   clone (find_earlier_clone/thunk_body).  The clone inherits FN's
   can_throw state and is expanded or deferred.
   NOTE(review): the function appears truncated in this view — the
   code following the clone loop (e.g. the matching
   pop_from_top_level and the return value) is not visible here.  */
bool maybe_clone_body (tree fn) { tree clone; bool first = true; /* APPLE LOCAL begin ARM structor thunks */ tree clone_to_call; struct clone_info info; /* APPLE LOCAL end ARM structor thunks */ /* We only clone constructors and destructors. */ if (!DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (fn) && !DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (fn)) return 0; /* Emit the DWARF1 abstract instance. */ (*debug_hooks->deferred_inline_function) (fn); /* APPLE LOCAL begin ARM structor thunks */ /* Figure out whether we can use the 'thunk' implementation, and if so on which clones. */ info.next_clone = 0; info.which_thunks_ok = compute_use_thunks (fn); /* APPLE LOCAL end ARM structor thunks */ /* We know that any clones immediately follow FN in the TYPE_METHODS list. */ push_to_top_level (); FOR_EACH_CLONE (clone, fn) { tree parm; tree clone_parm; int parmno; splay_tree decl_map; /* Update CLONE's source position information to match FN's. */ DECL_SOURCE_LOCATION (clone) = DECL_SOURCE_LOCATION (fn); DECL_INLINE (clone) = DECL_INLINE (fn); DECL_DECLARED_INLINE_P (clone) = DECL_DECLARED_INLINE_P (fn); /* LLVM LOCAL begin inlinehint attribute */ DECL_EXPLICIT_INLINE_P (clone) = DECL_EXPLICIT_INLINE_P (fn); /* LLVM LOCAL end inlinehint attribute */ DECL_COMDAT (clone) = DECL_COMDAT (fn); DECL_WEAK (clone) = DECL_WEAK (fn); DECL_ONE_ONLY (clone) = DECL_ONE_ONLY (fn); DECL_SECTION_NAME (clone) = DECL_SECTION_NAME (fn); DECL_USE_TEMPLATE (clone) = DECL_USE_TEMPLATE (fn); DECL_EXTERNAL (clone) = DECL_EXTERNAL (fn); DECL_INTERFACE_KNOWN (clone) = DECL_INTERFACE_KNOWN (fn); DECL_NOT_REALLY_EXTERN (clone) = DECL_NOT_REALLY_EXTERN (fn); TREE_PUBLIC (clone) = TREE_PUBLIC (fn); DECL_VISIBILITY (clone) = DECL_VISIBILITY (fn); DECL_VISIBILITY_SPECIFIED (clone) = DECL_VISIBILITY_SPECIFIED (fn); /* Adjust the parameter names and locations. */ parm = DECL_ARGUMENTS (fn); clone_parm = DECL_ARGUMENTS (clone); /* Update the `this' parameter, which is always first. 
*/ update_cloned_parm (parm, clone_parm, first); parm = TREE_CHAIN (parm); clone_parm = TREE_CHAIN (clone_parm); if (DECL_HAS_IN_CHARGE_PARM_P (fn)) parm = TREE_CHAIN (parm); if (DECL_HAS_VTT_PARM_P (fn)) parm = TREE_CHAIN (parm); if (DECL_HAS_VTT_PARM_P (clone)) clone_parm = TREE_CHAIN (clone_parm); for (; parm; parm = TREE_CHAIN (parm), clone_parm = TREE_CHAIN (clone_parm)) /* Update this parameter. */ update_cloned_parm (parm, clone_parm, first); /* Start processing the function. */ start_preparsed_function (clone, NULL_TREE, SF_PRE_PARSED); /* Remap the parameters. */ decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL); for (parmno = 0, parm = DECL_ARGUMENTS (fn), clone_parm = DECL_ARGUMENTS (clone); parm; ++parmno, parm = TREE_CHAIN (parm)) { /* Map the in-charge parameter to an appropriate constant. */ if (DECL_HAS_IN_CHARGE_PARM_P (fn) && parmno == 1) { tree in_charge; in_charge = in_charge_arg_for_name (DECL_NAME (clone)); splay_tree_insert (decl_map, (splay_tree_key) parm, (splay_tree_value) in_charge); /* APPLE LOCAL ARM structor thunks */ info.in_charge_value [info.next_clone] = in_charge; } else if (DECL_ARTIFICIAL (parm) && DECL_NAME (parm) == vtt_parm_identifier) { /* For a subobject constructor or destructor, the next argument is the VTT parameter. Remap the VTT_PARM from the CLONE to this parameter. */ if (DECL_HAS_VTT_PARM_P (clone)) { DECL_ABSTRACT_ORIGIN (clone_parm) = parm; splay_tree_insert (decl_map, (splay_tree_key) parm, (splay_tree_value) clone_parm); clone_parm = TREE_CHAIN (clone_parm); } /* Otherwise, map the VTT parameter to `NULL'. */ else { splay_tree_insert (decl_map, (splay_tree_key) parm, (splay_tree_value) null_pointer_node); } } /* Map other parameters to their equivalents in the cloned function. 
*/ else { splay_tree_insert (decl_map, (splay_tree_key) parm, (splay_tree_value) clone_parm); clone_parm = TREE_CHAIN (clone_parm); } } if (targetm.cxx.cdtor_returns_this ()) { parm = DECL_RESULT (fn); clone_parm = DECL_RESULT (clone); splay_tree_insert (decl_map, (splay_tree_key) parm, (splay_tree_value) clone_parm); } /* APPLE LOCAL begin ARM structor thunks */ clone_to_call = find_earlier_clone (&info); if (clone_to_call) /* Bodies are identical; replace later one with call to an earlier one. */ thunk_body (clone, fn, clone_to_call); else /* Clone the body. */ clone_body (clone, fn, decl_map); /* APPLE LOCAL end ARM structor thunks */ /* The clone can throw iff the original function can throw. */ cp_function_chain->can_throw = !TREE_NOTHROW (fn); /* Now, expand this function into RTL, if appropriate. */ finish_function (0); BLOCK_ABSTRACT_ORIGIN (DECL_INITIAL (clone)) = DECL_INITIAL (fn); expand_or_defer_fn (clone); first = false; /* APPLE LOCAL begin ARM structor thunks */ info.clones [info.next_clone] = clone; info.next_clone++; /* APPLE LOCAL end ARM structor thunks */ }
/* Assign RVALUE to the element of LVALUE denoted by INDEX, as needed
   for DATA-statement initialization.  Walks LVALUE's reference chain
   (stopping at a trailing substring reference), creating array or
   structure constructor expressions where none exist and diagnosing a
   doubly-initialized array.  Array element offsets are bounds-checked
   against the declared shape (spec_size); each element's constructor
   is tracked both in a per-expression splay tree keyed by offset
   (con_by_offset) and in an offset-ordered linked list fixed up via
   the tree predecessor after each insertion.  Component references
   locate or create the matching component constructor.  Finally a
   character initializer is built (for substrings / character type) or
   RVALUE is copied and converted to LVALUE's type — re-initialization
   of an existing value is diagnosed as a GNU extension, keeping the
   textually later of the two initializers.  Returns SUCCESS or
   FAILURE.  */
gfc_try gfc_assign_data_value (gfc_expr *lvalue, gfc_expr *rvalue, mpz_t index) { gfc_ref *ref; gfc_expr *init; gfc_expr *expr; gfc_constructor *con; gfc_constructor *last_con; gfc_constructor *pred; gfc_symbol *symbol; gfc_typespec *last_ts; mpz_t offset; splay_tree spt; splay_tree_node sptn; symbol = lvalue->symtree->n.sym; init = symbol->value; last_ts = &symbol->ts; last_con = NULL; mpz_init_set_si (offset, 0); /* Find/create the parent expressions for subobject references. */ for (ref = lvalue->ref; ref; ref = ref->next) { /* Break out of the loop if we find a substring. */ if (ref->type == REF_SUBSTRING) { /* A substring should always be the last subobject reference. */ gcc_assert (ref->next == NULL); break; } /* Use the existing initializer expression if it exists. Otherwise create a new one. */ if (init == NULL) expr = gfc_get_expr (); else expr = init; /* Find or create this element. */ switch (ref->type) { case REF_ARRAY: if (init && expr->expr_type != EXPR_ARRAY) { gfc_error ("'%s' at %L already is initialized at %L", lvalue->symtree->n.sym->name, &lvalue->where, &init->where); return FAILURE; } if (init == NULL) { /* The element typespec will be the same as the array typespec. */ expr->ts = *last_ts; /* Setup the expression to hold the constructor. */ expr->expr_type = EXPR_ARRAY; expr->rank = ref->u.ar.as->rank; } if (ref->u.ar.type == AR_ELEMENT) get_array_index (&ref->u.ar, &offset); else mpz_set (offset, index); /* Check the bounds. */ if (mpz_cmp_si (offset, 0) < 0) { gfc_error ("Data element below array lower bound at %L", &lvalue->where); return FAILURE; } else { mpz_t size; if (spec_size (ref->u.ar.as, &size) == SUCCESS) { if (mpz_cmp (offset, size) >= 0) { mpz_clear (size); gfc_error ("Data element above array upper bound at %L", &lvalue->where); return FAILURE; } mpz_clear (size); } } /* Splay tree containing offset and gfc_constructor. 
*/ spt = expr->con_by_offset; if (spt == NULL) { spt = splay_tree_new (splay_tree_compare_ints, NULL, NULL); expr->con_by_offset = spt; con = NULL; } else con = find_con_by_offset (spt, offset); if (con == NULL) { splay_tree_key j; /* Create a new constructor. */ con = gfc_get_constructor (); mpz_set (con->n.offset, offset); j = (splay_tree_key) mpz_get_si (offset); sptn = splay_tree_insert (spt, j, (splay_tree_value) con); /* Fix up the linked list. */ sptn = splay_tree_predecessor (spt, j); if (sptn == NULL) { /* Insert at the head. */ con->next = expr->value.constructor; expr->value.constructor = con; } else { /* Insert in the chain. */ pred = (gfc_constructor*) sptn->value; con->next = pred->next; pred->next = con; } } break; case REF_COMPONENT: if (init == NULL) { /* Setup the expression to hold the constructor. */ expr->expr_type = EXPR_STRUCTURE; expr->ts.type = BT_DERIVED; expr->ts.derived = ref->u.c.sym; } else gcc_assert (expr->expr_type == EXPR_STRUCTURE); last_ts = &ref->u.c.component->ts; /* Find the same element in the existing constructor. */ con = expr->value.constructor; con = find_con_by_component (ref->u.c.component, con); if (con == NULL) { /* Create a new constructor. */ con = gfc_get_constructor (); con->n.component = ref->u.c.component; con->next = expr->value.constructor; expr->value.constructor = con; } break; default: gcc_unreachable (); } if (init == NULL) { /* Point the container at the new expression. */ if (last_con == NULL) symbol->value = expr; else last_con->expr = expr; } init = con->expr; last_con = con; } if (ref || last_ts->type == BT_CHARACTER) expr = create_character_intializer (init, last_ts, ref, rvalue); else { /* Overwriting an existing initializer is non-standard but usually only provokes a warning from other compilers. */ if (init != NULL) { /* Order in which the expressions arrive here depends on whether they are from data statements or F95 style declarations. Therefore, check which is the most recent. 
*/ expr = (LOCATION_LINE (init->where.lb->location) > LOCATION_LINE (rvalue->where.lb->location)) ? init : rvalue; gfc_notify_std (GFC_STD_GNU, "Extension: re-initialization " "of '%s' at %L", symbol->name, &expr->where); } expr = gfc_copy_expr (rvalue); if (!gfc_compare_types (&lvalue->ts, &expr->ts)) gfc_convert_type (expr, &lvalue->ts, 0); } if (last_con == NULL) symbol->value = expr; else last_con->expr = expr; return SUCCESS; }