static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  VEC (edge, heap) *stack = NULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block);
  sbitmap_zero (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    VEC_safe_push (edge, heap, stack, e);

  while (! VEC_empty (edge, stack))
    {
      basic_block src;

      e = VEC_pop (edge, stack);
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! TEST_BIT (visited, src->index))
	{
	  SET_BIT (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    VEC_safe_push (edge, heap, stack, e);
	}
    }

  VEC_free (edge, heap, stack);
  sbitmap_free (visited);

  return ret;
}
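/* Illustrative only: a minimal, self-contained sketch of the explicit-stack
   reverse walk used by reachable_at_most_once above, with plain arrays
   standing in for GCC's VEC and sbitmap.  The pred-list graph encoding,
   the bounds, and the function name are assumptions for the example, and
   edge flags (EDGE_COMPLEX) are omitted; this is not GCC API.  */
#include <stdbool.h>
#include <string.h>

#define MAX_NODES 16

/* preds[v] lists the predecessors of v; npreds[v] is its count.  Returns
   false if a path from TARGET back to TARGET exists without passing
   through START, i.e. TARGET may execute more than once per START.  */
static bool
at_most_once_sketch (int start, int target,
		     int preds[][MAX_NODES], int npreds[])
{
  int stack[MAX_NODES * MAX_NODES];
  bool visited[MAX_NODES];
  int sp = 0, i;

  if (target == start)
    return true;

  memset (visited, 0, sizeof visited);
  for (i = 0; i < npreds[target]; i++)
    stack[sp++] = preds[target][i];

  while (sp > 0)
    {
      int src = stack[--sp];

      if (src == start)
	continue;		/* Path begins at START: fine.  */
      if (src == target)
	return false;		/* TARGET reaches itself: a loop.  */

      if (!visited[src])
	{
	  visited[src] = true;
	  for (i = 0; i < npreds[src]; i++)
	    stack[sp++] = preds[src][i];
	}
    }

  return true;
}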
struct ui_file *
cli_out_set_stream (struct ui_out *uiout, struct ui_file *stream)
{
  cli_out_data *data = ui_out_data (uiout);
  struct ui_file *old;

  old = VEC_pop (ui_filep, data->streams);
  VEC_quick_push (ui_filep, data->streams, stream);

  return old;
}
static int
cli_redirect (struct ui_out *uiout, struct ui_file *outstream)
{
  cli_out_data *data = ui_out_data (uiout);

  if (outstream != NULL)
    VEC_safe_push (ui_filep, data->streams, outstream);
  else
    VEC_pop (ui_filep, data->streams);

  return 0;
}
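/* Illustrative only: the push/pop discipline that cli_out_set_stream and
   cli_redirect above rely on -- the top of the stream stack is always the
   active output stream, so redirects nest naturally.  The stream_stack
   type and function names here are assumptions for the sketch, not GDB's
   ui_file API.  */
#include <stdio.h>

struct stream_stack
{
  FILE *streams[8];
  int depth;
};

/* Push a new stream to redirect output, or pop back to the previous one
   on NULL, mirroring cli_redirect above.  */
static void
redirect_sketch (struct stream_stack *s, FILE *outstream)
{
  if (outstream != NULL)
    s->streams[s->depth++] = outstream;
  else
    s->depth--;
}

/* The active stream is whatever sits on top of the stack.  */
static FILE *
current_stream_sketch (struct stream_stack *s)
{
  return s->streams[s->depth - 1];
}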
void
fini_walk_dominator_tree (struct dom_walk_data *walk_data)
{
  if (walk_data->initialize_block_local_data)
    {
      while (VEC_length (void_p, walk_data->free_block_data) > 0)
	free (VEC_pop (void_p, walk_data->free_block_data));
    }

  VEC_free (void_p, heap, walk_data->free_block_data);
  VEC_free (void_p, heap, walk_data->block_data_stack);
}
tree
make_ssa_name_fn (struct function *fn, tree var, gimple stmt)
{
  tree t;
  use_operand_p imm;

  gcc_assert (DECL_P (var));

  /* If our free list has an element, then use it.  */
  if (!VEC_empty (tree, FREE_SSANAMES (fn)))
    {
      t = VEC_pop (tree, FREE_SSANAMES (fn));
#ifdef GATHER_STATISTICS
      ssa_name_nodes_reused++;
#endif

      /* The node was cleared out when we put it on the free list, so
	 there is no need to do so again here.  */
      gcc_assert (ssa_name (SSA_NAME_VERSION (t)) == NULL);
      VEC_replace (tree, SSANAMES (fn), SSA_NAME_VERSION (t), t);
    }
  else
    {
      t = make_node (SSA_NAME);
      SSA_NAME_VERSION (t) = VEC_length (tree, SSANAMES (fn));
      VEC_safe_push (tree, gc, SSANAMES (fn), t);
#ifdef GATHER_STATISTICS
      ssa_name_nodes_created++;
#endif
    }

  TREE_TYPE (t) = TREE_TYPE (var);
  SSA_NAME_VAR (t) = var;
  SSA_NAME_DEF_STMT (t) = stmt;
  SSA_NAME_PTR_INFO (t) = NULL;
  SSA_NAME_IN_FREE_LIST (t) = 0;
  SSA_NAME_IS_DEFAULT_DEF (t) = 0;
  imm = &(SSA_NAME_IMM_USE_NODE (t));
  imm->use = NULL;
  imm->prev = imm;
  imm->next = imm;
  imm->loc.ssa_name = t;

  return t;
}
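/* Illustrative only: the free-list reuse pattern from make_ssa_name_fn
   above, reduced to a generic node allocator.  Recycled nodes are served
   before fresh allocations, amortizing allocator traffic.  All names here
   are assumptions for the sketch.  */
#include <stdlib.h>

struct node { struct node *next_free; int payload; };

static struct node *free_list_sketch;

static struct node *
node_alloc_sketch (void)
{
  if (free_list_sketch)
    {
      /* Reuse a recycled node from the free list.  */
      struct node *n = free_list_sketch;
      free_list_sketch = n->next_free;
      n->next_free = NULL;
      return n;
    }
  /* Free list empty: allocate a fresh, zeroed node.  */
  return calloc (1, sizeof (struct node));
}

static void
node_release_sketch (struct node *n)
{
  /* Push onto the free list instead of freeing, for later reuse.  */
  n->next_free = free_list_sketch;
  free_list_sketch = n;
}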
static inline gimple
allocate_phi_node (size_t len)
{
  gimple phi;
  size_t bucket = NUM_BUCKETS - 2;
  size_t size = sizeof (struct gimple_statement_phi)
		+ (len - 1) * sizeof (struct phi_arg_d);

  if (free_phinode_count)
    for (bucket = len - 2; bucket < NUM_BUCKETS - 2; bucket++)
      if (free_phinodes[bucket])
	break;

  /* If our free list has an element, then use it.  */
  if (bucket < NUM_BUCKETS - 2
      && gimple_phi_capacity (VEC_index (gimple, free_phinodes[bucket], 0))
	 >= len)
    {
      free_phinode_count--;
      phi = VEC_pop (gimple, free_phinodes[bucket]);
      if (VEC_empty (gimple, free_phinodes[bucket]))
	VEC_free (gimple, gc, free_phinodes[bucket]);
#ifdef GATHER_STATISTICS
      phi_nodes_reused++;
#endif
    }
  else
    {
      phi = (gimple) ggc_alloc (size);
#ifdef GATHER_STATISTICS
      phi_nodes_created++;
      {
	enum gimple_alloc_kind kind = gimple_alloc_kind (GIMPLE_PHI);

	gimple_alloc_counts[(int) kind]++;
	gimple_alloc_sizes[(int) kind] += size;
      }
#endif
    }

  return phi;
}
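/* Illustrative only: the size-class free-list search from
   allocate_phi_node above, reduced to plain malloc and a fixed bucket
   table -- scan upward from the smallest class that could fit, check
   capacity before reusing, fall back to a fresh allocation.  Every name
   here is an assumption for the sketch, not GCC API.  */
#include <stdlib.h>

#define SKETCH_BUCKETS 10

struct blob { struct blob *next; size_t capacity; };

static struct blob *bucket_heads[SKETCH_BUCKETS];
static int bucket_free_count;

static struct blob *
blob_alloc_sketch (size_t len)
{
  size_t bucket;

  /* Scan upward from the smallest size class that could hold LEN.  */
  if (bucket_free_count)
    for (bucket = len; bucket < SKETCH_BUCKETS; bucket++)
      if (bucket_heads[bucket] && bucket_heads[bucket]->capacity >= len)
	{
	  /* Reuse a recycled blob from this bucket.  */
	  struct blob *b = bucket_heads[bucket];
	  bucket_heads[bucket] = b->next;
	  bucket_free_count--;
	  return b;
	}

  /* No suitable recycled blob: allocate a fresh one with a
     flexible-size tail of LEN bytes.  */
  {
    struct blob *b = malloc (sizeof (struct blob) + len);
    if (b)
      {
	b->next = NULL;
	b->capacity = len;
      }
    return b;
  }
}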
void
walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
{
  void *bd = NULL;
  basic_block dest;
  block_stmt_iterator bsi;
  bool is_interesting;
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks * 2);
  int sp = 0;

  while (true)
    {
      /* Don't worry about unreachable blocks.  */
      if (EDGE_COUNT (bb->preds) > 0
	  || bb == ENTRY_BLOCK_PTR
	  || bb == EXIT_BLOCK_PTR)
	{
	  /* If block BB is not interesting to the caller, then none of the
	     callbacks that walk the statements in BB are going to be
	     executed.  */
	  is_interesting = walk_data->interesting_blocks == NULL
			   || TEST_BIT (walk_data->interesting_blocks,
					bb->index);

	  /* Callback to initialize the local data structure.  */
	  if (walk_data->initialize_block_local_data)
	    {
	      bool recycled;

	      /* First get some local data, reusing any local data pointer
		 we may have saved.  */
	      if (VEC_length (void_p, walk_data->free_block_data) > 0)
		{
		  bd = VEC_pop (void_p, walk_data->free_block_data);
		  recycled = 1;
		}
	      else
		{
		  bd = xcalloc (1, walk_data->block_local_data_size);
		  recycled = 0;
		}

	      /* Push the local data into the local data stack.  */
	      VEC_safe_push (void_p, heap, walk_data->block_data_stack, bd);

	      /* Call the initializer.  */
	      walk_data->initialize_block_local_data (walk_data, bb,
						      recycled);
	    }

	  /* Callback for operations to execute before we have walked the
	     dominator children, but before we walk statements.  */
	  if (walk_data->before_dom_children_before_stmts)
	    (*walk_data->before_dom_children_before_stmts) (walk_data, bb);

	  /* Statement walk before walking dominator children.  */
	  if (is_interesting && walk_data->before_dom_children_walk_stmts)
	    {
	      if (walk_data->walk_stmts_backward)
		for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
		  (*walk_data->before_dom_children_walk_stmts) (walk_data,
								bb, bsi);
	      else
		for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
		  (*walk_data->before_dom_children_walk_stmts) (walk_data,
								bb, bsi);
	    }

	  /* Callback for operations to execute before we have walked the
	     dominator children, and after we walk statements.  */
	  if (walk_data->before_dom_children_after_stmts)
	    (*walk_data->before_dom_children_after_stmts) (walk_data, bb);

	  /* Mark the current BB to be popped out of the recursion stack
	     once its children are processed.  */
	  worklist[sp++] = bb;
	  worklist[sp++] = NULL;

	  for (dest = first_dom_son (walk_data->dom_direction, bb);
	       dest;
	       dest = next_dom_son (walk_data->dom_direction, dest))
	    worklist[sp++] = dest;
	}

      /* NULL is used to signal a pop operation in the recursion stack.  */
      while (sp > 0 && !worklist[sp - 1])
	{
	  --sp;
	  bb = worklist[--sp];
	  is_interesting = walk_data->interesting_blocks == NULL
			   || TEST_BIT (walk_data->interesting_blocks,
					bb->index);

	  /* Callback for operations to execute after we have walked the
	     dominator children, but before we walk statements.  */
	  if (walk_data->after_dom_children_before_stmts)
	    (*walk_data->after_dom_children_before_stmts) (walk_data, bb);

	  /* Statement walk after walking dominator children.  */
	  if (is_interesting && walk_data->after_dom_children_walk_stmts)
	    {
	      if (walk_data->walk_stmts_backward)
		for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
		  (*walk_data->after_dom_children_walk_stmts) (walk_data,
							       bb, bsi);
	      else
		for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
		  (*walk_data->after_dom_children_walk_stmts) (walk_data,
							       bb, bsi);
	    }

	  /* Callback for operations to execute after we have walked the
	     dominator children and after we have walked statements.  */
	  if (walk_data->after_dom_children_after_stmts)
	    (*walk_data->after_dom_children_after_stmts) (walk_data, bb);

	  if (walk_data->initialize_block_local_data)
	    {
	      /* And finally pop the record off the block local data
		 stack.  */
	      bd = VEC_pop (void_p, walk_data->block_data_stack);

	      /* And save the block data so that we can re-use it.  */
	      VEC_safe_push (void_p, heap, walk_data->free_block_data, bd);
	    }
	}

      if (sp)
	bb = worklist[--sp];
      else
	break;
    }

  free (worklist);
}
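/* Illustrative only: the NULL-sentinel worklist trick used by
   walk_dominator_tree above, shown on a plain n-ary tree.  Pushing a node
   followed by NULL lets a single loop run a pre-order callback on the way
   down and a post-order callback once the sentinel surfaces, with no
   recursion.  Types, the fixed worklist bound, and the callbacks are
   assumptions for the sketch.  */
#include <stddef.h>

struct tnode { struct tnode *child[4]; int nchildren; int id; };

static void
walk_sketch (struct tnode *root,
	     void (*before) (struct tnode *),
	     void (*after) (struct tnode *))
{
  struct tnode *worklist[256];
  int sp = 0, i;
  struct tnode *bb = root;

  while (1)
    {
      before (bb);
      worklist[sp++] = bb;	/* Schedule the post-order callback...  */
      worklist[sp++] = NULL;	/* ...behind a sentinel.  */
      for (i = 0; i < bb->nchildren; i++)
	worklist[sp++] = bb->child[i];

      /* A NULL on top means a subtree is finished: pop the sentinel and
	 the node beneath it, then run the post-order hook.  */
      while (sp > 0 && !worklist[sp - 1])
	{
	  --sp;
	  after (worklist[--sp]);
	}

      if (sp)
	bb = worklist[--sp];
      else
	break;
    }
}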
static int
btrace_stitch_bts (struct btrace_data_bts *btrace, struct thread_info *tp)
{
  struct btrace_thread_info *btinfo;
  struct btrace_function *last_bfun;
  struct btrace_insn *last_insn;
  btrace_block_s *first_new_block;

  btinfo = &tp->btrace;
  last_bfun = btinfo->end;
  gdb_assert (last_bfun != NULL);
  gdb_assert (!VEC_empty (btrace_block_s, btrace->blocks));

  /* If the existing trace ends with a gap, we just glue the traces
     together.  We need to drop the last (i.e. chronologically first) block
     of the new trace, though, since we can't fill in the start address.  */
  if (VEC_empty (btrace_insn_s, last_bfun->insn))
    {
      VEC_pop (btrace_block_s, btrace->blocks);
      return 0;
    }

  /* Beware that block trace starts with the most recent block, so the
     chronologically first block in the new trace is the last block in
     the new trace's block vector.  */
  first_new_block = VEC_last (btrace_block_s, btrace->blocks);
  last_insn = VEC_last (btrace_insn_s, last_bfun->insn);

  /* If the current PC at the end of the block is the same as in our
     current trace, there are two explanations:
       1. we executed the instruction and some branch brought us back.
       2. we have not made any progress.
     In the first case, the delta trace vector should contain at least two
     entries.
     In the second case, the delta trace vector should contain exactly one
     entry for the partial block containing the current PC.  Remove it.  */
  if (first_new_block->end == last_insn->pc
      && VEC_length (btrace_block_s, btrace->blocks) == 1)
    {
      VEC_pop (btrace_block_s, btrace->blocks);
      return 0;
    }

  DEBUG ("stitching %s to %s", ftrace_print_insn_addr (last_insn),
	 core_addr_to_string_nz (first_new_block->end));

  /* Do a simple sanity check to make sure we don't accidentally end up
     with a bad block.  This should not occur in practice.  */
  if (first_new_block->end < last_insn->pc)
    {
      warning (_("Error while trying to read delta trace.  Falling back to "
		 "a full read."));
      return -1;
    }

  /* We adjust the last block to start at the end of our current trace.  */
  gdb_assert (first_new_block->begin == 0);
  first_new_block->begin = last_insn->pc;

  /* We simply pop the last insn so we can insert it again as part of the
     normal branch trace computation.
     Since instruction iterators are based on indices in the instructions
     vector, we don't leave any pointers dangling.  */
  DEBUG ("pruning insn at %s for stitching",
	 ftrace_print_insn_addr (last_insn));

  VEC_pop (btrace_insn_s, last_bfun->insn);

  /* The instructions vector may become empty temporarily if this has been
     the only instruction in this function segment.
     This violates the invariant but will be remedied shortly by
     btrace_compute_ftrace when we add the new trace.  */

  /* The only case where this would hurt is if the entire trace consisted
     of just that one instruction.  If we remove it, we might turn the now
     empty btrace function segment into a gap.  But we don't want gaps at
     the beginning.  To avoid this, we remove the entire old trace.  */
  if (last_bfun == btinfo->begin
      && VEC_empty (btrace_insn_s, last_bfun->insn))
    btrace_clear (tp);

  return 0;
}
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references
	 through.  */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a
       RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	 BLOCK, and append an IMPORTED_DECL to its BLOCK_VARS
	 chained list.  */
      if (wtd->bind_expr_stack)
	{
	  int i;
	  for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
						     wtd->bind_expr_stack,
						     i))))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
struct lto_out_decl_state *
lto_pop_out_decl_state (void)
{
  return VEC_pop (lto_out_decl_state_ptr, decl_state_stack);
}
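/* Illustrative only: lto_pop_out_decl_state above is the pop half of a
   LIFO save/restore pair.  A minimal sketch of that discipline with
   hypothetical names; the real stack is GCC's VEC of
   lto_out_decl_state_ptr.  */
#define SKETCH_MAX_DEPTH 32

static void *state_stack_sketch[SKETCH_MAX_DEPTH];
static int state_depth_sketch;

static void
push_state_sketch (void *state)
{
  /* Enter a nested context.  */
  state_stack_sketch[state_depth_sketch++] = state;
}

static void *
pop_state_sketch (void)
{
  /* Leave it again, restoring the enclosing context.  */
  return state_stack_sketch[--state_depth_sketch];
}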
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (TREE_CODE (stmt) == VAR_DECL
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references
	 through.  */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	  && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a
       RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	else if (wtd->omp_ctx != NULL)
	  {
	    /* Private clause doesn't cause any references to the var in
	       outer contexts, avoid calling omp_cxx_notice_variable
	       for it.  */
	    struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	    wtd->omp_ctx = NULL;
	    cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			  data, NULL);
	    wtd->omp_ctx = old;
	    *walk_subtrees = 0;
	  }
	break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (TREE_CODE (decl) == VAR_DECL
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	 BLOCK, and append an IMPORTED_DECL to its BLOCK_VARS
	 chained list.  */
      if (wtd->bind_expr_stack)
	{
	  int i;
	  for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
						     wtd->bind_expr_stack,
						     i))))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_LASTPRIVATE:
	    decl = OMP_CLAUSE_DECL (c);
	    if (decl == error_mark_node || !omp_var_to_track (decl))
	      break;
	    n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	    if (n != NULL)
	      break;
	    splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			       ? OMP_CLAUSE_DEFAULT_SHARED
			       : OMP_CLAUSE_DEFAULT_PRIVATE);
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
	      omp_cxx_notice_variable (omp_ctx.outer, decl);
	    break;
	  case OMP_CLAUSE_DEFAULT:
	    if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
	      omp_ctx.default_shared = true;
	    /* Fall through.  */
	  default:
	    break;
	  }
      cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
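/* Illustrative only: the enter/walk/leave pattern both cp_genericize_r
   variants above use for BIND_EXPRs -- push the scope, recurse into the
   body, pop on the way out, so the USING_STMT handling can search the
   enclosing scopes.  All names here are assumptions for the sketch, not
   GCC API.  */
#include <stddef.h>

struct scope { struct scope *vars; };

static struct scope *scope_stack_sketch[64];
static int scope_depth_sketch;

static void
walk_bind_sketch (struct scope *s, void (*walk_body) (struct scope *))
{
  scope_stack_sketch[scope_depth_sketch++] = s;	/* Enter the scope.  */
  walk_body (s);		/* Children see it via the stack.  */
  scope_depth_sketch--;		/* Leave on the way back out.  */
}

/* Innermost enclosing scope with a non-NULL vars chain, analogous to
   the BIND_EXPR_BLOCK search in the USING_STMT case above.  */
static struct scope *
innermost_scope_with_vars_sketch (void)
{
  int i;

  for (i = scope_depth_sketch - 1; i >= 0; i--)
    if (scope_stack_sketch[i]->vars)
      return scope_stack_sketch[i];
  return NULL;
}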