/* Unlink STMT's virtual definition from the virtual use/def chain so the
   statement can be removed without leaving dangling virtual operands:
   every immediate use of STMT's VDEF is redirected to STMT's VUSE.
   This is a no-op when STMT has no VDEF or the VDEF is not (yet) an
   SSA name.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);
  /* Cache the VUSE as well; it is invariant over the loop below, so do
     not recompute it for every redirected use.  */
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  /* Walk all statements that use VDEF and replace each such use with
     STMT's incoming virtual operand.  */
  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }
}
/* Finalize the virtual DEF operand of STMT in function FN against the
   VDEF collected by the operand scanner in BUILD_VDEF: install a newly
   built VDEF, or clear and release one that is no longer needed, and
   request renaming for a VDEF that is not in SSA form yet.  */

static inline void
finalize_ssa_defs (struct function *fn, gimple *stmt)
{
  if (build_vdef != NULL_TREE)
    {
      /* Pre-pend the vdef we may have built.  Compare against the
	 underlying variable of an existing SSA-name VDEF so we do not
	 needlessly replace an up-to-date operand.  */
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }
  else if (gimple_vdef (stmt) != NULL_TREE)
    {
      /* Clear and unlink a no longer necessary VDEF.  */
      tree stale = gimple_vdef (stmt);
      if (TREE_CODE (stale) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name_fn (fn, stale);
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) != SSA_NAME)
    {
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }
}
/* Return true if CALL, a call to an errno-setting math built-in, can be
   replaced with a shrink-wrapped call to its errno-free internal
   function equivalent.  */

static bool
can_use_internal_fn (gcall *call)
{
  /* Only replace calls that set errno.  */
  if (gimple_vdef (call) == NULL_TREE)
    return false;

  /* See whether there is an internal function for this built-in.  */
  if (replacement_internal_fn (call) == IFN_LAST)
    return false;

  /* See whether we can catch all cases where errno would be set,
     while still avoiding the call in most cases.  */
  return can_test_argument_range (call) || edom_only_function (call);
}
/* Walk the virtual use/def chains downward from STMT looking for a
   dominated store that clobbers the memory STMT stores to, with no
   intermediate statement that may read it.  If such a kill is found,
   return true and set *USE_STMT to it; otherwise return false.

   NOTE(review): the original minified text ended after the "if (!temp)"
   block — the do-loop's while-condition, the *USE_STMT assignment and
   the return were truncated.  The tail below is restored to match the
   upstream GCC tree-ssa-dse.c form of this function; confirm against
   the project's actual revision.  */

static bool
dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
{
  gimple temp;
  unsigned cnt = 0;

  *use_stmt = NULL;

  /* Find the first dominated statement that clobbers (part of) the
     memory stmt stores to with no intermediate statement that may use
     part of the memory stmt stores.  That is, find a store that may
     prove stmt to be a dead store.  */
  temp = stmt;
  do
    {
      gimple use_stmt;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      /* Limit stmt walking to be linear in the number of possibly
	 dead stores.  */
      if (++cnt > 256)
	return false;

      if (gimple_code (temp) == GIMPLE_PHI)
	defvar = PHI_RESULT (temp);
      else
	defvar = gimple_vdef (temp);
      temp = NULL;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
	{
	  cnt++;

	  /* If we ever reach our DSE candidate stmt again fail.  We
	     cannot handle dead stores in loops.  */
	  if (use_stmt == stmt)
	    {
	      fail = true;
	      BREAK_FROM_IMM_USE_STMT (ui);
	    }
	  /* In simple cases we can look through PHI nodes, but we
	     have to be careful with loops and with memory references
	     containing operands that are also operands of PHI nodes.
	     See gcc.c-torture/execute/20051110-*.c.  */
	  else if (gimple_code (use_stmt) == GIMPLE_PHI)
	    {
	      if (temp
		  /* Make sure we are not in a loop latch block.  */
		  || gimple_bb (stmt) == gimple_bb (use_stmt)
		  || dominated_by_p (CDI_DOMINATORS,
				     gimple_bb (stmt), gimple_bb (use_stmt))
		  /* We can look through PHIs to regions post-dominating
		     the DSE candidate stmt.  */
		  || !dominated_by_p (CDI_POST_DOMINATORS,
				      gimple_bb (stmt), gimple_bb (use_stmt)))
		{
		  fail = true;
		  BREAK_FROM_IMM_USE_STMT (ui);
		}
	      temp = use_stmt;
	    }
	  /* If the statement is a use the store is not dead.  */
	  else if (ref_maybe_used_by_stmt_p (use_stmt,
					     gimple_assign_lhs (stmt)))
	    {
	      fail = true;
	      BREAK_FROM_IMM_USE_STMT (ui);
	    }
	  /* If this is a store, remember it or bail out if we have
	     multiple ones (the will be in different CFG parts then).  */
	  else if (gimple_vdef (use_stmt))
	    {
	      if (temp)
		{
		  fail = true;
		  BREAK_FROM_IMM_USE_STMT (ui);
		}
	      temp = use_stmt;
	    }
	}

      if (fail)
	return false;

      /* If we didn't find any definition this means the store is dead
	 if it isn't a store to global reachable memory.  In this case
	 just pretend the stmt makes itself dead.  Otherwise fail.  */
      if (!temp)
	{
	  if (is_hidden_global_store (stmt))
	    return false;
	  temp = stmt;
	  break;
	}
    }
  /* Continue walking until we reach a kill.  */
  while (!stmt_kills_ref_p (temp, gimple_assign_lhs (stmt)));

  *use_stmt = temp;

  return true;
}
/* Fold IFN_GOMP_SIMD_{LANE,VF,LAST_LANE} internal calls to constants
   (using the simduid -> vf mapping in HTAB, when given), and lower or
   delete IFN_GOMP_SIMD_ORDERED_{START,END} markers.

   Fix: the original (minified) text was missing the function's closing
   brace; it is restored here.  Behavior is otherwise unchanged.  */

static void
adjust_simduid_builtins (hash_table<simduid_to_vf> *htab)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  unsigned int vf = 1;
	  enum internal_fn ifn;
	  gimple *stmt = gsi_stmt (i);
	  tree t;
	  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
	    {
	      gsi_next (&i);
	      continue;
	    }
	  ifn = gimple_call_internal_fn (stmt);
	  switch (ifn)
	    {
	    case IFN_GOMP_SIMD_LANE:
	    case IFN_GOMP_SIMD_VF:
	    case IFN_GOMP_SIMD_LAST_LANE:
	      /* Handled after the switch.  */
	      break;
	    case IFN_GOMP_SIMD_ORDERED_START:
	    case IFN_GOMP_SIMD_ORDERED_END:
	      if (integer_onep (gimple_call_arg (stmt, 0)))
		{
		  /* Lower the marker to a real GOMP ordered built-in,
		     transferring the virtual operands.  */
		  enum built_in_function bcode
		    = (ifn == IFN_GOMP_SIMD_ORDERED_START
		       ? BUILT_IN_GOMP_ORDERED_START
		       : BUILT_IN_GOMP_ORDERED_END);
		  gimple *g
		    = gimple_build_call (builtin_decl_explicit (bcode), 0);
		  tree vdef = gimple_vdef (stmt);
		  gimple_set_vdef (g, vdef);
		  SSA_NAME_DEF_STMT (vdef) = g;
		  gimple_set_vuse (g, gimple_vuse (stmt));
		  gsi_replace (&i, g, true);
		  continue;
		}
	      /* Otherwise the marker is dead; drop it.  */
	      gsi_remove (&i, true);
	      unlink_stmt_vdef (stmt);
	      continue;
	    default:
	      gsi_next (&i);
	      continue;
	    }
	  tree arg = gimple_call_arg (stmt, 0);
	  gcc_assert (arg != NULL_TREE);
	  gcc_assert (TREE_CODE (arg) == SSA_NAME);
	  simduid_to_vf *p = NULL, data;
	  data.simduid = DECL_UID (SSA_NAME_VAR (arg));
	  /* Need to nullify loop safelen field since it's value is not
	     valid after transformation.  */
	  if (bb->loop_father && bb->loop_father->safelen > 0)
	    bb->loop_father->safelen = 0;
	  if (htab)
	    {
	      p = htab->find (&data);
	      if (p)
		vf = p->vf;
	    }
	  switch (ifn)
	    {
	    case IFN_GOMP_SIMD_VF:
	      t = build_int_cst (unsigned_type_node, vf);
	      break;
	    case IFN_GOMP_SIMD_LANE:
	      t = build_int_cst (unsigned_type_node, 0);
	      break;
	    case IFN_GOMP_SIMD_LAST_LANE:
	      t = gimple_call_arg (stmt, 1);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  update_call_from_tree (&i, t);
	  gsi_next (&i);
	}
    }
}
/* Replace CALL, a call to an errno-setting math built-in, with a
   shrink-wrapped call to the equivalent internal function: the cheap
   internal call handles the common case and the original call (or an
   IFN_SET_EDOM) runs only under the generated guard conditions.  */

static void
use_internal_fn (gcall *call)
{
  /* We'll be inserting another call with the same arguments after the
     lhs has been set, so prevent any possible coalescing failure from
     having both values live at once.  See PR 71020.  */
  replace_abnormal_ssa_names (call);

  /* Build the argument-range guards, if the built-in supports them.  */
  unsigned nconds = 0;
  auto_vec<gimple *, 12> conds;
  if (can_test_argument_range (call))
    {
      gen_shrink_wrap_conditions (call, conds, &nconds);
      gcc_assert (nconds != 0);
    }
  else
    gcc_assert (edom_only_function (call));

  internal_fn ifn = replacement_internal_fn (call);
  gcc_assert (ifn != IFN_LAST);

  /* Construct the new call, with the same arguments as the original one.  */
  unsigned int nargs = gimple_call_num_args (call);
  auto_vec <tree, 16> arg_vec;
  for (unsigned int argno = 0; argno < nargs; ++argno)
    arg_vec.safe_push (gimple_call_arg (call, argno));
  gcall *ifn_call = gimple_build_call_internal_vec (ifn, arg_vec);
  gimple_set_location (ifn_call, gimple_location (call));
  gimple_call_set_nothrow (ifn_call, gimple_call_nothrow_p (call));

  /* Transfer the LHS from the original call to the new one.  */
  tree result = gimple_call_lhs (call);
  gimple_call_set_lhs (ifn_call, result);
  gimple_call_set_lhs (call, NULL_TREE);
  SSA_NAME_DEF_STMT (result) = ifn_call;

  /* Insert the new call ahead of the original one.  */
  gimple_stmt_iterator pos = gsi_for_stmt (call);
  gsi_insert_before (&pos, ifn_call, GSI_SAME_STMT);

  if (nconds == 0)
    {
      /* Skip the call if LHS == LHS.  If we reach here, EDOM is the only
	 valid errno value and it is used iff the result is NaN.  */
      conds.quick_push (gimple_build_cond (EQ_EXPR, result, result,
					   NULL_TREE, NULL_TREE));
      nconds++;

      /* Try replacing the original call with a direct assignment to
	 errno, via an internal function.  */
      if (set_edom_supported_p () && !stmt_ends_bb_p (call))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (call);
	  gcall *edom_call = gimple_build_call_internal (IFN_SET_EDOM, 0);
	  gimple_set_vuse (edom_call, gimple_vuse (call));
	  gimple_set_vdef (edom_call, gimple_vdef (call));
	  SSA_NAME_DEF_STMT (gimple_vdef (edom_call)) = edom_call;
	  gimple_set_location (edom_call, gimple_location (call));
	  gsi_replace (&gsi, edom_call, false);
	  call = edom_call;
	}
    }

  shrink_wrap_one_built_in_call_with_conds (call, conds, nconds);
}
/* Take the DEFs collected by the operand scanner in BUILD_DEFS (and the
   VDEF in BUILD_VDEF) and turn them into the def-operand list of STMT,
   recycling the old operand nodes onto the per-function free list.
   NOTE(review): BUILD_DEFS/BUILD_VDEF are file-scope scanner state set
   up elsewhere in this file — confirm against the operand-scan entry
   points.  */
static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment. */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  /* Pre-pend the vdef we may have built. */
  if (build_vdef != NULL_TREE)
    {
      /* Compare against the underlying variable of an SSA-name VDEF so
	 an already-correct operand is left alone.  */
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
      /* The VDEF slot itself joins the def list, at position 0.  */
      VEC_safe_insert (tree, heap, build_defs, 0,
		       (tree)gimple_vdef_ptr (stmt));
      ++num;
    }

  /* NEW_LIST is a dummy head node; LAST tracks the tail as operand
     nodes are appended below.  */
  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Clear and unlink a no longer necessary VDEF. */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  /* Unlink before releasing so no dangling immediate uses of
	     the released name remain.  */
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming. */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    mark_sym_for_renaming (gimple_vdef (stmt));

  /* Check for the common case of 1 def that hasn't changed. */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it. */
  if (old_ops)
    {
      /* "Freeing" pushes the whole chain onto the free list for reuse.  */
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it. */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands. */
  gimple_set_def_ops (stmt, new_list.next);
}