/* Attach DEF as the PHI argument flowing in over edge E.  PHI must
   already live in E->dest and have been resized to hold the argument
   (PHI nodes are grown when edges are created).  */

void
add_phi_arg (gimple phi, tree def, edge e)
{
  basic_block dest = e->dest;

  gcc_assert (dest == gimple_bb (phi));

  /* PHI nodes are resized upon edge creation, so there must already
     be room for this argument, and E's destination index must be a
     valid argument slot.  */
  gcc_assert (gimple_phi_num_args (phi) <= gimple_phi_capacity (phi));
  gcc_assert (e->dest_idx < gimple_phi_num_args (phi));

  /* Copy propagation needs to know what objects occur in abnormal
     PHI nodes.  This is a convenient place to record such
     information.  */
  if ((e->flags & EDGE_ABNORMAL) != 0)
    {
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def) = 1;
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
    }

  SET_PHI_ARG_DEF (phi, e->dest_idx, def);
}
/* If PHI is degenerate — ignoring arguments equal to its own result,
   all remaining arguments are the same value — return that common
   value.  Otherwise return NULL.  */

tree
degenerate_phi_result (gimple phi)
{
  tree lhs = gimple_phi_result (phi);
  tree common = NULL;
  size_t ix, nargs = gimple_phi_num_args (phi);

  for (ix = 0; ix < nargs; ix++)
    {
      tree arg = gimple_phi_arg_def (phi, ix);

      /* Self-referencing arguments never disqualify the PHI.  */
      if (arg == lhs)
	continue;

      /* A missing argument means the PHI is not degenerate.  */
      if (!arg)
	break;

      /* Remember the first candidate value we see.  */
      if (!common)
	{
	  common = arg;
	  continue;
	}

      if (arg == common)
	continue;

      /* We bring in some of operand_equal_p not only to speed things
	 up, but also to avoid crashing when dereferencing the type of
	 a released SSA name.  */
      if (TREE_CODE (common) != TREE_CODE (arg)
	  || TREE_CODE (common) == SSA_NAME
	  || !operand_equal_p (arg, common, 0))
	break;
    }

  /* Only report a value if every argument was examined.  */
  return ix == nargs ? common : NULL;
}
/* Propagate away all single-argument PHI nodes in basic block B:
   every use of a PHI result is replaced by the PHI's sole argument,
   and the PHI itself is then removed.  */

static void
prop_phis (basic_block b)
{
  gimple_stmt_iterator psi;
  gimple_seq phis = phi_nodes (b);

  /* No gsi_next here: remove_phi_node advances PSI for us.  */
  for (psi = gsi_start (phis); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);

      /* Callers must guarantee only degenerate (single-arg) PHIs here.  */
      gcc_assert (gimple_phi_num_args (phi) == 1);

      if (!is_gimple_reg (def))
	{
	  /* Non-register (virtual) results: rewrite each immediate use
	     in place instead of going through replace_uses_by.  */
	  imm_use_iterator iter;
	  use_operand_p use_p;
	  gimple stmt;

	  FOR_EACH_IMM_USE_STMT (stmt, iter, def)
	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	      SET_USE (use_p, use);
	}
      else
	replace_uses_by (def, use);

      /* Release the PHI; this also moves PSI to the next node.  */
      remove_phi_node (&psi, true);
    }
}
static void remove_duplicate_close_phi (gphi *phi, gphi_iterator *gsi) { gimple *use_stmt; use_operand_p use_p; imm_use_iterator imm_iter; tree res = gimple_phi_result (phi); tree def = gimple_phi_result (gsi->phi ()); gcc_assert (same_close_phi_node (phi, gsi->phi ())); FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, def) { FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter) SET_USE (use_p, res); update_stmt (use_stmt); /* It is possible that we just created a duplicate close-phi for an already-processed containing loop. Check for this case and clean it up. */ if (gimple_code (use_stmt) == GIMPLE_PHI && gimple_phi_num_args (use_stmt) == 1) make_close_phi_nodes_unique (gimple_bb (use_stmt)); }
/* Remove the Ith argument from PHI's argument list by overwriting it
   with the last argument and shrinking the count.  NOTE: argument
   order is therefore not preserved; the caller must not rely on it.
   NOTE(review): the argument's source location slot is not moved
   along with the def — presumably handled (or unused) by callers;
   confirm against the matching edge-removal code.  */

static void
remove_phi_arg_num (gimple phi, int i)
{
  int num_elem = gimple_phi_num_args (phi);

  gcc_assert (i < num_elem);

  /* Delink the item which is being removed.  */
  delink_imm_use (gimple_phi_arg_imm_use_ptr (phi, i));

  /* If it is not the last element, move the last element
     to the element we want to delete, resetting all the links.  */
  if (i != num_elem - 1)
    {
      use_operand_p old_p, new_p;

      old_p = gimple_phi_arg_imm_use_ptr (phi, num_elem - 1);
      new_p = gimple_phi_arg_imm_use_ptr (phi, i);
      /* Set use on new node, and link into last element's place.  */
      *(new_p->use) = *(old_p->use);
      relink_imm_use (new_p, old_p);
    }

  /* Shrink the vector and return.  Note that we do not have to clear
     PHI_ARG_DEF because the garbage collector will not look at those
     elements beyond the first PHI_NUM_ARGS elements of the array.  */
  phi->gimple_phi.nargs--;
}
/* Reallocate PHI so it has room for LEN arguments, returning the new
   node.  LEN must exceed the current capacity.  The old node's
   contents are copied over and every immediate-use link is retargeted
   at the new node's operand slots; the caller is responsible for
   replacing PHI with the returned node in the IL.  */

static gimple
resize_phi_node (gimple phi, size_t len)
{
  size_t old_size, i;
  gimple new_phi;

  gcc_assert (len > gimple_phi_capacity (phi));

  /* The garbage collector will not look at the PHI node beyond the
     first PHI_NUM_ARGS elements.  Therefore, all we have to copy is a
     portion of the PHI node currently in use.  */
  old_size = sizeof (struct gimple_statement_phi)
	     + (gimple_phi_num_args (phi) - 1) * sizeof (struct phi_arg_d);

  new_phi = allocate_phi_node (len);

  memcpy (new_phi, phi, old_size);

  /* The memcpy'd use operands still sit in the global imm-use chains
     as the OLD node's entries; point each copied slot's USE at the new
     node's def slot and splice it into the chain in the old entry's
     place.  */
  for (i = 0; i < gimple_phi_num_args (new_phi); i++)
    {
      use_operand_p imm, old_imm;

      imm = gimple_phi_arg_imm_use_ptr (new_phi, i);
      old_imm = gimple_phi_arg_imm_use_ptr (phi, i);
      imm->use = gimple_phi_arg_def_ptr (new_phi, i);
      relink_imm_use_stmt (imm, old_imm, new_phi);
    }

  new_phi->gimple_phi.capacity = len;

  /* Freshly grown slots get no location and empty, self-consistent
     imm-use records so they can be filled in later.  */
  for (i = gimple_phi_num_args (new_phi); i < len; i++)
    {
      use_operand_p imm;

      gimple_phi_arg_set_location (new_phi, i, UNKNOWN_LOCATION);
      imm = gimple_phi_arg_imm_use_ptr (new_phi, i);
      imm->use = gimple_phi_arg_def_ptr (new_phi, i);
      imm->prev = NULL;
      imm->next = NULL;
      imm->loc.stmt = new_phi;
    }

  return new_phi;
}
static void output_phi (struct output_block *ob, gimple phi) { unsigned i, len = gimple_phi_num_args (phi); streamer_write_record_start (ob, lto_gimple_code_to_tag (GIMPLE_PHI)); streamer_write_uhwi (ob, SSA_NAME_VERSION (PHI_RESULT (phi))); for (i = 0; i < len; i++) { stream_write_tree (ob, gimple_phi_arg_def (phi, i), true); streamer_write_uhwi (ob, gimple_phi_arg_edge (phi, i)->src->index); lto_output_location (ob, gimple_phi_arg_location (phi, i)); } }
/* Return the source block of the single edge through which DEF
   reaches PHI.  If DEF appears as more than one argument (or none),
   return NULL.  */

static basic_block
find_bb_for_arg (gphi *phi, tree def)
{
  bool seen = false;
  basic_block found = NULL;
  size_t nargs = gimple_phi_num_args (phi);

  for (size_t ix = 0; ix < nargs; ix++)
    if (PHI_ARG_DEF (phi, ix) == def)
      {
	/* A second occurrence makes the answer ambiguous.  */
	if (seen)
	  return NULL;
	seen = true;
	found = gimple_phi_arg_edge (phi, ix)->src;
      }

  return found;
}
/* Check that every PHI argument in the switch conversion's final
   block that flows in from the switch block (or a block directly fed
   by it) is an interprocedural invariant usable as a static
   initializer.  Counts the PHIs into info.phi_count.  On failure
   records a human-readable reason in info.reason and returns false.  */

static bool
check_final_bb (void)
{
  gimple_stmt_iterator gsi;

  info.phi_count = 0;
  for (gsi = gsi_start_phis (info.final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      unsigned int i;

      info.phi_count++;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
	{
	  basic_block bb = gimple_phi_arg_edge (phi, i)->src;

	  /* Only arguments coming from the switch itself (directly or
	     through a single forwarder) are converted; others are left
	     alone.  */
	  if (bb == info.switch_bb
	      || (single_pred_p (bb) && single_pred (bb) == info.switch_bb))
	    {
	      tree reloc, val;

	      val = gimple_phi_arg_def (phi, i);
	      if (!is_gimple_ip_invariant (val))
		{
		  info.reason = " Non-invariant value from a case\n";
		  return false; /* Non-invariant argument.  */
		}

	      /* The value must also be emittable as a static
		 initializer; under -fpic it additionally must not need
		 runtime relocations.  */
	      reloc = initializer_constant_valid_p (val, TREE_TYPE (val));
	      if ((flag_pic && reloc != null_pointer_node)
		  || (!flag_pic && reloc == NULL_TREE))
		{
		  if (reloc)
		    info.reason
		      = " Value from a case would need runtime relocations\n";
		  else
		    info.reason
		      = " Value from a case is not a valid initializer\n";
		  return false;
		}
	    }
	}
    }
  return true;
}
/* Set BB's profile count from the AutoFDO source profile: the count
   becomes the maximum over the counts of BB's annotated statements.
   Statements in PROMOTED have already had indirect-call promotion
   applied and are skipped for value profiling.  Returns true iff at
   least one statement in BB had profile information.  */

static bool
afdo_set_bb_count (basic_block bb, const stmt_set &promoted)
{
  gimple_stmt_iterator gsi;
  edge e;
  edge_iterator ei;
  gcov_type max_count = 0;
  bool has_annotated = false;

  /* First pass: find the maximum statement count and perform value
     profile transformations on not-yet-promoted indirect calls.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      count_info info;
      gimple *stmt = gsi_stmt (gsi);
      /* Clobbers and debug statements carry no meaningful counts.  */
      if (gimple_clobber_p (stmt) || is_gimple_debug (stmt))
	continue;
      if (afdo_source_profile->get_count_info (stmt, &info))
	{
	  if (info.count > max_count)
	    max_count = info.count;
	  has_annotated = true;
	  if (info.targets.size () > 0
	      && promoted.find (stmt) == promoted.end ())
	    afdo_vpt (&gsi, info.targets, false);
	}
    }

  if (!has_annotated)
    return false;

  /* Second pass: mark every location in BB — statements, PHI
     arguments, and outgoing-edge goto locations — as annotated so the
     propagation phase knows they were covered.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    afdo_source_profile->mark_annotated (gimple_location (gsi_stmt (gsi)));
  for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi);
       gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();
      size_t i;
      for (i = 0; i < gimple_phi_num_args (phi); i++)
	afdo_source_profile->mark_annotated (gimple_phi_arg_location (phi, i));
    }
  FOR_EACH_EDGE (e, ei, bb->succs)
    afdo_source_profile->mark_annotated (e->goto_locus);

  bb->count = profile_count::from_gcov_type (max_count).afdo ();
  return true;
}
/* Return PHI to the free list so its storage can be reused.  All of
   its argument operands are first delinked from their immediate-use
   chains so no dangling references remain.  */

void
release_phi_node (gimple phi)
{
  size_t bucket;
  size_t len = gimple_phi_capacity (phi);
  size_t x;

  for (x = 0; x < gimple_phi_num_args (phi); x++)
    {
      use_operand_p imm;

      imm = gimple_phi_arg_imm_use_ptr (phi, x);
      delink_imm_use (imm);
    }

  /* Free-list buckets are indexed by capacity minus 2, with every
     capacity >= NUM_BUCKETS - 1 sharing the last bucket.
     NOTE(review): this assumes the allocator never produces a PHI
     with capacity < 2 — otherwise the size_t subtraction wraps and
     indexes out of bounds; confirm against allocate_phi_node.  */
  bucket = len > NUM_BUCKETS - 1 ? NUM_BUCKETS - 1 : len;
  bucket -= 2;
  VEC_safe_push (gimple, gc, free_phinodes[bucket], phi);
  free_phinode_count++;
}
DEBUG_FUNCTION void verify_ssaname_freelists (struct function *fun) { if (!gimple_in_ssa_p (fun)) return; bitmap names_in_il = BITMAP_ALLOC (NULL); /* Walk the entire IL noting every SSA_NAME we see. */ basic_block bb; FOR_EACH_BB_FN (bb, fun) { tree t; /* First note the result and arguments of PHI nodes. */ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) { gphi *phi = gsi.phi (); t = gimple_phi_result (phi); bitmap_set_bit (names_in_il, SSA_NAME_VERSION (t)); for (unsigned int i = 0; i < gimple_phi_num_args (phi); i++) { t = gimple_phi_arg_def (phi, i); if (TREE_CODE (t) == SSA_NAME) bitmap_set_bit (names_in_il, SSA_NAME_VERSION (t)); } } /* Then note the operands of each statement. */ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) { ssa_op_iter iter; gimple *stmt = gsi_stmt (gsi); FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS) bitmap_set_bit (names_in_il, SSA_NAME_VERSION (t)); } }
static bool factor_out_conditional_conversion (edge e0, edge e1, gphi *phi, tree arg0, tree arg1) { gimple arg0_def_stmt = NULL, arg1_def_stmt = NULL, new_stmt; tree new_arg0 = NULL_TREE, new_arg1 = NULL_TREE; tree temp, result; gphi *newphi; gimple_stmt_iterator gsi, gsi_for_def; source_location locus = gimple_location (phi); enum tree_code convert_code; /* Handle only PHI statements with two arguments. TODO: If all other arguments to PHI are INTEGER_CST or if their defining statement have the same unary operation, we can handle more than two arguments too. */ if (gimple_phi_num_args (phi) != 2) return false; /* First canonicalize to simplify tests. */ if (TREE_CODE (arg0) != SSA_NAME) { std::swap (arg0, arg1); std::swap (e0, e1); } if (TREE_CODE (arg0) != SSA_NAME || (TREE_CODE (arg1) != SSA_NAME && TREE_CODE (arg1) != INTEGER_CST)) return false; /* Check if arg0 is an SSA_NAME and the stmt which defines arg0 is a conversion. */ arg0_def_stmt = SSA_NAME_DEF_STMT (arg0); if (!is_gimple_assign (arg0_def_stmt) || !gimple_assign_cast_p (arg0_def_stmt)) return false; /* Use the RHS as new_arg0. */ convert_code = gimple_assign_rhs_code (arg0_def_stmt); new_arg0 = gimple_assign_rhs1 (arg0_def_stmt); if (convert_code == VIEW_CONVERT_EXPR) new_arg0 = TREE_OPERAND (new_arg0, 0); if (TREE_CODE (arg1) == SSA_NAME) { /* Check if arg1 is an SSA_NAME and the stmt which defines arg1 is a conversion. */ arg1_def_stmt = SSA_NAME_DEF_STMT (arg1); if (!is_gimple_assign (arg1_def_stmt) || gimple_assign_rhs_code (arg1_def_stmt) != convert_code) return false; /* Use the RHS as new_arg1. */ new_arg1 = gimple_assign_rhs1 (arg1_def_stmt); if (convert_code == VIEW_CONVERT_EXPR) new_arg1 = TREE_OPERAND (new_arg1, 0); } else { /* If arg1 is an INTEGER_CST, fold it to new type. 
*/ if (INTEGRAL_TYPE_P (TREE_TYPE (new_arg0)) && int_fits_type_p (arg1, TREE_TYPE (new_arg0))) { if (gimple_assign_cast_p (arg0_def_stmt)) new_arg1 = fold_convert (TREE_TYPE (new_arg0), arg1); else return false; } else return false; } /* If arg0/arg1 have > 1 use, then this transformation actually increases the number of expressions evaluated at runtime. */ if (!has_single_use (arg0) || (arg1_def_stmt && !has_single_use (arg1))) return false; /* If types of new_arg0 and new_arg1 are different bailout. */ if (!types_compatible_p (TREE_TYPE (new_arg0), TREE_TYPE (new_arg1))) return false; /* Create a new PHI stmt. */ result = PHI_RESULT (phi); temp = make_ssa_name (TREE_TYPE (new_arg0), NULL); newphi = create_phi_node (temp, gimple_bb (phi)); if (dump_file && (dump_flags & TDF_DETAILS)) { fprintf (dump_file, "PHI "); print_generic_expr (dump_file, gimple_phi_result (phi), 0); fprintf (dump_file, " changed to factor conversion out from COND_EXPR.\n"); fprintf (dump_file, "New stmt with CAST that defines "); print_generic_expr (dump_file, result, 0); fprintf (dump_file, ".\n"); } /* Remove the old cast(s) that has single use. */ gsi_for_def = gsi_for_stmt (arg0_def_stmt); gsi_remove (&gsi_for_def, true); if (arg1_def_stmt) { gsi_for_def = gsi_for_s
/* Coalesce SSA partitions induced by copies: scan every block for
   SSA-name copy assignments and for PHI nodes (treated as copies
   between result and arguments), union the names into partitions, and
   finally give every member of a partition the same root variable.
   Returns TODO_remove_unused_locals if anything was coalesced, else 0.  */

static unsigned int
rename_ssa_copies (void)
{
  var_map map;
  basic_block bb;
  gimple_stmt_iterator gsi;
  tree var, part_var;
  gimple stmt, phi;
  unsigned x;
  FILE *debug;
  bool updated = false;

  memset (&stats, 0, sizeof (stats));

  if (dump_file && (dump_flags & TDF_DETAILS))
    debug = dump_file;
  else
    debug = NULL;

  map = init_var_map (num_ssa_names);

  FOR_EACH_BB (bb)
    {
      /* Scan for real copies.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_assign_ssa_name_copy_p (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);

	      updated |= copy_rename_partition_coalesce (map, lhs, rhs, debug);
	    }
	}
    }

  FOR_EACH_BB (bb)
    {
      /* Treat PHI nodes as copies between the result and each argument.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  size_t i;
	  tree res;

	  phi = gsi_stmt (gsi);
	  res = gimple_phi_result (phi);

	  /* Do not process virtual SSA_NAMES.  */
	  if (virtual_operand_p (res))
	    continue;

	  /* Make sure to only use the same partition for an argument
	     as the result but never the other way around.  */
	  if (SSA_NAME_VAR (res) && !DECL_IGNORED_P (SSA_NAME_VAR (res)))
	    for (i = 0; i < gimple_phi_num_args (phi); i++)
	      {
		tree arg = PHI_ARG_DEF (phi, i);
		if (TREE_CODE (arg) == SSA_NAME)
		  updated |= copy_rename_partition_coalesce (map, res, arg,
							     debug);
	      }
	  /* Else if all arguments are in the same partition try to merge
	     it with the result.  */
	  else
	    {
	      /* all_p_same: -1 = unknown yet, 1 = all args in partition
		 P so far, 0 = mismatch found (or non-SSA arg).  */
	      int all_p_same = -1;
	      int p = -1;
	      for (i = 0; i < gimple_phi_num_args (phi); i++)
		{
		  tree arg = PHI_ARG_DEF (phi, i);
		  if (TREE_CODE (arg) != SSA_NAME)
		    {
		      all_p_same = 0;
		      break;
		    }
		  else if (all_p_same == -1)
		    {
		      p = partition_find (map->var_partition,
					  SSA_NAME_VERSION (arg));
		      all_p_same = 1;
		    }
		  else if (all_p_same == 1
			   && p != partition_find (map->var_partition,
						   SSA_NAME_VERSION (arg)))
		    {
		      all_p_same = 0;
		      break;
		    }
		}
	      if (all_p_same == 1)
		updated |= copy_rename_partition_coalesce (map, res,
							   PHI_ARG_DEF (phi, 0),
							   debug);
	    }
	}
    }

  if (debug)
    dump_var_map (debug, map);

  /* Now one more pass to make all elements of a partition share the
     same root variable.  */
  for (x = 1; x < num_ssa_names; x++)
    {
      part_var = partition_to_var (map, x);
      if (!part_var)
	continue;

      var = ssa_name (x);
      if (SSA_NAME_VAR (var) == SSA_NAME_VAR (part_var))
	continue;

      if (debug)
	{
	  fprintf (debug, "Coalesced ");
	  print_generic_expr (debug, var, TDF_SLIM);
	  fprintf (debug, " to ");
	  print_generic_expr (debug, part_var, TDF_SLIM);
	  fprintf (debug, "\n");
	}
      stats.coalesced++;
      replace_ssa_name_symbol (var, SSA_NAME_VAR (part_var));
    }

  statistics_counter_event (cfun, "copies coalesced", stats.coalesced);
  delete_var_map (map);
  return updated ? TODO_remove_unused_locals : 0;
}
/* Propagator callback for PHI nodes: compute the copy-of lattice
   value for PHI's result.  If every executable incoming argument has
   the same known value, the result becomes a copy of that value;
   otherwise the result is pinned to itself (VARYING).  Returns the
   ssa_prop_result telling the propagator how to proceed.  */

static enum ssa_prop_result
copy_prop_visit_phi_node (gimple phi)
{
  enum ssa_prop_result retval;
  unsigned i;
  prop_value_t phi_val = { NULL_TREE };
  tree lhs = gimple_phi_result (phi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      prop_value_t *arg_val;
      tree arg_value;
      tree arg = gimple_phi_arg_def (phi, i);
      edge e = gimple_phi_arg_edge (phi, i);

      /* We don't care about values flowing through non-executable
	 edges.  */
      if (!(e->flags & EDGE_EXECUTABLE))
	continue;

      /* Names that flow through abnormal edges cannot be used to
	 derive copies.  */
      if (TREE_CODE (arg) == SSA_NAME && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg))
	{
	  /* Setting the value to LHS marks the PHI as not-a-copy.  */
	  phi_val.value = lhs;
	  break;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "\tArgument #%d: ", i);
	  dump_copy_of (dump_file, arg);
	  fprintf (dump_file, "\n");
	}

      if (TREE_CODE (arg) == SSA_NAME)
	{
	  arg_val = get_copy_of_val (arg);

	  /* If we didn't visit the definition of arg yet treat it as
	     UNDEFINED.  This also handles PHI arguments that are the
	     same as lhs.  We'll come here again.  */
	  if (!arg_val->value)
	    continue;

	  arg_value = arg_val->value;
	}
      else
	arg_value = valueize_val (arg);

      /* Avoid copy propagation from an inner into an outer loop.
	 Otherwise, this may move loop variant variables outside of
	 their loops and prevent coalescing opportunities.  If the
	 value was loop invariant, it will be hoisted by LICM and
	 exposed for copy propagation.
	 ???  The value will be always loop invariant.
	 In loop-closed SSA form do not copy-propagate through
	 PHI nodes in blocks with a loop exit edge predecessor.  */
      if (current_loops
	  && TREE_CODE (arg_value) == SSA_NAME
	  && (loop_depth_of_name (arg_value) > loop_depth_of_name (lhs)
	      || (loops_state_satisfies_p (LOOP_CLOSED_SSA)
		  && loop_exit_edge_p (e->src->loop_father, e))))
	{
	  phi_val.value = lhs;
	  break;
	}

      /* If the LHS didn't have a value yet, make it a copy of the
	 first argument we find.  */
      if (phi_val.value == NULL_TREE)
	{
	  phi_val.value = arg_value;
	  continue;
	}

      /* If PHI_VAL and ARG don't have a common copy-of chain, then
	 this PHI node cannot be a copy operation.  */
      if (phi_val.value != arg_value
	  && !operand_equal_p (phi_val.value, arg_value, 0))
	{
	  phi_val.value = lhs;
	  break;
	}
    }

  if (phi_val.value
      && may_propagate_copy (lhs, phi_val.value)
      && set_copy_of_val (lhs, phi_val.value))
    /* A value equal to LHS itself means VARYING (never revisit).  */
    retval = (phi_val.value != lhs) ? SSA_PROP_INTERESTING : SSA_PROP_VARYING;
  else
    retval = SSA_PROP_NOT_INTERESTING;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "PHI node ");
      dump_copy_of (dump_file, lhs);
      fprintf (dump_file, "\nTelling the propagator to ");
      if (retval == SSA_PROP_INTERESTING)
	fprintf (dump_file, "add SSA edges out of this PHI and continue.");
      else if (retval == SSA_PROP_VARYING)
	fprintf (dump_file, "add SSA edges out of this PHI and never visit again.");
      else
	fprintf (dump_file, "do nothing with SSA edges and keep iterating.");
      fprintf (dump_file, "\n\n");
    }

  return retval;
}
/* Initialize the copy-propagation lattice: allocate the copy-of
   tables and decide, for every statement and PHI node, whether the
   propagator should simulate it.  Statements/PHIs that are not
   simulated get their definitions pinned as copies of themselves.  */

static void
init_copy_prop (void)
{
  basic_block bb;

  copy_of = XCNEWVEC (prop_value_t, num_ssa_names);

  cached_last_copy_of = XCNEWVEC (tree, num_ssa_names);

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator si;
      int depth = bb->loop_depth;

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple stmt = gsi_stmt (si);
	  ssa_op_iter iter;
	  tree def;

	  /* The only statements that we care about are those that may
	     generate useful copies.  We also need to mark conditional
	     jumps so that their outgoing edges are added to the work
	     lists of the propagator.

	     Avoid copy propagation from an inner into an outer loop.
	     Otherwise, this may move loop variant variables outside of
	     their loops and prevent coalescing opportunities.  If the
	     value was loop invariant, it will be hoisted by LICM and
	     exposed for copy propagation.  */
	  if (stmt_ends_bb_p (stmt))
	    prop_set_simulate_again (stmt, true);
	  else if (stmt_may_generate_copy (stmt)
		   /* Since we are iterating over the statements in
		      BB, not the phi nodes, STMT will always be an
		      assignment.  */
		   && loop_depth_of_name (gimple_assign_rhs1 (stmt)) <= depth)
	    prop_set_simulate_again (stmt, true);
	  else
	    prop_set_simulate_again (stmt, false);

	  /* Mark all the outputs of this statement as not being the
	     copy of anything.  */
	  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
	    if (!prop_simulate_again_p (stmt))
	      set_copy_of_val (def, def);
	    else
	      cached_last_copy_of[SSA_NAME_VERSION (def)] = def;
	}

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple phi = gsi_stmt (si);
	  tree def;

	  def = gimple_phi_result (phi);
	  if (!is_gimple_reg (def)
	      /* In loop-closed SSA form do not copy-propagate through
		 PHI nodes.  Technically this is only needed for loop
		 exit PHIs, but this is difficult to query.  */
	      || (current_loops
		  && gimple_phi_num_args (phi) == 1
		  && loops_state_satisfies_p (LOOP_CLOSED_SSA)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);

	  if (!prop_simulate_again_p (phi))
	    set_copy_of_val (def, def);
	  else
	    cached_last_copy_of[SSA_NAME_VERSION (def)] = def;
	}
    }
}
/* Propagator callback for PHI nodes (store-copy-prop era variant):
   compute the copy-of value for PHI's result via get_last_copy_of
   chains.  Constants and abnormal-edge names immediately make the
   PHI not-a-copy.  Returns the ssa_prop_result directing further
   propagation.  */

static enum ssa_prop_result
copy_prop_visit_phi_node (gimple phi)
{
  enum ssa_prop_result retval;
  unsigned i;
  prop_value_t phi_val = { 0, NULL_TREE };
  tree lhs = gimple_phi_result (phi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
      fprintf (dump_file, "\n\n");
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      prop_value_t *arg_val;
      tree arg = gimple_phi_arg_def (phi, i);
      edge e = gimple_phi_arg_edge (phi, i);

      /* We don't care about values flowing through non-executable
	 edges.  */
      if (!(e->flags & EDGE_EXECUTABLE))
	continue;

      /* Constants in the argument list never generate a useful copy.
	 Similarly, names that flow through abnormal edges cannot be
	 used to derive copies.  */
      if (TREE_CODE (arg) != SSA_NAME || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg))
	{
	  /* Value equal to LHS marks the PHI as not-a-copy.  */
	  phi_val.value = lhs;
	  break;
	}

      /* Avoid copy propagation from an inner into an outer loop.
	 Otherwise, this may move loop variant variables outside of
	 their loops and prevent coalescing opportunities.  If the
	 value was loop invariant, it will be hoisted by LICM and
	 exposed for copy propagation.  Not a problem for virtual
	 operands though.  */
      if (is_gimple_reg (lhs)
	  && loop_depth_of_name (arg) > loop_depth_of_name (lhs))
	{
	  phi_val.value = lhs;
	  break;
	}

      /* If the LHS appears in the argument list, ignore it.  It is
	 irrelevant as a copy.  */
      if (arg == lhs || get_last_copy_of (arg) == lhs)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "\tArgument #%d: ", i);
	  dump_copy_of (dump_file, arg);
	  fprintf (dump_file, "\n");
	}

      arg_val = get_copy_of_val (arg);

      /* If the LHS didn't have a value yet, make it a copy of the
	 first argument we find.  Notice that while we make the LHS be
	 a copy of the argument itself, we take the memory reference
	 from the argument's value so that we can compare it to the
	 memory reference of all the other arguments.  */
      if (phi_val.value == NULL_TREE)
	{
	  phi_val.value = arg_val->value ? arg_val->value : arg;
	  continue;
	}

      /* If PHI_VAL and ARG don't have a common copy-of chain, then
	 this PHI node cannot be a copy operation.  Also, if we are
	 copy propagating stores and these two arguments came from
	 different memory references, they cannot be considered
	 copies.  */
      if (get_last_copy_of (phi_val.value) != get_last_copy_of (arg))
	{
	  phi_val.value = lhs;
	  break;
	}
    }

  if (phi_val.value && may_propagate_copy (lhs, phi_val.value)
      && set_copy_of_val (lhs, phi_val.value))
    retval = (phi_val.value != lhs) ? SSA_PROP_INTERESTING : SSA_PROP_VARYING;
  else
    retval = SSA_PROP_NOT_INTERESTING;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nPHI node ");
      dump_copy_of (dump_file, lhs);
      fprintf (dump_file, "\nTelling the propagator to ");
      if (retval == SSA_PROP_INTERESTING)
	fprintf (dump_file, "add SSA edges out of this PHI and continue.");
      else if (retval == SSA_PROP_VARYING)
	fprintf (dump_file, "add SSA edges out of this PHI and never visit again.");
      else
	fprintf (dump_file, "do nothing with SSA edges and keep iterating.");
      fprintf (dump_file, "\n\n");
    }

  return retval;
}
/* Look for PHI nodes which feed statements in the same block where the value of the PHI node implies the statement is erroneous. For example, a NULL PHI arg value which then feeds a pointer dereference. When found isolate and optimize the path associated with the PHI argument feeding the erroneous statement. */ static void find_implicit_erroneous_behaviour (void) { basic_block bb; FOR_EACH_BB_FN (bb, cfun) { gimple_stmt_iterator si; /* Out of an abundance of caution, do not isolate paths to a block where the block has any abnormal outgoing edges. We might be able to relax this in the future. We have to detect when we have to split the block with the NULL dereference and the trap we insert. We have to preserve abnormal edges out of the isolated block which in turn means updating PHIs at the targets of those abnormal outgoing edges. */ if (has_abnormal_or_eh_outgoing_edge_p (bb)) continue; /* First look for a PHI which sets a pointer to NULL and which is then dereferenced within BB. This is somewhat overly conservative, but probably catches most of the interesting cases. */ for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si)) { gimple phi = gsi_stmt (si); tree lhs = gimple_phi_result (phi); /* If the result is not a pointer, then there is no need to examine the arguments. */ if (!POINTER_TYPE_P (TREE_TYPE (lhs))) continue; /* PHI produces a pointer result. See if any of the PHI's arguments are NULL. When we remove an edge, we want to reprocess the current index, hence the ugly way we update I for each iteration. */ basic_block duplicate = NULL; for (unsigned i = 0, next_i = 0; i < gimple_phi_num_args (phi); i = next_i) { tree op = gimple_phi_arg_def (phi, i); next_i = i + 1; if (!integer_zerop (op)) continue; edge e = gimple_phi_arg_edge (phi, i); imm_use_iterator iter; gimple use_stmt; /* We've got a NULL PHI argument. Now see if the PHI's result is dereferenced within BB. */ FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs) { /* We only care about uses in BB. 
Catching cases in in other blocks would require more complex path isolation code. */ if (gimple_bb (use_stmt) != bb) continue; if (infer_nonnull_range (use_stmt, lhs, flag_isolate_erroneous_paths_dereference, flag_isolate_erroneous_paths_attribute)) { duplicate = isolate_path (bb, duplicate, e, use_stmt, lhs); /* When we remove an incoming edge, we need to reprocess the Ith element. */ next_i = i; cfg_altered = true; } } } } }
/* Coalesce SSA partitions induced by copies (earlier variant without
   statistics): union names related by copy assignments and PHI nodes
   into partitions, then give every member of a partition the root
   variable of the partition representative.  Returns
   TODO_remove_unused_locals if anything was coalesced, else 0.  */

static unsigned int
rename_ssa_copies (void)
{
  var_map map;
  basic_block bb;
  gimple_stmt_iterator gsi;
  tree var, part_var;
  gimple stmt, phi;
  unsigned x;
  FILE *debug;
  bool updated = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    debug = dump_file;
  else
    debug = NULL;

  map = init_var_map (num_ssa_names);

  FOR_EACH_BB (bb)
    {
      /* Scan for real copies.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_assign_ssa_name_copy_p (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);

	      updated |= copy_rename_partition_coalesce (map, lhs, rhs, debug);
	    }
	}
    }

  FOR_EACH_BB (bb)
    {
      /* Treat PHI nodes as copies between the result and each argument.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  size_t i;
	  tree res;

	  phi = gsi_stmt (gsi);
	  res = gimple_phi_result (phi);

	  /* Do not process virtual SSA_NAMES.  */
	  if (!is_gimple_reg (SSA_NAME_VAR (res)))
	    continue;

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg (phi, i)->def;
	      if (TREE_CODE (arg) == SSA_NAME)
		updated |= copy_rename_partition_coalesce (map, res, arg,
							   debug);
	    }
	}
    }

  if (debug)
    dump_var_map (debug, map);

  /* Now one more pass to make all elements of a partition share the
     same root variable.  */
  for (x = 1; x < num_ssa_names; x++)
    {
      part_var = partition_to_var (map, x);
      if (!part_var)
	continue;

      var = ssa_name (x);
      if (debug)
	{
	  if (SSA_NAME_VAR (var) != SSA_NAME_VAR (part_var))
	    {
	      fprintf (debug, "Coalesced ");
	      print_generic_expr (debug, var, TDF_SLIM);
	      fprintf (debug, " to ");
	      print_generic_expr (debug, part_var, TDF_SLIM);
	      fprintf (debug, "\n");
	    }
	}
      /* NOTE: unconditionally rewritten, even when the symbols already
	 match — the guard above only affects dumping.  */
      replace_ssa_name_symbol (var, SSA_NAME_VAR (part_var));
    }

  delete_var_map (map);
  return updated ? TODO_remove_unused_locals : 0;
}
/* Construct a hashable expression element from STMT for the available
   expressions table.  ORIG_LHS records the statement's original LHS.
   Supported statements: assignments (single/unary/binary/ternary RHS),
   conditions, calls with an LHS, switches, gotos and PHI nodes —
   anything else aborts.  */

expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = this->expr ();

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
	{
	case GIMPLE_SINGLE_RHS:
	  expr->kind = EXPR_SINGLE;
	  expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
	  break;

	case GIMPLE_UNARY_RHS:
	  expr->kind = EXPR_UNARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  /* Canonicalize all conversions to NOP_EXPR so equivalent
	     casts hash identically.  */
	  if (CONVERT_EXPR_CODE_P (subcode))
	    subcode = NOP_EXPR;
	  expr->ops.unary.op = subcode;
	  expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
	  break;

	case GIMPLE_BINARY_RHS:
	  expr->kind = EXPR_BINARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  expr->ops.binary.op = subcode;
	  expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
	  expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
	  break;

	case GIMPLE_TERNARY_RHS:
	  expr->kind = EXPR_TERNARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  expr->ops.ternary.op = subcode;
	  expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
	  expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
	  expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else if (code == GIMPLE_COND)
    {
      /* A condition is hashed as a boolean-typed binary expression.  */
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      /* Only calls that produce a value are hashable.  */
      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
	expr->ops.call.pure = true;
      else
	expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
	expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
	expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  m_lhs = orig_lhs;
  m_vop = gimple_vuse (stmt);
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}