/* Decide whether the value ORIG may be propagated into uses of DEST.
   Propagation is refused when either name is tied to an abnormal PHI
   or when the copy would need a real type conversion.  */

bool
may_propagate_copy (tree dest, tree orig)
{
  tree dest_type = TREE_TYPE (dest);
  tree orig_type = TREE_TYPE (orig);

  /* A value flowing in over an abnormal edge must stay put.  */
  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;

  /* Likewise, a destination occurring in an abnormal PHI must not be
     replaced.  */
  if (TREE_CODE (dest) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
    return false;

  /* If the types require an actual conversion this is not a copy.  */
  if (!useless_type_conversion_p (dest_type, orig_type))
    return false;

  /* Propagating virtual operands is always OK -- but check that it
     only ever happens between virtual operands.  */
  if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
    {
      gcc_assert (TREE_CODE (orig) == SSA_NAME
		  && virtual_operand_p (orig));
      return true;
    }

  /* Everything else may be propagated.  */
  return true;
}
/* Decide whether the value ORIG may be propagated into uses of DEST.
   Names involved in abnormal PHIs and copies that need a real type
   conversion are rejected; virtual operands are never propagated as
   that may create overlapping life-ranges.  */

bool
may_propagate_copy (tree dest, tree orig)
{
  tree dest_type = TREE_TYPE (dest);
  tree orig_type = TREE_TYPE (orig);

  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    {
      /* Even from an abnormal PHI, the default definition of an
	 automatic (or anonymous) variable may be propagated -- doing
	 so is important to avoid uninitialized regular copies.  */
      bool default_def_of_auto
	= (SSA_NAME_IS_DEFAULT_DEF (orig)
	   && (SSA_NAME_VAR (orig) == NULL_TREE
	       || TREE_CODE (SSA_NAME_VAR (orig)) == VAR_DECL));
      if (!default_def_of_auto)
	return false;
    }

  /* A destination occurring in an abnormal PHI must not be replaced.  */
  if (TREE_CODE (dest) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
    return false;

  /* If the types require an actual conversion this is not a copy.  */
  if (!useless_type_conversion_p (dest_type, orig_type))
    return false;

  /* Generally propagating virtual operands is not ok as that may
     create overlapping life-ranges.  */
  if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
    return false;

  /* Everything else may be propagated.  */
  return true;
}
/* Decide whether the value ORIG may be propagated into uses of DEST.
   Rejects names tied to abnormal PHIs, copies needing a real type
   conversion, and virtual operands (which could end up with
   overlapping life-ranges).  */

bool
may_propagate_copy (tree dest, tree orig)
{
  tree dest_type = TREE_TYPE (dest);
  tree orig_type = TREE_TYPE (orig);

  /* A value flowing in over an abnormal edge must stay put.  */
  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;

  /* Likewise, a destination occurring in an abnormal PHI must not be
     replaced.  */
  if (TREE_CODE (dest) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
    return false;

  /* If the types require an actual conversion this is not a copy.  */
  if (!useless_type_conversion_p (dest_type, orig_type))
    return false;

  /* Everything else is fine, except virtual operands: propagating
     those may create overlapping life-ranges.  */
  return !(TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest));
}
/* Stream out basic block BB of function FN to output block OB.
   The on-the-wire layout is: a BB tag (LTO_bb1 when the block has
   statements or PHIs, LTO_bb0 otherwise), the block index, count,
   frequency and flags, then -- for LTO_bb1 only -- the statement list
   and the PHI list, each terminated by an LTO_null record.  The
   record order here must match the reader exactly.  */

void
output_bb (struct output_block *ob, basic_block bb, struct function *fn)
{
  gimple_stmt_iterator bsi = gsi_start_bb (bb);

  /* LTO_bb1 announces that statement/PHI sections follow.  */
  streamer_write_record_start (ob,
			       (!gsi_end_p (bsi)) || phi_nodes (bb)
			       ? LTO_bb1
			       : LTO_bb0);

  streamer_write_uhwi (ob, bb->index);
  streamer_write_gcov_count (ob, bb->count);
  streamer_write_hwi (ob, bb->frequency);
  streamer_write_hwi (ob, bb->flags);

  if (!gsi_end_p (bsi) || phi_nodes (bb))
    {
      /* Output the statements.  The list of statements is terminated
	 with a zero.  */
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  int region;
	  gimple stmt = gsi_stmt (bsi);

	  output_gimple_stmt (ob, stmt);

	  /* Emit the EH region holding STMT.  A zero region means the
	     statement is not in an EH landing pad, so write LTO_null
	     instead.  */
	  region = lookup_stmt_eh_lp_fn (fn, stmt);
	  if (region != 0)
	    {
	      streamer_write_record_start (ob, LTO_eh_region);
	      streamer_write_hwi (ob, region);
	    }
	  else
	    streamer_write_record_start (ob, LTO_null);
	}

      /* Terminate the statement list.  */
      streamer_write_record_start (ob, LTO_null);

      for (gphi_iterator psi = gsi_start_phis (bb);
	   !gsi_end_p (psi);
	   gsi_next (&psi))
	{
	  gphi *phi = psi.phi ();

	  /* Only emit PHIs for gimple registers.  PHI nodes for .MEM
	     will be filled in on reading when the SSA form is
	     updated.  */
	  if (!virtual_operand_p (gimple_phi_result (phi)))
	    output_phi (ob, phi);
	}

      /* Terminate the PHI list.  */
      streamer_write_record_start (ob, LTO_null);
    }
}
static void init_copy_prop (void) { basic_block bb; n_copy_of = num_ssa_names; copy_of = XCNEWVEC (prop_value_t, n_copy_of); FOR_EACH_BB_FN (bb, cfun) { for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si)) { gimple *stmt = gsi_stmt (si); ssa_op_iter iter; tree def; /* The only statements that we care about are those that may generate useful copies. We also need to mark conditional jumps so that their outgoing edges are added to the work lists of the propagator. */ if (stmt_ends_bb_p (stmt)) prop_set_simulate_again (stmt, true); else if (stmt_may_generate_copy (stmt)) prop_set_simulate_again (stmt, true); else prop_set_simulate_again (stmt, false); /* Mark all the outputs of this statement as not being the copy of anything. */ FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS) if (!prop_simulate_again_p (stmt)) set_copy_of_val (def, def); } for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si)) { gphi *phi = si.phi (); tree def; def = gimple_phi_result (phi); if (virtual_operand_p (def)) prop_set_simulate_again (phi, false); else prop_set_simulate_again (phi, true); if (!prop_simulate_again_p (phi)) set_copy_of_val (def, def); } } }
/* Return true if T may be used as a GIMPLE register operand, i.e. a
   value that can be renamed freely in SSA form.  */

bool
is_gimple_reg (tree t)
{
  if (virtual_operand_p (t))
    return false;

  /* Any non-virtual SSA name already is a register.  */
  if (TREE_CODE (t) == SSA_NAME)
    return true;

  if (!is_gimple_variable (t)
      || !is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that might imply.  All around,
     it seems safest to not do too much optimization with these at the
     tree level at all.  We'll have to rely on the rtl optimizers to
     clean this up, as there we've got all the appropriate bits
     exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}
/* The copy-rename pass: coalesce SSA names related by copies into
   shared partitions, then give all names in a partition the same root
   variable.  Returns TODO_remove_unused_locals when any partitions
   were actually updated, zero otherwise.  */

static unsigned int
rename_ssa_copies (void)
{
  var_map map;
  basic_block bb;
  gimple_stmt_iterator gsi;
  tree var, part_var;
  gimple stmt, phi;
  unsigned x;
  FILE *debug;
  bool updated = false;

  memset (&stats, 0, sizeof (stats));

  /* Only dump details when explicitly asked for.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    debug = dump_file;
  else
    debug = NULL;

  map = init_var_map (num_ssa_names);

  FOR_EACH_BB (bb)
    {
      /* Scan for real copies.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_assign_ssa_name_copy_p (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);

	      updated |= copy_rename_partition_coalesce (map, lhs, rhs,
							 debug);
	    }
	}
    }

  FOR_EACH_BB (bb)
    {
      /* Treat PHI nodes as copies between the result and each
	 argument.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  size_t i;
	  tree res;

	  phi = gsi_stmt (gsi);
	  res = gimple_phi_result (phi);

	  /* Do not process virtual SSA_NAMES.  */
	  if (virtual_operand_p (res))
	    continue;

	  /* Make sure to only use the same partition for an argument
	     as the result but never the other way around.  */
	  if (SSA_NAME_VAR (res)
	      && !DECL_IGNORED_P (SSA_NAME_VAR (res)))
	    for (i = 0; i < gimple_phi_num_args (phi); i++)
	      {
		tree arg = PHI_ARG_DEF (phi, i);
		if (TREE_CODE (arg) == SSA_NAME)
		  updated |= copy_rename_partition_coalesce (map, res, arg,
							     debug);
	      }
	  /* Else if all arguments are in the same partition try to
	     merge it with the result.  */
	  else
	    {
	      /* ALL_P_SAME tracks a three-way state: -1 = no argument
		 seen yet, 1 = all SSA arguments so far share partition
		 P, 0 = mixed partitions or a non-SSA argument.  */
	      int all_p_same = -1;
	      int p = -1;
	      for (i = 0; i < gimple_phi_num_args (phi); i++)
		{
		  tree arg = PHI_ARG_DEF (phi, i);
		  if (TREE_CODE (arg) != SSA_NAME)
		    {
		      all_p_same = 0;
		      break;
		    }
		  else if (all_p_same == -1)
		    {
		      p = partition_find (map->var_partition,
					  SSA_NAME_VERSION (arg));
		      all_p_same = 1;
		    }
		  else if (all_p_same == 1
			   && p != partition_find (map->var_partition,
						   SSA_NAME_VERSION (arg)))
		    {
		      all_p_same = 0;
		      break;
		    }
		}
	      if (all_p_same == 1)
		updated |= copy_rename_partition_coalesce (map, res,
							   PHI_ARG_DEF (phi, 0),
							   debug);
	    }
	}
    }

  if (debug)
    dump_var_map (debug, map);

  /* Now one more pass to make all elements of a partition share the
     same root variable.  SSA name version 0 is never used, hence the
     loop starts at 1.  */
  for (x = 1; x < num_ssa_names; x++)
    {
      part_var = partition_to_var (map, x);
      if (!part_var)
	continue;

      var = ssa_name (x);
      if (SSA_NAME_VAR (var) == SSA_NAME_VAR (part_var))
	continue;

      if (debug)
	{
	  fprintf (debug, "Coalesced ");
	  print_generic_expr (debug, var, TDF_SLIM);
	  fprintf (debug, " to ");
	  print_generic_expr (debug, part_var, TDF_SLIM);
	  fprintf (debug, "\n");
	}
      stats.coalesced++;
      replace_ssa_name_symbol (var, SSA_NAME_VAR (part_var));
    }

  statistics_counter_event (cfun, "copies coalesced", stats.coalesced);

  delete_var_map (map);
  return updated ? TODO_remove_unused_locals : 0;
}
/* Set up the copy-propagation lattice: allocate the COPY_OF value
   array and tell the SSA propagator which statements and PHI nodes
   need to be simulated.  Definitions of statements that will never be
   simulated are initialized as copies of themselves.  */

static void
init_copy_prop (void)
{
  basic_block bb;

  copy_of = XCNEWVEC (prop_value_t, num_ssa_names);

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator si;
      int depth = bb_loop_depth (bb);

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple stmt = gsi_stmt (si);
	  ssa_op_iter iter;
	  tree def;

	  /* The only statements that we care about are those that may
	     generate useful copies.  We also need to mark conditional
	     jumps so that their outgoing edges are added to the work
	     lists of the propagator.

	     Avoid copy propagation from an inner into an outer loop.
	     Otherwise, this may move loop variant variables outside of
	     their loops and prevent coalescing opportunities.  If the
	     value was loop invariant, it will be hoisted by LICM and
	     exposed for copy propagation.
	     ???  This doesn't make sense.  */
	  if (stmt_ends_bb_p (stmt))
	    prop_set_simulate_again (stmt, true);
	  else if (stmt_may_generate_copy (stmt)
		   /* Since we are iterating over the statements in
		      BB, not the phi nodes, STMT will always be an
		      assignment.  */
		   && loop_depth_of_name (gimple_assign_rhs1 (stmt)) <= depth)
	    prop_set_simulate_again (stmt, true);
	  else
	    prop_set_simulate_again (stmt, false);

	  /* Mark all the outputs of this statement as not being the
	     copy of anything.  */
	  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
	    if (!prop_simulate_again_p (stmt))
	      set_copy_of_val (def, def);
	}

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple phi = gsi_stmt (si);
	  tree def;

	  def = gimple_phi_result (phi);
	  /* Virtual PHI results are never simulated; they are marked
	     as copies of themselves instead.  */
	  if (virtual_operand_p (def))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);

	  if (!prop_simulate_again_p (phi))
	    set_copy_of_val (def, def);
	}
    }
}