static bool
check_process_case (tree cs)
{
  tree ldecl;
  basic_block label_bb, following_bb;
  edge e;

  ldecl = CASE_LABEL (cs);
  label_bb = label_to_block (ldecl);

  e = find_edge (info.switch_bb, label_bb);
  gcc_assert (e);

  if (CASE_LOW (cs) == NULL_TREE)
    {
      /* Default branch.  */
      info.default_prob = e->probability;
      info.default_count = e->count;
    }
  else
    info.other_count += e->count;

  if (!label_bb)
    {
      info.reason = " Bad case - cs BB label is NULL\n";
      return false;
    }

  if (!single_pred_p (label_bb))
    {
      if (info.final_bb && info.final_bb != label_bb)
        {
          info.reason = " Bad case - a non-final BB has two predecessors\n";
          return false; /* sth complex going on in this branch  */
        }

      following_bb = label_bb;
    }
  else
    {
      if (!empty_block_p (label_bb))
        {
          info.reason = " Bad case - a non-final BB not empty\n";
          return false;
        }

      e = single_succ_edge (label_bb);
      following_bb = single_succ (label_bb);
    }

  if (!info.final_bb)
    info.final_bb = following_bb;
  else if (info.final_bb != following_bb)
    {
      info.reason = " Bad case - different final BB\n";
      return false; /* the only successor is not common for all the branches */
    }

  return true;
}
/* Return true if BB is an empty forwarder block whose single successor
   is TO_BB.  */

static bool
forwarder_block_to (basic_block bb, basic_block to_bb)
{
  return empty_block_p (bb)
         && single_succ_p (bb)
         && single_succ (bb) == to_bb;
}
/* Optimizes tail calls in the function, turning tail recursion into
   iteration.  When OPT_TAILCALLS is true, suitable calls are also marked
   as tail calls.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  tree param;
  gimple *stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with return
         statement.  */
      stmt = last_stmt (e->src);

      if (stmt
          && gimple_code (stmt) == GIMPLE_RETURN)
        find_tail_calls (e->src, &tailcalls);
    }
/* Chain the superblocks into a linear layout: repeatedly extend the current
   chain with the most frequent unvisited successor of its last block,
   falling back to the next unvisited block in the original order.  */

static void
layout_superblocks (void)
{
  basic_block end = single_succ (ENTRY_BLOCK_PTR);
  basic_block bb = end->next_bb;

  while (bb != EXIT_BLOCK_PTR)
    {
      edge_iterator ei;
      edge e, best = NULL;

      while (end->aux)
        end = end->aux;

      FOR_EACH_EDGE (e, ei, end->succs)
        if (e->dest != EXIT_BLOCK_PTR
            && e->dest != single_succ (ENTRY_BLOCK_PTR)
            && !e->dest->il.rtl->visited
            && (!best || EDGE_FREQUENCY (e) > EDGE_FREQUENCY (best)))
          best = e;

      if (best)
        {
          end->aux = best->dest;
          best->dest->il.rtl->visited = 1;
        }
      else
        for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
          {
            if (!bb->il.rtl->visited)
              {
                end->aux = bb;
                bb->il.rtl->visited = 1;
                break;
              }
          }
    }
}
/* Emit, at the start of the current function, a call to
   __gcov_indirect_call_profiler that attributes indirect-call profile
   counts to this function, and then reset __gcov_indirect_call_callee.  */

void
gimple_gen_ic_func_profiler (void)
{
  struct cgraph_node * c_node = cgraph_get_node (current_function_decl);
  gimple_stmt_iterator gsi;
  gimple stmt1, stmt2;
  tree tree_uid, cur_func, counter_ptr, ptr_var, void0;

  if (cgraph_only_called_directly_p (c_node))
    return;

  gimple_init_edge_profiler ();

  /* Insert code:

     stmt1: __gcov_indirect_call_profiler (__gcov_indirect_call_counters,
                                           current_function_funcdef_no,
                                           &current_function_decl,
                                           __gcov_indirect_call_callee);
   */
  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));

  cur_func = force_gimple_operand_gsi (&gsi,
                                       build_addr (current_function_decl,
                                                   current_function_decl),
                                       true, NULL_TREE,
                                       true, GSI_SAME_STMT);
  counter_ptr = force_gimple_operand_gsi (&gsi, ic_gcov_type_ptr_var,
                                          true, NULL_TREE, true,
                                          GSI_SAME_STMT);
  ptr_var = force_gimple_operand_gsi (&gsi, ic_void_ptr_var,
                                      true, NULL_TREE, true,
                                      GSI_SAME_STMT);
  tree_uid = build_int_cst (gcov_type_node, current_function_funcdef_no);
  stmt1 = gimple_build_call (tree_indirect_call_profiler_fn, 4,
                             counter_ptr, tree_uid, cur_func, ptr_var);
  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);

  /* Set __gcov_indirect_call_callee to 0,
     so that calls from other modules won't get misattributed
     to the last caller of the current callee.  */
  void0 = build_int_cst (build_pointer_type (void_type_node), 0);
  stmt2 = gimple_build_assign (ic_void_ptr_var, void0);
  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
}
static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
               bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
        break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* Number of executions of function has reduced by the tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), e->count,
                    EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), e->count,
                    EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
                                first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
         idx = 0, gsi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
        continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
         SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
/* Find tail calls falling into basic block BB.  The list of found tail calls
   is added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
          || gimple_code (stmt) == GIMPLE_RETURN
          || gimple_clobber_p (stmt)
          || is_gimple_debug (stmt))
        continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
        {
          call = stmt;
          ass_var = gimple_call_lhs (stmt);
          break;
        }

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
          || gimple_has_volatile_ops (stmt))
        return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens in
     (e.g.) "*p = foo()" where foo returns a struct.  In this case we won't
     have a temporary here, but we need to carry out the side effect anyway,
     so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !DECL_BUILT_IN (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
           param && idx < gimple_call_num_args (call);
           param = DECL_CHAIN (param), idx++)
        {
          arg = gimple_call_arg (call, idx);
          if (param != arg)
            {
              /* Make sure there are no problems with copying.  The parameter
                 must have a copyable type and the two arguments must have
                 reasonably equivalent types.  The latter requirement could be
                 relaxed if we emitted a suitable type conversion
                 statement.  */
              if (!is_gimple_reg_type (TREE_TYPE (param))
                  || !useless_type_conversion_p (TREE_TYPE (param),
                                                 TREE_TYPE (arg)))
                break;

              /* The parameter should be a real operand, so that phi node
                 created for it at the start of the function has the meaning
                 of copying the value.  This test implies is_gimple_reg_type
                 from the previous condition, however this one could be
                 relaxed by being more careful with copying the new value
                 of the parameter (emitting appropriate GIMPLE_ASSIGN and
                 updating the virtual operands).  */
              if (!is_gimple_reg (param))
                break;
            }
        }
      if (idx == gimple_call_num_args (call) && !param)
        tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer to local
     variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
          && auto_var_in_fn_p (var, cfun->decl)
          && (ref_maybe_used_by_stmt_p (call, var)
              || call_may_clobber_ref_p (call, var)))
        return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
        {
          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
          agsi = gsi_start_bb (abb);
        }

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
        continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
        break;

      if (gimple_clobber_p (stmt))
        continue;

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
        return;

      /* This is a gimple assign.  */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
        return;

      if (tmp_a)
        {
          tree type = TREE_TYPE (tmp_a);
          if (a)
            a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
          else
            a = tmp_a;
        }
      if (tmp_m)
        {
          tree type = TREE_TYPE (tmp_m);
          if (m)
            m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
          else
            m = tmp_m;

          if (a)
            a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
        }
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
/* Check whether the value of EXPR, used in statement AT, is already known at
   the statement pointed to by GSI, walking the chain of single-successor
   blocks between them.  Return the expression holding that value at GSI, or
   NULL_TREE if it is not available there.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
        break;

      if (bb == call_bb)
        {
          for (; !gsi_end_p (gsi); gsi_next (&gsi))
            if (gsi_stmt (gsi) == at)
              break;

          if (!gsi_end_p (gsi))
            expr = NULL_TREE;
          break;
        }

      if (gimple_code (at) != GIMPLE_PHI)
        {
          expr = NULL_TREE;
          break;
        }

      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src->aux)
          break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
        {
          /* The value is a constant.  */
          break;
        }
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
/* Main entry point for tree if-conversion of LOOP.  FOR_VECTORIZER is true
   when the conversion is performed on behalf of the vectorizer.  Returns
   true if the loop was successfully if-converted.  */

static bool
tree_if_conversion (struct loop *loop, bool for_vectorizer)
{
  basic_block bb;
  block_stmt_iterator itr;
  unsigned int i;

  ifc_bbs = NULL;

  /* if-conversion is not appropriate for all loops.  First, check if loop is
     if-convertible or not.  */
  if (!if_convertible_loop_p (loop, for_vectorizer))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "-------------------------\n");
      if (ifc_bbs)
        {
          free (ifc_bbs);
          ifc_bbs = NULL;
        }
      free_dominance_info (CDI_POST_DOMINATORS);
      return false;
    }

  /* Do actual work now.  */
  for (i = 0; i < loop->num_nodes; i++)
    {
      tree cond;

      bb = ifc_bbs [i];

      /* Update condition using predicate list.  */
      cond = bb->aux;

      /* Process all statements in this basic block.
         Remove conditional expression, if any, and annotate
         destination basic block(s) appropriately.  */
      for (itr = bsi_start (bb); !bsi_end_p (itr); /* empty */)
        {
          tree t = bsi_stmt (itr);
          cond = tree_if_convert_stmt (loop, t, cond, &itr);
          if (!bsi_end_p (itr))
            bsi_next (&itr);
        }

      /* If current bb has only one successor, then consider it as an
         unconditional goto.  */
      if (single_succ_p (bb))
        {
          basic_block bb_n = single_succ (bb);
          if (cond != NULL_TREE)
            add_to_predicate_list (bb_n, cond);
        }
    }

  /* Now, all statements are if-converted and basic blocks are
     annotated appropriately.  Combine all basic blocks into one huge
     basic block.  */
  combine_blocks (loop);

  /* clean up */
  clean_predicate_lists (loop);
  free (ifc_bbs);
  ifc_bbs = NULL;

  return true;
}
/* Check one case label CS of the switch described by INFO: record edge
   counts and bit-test statistics, and verify that all case branches meet in
   a single final basic block.  Set info->reason and return false if the case
   makes the switch unsuitable for conversion.  */

static bool
check_process_case (tree cs, struct switch_conv_info *info)
{
  tree ldecl;
  basic_block label_bb, following_bb;
  edge e;

  ldecl = CASE_LABEL (cs);
  label_bb = label_to_block (ldecl);

  e = find_edge (info->switch_bb, label_bb);
  gcc_assert (e);

  if (CASE_LOW (cs) == NULL_TREE)
    {
      /* Default branch.  */
      info->default_prob = e->probability;
      info->default_count = e->count;
    }
  else
    {
      int i;
      info->other_count += e->count;
      for (i = 0; i < 2; i++)
        if (info->bit_test_bb[i] == label_bb)
          break;
        else if (info->bit_test_bb[i] == NULL)
          {
            info->bit_test_bb[i] = label_bb;
            info->bit_test_uniq++;
            break;
          }
      if (i == 2)
        info->bit_test_uniq = 3;
      if (CASE_HIGH (cs) != NULL_TREE
          && ! tree_int_cst_equal (CASE_LOW (cs), CASE_HIGH (cs)))
        info->bit_test_count += 2;
      else
        info->bit_test_count++;
    }

  if (!label_bb)
    {
      info->reason = "bad case - cs BB label is NULL";
      return false;
    }

  if (!single_pred_p (label_bb))
    {
      if (info->final_bb && info->final_bb != label_bb)
        {
          info->reason = "bad case - a non-final BB has two predecessors";
          return false; /* sth complex going on in this branch  */
        }

      following_bb = label_bb;
    }
  else
    {
      if (!empty_block_p (label_bb))
        {
          info->reason = "bad case - a non-final BB not empty";
          return false;
        }

      e = single_succ_edge (label_bb);
      following_bb = single_succ (label_bb);
    }

  if (!info->final_bb)
    info->final_bb = following_bb;
  else if (info->final_bb != following_bb)
    {
      info->reason = "bad case - different final BB";
      return false; /* the only successor is not common for all the branches */
    }

  return true;
}