static bool
stmt_may_generate_copy (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_PHI)
    return !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (stmt));

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  /* If the statement has volatile operands, it won't generate a
     useful copy.  */
  if (gimple_has_volatile_ops (stmt))
    return false;

  /* Statements with loads and/or stores will never generate a useful copy.  */
  if (gimple_vuse (stmt))
    return false;

  /* Otherwise, the only statements that generate useful copies are
     assignments whose RHS is just an SSA name that doesn't flow
     through abnormal edges.  */
  return ((gimple_assign_rhs_code (stmt) == SSA_NAME
	   && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
	  || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)));
}
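/* For illustration only (not part of the pass above): a minimal C-level
   sketch of which assignments can act as copies.  The SSA names in the
   comments are made up, not taken from a real dump.  */

volatile int vflag;

int
copy_example (int a)
{
  int b = a;        /* b_2 = a_1(D): RHS is an SSA name, a useful copy.  */
  int c = 42;       /* c_3 = 42: RHS is a minimal invariant, also a copy.  */
  int d = vflag;    /* volatile load: volatile ops and a VUSE, so no copy.  */
  return b + c + d;
}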
static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_has_volatile_ops (stmt)
	  || !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
	return false;
    }

  return true;
}
static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_has_volatile_ops (stmt)
	  || gimple_vuse (stmt))
	return false;
    }

  return true;
}
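/* Illustrative only: the kinds of statements that make the predicate
   above return false.  The function and variable names are made up.  */

volatile int hw_status;

int
side_effect_examples (int a, int b, int *p)
{
  if (a)
    {
      int t = b + 1;     /* pure SSA arithmetic: no virtual operands.  */
      a = t * 2;         /* still side-effect free.  */
    }
  if (b)
    {
      *p = a;            /* store: the statement carries virtual operands.  */
      a += hw_status;    /* volatile read: gimple_has_volatile_ops is set.  */
    }
  return a;
}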
static bool
can_propagate_from (gimple def_stmt)
{
  use_operand_p use_p;
  ssa_op_iter iter;

  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
    if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
      return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
	return false;
    }

  return true;
}
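/* Illustrative only: why conversions involving function-pointer types are
   not propagated.  On targets that canonicalize function pointers, folding
   the cast below into the comparison could drop a required canonicalization
   step; the names here are made up.  */

typedef void (*fn_ptr) (void);

int
same_handler_p (fn_ptr fp, void *expected)
{
  void *p = (void *) fp;   /* the cast's operand has function-pointer type,
			      so the predicate above refuses to propagate */
  return p == expected;    /* the single use the propagation would feed */
}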
/* Look at the statement pointed to by GSIP and figure out what
   interesting side effects it has.  */

static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw externally\n");
	  local->can_throw = true;
	}
      else if (dump_file)
	fprintf (dump_file, " can throw\n");
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, stmt, ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (stmt)))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, " nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	}
      if (gimple_asm_volatile_p (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	}
      return;
    default:
      break;
    }
}
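/* Rough C-level counterparts of the cases flagged above; the names are
   made up for illustration.  */

volatile int device_status;

int
not_const_or_pure (void)
{
  int s = device_status;                     /* volatile read: IPA_NEITHER.  */
  __asm__ __volatile__ ("" : : : "memory");  /* volatile asm clobbering
						memory: IPA_NEITHER, and
						additionally marked looping
						for the volatile case.  */
  return s;
}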
static bool
stmt_simple_for_scop_p (basic_block scop_entry, loop_p outermost_loop,
			gimple stmt, basic_block bb)
{
  loop_p loop = bb->loop_father;

  gcc_assert (scop_entry);

  /* GIMPLE_ASM and GIMPLE_CALL may embed arbitrary side effects.
     Calls have side-effects, except those to const or pure
     functions.  */
  if (gimple_has_volatile_ops (stmt)
      || (gimple_code (stmt) == GIMPLE_CALL
	  && !(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
      || (gimple_code (stmt) == GIMPLE_ASM))
    return false;

  if (is_gimple_debug (stmt))
    return true;

  if (!stmt_has_simple_data_refs_p (outermost_loop, stmt))
    return false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_RETURN:
    case GIMPLE_LABEL:
      return true;

    case GIMPLE_COND:
      {
	/* We can handle all binary comparisons.  Inequalities are
	   also supported as they can be represented with union of
	   polyhedra.  */
	enum tree_code code = gimple_cond_code (stmt);
	if (!(code == LT_EXPR
	      || code == GT_EXPR
	      || code == LE_EXPR
	      || code == GE_EXPR
	      || code == EQ_EXPR
	      || code == NE_EXPR))
	  return false;

	for (unsigned i = 0; i < 2; ++i)
	  {
	    tree op = gimple_op (stmt, i);
	    if (!graphite_can_represent_expr (scop_entry, loop, op)
		/* We cannot handle REAL_TYPE.  Failed for pr39260.  */
		|| TREE_CODE (TREE_TYPE (op)) == REAL_TYPE)
	      return false;
	  }

	return true;
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return true;

    default:
      /* These nodes cut a new scope.  */
      return false;
    }

  return false;
}
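/* Sketch of the comparison restriction above: integer conditions can be
   represented as (unions of) polyhedra, while REAL_TYPE comparisons cannot
   (cf. the pr39260 reference).  The names are illustrative.  */

void
scop_example (int n, double *a, double *b)
{
  for (int i = 0; i < n; i++)    /* i < n: representable               */
    if (i != n - 1)              /* integer inequality: representable  */
      a[i] = b[i] + 1.0;

  for (int i = 0; i < n; i++)
    if (a[i] > 0.0)              /* REAL_TYPE operand: rejected        */
      b[i] = a[i];
}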
static void
output_gimple_stmt (struct output_block *ob, gimple stmt)
{
  unsigned i;
  enum gimple_code code;
  enum LTO_tags tag;
  struct bitpack_d bp;
  histogram_value hist;

  /* Emit identifying tag.  */
  code = gimple_code (stmt);
  tag = lto_gimple_code_to_tag (code);
  streamer_write_record_start (ob, tag);

  /* Emit the tuple header.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_var_len_unsigned (&bp, gimple_num_ops (stmt));
  bp_pack_value (&bp, gimple_no_warning_p (stmt), 1);
  if (is_gimple_assign (stmt))
    bp_pack_value (&bp, gimple_assign_nontemporal_move_p (stmt), 1);
  bp_pack_value (&bp, gimple_has_volatile_ops (stmt), 1);
  hist = gimple_histogram_value (cfun, stmt);
  bp_pack_value (&bp, hist != NULL, 1);
  bp_pack_var_len_unsigned (&bp, stmt->gsbase.subcode);

  /* Emit location information for the statement.  */
  stream_output_location (ob, &bp, LOCATION_LOCUS (gimple_location (stmt)));
  streamer_write_bitpack (&bp);

  /* Emit the lexical block holding STMT.  */
  stream_write_tree (ob, gimple_block (stmt), true);

  /* Emit the operands.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_RESX:
      streamer_write_hwi (ob, gimple_resx_region (stmt));
      break;

    case GIMPLE_EH_MUST_NOT_THROW:
      stream_write_tree (ob, gimple_eh_must_not_throw_fndecl (stmt), true);
      break;

    case GIMPLE_EH_DISPATCH:
      streamer_write_hwi (ob, gimple_eh_dispatch_region (stmt));
      break;

    case GIMPLE_ASM:
      streamer_write_uhwi (ob, gimple_asm_ninputs (stmt));
      streamer_write_uhwi (ob, gimple_asm_noutputs (stmt));
      streamer_write_uhwi (ob, gimple_asm_nclobbers (stmt));
      streamer_write_uhwi (ob, gimple_asm_nlabels (stmt));
      streamer_write_string (ob, ob->main_stream, gimple_asm_string (stmt),
			     true);
      /* Fallthru  */

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN:
    case GIMPLE_SWITCH:
    case GIMPLE_LABEL:
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_DEBUG:
      for (i = 0; i < gimple_num_ops (stmt); i++)
	{
	  tree op = gimple_op (stmt, i);
	  tree *basep = NULL;
	  /* Wrap all uses of non-automatic variables inside MEM_REFs
	     so that we do not have to deal with type mismatches on
	     merged symbols during IL read in.  The first operand
	     of GIMPLE_DEBUG must be a decl, not MEM_REF, though.  */
	  if (op && (i || !is_gimple_debug (stmt)))
	    {
	      basep = &op;
	      while (handled_component_p (*basep))
		basep = &TREE_OPERAND (*basep, 0);
	      if (TREE_CODE (*basep) == VAR_DECL
		  && !auto_var_in_fn_p (*basep, current_function_decl)
		  && !DECL_REGISTER (*basep))
		{
		  bool volatilep = TREE_THIS_VOLATILE (*basep);
		  *basep = build2 (MEM_REF, TREE_TYPE (*basep),
				   build_fold_addr_expr (*basep),
				   build_int_cst (build_pointer_type
						  (TREE_TYPE (*basep)), 0));
		  TREE_THIS_VOLATILE (*basep) = volatilep;
		}
	      else
		basep = NULL;
	    }
	  stream_write_tree (ob, op, true);
	  /* Restore the original base if we wrapped it inside a MEM_REF.  */
	  if (basep)
	    *basep = TREE_OPERAND (TREE_OPERAND (*basep, 0), 0);
	}
      if (is_gimple_call (stmt))
	{
	  if (gimple_call_internal_p (stmt))
	    streamer_write_enum (ob->main_stream, internal_fn,
				 IFN_LAST, gimple_call_internal_fn (stmt));
	  else
	    stream_write_tree (ob, gimple_call_fntype (stmt), true);
	}
      break;

    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_TRANSACTION:
      gcc_assert (gimple_transaction_body (stmt) == NULL);
      stream_write_tree (ob, gimple_transaction_label (stmt), true);
      break;

    default:
      gcc_unreachable ();
    }

  if (hist)
    stream_out_histogram_value (ob, hist);
}
static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = stmt;
	  ass_var = gimple_call_lhs (stmt);
	  break;
	}

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens,
     in (e.g.) "*p = foo()" where foo returns a struct.  In this case we
     won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !DECL_BUILT_IN (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
		 must have a copyable type and the two arguments must have
		 reasonably equivalent types.  The latter requirement could be
		 relaxed if we emitted a suitable type conversion
		 statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
						 TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition, however this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer to
     local variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;
  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (gimple_clobber_p (stmt))
	continue;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign.  */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
	return;

      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;
  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
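/* The M (multiplicand) and A (addend) accumulators collected above
   correspond to multiplications and additions applied to the recursive
   call's result, as in these textbook examples (illustrative):  */

int
factorial (int n)
{
  if (n <= 1)
    return 1;
  return n * factorial (n - 1);   /* result = call * n: mult accumulator */
}

int
sum_to (int n)
{
  if (n == 0)
    return 0;
  return n + sum_to (n - 1);      /* result = call + n: add accumulator  */
}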
static inline bool
is_replaceable_p (gimple stmt)
{
  use_operand_p use_p;
  tree def;
  gimple use_stmt;
  location_t locus1, locus2;
  tree block1, block2;

  /* Only consider modify stmts.  */
  if (!is_gimple_assign (stmt))
    return false;

  /* If the statement may throw an exception, it cannot be replaced.  */
  if (stmt_could_throw_p (stmt))
    return false;

  /* Punt if there is more than 1 def.  */
  def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF);
  if (!def)
    return false;

  /* Only consider definitions which have a single use.  */
  if (!single_imm_use (def, &use_p, &use_stmt))
    return false;

  /* If the use isn't in this block, it won't be replaced either.  */
  if (gimple_bb (use_stmt) != gimple_bb (stmt))
    return false;

  locus1 = gimple_location (stmt);
  block1 = gimple_block (stmt);

  if (gimple_code (use_stmt) == GIMPLE_PHI)
    {
      locus2 = 0;
      block2 = NULL_TREE;
    }
  else
    {
      locus2 = gimple_location (use_stmt);
      block2 = gimple_block (use_stmt);
    }

  if (!optimize
      && ((locus1 && locus1 != locus2) || (block1 && block1 != block2)))
    return false;

  /* Used in this block, but at the TOP of the block, not the end.  */
  if (gimple_code (use_stmt) == GIMPLE_PHI)
    return false;

  /* There must be no VDEFs.  */
  if (!(ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF)))
    return false;

  /* Without alias info we can't move around loads.  */
  if (gimple_references_memory_p (stmt) && !optimize)
    return false;

  /* Float expressions must go through memory if float-store is on.  */
  if (flag_float_store
      && FLOAT_TYPE_P (gimple_expr_type (stmt)))
    return false;

  /* An assignment with a register variable on the RHS is not
     replaceable.  */
  if (gimple_assign_rhs_code (stmt) == VAR_DECL
      && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt)))
    return false;

  /* No function calls can be replaced.  */
  if (is_gimple_call (stmt))
    return false;

  /* Leave any stmt with volatile operands alone as well.  */
  if (gimple_has_volatile_ops (stmt))
    return false;

  return true;
}
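/* Illustrative replacement candidate: t below has exactly one use, in the
   same block, with no virtual or volatile operands, so a predicate of this
   shape allows substituting the defining expression into its use when
   leaving SSA form.  The names and SSA numbers are made up.  */

int
ter_example (int a, int b)
{
  int t = a * b;     /* t_1 = a_2(D) * b_3(D): single use below        */
  return t + 1;      /* may be expanded directly as return a * b + 1;  */
}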