/* Look into pointer pointed to by GSIP and figure out what interesting
   side effects it has.  Results are accumulated into LOCAL (pure/const
   state, looping, can_throw).  IPA selects the interprocedural variants
   of the load/store walk callbacks.  */
static void check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);

  /* Debug statements have no side effects of interest.  */
  if (is_gimple_debug (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* A volatile access (other than a clobber marker) makes the function
     neither const nor pure.  */
  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores. */
  walk_stmt_load_store_ops (stmt, local,
                            ipa ? check_ipa_load : check_load,
                            ipa ? check_ipa_store : check_store);

  /* Throwing of non-call statements; calls handle their own throwing
     inside check_call below.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, " can throw; looping\n");
          local->looping = true;
        }
      if (stmt_can_throw_external (stmt))
        {
          if (dump_file)
            fprintf (dump_file, " can throw externally\n");
          local->can_throw = true;
        }
      else if (dump_file)
        fprintf (dump_file, " can throw\n");
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, stmt, ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (stmt)))
        /* Target of long jump. */
        {
          if (dump_file)
            fprintf (dump_file, " nonlocal label is not const/pure\n");
          local->pure_const_state = IPA_NEITHER;
        }
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (stmt))
        {
          if (dump_file)
            fprintf (dump_file, " memory asm clobber is not const/pure\n");
          /* Abandon all hope, ye who enter here. */
          local->pure_const_state = IPA_NEITHER;
        }
      if (gimple_asm_volatile_p (stmt))
        {
          if (dump_file)
            fprintf (dump_file, " volatile is not const/pure\n");
          /* Abandon all hope, ye who enter here. */
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;
        }
      /* NOTE(review): the asm case exits via return rather than break;
         behavior-identical here since the switch is the last statement.  */
      return;
    default:
      break;
    }
}
/* Compare two hashable_exprs EXPR0 and EXPR1 for equality.  The types
   must match in signedness, precision and mode, the kinds must agree,
   and the operands must compare equal per operand_equal_p; commutative
   binary and ternary operators also accept the swapped operand order.  */
static bool
hashable_expr_equal_p (const struct hashable_expr *expr0, const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check. */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  (When both are NULL_TREE the
     pointer-equality test short-circuits all the accessors.)  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs, expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      /* Conversions between types of differing signedness are not
         interchangeable even when operands match.  */
      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd, expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0, expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1, expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order. */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0, expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1, expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      /* The third operand never commutes; check it together with the
         operator code up front.  */
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2, expr1->ops.ternary.opnd2, 0))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0, expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1, expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order. */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0, expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1, expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they clearly cannot be equal. */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        /* Impure calls never compare equal.  */
        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i], expr1->ops.call.args[i], 0))
            return false;

        /* Potentially throwing calls must agree on their EH landing pad
           (when either has one).  */
        if (stmt_could_throw_p (expr0->ops.call.fn_from))
          {
            int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
            int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
            if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
              return false;
          }

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        /* PHI arguments must match pairwise, in order.  */
        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i], expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
/* Analyze call statement CALL and merge its effects into LOCAL's
   pure/const, looping and can_throw state.  IPA selects interprocedural
   mode, in which direct calls are left to IPA propagation.  */
static void
check_call (funct_state local, gimple call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
                                     && stmt_can_throw_external (call));

  /* Operands of the call may themselves throw when non-call exceptions
     are enabled.  */
  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
            && tree_could_throw_p (gimple_op (call, i)))
          {
            if (possibly_throws && cfun->can_throw_non_call_exceptions)
              {
                if (dump_file)
                  fprintf (dump_file, " operand can throw; looping\n");
                local->looping = true;
              }
            if (possibly_throws_externally)
              {
                if (dump_file)
                  fprintf (dump_file, " operand can throw externally\n");
                local->can_throw = true;
              }
          }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      /* Some builtins have a known state regardless of their flags;
         those fully determine this call's contribution.  */
      if (special_builtin_state (&call_state, &call_looping, callee_t))
        {
          worse_state (&local->pure_const_state, &local->looping,
                       call_state, call_looping);
          return;
        }
      /* When bad things happen to bad functions, they cannot be const
         or pure. */
      if (setjmp_call_p (callee_t))
        {
          if (dump_file)
            fprintf (dump_file, " setjmp is not const/pure\n");
          local->looping = true;
          local->pure_const_state = IPA_NEITHER;
        }
      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee_t))
          {
          case BUILT_IN_LONGJMP:
          case BUILT_IN_NONLOCAL_GOTO:
            if (dump_file)
              fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
            local->pure_const_state = IPA_NEITHER;
            local->looping = true;
            break;
          default:
            break;
          }
    }

  /* When not in IPA mode, we can still handle self recursion. */
  if (!ipa && callee_t == current_function_decl)
    {
      if (dump_file)
        fprintf (dump_file, " Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis. Look to
     see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the
     basis of those bits. */
  else if (!ipa)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, " can throw; looping\n");
          local->looping = true;
        }
      if (possibly_throws_externally)
        {
          if (dump_file)
            {
              fprintf (dump_file, " can throw externally to lp %i\n",
                       lookup_stmt_eh_lp (call));
              if (callee_t)
                fprintf (dump_file, " callee:%s\n",
                         IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
            }
          local->can_throw = true;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " checking flags for call:");
      /* NOTE(review): the fourth argument appears to mean "call cannot
         return normally" (noreturn+nothrow, or noreturn with exceptions
         disabled) — confirm against state_from_flags.  */
      state_from_flags (&call_state, &call_looping, flags,
                        ((flags & (ECF_NORETURN | ECF_NOTHROW))
                         == (ECF_NORETURN | ECF_NOTHROW))
                        || (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
                   call_state, call_looping);
    }
  /* Direct functions calls are handled by IPA propagation. */
}
/* Return true if assignment STMT is a candidate for substitution into
   its use: a non-throwing GIMPLE assignment with exactly one SSA
   definition whose single use is a non-PHI statement in the same basic
   block, and which trips none of the blockers checked below (VDEFs,
   memory references at -O0, float-store floats, hard-register RHS,
   calls, volatile operands).  */
static inline bool
is_replaceable_p (gimple stmt)
{
  tree def;
  use_operand_p use_ptr;
  gimple use_stmt;
  location_t def_loc, use_loc;
  tree def_block, use_block;

  /* Only plain assignments are considered.  */
  if (!is_gimple_assign (stmt))
    return false;

  /* A statement that may throw an exception cannot be replaced.  */
  if (stmt_could_throw_p (stmt))
    return false;

  /* Exactly one SSA definition is required.  */
  def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF);
  if (!def)
    return false;

  /* ... and that definition must have exactly one immediate use.  */
  if (!single_imm_use (def, &use_ptr, &use_stmt))
    return false;

  /* The use must be in the same basic block as the definition.  */
  if (gimple_bb (use_stmt) != gimple_bb (stmt))
    return false;

  /* A PHI use sits at the top of the block and is never replaceable.
     (The original code rejected PHIs slightly later — either via the
     location test with a zero use location, or via an explicit check —
     with the identical result.)  */
  if (gimple_code (use_stmt) == GIMPLE_PHI)
    return false;

  /* Without optimization, refuse to fuse statements whose locations or
     lexical blocks differ.  */
  def_loc = gimple_location (stmt);
  def_block = gimple_block (stmt);
  use_loc = gimple_location (use_stmt);
  use_block = gimple_block (use_stmt);
  if (!optimize
      && ((def_loc && def_loc != use_loc)
          || (def_block && def_block != use_block)))
    return false;

  /* There must be no virtual definitions.  */
  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
    return false;

  /* Without alias info we can't move around loads.  */
  if (gimple_references_memory_p (stmt) && !optimize)
    return false;

  /* Float expressions must go through memory if float-store is on.  */
  if (flag_float_store && FLOAT_TYPE_P (gimple_expr_type (stmt)))
    return false;

  /* A register variable on the RHS blocks replacement.  */
  if (gimple_assign_rhs_code (stmt) == VAR_DECL
      && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt)))
    return false;

  /* Function calls are never replaced.  */
  if (is_gimple_call (stmt))
    return false;

  /* Neither is anything with volatile operands.  */
  if (gimple_has_volatile_ops (stmt))
    return false;

  return true;
}