static unsigned int
ubsan_pass (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple stmt = gsi_stmt (gsi);
          if (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          if (flag_sanitize & SANITIZE_NULL)
            {
              if (gimple_store_p (stmt))
                instrument_null (gsi, true);
              if (gimple_assign_load_p (stmt))
                instrument_null (gsi, false);
            }
          gsi_next (&gsi);
        }
    }
  return 0;
}
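What instrument_null inserts before a flagged load or store is, conceptually, a null check that branches into the ubsan runtime. A minimal source-level sketch of that effect (not the pass's actual GIMPLE output; __builtin_trap stands in for the real __ubsan_handle_* runtime call):

/* A sketch only: the real pass emits a call to the ubsan runtime,
   not __builtin_trap.  */
int
load_instrumented (int *p)
{
  if (p == 0)
    __builtin_trap ();   /* ubsan would report "load of null pointer" */
  return *p;             /* the original load, now guarded */
}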
static bool
afdo_set_bb_count (basic_block bb, const stmt_set &promoted)
{
  gimple_stmt_iterator gsi;
  edge e;
  edge_iterator ei;
  gcov_type max_count = 0;
  bool has_annotated = false;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      count_info info;
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_clobber_p (stmt) || is_gimple_debug (stmt))
        continue;
      if (afdo_source_profile->get_count_info (stmt, &info))
        {
          if (info.count > max_count)
            max_count = info.count;
          has_annotated = true;
          if (info.targets.size () > 0
              && promoted.find (stmt) == promoted.end ())
            afdo_vpt (&gsi, info.targets, false);
        }
    }

  if (!has_annotated)
    return false;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    afdo_source_profile->mark_annotated (gimple_location (gsi_stmt (gsi)));
  for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi);
       gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();
      size_t i;
      for (i = 0; i < gimple_phi_num_args (phi); i++)
        afdo_source_profile->mark_annotated (gimple_phi_arg_location (phi, i));
    }
  FOR_EACH_EDGE (e, ei, bb->succs)
    afdo_source_profile->mark_annotated (e->goto_locus);

  bb->count = profile_count::from_gcov_type (max_count).afdo ();
  return true;
}
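The annotation rule above leans on the fact that all statements of a basic block execute together: each statement's sample count is an independent estimate of the block count, and taking the maximum is robust to under-sampled statements. A standalone sketch of just that rule, with illustrative names and types rather than GCC's:

typedef long long sample_count;   /* illustrative stand-in for gcov_type */

static sample_count
block_count_from_samples (const sample_count *stmt_counts, int n)
{
  sample_count max_count = 0;
  for (int i = 0; i < n; i++)
    if (stmt_counts[i] > max_count)
      max_count = stmt_counts[i];
  return max_count;   /* mirrors the max_count loop in afdo_set_bb_count */
}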
static unsigned int
ubsan_pass (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple stmt = gsi_stmt (gsi);
          if (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          if ((flag_sanitize & SANITIZE_SI_OVERFLOW)
              && is_gimple_assign (stmt))
            instrument_si_overflow (gsi);

          if (flag_sanitize & SANITIZE_NULL)
            {
              if (gimple_store_p (stmt))
                instrument_null (gsi, true);
              if (gimple_assign_load_p (stmt))
                instrument_null (gsi, false);
            }

          if (flag_sanitize & (SANITIZE_BOOL | SANITIZE_ENUM)
              && gimple_assign_load_p (stmt))
            instrument_bool_enum_load (&gsi);

          gsi_next (&gsi);
        }
    }
  return 0;
}
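This later version of the pass also guards signed arithmetic. Conceptually, instrument_si_overflow turns a signed addition into a checked one; a source-level sketch using the real GCC builtin __builtin_add_overflow to model the check (the pass itself emits internal-function GIMPLE plus a ubsan runtime call, not __builtin_trap):

int
add_instrumented (int a, int b)
{
  int r;
  if (__builtin_add_overflow (a, b, &r))   /* true on signed overflow */
    __builtin_trap ();   /* ubsan would report "signed integer overflow" */
  return r;
}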
/* Look at the statement pointed to by GSIP and figure out what
   interesting side effects it has.  */

static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
                            ipa ? check_ipa_load : check_load,
                            ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, " can throw; looping\n");
          local->looping = true;
        }
      if (stmt_can_throw_external (stmt))
        {
          if (dump_file)
            fprintf (dump_file, " can throw externally\n");
          local->can_throw = true;
        }
      else if (dump_file)
        fprintf (dump_file, " can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, stmt, ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (stmt)))
        /* Target of long jump.  */
        {
          if (dump_file)
            fprintf (dump_file, " nonlocal label is not const/pure\n");
          local->pure_const_state = IPA_NEITHER;
        }
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (stmt))
        {
          if (dump_file)
            fprintf (dump_file, " memory asm clobber is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
        }
      if (gimple_asm_volatile_p (stmt))
        {
          if (dump_file)
            fprintf (dump_file, " volatile is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;
        }
      return;
    default:
      break;
    }
}
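To make the classification concrete, here are illustrative functions (not from GCC) and how this scan would treat them: the first touches no memory (a const candidate), the second only reads global memory (a pure candidate), and the third stores to memory, which the check_store callback turns into IPA_NEITHER:

int global;

int cst (int x) { return x + 1; }      /* no loads/stores: const candidate */
int pure_fn (void) { return global; }  /* reads memory only: pure candidate */
void neither (int x) { global = x; }   /* check_store => IPA_NEITHER */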
static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
          || gimple_code (stmt) == GIMPLE_RETURN
          || gimple_clobber_p (stmt)
          || is_gimple_debug (stmt))
        continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
        {
          call = stmt;
          ass_var = gimple_call_lhs (stmt);
          break;
        }

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
          || gimple_has_volatile_ops (stmt))
        return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens,
     in (e.g.) "*p = foo()" where foo returns a struct.  In this case we
     won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !DECL_BUILT_IN (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
           param && idx < gimple_call_num_args (call);
           param = DECL_CHAIN (param), idx++)
        {
          arg = gimple_call_arg (call, idx);
          if (param != arg)
            {
              /* Make sure there are no problems with copying.  The parameter
                 must have a copyable type and the two arguments must have
                 reasonably equivalent types.  The latter requirement could be
                 relaxed if we emitted a suitable type conversion
                 statement.  */
              if (!is_gimple_reg_type (TREE_TYPE (param))
                  || !useless_type_conversion_p (TREE_TYPE (param),
                                                 TREE_TYPE (arg)))
                break;

              /* The parameter should be a real operand, so that the phi node
                 created for it at the start of the function has the meaning
                 of copying the value.  This test implies is_gimple_reg_type
                 from the previous condition, however this one could be
                 relaxed by being more careful with copying the new value
                 of the parameter (emitting appropriate GIMPLE_ASSIGN and
                 updating the virtual operands).  */
              if (!is_gimple_reg (param))
                break;
            }
        }
      if (idx == gimple_call_num_args (call) && !param)
        tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer to local
     variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
          && auto_var_in_fn_p (var, cfun->decl)
          && (ref_maybe_used_by_stmt_p (call, var)
              || call_may_clobber_ref_p (call, var)))
        return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;
  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
        {
          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
          agsi = gsi_start_bb (abb);
        }

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
        continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
        break;

      if (gimple_clobber_p (stmt))
        continue;

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
        return;

      /* This is a gimple assign.  */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
        return;

      if (tmp_a)
        {
          tree type = TREE_TYPE (tmp_a);
          if (a)
            a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
          else
            a = tmp_a;
        }
      if (tmp_m)
        {
          tree type = TREE_TYPE (tmp_m);
          if (m)
            m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
          else
            m = tmp_m;

          if (a)
            a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
        }
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion
      && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
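The mult/add accumulators collected above are what allow an "almost" tail call, one followed by a multiplication or addition, to still be turned into tail recursion. Illustrative source (not GCC code): in fact below, the recursive call is followed by a MULT_EXPR, so find_tail_calls records mult = n and add = NULL_TREE, and the eventual transformation computes the same thing as the hand-written accumulator form fact_acc:

int
fact (int n)
{
  if (n <= 1)
    return 1;
  return n * fact (n - 1);   /* call + MULT_EXPR: nw->mult would be n */
}

/* Accumulator form equivalent to what the transformed loop computes;
   call as fact_acc (n, 1).  */
int
fact_acc (int n, int acc)
{
  if (n <= 1)
    return acc;
  return fact_acc (n - 1, acc * n);
}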