/* Return true if basic block BB contains at least one indirect call,
   i.e. a non-internal GIMPLE_CALL whose callee expression is not a
   plain FUNCTION_DECL.  */
static bool
has_indirect_call (basic_block bb)
{
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      /* Only real (non-internal) calls are of interest.  */
      if (gimple_code (stmt) != GIMPLE_CALL
	  || gimple_call_internal_p (stmt))
	continue;

      tree fn = gimple_call_fn (stmt);
      /* A missing or non-FUNCTION_DECL callee means the call goes
	 through a pointer.  */
      if (fn == NULL || TREE_CODE (fn) != FUNCTION_DECL)
	return true;
    }
  return false;
}
/* Return the low-level (assembler) name of the callee of STMT when one
   can be determined, preferring DECL_ASSEMBLER_NAME over DECL_NAME.
   Returns the placeholder "_anonymous_" otherwise.  */
static const char* get_called_name(const_gimple stmt)
{
	tree op = gimple_call_fn(stmt);

	// peel a possible NON_LVALUE_EXPR wrapper first
	if (TREE_CODE(op) == NON_LVALUE_EXPR)
		op = TREE_OPERAND(op, 0);

	// then strip any number of address-of wrappers
	while (TREE_CODE(op) == ADDR_EXPR)
		op = TREE_OPERAND(op, 0);

	if (TREE_CODE(op) == FUNCTION_DECL) {
		if (DECL_ASSEMBLER_NAME(op))
			return IDENTIFIER_POINTER(DECL_ASSEMBLER_NAME(op));
		if (DECL_NAME(op))
			return IDENTIFIER_POINTER(DECL_NAME(op));
	}

	return "_anonymous_";
}
/* For the indirect call at GSI, attach a HIST_TYPE_INDIR_CALL histogram
   built from the AutoFDO profile MAP (target name, hottest-target count,
   total count).  When TRANSFORM is true, additionally promote the call
   into a speculative direct call to the hottest target and try to
   inline it.  Does nothing when MAP is empty or the call is already
   direct.  */
static void
afdo_indirect_call (gimple_stmt_iterator *gsi, const icall_target_map &map,
		    bool transform)
{
  gimple gs = gsi_stmt (*gsi);
  tree callee;

  if (map.size () == 0)
    return;
  /* Only indirect calls (no known fndecl) are candidates.  */
  gcall *stmt = dyn_cast <gcall *> (gs);
  if ((!stmt) || gimple_call_fndecl (stmt) != NULL_TREE)
    return;
  callee = gimple_call_fn (stmt);

  histogram_value hist = gimple_alloc_histogram_value (
      cfun, HIST_TYPE_INDIR_CALL, stmt, callee);
  hist->n_counters = 3;
  hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
  gimple_add_histogram_value (cfun, stmt, hist);

  /* Sum all target counts and remember the hottest target.  */
  gcov_type total = 0;
  icall_target_map::const_iterator max_iter = map.end ();

  for (icall_target_map::const_iterator iter = map.begin ();
       iter != map.end (); ++iter)
    {
      total += iter->second;
      if (max_iter == map.end () || max_iter->second < iter->second)
	max_iter = iter;
    }

  /* counters[0] smuggles the target's name pointer through a gcov_type
     slot (read back below with the matching cast); counters[1]/[2] are
     the hottest-target and total counts.  */
  hist->hvalue.counters[0]
      = (unsigned long long)afdo_string_table->get_name (max_iter->first);
  hist->hvalue.counters[1] = max_iter->second;
  hist->hvalue.counters[2] = total;

  if (!transform)
    return;

  struct cgraph_edge *indirect_edge
      = cgraph_node::get (current_function_decl)->get_edge (stmt);
  struct cgraph_node *direct_call = cgraph_node::get_for_asmname (
      get_identifier ((const char *) hist->hvalue.counters[0]));

  /* Bail out when the target is unknown, unsuitable, or has no body
     available for inlining.  */
  if (direct_call == NULL || !check_ic_target (stmt, direct_call))
    return;
  if (DECL_STRUCT_FUNCTION (direct_call->decl) == NULL)
    return;

  struct cgraph_edge *new_edge
      = indirect_edge->make_speculative (direct_call, 0, 0);
  new_edge->redirect_call_stmt_to_callee ();
  gimple_remove_histogram_value (cfun, stmt, hist);
  inline_call (new_edge, true, NULL, NULL, false);
}
/* Try to fold the AArch64 builtin call at GSI into an equivalent GIMPLE
   assignment (reduction builtins become REDUC_*_EXPR).  Replaces the
   statement in place and returns true on success, false otherwise.
   The BUILTIN_* macros expand to the case labels for the matching
   builtin function codes.  */
bool
aarch64_gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  tree call = gimple_call_fn (stmt);
  tree fndecl;
  gimple new_stmt = NULL;

  if (call)
    {
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  int fcode = DECL_FUNCTION_CODE (fndecl);
	  int nargs = gimple_call_num_args (stmt);
	  /* Fall back to a dummy pointer for zero-argument calls so the
	     args[0] accesses below stay well-formed.  */
	  tree *args = (nargs > 0
			? gimple_call_arg_ptr (stmt, 0)
			: &error_mark_node);

	  /* We use gimple's REDUC_(PLUS|MIN|MAX)_EXPRs for float, signed int
	     and unsigned int; it will distinguish according to the types of
	     the arguments to the __builtin. */
	  switch (fcode)
	    {
	      BUILTIN_VALL (UNOP, reduc_plus_scal_, 10)
		new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						REDUC_PLUS_EXPR, args[0]);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smax_scal_, 10)
	      BUILTIN_VDQ_BHSI (UNOPU, reduc_umax_scal_, 10)
		new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						REDUC_MAX_EXPR, args[0]);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smin_scal_, 10)
	      BUILTIN_VDQ_BHSI (UNOPU, reduc_umin_scal_, 10)
		new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						REDUC_MIN_EXPR, args[0]);
		break;
	    default:
	      break;
	    }
	}
    }

  if (new_stmt)
    {
      gsi_replace (gsi, new_stmt, true);
      changed = true;
    }

  return changed;
}
/* Return the callee declaration of CALL_STMT.  For direct calls this is
   simply gimple_call_fndecl; for indirect calls the function pointer is
   traced back through its defining statement.  Returns NULL_TREE when
   the callee is a constant function pointer.  */
static tree get_fn_or_fnptr_decl(const gcall *call_stmt)
{
	tree decl = gimple_call_fndecl(call_stmt);
	const_tree fnptr;
	const_gimple def_stmt;

	// direct call: nothing to trace
	if (decl != NULL_TREE)
		return decl;

	fnptr = gimple_call_fn(call_stmt);
	// TODO: assert the constant is 0 here, as nothing else can occur
	if (is_gimple_constant(fnptr))
		return NULL_TREE;

	def_stmt = get_fnptr_def_stmt(fnptr);
	return handle_fnptr_assign(def_stmt);
}
/* * find all C level function pointer dereferences and forcibly set the highest bit of the pointer */ static unsigned int execute_kernexec_fptr(void) { basic_block bb; // 1. loop through BBs and GIMPLE statements FOR_EACH_BB(bb) { gimple_stmt_iterator gsi; for (gsi = gsi_start_bb(bb); !gsi_end_p(gsi); gsi_next(&gsi)) { // gimple match: h_1 = get_fptr (); D.2709_3 = h_1 (x_2(D)); tree fn; gimple call_stmt; // is it a call ... call_stmt = gsi_stmt(gsi); if (!is_gimple_call(call_stmt)) continue; fn = gimple_call_fn(call_stmt); if (TREE_CODE(fn) == ADDR_EXPR) continue; if (TREE_CODE(fn) != SSA_NAME) gcc_unreachable(); // ... through a function pointer if (SSA_NAME_VAR(fn) != NULL_TREE) { fn = SSA_NAME_VAR(fn); if (TREE_CODE(fn) != VAR_DECL && TREE_CODE(fn) != PARM_DECL) { debug_tree(fn); gcc_unreachable(); } } fn = TREE_TYPE(fn); if (TREE_CODE(fn) != POINTER_TYPE) continue; fn = TREE_TYPE(fn); if (TREE_CODE(fn) != FUNCTION_TYPE) continue; kernexec_instrument_fptr(&gsi); //debug_tree(gimple_call_fn(call_stmt)); //print_gimple_stmt(stderr, call_stmt, 0, TDF_LINENO); } } return 0; }
/* Instrument the indirect call at GSI so the function pointer is OR-ed
   with %r10 via inline asm before the call: a fresh SSA fptr is defined
   by a volatile asm and the call is redirected to it.  The #if blocks
   select the pre-4.8 VEC API vs. the newer vec<> API.  */
static void kernexec_instrument_fptr_or(gimple_stmt_iterator *gsi)
{
	gimple asm_or_stmt, call_stmt;
	tree old_fptr, new_fptr, input, output;
#if BUILDING_GCC_VERSION <= 4007
	VEC(tree, gc) *inputs = NULL;
	VEC(tree, gc) *outputs = NULL;
#else
	vec<tree, va_gc> *inputs = NULL;
	vec<tree, va_gc> *outputs = NULL;
#endif

	call_stmt = gsi_stmt(*gsi);
	old_fptr = gimple_call_fn(call_stmt);

	// create temporary fptr variable
	new_fptr = create_tmp_var(TREE_TYPE(old_fptr), "kernexec_or");
#if BUILDING_GCC_VERSION <= 4007
	add_referenced_var(new_fptr);
#endif
	new_fptr = make_ssa_name(new_fptr, NULL);

	// build asm volatile("orq %%r10, %0\n\t" : "=r"(new_fptr) : "0"(old_fptr));
	// constraint strings are built as (constraint, operand) tree lists
	input = build_tree_list(NULL_TREE, build_string(2, "0"));
	input = chainon(NULL_TREE, build_tree_list(input, old_fptr));
	output = build_tree_list(NULL_TREE, build_string(3, "=r"));
	output = chainon(NULL_TREE, build_tree_list(output, new_fptr));
#if BUILDING_GCC_VERSION <= 4007
	VEC_safe_push(tree, gc, inputs, input);
	VEC_safe_push(tree, gc, outputs, output);
#else
	vec_safe_push(inputs, input);
	vec_safe_push(outputs, output);
#endif
	asm_or_stmt = gimple_build_asm_vec("orq %%r10, %0\n\t", inputs, outputs, NULL, NULL);
	// the asm defines the new SSA name and must not be optimized away
	SSA_NAME_DEF_STMT(new_fptr) = asm_or_stmt;
	gimple_asm_set_volatile(asm_or_stmt, true);
	gsi_insert_before(gsi, asm_or_stmt, GSI_SAME_STMT);
	update_stmt(asm_or_stmt);

	// replace call stmt fn with the new fptr
	gimple_call_set_fn(call_stmt, new_fptr);
	update_stmt(call_stmt);
}
/*
 * add special KERNEXEC instrumentation: force MSB of fptr to 1, which will produce
 * a non-canonical address from a userland ptr and will just trigger a GPF on dereference
 */
/* Implementation: cast the fptr to an unsigned long, OR in the top bit,
   cast back, and redirect the call to the new pointer — three GIMPLE
   assignments inserted before the call at GSI.
   NOTE(review): the temporary is created with long_unsigned_type_node
   while the mask/BIT_IOR use long_long_unsigned_type_node — identical
   on LP64 targets, but confirm this mix is intended.  */
static void kernexec_instrument_fptr_bts(gimple_stmt_iterator *gsi)
{
	gimple assign_intptr, assign_new_fptr, call_stmt;
	tree intptr, orptr, old_fptr, new_fptr, kernexec_mask;

	call_stmt = gsi_stmt(*gsi);
	old_fptr = gimple_call_fn(call_stmt);

	// create temporary unsigned long variable used for bitops and cast fptr to it
	intptr = create_tmp_var(long_unsigned_type_node, "kernexec_bts");
#if BUILDING_GCC_VERSION <= 4007
	add_referenced_var(intptr);
#endif
	intptr = make_ssa_name(intptr, NULL);
	assign_intptr = gimple_build_assign(intptr, fold_convert(long_unsigned_type_node, old_fptr));
	SSA_NAME_DEF_STMT(intptr) = assign_intptr;
	gsi_insert_before(gsi, assign_intptr, GSI_SAME_STMT);
	update_stmt(assign_intptr);

	// apply logical or to temporary unsigned long and bitmask
	kernexec_mask = build_int_cstu(long_long_unsigned_type_node, 0x8000000000000000LL);
	// kernexec_mask = build_int_cstu(long_long_unsigned_type_node, 0xffffffff80000000LL);
	orptr = fold_build2(BIT_IOR_EXPR, long_long_unsigned_type_node, intptr, kernexec_mask);
	// second SSA version of the same temporary holds the OR-ed value
	intptr = make_ssa_name(SSA_NAME_VAR(intptr), NULL);
	assign_intptr = gimple_build_assign(intptr, orptr);
	SSA_NAME_DEF_STMT(intptr) = assign_intptr;
	gsi_insert_before(gsi, assign_intptr, GSI_SAME_STMT);
	update_stmt(assign_intptr);

	// cast temporary unsigned long back to a temporary fptr variable
	new_fptr = create_tmp_var(TREE_TYPE(old_fptr), "kernexec_fptr");
#if BUILDING_GCC_VERSION <= 4007
	add_referenced_var(new_fptr);
#endif
	new_fptr = make_ssa_name(new_fptr, NULL);
	assign_new_fptr = gimple_build_assign(new_fptr, fold_convert(TREE_TYPE(old_fptr), intptr));
	SSA_NAME_DEF_STMT(new_fptr) = assign_new_fptr;
	gsi_insert_before(gsi, assign_new_fptr, GSI_SAME_STMT);
	update_stmt(assign_new_fptr);

	// replace call stmt fn with the new fptr
	gimple_call_set_fn(call_stmt, new_fptr);
	update_stmt(call_stmt);
}
/* Per-function pass body: walk every statement of the current function
   and randomly (rand() % 2) insert calls to junk functions before
   eligible call/assign statements, bounded by the global max_calls
   budget.  Inserts slimer's initialization when compiling main(), and
   marks the function as processed afterwards so it is junkified only
   once.  */
static unsigned int slimer_exec(void)
{
	basic_block bb;
	gimple stmt;
	gimple_stmt_iterator gsi;

	/* Skip functions already handled and bodiless (external) decls.  */
	if (has_been_processed(cfun->decl))
		return 0;
	if (DECL_EXTERNAL(cfun->decl))
		return 0;
	if (get_identifier(get_name(cfun->decl)) == get_identifier("main"))
		insert_slimer_init();

	/* Go through the basic blocks of this function */
	FOR_EACH_BB(bb)
	for (gsi=gsi_start_bb(bb); !gsi_end_p(gsi); gsi_next(&gsi)) {
		stmt = gsi_stmt(gsi);
		if (is_gimple_call(stmt) || is_gimple_assign(stmt)) {
			/* If its a call to a function we added already (junk or some
			 * initlization functions), or a function we have previously
			 * analyized, avoid inserting junk data.
			 *
			 * NOTE(review): the test below skips calls whose callee has
			 * NOT been processed, which looks inverted relative to this
			 * comment — confirm intent.  Also note it passes
			 * gimple_call_fn() (the callee expression) rather than the
			 * callee decl to has_been_processed; verify that matches
			 * what analyized_fns stores.  */
			if (is_gimple_call(stmt) && !has_been_processed(gimple_call_fn(stmt)))
				continue;
			else if ((max_calls > 0) && ((rand() % 2) == 0)) {
				/* Coin flip: junkify roughly half the candidates.  */
				insert_call_to_junk_fn(stmt);
				--max_calls;
			}
		}
	}

	/* Mark as being analyized so we avoid trying to junkify it again */
	VEC_safe_push(tree, gc, analyized_fns, cfun->decl);
	return 0;
}
/* Dispatch STMT (an assignment, builtin call, or condition) to the
   match-and-simplify machinery: extract its code and operands into
   *RCODE/OPS — valueizing each SSA operand through VALUEIZE when
   provided — and hand them to gimple_resimplify[123].  Generated
   statements, if any, go to SEQ.  Returns true if a simplification was
   found.  */
bool
gimple_simplify (gimple stmt,
		 code_helper *rcode, tree *ops,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree type = TREE_TYPE (gimple_assign_lhs (stmt));
	switch (gimple_assign_rhs_class (stmt))
	  {
	  case GIMPLE_SINGLE_RHS:
	    /* Wrappers around a single operand: look through them and
	       simplify as a unary operation.  */
	    if (code == REALPART_EXPR
		|| code == IMAGPART_EXPR
		|| code == VIEW_CONVERT_EXPR)
	      {
		tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
		if (valueize && TREE_CODE (op0) == SSA_NAME)
		  {
		    tree tem = valueize (op0);
		    if (tem)
		      op0 = tem;
		  }
		*rcode = code;
		ops[0] = op0;
		return gimple_resimplify1 (seq, rcode, type, ops, valueize);
	      }
	    else if (code == BIT_FIELD_REF)
	      {
		/* BIT_FIELD_REF carries object, size and position.  */
		tree rhs1 = gimple_assign_rhs1 (stmt);
		tree op0 = TREE_OPERAND (rhs1, 0);
		if (valueize && TREE_CODE (op0) == SSA_NAME)
		  {
		    tree tem = valueize (op0);
		    if (tem)
		      op0 = tem;
		  }
		*rcode = code;
		ops[0] = op0;
		ops[1] = TREE_OPERAND (rhs1, 1);
		ops[2] = TREE_OPERAND (rhs1, 2);
		return gimple_resimplify3 (seq, rcode, type, ops, valueize);
	      }
	    else if (code == SSA_NAME
		     && valueize)
	      {
		/* A plain copy: succeed only if valueization produced a
		   different value.  */
		tree op0 = gimple_assign_rhs1 (stmt);
		tree valueized = valueize (op0);
		if (!valueized || op0 == valueized)
		  return false;
		ops[0] = valueized;
		*rcode = TREE_CODE (op0);
		return true;
	      }
	    break;
	  case GIMPLE_UNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      if (valueize && TREE_CODE (rhs1) == SSA_NAME)
		{
		  tree tem = valueize (rhs1);
		  if (tem)
		    rhs1 = tem;
		}
	      *rcode = code;
	      ops[0] = rhs1;
	      return gimple_resimplify1 (seq, rcode, type, ops, valueize);
	    }
	  case GIMPLE_BINARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      if (valueize && TREE_CODE (rhs1) == SSA_NAME)
		{
		  tree tem = valueize (rhs1);
		  if (tem)
		    rhs1 = tem;
		}
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (valueize && TREE_CODE (rhs2) == SSA_NAME)
		{
		  tree tem = valueize (rhs2);
		  if (tem)
		    rhs2 = tem;
		}
	      *rcode = code;
	      ops[0] = rhs1;
	      ops[1] = rhs2;
	      return gimple_resimplify2 (seq, rcode, type, ops, valueize);
	    }
	  case GIMPLE_TERNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      if (valueize && TREE_CODE (rhs1) == SSA_NAME)
		{
		  tree tem = valueize (rhs1);
		  if (tem)
		    rhs1 = tem;
		}
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (valueize && TREE_CODE (rhs2) == SSA_NAME)
		{
		  tree tem = valueize (rhs2);
		  if (tem)
		    rhs2 = tem;
		}
	      tree rhs3 = gimple_assign_rhs3 (stmt);
	      if (valueize && TREE_CODE (rhs3) == SSA_NAME)
		{
		  tree tem = valueize (rhs3);
		  if (tem)
		    rhs3 = tem;
		}
	      *rcode = code;
	      ops[0] = rhs1;
	      ops[1] = rhs2;
	      ops[2] = rhs3;
	      return gimple_resimplify3 (seq, rcode, type, ops, valueize);
	    }
	  default:
	    gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_CALL:
      /* ??? This way we can't simplify calls with side-effects. */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	{
	  tree fn = gimple_call_fn (stmt);
	  /* ??? Internal function support missing. */
	  if (!fn)
	    return false;
	  if (valueize && TREE_CODE (fn) == SSA_NAME)
	    {
	      tree tem = valueize (fn);
	      if (tem)
		fn = tem;
	    }
	  /* Only implicit normal builtins with compatible call types are
	     handled.  */
	  if (!fn
	      || TREE_CODE (fn) != ADDR_EXPR
	      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL
	      || DECL_BUILT_IN_CLASS (TREE_OPERAND (fn, 0)) != BUILT_IN_NORMAL
	      || !builtin_decl_implicit (DECL_FUNCTION_CODE (TREE_OPERAND (fn, 0)))
	      || !gimple_builtin_call_types_compatible_p (stmt,
							  TREE_OPERAND (fn, 0)))
	    return false;

	  tree decl = TREE_OPERAND (fn, 0);
	  tree type = TREE_TYPE (gimple_call_lhs (stmt));
	  switch (gimple_call_num_args (stmt))
	    {
	    case 1:
	      {
		tree arg1 = gimple_call_arg (stmt, 0);
		if (valueize && TREE_CODE (arg1) == SSA_NAME)
		  {
		    tree tem = valueize (arg1);
		    if (tem)
		      arg1 = tem;
		  }
		*rcode = DECL_FUNCTION_CODE (decl);
		ops[0] = arg1;
		return gimple_resimplify1 (seq, rcode, type, ops, valueize);
	      }
	    case 2:
	      {
		tree arg1 = gimple_call_arg (stmt, 0);
		if (valueize && TREE_CODE (arg1) == SSA_NAME)
		  {
		    tree tem = valueize (arg1);
		    if (tem)
		      arg1 = tem;
		  }
		tree arg2 = gimple_call_arg (stmt, 1);
		if (valueize && TREE_CODE (arg2) == SSA_NAME)
		  {
		    tree tem = valueize (arg2);
		    if (tem)
		      arg2 = tem;
		  }
		*rcode = DECL_FUNCTION_CODE (decl);
		ops[0] = arg1;
		ops[1] = arg2;
		return gimple_resimplify2 (seq, rcode, type, ops, valueize);
	      }
	    case 3:
	      {
		tree arg1 = gimple_call_arg (stmt, 0);
		if (valueize && TREE_CODE (arg1) == SSA_NAME)
		  {
		    tree tem = valueize (arg1);
		    if (tem)
		      arg1 = tem;
		  }
		tree arg2 = gimple_call_arg (stmt, 1);
		if (valueize && TREE_CODE (arg2) == SSA_NAME)
		  {
		    tree tem = valueize (arg2);
		    if (tem)
		      arg2 = tem;
		  }
		tree arg3 = gimple_call_arg (stmt, 2);
		if (valueize && TREE_CODE (arg3) == SSA_NAME)
		  {
		    tree tem = valueize (arg3);
		    if (tem)
		      arg3 = tem;
		  }
		*rcode = DECL_FUNCTION_CODE (decl);
		ops[0] = arg1;
		ops[1] = arg2;
		ops[2] = arg3;
		return gimple_resimplify3 (seq, rcode, type, ops, valueize);
	      }
	    default:
	      return false;
	    }
	}
      break;

    case GIMPLE_COND:
      {
	tree lhs = gimple_cond_lhs (stmt);
	if (valueize && TREE_CODE (lhs) == SSA_NAME)
	  {
	    tree tem = valueize (lhs);
	    if (tem)
	      lhs = tem;
	  }
	tree rhs = gimple_cond_rhs (stmt);
	if (valueize && TREE_CODE (rhs) == SSA_NAME)
	  {
	    tree tem = valueize (rhs);
	    if (tem)
	      rhs = tem;
	  }
	*rcode = gimple_cond_code (stmt);
	ops[0] = lhs;
	ops[1] = rhs;
	return gimple_resimplify2 (seq, rcode, boolean_type_node,
				   ops, valueize);
      }

    default:
      break;
    }

  return false;
}
/* Newer-API variant: dispatch STMT to the match-and-simplify machinery.
   Operands are valueized through TOP_VALUEIZE via do_valueize (which
   records in a local flag whether anything changed); VALUEIZE is passed
   down to gimple_resimplify[123] for operands of generated statements.
   Returns true when either a simplification was found or some operand
   was valueized to a different value.  */
bool
gimple_simplify (gimple *stmt,
		 code_helper *rcode, tree *ops,
		 gimple_seq *seq,
		 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree type = TREE_TYPE (gimple_assign_lhs (stmt));
	switch (gimple_assign_rhs_class (stmt))
	  {
	  case GIMPLE_SINGLE_RHS:
	    /* Wrappers around a single operand: look through them and
	       simplify as a unary operation.  */
	    if (code == REALPART_EXPR
		|| code == IMAGPART_EXPR
		|| code == VIEW_CONVERT_EXPR)
	      {
		tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
		bool valueized = false;
		op0 = do_valueize (op0, top_valueize, valueized);
		*rcode = code;
		ops[0] = op0;
		return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
			|| valueized);
	      }
	    else if (code == BIT_FIELD_REF)
	      {
		/* BIT_FIELD_REF carries object, size and position.  */
		tree rhs1 = gimple_assign_rhs1 (stmt);
		tree op0 = TREE_OPERAND (rhs1, 0);
		bool valueized = false;
		op0 = do_valueize (op0, top_valueize, valueized);
		*rcode = code;
		ops[0] = op0;
		ops[1] = TREE_OPERAND (rhs1, 1);
		ops[2] = TREE_OPERAND (rhs1, 2);
		return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
			|| valueized);
	      }
	    else if (code == SSA_NAME
		     && top_valueize)
	      {
		/* A plain copy: succeed only if valueization produced a
		   different value.  */
		tree op0 = gimple_assign_rhs1 (stmt);
		tree valueized = top_valueize (op0);
		if (!valueized || op0 == valueized)
		  return false;
		ops[0] = valueized;
		*rcode = TREE_CODE (op0);
		return true;
	      }
	    break;
	  case GIMPLE_UNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      bool valueized = false;
	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
	      *rcode = code;
	      ops[0] = rhs1;
	      return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
		      || valueized);
	    }
	  case GIMPLE_BINARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      bool valueized = false;
	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
	      rhs2 = do_valueize (rhs2, top_valueize, valueized);
	      *rcode = code;
	      ops[0] = rhs1;
	      ops[1] = rhs2;
	      return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
		      || valueized);
	    }
	  case GIMPLE_TERNARY_RHS:
	    {
	      bool valueized = false;
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      /* If this is a [VEC_]COND_EXPR first try to simplify an
		 embedded GENERIC condition.  */
	      if (code == COND_EXPR
		  || code == VEC_COND_EXPR)
		{
		  if (COMPARISON_CLASS_P (rhs1))
		    {
		      tree lhs = TREE_OPERAND (rhs1, 0);
		      tree rhs = TREE_OPERAND (rhs1, 1);
		      lhs = do_valueize (lhs, top_valueize, valueized);
		      rhs = do_valueize (rhs, top_valueize, valueized);
		      code_helper rcode2 = TREE_CODE (rhs1);
		      tree ops2[3] = {};
		      ops2[0] = lhs;
		      ops2[1] = rhs;
		      if ((gimple_resimplify2 (seq, &rcode2, TREE_TYPE (rhs1),
					       ops2, valueize)
			   || valueized)
			  && rcode2.is_tree_code ())
			{
			  valueized = true;
			  /* Rebuild the condition from the simplified
			     pieces; anything else is left untouched.  */
			  if (TREE_CODE_CLASS ((enum tree_code)rcode2)
			      == tcc_comparison)
			    rhs1 = build2 (rcode2, TREE_TYPE (rhs1),
					   ops2[0], ops2[1]);
			  else if (rcode2 == SSA_NAME
				   || rcode2 == INTEGER_CST)
			    rhs1 = ops2[0];
			  else
			    valueized = false;
			}
		    }
		}
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      tree rhs3 = gimple_assign_rhs3 (stmt);
	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
	      rhs2 = do_valueize (rhs2, top_valueize, valueized);
	      rhs3 = do_valueize (rhs3, top_valueize, valueized);
	      *rcode = code;
	      ops[0] = rhs1;
	      ops[1] = rhs2;
	      ops[2] = rhs3;
	      return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
		      || valueized);
	    }
	  default:
	    gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_CALL:
      /* ??? This way we can't simplify calls with side-effects. */
      if (gimple_call_lhs (stmt) != NULL_TREE
	  && gimple_call_num_args (stmt) >= 1
	  && gimple_call_num_args (stmt) <= 3)
	{
	  tree fn = gimple_call_fn (stmt);
	  /* ??? Internal function support missing. */
	  if (!fn)
	    return false;
	  bool valueized = false;
	  fn = do_valueize (fn, top_valueize, valueized);
	  if (TREE_CODE (fn) != ADDR_EXPR
	      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
	    return false;

	  /* Only implicit normal builtins with compatible call types are
	     handled.  */
	  tree decl = TREE_OPERAND (fn, 0);
	  if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
	      || !builtin_decl_implicit (DECL_FUNCTION_CODE (decl))
	      || !gimple_builtin_call_types_compatible_p (stmt, decl))
	    return false;

	  tree type = TREE_TYPE (gimple_call_lhs (stmt));
	  *rcode = DECL_FUNCTION_CODE (decl);
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      ops[i] = do_valueize (arg, top_valueize, valueized);
	    }
	  switch (gimple_call_num_args (stmt))
	    {
	    case 1:
	      return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
		      || valueized);
	    case 2:
	      return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
		      || valueized);
	    case 3:
	      return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
		      || valueized);
	    default:
	      gcc_unreachable ();
	    }
	}
      break;

    case GIMPLE_COND:
      {
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	bool valueized = false;
	lhs = do_valueize (lhs, top_valueize, valueized);
	rhs = do_valueize (rhs, top_valueize, valueized);
	*rcode = gimple_cond_code (stmt);
	ops[0] = lhs;
	ops[1] = rhs;
	return (gimple_resimplify2 (seq, rcode, boolean_type_node,
				    ops, valueize)
		|| valueized);
      }

    default:
      break;
    }

  return false;
}
/* Dump a human-readable rendering of this hash element to STREAM:
   "STMT [<lhs> = ] <expression> [with <vop>]".  */

void
expr_hash_elt::print (FILE *stream)
{
  fputs ("STMT ", stream);

  if (m_lhs)
    {
      print_generic_expr (stream, m_lhs, 0);
      fputs (" = ", stream);
    }

  switch (m_expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, m_expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
      print_generic_expr (stream, m_expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, m_expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
      print_generic_expr (stream, m_expr.ops.binary.opnd1, 0);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
      print_generic_expr (stream, m_expr.ops.ternary.opnd0, 0);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd1, 0);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd2, 0);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
	gcall *call = m_expr.ops.call.fn_from;
	size_t nargs = m_expr.ops.call.nargs;

	/* Internal calls carry no callee tree, so print their name.  */
	if (gimple_call_internal_p (call))
	  fputs (internal_fn_name (gimple_call_internal_fn (call)), stream);
	else
	  print_generic_expr (stream, gimple_call_fn (call), 0);
	fputs (" (", stream);
	for (size_t argno = 0; argno < nargs; argno++)
	  {
	    print_generic_expr (stream, m_expr.ops.call.args[argno], 0);
	    if (argno + 1 < nargs)
	      fputs (", ", stream);
	  }
	fputs (")", stream);
      }
      break;

    case EXPR_PHI:
      {
	size_t nargs = m_expr.ops.phi.nargs;

	fputs ("PHI <", stream);
	for (size_t argno = 0; argno < nargs; argno++)
	  {
	    print_generic_expr (stream, m_expr.ops.phi.args[argno], 0);
	    if (argno + 1 < nargs)
	      fputs (", ", stream);
	  }
	fputs (">", stream);
      }
      break;
    }

  if (m_vop)
    {
      fputs (" with ", stream);
      print_generic_expr (stream, m_vop, 0);
    }

  fputs ("\n", stream);
}
/* Accumulate a hash for EXPR into HSTATE.  Commutative binary/ternary
   operators hash their first two operands order-insensitively so that
   equal-under-commutativity expressions collide; calls hash either the
   internal-fn code or the callee expression plus all arguments.  */
static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
	 Don't hash the type, that can lead to having nodes which
	 compare equal according to operand_equal_p, but which
	 have different hash codes. */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
	  || expr->ops.unary.op == NON_LVALUE_EXPR)
	hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
	inchash::add_expr_commutative (expr->ops.binary.opnd0,
				       expr->ops.binary.opnd1, hstate);
      else
	{
	  inchash::add_expr (expr->ops.binary.opnd0, hstate);
	  inchash::add_expr (expr->ops.binary.opnd1, hstate);
	}
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
	inchash::add_expr_commutative (expr->ops.ternary.opnd0,
				       expr->ops.ternary.opnd1, hstate);
      else
	{
	  inchash::add_expr (expr->ops.ternary.opnd0, hstate);
	  inchash::add_expr (expr->ops.ternary.opnd1, hstate);
	}
      /* The third operand is never commuted.  */
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
	size_t i;
	enum tree_code code = CALL_EXPR;
	gcall *fn_from;

	hstate.add_object (code);
	fn_from = expr->ops.call.fn_from;
	/* Internal calls have no callee tree; hash the fn code.  */
	if (gimple_call_internal_p (fn_from))
	  hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
	else
	  inchash::add_expr (gimple_call_fn (fn_from), hstate);
	for (i = 0; i < expr->ops.call.nargs; i++)
	  inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
	size_t i;

	for (i = 0; i < expr->ops.phi.nargs; i++)
	  inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}