/* Return true if T is a valid GIMPLE condition: either a GIMPLE value
   or a non-throwing comparison of two GIMPLE values.  */

bool
is_gimple_condexpr (tree t)
{
  return (is_gimple_val (t)
          || (COMPARISON_CLASS_P (t)
              && !tree_could_throw_p (t)
              && is_gimple_val (TREE_OPERAND (t, 0))
              && is_gimple_val (TREE_OPERAND (t, 1))));
}
/* Push the expression described by RCODE, TYPE and OPS as a statement
   onto SEQ if necessary and return a GIMPLE value denoting the value of
   the expression.  If RES is not NULL then the result is always RES and
   even GIMPLE values are pushed onto SEQ.  */

tree
maybe_push_res_to_seq (code_helper rcode, tree type, tree *ops,
                       gimple_seq *seq, tree res)
{
  if (rcode.is_tree_code ())
    {
      if (!res
          && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
              || ((tree_code) rcode) == ADDR_EXPR)
          && is_gimple_val (ops[0]))
        return ops[0];
      if (!seq)
        return NULL_TREE;
      /* Play safe and do not allow abnormals to be mentioned in
         newly created statements.  */
      if ((TREE_CODE (ops[0]) == SSA_NAME
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
          || (ops[1]
              && TREE_CODE (ops[1]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
          || (ops[2]
              && TREE_CODE (ops[2]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
        return NULL_TREE;
      if (!res)
        res = make_ssa_name (type, NULL);
      maybe_build_generic_op (rcode, type, &ops[0], ops[1], ops[2]);
      gimple new_stmt = gimple_build_assign_with_ops (rcode, res,
                                                      ops[0], ops[1], ops[2]);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
  else
    {
      if (!seq)
        return NULL_TREE;
      tree decl = builtin_decl_implicit (rcode);
      if (!decl)
        return NULL_TREE;
      unsigned nargs = type_num_arguments (TREE_TYPE (decl));
      gcc_assert (nargs <= 3);
      /* Play safe and do not allow abnormals to be mentioned in
         newly created statements.  */
      if ((TREE_CODE (ops[0]) == SSA_NAME
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
          || (nargs >= 2
              && TREE_CODE (ops[1]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
          || (nargs == 3
              && TREE_CODE (ops[2]) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
        return NULL_TREE;
      if (!res)
        res = make_ssa_name (type, NULL);
      gimple new_stmt = gimple_build_call (decl, nargs, ops[0], ops[1], ops[2]);
      gimple_call_set_lhs (new_stmt, res);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
}
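/* Illustrative usage sketch only, not from the GCC sources: materialize a
   simple binary operation through maybe_push_res_to_seq.  The helper name
   build_plus_stmt and the operands TYPE/A/B are assumptions made for the
   example.  */

static tree
build_plus_stmt (tree type, tree a, tree b, gimple_seq *seq)
{
  tree ops[3] = { a, b, NULL_TREE };

  /* With a plain tree code this appends "res = a + b" to *SEQ (unless an
     operand mentions an abnormal SSA name, or SEQ is NULL) and returns
     the freshly created SSA name holding the result.  */
  return maybe_push_res_to_seq (PLUS_EXPR, type, ops, seq, NULL_TREE);
}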
/* Return true if T is a valid operand for a GIMPLE_ASM statement:
   a variable living in a hard register, or any GIMPLE value.  */

bool
is_gimple_asm_val (tree t)
{
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return true;

  return is_gimple_val (t);
}
bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  Also force a temporary if the type doesn't need
     to be stored in memory, since it's cheap and prevents erroneous
     tailcalls (PR 17526).  */
  if (is_gimple_reg_type (TREE_TYPE (t))
      || TYPE_MODE (TREE_TYPE (t)) != BLKmode)
    return is_gimple_val (t);
  else
    return is_gimple_formal_tmp_rhs (t);
}
/* Gimplify EXPR into a sequence of statements in *STMTS such that the
   result satisfies the predicate GIMPLE_TEST_F.  If VAR is supplied, the
   value is assigned to it.  Return the resulting operand, or NULL_TREE
   for a void expression.  */

tree
force_gimple_operand_1 (tree expr, gimple_seq *stmts,
                        gimple_predicate gimple_test_f, tree var)
{
  enum gimplify_status ret;
  location_t saved_location;

  *stmts = NULL;

  /* gimple_test_f might be more strict than is_gimple_val, make
     sure we pass both.  Just checking gimple_test_f doesn't work
     because most gimple predicates do not work recursively.  */
  if (is_gimple_val (expr)
      && (*gimple_test_f) (expr))
    return expr;

  push_gimplify_context (gimple_in_ssa_p (cfun), true);
  saved_location = input_location;
  input_location = UNKNOWN_LOCATION;

  if (var)
    {
      if (gimple_in_ssa_p (cfun) && is_gimple_reg (var))
        var = make_ssa_name (var, NULL);
      expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
    }

  if (TREE_CODE (expr) != MODIFY_EXPR
      && TREE_TYPE (expr) == void_type_node)
    {
      gimplify_and_add (expr, stmts);
      expr = NULL_TREE;
    }
  else
    {
      ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
      gcc_assert (ret != GS_ERROR);
    }

  input_location = saved_location;
  pop_gimplify_context (NULL);

  return expr;
}
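/* Illustrative usage sketch only (assumed helper, not from the GCC
   sources): gimplify an arbitrary tree EXPR into a GIMPLE value and
   insert any supporting statements before the iterator GSI.  */

static tree
force_to_val_before (gimple_stmt_iterator *gsi, tree expr)
{
  gimple_seq stmts = NULL;
  tree val = force_gimple_operand_1 (expr, &stmts, is_gimple_val, NULL_TREE);

  /* Any statements needed to compute VAL were collected in STMTS;
     emit them ahead of the statement at GSI.  */
  if (!gimple_seq_empty_p (stmts))
    gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);

  return val;
}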
/* Return true if T is a valid RHS for an assignment to a formal
   temporary: any unary, binary or comparison expression, a number of
   other expression and constant kinds, or anything that is already a
   GIMPLE lvalue or value.  */

bool
is_gimple_formal_tmp_rhs (tree t)
{
  enum tree_code code = TREE_CODE (t);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
      return true;

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case ADDR_EXPR:
    case CALL_EXPR:
    case CONSTRUCTOR:
    case COMPLEX_EXPR:
    case INTEGER_CST:
    case REAL_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
      return true;

    default:
      break;
    }

  return is_gimple_lvalue (t) || is_gimple_val (t);
}
/* Return true if T is a valid GIMPLE address: an ADDR_EXPR whose operand
   is a constant, a MEM_REF, or a declaration, reached through a chain of
   handled components whose array indices are all GIMPLE values.  */

bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF || TREE_CODE (op) == ARRAY_RANGE_REF)
          && !is_gimple_val (TREE_OPERAND (op, 1)))
        return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.  */

gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
          tree unregister_fncall_param, register_fncall_param;

          /* Variable-sized objects should have had their sizes gimplified
             by the time we get here.  */
          size = fold_convert (size_type_node,
                               TYPE_SIZE_UNIT (TREE_TYPE (decl)));
          gcc_assert (is_gimple_val (size));

          unregister_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
                                                 unregister_fncall_param,
                                                 size,
                                                 integer_three_node);

          variable_name = mf_varname_tree (decl);
          register_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
             "name") */
          register_fncall = gimple_build_call (mf_register_fndecl, 4,
                                               register_fncall_param,
                                               size,
                                               integer_three_node,
                                               variable_name);

          /* Accumulate the two calls.  */
          gimple_set_location (register_fncall, location);
          gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.  */
          if (gsi_end_p (initially_stmts))
            {
              if (!mf_artificial (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
                         DECL_NAME (decl));
            }
          else
            {
              gsi_insert_before (&initially_stmts, register_fncall,
                                 GSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
            }
          mf_mark (decl);
        }

      decl = DECL_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
    {
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = NULL;

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}
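/* Conceptual source-level view of the transformation above (sketch only;
   the pass works on GIMPLE, not C source, and the exact "name" string
   produced by mf_varname_tree is an assumption).  For a function with an
   eligible automatic variable:

     void f (void)
     {
       int buf[16];
       ...
     }

   the body is wrapped roughly as:

     void f (void)
     {
       int buf[16];
       __mf_register (&buf, sizeof (buf), __MF_TYPE_STACK, "f:buf");
       try
         {
           ...
         }
       finally
         {
           __mf_unregister (&buf, sizeof (buf), __MF_TYPE_STACK);
         }
     }
*/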
/* Return true if T is a valid function address for a GIMPLE call:
   either an OBJ_TYPE_REF or any GIMPLE value.  */

bool
is_gimple_call_addr (tree t)
{
  return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
}
/* Create and return a memory reference of type TYPE for the address ADDR,
   using alias pointer type ALIAS_PTR_TYPE.  IV_CAND and BASE_HINT guide
   the choice of the base; SPEED selects optimization for speed or size.
   Statements needed to compute parts of the address are emitted before
   GSI.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
                tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
                                fold_build2 (MULT_EXPR, sizetype,
                                             parts.index, parts.step),
                                true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, eventually forcing it to register.  */
      if (parts.base)
        {
          gcc_assert (useless_type_conversion_p (sizetype,
                                                 TREE_TYPE (parts.base)));

          if (parts.index)
            {
              parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (tmp, parts.base),
                        is_gimple_mem_ref_addr, NULL_TREE, true,
                        GSI_SAME_STMT);
            }
          else
            {
              parts.index = parts.base;
              parts.base = tmp;
            }
        }
      else
        parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
        {
          parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (parts.base, parts.index),
                        is_gimple_mem_ref_addr, NULL_TREE, true,
                        GSI_SAME_STMT);
        }
      else
        parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
        {
          parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (parts.base, parts.offset),
                        is_gimple_mem_ref_addr, NULL_TREE, true,
                        GSI_SAME_STMT);
        }
      else
        parts.base = parts.offset;
      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
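/* Illustrative sketch only (assumed example, not from the GCC sources):
   the address being assembled above has the shape

     symbol + base + index * step + offset

   For a C access like a[i + 3] with 4-byte elements, addr_to_parts might
   split the affine address roughly as

     symbol = &a, index = i, step = 4, offset = 12

   and the fallback code above progressively folds symbol, index and
   offset into base whenever the full form cannot be expressed as a
   single memory reference on the target.  */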
/* Return true if T is a GIMPLE condition: a GIMPLE value or any
   comparison (a laxer variant of the predicate above, which additionally
   requires the comparison to be non-throwing with GIMPLE-value
   operands).  */

bool
is_gimple_condexpr (tree t)
{
  return (is_gimple_val (t) || COMPARISON_CLASS_P (t));
}
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.  */

gimple_seq
mx_register_decls (tree decl, gimple_seq seq, gimple stmt,
                   location_t location, bool func_args)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);
  bool sframe_inserted = false;
  size_t front_rz_size, rear_rz_size;
  tree fsize, rsize, size;
  gimple uninit_fncall_front, uninit_fncall_rear, init_fncall_front,
         init_fncall_rear, init_assign_stmt;
  tree fncall_param_front, fncall_param_rear;
  int map_ret;

  while (decl != NULL_TREE)
    {
      if ((mf_decl_eligible_p (decl)
           || TREE_CODE(TREE_TYPE(decl)) == ARRAY_TYPE)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl)
          && get_name(decl))
        {
          DEBUGLOG("DEBUG Instrumenting %s is_complete_type %d\n",
                   IDENTIFIER_POINTER(DECL_NAME(decl)),
                   COMPLETE_TYPE_P(decl));

          /* Construct a tree corresponding to the type
               struct {
                 unsigned int rz_front[6U];
                 original variable
                 unsigned int rz_rear[6U];
               };  */

          if (!sframe_inserted)
            {
              gimple ensure_fn_call
                = gimple_build_call (lbc_ensure_sframe_bitmap_fndecl, 0);
              gimple_set_location (ensure_fn_call, location);
              gsi_insert_before (&initially_stmts, ensure_fn_call,
                                 GSI_SAME_STMT);

              sframe_inserted = true;
            }

          // Calculate the zone sizes.
          size_t element_size = 0, request_size = 0;
          if (COMPLETE_TYPE_P(decl))
            {
              request_size = TREE_INT_CST_LOW(TYPE_SIZE_UNIT(TREE_TYPE(decl)));
              if (TREE_CODE(TREE_TYPE(decl)) == ARRAY_TYPE)
                element_size
                  = TREE_INT_CST_LOW(TYPE_SIZE_UNIT(TREE_TYPE(TREE_TYPE(decl))));
              else
                element_size = request_size;
            }
          calculate_zone_sizes(element_size, request_size, /*global*/ false,
                               COMPLETE_TYPE_P(decl),
                               &front_rz_size, &rear_rz_size);
          DEBUGLOG("DEBUG *SIZES* req_size %u, ele_size %u, fsize %u, rsize %u\n",
                   request_size, element_size, front_rz_size, rear_rz_size);

          tree struct_type = create_struct_type(decl, front_rz_size,
                                                rear_rz_size);
          tree struct_var = create_struct_var(struct_type, decl, location);
          declare_vars(struct_var, stmt, 0);

          /* Insert the mapping into the hashtable.  */
          PWord_t PV;
          JSLI(PV, decl_map, mf_varname_tree(decl));
          gcc_assert(PV);
          *PV = (PWord_t) struct_var;

          fsize = convert (unsigned_type_node, size_int(front_rz_size));
          gcc_assert (is_gimple_val (fsize));

          tree rz_front = TYPE_FIELDS(struct_type);
          fncall_param_front
            = mf_mark (build1 (ADDR_EXPR, ptr_type_node,
                               build3 (COMPONENT_REF, TREE_TYPE(rz_front),
                                       struct_var, rz_front, NULL_TREE)));
          uninit_fncall_front
            = gimple_build_call (lbc_uninit_front_rz_fndecl, 2,
                                 fncall_param_front, fsize);
          init_fncall_front
            = gimple_build_call (lbc_init_front_rz_fndecl, 2,
                                 fncall_param_front, fsize);
          gimple_set_location (init_fncall_front, location);
          gimple_set_location (uninit_fncall_front, location);

          // Incomplete types have only a front red zone.
          if (COMPLETE_TYPE_P(decl))
            {
              rsize = convert (unsigned_type_node, size_int(rear_rz_size));
              gcc_assert (is_gimple_val (rsize));

              tree rz_rear = DECL_CHAIN(DECL_CHAIN(TYPE_FIELDS (struct_type)));
              fncall_param_rear
                = mf_mark (build1 (ADDR_EXPR, ptr_type_node,
                                   build3 (COMPONENT_REF, TREE_TYPE(rz_rear),
                                           struct_var, rz_rear, NULL_TREE)));
              init_fncall_rear
                = gimple_build_call (lbc_init_rear_rz_fndecl, 2,
                                     fncall_param_rear, rsize);
              uninit_fncall_rear
                = gimple_build_call (lbc_uninit_rear_rz_fndecl, 2,
                                     fncall_param_rear, rsize);
              gimple_set_location (init_fncall_rear, location);
              gimple_set_location (uninit_fncall_rear, location);
            }

          // TODO Do I need this?
#if 0
          if (DECL_INITIAL(decl) != NULL_TREE)
            {
              // This code never seems to be executed for something like
              // "int i = 10;".  Looking at the tree dump, that appears to be
              // because by the time we get here such statements have already
              // been split into "int i;" and "i = 10;".  Left in just in case.
              tree orig_var_type = DECL_CHAIN(TYPE_FIELDS (struct_type));
              tree orig_var_lval
                = mf_mark (build3 (COMPONENT_REF, TREE_TYPE(orig_var_type),
                                   struct_var, orig_var_type, NULL_TREE));
              init_assign_stmt = gimple_build_assign(orig_var_lval,
                                                     DECL_INITIAL(decl));
              gimple_set_location (init_assign_stmt, location);
            }
#endif

          if (gsi_end_p (initially_stmts))
            {
              // TODO handle this.
              if (!DECL_ARTIFICIAL (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
                         DECL_NAME (decl));
            }
          else
            {
#if 0
              // Insert the declaration initializer.
              if (DECL_INITIAL(decl) != NULL_TREE)
                gsi_insert_before (&initially_stmts, init_assign_stmt,
                                   GSI_SAME_STMT);
#endif
              //gsi_insert_before (&initially_stmts, register_fncall,
              //                   GSI_SAME_STMT);
              gsi_insert_before (&initially_stmts, init_fncall_front,
                                 GSI_SAME_STMT);
              if (COMPLETE_TYPE_P(decl))
                gsi_insert_before (&initially_stmts, init_fncall_rear,
                                   GSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              //gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
              gimple_seq_add_stmt (&finally_stmts, uninit_fncall_front);
              if (COMPLETE_TYPE_P(decl))
                gimple_seq_add_stmt (&finally_stmts, uninit_fncall_rear);
            }
          mf_mark (decl);
        }

      decl = DECL_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
    {
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = gimple_seq_alloc ();

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}
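/* Conceptual source-level view of the rewrite above (sketch only; the
   pass works on GIMPLE, the wrapper type and variable names are
   assumptions, and the zone sizes come from calculate_zone_sizes).  A
   declaration such as

     char buf[10];

   is replaced by an instance of a synthesized wrapper type with front
   and rear red zones, e.g.

     struct buf_rz
     {
       unsigned int rz_front[6U];
       char buf[10];
       unsigned int rz_rear[6U];
     } buf_shadow;

   The lbc_init_front_rz/lbc_init_rear_rz calls mark the zones on entry,
   and the matching uninit calls clear them in the TRY_FINALLY cleanup.
   Incomplete types only get the front zone.  */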
/* Given COND, the condition from a COND_EXPR, try to forward propagate
   the defining statement of the tested SSA_NAME into the condition.
   Store the tested variable in *TEST_VAR_P and return the replacement
   condition, or NULL_TREE if no propagation is possible.  */

static tree
forward_propagate_into_cond_1 (tree cond, tree *test_var_p)
{
  tree new_cond = NULL_TREE;
  enum tree_code cond_code = TREE_CODE (cond);
  tree test_var = NULL_TREE;
  tree def;
  tree def_rhs;

  /* If the condition is not a lone variable or an equality test of an
     SSA_NAME against an integral constant, then we do not have an
     optimizable case.

     Note these conditions also ensure the COND_EXPR has no
     virtual operands or other side effects.  */
  if (cond_code != SSA_NAME
      && !((cond_code == EQ_EXPR || cond_code == NE_EXPR)
           && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
           && CONSTANT_CLASS_P (TREE_OPERAND (cond, 1))
           && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (cond, 1)))))
    return NULL_TREE;

  /* Extract the single variable used in the test into TEST_VAR.  */
  if (cond_code == SSA_NAME)
    test_var = cond;
  else
    test_var = TREE_OPERAND (cond, 0);

  /* Now get the defining statement for TEST_VAR.  Skip this case if it's
     not defined by some MODIFY_EXPR.  */
  def = SSA_NAME_DEF_STMT (test_var);
  if (TREE_CODE (def) != MODIFY_EXPR)
    return NULL_TREE;

  def_rhs = TREE_OPERAND (def, 1);

  /* If TEST_VAR is set by adding or subtracting a constant from an
     SSA_NAME, then it is interesting to us as we can adjust the constant
     in the conditional and thus eliminate the arithmetic operation.  */
  if (TREE_CODE (def_rhs) == PLUS_EXPR
      || TREE_CODE (def_rhs) == MINUS_EXPR)
    {
      tree op0 = TREE_OPERAND (def_rhs, 0);
      tree op1 = TREE_OPERAND (def_rhs, 1);

      /* The first operand must be an SSA_NAME and the second
         operand must be a constant.  */
      if (TREE_CODE (op0) != SSA_NAME
          || !CONSTANT_CLASS_P (op1)
          || !INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        return NULL_TREE;

      /* Don't propagate if the first operand occurs in an abnormal PHI.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
        return NULL_TREE;

      if (has_single_use (test_var))
        {
          enum tree_code new_code;
          tree t;

          /* If the variable was defined via X + C, then we must subtract
             C from the constant in the conditional.  Otherwise we add C
             to the constant in the conditional.  The result must fold
             into a valid gimple operand to be optimizable.  */
          new_code = (TREE_CODE (def_rhs) == PLUS_EXPR
                      ? MINUS_EXPR : PLUS_EXPR);
          t = int_const_binop (new_code, TREE_OPERAND (cond, 1), op1, 0);
          if (!is_gimple_val (t))
            return NULL_TREE;

          new_cond = build (cond_code, boolean_type_node, op0, t);
        }
    }

  /* These cases require comparisons of a naked SSA_NAME or
     comparison of an SSA_NAME against zero or one.  */
  else if (TREE_CODE (cond) == SSA_NAME
           || integer_zerop (TREE_OPERAND (cond, 1))
           || integer_onep (TREE_OPERAND (cond, 1)))
    {
      /* If TEST_VAR is set from a relational operation between two
         SSA_NAMEs or a combination of an SSA_NAME and a constant, then
         it is interesting.  */
      if (COMPARISON_CLASS_P (def_rhs))
        {
          tree op0 = TREE_OPERAND (def_rhs, 0);
          tree op1 = TREE_OPERAND (def_rhs, 1);

          /* Both operands of DEF_RHS must be SSA_NAMEs or constants.  */
          if ((TREE_CODE (op0) != SSA_NAME
               && !is_gimple_min_invariant (op0))
              || (TREE_CODE (op1) != SSA_NAME
                  && !is_gimple_min_invariant (op1)))
            return NULL_TREE;

          /* Don't propagate if the first operand occurs in an
             abnormal PHI.  */
          if (TREE_CODE (op0) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
            return NULL_TREE;

          /* Don't propagate if the second operand occurs in an
             abnormal PHI.  */
          if (TREE_CODE (op1) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op1))
            return NULL_TREE;

          if (has_single_use (test_var))
            {
              /* TEST_VAR was set from a relational operator.  */
              new_cond = build (TREE_CODE (def_rhs),
                                boolean_type_node, op0, op1);

              /* Invert the conditional if necessary.  */
              if ((cond_code == EQ_EXPR
                   && integer_zerop (TREE_OPERAND (cond, 1)))
                  || (cond_code == NE_EXPR
                      && integer_onep (TREE_OPERAND (cond, 1))))
                {
                  new_cond = invert_truthvalue (new_cond);

                  /* If we did not get a simple relational expression or
                     bare SSA_NAME, then we can not optimize this case.  */
                  if (!COMPARISON_CLASS_P (new_cond)
                      && TREE_CODE (new_cond) != SSA_NAME)
                    new_cond = NULL_TREE;
                }
            }
        }

      /* If TEST_VAR is set from a TRUTH_NOT_EXPR, then it is
         interesting.  */
      else if (TREE_CODE (def_rhs) == TRUTH_NOT_EXPR)
        {
          enum tree_code new_code;

          def_rhs = TREE_OPERAND (def_rhs, 0);

          /* DEF_RHS must be an SSA_NAME or constant.  */
          if (TREE_CODE (def_rhs) != SSA_NAME
              && !is_gimple_min_invariant (def_rhs))
            return NULL_TREE;

          /* Don't propagate if the operand occurs in an abnormal PHI.  */
          if (TREE_CODE (def_rhs) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_rhs))
            return NULL_TREE;

          if (cond_code == SSA_NAME
              || (cond_code == NE_EXPR
                  && integer_zerop (TREE_OPERAND (cond, 1)))
              || (cond_code == EQ_EXPR
                  && integer_onep (TREE_OPERAND (cond, 1))))
            new_code = EQ_EXPR;
          else
            new_code = NE_EXPR;

          new_cond = build2 (new_code, boolean_type_node, def_rhs,
                             fold_convert (TREE_TYPE (def_rhs),
                                           integer_zero_node));
        }

      /* If TEST_VAR was set from a cast of an integer type to a boolean
         type or a cast of a boolean to an integral, then it is
         interesting.  */
      else if (TREE_CODE (def_rhs) == NOP_EXPR
               || TREE_CODE (def_rhs) == CONVERT_EXPR)
        {
          tree outer_type;
          tree inner_type;

          outer_type = TREE_TYPE (def_rhs);
          inner_type = TREE_TYPE (TREE_OPERAND (def_rhs, 0));

          if ((TREE_CODE (outer_type) == BOOLEAN_TYPE
               && INTEGRAL_TYPE_P (inner_type))
              || (TREE_CODE (inner_type) == BOOLEAN_TYPE
                  && INTEGRAL_TYPE_P (outer_type)))
            ;
          else if (INTEGRAL_TYPE_P (outer_type)
                   && INTEGRAL_TYPE_P (inner_type)
                   && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
                   && ssa_name_defined_by_comparison_p (TREE_OPERAND (def_rhs,
                                                                      0)))
            ;
          else
            return NULL_TREE;

          /* Don't propagate if the operand occurs in an abnormal PHI.  */
          if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (def_rhs, 0)))
            return NULL_TREE;

          if (has_single_use (test_var))
            {
              enum tree_code new_code;
              tree new_arg;

              if (cond_code == SSA_NAME
                  || (cond_code == NE_EXPR
                      && integer_zerop (TREE_OPERAND (cond, 1)))
                  || (cond_code == EQ_EXPR
                      && integer_onep (TREE_OPERAND (cond, 1))))
                new_code = NE_EXPR;
              else
                new_code = EQ_EXPR;

              new_arg = TREE_OPERAND (def_rhs, 0);
              new_cond = build2 (new_code, boolean_type_node, new_arg,
                                 fold_convert (TREE_TYPE (new_arg),
                                               integer_zero_node));
            }
        }
    }

  *test_var_p = test_var;
  return new_cond;
}
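/* Illustrative source-level example only (assumed, not from the GCC
   sources) of the "X + C" case handled above.  */

/* The single-use temporary X is defined by an addition with a constant;
   the pass adjusts the constant in the comparison instead ...  */
static int
cond_before_forwprop (int a)
{
  int x = a + 5;
  return x == 10;
}

/* ... yielding the equivalent condition with the addition eliminated
   once X has no remaining uses.  */
static int
cond_after_forwprop (int a)
{
  return a == 5;
}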
/* For each SSA_NAME on VARS_WORKLIST, try to forward propagate its
   defining statement into the COND_EXPRs that use it.  Any COND_EXPR we
   modify is pushed onto *COND_WORKLIST; if every use of a definition was
   propagated, the definition itself is removed.  */

static void
substitute_single_use_vars (varray_type *cond_worklist,
                            varray_type vars_worklist)
{
  while (VARRAY_ACTIVE_SIZE (vars_worklist) > 0)
    {
      tree test_var = VARRAY_TOP_TREE (vars_worklist);
      tree def = SSA_NAME_DEF_STMT (test_var);
      dataflow_t df;
      int j, num_uses, propagated_uses;

      VARRAY_POP (vars_worklist);

      /* Now compute the immediate uses of TEST_VAR.  */
      df = get_immediate_uses (def);
      num_uses = num_immediate_uses (df);
      propagated_uses = 0;

      /* If TEST_VAR is used more than once and is not a boolean set via
         TRUTH_NOT_EXPR with another SSA_NAME as its argument, then we
         can not optimize.  */
      if (num_uses == 1
          || (TREE_CODE (TREE_TYPE (test_var)) == BOOLEAN_TYPE
              && TREE_CODE (TREE_OPERAND (def, 1)) == TRUTH_NOT_EXPR
              && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (def, 1), 0))
                  == SSA_NAME)))
        ;
      else
        continue;

      /* Walk over each use and try to forward propagate the RHS of
         DEF into the use.  */
      for (j = 0; j < num_uses; j++)
        {
          tree cond_stmt;
          tree cond;
          enum tree_code cond_code;
          tree def_rhs;
          enum tree_code def_rhs_code;
          tree new_cond;

          cond_stmt = immediate_use (df, j);

          /* For now we can only propagate into COND_EXPRs.  */
          if (TREE_CODE (cond_stmt) != COND_EXPR)
            continue;

          cond = COND_EXPR_COND (cond_stmt);
          cond_code = TREE_CODE (cond);
          def_rhs = TREE_OPERAND (def, 1);
          def_rhs_code = TREE_CODE (def_rhs);

          /* If the definition of the single use variable was from an
             arithmetic operation, then we just need to adjust the
             constant in the COND_EXPR_COND and update the variable
             tested.  */
          if (def_rhs_code == PLUS_EXPR || def_rhs_code == MINUS_EXPR)
            {
              tree op0 = TREE_OPERAND (def_rhs, 0);
              tree op1 = TREE_OPERAND (def_rhs, 1);
              enum tree_code new_code;
              tree t;

              /* If the variable was defined via X + C, then we must
                 subtract C from the constant in the conditional.
                 Otherwise we add C to the constant in the conditional.
                 The result must fold into a valid gimple operand to be
                 optimizable.  */
              new_code = def_rhs_code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
              t = int_const_binop (new_code, TREE_OPERAND (cond, 1), op1, 0);
              if (!is_gimple_val (t))
                continue;

              new_cond = build (cond_code, boolean_type_node, op0, t);
            }
          /* If the variable is defined by a conditional expression...  */
          else if (TREE_CODE_CLASS (def_rhs_code) == tcc_comparison)
            {
              /* TEST_VAR was set from a relational operator.  */
              tree op0 = TREE_OPERAND (def_rhs, 0);
              tree op1 = TREE_OPERAND (def_rhs, 1);

              new_cond = build (def_rhs_code, boolean_type_node, op0, op1);

              /* Invert the conditional if necessary.  */
              if ((cond_code == EQ_EXPR
                   && integer_zerop (TREE_OPERAND (cond, 1)))
                  || (cond_code == NE_EXPR
                      && integer_onep (TREE_OPERAND (cond, 1))))
                {
                  new_cond = invert_truthvalue (new_cond);

                  /* If we did not get a simple relational expression or
                     bare SSA_NAME, then we can not optimize this case.  */
                  if (!COMPARISON_CLASS_P (new_cond)
                      && TREE_CODE (new_cond) != SSA_NAME)
                    continue;
                }
            }
          else
            {
              bool invert = false;
              enum tree_code new_code;
              tree new_arg;

              /* TEST_VAR was set from a TRUTH_NOT_EXPR or a NOP_EXPR.  */
              if (def_rhs_code == TRUTH_NOT_EXPR)
                invert = true;

              /* If we don't have <NE_EXPR/EQ_EXPR x INT_CST>, then we
                 cannot optimize this case.  */
              if ((cond_code == NE_EXPR || cond_code == EQ_EXPR)
                  && TREE_CODE (TREE_OPERAND (cond, 1)) != INTEGER_CST)
                continue;

              if (cond_code == SSA_NAME
                  || (cond_code == NE_EXPR
                      && integer_zerop (TREE_OPERAND (cond, 1)))
                  || (cond_code == EQ_EXPR
                      && integer_onep (TREE_OPERAND (cond, 1))))
                new_code = NE_EXPR;
              else
                new_code = EQ_EXPR;

              if (invert)
                new_code = (new_code == EQ_EXPR ? NE_EXPR : EQ_EXPR);

              new_arg = TREE_OPERAND (def_rhs, 0);
              new_cond = build2 (new_code, boolean_type_node, new_arg,
                                 fold_convert (TREE_TYPE (new_arg),
                                               integer_zero_node));
            }

          /* Dump details.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " Replaced '");
              print_generic_expr (dump_file, cond, dump_flags);
              fprintf (dump_file, "' with '");
              print_generic_expr (dump_file, new_cond, dump_flags);
              fprintf (dump_file, "'\n");
            }

          /* Replace the condition.  */
          COND_EXPR_COND (cond_stmt) = new_cond;
          modify_stmt (cond_stmt);
          propagated_uses++;
          VARRAY_PUSH_TREE (*cond_worklist, cond_stmt);
        }

      /* If we propagated into all the uses, then we can delete DEF.
         Unfortunately, we have to find the defining statement in
         whatever block it might be in.  */
      if (num_uses && num_uses == propagated_uses)
        {
          block_stmt_iterator bsi = bsi_for_stmt (def);
          bsi_remove (&bsi);
        }
    }
}
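/* Illustrative source-level example only (assumed, not from the GCC
   sources) of the comparison case handled above.  */

/* The single-use boolean T is defined by a comparison and then tested
   against zero in the branch ...  */
static int
branch_before_forwprop (int a, int b)
{
  _Bool t = a < b;
  return t != 0 ? 1 : 2;
}

/* ... so the comparison is propagated directly into the condition and
   the temporary's definition can be removed.  */
static int
branch_after_forwprop (int a, int b)
{
  return a < b ? 1 : 2;
}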