static unsigned int
execute_return_slot_opt (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          bool slot_opt_p;

          if (is_gimple_call (stmt)
              && gimple_call_lhs (stmt)
              && !gimple_call_return_slot_opt_p (stmt)
              && aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),
                                    gimple_call_fndecl (stmt)))
            {
              /* Check if the location being assigned to is
                 clobbered by the call.  */
              slot_opt_p = dest_safe_for_nrv_p (stmt);
              gimple_call_set_return_slot_opt (stmt, slot_opt_p);
            }
        }
    }
  return 0;
}
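/* Illustrative sketch, not part of the original listing: the helper
   dest_safe_for_nrv_p used above must decide whether the call may read or
   clobber the destination it would write its return value into directly.
   Assuming the GIMPLE-era alias oracle (call_may_clobber_ref_p and
   ref_maybe_used_by_stmt_p), such a check could look roughly like this.  */
static bool
dest_safe_for_nrv_p_sketch (gimple call)
{
  tree dest = gimple_call_lhs (call);

  dest = get_base_address (dest);
  if (!dest)
    return false;

  /* An SSA name never has its address taken, so the call cannot touch it.  */
  if (TREE_CODE (dest) == SSA_NAME)
    return true;

  /* Otherwise the destination must be neither written nor read by the
     call itself, or writing the result in place would be wrong.  */
  if (call_may_clobber_ref_p (call, dest)
      || ref_maybe_used_by_stmt_p (call, dest))
    return false;

  return true;
}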
static unsigned int
execute_return_slot_opt (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree stmt = bsi_stmt (i);
          tree call;

          if (TREE_CODE (stmt) == MODIFY_EXPR
              && (call = TREE_OPERAND (stmt, 1),
                  TREE_CODE (call) == CALL_EXPR)
              && !CALL_EXPR_RETURN_SLOT_OPT (call)
              && aggregate_value_p (call, call))
            /* Check if the location being assigned to is
               call-clobbered.  */
            CALL_EXPR_RETURN_SLOT_OPT (call)
              = dest_safe_for_nrv_p (TREE_OPERAND (stmt, 0)) ? 1 : 0;
        }
    }
  return 0;
}
int
avm2_return_pops_args (tree fundecl, tree funtype, int size)
{
  int rtd = TARGET_RTD && (!fundecl || TREE_CODE (fundecl) != IDENTIFIER_NODE);

  /* Cdecl functions override -mrtd, and never pop the stack.  */
  if (! lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype)))
    {
      /* Stdcall and fastcall functions will pop the stack if not
         variable args.  */
      if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype))
          || lookup_attribute ("fastcall", TYPE_ATTRIBUTES (funtype)))
        rtd = 1;

      if (rtd
          && (TYPE_ARG_TYPES (funtype) == NULL_TREE
              || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype)))
                  == void_type_node)))
        return size;
    }

  /* Lose any fake structure return argument if it is passed on the
     stack.  */
  if (aggregate_value_p (TREE_TYPE (funtype), fundecl)
      && !KEEP_AGGREGATE_RETURN_POINTER)
    {
      int nregs = avm2_function_regparm (funtype, fundecl);

      if (!nregs)
        return GET_MODE_SIZE (Pmode);
    }

  return 0;
}
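/* Illustrative example, not from the original listing: the hook above is the
   target's RETURN_POPS_ARGS logic.  Assuming this avm2 port registers the
   i386-style cdecl/stdcall/fastcall attributes it checks for, the effect at
   the source level is roughly: the callee pops its fixed argument bytes for
   stdcall/fastcall (or under -mrtd), cdecl and variadic functions leave the
   pop to the caller, and a stack-passed hidden return pointer can add one
   pointer's worth of bytes.  */
struct big { int words[4]; };

extern int caller_pops (int a, int b);              /* default: hook returns 0 */
extern int callee_pops (int a, int b)
  __attribute__ ((stdcall));                        /* hook returns SIZE (8 bytes
                                                       of int arguments)        */
extern int variadic_caller_pops (int a, ...)
  __attribute__ ((stdcall));                        /* variadic: hook returns 0 */
extern struct big by_hidden_pointer (void);         /* may add GET_MODE_SIZE
                                                       (Pmode) for the hidden
                                                       return pointer           */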
bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  Also force a temporary if the type doesn't need
     to be stored in memory, since it's cheap and prevents erroneous
     tailcalls (PR 17526).  */
  if (is_gimple_reg_type (TREE_TYPE (t))
      || (TYPE_MODE (TREE_TYPE (t)) != BLKmode
          && (TREE_CODE (t) != CALL_EXPR
              || ! aggregate_value_p (t, t))))
    return is_gimple_val (t);
  else
    return is_gimple_formal_tmp_rhs (t);
}
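/* Sketch, not from the original listing: a predicate like is_gimple_mem_rhs
   is normally selected through a small dispatcher keyed on the LHS being
   assigned to.  The exact cascade differs between releases; under the same
   formal-temporary era as the predicate above, it is roughly of this shape.
   The local typedef and the _sketch name are ours.  */
typedef bool (*rhs_predicate_fn) (tree);

static rhs_predicate_fn
rhs_predicate_for_sketch (tree lhs)
{
  if (is_gimple_formal_tmp_var (lhs))
    return is_gimple_formal_tmp_rhs;
  else if (is_gimple_reg (lhs))
    return is_gimple_reg_rhs;
  else
    return is_gimple_mem_rhs;
}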
static void
execute_return_slot_opt (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree stmt = bsi_stmt (i);
          tree call;

          if (TREE_CODE (stmt) == MODIFY_EXPR
              && (call = TREE_OPERAND (stmt, 1),
                  TREE_CODE (call) == CALL_EXPR)
              && !CALL_EXPR_RETURN_SLOT_OPT (call)
              && aggregate_value_p (call, call))
            {
              def_operand_p def_p;
              ssa_op_iter op_iter;

              /* We determine whether or not the LHS address escapes by
                 asking whether it is call clobbered.  When the LHS isn't
                 a simple decl, we need to check the VDEFs, so it's
                 simplest to just loop through all the DEFs.  */
              FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, op_iter, SSA_OP_ALL_DEFS)
                {
                  tree def = DEF_FROM_PTR (def_p);
                  if (TREE_CODE (def) == SSA_NAME)
                    def = SSA_NAME_VAR (def);
                  if (is_call_clobbered (def))
                    goto unsafe;
                }

              /* No defs are call clobbered, so the optimization is safe.  */
              CALL_EXPR_RETURN_SLOT_OPT (call) = 1;
              /* This is too late to mark the target addressable like we do
                 in gimplify_modify_expr_rhs, but that's OK; anything that
                 wasn't already addressable was handled there.  */
            unsafe:;
            }
        }
    }
}
static void
tree_nrv (void)
{
  tree result = DECL_RESULT (current_function_decl);
  tree result_type = TREE_TYPE (result);
  tree found = NULL;
  basic_block bb;
  block_stmt_iterator bsi;
  struct nrv_data data;

  /* If this function does not return an aggregate type in memory, then
     there is nothing to do.  */
  if (!aggregate_value_p (result, current_function_decl))
    return;

  /* Look through each block for assignments to the RESULT_DECL.  */
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          tree stmt = bsi_stmt (bsi);
          tree ret_expr;

          if (TREE_CODE (stmt) == RETURN_EXPR)
            {
              /* In a function with an aggregate return value, the
                 gimplifier has changed all non-empty RETURN_EXPRs to
                 return the RESULT_DECL.  */
              ret_expr = TREE_OPERAND (stmt, 0);
              if (ret_expr)
                gcc_assert (ret_expr == result);
            }
          else if (TREE_CODE (stmt) == MODIFY_EXPR
                   && TREE_OPERAND (stmt, 0) == result)
            {
              ret_expr = TREE_OPERAND (stmt, 1);

              /* Now verify that this return statement uses the same value
                 as any previously encountered return statement.  */
              if (found != NULL)
                {
                  /* If we found a return statement using a different
                     variable than previous return statements, then we
                     can not perform NRV optimizations.  */
                  if (found != ret_expr)
                    return;
                }
              else
                found = ret_expr;

              /* The returned value must be a local automatic variable of
                 the same type and alignment as the function's result.  */
              if (TREE_CODE (found) != VAR_DECL
                  || TREE_THIS_VOLATILE (found)
                  || DECL_CONTEXT (found) != current_function_decl
                  || TREE_STATIC (found)
                  || TREE_ADDRESSABLE (found)
                  || DECL_ALIGN (found) > DECL_ALIGN (result)
                  || !lang_hooks.types_compatible_p (TREE_TYPE (found),
                                                     result_type))
                return;
            }
        }
    }

  if (!found)
    return;

  /* If dumping details, then note once and only the NRV replacement.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "NRV Replaced: ");
      print_generic_expr (dump_file, found, dump_flags);
      fprintf (dump_file, " with: ");
      print_generic_expr (dump_file, result, dump_flags);
      fprintf (dump_file, "\n");
    }

  /* At this point we know that all the return statements return the
     same local which has suitable attributes for NRV.  Copy debugging
     information from FOUND to RESULT.  */
  DECL_NAME (result) = DECL_NAME (found);
  DECL_SOURCE_LOCATION (result) = DECL_SOURCE_LOCATION (found);
  DECL_ABSTRACT_ORIGIN (result) = DECL_ABSTRACT_ORIGIN (found);
  TREE_ADDRESSABLE (result) = TREE_ADDRESSABLE (found);

  /* Now walk through the function changing all references to VAR to be
     RESULT.  */
  data.var = found;
  data.result = result;
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
        {
          tree *tp = bsi_stmt_ptr (bsi);
          /* If this is a copy from VAR to RESULT, remove it.  */
          if (TREE_CODE (*tp) == MODIFY_EXPR
              && TREE_OPERAND (*tp, 0) == result
              && TREE_OPERAND (*tp, 1) == found)
            bsi_remove (&bsi);
          else
            {
              walk_tree (tp, finalize_nrv_r, &data, 0);
              bsi_next (&bsi);
            }
        }
    }

  /* FOUND is no longer used.  Ensure it gets removed.  */
  var_ann (found)->used = 0;
}
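/* Sketch, not part of the original listing: the nrv_data record and the
   walk_tree callback finalize_nrv_r referenced above.  Assuming the plain
   replace-VAR-with-RESULT semantics the comments describe, they could look
   roughly like this; the _sketch names are ours.  */
struct nrv_data_sketch
{
  tree var;      /* The local returned by every return statement.  */
  tree result;   /* The function's RESULT_DECL, which replaces it.  */
};

static tree
finalize_nrv_r_sketch (tree *tp, int *walk_subtrees, void *data)
{
  struct nrv_data_sketch *dp = (struct nrv_data_sketch *) data;

  /* No need to walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  /* Replace every occurrence of VAR with RESULT.  */
  else if (*tp == dp->var)
    *tp = dp->result;

  /* Keep walking.  */
  return NULL_TREE;
}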
void
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
{
  size_t i, num_ops;
  tree lhs;
  gimple_seq pre = NULL;
  gimple post_stmt = NULL;

  push_gimplify_context (gimple_in_ssa_p (cfun));

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
                     is_gimple_val, fb_rvalue);
      gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
                     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_SWITCH:
      gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
                     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
                     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_ASM:
      {
        size_t i, noutputs = gimple_asm_noutputs (stmt);
        const char *constraint, **oconstraints;
        bool allows_mem, allows_reg, is_inout;

        oconstraints
          = (const char **) alloca ((noutputs) * sizeof (const char *));
        for (i = 0; i < noutputs; i++)
          {
            tree op = gimple_asm_output_op (stmt, i);
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
            oconstraints[i] = constraint;
            parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                                     &allows_reg, &is_inout);
            gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                           is_inout ? is_gimple_min_lval : is_gimple_lvalue,
                           fb_lvalue | fb_mayfail);
          }
        for (i = 0; i < gimple_asm_ninputs (stmt); i++)
          {
            tree op = gimple_asm_input_op (stmt, i);
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
              allows_reg = 0;
            if (!allows_reg && allows_mem)
              gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                             is_gimple_lvalue, fb_lvalue | fb_mayfail);
            else
              gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                             is_gimple_asm_val, fb_rvalue);
          }
      }
      break;
    default:
      /* NOTE: We start gimplifying operands from last to first to
         make sure that side-effects on the RHS of calls, assignments
         and ASMs are executed before the LHS.  The ordering is not
         important for other statements.  */
      num_ops = gimple_num_ops (stmt);
      for (i = num_ops; i > 0; i--)
        {
          tree op = gimple_op (stmt, i - 1);
          if (op == NULL_TREE)
            continue;
          if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
            gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
          else if (i == 2
                   && is_gimple_assign (stmt)
                   && num_ops == 2
                   && get_gimple_rhs_class (gimple_expr_code (stmt))
                      == GIMPLE_SINGLE_RHS)
            gimplify_expr (&op, &pre, NULL,
                           rhs_predicate_for (gimple_assign_lhs (stmt)),
                           fb_rvalue);
          else if (i == 2 && is_gimple_call (stmt))
            {
              if (TREE_CODE (op) == FUNCTION_DECL)
                continue;
              gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
            }
          else
            gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
          gimple_set_op (stmt, i - 1, op);
        }

      lhs = gimple_get_lhs (stmt);
      /* If the LHS changed it in a way that requires a simple RHS,
         create temporary.  */
      if (lhs && !is_gimple_reg (lhs))
        {
          bool need_temp = false;

          if (is_gimple_assign (stmt)
              && num_ops == 2
              && get_gimple_rhs_class (gimple_expr_code (stmt))
                 == GIMPLE_SINGLE_RHS)
            gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
                           rhs_predicate_for (gimple_assign_lhs (stmt)),
                           fb_rvalue);
          else if (is_gimple_reg (lhs))
            {
              if (is_gimple_reg_type (TREE_TYPE (lhs)))
                {
                  if (is_gimple_call (stmt))
                    {
                      i = gimple_call_flags (stmt);
                      if ((i & ECF_LOOPING_CONST_OR_PURE)
                          || !(i & (ECF_CONST | ECF_PURE)))
                        need_temp = true;
                    }
                  if (stmt_can_throw_internal (stmt))
                    need_temp = true;
                }
            }
          else
            {
              if (is_gimple_reg_type (TREE_TYPE (lhs)))
                need_temp = true;
              else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
                {
                  if (is_gimple_call (stmt))
                    {
                      tree fndecl = gimple_call_fndecl (stmt);

                      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
                          && !(fndecl && DECL_RESULT (fndecl)
                               && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
                        need_temp = true;
                    }
                  else
                    need_temp = true;
                }
            }
          if (need_temp)
            {
              tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);

              if (gimple_in_ssa_p (cfun))
                temp = make_ssa_name (temp, NULL);
              gimple_set_lhs (stmt, temp);
              post_stmt = gimple_build_assign (lhs, temp);
            }
        }
      break;
    }

  if (!gimple_seq_empty_p (pre))
    gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
  if (post_stmt)
    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

  pop_gimplify_context (NULL);
}
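/* Hypothetical usage sketch, not from the original listing: a pass that
   rewrites an operand of a GIMPLE_ASSIGN in place and then calls
   gimple_regimplify_operands so the statement is forced back into valid
   GIMPLE form; side effects collected in PRE are inserted before the
   statement and any LHS-saving copy after it.  STMT and NEW_RHS are assumed
   to come from the surrounding pass.  */
static void
replace_rhs_and_regimplify (gimple stmt, tree new_rhs)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);

  gimple_assign_set_rhs1 (stmt, new_rhs);
  update_stmt (stmt);

  /* NEW_RHS may not satisfy the predicate the LHS requires; let the
     gimplifier introduce whatever temporaries it needs.  */
  gimple_regimplify_operands (stmt, &gsi);
}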
static unsigned int
tree_nrv (void)
{
  tree result = DECL_RESULT (current_function_decl);
  tree result_type = TREE_TYPE (result);
  tree found = NULL;
  basic_block bb;
  gimple_stmt_iterator gsi;
  struct nrv_data data;

  /* If this function does not return an aggregate type in memory, then
     there is nothing to do.  */
  if (!aggregate_value_p (result, current_function_decl))
    return 0;

  /* If a GIMPLE type is returned in memory, finalize_nrv_r might create
     non-GIMPLE.  */
  if (is_gimple_reg_type (result_type))
    return 0;

  /* Look through each block for assignments to the RESULT_DECL.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree ret_val;

          if (gimple_code (stmt) == GIMPLE_RETURN)
            {
              /* In a function with an aggregate return value, the
                 gimplifier has changed all non-empty RETURN_EXPRs to
                 return the RESULT_DECL.  */
              ret_val = gimple_return_retval (stmt);
              if (ret_val)
                gcc_assert (ret_val == result);
            }
          else if (is_gimple_assign (stmt)
                   && gimple_assign_lhs (stmt) == result)
            {
              tree rhs;

              if (!gimple_assign_copy_p (stmt))
                return 0;

              rhs = gimple_assign_rhs1 (stmt);

              /* Now verify that this return statement uses the same value
                 as any previously encountered return statement.  */
              if (found != NULL)
                {
                  /* If we found a return statement using a different
                     variable than previous return statements, then we
                     can not perform NRV optimizations.  */
                  if (found != rhs)
                    return 0;
                }
              else
                found = rhs;

              /* The returned value must be a local automatic variable of
                 the same type and alignment as the function's result.  */
              if (TREE_CODE (found) != VAR_DECL
                  || TREE_THIS_VOLATILE (found)
                  || DECL_CONTEXT (found) != current_function_decl
                  || TREE_STATIC (found)
                  || TREE_ADDRESSABLE (found)
                  || DECL_ALIGN (found) > DECL_ALIGN (result)
                  || !useless_type_conversion_p (result_type,
                                                 TREE_TYPE (found)))
                return 0;
            }
          else if (is_gimple_assign (stmt))
            {
              tree addr = get_base_address (gimple_assign_lhs (stmt));
              /* If there's any MODIFY of component of RESULT,
                 then bail out.  */
              if (addr && addr == result)
                return 0;
            }
        }
    }

  if (!found)
    return 0;

  /* If dumping details, then note once and only the NRV replacement.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "NRV Replaced: ");
      print_generic_expr (dump_file, found, dump_flags);
      fprintf (dump_file, " with: ");
      print_generic_expr (dump_file, result, dump_flags);
      fprintf (dump_file, "\n");
    }

  /* At this point we know that all the return statements return the
     same local which has suitable attributes for NRV.  Copy debugging
     information from FOUND to RESULT.  */
  DECL_NAME (result) = DECL_NAME (found);
  DECL_SOURCE_LOCATION (result) = DECL_SOURCE_LOCATION (found);
  DECL_ABSTRACT_ORIGIN (result) = DECL_ABSTRACT_ORIGIN (found);
  TREE_ADDRESSABLE (result) = TREE_ADDRESSABLE (found);

  /* Now walk through the function changing all references to VAR to be
     RESULT.  */
  data.var = found;
  data.result = result;
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple stmt = gsi_stmt (gsi);
          /* If this is a copy from VAR to RESULT, remove it.  */
          if (gimple_assign_copy_p (stmt)
              && gimple_assign_lhs (stmt) == result
              && gimple_assign_rhs1 (stmt) == found)
            gsi_remove (&gsi, true);
          else
            {
              struct walk_stmt_info wi;
              memset (&wi, 0, sizeof (wi));
              wi.info = &data;
              walk_gimple_op (stmt, finalize_nrv_r, &wi);
              gsi_next (&gsi);
            }
        }
    }

  /* FOUND is no longer used.  Ensure it gets removed.  */
  var_ann (found)->used = 0;
  return 0;
}
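/* Source-level intuition, illustrative only and not part of the pass: for a
   struct returned in memory, the named local below satisfies the checks in
   tree_nrv (local, automatic, non-volatile, non-addressable, same type and
   alignment as the DECL_RESULT), so after NRV its stores go directly into
   the caller-provided return slot and the final aggregate copy disappears.  */
struct big_value
{
  int words[32];
};

struct big_value
make_big_value (int seed)
{
  struct big_value tmp;   /* NRV candidate: replaced by the RESULT_DECL.  */
  int i;

  for (i = 0; i < 32; i++)
    tmp.words[i] = seed + i;

  return tmp;             /* After NRV, no copy from TMP remains here.  */
}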