/* Return true if DEST is a destination we may safely construct a call's
   return value into directly (the return slot optimization).  DEST is
   safe when its underlying variable's address does not escape, which we
   test by asking whether the variable -- and every sub-variable tracked
   for it -- is call clobbered.  Array and field references are looked
   through to the base object; any other tree code is conservatively
   treated as unsafe.  */

static bool
dest_safe_for_nrv_p (tree dest)
{
  subvar_t sv;

  /* Strip ARRAY_REF and COMPONENT_REF wrappers down to the base object.
     (Equivalent to the recursion through TREE_OPERAND 0.)  */
  while (TREE_CODE (dest) == ARRAY_REF || TREE_CODE (dest) == COMPONENT_REF)
    dest = TREE_OPERAND (dest, 0);

  /* Only a plain variable can be proven safe; everything else
     (e.g. INDIRECT_REF bases) is conservatively rejected.  */
  if (TREE_CODE (dest) != VAR_DECL)
    return false;

  if (is_call_clobbered (dest))
    return false;

  /* The variable itself is unclobbered, but a clobbered sub-variable
     makes the destination unsafe just the same.  */
  for (sv = get_subvars_for_var (dest); sv; sv = sv->next)
    if (is_call_clobbered (sv->var))
      return false;

  return true;
}
/* Pass body: scan every statement in every basic block for assignments
   of the form "LHS = CALL_EXPR" where the call returns an aggregate in
   memory, and mark such calls with CALL_EXPR_RETURN_SLOT_OPT when the
   callee can safely build its return value directly into the LHS.
   NOTE(review): this definition continues past the end of the visible
   source; the closing braces of the FOR_EACH_BB body and of the
   function itself are not shown here.  */
static void
execute_return_slot_opt (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i);
	  tree call;

	  /* Only consider assignments whose RHS is a call returning an
	     aggregate, and which are not already marked.  */
	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && (call = TREE_OPERAND (stmt, 1),
		  TREE_CODE (call) == CALL_EXPR)
	      && !CALL_EXPR_RETURN_SLOT_OPT (call)
	      && aggregate_value_p (call, call))
	    {
	      def_operand_p def_p;
	      ssa_op_iter op_iter;

	      /* We determine whether or not the LHS address escapes by
		 asking whether it is call clobbered.  When the LHS isn't
		 a simple decl, we need to check the VDEFs, so it's
		 simplest to just loop through all the DEFs.  */
	      FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, op_iter,
					SSA_OP_ALL_DEFS)
		{
		  tree def = DEF_FROM_PTR (def_p);

		  /* For SSA names, test the underlying variable.  */
		  if (TREE_CODE (def) == SSA_NAME)
		    def = SSA_NAME_VAR (def);
		  if (is_call_clobbered (def))
		    goto unsafe;
		}

	      /* No defs are call clobbered, so the optimization is
		 safe.  */
	      CALL_EXPR_RETURN_SLOT_OPT (call) = 1;
	      /* This is too late to mark the target addressable like we
		 do in gimplify_modify_expr_rhs, but that's OK; anything
		 that wasn't already addressable was handled there.  */
	      unsafe:;
	    }
	}