Example #1
static unsigned int
execute_return_slot_opt (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree stmt = bsi_stmt (i);
          tree call;

          if (TREE_CODE (stmt) == MODIFY_EXPR
              && (call = TREE_OPERAND (stmt, 1),
                  TREE_CODE (call) == CALL_EXPR)
              && !CALL_EXPR_RETURN_SLOT_OPT (call)
              && aggregate_value_p (call, call))
            /* Check if the location being assigned to is
               call-clobbered.  */
            CALL_EXPR_RETURN_SLOT_OPT (call) =
              dest_safe_for_nrv_p (TREE_OPERAND (stmt, 0)) ? 1 : 0;
        }
    }
  return 0;
}
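For context, a minimal C++ sketch follows (hypothetical names such as make_point and consume, not taken from GCC) of the statement shape this pass looks for: a MODIFY_EXPR whose right-hand side is a CALL_EXPR returning an aggregate in memory. When the destination is safe, setting CALL_EXPR_RETURN_SLOT_OPT lets the callee construct its result directly in that destination instead of in a temporary that would then be copied.

/* Illustrative only.  On most targets a struct this large satisfies
   aggregate_value_p, i.e. the call returns its value in memory.  */

struct point
{
  double coords[16];
};

static struct point
make_point (void)
{
  struct point p = { { 0.0 } };
  return p;
}

void
consume (void)
{
  struct point p;
  p = make_point ();	/* MODIFY_EXPR with a CALL_EXPR on the RHS; with a
			   safe destination the result is built in `p'.  */
}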
Example #2
static void
execute_return_slot_opt (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i);
	  tree call;

	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && (call = TREE_OPERAND (stmt, 1),
		  TREE_CODE (call) == CALL_EXPR)
	      && !CALL_EXPR_RETURN_SLOT_OPT (call)
	      && aggregate_value_p (call, call))
	    {
	      def_operand_p def_p;
	      ssa_op_iter op_iter;

	      /* We determine whether or not the LHS address escapes by
		 asking whether it is call clobbered.  When the LHS isn't a
		 simple decl, we need to check the VDEFs, so it's simplest
		 to just loop through all the DEFs.  */
	      FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, op_iter, SSA_OP_ALL_DEFS)
		{
		  tree def = DEF_FROM_PTR (def_p);
		  if (TREE_CODE (def) == SSA_NAME)
		    def = SSA_NAME_VAR (def);
		  if (is_call_clobbered (def))
		    goto unsafe;
		}

	      /* No defs are call clobbered, so the optimization is safe.  */
	      CALL_EXPR_RETURN_SLOT_OPT (call) = 1;
	      /* This is too late to mark the target addressable like we do
		 in gimplify_modify_expr_rhs, but that's OK; anything that
		 wasn't already addressable was handled there.  */

	      unsafe:;
	    }
	}
    }
}
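By contrast, a sketch of a destination this version rejects (again with hypothetical names): once the address of the variable escapes, it is marked call-clobbered, since the callee could legally read or modify it through the escaped pointer while it runs, so constructing the return value directly in it would not be safe.

/* Illustrative only.  `remember' stores the pointer, so the address of
   `p' escapes and `p' becomes call-clobbered.  */

struct point
{
  double coords[16];
};

static struct point *saved;

static void
remember (struct point *q)
{
  saved = q;		/* The address escapes here.  */
}

static struct point
make_point (void)
{
  struct point p = { { 0.0 } };
  return p;
}

void
consume_escaped (void)
{
  struct point p;
  remember (&p);	/* `p' is now addressable and call-clobbered...  */
  p = make_point ();	/* ...so the return-slot optimization is skipped.  */
}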
Example #3
int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	cp_genericize_tree (expr_p);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
	  && cilk_detect_spawn_and_unwrap (expr_p)
	  && !seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
      {
	if (fn_contains_cilk_spawn_p (cfun)
	    && cilk_detect_spawn_and_unwrap (expr_p)
	    && !seen_error ())
	  return (enum gimplify_status) gimplify_cilk_spawn (expr_p);

	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
		  || (TREE_CODE (op1) == CONSTRUCTOR
		      && CONSTRUCTOR_NELTS (op1) == 0
		      && !TREE_CLOBBER_P (op1))
		  || (TREE_CODE (op1) == CALL_EXPR
		      && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
		 && is_really_empty_class (TREE_TYPE (op0)))
	  {
	    /* Remove any copies of empty classes.  We check that the RHS
	       has a simple form so that TARGET_EXPRs and non-empty
	       CONSTRUCTORs get reduced properly, and we leave the return
	       slot optimization alone because it isn't a copy (FIXME so it
	       shouldn't be represented as one).

	       Also drop volatile variables on the RHS to avoid infinite
	       recursion from gimplify_expr trying to load the value.  */
	    if (!TREE_SIDE_EFFECTS (op1)
		|| (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
	      *expr_p = op0;
	    else if (TREE_CODE (op1) == MEM_REF
		     && TREE_THIS_VOLATILE (op1))
	      {
		/* Similarly for volatile MEM_REFs on the RHS.  */
		if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
		  *expr_p = op0;
		else
		  *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
				    TREE_OPERAND (op1, 0), op0);
	      }
	    else
	      *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
				op0, op1);
	  }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert (fn_contains_cilk_spawn_p (cfun)
		  && cilk_detect_spawn_and_unwrap (expr_p));

      /* If errors are seen, then just process it as a CALL_EXPR.  */
      if (!seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      
    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
	  && cilk_detect_spawn_and_unwrap (expr_p)
	  && !seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
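      /* No break: control falls through to the default case, which hands
	 the CALL_EXPR to c_gimplify_expr.  */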

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
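As a closing illustration, a hedged C++ sketch (hypothetical names, not from the GCC sources) of the empty-class copy that the MODIFY_EXPR branch above removes: Tag has no bases and no fields, so the assignment moves no data and cp_gimplify_expr reduces the copy to, roughly, just its left-hand side.

/* Illustrative only.  `Tag' is a "really empty" class, so the trivial
   copy below carries no value and the gimplifier drops it entirely.  */

struct Tag {};

int
main ()
{
  Tag a, b;
  b = a;	/* Reduced to (roughly) just `b': no load or store remains.  */
  return 0;
}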