Example #1
tree
target_for_debug_bind (tree var)
{
  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL_TREE;

  if (TREE_CODE (var) == SSA_NAME)
    {
      var = SSA_NAME_VAR (var);
      if (var == NULL_TREE)
	return NULL_TREE;
    }

  if ((TREE_CODE (var) != VAR_DECL
       || VAR_DECL_IS_VIRTUAL_OPERAND (var))
      && TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  if (DECL_HAS_VALUE_EXPR_P (var))
    return target_for_debug_bind (DECL_VALUE_EXPR (var));

  if (DECL_IGNORED_P (var))
    return NULL_TREE;

  /* var-tracking only tracks registers.  */
  if (!is_gimple_reg_type (TREE_TYPE (var)))
    return NULL_TREE;

  return var;
}
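
Every example in this set pivots on is_gimple_reg_type. For reference, in the GCC releases these functions are drawn from, it is a tiny inline predicate, roughly as follows (reproduced from memory, so treat it as a sketch rather than a verbatim quote):

static inline bool
is_gimple_reg_type (tree type)
{
  /* Anything that is not an aggregate (array, struct, union) can live
     in a scalar register; aggregates must live in memory.  */
  return !AGGREGATE_TYPE_P (type);
}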
Example #2
tree
make_ssa_name_fn (struct function *fn, tree var, gimple stmt)
{
  tree t;
  use_operand_p imm;

  gcc_assert (TREE_CODE (var) == VAR_DECL
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL
	      || (TYPE_P (var) && is_gimple_reg_type (var)));

  /* If our free list has an element, then use it.  */
  if (!vec_safe_is_empty (FREE_SSANAMES (fn)))
    {
      t = FREE_SSANAMES (fn)->pop ();
      if (GATHER_STATISTICS)
	ssa_name_nodes_reused++;

      /* The node was cleared out when we put it on the free list, so
	 there is no need to do so again here.  */
      gcc_assert (ssa_name (SSA_NAME_VERSION (t)) == NULL);
      (*SSANAMES (fn))[SSA_NAME_VERSION (t)] = t;
    }
  else
    {
      t = make_node (SSA_NAME);
      SSA_NAME_VERSION (t) = SSANAMES (fn)->length ();
      vec_safe_push (SSANAMES (fn), t);
      if (GATHER_STATISTICS)
	ssa_name_nodes_created++;
    }

  if (TYPE_P (var))
    {
      TREE_TYPE (t) = var;
      SET_SSA_NAME_VAR_OR_IDENTIFIER (t, NULL_TREE);
    }
  else
    {
      TREE_TYPE (t) = TREE_TYPE (var);
      SET_SSA_NAME_VAR_OR_IDENTIFIER (t, var);
    }
  SSA_NAME_DEF_STMT (t) = stmt;
  if (POINTER_TYPE_P (TREE_TYPE (t)))
    SSA_NAME_PTR_INFO (t) = NULL;
  else
    SSA_NAME_RANGE_INFO (t) = NULL;

  SSA_NAME_IN_FREE_LIST (t) = 0;
  SSA_NAME_IS_DEFAULT_DEF (t) = 0;
  imm = &(SSA_NAME_IMM_USE_NODE (t));
  imm->use = NULL;
  imm->prev = imm;
  imm->next = imm;
  imm->loc.ssa_name = t;

  return t;
}
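
Most callers do not invoke make_ssa_name_fn directly; they go through a thin wrapper that supplies the current function. A sketch of that wrapper, from memory:

static inline tree
make_ssa_name (tree var, gimple stmt)
{
  /* Delegate to make_ssa_name_fn, defaulting FN to cfun.  */
  return make_ssa_name_fn (cfun, var, stmt);
}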
Example #3
bool
is_gimple_val (tree t)
{
  /* Make loads from volatiles and memory vars explicit.  */
  if (is_gimple_variable (t)
      && is_gimple_reg_type (TREE_TYPE (t))
      && !is_gimple_reg (t))
    return false;

  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}
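
The is_gimple_min_invariant predicate used on the final line accepts constants and invariant addresses. Roughly (from memory; the helper names may differ slightly between releases):

bool
is_gimple_min_invariant (const_tree t)
{
  /* Addresses are invariant only if the object they point to
     cannot move.  */
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_invariant_address (t);

  return is_gimple_constant (t);
}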
Example #4
bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  Also force a temporary if the type doesn't need
     to be stored in memory, since it's cheap and prevents erroneous
     tailcalls (PR 17526).  */
  if (is_gimple_reg_type (TREE_TYPE (t))
      || TYPE_MODE (TREE_TYPE (t)) != BLKmode)
    return is_gimple_val (t);
  else
    return is_gimple_formal_tmp_rhs (t);
}
Example #5
bool
is_gimple_reg (tree t)
{
  var_ann_t ann;

  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that they might imply.  All around,
     it seems safest to not do too much optimization with these at the
     tree level at all.  We'll have to rely on the rtl optimizers to
     clean this up, as there we've got all the appropriate bits exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex values must have been put into SSA form.  That is, no
     assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
    return DECL_COMPLEX_GIMPLE_REG_P (t);

  /* Some compiler temporaries are created to be used exclusively in
     virtual operands (currently memory tags and sub-variables).
     These variables should never be considered GIMPLE registers.  */
  if (DECL_ARTIFICIAL (t) && (ann = var_ann (t)) != NULL)
    return ann->mem_tag_kind == NOT_A_TAG;

  return true;
}
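
The is_gimple_variable predicate used twice above merely tests the tree code; a sketch from memory:

bool
is_gimple_variable (tree t)
{
  return (TREE_CODE (t) == VAR_DECL
	  || TREE_CODE (t) == PARM_DECL
	  || TREE_CODE (t) == RESULT_DECL
	  || TREE_CODE (t) == SSA_NAME);
}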
Example #6
bool
is_gimple_val (tree t)
{
  /* Make loads from volatiles and memory vars explicit.  */
  if (is_gimple_variable (t)
      && is_gimple_reg_type (TREE_TYPE (t))
      && !is_gimple_reg (t))
    return false;

  /* FIXME make these decls.  That can happen only when we expose the
     entire landing-pad construct at the tree level.  */
  if (TREE_CODE (t) == EXC_PTR_EXPR || TREE_CODE (t) == FILTER_EXPR)
    return true;

  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}
Example #7
bool
is_gimple_reg (tree t)
{
  if (virtual_operand_p (t))
    return false;

  if (TREE_CODE (t) == SSA_NAME)
    return true;

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that they might imply.  All around,
     it seems safest to not do too much optimization with these at the
     tree level at all.  We'll have to rely on the rtl optimizers to
     clean this up, as there we've got all the appropriate bits exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}
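
Compared with Example #5, the explicit memory-tag check has been replaced by virtual_operand_p, which in this era looks roughly like the following (reproduced from memory, so treat as a sketch):

bool
virtual_operand_p (tree op)
{
  /* Look through an SSA_NAME to its underlying variable, if any.  */
  if (TREE_CODE (op) == SSA_NAME)
    {
      op = SSA_NAME_VAR (op);
      if (!op)
	return false;
    }

  if (TREE_CODE (op) == VAR_DECL)
    return VAR_DECL_IS_VIRTUAL_OPERAND (op);

  return false;
}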
Example #8
bool
is_gimple_reg_rhs (tree t)
{
  /* If the RHS of the MODIFY_EXPR may throw or make a nonlocal goto
     and the LHS is a user variable, then we need to introduce a formal
     temporary.  This way the optimizers can determine that the user
     variable is only modified if evaluation of the RHS does not throw.

     Don't force a temp of a non-renamable type; the copy could be
     arbitrarily expensive.  Instead we will generate a V_MAY_DEF for
     the assignment.  */

  if (is_gimple_reg_type (TREE_TYPE (t))
      && ((TREE_CODE (t) == CALL_EXPR && TREE_SIDE_EFFECTS (t))
	  || tree_could_throw_p (t)))
    return false;

  return is_gimple_formal_tmp_rhs (t);
}
Example #9
static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = stmt;
	  ass_var = gimple_call_lhs (stmt);
	  break;
	}

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens
     in, e.g., "*p = foo()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !DECL_BUILT_IN (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
	         must have a copyable type and the two arguments must have
	         reasonably equivalent types.  The latter requirement could be
	         relaxed if we emitted a suitable type conversion statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
					         TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that the PHI node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition, however this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after DCE.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (gimple_clobber_p (stmt))
	continue;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a GIMPLE assignment.  */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
	return;

      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if either there is no return value, or the return
     value is identical to the call's return value.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
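
The struct tailcall records chained onto *ret are declared in tree-tailcall.c; from memory, the layout is approximately:

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the
     return value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};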
Example #10
void
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
{
    size_t i, num_ops;
    tree lhs;
    gimple_seq pre = NULL;
    gimple post_stmt = NULL;

    push_gimplify_context (gimple_in_ssa_p (cfun));

    switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
        gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
                       is_gimple_val, fb_rvalue);
        gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
                       is_gimple_val, fb_rvalue);
        break;
    case GIMPLE_SWITCH:
        gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
                       is_gimple_val, fb_rvalue);
        break;
    case GIMPLE_OMP_ATOMIC_LOAD:
        gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
                       is_gimple_val, fb_rvalue);
        break;
    case GIMPLE_ASM:
    {
        size_t i, noutputs = gimple_asm_noutputs (stmt);
        const char *constraint, **oconstraints;
        bool allows_mem, allows_reg, is_inout;

        oconstraints
            = (const char **) alloca ((noutputs) * sizeof (const char *));
        for (i = 0; i < noutputs; i++)
        {
            tree op = gimple_asm_output_op (stmt, i);
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
            oconstraints[i] = constraint;
            parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                                     &allows_reg, &is_inout);
            gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                           is_inout ? is_gimple_min_lval : is_gimple_lvalue,
                           fb_lvalue | fb_mayfail);
        }
        for (i = 0; i < gimple_asm_ninputs (stmt); i++)
        {
            tree op = gimple_asm_input_op (stmt, i);
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
                allows_reg = 0;
            if (!allows_reg && allows_mem)
                gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                               is_gimple_lvalue, fb_lvalue | fb_mayfail);
            else
                gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                               is_gimple_asm_val, fb_rvalue);
        }
    }
    break;
    default:
        /* NOTE: We start gimplifying operands from last to first to
           make sure that side-effects on the RHS of calls, assignments
           and ASMs are executed before the LHS.  The ordering is not
           important for other statements.  */
        num_ops = gimple_num_ops (stmt);
        for (i = num_ops; i > 0; i--)
        {
            tree op = gimple_op (stmt, i - 1);
            if (op == NULL_TREE)
                continue;
            if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
                gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
            else if (i == 2
                     && is_gimple_assign (stmt)
                     && num_ops == 2
                     && get_gimple_rhs_class (gimple_expr_code (stmt))
                     == GIMPLE_SINGLE_RHS)
                gimplify_expr (&op, &pre, NULL,
                               rhs_predicate_for (gimple_assign_lhs (stmt)),
                               fb_rvalue);
            else if (i == 2 && is_gimple_call (stmt))
            {
                if (TREE_CODE (op) == FUNCTION_DECL)
                    continue;
                gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
            }
            else
                gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
            gimple_set_op (stmt, i - 1, op);
        }

        lhs = gimple_get_lhs (stmt);
        /* If gimplifying the LHS changed it in a way that requires a
           simple RHS, create a temporary.  */
        if (lhs && !is_gimple_reg (lhs))
        {
            bool need_temp = false;

            if (is_gimple_assign (stmt)
                    && num_ops == 2
                    && get_gimple_rhs_class (gimple_expr_code (stmt))
                    == GIMPLE_SINGLE_RHS)
                gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
                               rhs_predicate_for (gimple_assign_lhs (stmt)),
                               fb_rvalue);
            else if (is_gimple_reg (lhs))
            {
                if (is_gimple_reg_type (TREE_TYPE (lhs)))
                {
                    if (is_gimple_call (stmt))
                    {
                        i = gimple_call_flags (stmt);
                        if ((i & ECF_LOOPING_CONST_OR_PURE)
                                || !(i & (ECF_CONST | ECF_PURE)))
                            need_temp = true;
                    }
                    if (stmt_can_throw_internal (stmt))
                        need_temp = true;
                }
            }
            else
            {
                if (is_gimple_reg_type (TREE_TYPE (lhs)))
                    need_temp = true;
                else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
                {
                    if (is_gimple_call (stmt))
                    {
                        tree fndecl = gimple_call_fndecl (stmt);

                        if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
                                && !(fndecl && DECL_RESULT (fndecl)
                                     && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
                            need_temp = true;
                    }
                    else
                        need_temp = true;
                }
            }
            if (need_temp)
            {
                tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
                if (gimple_in_ssa_p (cfun))
                    temp = make_ssa_name (temp, NULL);
                gimple_set_lhs (stmt, temp);
                post_stmt = gimple_build_assign (lhs, temp);
            }
        }
        break;
    }

    if (!gimple_seq_empty_p (pre))
        gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
    if (post_stmt)
        gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

    pop_gimplify_context (NULL);
}
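
rhs_predicate_for, called twice above, picks the RHS predicate that matches the LHS; in the tuples-era gimplifier it is approximately (from memory):

static gimple_predicate
rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg (lhs))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}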
Example #11
tree
walk_gimple_op (gimple *stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  hash_set<tree> *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	}

      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for memory, we may
	 use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
          /* If the RHS is of a non-renamable type or is a register variable,
	     we may use a COMPONENT_REF on the LHS.  */
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      ret = walk_tree (gimple_call_chain_ptr (as_a <gcall *> (stmt)),
		       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only
	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only
		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (as_a <gcatch *> (stmt)),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (as_a <gasm *> (stmt), callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      {
	gomp_continue *cont_stmt = as_a <gomp_continue *> (stmt);
	ret = walk_tree (gimple_omp_continue_control_def_ptr (cont_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;

	ret = walk_tree (gimple_omp_continue_control_use_ptr (cont_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_CRITICAL:
      {
	gomp_critical *omp_stmt = as_a <gomp_critical *> (stmt);
	ret = walk_tree (gimple_omp_critical_name_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_critical_clauses_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_ORDERED:
      {
	gomp_ordered *omp_stmt = as_a <gomp_ordered *> (stmt);
	ret = walk_tree (gimple_omp_ordered_clauses_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
      {
	gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	ret = walk_tree (gimple_omp_parallel_clauses_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_parallel_child_fn_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_parallel_data_arg_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TARGET:
      {
	gomp_target *omp_stmt = as_a <gomp_target *> (stmt);
	ret = walk_tree (gimple_omp_target_clauses_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_target_child_fn_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_target_data_arg_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_TEAMS:
      ret = walk_tree (gimple_omp_teams_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      {
	gomp_atomic_load *omp_stmt = as_a <gomp_atomic_load *> (stmt);
	ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      {
	gomp_atomic_store *omp_stmt = as_a <gomp_atomic_store *> (stmt);
	ret = walk_tree (gimple_omp_atomic_store_val_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (
			 as_a <gtransaction *> (stmt)),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_RETURN:
      ret = walk_tree (gimple_omp_return_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}
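
callback_op has the standard walk_tree_fn signature, and walk_gimple_op passes the walk_stmt_info through as the callback's DATA argument, so a callback can consult the val_only/is_lhs flags set above. A minimal hypothetical callback (count_decl_refs_r and its counter are invented for illustration, not part of GCC):

static tree
count_decl_refs_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  unsigned *counter = (unsigned *) wi->info;

  /* Do not descend into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  else if (DECL_P (*tp))
    (*counter)++;

  /* Returning anything other than NULL_TREE would stop the walk.  */
  return NULL_TREE;
}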
Example #12
static unsigned int
tree_nrv (void)
{
  tree result = DECL_RESULT (current_function_decl);
  tree result_type = TREE_TYPE (result);
  tree found = NULL;
  basic_block bb;
  gimple_stmt_iterator gsi;
  struct nrv_data data;

  /* If this function does not return an aggregate type in memory, then
     there is nothing to do.  */
  if (!aggregate_value_p (result, current_function_decl))
    return 0;

  /* If a GIMPLE type is returned in memory, finalize_nrv_r might create
     non-GIMPLE.  */
  if (is_gimple_reg_type (result_type))
    return 0;

  /* Look through each block for assignments to the RESULT_DECL.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree ret_val;

	  if (gimple_code (stmt) == GIMPLE_RETURN)
	    {
	      /* In a function with an aggregate return value, the
		 gimplifier has changed all non-empty RETURN_EXPRs to
		 return the RESULT_DECL.  */
	      ret_val = gimple_return_retval (stmt);
	      if (ret_val)
		gcc_assert (ret_val == result);
	    }
	  else if (is_gimple_assign (stmt)
		   && gimple_assign_lhs (stmt) == result)
	    {
              tree rhs;

	      if (!gimple_assign_copy_p (stmt))
		return 0;

	      rhs = gimple_assign_rhs1 (stmt);

	      /* Now verify that this return statement uses the same value
		 as any previously encountered return statement.  */
	      if (found != NULL)
		{
		  /* If we found a return statement using a different variable
		     than previous return statements, then we cannot perform
		     NRV optimizations.  */
		  if (found != rhs)
		    return 0;
		}
	      else
		found = rhs;

	      /* The returned value must be a local automatic variable of the
		 same type and alignment as the function's result.  */
	      if (TREE_CODE (found) != VAR_DECL
		  || TREE_THIS_VOLATILE (found)
		  || DECL_CONTEXT (found) != current_function_decl
		  || TREE_STATIC (found)
		  || TREE_ADDRESSABLE (found)
		  || DECL_ALIGN (found) > DECL_ALIGN (result)
		  || !useless_type_conversion_p (result_type,
					        TREE_TYPE (found)))
		return 0;
	    }
	  else if (is_gimple_assign (stmt))
	    {
	      tree addr = get_base_address (gimple_assign_lhs (stmt));
	       /* If there's any MODIFY of a component of RESULT,
		  then bail out.  */
	      if (addr && addr == result)
		return 0;
	    }
	}
    }

  if (!found)
    return 0;

  /* If dumping details, note the NRV replacement once and only once.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "NRV Replaced: ");
      print_generic_expr (dump_file, found, dump_flags);
      fprintf (dump_file, "  with: ");
      print_generic_expr (dump_file, result, dump_flags);
      fprintf (dump_file, "\n");
    }

  /* At this point we know that all the return statements return the
     same local, which has suitable attributes for NRV.  Copy debugging
     information from FOUND to RESULT.  */
  DECL_NAME (result) = DECL_NAME (found);
  DECL_SOURCE_LOCATION (result) = DECL_SOURCE_LOCATION (found);
  DECL_ABSTRACT_ORIGIN (result) = DECL_ABSTRACT_ORIGIN (found);
  TREE_ADDRESSABLE (result) = TREE_ADDRESSABLE (found);

  /* Now walk through the function changing all references to VAR to be
     RESULT.  */
  data.var = found;
  data.result = result;
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);
	  /* If this is a copy from VAR to RESULT, remove it.  */
	  if (gimple_assign_copy_p (stmt)
	      && gimple_assign_lhs (stmt) == result
	      && gimple_assign_rhs1 (stmt) == found)
	    gsi_remove (&gsi, true);
	  else
	    {
	      struct walk_stmt_info wi;
	      memset (&wi, 0, sizeof (wi));
	      wi.info = &data;
	      walk_gimple_op (stmt, finalize_nrv_r, &wi);
	      gsi_next (&gsi);
	    }
	}
    }

  /* FOUND is no longer used.  Ensure it gets removed.  */
  var_ann (found)->used = 0;
  return 0;
}
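
For completeness, the finalize_nrv_r callback and the nrv_data it receives via wi->info look roughly like this in tree-nrv.c of the same era (from memory; treat as a sketch):

struct nrv_data
{
  /* The local variable that every return statement returns.  */
  tree var;

  /* The function's RESULT_DECL, which replaces all uses of VAR.  */
  tree result;
};

static tree
finalize_nrv_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nrv_data *dp = (struct nrv_data *) wi->info;

  /* No need to walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  /* Otherwise replace all occurrences of VAR with RESULT.  */
  else if (*tp == dp->var)
    *tp = dp->result;

  /* Keep iterating.  */
  return NULL_TREE;
}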