Example #1
static bool
equal_mem_array_ref_p (tree t0, tree t1)
{
  if (TREE_CODE (t0) != MEM_REF && ! handled_component_p (t0))
    return false;
  if (TREE_CODE (t1) != MEM_REF && ! handled_component_p (t1))
    return false;

  if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
    return false;
  bool rev0;
  HOST_WIDE_INT off0, sz0, max0;
  tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
  if (sz0 == -1
      || sz0 != max0)
    return false;

  bool rev1;
  HOST_WIDE_INT off1, sz1, max1;
  tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
  if (sz1 == -1
      || sz1 != max1)
    return false;

  if (rev0 != rev1)
    return false;

  /* Types were compatible, so this is a sanity check.  */
  gcc_assert (sz0 == sz1);

  return (off0 == off1) && operand_equal_p (base0, base1, 0);
}
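
The function above reduces both references to a (base, offset, size) triple via get_ref_base_and_extent and compares the triples. The following is a minimal standalone sketch of that comparison; struct toy_ref and all toy_* names are invented stand-ins for GCC's trees, and the -1 / max_size conventions mirror the checks above:

#include <stdbool.h>
#include <stdio.h>

/* Invented stand-in for the (base, offset, size, max_size) summary
   that get_ref_base_and_extent produces.  */
struct toy_ref
{
  const void *base;   /* identity of the underlying object */
  long offset;        /* bit offset of the access */
  long size;          /* bit size, or -1 if unknown */
  long max_size;      /* upper bound on the access size */
};

/* Mirror of the logic above: reject variable-sized accesses
   (size == -1 or size != max_size), then compare the triples.  */
static bool
toy_equal_ref_p (const struct toy_ref *a, const struct toy_ref *b)
{
  if (a->size == -1 || a->size != a->max_size)
    return false;
  if (b->size == -1 || b->size != b->max_size)
    return false;
  return a->base == b->base && a->offset == b->offset && a->size == b->size;
}

int
main (void)
{
  int obj;
  struct toy_ref r1 = { &obj, 32, 32, 32 };
  struct toy_ref r2 = { &obj, 32, 32, 32 };
  printf ("%d\n", toy_equal_ref_p (&r1, &r2));  /* prints 1 */
  return 0;
}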
Example #2
static tree
cp_ubsan_check_member_access_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p, t;
  cp_ubsan_check_member_access_data *ucmd
    = (cp_ubsan_check_member_access_data *) data;
  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      t = TREE_OPERAND (stmt, 0);
      while ((TREE_CODE (t) == MEM_REF || TREE_CODE (t) == INDIRECT_REF)
	     && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
	t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
      if (handled_component_p (t))
	{
	  *walk_subtrees = 0;
	  ucmd->is_addr = true;
	  cp_walk_tree (&t, cp_ubsan_check_member_access_r,
			data, ucmd->pset);
	  ucmd->is_addr = false;
	}
      break;
    case MEM_REF:
    case INDIRECT_REF:
      t = TREE_OPERAND (stmt, 0);
      if (TREE_CODE (t) == ADDR_EXPR)
	{
	  *walk_subtrees = 0;
	  t = TREE_OPERAND (stmt, 0);
	  cp_walk_tree (&t, cp_ubsan_check_member_access_r, data, ucmd->pset);
	}
      break;
    case COMPONENT_REF:
      if (!ucmd->is_addr && cp_ubsan_maybe_instrument_member_access (stmt, ucmd))
	{
	  *walk_subtrees = 0;
	  break;
	}
      /* FALLTHRU */
    default:
      if (ucmd->is_addr && handled_component_p (stmt))
	{
	  int i, len = TREE_OPERAND_LENGTH (stmt);
	  *walk_subtrees = 0;
	  if (!handled_component_p (TREE_OPERAND (stmt, 0)))
	    ucmd->is_addr = false;
	  for (i = 0; i < len; i++)
	    {
	      cp_walk_tree (&TREE_OPERAND (stmt, i),
			    cp_ubsan_check_member_access_r, data, ucmd->pset);
	      ucmd->is_addr = false;
	    }
	  ucmd->is_addr = true;
	}
      break;
    }
  return NULL_TREE;
}
Example #3
static void
check_tree (ipa_reference_local_vars_info_t local, tree t, bool checking_write)
{
  if ((TREE_CODE (t) == EXC_PTR_EXPR) || (TREE_CODE (t) == FILTER_EXPR))
    return;

  while (TREE_CODE (t) == REALPART_EXPR 
         || TREE_CODE (t) == IMAGPART_EXPR
         || handled_component_p (t))
    {
      if (TREE_CODE (t) == ARRAY_REF)
        check_operand (local, TREE_OPERAND (t, 1), false);
      t = TREE_OPERAND (t, 0);
    }

  /* The bottom of an indirect reference can only be read, not
     written.  So just recurse and whatever we find, check it against
     the read bitmaps.  */

  /*  if (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF) */
  /* FIXME when we have array_ref's of pointers.  */
  if (INDIRECT_REF_P (t))
    check_tree (local, TREE_OPERAND (t, 0), false);

  if (SSA_VAR_P (t))
    check_operand (local, t, checking_write);
}
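
check_tree peels reference wrappers from the outside in and, whenever it peels an ARRAY_REF, checks the index operand (operand 1) as a read; only the base inherits the caller's read/write flag. Below is a self-contained sketch of that walk, with an invented toy tree in place of GCC's (all toy_* names are assumptions):

#include <stdbool.h>
#include <stdio.h>

enum toy_code { TOY_VAR, TOY_COMPONENT_REF, TOY_ARRAY_REF };

struct toy_node
{
  enum toy_code code;
  struct toy_node *op0;   /* the object being referenced */
  struct toy_node *op1;   /* the index, for TOY_ARRAY_REF */
};

/* Analogue of handled_component_p: wrappers that carry the
   referenced object in operand 0.  */
static bool
toy_handled_component_p (struct toy_node *t)
{
  return t->code == TOY_COMPONENT_REF || t->code == TOY_ARRAY_REF;
}

/* Walk down to the base; array indices are always reads.  */
static void
toy_check_tree (struct toy_node *t, bool checking_write)
{
  while (toy_handled_component_p (t))
    {
      if (t->code == TOY_ARRAY_REF)
	printf ("index %p checked as a read\n", (void *) t->op1);
      t = t->op0;
    }
  printf ("base %p checked as a %s\n", (void *) t,
	  checking_write ? "write" : "read");
}

int
main (void)
{
  struct toy_node idx = { TOY_VAR, NULL, NULL };
  struct toy_node base = { TOY_VAR, NULL, NULL };
  struct toy_node aref = { TOY_ARRAY_REF, &base, &idx };
  toy_check_tree (&aref, true);   /* index is a read, base a write */
  return 0;
}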
Example #4
bool
walk_stmt_load_store_addr_ops (gimple *stmt, void *data,
			       walk_stmt_load_store_addr_fn visit_load,
			       walk_stmt_load_store_addr_fn visit_store,
			       walk_stmt_load_store_addr_fn visit_addr)
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs, arg;
      if (visit_store)
	{
	  arg = gimple_assign_lhs (stmt);
	  lhs = get_base_loadstore (arg);
	  if (lhs)
	    ret |= visit_store (stmt, lhs, arg, data);
	}
      arg = gimple_assign_rhs1 (stmt);
      rhs = arg;
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), arg, data);
	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), arg,
			       data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), arg, data);
	  else if (TREE_CODE (rhs) == CONSTRUCTOR)
	    {
	      unsigned int ix;
	      tree val;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
		if (TREE_CODE (val) == ADDR_EXPR)
		  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), arg, data);
		else if (TREE_CODE (val) == OBJ_TYPE_REF
			 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
		  ret |= visit_addr (stmt,
				     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
						   0), arg, data);
	    }
          lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == TARGET_MEM_REF
              && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), lhs, data);
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, arg, data);
	}
    }
Example #5
void
mark_addressable (tree x)
{
  while (handled_component_p (x))
    x = TREE_OPERAND (x, 0);
  if (TREE_CODE (x) == MEM_REF
      && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
    x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
  if (!VAR_P (x)
      && TREE_CODE (x) != PARM_DECL
      && TREE_CODE (x) != RESULT_DECL)
    return;
  mark_addressable_1 (x);

  /* Also mark the artificial SSA_NAME that points to the partition of X.  */
  if (TREE_CODE (x) == VAR_DECL
      && !DECL_EXTERNAL (x)
      && !TREE_STATIC (x)
      && cfun->gimple_df != NULL
      && cfun->gimple_df->decls_to_pointers != NULL)
    {
      tree *namep = cfun->gimple_df->decls_to_pointers->get (x);
      if (namep)
	mark_addressable_1 (*namep);
    }
}
Example #6
static bool
constant_after_peeling (tree op, gimple *stmt, struct loop *loop)
{
  affine_iv iv;

  if (is_gimple_min_invariant (op))
    return true;

  /* We can still fold accesses to constant arrays when the index is known.  */
  if (TREE_CODE (op) != SSA_NAME)
    {
      tree base = op;

      /* First take a quick look to see if there is a constant array inside.  */
      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);
      if ((DECL_P (base)
	   && ctor_for_folding (base) != error_mark_node)
	  || CONSTANT_CLASS_P (base))
	{
	  /* If so, see if we understand all the indices.  */
	  base = op;
	  while (handled_component_p (base))
	    {
	      if (TREE_CODE (base) == ARRAY_REF
		  && !constant_after_peeling (TREE_OPERAND (base, 1), stmt, loop))
		return false;
	      base = TREE_OPERAND (base, 0);
	    }
	  return true;
	}
      return false;
    }

  /* Induction variables are constants.  */
  if (!simple_iv (loop, loop_containing_stmt (stmt), op, &iv, false))
    return false;
  if (!is_gimple_min_invariant (iv.base))
    return false;
  if (!is_gimple_min_invariant (iv.step))
    return false;
  return true;
}
Example #7
bool
is_gimple_addressable (tree t)
{
  return (is_gimple_id (t) || handled_component_p (t)
          /* LLVM LOCAL begin */
#ifdef ENABLE_LLVM
          || llvm_is_array_arrayref_extension (t)
#endif
          /* LLVM LOCAL end */
	  || INDIRECT_REF_P (t));
}
Example #8
static tree
get_base_loadstore (tree op)
{
  while (handled_component_p (op))
    op = TREE_OPERAND (op, 0);
  if (DECL_P (op)
      || INDIRECT_REF_P (op)
      || TREE_CODE (op) == MEM_REF
      || TREE_CODE (op) == TARGET_MEM_REF)
    return op;
  return NULL_TREE;
}
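
get_base_loadstore shows the classify-or-reject shape of the stripping idiom: peel the wrappers, then return the base only if it is something a load or store can actually touch. A hedged sketch with the same shape follows (toy_* names are invented, and one wrapper kind stands in for all handled components):

#include <stdio.h>

enum toy_code { TOY_VAR_DECL, TOY_SSA_NAME, TOY_COMPONENT_REF, TOY_MEM_REF };

struct toy_node
{
  enum toy_code code;
  struct toy_node *op0;   /* referenced object, for TOY_COMPONENT_REF */
};

/* Strip wrappers, then accept only bases that can carry a load or
   store; anything else (e.g. a bare SSA name) yields NULL.  */
static struct toy_node *
toy_get_base_loadstore (struct toy_node *op)
{
  while (op->code == TOY_COMPONENT_REF)
    op = op->op0;
  if (op->code == TOY_VAR_DECL || op->code == TOY_MEM_REF)
    return op;
  return NULL;
}

int
main (void)
{
  struct toy_node var = { TOY_VAR_DECL, NULL };
  struct toy_node cref = { TOY_COMPONENT_REF, &var };
  printf ("%d\n", toy_get_base_loadstore (&cref) == &var);  /* prints 1 */
  return 0;
}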
Example #9
tree
get_base_address (tree t)
{
  /* LLVM LOCAL begin */
#ifndef ENABLE_LLVM
  while (handled_component_p (t))
#else
  /* Support the "array ref with pointer base" extension. */
  while (handled_component_p (t) || TREE_CODE(t) == ARRAY_REF)
#endif
  /* LLVM LOCAL end */
    t = TREE_OPERAND (t, 0);
  
  if (SSA_VAR_P (t)
      || TREE_CODE (t) == STRING_CST
      || TREE_CODE (t) == CONSTRUCTOR
      || INDIRECT_REF_P (t))
    return t;
  else
    return NULL_TREE;
}
Example #10
static alias_set_type
gfc_get_alias_set (tree t)
{
  tree u;

  /* Permit type-punning when accessing an EQUIVALENCEd variable or
     mixed type entry master's return value.  */
  for (u = t; handled_component_p (u); u = TREE_OPERAND (u, 0))
    if (TREE_CODE (u) == COMPONENT_REF
	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
      return 0;

  return -1;
}
Example #11
tree
get_base_address (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);
  
  if (SSA_VAR_P (t)
      || TREE_CODE (t) == STRING_CST
      || TREE_CODE (t) == CONSTRUCTOR
      || INDIRECT_REF_P (t))
    return t;
  else
    return NULL_TREE;
}
Example #12
static bool
dest_safe_for_nrv_p (tree dest)
{
  while (handled_component_p (dest))
    dest = TREE_OPERAND (dest, 0);

  if (! SSA_VAR_P (dest))
    return false;

  if (TREE_CODE (dest) == SSA_NAME)
    dest = SSA_NAME_VAR (dest);

  if (is_call_used (dest))
    return false;

  return true;
}
Example #13
static void
check_tree (funct_state local, tree t, bool checking_write)
{
  if ((TREE_CODE (t) == EXC_PTR_EXPR) || (TREE_CODE (t) == FILTER_EXPR))
    return;

  /* Any tree that is volatile disqualifies this function from being
     const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      return;
    }

  while (TREE_CODE (t) == REALPART_EXPR 
	 || TREE_CODE (t) == IMAGPART_EXPR
	 || handled_component_p (t))
    {
      if (TREE_CODE (t) == ARRAY_REF)
	check_operand (local, TREE_OPERAND (t, 1), false);
      t = TREE_OPERAND (t, 0);
    }

  /* The bottom of an indirect reference can only be read, not
     written.  */
  if (INDIRECT_REF_P (t))
    {
      check_tree (local, TREE_OPERAND (t, 0), false);
      
      /* Any indirect reference that occurs on the lhs
	 disqualifies the function from being pure or const. Any
	 indirect reference that occurs on the rhs disqualifies the
	 function from being const.  */
      if (checking_write) 
	{
	  local->pure_const_state = IPA_NEITHER;
	  return;
	}
      else if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }

  if (SSA_VAR_P (t))
    check_operand (local, t, checking_write);
}
Example #14
static hashval_t
avail_expr_hash (class expr_hash_elt *p)
{
  const struct hashable_expr *expr = p->expr ();
  inchash::hash hstate;

  if (expr->kind == EXPR_SINGLE)
    {
      /* T could potentially be a switch index or a goto dest.  */
      tree t = expr->ops.single.rhs;
      if (TREE_CODE (t) == MEM_REF || handled_component_p (t))
	{
	  /* Make equivalent statements of both these kinds hash together.
	     Dealing with both MEM_REF and ARRAY_REF allows us not to care
	     about equivalence with other statements not considered here.  */
	  bool reverse;
	  HOST_WIDE_INT offset, size, max_size;
	  tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
					       &reverse);
	  /* Strictly, we could try to normalize variable-sized accesses too,
	    but here we just deal with the common case.  */
	  if (size != -1
	      && size == max_size)
	    {
	      enum tree_code code = MEM_REF;
	      hstate.add_object (code);
	      inchash::add_expr (base, hstate);
	      hstate.add_object (offset);
	      hstate.add_object (size);
	      return hstate.end ();
	    }
	}
    }

  inchash::add_hashable_expr (expr, hstate);

  return hstate.end ();
}
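
The point of avail_expr_hash is that every fixed-size reference is hashed from the same normalized key — the MEM_REF code plus (base, offset, size) — so references that the equality check in Example #1 would accept also hash together. Here is a sketch of that key construction with a simple FNV-1a style combiner (inchash is GCC-internal; everything below is an invented stand-in):

#include <stdint.h>
#include <stdio.h>

/* Tiny FNV-1a style combiner standing in for inchash::hash.  */
static uint64_t
toy_hash_combine (uint64_t h, uint64_t v)
{
  return (h ^ v) * 1099511628211ull;
}

/* Hash the normalized form: a fixed code plus (base, offset, size).
   Equal (base, offset, size) triples get equal hashes.  */
static uint64_t
toy_ref_hash (uintptr_t base, long offset, long size)
{
  uint64_t h = 14695981039346656037ull;
  h = toy_hash_combine (h, 42);   /* stand-in for the MEM_REF code */
  h = toy_hash_combine (h, (uint64_t) base);
  h = toy_hash_combine (h, (uint64_t) offset);
  h = toy_hash_combine (h, (uint64_t) size);
  return h;
}

int
main (void)
{
  int obj;
  uintptr_t b = (uintptr_t) &obj;
  printf ("%d\n", toy_ref_hash (b, 32, 32) == toy_ref_hash (b, 32, 32));
  return 0;   /* prints 1 */
}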
Example #15
bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF
	   || TREE_CODE (op) == ARRAY_RANGE_REF)
	  && !is_gimple_val (TREE_OPERAND (op, 1)))
	    return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}
Example #16
tree
convert_to_void (tree expr, impl_conv_void implicit, tsubst_flags_t complain)
{
  if (expr == error_mark_node
      || TREE_TYPE (expr) == error_mark_node)
    return error_mark_node;

  if (implicit == ICV_CAST)
    mark_exp_read (expr);
  else
    {
      tree exprv = expr;

      while (TREE_CODE (exprv) == COMPOUND_EXPR)
	exprv = TREE_OPERAND (exprv, 1);
      if (DECL_P (exprv)
	  || handled_component_p (exprv)
	  || TREE_CODE (exprv) == INDIRECT_REF)
	/* Expr is not being 'used' here, otherwise we would have
	   called mark_{rl}value_use here, which would have in turn
	   called mark_exp_read.  Rather, we call mark_exp_read directly
	   to avoid some warnings when
	   -Wunused-but-set-{variable,parameter} is in effect.  */
	mark_exp_read (exprv);
    }

  if (!TREE_TYPE (expr))
    return expr;
  if (invalid_nonstatic_memfn_p (expr, complain))
    return error_mark_node;
  if (TREE_CODE (expr) == PSEUDO_DTOR_EXPR)
    {
      if (complain & tf_error)
        error ("pseudo-destructor is not called");
      return error_mark_node;
    }
  if (VOID_TYPE_P (TREE_TYPE (expr)))
    return expr;
  switch (TREE_CODE (expr))
    {
    case COND_EXPR:
      {
	/* The two parts of a cond expr might be separate lvalues.  */
	tree op1 = TREE_OPERAND (expr,1);
	tree op2 = TREE_OPERAND (expr,2);
	bool side_effects = TREE_SIDE_EFFECTS (op1) || TREE_SIDE_EFFECTS (op2);
	tree new_op1, new_op2;
	if (implicit != ICV_CAST && !side_effects)
	  {
	    new_op1 = convert_to_void (op1, ICV_SECOND_OF_COND, complain);
	    new_op2 = convert_to_void (op2, ICV_THIRD_OF_COND, complain);
	  }
	else
	  {
	    new_op1 = convert_to_void (op1, ICV_CAST, complain);
	    new_op2 = convert_to_void (op2, ICV_CAST, complain);
	  }

	expr = build3 (COND_EXPR, TREE_TYPE (new_op1),
		       TREE_OPERAND (expr, 0), new_op1, new_op2);
	break;
      }

    case COMPOUND_EXPR:
      {
	/* The second part of a compound expr contains the value.  */
	tree op1 = TREE_OPERAND (expr,1);
	tree new_op1;
	if (implicit != ICV_CAST && !TREE_NO_WARNING (expr))
	  new_op1 = convert_to_void (op1, ICV_RIGHT_OF_COMMA, complain);
	else
	  new_op1 = convert_to_void (op1, ICV_CAST, complain);

	if (new_op1 != op1)
	  {
	    tree t = build2 (COMPOUND_EXPR, TREE_TYPE (new_op1),
			     TREE_OPERAND (expr, 0), new_op1);
	    expr = t;
	  }

	break;
      }

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
      /* These have already decayed to rvalue.  */
      break;

    case CALL_EXPR:   /* We have a special meaning for volatile void fn().  */
      break;

    case INDIRECT_REF:
      {
	tree type = TREE_TYPE (expr);
	int is_reference = TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0)))
			   == REFERENCE_TYPE;
	int is_volatile = TYPE_VOLATILE (type);
	int is_complete = COMPLETE_TYPE_P (complete_type (type));

	/* Can't load the value if we don't know the type.  */
	if (is_volatile && !is_complete)
          {
            if (complain & tf_warning)
	      switch (implicit)
		{
	      	  case ICV_CAST:
		    warning (0, "conversion to void will not access "
				"object of incomplete type %qT", type);
		    break;
		  case ICV_SECOND_OF_COND:
		    warning (0, "indirection will not access object of "
				"incomplete type %qT in second operand "
				"of conditional expression", type);
		    break;
		  case ICV_THIRD_OF_COND:
		    warning (0, "indirection will not access object of "
				"incomplete type %qT in third operand "
				"of conditional expression", type);
		    break;
		  case ICV_RIGHT_OF_COMMA:
		    warning (0, "indirection will not access object of "
				"incomplete type %qT in right operand of "
				"comma operator", type);
		    break;
		  case ICV_LEFT_OF_COMMA:
		    warning (0, "indirection will not access object of "
				"incomplete type %qT in left operand of "
				"comma operator", type);
		    break;
		  case ICV_STATEMENT:
		    warning (0, "indirection will not access object of "
				"incomplete type %qT in statement", type);
		     break;
		  case ICV_THIRD_IN_FOR:
		    warning (0, "indirection will not access object of "
				"incomplete type %qT in for increment "
				"expression", type);
		    break;
		  default:
		    gcc_unreachable ();
		}
          }
	/* Don't load the value if this is an implicit dereference, or if
	   the type needs to be handled by ctors/dtors.  */
	else if (is_volatile && is_reference)
          {
            if (complain & tf_warning)
	      switch (implicit)
		{
	      	  case ICV_CAST:
		    warning (0, "conversion to void will not access "
				"object of type %qT", type);
		    break;
		  case ICV_SECOND_OF_COND:
		    warning (0, "implicit dereference will not access object "
				"of type %qT in second operand of "
				"conditional expression", type);
		    break;
		  case ICV_THIRD_OF_COND:
		    warning (0, "implicit dereference will not access object "
		  	      	"of type %qT in third operand of "
				"conditional expression", type);
		    break;
		  case ICV_RIGHT_OF_COMMA:
		    warning (0, "implicit dereference will not access object "
		    		"of type %qT in right operand of "
				"comma operator", type);
		    break;
		  case ICV_LEFT_OF_COMMA:
		    warning (0, "implicit dereference will not access object "
		    		"of type %qT in left operand of comma operator",
			     type);
		    break;
		  case ICV_STATEMENT:
		    warning (0, "implicit dereference will not access object "
		     		"of type %qT in statement",  type);
		     break;
		  case ICV_THIRD_IN_FOR:
		    warning (0, "implicit dereference will not access object "
		    		"of type %qT in for increment expression",
			     type);
		    break;
		  default:
		    gcc_unreachable ();
		}
          }
	else if (is_volatile && TREE_ADDRESSABLE (type))
	  {
	    if (complain & tf_warning)
	      switch (implicit)
		{
	      	  case ICV_CAST:
		    warning (0, "conversion to void will not access "
				"object of non-trivially-copyable type %qT",
			     type);
		    break;
		  case ICV_SECOND_OF_COND:
		    warning (0, "indirection will not access object of "
				"non-trivially-copyable type %qT in second "
				"operand of conditional expression", type);
		    break;
		  case ICV_THIRD_OF_COND:
		    warning (0, "indirection will not access object of "
		  	      	"non-trivially-copyable type %qT in third "
				"operand of conditional expression", type);
		    break;
		  case ICV_RIGHT_OF_COMMA:
		    warning (0, "indirection will not access object of "
		    		"non-trivially-copyable type %qT in right "
				"operand of comma operator", type);
		    break;
		  case ICV_LEFT_OF_COMMA:
		    warning (0, "indirection will not access object of "
		    		"non-trivially-copyable type %qT in left "
				"operand of comma operator", type);
		    break;
		  case ICV_STATEMENT:
		    warning (0, "indirection will not access object of "
		     		"non-trivially-copyable type %qT in statement",
			      type);
		     break;
		  case ICV_THIRD_IN_FOR:
		    warning (0, "indirection will not access object of "
		    		"non-trivially-copyable type %qT in for "
				"increment expression", type);
		    break;
		  default:
		    gcc_unreachable ();
		}
	  }
	if (is_reference || !is_volatile || !is_complete || TREE_ADDRESSABLE (type))
          {
            /* Emit a warning (if enabled) when the "effect-less" INDIRECT_REF
               operation is stripped off. Note that we don't warn about
               - an expression with TREE_NO_WARNING set. (For an example of
                 such expressions, see build_over_call in call.c.)
               - automatic dereferencing of references, since the user cannot
                 control it. (See also warn_if_unused_value() in stmt.c.)  */
            if (warn_unused_value
		&& implicit != ICV_CAST
                && (complain & tf_warning)
                && !TREE_NO_WARNING (expr)
                && !is_reference)
              warning (OPT_Wunused_value, "value computed is not used");
            expr = TREE_OPERAND (expr, 0);
          }

	break;
      }

    case VAR_DECL:
      {
	/* External variables might be incomplete.  */
	tree type = TREE_TYPE (expr);
	int is_complete = COMPLETE_TYPE_P (complete_type (type));

	if (TYPE_VOLATILE (type) && !is_complete && (complain & tf_warning))
	  switch (implicit)
	    {
	      case ICV_CAST:
		warning (0, "conversion to void will not access "
			    "object %qE of incomplete type %qT", expr, type);
		break;
	      case ICV_SECOND_OF_COND:
	        warning (0, "variable %qE of incomplete type %qT will not "
			    "be accessed in second operand of "
			    "conditional expression", expr, type);
		break;
	      case ICV_THIRD_OF_COND:
	        warning (0, "variable %qE of incomplete type %qT will not "
			    "be accessed in third operand of "
			    "conditional expression", expr, type);
		break;
	      case ICV_RIGHT_OF_COMMA:
	        warning (0, "variable %qE of incomplete type %qT will not "
			    "be accessed in right operand of comma operator",
			 expr, type);
		break;
	      case ICV_LEFT_OF_COMMA:
	        warning (0, "variable %qE of incomplete type %qT will not "
			    "be accessed in left operand of comma operator",
			 expr, type);
		break;
	      case ICV_STATEMENT:
	        warning (0, "variable %qE of incomplete type %qT will not "
		            "be accessed in statement", expr, type);
		break;
	      case ICV_THIRD_IN_FOR:
	        warning (0, "variable %qE of incomplete type %qT will not "
			    "be accessed in for increment expression",
		         expr, type);
		break;
	      default:
	        gcc_unreachable ();
	    }

	break;
      }

    case TARGET_EXPR:
      /* Don't bother with the temporary object returned from a function if
	 we don't use it and don't need to destroy it.  We'll still
	 allocate space for it in expand_call or declare_return_variable,
	 but we don't need to track it through all the tree phases.  */
      if (TARGET_EXPR_IMPLICIT_P (expr)
	  && TYPE_HAS_TRIVIAL_DESTRUCTOR (TREE_TYPE (expr)))
	{
	  tree init = TARGET_EXPR_INITIAL (expr);
	  if (TREE_CODE (init) == AGGR_INIT_EXPR
	      && !AGGR_INIT_VIA_CTOR_P (init))
	    {
	      tree fn = AGGR_INIT_EXPR_FN (init);
	      expr = build_call_array_loc (input_location,
					   TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))),
					   fn,
					   aggr_init_expr_nargs (init),
					   AGGR_INIT_EXPR_ARGP (init));
	    }
	}
      break;

    default:;
    }
  expr = resolve_nondeduced_context (expr);
  {
    tree probe = expr;

    if (TREE_CODE (probe) == ADDR_EXPR)
      probe = TREE_OPERAND (expr, 0);
    if (type_unknown_p (probe))
      {
	/* [over.over] enumerates the places where we can take the address
	   of an overloaded function, and this is not one of them.  */
	if (complain & tf_error)
	  switch (implicit)
	    {
	      case ICV_CAST:
		error ("conversion to void "
		       "cannot resolve address of overloaded function");
		break;
	      case ICV_SECOND_OF_COND:
		error ("second operand of conditional expression "
		       "cannot resolve address of overloaded function");
		break;
	      case ICV_THIRD_OF_COND:
		error ("third operand of conditional expression "
		       "cannot resolve address of overloaded function");
		break;
	      case ICV_RIGHT_OF_COMMA:
		error ("right operand of comma operator "
		       "cannot resolve address of overloaded function");
		break;
	      case ICV_LEFT_OF_COMMA:
		error ("left operand of comma operator "
		       "cannot resolve address of overloaded function");
		break;
	      case ICV_STATEMENT:
		error ("statement "
		       "cannot resolve address of overloaded function");
		break;
	      case ICV_THIRD_IN_FOR:
		error ("for increment expression "
		       "cannot resolve address of overloaded function");
		break;
	    }
	else
	  return error_mark_node;
	expr = void_zero_node;
      }
    else if (implicit != ICV_CAST && probe == expr && is_overloaded_fn (probe))
      {
	/* Only warn when there is no &.  */
	if (complain & tf_warning)
	  switch (implicit)
	    {
	      case ICV_SECOND_OF_COND:
	        warning (OPT_Waddress,
			 "second operand of conditional expression "
			 "is a reference, not call, to function %qE", expr);
		break;
	      case ICV_THIRD_OF_COND:
	        warning (OPT_Waddress,
			 "third operand of conditional expression "
			 "is a reference, not call, to function %qE", expr);
		break;
	      case ICV_RIGHT_OF_COMMA:
	        warning (OPT_Waddress,
			 "right operand of comma operator "
			 "is a reference, not call, to function %qE", expr);
		break;
	      case ICV_LEFT_OF_COMMA:
	        warning (OPT_Waddress,
			 "left operand of comma operator "
			 "is a reference, not call, to function %qE", expr);
		break;
	      case ICV_STATEMENT:
	        warning (OPT_Waddress,
			 "statement is a reference, not call, to function %qE",
			 expr);
		break;
	      case ICV_THIRD_IN_FOR:
	        warning (OPT_Waddress,
			 "for increment expression "
			 "is a reference, not call, to function %qE", expr);
		break;
	      default:
	        gcc_unreachable ();
	    }

	if (TREE_CODE (expr) == COMPONENT_REF)
	  expr = TREE_OPERAND (expr, 0);
      }
  }

  if (expr != error_mark_node && !VOID_TYPE_P (TREE_TYPE (expr)))
    {
      if (implicit != ICV_CAST
	  && warn_unused_value
	  && !TREE_NO_WARNING (expr)
	  && !processing_template_decl)
	{
	  /* The middle end does not warn about expressions that have
	     been explicitly cast to void, so we must do so here.  */
	  if (!TREE_SIDE_EFFECTS (expr)) {
            if (complain & tf_warning)
	      switch (implicit)
		{
		  case ICV_SECOND_OF_COND:
		    warning (OPT_Wunused_value,
			     "second operand of conditional expression has no effect");
		    break;
		  case ICV_THIRD_OF_COND:
		    warning (OPT_Wunused_value,
		    	     "third operand of conditional expression has no effect");
		    break;
		  case ICV_RIGHT_OF_COMMA:
		    warning (OPT_Wunused_value,
		    	     "right operand of comma operator has no effect");
		    break;
		  case ICV_LEFT_OF_COMMA:
		    warning (OPT_Wunused_value,
		    	     "left operand of comma operator has no effect");
		    break;
		  case ICV_STATEMENT:
		    warning (OPT_Wunused_value,
		    	     "statement has no effect");
		    break;
		  case ICV_THIRD_IN_FOR:
		    warning (OPT_Wunused_value,
		    	     "for increment expression has no effect");
		    break;
		  default:
		    gcc_unreachable ();
		}
          }
	  else
	    {
	      tree e;
	      enum tree_code code;
	      enum tree_code_class tclass;

	      e = expr;
	      /* We might like to warn about (say) "(int) f()", as the
		 cast has no effect, but the compiler itself will
		 generate implicit conversions under some
		 circumstances.  (For example a block copy will be
		 turned into a call to "__builtin_memcpy", with a
		 conversion of the return value to an appropriate
		 type.)  So, to avoid false positives, we strip
		 conversions.  Do not use STRIP_NOPs because it will
		 not strip conversions to "void", as that is not a
		 mode-preserving conversion.  */
	      while (TREE_CODE (e) == NOP_EXPR)
		e = TREE_OPERAND (e, 0);

	      code = TREE_CODE (e);
	      tclass = TREE_CODE_CLASS (code);
	      if ((tclass == tcc_comparison
		   || tclass == tcc_unary
		   || (tclass == tcc_binary
		       && !(code == MODIFY_EXPR
			    || code == INIT_EXPR
			    || code == PREDECREMENT_EXPR
			    || code == PREINCREMENT_EXPR
			    || code == POSTDECREMENT_EXPR
			    || code == POSTINCREMENT_EXPR)))
                  && (complain & tf_warning))
		warning (OPT_Wunused_value, "value computed is not used");
	    }
	}
      expr = build1 (CONVERT_EXPR, void_type_node, expr);
    }
  if (! TREE_SIDE_EFFECTS (expr))
    expr = void_zero_node;
  return expr;
}
Example #17
tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree ret, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
		         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be anyway.  */
  ret = create_mem_ref_raw (TREE_TYPE (ref),
			    TREE_TYPE (addr.offset), &addr, false);
  copy_mem_ref_info (ret, ref);
  return ret;
}
Example #18
bool
find_rank (location_t loc, tree orig_expr, tree expr, bool ignore_builtin_fn,
	   size_t *rank)
{
  tree ii_tree;
  size_t ii = 0, current_rank = 0;

  if (TREE_CODE (expr) == ARRAY_NOTATION_REF)
    {
      ii_tree = expr;
      while (ii_tree)
	{
	  if (TREE_CODE (ii_tree) == ARRAY_NOTATION_REF)
	    {
	      current_rank++;
	      ii_tree = ARRAY_NOTATION_ARRAY (ii_tree);
	    }
	  else if (handled_component_p (ii_tree)
		   || TREE_CODE (ii_tree) == INDIRECT_REF)
	    ii_tree = TREE_OPERAND (ii_tree, 0);
	  else if (TREE_CODE (ii_tree) == PARM_DECL
		   || TREE_CODE (ii_tree) == VAR_DECL)
	    break;
	  else
	    gcc_unreachable ();
	}
      if (*rank == 0)
	/* In this case, all the expressions this function has encountered thus
	   far have been scalars or expressions with zero rank.  Please see the
	   header comment for examples of such expressions.  */
	*rank = current_rank;
      else if (*rank != current_rank)
	{
	  /* In this case, find_rank is recursing through a set of
	     expressions of the form A <OPERATION> B, where A and B both have
	     array notations in them and the rank of A is not equal to the
	     rank of B.
	     A simple example of such a case is the following: X[:] + Y[:][:] */
	  *rank = current_rank;
	  return false;
	}
    }
  else if (TREE_CODE (expr) == STATEMENT_LIST)
    {
      tree_stmt_iterator ii_tsi;
      for (ii_tsi = tsi_start (expr); !tsi_end_p (ii_tsi);
	   tsi_next (&ii_tsi))
	if (!find_rank (loc, orig_expr, *tsi_stmt_ptr (ii_tsi),
			ignore_builtin_fn, rank))
	  return false;
    }
  else
    {
      if (TREE_CODE (expr) == CALL_EXPR)
	{
	  tree func_name = CALL_EXPR_FN (expr);
	  tree prev_arg = NULL_TREE, arg;
	  call_expr_arg_iterator iter;
	  size_t prev_rank = 0;
	  if (TREE_CODE (func_name) == ADDR_EXPR)
	    if (!ignore_builtin_fn)
	      if (is_cilkplus_reduce_builtin (func_name))
		/* If it is a built-in function, then we know it returns a 
		   scalar.  */
		return true;
	  if (!find_rank (loc, orig_expr, func_name, ignore_builtin_fn, rank))
	    return false;
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, expr)
	    {
	      if (!find_rank (loc, orig_expr, arg, ignore_builtin_fn, rank))
		{
		  if (prev_arg && EXPR_HAS_LOCATION (prev_arg)
		      && prev_rank != *rank)
		    error_at (EXPR_LOCATION (prev_arg),
			      "rank mismatch between %qE and %qE", prev_arg,
			      arg);
		  else if (prev_arg && prev_rank != *rank)
		    /* Here the original expression is printed as a "heads-up"
		       to the programmer.  This is because there is no
		       location information for the offending argument, so the
		       error could be in some internally generated code that is
		       not visible to the programmer.  Thus, the correct fix
		       may lie in the original expression.  */
		    error_at (loc, "rank mismatch in expression %qE",
			      orig_expr);
		  return false;
		}
	      prev_arg = arg;
	      prev_rank = *rank;
	    }	
	}
      else
	{
Example #19
static void
output_gimple_stmt (struct output_block *ob, gimple stmt)
{
  unsigned i;
  enum gimple_code code;
  enum LTO_tags tag;
  struct bitpack_d bp;
  histogram_value hist;

  /* Emit identifying tag.  */
  code = gimple_code (stmt);
  tag = lto_gimple_code_to_tag (code);
  streamer_write_record_start (ob, tag);

  /* Emit the tuple header.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_var_len_unsigned (&bp, gimple_num_ops (stmt));
  bp_pack_value (&bp, gimple_no_warning_p (stmt), 1);
  if (is_gimple_assign (stmt))
    bp_pack_value (&bp, gimple_assign_nontemporal_move_p (stmt), 1);
  bp_pack_value (&bp, gimple_has_volatile_ops (stmt), 1);
  hist = gimple_histogram_value (cfun, stmt);
  bp_pack_value (&bp, hist != NULL, 1);
  bp_pack_var_len_unsigned (&bp, stmt->gsbase.subcode);

  /* Emit location information for the statement.  */
  stream_output_location (ob, &bp, LOCATION_LOCUS (gimple_location (stmt)));
  streamer_write_bitpack (&bp);

  /* Emit the lexical block holding STMT.  */
  stream_write_tree (ob, gimple_block (stmt), true);

  /* Emit the operands.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_RESX:
      streamer_write_hwi (ob, gimple_resx_region (stmt));
      break;

    case GIMPLE_EH_MUST_NOT_THROW:
      stream_write_tree (ob, gimple_eh_must_not_throw_fndecl (stmt), true);
      break;

    case GIMPLE_EH_DISPATCH:
      streamer_write_hwi (ob, gimple_eh_dispatch_region (stmt));
      break;

    case GIMPLE_ASM:
      streamer_write_uhwi (ob, gimple_asm_ninputs (stmt));
      streamer_write_uhwi (ob, gimple_asm_noutputs (stmt));
      streamer_write_uhwi (ob, gimple_asm_nclobbers (stmt));
      streamer_write_uhwi (ob, gimple_asm_nlabels (stmt));
      streamer_write_string (ob, ob->main_stream, gimple_asm_string (stmt),
			     true);
      /* Fallthru  */

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN:
    case GIMPLE_SWITCH:
    case GIMPLE_LABEL:
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_DEBUG:
      for (i = 0; i < gimple_num_ops (stmt); i++)
	{
	  tree op = gimple_op (stmt, i);
	  tree *basep = NULL;
	  /* Wrap all uses of non-automatic variables inside MEM_REFs
	     so that we do not have to deal with type mismatches on
	     merged symbols during IL read in.  The first operand
	     of GIMPLE_DEBUG must be a decl, not MEM_REF, though.  */
	  if (op && (i || !is_gimple_debug (stmt)))
	    {
	      basep = &op;
	      while (handled_component_p (*basep))
		basep = &TREE_OPERAND (*basep, 0);
	      if (TREE_CODE (*basep) == VAR_DECL
		  && !auto_var_in_fn_p (*basep, current_function_decl)
		  && !DECL_REGISTER (*basep))
		{
		  bool volatilep = TREE_THIS_VOLATILE (*basep);
		  *basep = build2 (MEM_REF, TREE_TYPE (*basep),
				   build_fold_addr_expr (*basep),
				   build_int_cst (build_pointer_type
						  (TREE_TYPE (*basep)), 0));
		  TREE_THIS_VOLATILE (*basep) = volatilep;
		}
	      else
		basep = NULL;
	    }
	  stream_write_tree (ob, op, true);
	  /* Restore the original base if we wrapped it inside a MEM_REF.  */
	  if (basep)
	    *basep = TREE_OPERAND (TREE_OPERAND (*basep, 0), 0);
	}
      if (is_gimple_call (stmt))
	{
	  if (gimple_call_internal_p (stmt))
	    streamer_write_enum (ob->main_stream, internal_fn,
				 IFN_LAST, gimple_call_internal_fn (stmt));
	  else
	    stream_write_tree (ob, gimple_call_fntype (stmt), true);
	}
      break;

    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_TRANSACTION:
      gcc_assert (gimple_transaction_body (stmt) == NULL);
      stream_write_tree (ob, gimple_transaction_label (stmt), true);
      break;

    default:
      gcc_unreachable ();
    }
  if (hist)
    stream_out_histogram_value (ob, hist);
}
Example #20
static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
			       gimple_stmt_iterator *use_stmt_gsi,
			       bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  tree *rhsp, *lhsp;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separately, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
	  || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
	 do the propagation to the current statement.  In particular
	 we can end up with a conversion needed for a non-invariant
	 address which we cannot do in a single statement.  */
      if (!single_use_p
	  || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
	      && !is_gimple_min_invariant (def_rhs)))
	return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
	gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
	gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS. 
     ADDR_EXPR will not appear on the LHS.  */
  lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is an INDIRECT_REF using NAME.  If so, 
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == INDIRECT_REF
      && TREE_OPERAND (lhs, 0) == name
      && may_propagate_address_into_dereference (def_rhs, lhs)
      && (lhsp != gimple_assign_lhs_ptr (use_stmt)
	  || useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
					TREE_TYPE (rhs))))
    {
      *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);

      /* Continue propagating into the RHS if this was not the only use.  */
      if (single_use_p)
	return true;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhsp = gimple_assign_rhs1_ptr (use_stmt);
  while (handled_component_p (*rhsp)
	 || TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF
      && TREE_OPERAND (rhs, 0) == name
      && may_propagate_address_into_dereference (def_rhs, rhs))
    {
      *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so, 
     propagate the ADDR_EXPR into the use of NAME and try to
     create a VCE and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF
      && TREE_OPERAND (rhs, 0) == name
      && TYPE_SIZE (TREE_TYPE (rhs))
      && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
      /* Function decls should not be used for VCE either as it could be a
         function descriptor that we want and not the actual function code.  */
      && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
      /* We should not convert volatile loads to non-volatile loads.  */
      && !TYPE_VOLATILE (TREE_TYPE (rhs))
      && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
      && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
			  TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0)) 
   {
     tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
     new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
     if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
       {
	 /* If we have folded the VIEW_CONVERT_EXPR then the result is only
	    valid if we can replace the whole rhs of the use statement.  */
	 if (rhs != gimple_assign_rhs1 (use_stmt))
	   return false;
	 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, NULL,
					     true, GSI_NEW_STMT);
	 gimple_assign_set_rhs1 (use_stmt, new_rhs);
	 tidy_after_forward_propagate_addr (use_stmt);
	 return true;
       }
     /* If the defining rhs comes from an indirect reference, then do not
        convert into a VIEW_CONVERT_EXPR.  */
     def_rhs_base = TREE_OPERAND (def_rhs, 0);
     while (handled_component_p (def_rhs_base))
       def_rhs_base = TREE_OPERAND (def_rhs_base, 0);
     if (!INDIRECT_REF_P (def_rhs_base))
       {
	 /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
	    reference.  Place it there and fold the thing.  */
	 *rhsp = new_rhs;
	 fold_stmt_inplace (use_stmt);
	 tidy_after_forward_propagate_addr (use_stmt);
	 return true;
       }
   }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do. */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if (TREE_CODE (array_ref) != ARRAY_REF
      || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
      || !integer_zerop (TREE_OPERAND (array_ref, 1)))
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Try to optimize &x[0] p+ C where C is a multiple of the size
     of the elements in X into &x[C/element size].  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = maybe_fold_stmt_addition (gimple_expr_type (use_stmt),
					       array_ref, rhs2);
      if (new_rhs)
	{
	  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
	  use_stmt = gsi_stmt (*use_stmt_gsi);
	  update_stmt (use_stmt);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return true;
	}
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is a multiplication
     of an index by the size of the array elements, with the result then
     converted into the proper type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
	 different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
							     use_stmt_gsi);
  return false;
}
Example #21
static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
					    object_size_type & ~1);
	}
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      if (sz != unknown[object_size_type])
	{
	  double_int dsz = double_int::from_uhwi (sz) - mem_ref_offset (pt_var);
	  if (dsz.is_negative ())
	    sz = 0;
	  else if (dsz.fits_uhwi ())
	    sz = dsz.to_uhwi ();
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && host_integerp (DECL_SIZE_UNIT (pt_var), 1)
	   && (unsigned HOST_WIDE_INT)
	        tree_low_cst (DECL_SIZE_UNIT (pt_var), 1) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	   && (unsigned HOST_WIDE_INT)
	      tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of a flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return unknown[object_size_type];
      else
	var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
Example #22
static gimple *
input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in,
		   enum LTO_tags tag)
{
  gimple *stmt;
  enum gimple_code code;
  unsigned HOST_WIDE_INT num_ops;
  size_t i;
  struct bitpack_d bp;
  bool has_hist;

  code = lto_tag_to_gimple_code (tag);

  /* Read the tuple header.  */
  bp = streamer_read_bitpack (ib);
  num_ops = bp_unpack_var_len_unsigned (&bp);
  stmt = gimple_alloc (code, num_ops);
  stmt->no_warning = bp_unpack_value (&bp, 1);
  if (is_gimple_assign (stmt))
    stmt->nontemporal_move = bp_unpack_value (&bp, 1);
  stmt->has_volatile_ops = bp_unpack_value (&bp, 1);
  has_hist = bp_unpack_value (&bp, 1);
  stmt->subcode = bp_unpack_var_len_unsigned (&bp);

  /* Read location information.  Caching here makes no sense until streamer
     cache can handle the following gimple_set_block.  */
  gimple_set_location (stmt, stream_input_location_now (&bp, data_in));

  /* Read lexical block reference.  */
  gimple_set_block (stmt, stream_read_tree (ib, data_in));

  /* Read in all the operands.  */
  switch (code)
    {
    case GIMPLE_RESX:
      gimple_resx_set_region (as_a <gresx *> (stmt),
			      streamer_read_hwi (ib));
      break;

    case GIMPLE_EH_MUST_NOT_THROW:
      gimple_eh_must_not_throw_set_fndecl (
	as_a <geh_mnt *> (stmt),
	stream_read_tree (ib, data_in));
      break;

    case GIMPLE_EH_DISPATCH:
      gimple_eh_dispatch_set_region (as_a <geh_dispatch *> (stmt),
				     streamer_read_hwi (ib));
      break;

    case GIMPLE_ASM:
      {
	/* FIXME lto.  Move most of this into a new gimple_asm_set_string().  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	tree str;
	asm_stmt->ni = streamer_read_uhwi (ib);
	asm_stmt->no = streamer_read_uhwi (ib);
	asm_stmt->nc = streamer_read_uhwi (ib);
	asm_stmt->nl = streamer_read_uhwi (ib);
	str = streamer_read_string_cst (data_in, ib);
	asm_stmt->string = TREE_STRING_POINTER (str);
      }
      /* Fallthru  */

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN:
    case GIMPLE_SWITCH:
    case GIMPLE_LABEL:
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_DEBUG:
      for (i = 0; i < num_ops; i++)
	{
	  tree *opp, op = stream_read_tree (ib, data_in);
	  gimple_set_op (stmt, i, op);
	  if (!op)
	    continue;

	  opp = gimple_op_ptr (stmt, i);
	  if (TREE_CODE (*opp) == ADDR_EXPR)
	    opp = &TREE_OPERAND (*opp, 0);
	  while (handled_component_p (*opp))
	    opp = &TREE_OPERAND (*opp, 0);
	  /* At LTO output time we wrap all global decls in MEM_REFs to
	     allow seamless replacement with prevailing decls.  Undo this
	     here if the prevailing decl allows for this.
	     ???  Maybe we should simply fold all stmts.  */
	  if (TREE_CODE (*opp) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (*opp, 0)) == ADDR_EXPR
	      && integer_zerop (TREE_OPERAND (*opp, 1))
	      && (TREE_THIS_VOLATILE (*opp)
		  == TREE_THIS_VOLATILE
		       (TREE_OPERAND (TREE_OPERAND (*opp, 0), 0)))
	      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*opp, 1)))
	      && (TREE_TYPE (*opp)
		  == TREE_TYPE (TREE_TYPE (TREE_OPERAND (*opp, 1))))
	      && (TREE_TYPE (*opp)
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*opp, 0), 0))))
	    *opp = TREE_OPERAND (TREE_OPERAND (*opp, 0), 0);
	}
      if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
	{
	  if (gimple_call_internal_p (call_stmt))
	    gimple_call_set_internal_fn
	      (call_stmt, streamer_read_enum (ib, internal_fn, IFN_LAST));
	  else
	    gimple_call_set_fntype (call_stmt, stream_read_tree (ib, data_in));
	}
      break;

    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_TRANSACTION:
      gimple_transaction_set_label (as_a <gtransaction *> (stmt),
				    stream_read_tree (ib, data_in));
      break;

    default:
      internal_error ("bytecode stream: unknown GIMPLE statement tag %s",
		      lto_tag_name (tag));
    }

  /* Update the properties of symbols, SSA names and labels associated
     with STMT.  */
  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	SSA_NAME_DEF_STMT (lhs) = stmt;
    }
  else if (code == GIMPLE_ASM)
    {
      gasm *asm_stmt = as_a <gasm *> (stmt);
      unsigned i;

      for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
	{
	  tree op = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
	  if (TREE_CODE (op) == SSA_NAME)
	    SSA_NAME_DEF_STMT (op) = stmt;
	}
    }

  /* Reset alias information.  */
  if (code == GIMPLE_CALL)
    gimple_call_reset_alias_info (as_a <gcall *> (stmt));

  /* Mark the statement modified so its operand vectors can be filled in.  */
  gimple_set_modified (stmt, true);
  if (has_hist)
    stream_in_histogram_value (ib, stmt);

  return stmt;
}
Example #23
static gimple
input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in,
		   struct function *fn, enum LTO_tags tag)
{
  gimple stmt;
  enum gimple_code code;
  unsigned HOST_WIDE_INT num_ops;
  size_t i;
  struct bitpack_d bp;

  code = lto_tag_to_gimple_code (tag);

  /* Read the tuple header.  */
  bp = streamer_read_bitpack (ib);
  num_ops = bp_unpack_var_len_unsigned (&bp);
  stmt = gimple_alloc (code, num_ops);
  stmt->gsbase.no_warning = bp_unpack_value (&bp, 1);
  if (is_gimple_assign (stmt))
    stmt->gsbase.nontemporal_move = bp_unpack_value (&bp, 1);
  stmt->gsbase.has_volatile_ops = bp_unpack_value (&bp, 1);
  stmt->gsbase.subcode = bp_unpack_var_len_unsigned (&bp);

  /* Read location information.  */
  gimple_set_location (stmt, lto_input_location (ib, data_in));

  /* Read lexical block reference.  */
  gimple_set_block (stmt, stream_read_tree (ib, data_in));

  /* Read in all the operands.  */
  switch (code)
    {
    case GIMPLE_RESX:
      gimple_resx_set_region (stmt, streamer_read_hwi (ib));
      break;

    case GIMPLE_EH_MUST_NOT_THROW:
      gimple_eh_must_not_throw_set_fndecl (stmt, stream_read_tree (ib, data_in));
      break;

    case GIMPLE_EH_DISPATCH:
      gimple_eh_dispatch_set_region (stmt, streamer_read_hwi (ib));
      break;

    case GIMPLE_ASM:
      {
	/* FIXME lto.  Move most of this into a new gimple_asm_set_string().  */
	tree str;
	stmt->gimple_asm.ni = streamer_read_uhwi (ib);
	stmt->gimple_asm.no = streamer_read_uhwi (ib);
	stmt->gimple_asm.nc = streamer_read_uhwi (ib);
	stmt->gimple_asm.nl = streamer_read_uhwi (ib);
	str = streamer_read_string_cst (data_in, ib);
	stmt->gimple_asm.string = TREE_STRING_POINTER (str);
      }
      /* Fallthru  */

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN:
    case GIMPLE_SWITCH:
    case GIMPLE_LABEL:
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_DEBUG:
      for (i = 0; i < num_ops; i++)
	{
	  tree op = stream_read_tree (ib, data_in);
	  gimple_set_op (stmt, i, op);
	  if (!op)
	    continue;

	  /* Fixup FIELD_DECLs in COMPONENT_REFs, they are not handled
	     by decl merging.  */
	  if (TREE_CODE (op) == ADDR_EXPR)
	    op = TREE_OPERAND (op, 0);
	  while (handled_component_p (op))
	    {
	      if (TREE_CODE (op) == COMPONENT_REF)
		{
		  tree field, type, tem;
		  tree closest_match = NULL_TREE;
		  field = TREE_OPERAND (op, 1);
		  type = DECL_CONTEXT (field);
		  for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
		    {
		      if (TREE_CODE (tem) != FIELD_DECL)
			continue;
		      if (tem == field)
			break;
		      if (DECL_NONADDRESSABLE_P (tem)
			  == DECL_NONADDRESSABLE_P (field)
			  && gimple_compare_field_offset (tem, field))
			{
			  if (types_compatible_p (TREE_TYPE (tem),
						  TREE_TYPE (field)))
			    break;
			  else
			    closest_match = tem;
			}
		    }
		  /* In case of type mismatches across units we can fail
		     to unify some types and thus not find a proper
		     field-decl here.  */
		  if (tem == NULL_TREE)
		    {
		      /* Thus, emit an ODR violation warning.  */
		      if (warning_at (gimple_location (stmt), 0,
				      "use of type %<%E%> with two mismatching "
				      "declarations at field %<%E%>",
				      type, TREE_OPERAND (op, 1)))
			{
			  if (TYPE_FIELDS (type))
			    inform (DECL_SOURCE_LOCATION (TYPE_FIELDS (type)),
				    "original type declared here");
			  inform (DECL_SOURCE_LOCATION (TREE_OPERAND (op, 1)),
				  "field in mismatching type declared here");
			  if (TYPE_NAME (TREE_TYPE (field))
			      && (TREE_CODE (TYPE_NAME (TREE_TYPE (field)))
				  == TYPE_DECL))
			    inform (DECL_SOURCE_LOCATION
				      (TYPE_NAME (TREE_TYPE (field))),
				    "type of field declared here");
			  if (closest_match
			      && TYPE_NAME (TREE_TYPE (closest_match))
			      && (TREE_CODE (TYPE_NAME
				   (TREE_TYPE (closest_match))) == TYPE_DECL))
			    inform (DECL_SOURCE_LOCATION
				      (TYPE_NAME (TREE_TYPE (closest_match))),
				    "type of mismatching field declared here");
			}
		      /* And finally fixup the types.  */
		      TREE_OPERAND (op, 0)
			= build1 (VIEW_CONVERT_EXPR, type,
				  TREE_OPERAND (op, 0));
		    }
		  else
		    TREE_OPERAND (op, 1) = tem;
		}

	      op = TREE_OPERAND (op, 0);
	    }
	}
      if (is_gimple_call (stmt))
	{
	  if (gimple_call_internal_p (stmt))
	    gimple_call_set_internal_fn
	      (stmt, streamer_read_enum (ib, internal_fn, IFN_LAST));
	  else
	    gimple_call_set_fntype (stmt, stream_read_tree (ib, data_in));
	}
      break;

    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_TRANSACTION:
      gimple_transaction_set_label (stmt, stream_read_tree (ib, data_in));
      break;

    default:
      internal_error ("bytecode stream: unknown GIMPLE statement tag %s",
		      lto_tag_name (tag));
    }

  /* Update the properties of symbols, SSA names and labels associated
     with STMT.  */
  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	SSA_NAME_DEF_STMT (lhs) = stmt;
    }
  else if (code == GIMPLE_LABEL)
    gcc_assert (emit_label_in_global_context_p (gimple_label_label (stmt))
	        || DECL_CONTEXT (gimple_label_label (stmt)) == fn->decl);
  else if (code == GIMPLE_ASM)
    {
      unsigned i;

      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
	{
	  tree op = TREE_VALUE (gimple_asm_output_op (stmt, i));
	  if (TREE_CODE (op) == SSA_NAME)
	    SSA_NAME_DEF_STMT (op) = stmt;
	}
    }

  /* Reset alias information.  */
  if (code == GIMPLE_CALL)
    gimple_call_reset_alias_info (stmt);

  /* Mark the statement modified so its operand vectors can be filled in.  */
  gimple_set_modified (stmt, true);

  return stmt;
}
Example #24
static unsigned
self_reuse_distance (data_reference_p dr, unsigned *loop_sizes, unsigned n,
		     struct loop *loop)
{
  tree stride, access_fn;
  HOST_WIDE_INT *strides, astride;
  VEC (tree, heap) *access_fns;
  tree ref = DR_REF (dr);
  unsigned i, ret = ~0u;

  /* In the following example:

     for (i = 0; i < N; i++)
       for (j = 0; j < N; j++)
         use (a[j][i]);
     the same cache line is accessed every N steps (except if the change from
     i to i + 1 crosses the boundary of the cache line).  Thus, for self-reuse,
     we cannot rely purely on the results of the data dependence analysis.

     Instead, we compute the stride of the reference in each loop, and consider
     the innermost loop in which the stride is less than the cache size.  */

  strides = XCNEWVEC (HOST_WIDE_INT, n);
  access_fns = DR_ACCESS_FNS (dr);

  for (i = 0; VEC_iterate (tree, access_fns, i, access_fn); i++)
    {
      /* Keep track of the reference corresponding to the subscript, so that we
	 know its stride.  */
      while (handled_component_p (ref) && TREE_CODE (ref) != ARRAY_REF)
	ref = TREE_OPERAND (ref, 0);
      
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  stride = TYPE_SIZE_UNIT (TREE_TYPE (ref));
	  if (host_integerp (stride, 1))
	    astride = tree_low_cst (stride, 1);
	  else
	    astride = L1_CACHE_LINE_SIZE;

	  ref = TREE_OPERAND (ref, 0);
	}
      else
	astride = 1;

      add_subscript_strides (access_fn, astride, strides, n, loop);
    }

  for (i = n; i-- > 0; )
    {
      unsigned HOST_WIDE_INT s;

      s = strides[i] < 0 ?  -strides[i] : strides[i];

      if (s < (unsigned) L1_CACHE_LINE_SIZE
	  && (loop_sizes[i]
	      > (unsigned) (L1_CACHE_SIZE_BYTES / NONTEMPORAL_FRACTION)))
	{
	  ret = loop_sizes[i];
	  break;
	}
    }

  free (strides);
  return ret;
}
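
The selection loop at the end of self_reuse_distance scans loop levels from innermost outward and stops at the first one whose stride fits in a cache line. Below is a simplified standalone version of just that scan (TOY_LINE_SIZE is an invented constant; the real code additionally requires loop_sizes[i] to exceed L1_CACHE_SIZE_BYTES / NONTEMPORAL_FRACTION):

#include <stdio.h>

#define TOY_LINE_SIZE 64

/* strides[i] and loop_sizes[i] describe loop level i, outermost
   first; return the size of the innermost loop whose stride stays
   within one cache line, or ~0u if there is none.  */
static unsigned
toy_self_reuse_distance (const long *strides, const unsigned *loop_sizes,
			 unsigned n)
{
  unsigned i;
  for (i = n; i-- > 0; )
    {
      unsigned long s = strides[i] < 0 ? -strides[i] : strides[i];
      if (s < TOY_LINE_SIZE)
	return loop_sizes[i];
    }
  return ~0u;
}

int
main (void)
{
  long strides[] = { 4096, 4 };      /* outer strides a page, inner 4 bytes */
  unsigned sizes[] = { 1024, 1024 };
  printf ("%u\n", toy_self_reuse_distance (strides, sizes, 2));  /* 1024 */
  return 0;
}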
Example #25
bool
is_gimple_addressable (tree t)
{
  return (is_gimple_id (t) || handled_component_p (t)
          || INDIRECT_REF_P (t));
}