/* If ADDR contains an element equal to BASE_HINT with coefficient one,
   move it to PARTS->base, cast to a void pointer in TYPE's address
   space.  */

static void
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_one_p (addr->elts[i].coef))
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  if (i == addr->n)
    return;

  /* Cast the value to an appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back end will assume registers of pointer
     type are aligned, while the base itself may not actually be.
     We use a void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
}
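
/* Adds ELT * SCALE to the expression EXPR (which may be NULL) and returns
   the folded result of type TYPE.  SCALE is first extended to the precision
   of COMB; for a pointer TYPE the offset arithmetic is done in sizetype and
   combined with POINTER_PLUS_EXPR.  */
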
static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type1, elt);

  if (double_int_one_p (scale))
    {
      if (!expr)
	return fold_convert (type, elt);

      if (POINTER_TYPE_P (type))
        return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (double_int_minus_one_p (scale))
    {
      if (!expr)
	return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));

      if (POINTER_TYPE_P (type))
	{
	  elt = fold_build1 (NEGATE_EXPR, type1, elt);
	  return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_convert (type,
			 fold_build2 (MULT_EXPR, type1, elt,
				      double_int_to_tree (type1, scale)));

  if (double_int_negative_p (scale))
    {
      code = MINUS_EXPR;
      scale = double_int_neg (scale);
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (type))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
    }
  return fold_build2 (code, type, expr, elt);
}
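
/* Splits the address ADDR for a memory access of type TYPE into PARTS.
   BASE_HINT, if non-NULL, is preferred as the base of the reference,
   IV_CAND is the induction variable candidate occurring in ADDR, and
   SPEED says whether we optimize for speed rather than size.  */
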
static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
	       tree base_hint, struct mem_address *parts,
               bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (!double_int_zero_p (addr->offset))
    parts->offset = double_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment there is
     no reliable way to distinguish between a pointer and its offset, this
     is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (!double_int_one_p (addr->elts[i].coef))
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    double_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
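
/* Multiplies the affine combination COMB by the constant SCALE.  Elements
   whose coefficient becomes zero due to overflow are removed.  */
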
void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_one_p (scale))
    return;

  if (double_int_zero_p (scale))
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
	= double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
				   comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (double_int_zero_p (new_coef))
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  double_int_to_tree (type, scale));
    }
}
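
/* If ADDR contains an element of pointer type with coefficient one, move
   it to PARTS->base.  */
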
static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_one_p (addr->elts[i].coef))
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}
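
/* If ADDR contains the address of an object whose address is a link-time
   constant, move it to PARTS->symbol.  */
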
static void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_one_p (addr->elts[i].coef))
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}
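
/* Adds ELT * SCALE to the affine combination COMB.  If ELT is already
   present, only its coefficient is updated (and the element is dropped if
   the sum becomes zero); once MAX_AFF_ELTS elements are in use, the
   product is folded into COMB->rest instead.  */
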
void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_zero_p (scale))
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	double_int new_coef;

	new_coef = double_int_add (comb->elts[i].coef, scale);
	new_coef = double_int_ext_for_comb (new_coef, comb);
	if (!double_int_zero_p (new_coef))
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = double_int_one;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (double_int_one_p (scale))
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
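
/* Variant of add_elt_to_tree from a port that defines the
   TARGET_POINTER_SIZETYPE macro (e.g. the dsPIC back end), where pointers
   in some address spaces are wider than sizetype and a target-supplied
   offset type is used instead.  */
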
static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
#ifdef TARGET_POINTER_SIZETYPE
    {
       /* sizetype is not good enough for pointers in ADDRESS_SPACES 
          on dsPIC; some pointers are larger than 'sizetype' (CAW) */
       type1 = TARGET_POINTER_SIZETYPE(type);
    }
#else
    type1 = sizetype;
#endif

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type1, elt);

  if (double_int_one_p (scale))
    {
      if (!expr)
	return fold_convert (type, elt);

      if (POINTER_TYPE_P (type))
        return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (double_int_minus_one_p (scale))
    {
      if (!expr)
	return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));

      if (POINTER_TYPE_P (type))
	{
	  elt = fold_build1 (NEGATE_EXPR, type1, elt);
	  return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_convert (type,
			 fold_build2 (MULT_EXPR, type1, elt,
				      double_int_to_tree (type1, scale)));

  if (double_int_negative_p (scale))
    {
      code = MINUS_EXPR;
      scale = double_int_neg (scale);
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (type))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
    }
  return fold_build2 (code, type, expr, elt);
}