static tree 
chrec_convert_1 (tree type, tree chrec, tree at_stmt,
		 bool use_overflow_semantics)
{
  tree ct, res;
  tree base, step;
  struct loop *loop;

  if (automatically_generated_chrec_p (chrec))
    return chrec;
  
  ct = chrec_type (chrec);
  if (ct == type)
    return chrec;

  if (!evolution_function_is_affine_p (chrec))
    goto keep_cast;

  loop = get_chrec_loop (chrec);
  base = CHREC_LEFT (chrec);
  step = CHREC_RIGHT (chrec);

  if (convert_affine_scev (loop, type, &base, &step, at_stmt,
			   use_overflow_semantics))
    return build_polynomial_chrec (loop->num, base, step);

  /* If we cannot propagate the cast inside the chrec, just keep the cast.  */
keep_cast:
  res = fold_convert (type, chrec);

  /* Don't propagate overflows.  */
  if (CONSTANT_CLASS_P (res))
    TREE_OVERFLOW (res) = 0;

  /* But reject constants that don't fit in their type after conversion.
     This can happen if TYPE_MIN_VALUE or TYPE_MAX_VALUE are not the
     natural values associated with TYPE_PRECISION and TYPE_UNSIGNED,
     and can cause problems later when computing niters of loops.  Note
     that we don't do the check before converting because we don't want
     to reject conversions of negative chrecs to unsigned types.  */
  if (TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (type) == INTEGER_TYPE
      && !int_fits_type_p (res, type))
    res = chrec_dont_know;

  return res;
}
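A minimal standalone sketch (plain C, not GCC code; the base, step and trip count are made-up values) of the rewrite the affine path above performs: when convert_affine_scev accepts the conversion, (type){base, +, step}_loop becomes {(type)base, +, (type)step}_loop. For a wrapping (unsigned) target type the two forms agree on every iteration, which is the easy case of the property being checked.

#include <stdio.h>

/* Standalone illustration (not GCC code): an affine evolution
   base + i * step computed in a wide type, versus the same evolution
   computed from base and step already converted to a narrower unsigned
   type.  For wrapping arithmetic the two agree on every iteration,
   which is the shape of the rewrite chrec_convert_1 performs when it
   turns (type){base, +, step}_loop into {(type)base, +, (type)step}_loop.  */
int
main (void)
{
  long base = 1000, step = 37;                 /* made-up values */
  unsigned char cbase = (unsigned char) base;
  unsigned char cstep = (unsigned char) step;

  for (long i = 0; i < 300; i++)
    {
      unsigned char from_wide = (unsigned char) (base + i * step);
      unsigned char from_narrow = (unsigned char) (cbase + i * cstep);
      if (from_wide != from_narrow)
	{
	  printf ("mismatch at i = %ld\n", i);
	  return 1;
	}
    }
  printf ("cast propagated into the evolution: all iterations agree\n");
  return 0;
}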
Example 2
tree
chrec_convert_aggressive (tree type, tree chrec, bool *fold_conversions)
{
  tree inner_type, left, right, lc, rc, rtype;

  gcc_assert (fold_conversions != NULL);

  if (automatically_generated_chrec_p (chrec)
      || TREE_CODE (chrec) != POLYNOMIAL_CHREC)
    return NULL_TREE;

  inner_type = TREE_TYPE (chrec);
  if (TYPE_PRECISION (type) > TYPE_PRECISION (inner_type))
    return NULL_TREE;

  if (useless_type_conversion_p (type, inner_type))
    return NULL_TREE;

  if (!*fold_conversions && evolution_function_is_affine_p (chrec))
    {
      tree base, step;
      struct loop *loop;

      loop = get_chrec_loop (chrec);
      base = CHREC_LEFT (chrec);
      step = CHREC_RIGHT (chrec);
      if (convert_affine_scev (loop, type, &base, &step, NULL, true))
	return build_polynomial_chrec (loop->num, base, step);
    }
  rtype = POINTER_TYPE_P (type) ? sizetype : type;

  left = CHREC_LEFT (chrec);
  right = CHREC_RIGHT (chrec);
  lc = chrec_convert_aggressive (type, left, fold_conversions);
  if (!lc)
    lc = chrec_convert (type, left, NULL);
  rc = chrec_convert_aggressive (rtype, right, fold_conversions);
  if (!rc)
    rc = chrec_convert (rtype, right, NULL);

  *fold_conversions = true;

  return build_polynomial_chrec (CHREC_VARIABLE (chrec), lc, rc);
}
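chrec_convert_aggressive refuses to widen: it returns NULL_TREE as soon as TYPE_PRECISION (type) exceeds the precision of the chrec's type. The standalone example below (plain C, made-up values, assuming the usual 32-bit int and 64-bit long) shows one reason a widening conversion cannot simply be pushed into the two halves of a chrec: once the narrow computation has wrapped, widening the result and widening the operands give different values.

#include <stdio.h>

/* Standalone illustration (not GCC code): pushing a *widening* conversion
   into the operands is not value-preserving after the narrow computation
   has wrapped.  Assumes 32-bit unsigned int and 64-bit unsigned long.  */
int
main (void)
{
  unsigned int a = 3000000000u, b = 3000000000u;   /* made-up values */

  unsigned long widened_after = (unsigned long) (a + b);   /* wraps first */
  unsigned long widened_inside = (unsigned long) a + (unsigned long) b;

  printf ("(unsigned long)(a + b)              = %lu\n", widened_after);
  printf ("(unsigned long)a + (unsigned long)b = %lu\n", widened_inside);
  return 0;
}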
Example 3
static bool
evolution_function_is_invariant_rec_p (tree chrec, int loopnum)
{
  if (evolution_function_is_constant_p (chrec))
    return true;

  if (TREE_CODE (chrec) == SSA_NAME
      && (loopnum == 0
	  || expr_invariant_in_loop_p (get_loop (cfun, loopnum), chrec)))
    return true;

  if (TREE_CODE (chrec) == POLYNOMIAL_CHREC)
    {
      if (CHREC_VARIABLE (chrec) == (unsigned) loopnum
	  || flow_loop_nested_p (get_loop (cfun, loopnum),
				 get_chrec_loop (chrec))
	  || !evolution_function_is_invariant_rec_p (CHREC_RIGHT (chrec),
						     loopnum)
	  || !evolution_function_is_invariant_rec_p (CHREC_LEFT (chrec),
						     loopnum))
	return false;
      return true;
    }

  switch (TREE_OPERAND_LENGTH (chrec))
    {
    case 2:
      if (!evolution_function_is_invariant_rec_p (TREE_OPERAND (chrec, 1),
						  loopnum))
	return false;
      /* FALLTHRU */

    case 1:
      if (!evolution_function_is_invariant_rec_p (TREE_OPERAND (chrec, 0),
						  loopnum))
	return false;
      return true;

    default:
      return false;
    }

  return false;
}
Example 4
static inline tree
chrec_fold_plus_poly_poly (enum tree_code code,
			   tree type,
			   tree poly0,
			   tree poly1)
{
  tree left, right;
  struct loop *loop0 = get_chrec_loop (poly0);
  struct loop *loop1 = get_chrec_loop (poly1);
  tree rtype = code == POINTER_PLUS_EXPR ? chrec_type (poly1) : type;

  gcc_assert (poly0);
  gcc_assert (poly1);
  gcc_assert (TREE_CODE (poly0) == POLYNOMIAL_CHREC);
  gcc_assert (TREE_CODE (poly1) == POLYNOMIAL_CHREC);
  if (POINTER_TYPE_P (chrec_type (poly0)))
    gcc_checking_assert (ptrofftype_p (chrec_type (poly1))
			 && useless_type_conversion_p (type, chrec_type (poly0)));
  else
    gcc_checking_assert (useless_type_conversion_p (type, chrec_type (poly0))
			 && useless_type_conversion_p (type, chrec_type (poly1)));

  /*
    {a, +, b}_1 + {c, +, d}_2  ->  {{a, +, b}_1 + c, +, d}_2,
    {a, +, b}_2 + {c, +, d}_1  ->  {{c, +, d}_1 + a, +, b}_2,
    {a, +, b}_x + {c, +, d}_x  ->  {a+c, +, b+d}_x.  */
  if (flow_loop_nested_p (loop0, loop1))
    {
      if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
	return build_polynomial_chrec
	  (CHREC_VARIABLE (poly1),
	   chrec_fold_plus (type, poly0, CHREC_LEFT (poly1)),
	   CHREC_RIGHT (poly1));
      else
	return build_polynomial_chrec
	  (CHREC_VARIABLE (poly1),
	   chrec_fold_minus (type, poly0, CHREC_LEFT (poly1)),
	   chrec_fold_multiply (type, CHREC_RIGHT (poly1),
				SCALAR_FLOAT_TYPE_P (type)
				? build_real (type, dconstm1)
				: build_int_cst_type (type, -1)));
    }

  if (flow_loop_nested_p (loop1, loop0))
    {
      if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
	return build_polynomial_chrec
	  (CHREC_VARIABLE (poly0),
	   chrec_fold_plus (type, CHREC_LEFT (poly0), poly1),
	   CHREC_RIGHT (poly0));
      else
	return build_polynomial_chrec
	  (CHREC_VARIABLE (poly0),
	   chrec_fold_minus (type, CHREC_LEFT (poly0), poly1),
	   CHREC_RIGHT (poly0));
    }

  /* This function should never be called for chrecs of loops that
     do not belong to the same loop nest.  */
  gcc_assert (loop0 == loop1);

  if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
    {
      left = chrec_fold_plus
	(type, CHREC_LEFT (poly0), CHREC_LEFT (poly1));
      right = chrec_fold_plus
	(rtype, CHREC_RIGHT (poly0), CHREC_RIGHT (poly1));
    }
  else
    {
      left = chrec_fold_minus
	(type, CHREC_LEFT (poly0), CHREC_LEFT (poly1));
      right = chrec_fold_minus
	(type, CHREC_RIGHT (poly0), CHREC_RIGHT (poly1));
    }

  if (chrec_zerop (right))
    return left;
  else
    return build_polynomial_chrec
      (CHREC_VARIABLE (poly0), left, right);
}
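The same-loop rule quoted in the comment inside chrec_fold_plus_poly_poly, {a, +, b}_x + {c, +, d}_x -> {a+c, +, b+d}_x, can be checked numerically. The standalone sketch below (plain C, not GCC code; the coefficients and trip count are made up) steps two induction variables explicitly and compares their sum against a single variable built from the folded base a+c and step b+d.

#include <stdio.h>

/* Standalone check (not GCC code) of the same-loop folding rule:
   {a, +, b}_x + {c, +, d}_x  ->  {a+c, +, b+d}_x.  */
int
main (void)
{
  long a = 5, b = 3, c = 11, d = -2;   /* made-up coefficients */
  long iv0 = a, iv1 = c;               /* {a, +, b}_x and {c, +, d}_x */
  long folded = a + c;                 /* {a+c, +, b+d}_x */

  for (int i = 0; i < 100; i++)
    {
      if (iv0 + iv1 != folded)
	{
	  printf ("mismatch at iteration %d\n", i);
	  return 1;
	}
      iv0 += b;
      iv1 += d;
      folded += b + d;
    }
  printf ("folded chrec matches the sum on every iteration\n");
  return 0;
}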
Example 5
static tree
chrec_convert_1 (tree type, tree chrec, gimple *at_stmt,
		 bool use_overflow_semantics)
{
  tree ct, res;
  tree base, step;
  struct loop *loop;

  if (automatically_generated_chrec_p (chrec))
    return chrec;

  ct = chrec_type (chrec);
  if (useless_type_conversion_p (type, ct))
    return chrec;

  if (!evolution_function_is_affine_p (chrec))
    goto keep_cast;

  loop = get_chrec_loop (chrec);
  base = CHREC_LEFT (chrec);
  step = CHREC_RIGHT (chrec);

  if (convert_affine_scev (loop, type, &base, &step, at_stmt,
			   use_overflow_semantics))
    return build_polynomial_chrec (loop->num, base, step);

  /* If we cannot propagate the cast inside the chrec, just keep the cast.  */
keep_cast:
  /* Fold will not canonicalize (long)(i - 1) to (long)i - 1 because that
     may be more expensive.  We do want to perform this optimization here
     though for canonicalization reasons.  */
  if (use_overflow_semantics
      && (TREE_CODE (chrec) == PLUS_EXPR
	  || TREE_CODE (chrec) == MINUS_EXPR)
      && TREE_CODE (type) == INTEGER_TYPE
      && TREE_CODE (ct) == INTEGER_TYPE
      && TYPE_PRECISION (type) > TYPE_PRECISION (ct)
      && TYPE_OVERFLOW_UNDEFINED (ct))
    res = fold_build2 (TREE_CODE (chrec), type,
		       fold_convert (type, TREE_OPERAND (chrec, 0)),
		       fold_convert (type, TREE_OPERAND (chrec, 1)));
  /* Similarly, perform the trick that (signed char)((int)x + 2) can be
     narrowed to (signed char)((unsigned char)x + 2).  */
  else if (use_overflow_semantics
	   && TREE_CODE (chrec) == POLYNOMIAL_CHREC
	   && TREE_CODE (ct) == INTEGER_TYPE
	   && TREE_CODE (type) == INTEGER_TYPE
	   && TYPE_OVERFLOW_UNDEFINED (type)
	   && TYPE_PRECISION (type) < TYPE_PRECISION (ct))
    {
      tree utype = unsigned_type_for (type);
      res = build_polynomial_chrec (CHREC_VARIABLE (chrec),
				    fold_convert (utype,
						  CHREC_LEFT (chrec)),
				    fold_convert (utype,
						  CHREC_RIGHT (chrec)));
      res = chrec_convert_1 (type, res, at_stmt, use_overflow_semantics);
    }
  else
    res = fold_convert (type, chrec);

  /* Don't propagate overflows.  */
  if (CONSTANT_CLASS_P (res))
    TREE_OVERFLOW (res) = 0;

  /* But reject constants that don't fit in their type after conversion.
     This can happen if TYPE_MIN_VALUE or TYPE_MAX_VALUE are not the
     natural values associated with TYPE_PRECISION and TYPE_UNSIGNED,
     and can cause problems later when computing niters of loops.  Note
     that we don't do the check before converting because we don't want
     to reject conversions of negative chrecs to unsigned types.  */
  if (TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (type) == INTEGER_TYPE
      && !int_fits_type_p (res, type))
    res = chrec_dont_know;

  return res;
}
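A standalone check (plain C, not GCC code) of the narrowing trick mentioned in the keep_cast path above: for every signed char value x, (signed char)((int)x + 2) equals (signed char)((unsigned char)x + 2). Doing the addition on the unsigned counterpart is what lets chrec_convert_1 build the chrec in utype without introducing signed overflow; converting back to the signed type relies on the wrap-around behaviour GCC defines for such narrowing conversions, so the equality is implementation-defined rather than guaranteed by ISO C.

#include <limits.h>
#include <stdio.h>

/* Standalone check (not GCC code): computing the +2 on the unsigned
   counterpart and converting back gives the same signed char result as
   computing in int and truncating, for every signed char value.  The
   final conversion of an out-of-range value to signed char is
   implementation-defined; GCC defines it as modulo reduction.  */
int
main (void)
{
  for (int v = SCHAR_MIN; v <= SCHAR_MAX; v++)
    {
      signed char x = (signed char) v;
      signed char wide = (signed char) ((int) x + 2);
      signed char narrow = (signed char) ((unsigned char) x + 2);
      if (wide != narrow)
	{
	  printf ("mismatch for x = %d\n", v);
	  return 1;
	}
    }
  printf ("the two forms agree for all signed char values\n");
  return 0;
}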