Example No. 1
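/* Convert CHREC to TYPE, without checking for overflow.  Returns NULL_TREE
   when CHREC is not a polynomial chrec, when the conversion would widen it,
   or when arithmetic cannot be performed in TYPE.  */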
tree
chrec_convert_aggressive (tree type, tree chrec)
{
  tree inner_type, left, right, lc, rc, rtype;

  if (automatically_generated_chrec_p (chrec)
      || TREE_CODE (chrec) != POLYNOMIAL_CHREC)
    return NULL_TREE;

  inner_type = TREE_TYPE (chrec);
  if (TYPE_PRECISION (type) > TYPE_PRECISION (inner_type))
    return NULL_TREE;

  /* If we cannot perform arithmetic in TYPE, avoid creating an scev.  */
  if (avoid_arithmetics_in_type_p (type))
    return NULL_TREE;

  rtype = POINTER_TYPE_P (type) ? sizetype : type;

  left = CHREC_LEFT (chrec);
  right = CHREC_RIGHT (chrec);
  lc = chrec_convert_aggressive (type, left);
  if (!lc)
    lc = chrec_convert (type, left, NULL);
  rc = chrec_convert_aggressive (rtype, right);
  if (!rc)
    rc = chrec_convert (rtype, right, NULL);
 
  return build_polynomial_chrec (CHREC_VARIABLE (chrec), lc, rc);
}
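As an aside, the arithmetic fact this aggressive narrowing leans on can be checked with a few lines of plain C. The sketch below is illustrative only (it is not GCC code, and all names and values are made up): truncating the base and the step to the narrower type and iterating there produces, at every step, the same value modulo 2^precision as truncating the corresponding element of the wide sequence.

#include <stdio.h>

int
main (void)
{
  /* Wide affine chrec {1000, +, 300} in unsigned int, narrowed to unsigned char.  */
  unsigned int wide_base = 1000, wide_step = 300;
  unsigned char narrow_base = (unsigned char) wide_base;  /* 1000 mod 256 = 232  */
  unsigned char narrow_step = (unsigned char) wide_step;  /* 300 mod 256 = 44  */
  unsigned int wide = wide_base;
  unsigned char narrow = narrow_base;

  for (int i = 0; i < 8; i++)
    {
      /* The truncated wide value and the narrow iteration agree at every step.  */
      printf ("i=%d  wide=%u  (unsigned char) wide=%u  narrow=%u\n",
              i, wide, (unsigned) (unsigned char) wide, (unsigned) narrow);
      wide += wide_step;
      narrow = (unsigned char) (narrow + narrow_step);
    }
  return 0;
}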
Example No. 2
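/* Convert CHREC to TYPE.  Widening conversions are delegated to
   count_ev_in_wider_type; otherwise the components are converted one by one,
   and a converted constant that no longer fits TYPE is replaced by
   chrec_dont_know.  */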
tree 
chrec_convert (tree type, 
	       tree chrec)
{
  tree ct;
  
  if (automatically_generated_chrec_p (chrec))
    return chrec;
  
  ct = chrec_type (chrec);
  if (ct == type)
    return chrec;

  if (TYPE_PRECISION (ct) < TYPE_PRECISION (type))
    return count_ev_in_wider_type (type, chrec);

  switch (TREE_CODE (chrec))
    {
    case POLYNOMIAL_CHREC:
      return build_polynomial_chrec (CHREC_VARIABLE (chrec),
				     chrec_convert (type,
						    CHREC_LEFT (chrec)),
				     chrec_convert (type,
						    CHREC_RIGHT (chrec)));

    default:
      {
	tree res = fold_convert (type, chrec);

	/* Don't propagate overflows.  */
	TREE_OVERFLOW (res) = 0;
	if (CONSTANT_CLASS_P (res))
	  TREE_CONSTANT_OVERFLOW (res) = 0;

	/* But reject constants that don't fit in their type after conversion.
	   This can happen if TYPE_MIN_VALUE or TYPE_MAX_VALUE are not the
	   natural values associated with TYPE_PRECISION and TYPE_UNSIGNED,
	   and can cause problems later when computing niters of loops.  Note
	   that we don't do the check before converting because we don't want
	   to reject conversions of negative chrecs to unsigned types.  */
	if (TREE_CODE (res) == INTEGER_CST
	    && TREE_CODE (type) == INTEGER_TYPE
	    && !int_fits_type_p (res, type))
	  res = chrec_dont_know;

	return res;
      }
    }
}
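The default case above deliberately keeps negative-to-unsigned conversions (the value wraps) while discarding converted constants that fall outside the target type's declared range. The sketch below shows both behaviours in plain C; fits_p is a hypothetical stand-in for int_fits_type_p, and the 8-bit bounds are just an example.

#include <stdio.h>

/* Hypothetical stand-in for int_fits_type_p: does V lie in [MIN, MAX]?  */
static int
fits_p (long long v, long long min, long long max)
{
  return min <= v && v <= max;
}

int
main (void)
{
  /* Negative chrec converted to an unsigned type: the value wraps, and the
     conversion is kept.  */
  unsigned char u = (unsigned char) -1;
  printf ("(unsigned char) -1 = %u\n", (unsigned) u);   /* 255  */

  /* A converted constant outside the declared range of the target type would
     be turned into chrec_dont_know; 300 does not fit an 8-bit range.  */
  printf ("fits_p (300, 0, 255) = %d\n", fits_p (300, 0, 255));   /* 0  */
  return 0;
}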
Example No. 3
/* Convert CHREC for the right hand side of a CHREC.
   The increment for a pointer type is always sizetype.  */
tree 
chrec_convert_rhs (tree type, tree chrec, gimple at_stmt)
{
  if (POINTER_TYPE_P (type))
    type = sizetype;
  return chrec_convert (type, chrec, at_stmt);
}
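A small standalone illustration (plain C, no GCC types) of why the step of a pointer-typed chrec is expressed in sizetype: a pointer evolution advances by a byte offset, so an int * that moves one element per iteration has a step of sizeof (int) bytes.

#include <stdio.h>
#include <stddef.h>

int
main (void)
{
  int a[4];
  int *p = &a[0];

  /* Advancing an int * by one element is a byte (sizetype-like) step of
     sizeof (int).  */
  ptrdiff_t step_in_bytes = (char *) (p + 1) - (char *) p;
  printf ("step in bytes = %td, sizeof (int) = %zu\n",
          step_in_bytes, sizeof (int));
  return 0;
}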
Example No. 4
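/* Convert CHREC to TYPE, without regard to signed overflows.  Returns
   NULL_TREE when CHREC is not a polynomial chrec, or when the conversion
   would widen it or is useless.  Sets *FOLD_CONVERSIONS to true when the
   conversion could not be proven safe by convert_affine_scev and was
   performed anyway.  */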
tree
chrec_convert_aggressive (tree type, tree chrec, bool *fold_conversions)
{
  tree inner_type, left, right, lc, rc, rtype;

  gcc_assert (fold_conversions != NULL);

  if (automatically_generated_chrec_p (chrec)
      || TREE_CODE (chrec) != POLYNOMIAL_CHREC)
    return NULL_TREE;

  inner_type = TREE_TYPE (chrec);
  if (TYPE_PRECISION (type) > TYPE_PRECISION (inner_type))
    return NULL_TREE;

  if (useless_type_conversion_p (type, inner_type))
    return NULL_TREE;

  if (!*fold_conversions && evolution_function_is_affine_p (chrec))
    {
      tree base, step;
      struct loop *loop;

      loop = get_chrec_loop (chrec);
      base = CHREC_LEFT (chrec);
      step = CHREC_RIGHT (chrec);
      if (convert_affine_scev (loop, type, &base, &step, NULL, true))
	return build_polynomial_chrec (loop->num, base, step);
    }
  rtype = POINTER_TYPE_P (type) ? sizetype : type;

  left = CHREC_LEFT (chrec);
  right = CHREC_RIGHT (chrec);
  lc = chrec_convert_aggressive (type, left, fold_conversions);
  if (!lc)
    lc = chrec_convert (type, left, NULL);
  rc = chrec_convert_aggressive (rtype, right, fold_conversions);
  if (!rc)
    rc = chrec_convert (rtype, right, NULL);

  *fold_conversions = true;

  return build_polynomial_chrec (CHREC_VARIABLE (chrec), lc, rc);
}
Example No. 5
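/* Evaluate CHREC after X iterations of loop VAR: "{a, +, b}_var (x)".
   The affine case uses the closed form a + b*x; non-polynomial chrecs are
   returned unchanged; the remaining cases go through chrec_evaluate or give
   up with chrec_dont_know.  */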
tree 
chrec_apply (unsigned var,
	     tree chrec, 
	     tree x)
{
  tree type = chrec_type (chrec);
  tree res = chrec_dont_know;

  if (automatically_generated_chrec_p (chrec)
      || automatically_generated_chrec_p (x)

      /* When the symbols are defined in an outer loop, it is possible
	 to symbolically compute the apply, since the symbols are
	 constants with respect to the varying loop.  */
      || chrec_contains_symbols_defined_in_loop (chrec, var))
    return chrec_dont_know;
 
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "(chrec_apply \n");

  if (TREE_CODE (x) == INTEGER_CST && SCALAR_FLOAT_TYPE_P (type))
    x = build_real_from_int_cst (type, x);

  if (evolution_function_is_affine_p (chrec))
    {
      /* "{a, +, b} (x)"  ->  "a + b*x".  */
      x = chrec_convert (type, x, NULL_TREE);
      res = chrec_fold_multiply (type, CHREC_RIGHT (chrec), x);
      if (!integer_zerop (CHREC_LEFT (chrec)))
	res = chrec_fold_plus (type, CHREC_LEFT (chrec), res);
    }
  
  else if (TREE_CODE (chrec) != POLYNOMIAL_CHREC)
    res = chrec;
  
  else if (TREE_CODE (x) == INTEGER_CST
	   && tree_int_cst_sgn (x) == 1)
    /* testsuite/.../ssa-chrec-38.c.  */
    res = chrec_evaluate (var, chrec, x, 0);
  else
    res = chrec_dont_know;
  
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  (varying_loop = %d\n", var);
      fprintf (dump_file, ")\n  (chrec = ");
      print_generic_expr (dump_file, chrec, 0);
      fprintf (dump_file, ")\n  (x = ");
      print_generic_expr (dump_file, x, 0);
      fprintf (dump_file, ")\n  (res = ");
      print_generic_expr (dump_file, res, 0);
      fprintf (dump_file, "))\n");
    }
  
  return res;
}
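The comment "{a, +, b} (x)" -> "a + b*x" is the whole algorithm for the affine case. Here is a minimal standalone check (plain C, made-up values) that the closed form agrees with iterating the recurrence x times.

#include <stdio.h>

int
main (void)
{
  /* Affine chrec {a, +, b} with a = 3, b = 5, applied at x = 7.  */
  int a = 3, b = 5, x = 7;

  /* Iterate the recurrence x times ...  */
  int iter = a;
  for (int i = 0; i < x; i++)
    iter += b;

  /* ... and compare with the closed form used by chrec_apply.  */
  int closed = a + b * x;
  printf ("iterative = %d, closed form = %d\n", iter, closed);   /* 38, 38  */
  return 0;
}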
Example No. 6
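/* Fold the addition of OP0 and OP1 in TYPE, using POINTER_PLUS_EXPR when
   TYPE is a pointer type and PLUS_EXPR otherwise.  An operand equal to zero
   reduces to a conversion of the other operand.  */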
tree
chrec_fold_plus (tree type,
		 tree op0,
		 tree op1)
{
  enum tree_code code;
  if (automatically_generated_chrec_p (op0)
      || automatically_generated_chrec_p (op1))
    return chrec_fold_automatically_generated_operands (op0, op1);

  if (integer_zerop (op0))
    return chrec_convert (type, op1, NULL);
  if (integer_zerop (op1))
    return chrec_convert (type, op0, NULL);

  if (POINTER_TYPE_P (type))
    code = POINTER_PLUS_EXPR;
  else
    code = PLUS_EXPR;

  return chrec_fold_plus_1 (code, type, op0, op1);
}
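chrec_fold_plus_1 is not shown in this listing; assuming the usual chain-of-recurrences addition rule for two chrecs of the same loop, {a, +, b} + {c, +, d} = {a + c, +, b + d}, the following plain-C sketch (illustrative values only) checks the rule numerically.

#include <stdio.h>

int
main (void)
{
  /* Two affine chrecs of the same loop, {2, +, 3} and {10, +, 4}, and their
     assumed component-wise sum {12, +, 7}.  */
  int a = 2, a_step = 3;
  int b = 10, b_step = 4;
  int s = a + b, s_step = a_step + b_step;

  for (int i = 0; i < 6; i++)
    {
      printf ("i=%d  a+b=%d  sum chrec=%d\n", i, a + b, s);
      a += a_step;
      b += b_step;
      s += s_step;
    }
  return 0;
}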
Example No. 7
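/* Convert the affine scev with base *BASE and step *STEP to TYPE, for the iv
   of LOOP evaluated at AT_STMT, storing the converted base and step back
   through the pointers.  USE_OVERFLOW_SEMANTICS says whether the overflow
   rules of the source language may be assumed.  Returns false when the
   conversion cannot be shown to preserve the values of the iv.  */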
bool
convert_affine_scev (struct loop *loop, tree type,
		     tree *base, tree *step, gimple *at_stmt,
		     bool use_overflow_semantics)
{
  tree ct = TREE_TYPE (*step);
  bool enforce_overflow_semantics;
  bool must_check_src_overflow, must_check_rslt_overflow;
  tree new_base, new_step;
  tree step_type = POINTER_TYPE_P (type) ? sizetype : type;

  /* In general,
     (TYPE) (BASE + STEP * i) = (TYPE) BASE + (TYPE -- sign extend) STEP * i,
     but we must check some assumptions.

     1) If [BASE, +, STEP] wraps, the equation is not valid when precision
        of CT is smaller than the precision of TYPE.  For example, when we
	cast unsigned char [254, +, 1] to unsigned, the values on left side
	are 254, 255, 0, 1, ..., but those on the right side are
	254, 255, 256, 257, ...
     2) In case that we must also preserve the fact that signed ivs do not
        overflow, we must additionally check that the new iv does not wrap.
	For example, unsigned char [125, +, 1] casted to signed char could
	become a wrapping variable with values 125, 126, 127, -128, -127, ...,
	which would confuse optimizers that assume that this does not
	happen.  */
  must_check_src_overflow = TYPE_PRECISION (ct) < TYPE_PRECISION (type);

  enforce_overflow_semantics = (use_overflow_semantics
				&& nowrap_type_p (type));
  if (enforce_overflow_semantics)
    {
      /* We can avoid checking whether the result overflows in the following
	 cases:

	 -- must_check_src_overflow is true, and the range of TYPE is superset
	    of the range of CT -- i.e., in all cases except if CT signed and
	    TYPE unsigned.
         -- both CT and TYPE have the same precision and signedness, and we
	    verify instead that the source does not overflow (this may be
	    easier than verifying it for the result, as we may use the
	    information about the semantics of overflow in CT).  */
      if (must_check_src_overflow)
	{
	  if (TYPE_UNSIGNED (type) && !TYPE_UNSIGNED (ct))
	    must_check_rslt_overflow = true;
	  else
	    must_check_rslt_overflow = false;
	}
      else if (TYPE_UNSIGNED (ct) == TYPE_UNSIGNED (type)
	       && TYPE_PRECISION (ct) == TYPE_PRECISION (type))
	{
	  must_check_rslt_overflow = false;
	  must_check_src_overflow = true;
	}
      else
	must_check_rslt_overflow = true;
    }
  else
    must_check_rslt_overflow = false;

  if (must_check_src_overflow
      && scev_probably_wraps_p (*base, *step, at_stmt, loop,
				use_overflow_semantics))
    return false;

  new_base = chrec_convert (type, *base, at_stmt, use_overflow_semantics);
  /* The step must be sign extended, regardless of the signedness
     of CT and TYPE.  This only needs to be handled specially when
     CT is unsigned -- to avoid e.g. unsigned char [100, +, 255]
     (with values 100, 99, 98, ...) from becoming signed or unsigned
     [100, +, 255] with values 100, 355, ...; the sign-extension is
     performed by default when CT is signed.  */
  new_step = *step;
  if (TYPE_PRECISION (step_type) > TYPE_PRECISION (ct) && TYPE_UNSIGNED (ct))
    {
      tree signed_ct = build_nonstandard_integer_type (TYPE_PRECISION (ct), 0);
      new_step = chrec_convert (signed_ct, new_step, at_stmt,
                                use_overflow_semantics);
    }
  new_step = chrec_convert (step_type, new_step, at_stmt,
			    use_overflow_semantics);

  if (automatically_generated_chrec_p (new_base)
      || automatically_generated_chrec_p (new_step))
    return false;

  if (must_check_rslt_overflow
      /* Note that in this case we cannot use the fact that signed variables
	 do not overflow, as this is what we are verifying for the new iv.  */
      && scev_probably_wraps_p (new_base, new_step, at_stmt, loop, false))
    return false;

  *base = new_base;
  *step = new_step;
  return true;
}
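The first assumption documented in the function (point 1 of the comment) is easy to reproduce outside GCC: once the narrow unsigned char induction variable wraps, the widened unsigned int sequence stops matching it, which is exactly what must_check_src_overflow guards against. A plain-C sketch of the example from the comment:

#include <stdio.h>

int
main (void)
{
  unsigned char narrow = 254;  /* CT:   unsigned char {254, +, 1}  */
  unsigned int wide = 254;     /* TYPE: unsigned int  {254, +, 1}  */

  for (int i = 0; i < 4; i++)
    {
      /* 254, 255, 0, 1, ...  versus  254, 255, 256, 257, ...  */
      printf ("i=%d  narrow=%3u  wide=%u\n", i, (unsigned) narrow, wide);
      narrow = (unsigned char) (narrow + 1);
      wide = wide + 1;
    }
  return 0;
}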