Example #1
tree
gimple_simplify (enum tree_code code, tree type,
		 tree op0, tree op1,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1))
    {
      tree res = const_binop (code, type, op0, op1);
      if (res != NULL_TREE
	  && CONSTANT_CLASS_P (res))
	return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if ((commutative_tree_code (code)
       || TREE_CODE_CLASS (code) == tcc_comparison)
      && tree_swap_operands_p (op0, op1, false))
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      if (TREE_CODE_CLASS (code) == tcc_comparison)
	code = swap_tree_comparison (code);
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
			code, type, op0, op1))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}
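
The canonicalization step above swaps the operands of a commutative or comparison operation, flipping the comparison code when needed, so patterns only have to match one operand order. A minimal standalone C sketch of that idea follows; it uses no GCC internals, and the enum and swap_cmp are hypothetical stand-ins for tree codes and swap_tree_comparison.

#include <stdio.h>

/* Hypothetical stand-ins for tree codes and swap_tree_comparison.  */
enum code { PLUS_OP, LT_OP, GT_OP };

static enum code
swap_cmp (enum code c)
{
  switch (c)
    {
    case LT_OP: return GT_OP;
    case GT_OP: return LT_OP;
    default:    return c;	/* commutative ops are unchanged */
    }
}

/* Mirror the spirit of tree_swap_operands_p: put the constant
   operand second so matching only considers one order.  */
static void
canonicalize (enum code *c, int *op0, int *op1,
	      int op0_is_cst, int op1_is_cst)
{
  if (op0_is_cst && !op1_is_cst)
    {
      int tem = *op0;
      *op0 = *op1;
      *op1 = tem;
      *c = swap_cmp (*c);	/* only changes comparisons */
    }
}

int
main (void)
{
  enum code c = LT_OP;
  int a = 5, b = 7;		/* "5 < x" with x currently 7 */
  canonicalize (&c, &a, &b, 1, 0);
  printf ("%d %s %d\n", a, c == GT_OP ? ">" : "<", b);	/* "7 > 5" */
  return 0;
}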
Example #2
tree
gimple_simplify (enum tree_code code, tree type,
		 tree op0, tree op1, tree op2,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0) && constant_for_folding (op1)
      && constant_for_folding (op2))
    {
      tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
      if (res != NULL_TREE
	  && CONSTANT_CLASS_P (res))
	return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1, false))
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
			code, type, op0, op1, op2))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}
Example #3
tree
gimple_simplify (enum built_in_function fn, tree type,
		 tree arg0, tree arg1, tree arg2,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1)
      && constant_for_folding (arg2))
    {
      tree decl = builtin_decl_implicit (fn);
      if (decl)
	{
	  tree args[3];
	  args[0] = arg0;
	  args[1] = arg1;
	  args[2] = arg2;
	  tree res = fold_builtin_n (UNKNOWN_LOCATION, decl, args, 3, false);
	  if (res)
	    {
	      /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (res);
	      res = fold_convert (type, res);
	      if (CONSTANT_CLASS_P (res))
		return res;
	    }
	}
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
			fn, type, arg0, arg1, arg2))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}
Example #4
static inline bool
constant_for_folding (tree t)
{
  return (CONSTANT_CLASS_P (t)
	  /* The following is only interesting to string builtins.  */
	  || (TREE_CODE (t) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
}
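
The predicate accepts literal constants plus the address of a string literal, which only the string builtins care about. Below is a standalone C sketch of the same predicate shape; the tagged node type is hypothetical, not GCC's tree.

#include <stdbool.h>
#include <stdio.h>

enum kind { K_INT_CST, K_STRING_CST, K_ADDR_EXPR, K_SSA_NAME };

struct node
{
  enum kind kind;
  const struct node *operand;	/* for K_ADDR_EXPR: the addressed object */
};

static bool
is_constant_class (const struct node *n)
{
  return n->kind == K_INT_CST || n->kind == K_STRING_CST;
}

static bool
constant_for_folding_sketch (const struct node *n)
{
  return (is_constant_class (n)
	  /* Addresses of string literals matter to string builtins.  */
	  || (n->kind == K_ADDR_EXPR
	      && n->operand->kind == K_STRING_CST));
}

int
main (void)
{
  struct node str = { K_STRING_CST, 0 };
  struct node addr = { K_ADDR_EXPR, &str };
  struct node name = { K_SSA_NAME, 0 };
  printf ("%d %d\n", constant_for_folding_sketch (&addr),
	  constant_for_folding_sketch (&name));	/* prints "1 0" */
  return 0;
}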
Example #5
bool
is_gimple_mem_ref_addr (tree t)
{
  return (is_gimple_reg (t)
	  || TREE_CODE (t) == INTEGER_CST
	  || (TREE_CODE (t) == ADDR_EXPR
	      && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
		  || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
}
Example #6
static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  gcc_assert ((TREE_CODE (ptr) == SSA_NAME
	       || TREE_CODE (ptr) == ADDR_EXPR
	       || TREE_CODE (ptr) == INTEGER_CST)
	      && (TREE_CODE (decl) == VAR_DECL
		  || TREE_CODE (decl) == PARM_DECL
		  || TREE_CODE (decl) == RESULT_DECL));

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && INDIRECT_REF_P (base))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && SSA_VAR_P (base))
	return operand_equal_p (base, decl, 0);
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* We can end up dereferencing constant pointers.
     Just bail out in this case.  */
  if (TREE_CODE (ptr) == INTEGER_CST)
    return true;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  /* If the decl can be used as a restrict tag and we have a restrict
     pointer and that pointer's points-to set doesn't contain this decl
     then they can't alias.  */
  if (DECL_RESTRICTED_P (decl)
      && TYPE_RESTRICT (TREE_TYPE (ptr))
      && pi->pt.vars_contains_restrict)
    return bitmap_bit_p (pi->pt.vars, DECL_UID (decl));

  return pt_solution_includes (&pi->pt, decl);
}
Example #7
bool
gimple_resimplify3 (gimple_seq *seq,
		    code_helper *res_code, tree type, tree *res_ops,
		    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1])
      && constant_for_folding (res_ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
	tem = fold_ternary/*_to_constant*/ (*res_code, type, res_ops[0],
					    res_ops[1], res_ops[2]);
      else
	tem = fold_const_call (combined_fn (*res_code), type,
			       res_ops[0], res_ops[1], res_ops[2]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_ops[0] = tem;
	  res_ops[1] = NULL_TREE;
	  res_ops[2] = NULL_TREE;
	  *res_code = TREE_CODE (res_ops[0]);
	  return true;
	}
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_code->is_tree_code ()
      && commutative_ternary_tree_code (*res_code)
      && tree_swap_operands_p (res_ops[0], res_ops[1]))
    {
      std::swap (res_ops[0], res_ops[1]);
      canonicalized = true;
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
		       *res_code, type,
		       res_ops[0], res_ops[1], res_ops[2]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return canonicalized;
}
Example #8
tree
gfc_evaluate_now (tree expr, stmtblock_t * pblock)
{
  tree var;

  if (CONSTANT_CLASS_P (expr))
    return expr;

  var = gfc_create_var (TREE_TYPE (expr), NULL);
  gfc_add_modify_expr (pblock, var, expr);

  return var;
}
Example #9
bool
is_gimple_ip_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
	      && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
		  || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
}
Example #10
static tree 
chrec_convert_1 (tree type, tree chrec, tree at_stmt,
		 bool use_overflow_semantics)
{
  tree ct, res;
  tree base, step;
  struct loop *loop;

  if (automatically_generated_chrec_p (chrec))
    return chrec;
  
  ct = chrec_type (chrec);
  if (ct == type)
    return chrec;

  if (!evolution_function_is_affine_p (chrec))
    goto keep_cast;

  loop = current_loops->parray[CHREC_VARIABLE (chrec)];
  base = CHREC_LEFT (chrec);
  step = CHREC_RIGHT (chrec);

  if (convert_affine_scev (loop, type, &base, &step, at_stmt,
			   use_overflow_semantics))
    return build_polynomial_chrec (loop->num, base, step);

  /* If we cannot propagate the cast inside the chrec, just keep the cast.  */
keep_cast:
  res = fold_convert (type, chrec);

  /* Don't propagate overflows.  */
  if (CONSTANT_CLASS_P (res))
    {
      TREE_CONSTANT_OVERFLOW (res) = 0;
      TREE_OVERFLOW (res) = 0;
    }

  /* But reject constants that don't fit in their type after conversion.
     This can happen if TYPE_MIN_VALUE or TYPE_MAX_VALUE are not the
     natural values associated with TYPE_PRECISION and TYPE_UNSIGNED,
     and can cause problems later when computing niters of loops.  Note
     that we don't do the check before converting because we don't want
     to reject conversions of negative chrecs to unsigned types.  */
  if (TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (type) == INTEGER_TYPE
      && !int_fits_type_p (res, type))
    res = chrec_dont_know;

  return res;
}
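
The int_fits_type_p guard above rejects converted constants that land outside the destination type's range. Here is a standalone sketch of that range test for an N-bit integer type; the function name and parameters are hypothetical, not GCC's API.

#include <stdbool.h>
#include <stdio.h>

/* Does VAL fit in an integer type of PREC bits (1 <= PREC < 64)?  */
static bool
fits_in_type (long long val, unsigned prec, bool is_unsigned)
{
  if (is_unsigned)
    {
      unsigned long long max = (1ULL << prec) - 1;
      return val >= 0 && (unsigned long long) val <= max;
    }
  long long max = (1LL << (prec - 1)) - 1;
  long long min = -max - 1;
  return val >= min && val <= max;
}

int
main (void)
{
  printf ("%d %d %d\n",
	  fits_in_type (300, 8, false),	  /* 0: too big for 8-bit signed */
	  fits_in_type (-1, 8, true),	  /* 0: negative, unsigned type */
	  fits_in_type (-128, 8, false)); /* 1: minimum of 8-bit signed */
  return 0;
}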
Example #11
static bool
gimple_resimplify1 (gimple_seq *seq,
		    code_helper *res_code, tree type, tree *res_ops,
		    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
	tem = const_unop (*res_code, type, res_ops[0]);
      else
	{
	  tree decl = builtin_decl_implicit (*res_code);
	  if (decl)
	    {
	      tem = fold_builtin_n (UNKNOWN_LOCATION, decl, res_ops, 1, false);
	      if (tem)
		{
		  /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
		  STRIP_NOPS (tem);
		  tem = fold_convert (type, tem);
		}
	    }
	}
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  res_ops[0] = tem;
	  res_ops[1] = NULL_TREE;
	  res_ops[2] = NULL_TREE;
	  *res_code = TREE_CODE (res_ops[0]);
	  return true;
	}
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
		       *res_code, type, res_ops[0]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return false;
}
Example #12
tree 
chrec_convert (tree type, 
	       tree chrec)
{
  tree ct;
  
  if (automatically_generated_chrec_p (chrec))
    return chrec;
  
  ct = chrec_type (chrec);
  if (ct == type)
    return chrec;

  if (TYPE_PRECISION (ct) < TYPE_PRECISION (type))
    return count_ev_in_wider_type (type, chrec);

  switch (TREE_CODE (chrec))
    {
    case POLYNOMIAL_CHREC:
      return build_polynomial_chrec (CHREC_VARIABLE (chrec),
				     chrec_convert (type,
						    CHREC_LEFT (chrec)),
				     chrec_convert (type,
						    CHREC_RIGHT (chrec)));

    default:
      {
	tree res = fold_convert (type, chrec);

	/* Don't propagate overflows.  */
	TREE_OVERFLOW (res) = 0;
	if (CONSTANT_CLASS_P (res))
	  TREE_CONSTANT_OVERFLOW (res) = 0;

	/* But reject constants that don't fit in their type after conversion.
	   This can happen if TYPE_MIN_VALUE or TYPE_MAX_VALUE are not the
	   natural values associated with TYPE_PRECISION and TYPE_UNSIGNED,
	   and can cause problems later when computing niters of loops.  Note
	   that we don't do the check before converting because we don't want
	   to reject conversions of negative chrecs to unsigned types.  */
	if (TREE_CODE (res) == INTEGER_CST
	    && TREE_CODE (type) == INTEGER_TYPE
	    && !int_fits_type_p (res, type))
	  res = chrec_dont_know;

	return res;
      }
    }
}
Example #13
static tree
fold_const_reduction (tree type, tree arg, tree_code code)
{
  unsigned HOST_WIDE_INT nelts;
  if (TREE_CODE (arg) != VECTOR_CST
      || !VECTOR_CST_NELTS (arg).is_constant (&nelts))
    return NULL_TREE;

  tree res = VECTOR_CST_ELT (arg, 0);
  for (unsigned HOST_WIDE_INT i = 1; i < nelts; i++)
    {
      res = const_binop (code, type, res, VECTOR_CST_ELT (arg, i));
      if (res == NULL_TREE || !CONSTANT_CLASS_P (res))
	return NULL_TREE;
    }
  return res;
}
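
The loop above folds a constant vector pairwise and bails out the moment one step fails to stay constant. Below is a standalone C analogue, using a sentinel value where GCC uses NULL_TREE; the sentinel choice is hypothetical.

#include <limits.h>
#include <stdio.h>

#define NOT_CONST LLONG_MIN	/* hypothetical stand-in for NULL_TREE */

/* Fold one addition, failing (like const_binop returning NULL_TREE)
   when the result would overflow or collide with the sentinel.  */
static long long
const_plus (long long a, long long b)
{
  if ((b > 0 && a > LLONG_MAX - b)
      || (b < 0 && a < LLONG_MIN + 1 - b))
    return NOT_CONST;
  return a + b;
}

static long long
fold_reduction_sketch (const long long *elts, unsigned n)
{
  long long res = elts[0];
  for (unsigned i = 1; i < n; i++)
    {
      res = const_plus (res, elts[i]);
      if (res == NOT_CONST)	/* mirror the NULL_TREE bail-out */
	return NOT_CONST;
    }
  return res;
}

int
main (void)
{
  long long v[4] = { 1, 2, 3, 4 };
  printf ("%lld\n", fold_reduction_sketch (v, 4));	/* prints 10 */
  return 0;
}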
Example #14
static tree
fold_const_fold_left (tree type, tree arg0, tree arg1, tree_code code)
{
  if (TREE_CODE (arg1) != VECTOR_CST)
    return NULL_TREE;

  unsigned HOST_WIDE_INT nelts;
  if (!VECTOR_CST_NELTS (arg1).is_constant (&nelts))
    return NULL_TREE;

  for (unsigned HOST_WIDE_INT i = 0; i < nelts; i++)
    {
      arg0 = const_binop (code, type, arg0, VECTOR_CST_ELT (arg1, i));
      if (arg0 == NULL_TREE || !CONSTANT_CLASS_P (arg0))
	return NULL_TREE;
    }
  return arg0;
}
Example #15
static bool
constant_after_peeling (tree op, gimple *stmt, struct loop *loop)
{
  affine_iv iv;

  if (is_gimple_min_invariant (op))
    return true;

  /* We can still fold accesses to constant arrays when the index is known.  */
  if (TREE_CODE (op) != SSA_NAME)
    {
      tree base = op;

      /* First, take a quick look to see if there is a constant array inside.  */
      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);
      if ((DECL_P (base)
	   && ctor_for_folding (base) != error_mark_node)
	  || CONSTANT_CLASS_P (base))
	{
	  /* If so, see if we understand all the indices.  */
	  base = op;
	  while (handled_component_p (base))
	    {
	      if (TREE_CODE (base) == ARRAY_REF
		  && !constant_after_peeling (TREE_OPERAND (base, 1), stmt, loop))
		return false;
	      base = TREE_OPERAND (base, 0);
	    }
	  return true;
	}
      return false;
    }

  /* Induction variables are constants.  */
  if (!simple_iv (loop, loop_containing_stmt (stmt), op, &iv, false))
    return false;
  if (!is_gimple_min_invariant (iv.base))
    return false;
  if (!is_gimple_min_invariant (iv.step))
    return false;
  return true;
}
Example #16
tree
gimple_simplify (enum built_in_function fn, tree type,
		 tree arg0,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0))
    {
      tree res = fold_const_call (as_combined_fn (fn), type, arg0);
      if (res && CONSTANT_CLASS_P (res))
	return res;
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
			as_combined_fn (fn), type, arg0))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}
Example #17
tree
gimple_simplify (enum tree_code code, tree type,
		 tree op0,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (op0))
    {
      tree res = const_unop (code, type, op0);
      if (res != NULL_TREE
	  && CONSTANT_CLASS_P (res))
	return res;
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
			code, type, op0))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}
Example #18
bool
gimple_resimplify1 (gimple_seq *seq,
		    code_helper *res_code, tree type, tree *res_ops,
		    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
	tem = const_unop (*res_code, type, res_ops[0]);
      else
	tem = fold_const_call (combined_fn (*res_code), type, res_ops[0]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_ops[0] = tem;
	  res_ops[1] = NULL_TREE;
	  res_ops[2] = NULL_TREE;
	  *res_code = TREE_CODE (res_ops[0]);
	  return true;
	}
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
		       *res_code, type, res_ops[0]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return false;
}
Example #19
bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF
	   || TREE_CODE (op) == ARRAY_RANGE_REF)
	  && !is_gimple_val (TREE_OPERAND (op, 1)))
	    return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}
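
is_gimple_address strips handled components (array and field references) down to the base object before classifying it. A standalone sketch of that walk over a hypothetical node type, not GCC's tree:

#include <stdbool.h>

enum kind { K_ARRAY_REF, K_COMPONENT_REF, K_VAR_DECL, K_INT_CST };

struct node
{
  enum kind kind;
  const struct node *operand;	/* inner reference or base object */
};

static bool
handled_component (const struct node *n)
{
  return n->kind == K_ARRAY_REF || n->kind == K_COMPONENT_REF;
}

/* Walk e.g. x.a[i].b down to its base, then classify the base
   (decls and constants are acceptable here, as in the original).  */
static bool
is_address_sketch (const struct node *op)
{
  while (handled_component (op))
    op = op->operand;
  return op->kind == K_VAR_DECL || op->kind == K_INT_CST;
}

int
main (void)
{
  struct node var = { K_VAR_DECL, 0 };
  struct node field = { K_COMPONENT_REF, &var };
  struct node elem = { K_ARRAY_REF, &field };
  return is_address_sketch (&elem) ? 0 : 1;
}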
Example #20
static bool
gimple_resimplify3 (gimple_seq *seq,
		    code_helper *res_code, tree type, tree *res_ops,
		    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1])
      && constant_for_folding (res_ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
	tem = fold_ternary/*_to_constant*/ (*res_code, type, res_ops[0],
					    res_ops[1], res_ops[2]);
      else
	{
	  tree decl = builtin_decl_implicit (*res_code);
	  if (decl)
	    {
	      tem = fold_builtin_n (UNKNOWN_LOCATION, decl, res_ops, 3, false);
	      if (tem)
		{
		  /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
		  STRIP_NOPS (tem);
		  tem = fold_convert (type, tem);
		}
	    }
	}
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  res_ops[0] = tem;
	  res_ops[1] = NULL_TREE;
	  res_ops[2] = NULL_TREE;
	  *res_code = TREE_CODE (res_ops[0]);
	  return true;
	}
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_code->is_tree_code ()
      && commutative_ternary_tree_code (*res_code)
      && tree_swap_operands_p (res_ops[0], res_ops[1], false))
    {
      tree tem = res_ops[0];
      res_ops[0] = res_ops[1];
      res_ops[1] = tem;
      canonicalized = true;
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
		       *res_code, type,
		       res_ops[0], res_ops[1], res_ops[2]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return canonicalized;
}
Example #21
static tree
chrec_convert_1 (tree type, tree chrec, gimple *at_stmt,
		 bool use_overflow_semantics)
{
  tree ct, res;
  tree base, step;
  struct loop *loop;

  if (automatically_generated_chrec_p (chrec))
    return chrec;

  ct = chrec_type (chrec);
  if (useless_type_conversion_p (type, ct))
    return chrec;

  if (!evolution_function_is_affine_p (chrec))
    goto keep_cast;

  loop = get_chrec_loop (chrec);
  base = CHREC_LEFT (chrec);
  step = CHREC_RIGHT (chrec);

  if (convert_affine_scev (loop, type, &base, &step, at_stmt,
			   use_overflow_semantics))
    return build_polynomial_chrec (loop->num, base, step);

  /* If we cannot propagate the cast inside the chrec, just keep the cast.  */
keep_cast:
  /* Fold will not canonicalize (long)(i - 1) to (long)i - 1 because that
     may be more expensive.  We do want to perform this optimization here
     though for canonicalization reasons.  */
  if (use_overflow_semantics
      && (TREE_CODE (chrec) == PLUS_EXPR
	  || TREE_CODE (chrec) == MINUS_EXPR)
      && TREE_CODE (type) == INTEGER_TYPE
      && TREE_CODE (ct) == INTEGER_TYPE
      && TYPE_PRECISION (type) > TYPE_PRECISION (ct)
      && TYPE_OVERFLOW_UNDEFINED (ct))
    res = fold_build2 (TREE_CODE (chrec), type,
		       fold_convert (type, TREE_OPERAND (chrec, 0)),
		       fold_convert (type, TREE_OPERAND (chrec, 1)));
  /* Similarly, perform the trick that (signed char)((int)x + 2) can be
     narrowed to (signed char)((unsigned char)x + 2).  */
  else if (use_overflow_semantics
	   && TREE_CODE (chrec) == POLYNOMIAL_CHREC
	   && TREE_CODE (ct) == INTEGER_TYPE
	   && TREE_CODE (type) == INTEGER_TYPE
	   && TYPE_OVERFLOW_UNDEFINED (type)
	   && TYPE_PRECISION (type) < TYPE_PRECISION (ct))
    {
      tree utype = unsigned_type_for (type);
      res = build_polynomial_chrec (CHREC_VARIABLE (chrec),
				    fold_convert (utype,
						  CHREC_LEFT (chrec)),
				    fold_convert (utype,
						  CHREC_RIGHT (chrec)));
      res = chrec_convert_1 (type, res, at_stmt, use_overflow_semantics);
    }
  else
    res = fold_convert (type, chrec);

  /* Don't propagate overflows.  */
  if (CONSTANT_CLASS_P (res))
    TREE_OVERFLOW (res) = 0;

  /* But reject constants that don't fit in their type after conversion.
     This can happen if TYPE_MIN_VALUE or TYPE_MAX_VALUE are not the
     natural values associated with TYPE_PRECISION and TYPE_UNSIGNED,
     and can cause problems later when computing niters of loops.  Note
     that we don't do the check before converting because we don't want
     to reject conversions of negative chrecs to unsigned types.  */
  if (TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (type) == INTEGER_TYPE
      && !int_fits_type_p (res, type))
    res = chrec_dont_know;

  return res;
}
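
The narrowing trick above rebuilds the chrec in the corresponding unsigned type before converting down, because unsigned arithmetic wraps instead of overflowing. A standalone illustration of why that is safe (plain C, independent of the chrec machinery):

#include <stdio.h>

int
main (void)
{
  int x = 127;

  /* Doing the arithmetic in the narrow signed type could overflow;
     the unsigned variant has well-defined wrapping, which is what
     the chrec code relies on when it converts to utype first.  */
  unsigned char ux = (unsigned char) x;
  signed char narrow = (signed char) (unsigned char) (ux + 2);

  /* Converting 129 to signed char is implementation-defined; the
     usual two's-complement targets print -127.  */
  printf ("%d\n", narrow);
  return 0;
}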
Example #22
static tree
forward_propagate_into_cond_1 (tree cond, tree *test_var_p)
{
  tree new_cond = NULL_TREE;
  enum tree_code cond_code = TREE_CODE (cond);
  tree test_var = NULL_TREE;
  tree def;
  tree def_rhs;

  /* If the condition is not a lone variable or an equality test of an
     SSA_NAME against an integral constant, then we do not have an
     optimizable case.

     Note these conditions also ensure the COND_EXPR has no
     virtual operands or other side effects.  */
  if (cond_code != SSA_NAME
      && !((cond_code == EQ_EXPR || cond_code == NE_EXPR)
	   && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
	   && CONSTANT_CLASS_P (TREE_OPERAND (cond, 1))
	   && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (cond, 1)))))
    return NULL_TREE;

  /* Extract the single variable used in the test into TEST_VAR.  */
  if (cond_code == SSA_NAME)
    test_var = cond;
  else
    test_var = TREE_OPERAND (cond, 0);

  /* Now get the defining statement for TEST_VAR.  Skip this case if
     it's not defined by some MODIFY_EXPR.  */
  def = SSA_NAME_DEF_STMT (test_var);
  if (TREE_CODE (def) != MODIFY_EXPR)
    return NULL_TREE;

  def_rhs = TREE_OPERAND (def, 1);

  /* If TEST_VAR is set by adding or subtracting a constant
     from an SSA_NAME, then it is interesting to us as we
     can adjust the constant in the conditional and thus
     eliminate the arithmetic operation.  */
  if (TREE_CODE (def_rhs) == PLUS_EXPR
      || TREE_CODE (def_rhs) == MINUS_EXPR)
    {
      tree op0 = TREE_OPERAND (def_rhs, 0);
      tree op1 = TREE_OPERAND (def_rhs, 1);

      /* The first operand must be an SSA_NAME and the second
	 operand must be a constant.  */
      if (TREE_CODE (op0) != SSA_NAME
	  || !CONSTANT_CLASS_P (op1)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	return NULL_TREE;

      /* Don't propagate if the first operand occurs in
	 an abnormal PHI.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
	return NULL_TREE;

      if (has_single_use (test_var))
	{
	  enum tree_code new_code;
	  tree t;

	  /* If the variable was defined via X + C, then we must
	     subtract C from the constant in the conditional.
	     Otherwise we add C to the constant in the
	     conditional.  The result must fold into a valid
	     gimple operand to be optimizable.  */
	  new_code = (TREE_CODE (def_rhs) == PLUS_EXPR
		      ? MINUS_EXPR : PLUS_EXPR);
	  t = int_const_binop (new_code, TREE_OPERAND (cond, 1), op1, 0);
	  if (!is_gimple_val (t))
	    return NULL_TREE;

	  new_cond = build (cond_code, boolean_type_node, op0, t);
	}
    }

  /* These cases require comparisons of a naked SSA_NAME or
     comparison of an SSA_NAME against zero or one.  */
  else if (TREE_CODE (cond) == SSA_NAME
	   || integer_zerop (TREE_OPERAND (cond, 1))
	   || integer_onep (TREE_OPERAND (cond, 1)))
    {
      /* If TEST_VAR is set from a relational operation
	 between two SSA_NAMEs or a combination of an SSA_NAME
	 and a constant, then it is interesting.  */
      if (COMPARISON_CLASS_P (def_rhs))
	{
	  tree op0 = TREE_OPERAND (def_rhs, 0);
	  tree op1 = TREE_OPERAND (def_rhs, 1);

	  /* Both operands of DEF_RHS must be SSA_NAMEs or
	     constants.  */
	  if ((TREE_CODE (op0) != SSA_NAME
	       && !is_gimple_min_invariant (op0))
	      || (TREE_CODE (op1) != SSA_NAME
		  && !is_gimple_min_invariant (op1)))
	    return NULL_TREE;

	  /* Don't propagate if the first operand occurs in
	     an abnormal PHI.  */
	  if (TREE_CODE (op0) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
	    return NULL_TREE;

	  /* Don't propagate if the second operand occurs in
	     an abnormal PHI.  */
	  if (TREE_CODE (op1) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op1))
	    return NULL_TREE;

	  if (has_single_use (test_var))
	    {
	      /* TEST_VAR was set from a relational operator.  */
	      new_cond = build (TREE_CODE (def_rhs),
				boolean_type_node, op0, op1);

	      /* Invert the conditional if necessary.  */
	      if ((cond_code == EQ_EXPR
		   && integer_zerop (TREE_OPERAND (cond, 1)))
		  || (cond_code == NE_EXPR
		      && integer_onep (TREE_OPERAND (cond, 1))))
		{
		  new_cond = invert_truthvalue (new_cond);

		  /* If we did not get a simple relational
		     expression or bare SSA_NAME, then we can
		     not optimize this case.  */
		  if (!COMPARISON_CLASS_P (new_cond)
		      && TREE_CODE (new_cond) != SSA_NAME)
		    new_cond = NULL_TREE;
		}
	    }
	}

      /* If TEST_VAR is set from a TRUTH_NOT_EXPR, then it
	 is interesting.  */
      else if (TREE_CODE (def_rhs) == TRUTH_NOT_EXPR)
	{
	  enum tree_code new_code;

	  def_rhs = TREE_OPERAND (def_rhs, 0);

	  /* DEF_RHS must be an SSA_NAME or constant.  */
	  if (TREE_CODE (def_rhs) != SSA_NAME
	      && !is_gimple_min_invariant (def_rhs))
	    return NULL_TREE;

	  /* Don't propagate if the operand occurs in
	     an abnormal PHI.  */
	  if (TREE_CODE (def_rhs) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_rhs))
	    return NULL_TREE;

	  if (cond_code == SSA_NAME
	      || (cond_code == NE_EXPR
		  && integer_zerop (TREE_OPERAND (cond, 1)))
	      || (cond_code == EQ_EXPR
		  && integer_onep (TREE_OPERAND (cond, 1))))
	    new_code = EQ_EXPR;
	  else
	    new_code = NE_EXPR;

	  new_cond = build2 (new_code, boolean_type_node, def_rhs,
			     fold_convert (TREE_TYPE (def_rhs),
					   integer_zero_node));
	}

      /* If TEST_VAR was set from a cast of an integer type
	 to a boolean type or a cast of a boolean to an
	 integral, then it is interesting.  */
      else if (TREE_CODE (def_rhs) == NOP_EXPR
	       || TREE_CODE (def_rhs) == CONVERT_EXPR)
	{
	  tree outer_type;
	  tree inner_type;

	  outer_type = TREE_TYPE (def_rhs);
	  inner_type = TREE_TYPE (TREE_OPERAND (def_rhs, 0));

	  if ((TREE_CODE (outer_type) == BOOLEAN_TYPE
	       && INTEGRAL_TYPE_P (inner_type))
	      || (TREE_CODE (inner_type) == BOOLEAN_TYPE
		  && INTEGRAL_TYPE_P (outer_type)))
	    ;
	  else if (INTEGRAL_TYPE_P (outer_type)
		   && INTEGRAL_TYPE_P (inner_type)
		   && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
		   && ssa_name_defined_by_comparison_p (TREE_OPERAND (def_rhs,
								      0)))
	    ;
	  else
	    return NULL_TREE;

	  /* Don't propagate if the operand occurs in
	     an abnormal PHI.  */
	  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND
						  (def_rhs, 0)))
	    return NULL_TREE;

	  if (has_single_use (test_var))
	    {
	      enum tree_code new_code;
	      tree new_arg;

	      if (cond_code == SSA_NAME
		  || (cond_code == NE_EXPR
		      && integer_zerop (TREE_OPERAND (cond, 1)))
		  || (cond_code == EQ_EXPR
		      && integer_onep (TREE_OPERAND (cond, 1))))
		new_code = NE_EXPR;
	      else
		new_code = EQ_EXPR;

	      new_arg = TREE_OPERAND (def_rhs, 0);
	      new_cond = build2 (new_code, boolean_type_node, new_arg,
				 fold_convert (TREE_TYPE (new_arg),
					       integer_zero_node));
	    }
	}
    }

  *test_var_p = test_var;
  return new_cond;
}
Example #23
/* Walk the node hierarchy up to the topmost node.  This is exactly
   how it's done in mudflap, from which this code is borrowed.  */
static tree
mf_walk_comp_ref (tree *tp, tree type, location_t location,
                  tree *addr_store, tree *base_store)
{
    tree var, t, addr, base, size;

    t = *tp;

    int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
    /* If we have a bitfield component reference, we must note the
       innermost addressable object in ELT, from which we will
       construct the byte-addressable bounds of the bitfield.  */
    tree elt = NULL_TREE;
    int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
            && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

    /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
       containment hierarchy to find the outermost VAR_DECL.  */
    var = TREE_OPERAND (t, 0);
    while (1)
    {
        if (bitfield_ref_p && elt == NULL_TREE
                && (TREE_CODE (var) == ARRAY_REF
                    || TREE_CODE (var) == COMPONENT_REF))
            elt = var;

        if (TREE_CODE (var) == ARRAY_REF)
        {
            component_ref_only = 0;
            var = TREE_OPERAND (var, 0);
        }
        else if (TREE_CODE (var) == COMPONENT_REF)
            var = TREE_OPERAND (var, 0);
        else if (INDIRECT_REF_P (var)
                || TREE_CODE (var) == MEM_REF)
        {
            base = TREE_OPERAND (var, 0);
            break;
        }
        else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
        {
            var = TREE_OPERAND (var, 0);
            if (CONSTANT_CLASS_P (var)
                    && TREE_CODE (var) != STRING_CST)
                return NULL_TREE;
        }
        else
        {
            DEBUGLOG ("TREE_CODE(temp) : %s comp_ref_only = %d eligible = %d\n",
                      tree_code_name[(int) TREE_CODE (var)], component_ref_only,
                      mf_decl_eligible_p (var));
            gcc_assert (TREE_CODE (var) == VAR_DECL
                    || TREE_CODE (var) == SSA_NAME /* TODO: Check this */
                    || TREE_CODE (var) == PARM_DECL
                    || TREE_CODE (var) == RESULT_DECL
                    || TREE_CODE (var) == STRING_CST);
            /* Don't instrument this access if the underlying
               variable is not "eligible".  This test matches
               those arrays that have only known-valid indexes,
               and thus are not labeled TREE_ADDRESSABLE.  */
            if (! mf_decl_eligible_p (var)) /* TODO: is this needed? || component_ref_only */
                return NULL_TREE;
            else
            {
                base = build1 (ADDR_EXPR,
                        build_pointer_type (TREE_TYPE (var)), var);
                break;
            }
        }
    }

    /* Handle the case of ordinary non-indirection structure
       accesses.  These have only nested COMPONENT_REF nodes (no
       INDIRECT_REF), but pass through the above filter loop.
       Note that it's possible for such a struct variable to match
       the eligible_p test because someone else might take its
       address sometime.  */

    /* We need special processing for bitfield components, because
       their addresses cannot be taken.  */
    if (bitfield_ref_p)
    {
        tree field = TREE_OPERAND (t, 1);

        if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
            size = DECL_SIZE_UNIT (field);

        if (elt)
            elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
                    elt);
        addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
        addr = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                addr, fold_convert_loc (location, sizetype,
                    byte_position (field)));
    }
    else
        addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

    if (addr_store)
        *addr_store = addr;

    if (base_store)
        *base_store = base;

    return var;
}
Example #24
static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
	/* If we have a bitfield component reference, we must note the
	   innermost addressable object in ELT, from which we will
	   construct the byte-addressable bounds of the bitfield.  */
	tree elt = NULL_TREE;
	int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
			      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
	    if (bitfield_ref_p && elt == NULL_TREE
		&& (TREE_CODE (var) == ARRAY_REF
		    || TREE_CODE (var) == COMPONENT_REF))
	      elt = var;

            if (TREE_CODE (var) == ARRAY_REF)
              {
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var)
		     || TREE_CODE (var) == MEM_REF)
              {
		base = TREE_OPERAND (var, 0);
                break;
              }
            else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	      {
		var = TREE_OPERAND (var, 0);
		if (CONSTANT_CLASS_P (var)
		    && TREE_CODE (var) != STRING_CST)
		  return;
	      }
            else
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
		  {
		    base = build1 (ADDR_EXPR,
				   build_pointer_type (TREE_TYPE (var)), var);
		    break;
		  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

	    if (elt)
	      elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
			    elt);
            addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
            addr = fold_build_pointer_plus_loc (location,
						addr, byte_position (field));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                             fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
					  fold_convert (mf_uintptr_type, addr),
					  size),
                             integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
	(location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case MEM_REF:
      if (addr_expr_of_non_mem_decl_p (TREE_OPERAND (t, 0)))
	return;

      addr = fold_build_pointer_plus_loc (location, TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1));
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc (location,
			   fold_build_pointer_plus_loc (location,
							base, size), -1);
      break;

    case TARGET_MEM_REF:
      if (addr_expr_of_non_mem_decl_p (TMR_BASE (t)))
	return;

      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc (location,
			   fold_build_pointer_plus_loc (location,
							base, size), -1);
      break;

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
	       "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above? */
      {
        tree ofs, rem, bpu;

        /* If we're not dereferencing something, then the access
           must be ok.  */
        if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
          return;

        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = fold_convert (bitsizetype, TREE_OPERAND (t, 2));
        rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
        ofs = size_binop_loc (location, TRUNC_DIV_EXPR, ofs, bpu);

        size = fold_convert (bitsizetype, TREE_OPERAND (t, 1));
        size = size_binop_loc (location, PLUS_EXPR, size, rem);
        size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
        size = fold_convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = fold_convert (ptr_type_node, addr);
        addr = fold_build_pointer_plus_loc (location, addr, ofs);

        base = addr;
        limit = fold_build_pointer_plus_hwi_loc (location,
                             fold_build_pointer_plus_loc (location,
							  base, size), -1);
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, location, dirflag);
}
Example #25
static void
record_single_argument_cond_exprs (varray_type cond_worklist,
				   varray_type *vars_worklist,
				   bitmap vars)

{
  /* The first pass over the blocks gathers the set of variables we need
     immediate uses for as well as the set of interesting COND_EXPRs.

     A simpler implementation may be appropriate if/when we have a lower
     overhead means of getting immediate use information.  */
  while (VARRAY_ACTIVE_SIZE (cond_worklist) > 0)
    {
      tree last = VARRAY_TOP_TREE (cond_worklist);

      VARRAY_POP (cond_worklist);

      /* See if this block ends in a COND_EXPR.  */
      if (last && TREE_CODE (last) == COND_EXPR)
	{
	  tree cond = COND_EXPR_COND (last);
	  enum tree_code cond_code = TREE_CODE (cond);

	  /* If the condition is a lone variable or an equality test of
	     an SSA_NAME against an integral constant, then we may have an 
	     optimizable case.

	     Note these conditions also ensure the COND_EXPR has no
	     virtual operands or other side effects.  */
	  if (cond_code == SSA_NAME
	      || ((cond_code == EQ_EXPR || cond_code == NE_EXPR)
		  && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
		  && CONSTANT_CLASS_P (TREE_OPERAND (cond, 1))
		  && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (cond, 1)))))
	    {
	      tree def;
	      tree test_var;

	      /* Extract the single variable used in the test into TEST_VAR.  */
	      if (cond_code == SSA_NAME)
		test_var = cond;
	      else
		test_var = TREE_OPERAND (cond, 0);

	      /* If we have already recorded this SSA_NAME as interesting,
		 do not do so again.  */
	      if (bitmap_bit_p (vars, SSA_NAME_VERSION (test_var)))
		continue;

	      /* Now get the defining statement for TEST_VAR and see if it
		 something we are interested in.  */
	      def = SSA_NAME_DEF_STMT (test_var);
	      if (TREE_CODE (def) == MODIFY_EXPR)
		{
		  tree def_rhs = TREE_OPERAND (def, 1);

		  /* If TEST_VAR is set by adding or subtracting a constant
		     from an SSA_NAME, then it is interesting to us as we
		     can adjust the constant in the conditional and thus
		     eliminate the arithmetic operation.  */
		  if (TREE_CODE (def_rhs) == PLUS_EXPR
			 || TREE_CODE (def_rhs) == MINUS_EXPR)
		    {
		      tree op0 = TREE_OPERAND (def_rhs, 0);
		      tree op1 = TREE_OPERAND (def_rhs, 1);

		      /* The first operand must be an SSA_NAME and the second
			 operand must be a constant.  */
		      if (TREE_CODE (op0) != SSA_NAME
			  || !CONSTANT_CLASS_P (op1)
			  || !INTEGRAL_TYPE_P (TREE_TYPE (op1)))
			continue;
		      
		      /* Don't propagate if the first operand occurs in
		         an abnormal PHI.  */
		      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
		        continue;
		    }

		  /* These cases require comparisons of a naked SSA_NAME or
		     comparison of an SSA_NAME against zero or one.  */
		  else if (TREE_CODE (cond) == SSA_NAME
			   || integer_zerop (TREE_OPERAND (cond, 1))
			   || integer_onep (TREE_OPERAND (cond, 1)))
		    {
		      /* If TEST_VAR is set from a relational operation
			 between two SSA_NAMEs or a combination of an SSA_NAME
			 and a constant, then it is interesting.  */
		      if (COMPARISON_CLASS_P (def_rhs))
			{
			  tree op0 = TREE_OPERAND (def_rhs, 0);
			  tree op1 = TREE_OPERAND (def_rhs, 1);

			  /* Both operands of DEF_RHS must be SSA_NAMEs or
			     constants.  */
			  if ((TREE_CODE (op0) != SSA_NAME
			       && !is_gimple_min_invariant (op0))
			      || (TREE_CODE (op1) != SSA_NAME
				  && !is_gimple_min_invariant (op1)))
			    continue;
		      
			  /* Don't propagate if the first operand occurs in
			     an abnormal PHI.  */
			  if (TREE_CODE (op0) == SSA_NAME
			      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
			    continue;
		      
			  /* Don't propagate if the second operand occurs in
			     an abnormal PHI.  */
			  if (TREE_CODE (op1) == SSA_NAME
			      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op1))
			    continue;
		        }

		      /* If TEST_VAR is set from a TRUTH_NOT_EXPR, then it
			 is interesting.  */
		      else if (TREE_CODE (def_rhs) == TRUTH_NOT_EXPR)
			{
			  def_rhs = TREE_OPERAND (def_rhs, 0);

			  /* DEF_RHS must be an SSA_NAME or constant.  */
			  if (TREE_CODE (def_rhs) != SSA_NAME
			      && !is_gimple_min_invariant (def_rhs))
			    continue;
		      
			  /* Don't propagate if the operand occurs in
			     an abnormal PHI.  */
			  if (TREE_CODE (def_rhs) == SSA_NAME
			      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_rhs))
			    continue;
			}

		      /* If TEST_VAR was set from a cast of an integer type
			 to a boolean type or a cast of a boolean to an
			 integral, then it is interesting.  */
		      else if (TREE_CODE (def_rhs) == NOP_EXPR
			       || TREE_CODE (def_rhs) == CONVERT_EXPR)
			{
			  tree outer_type;
			  tree inner_type;

			  outer_type = TREE_TYPE (def_rhs);
			  inner_type = TREE_TYPE (TREE_OPERAND (def_rhs, 0));

			  if ((TREE_CODE (outer_type) == BOOLEAN_TYPE
			       && INTEGRAL_TYPE_P (inner_type))
			      || (TREE_CODE (inner_type) == BOOLEAN_TYPE
				  && INTEGRAL_TYPE_P (outer_type)))
			    ;
			  else
			    continue;
		      
			  /* Don't propagate if the operand occurs in
			     an abnormal PHI.  */
			  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
			      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND
					                          (def_rhs, 0)))
			    continue;
			}
		      else
			continue;
		    }
		  else
		    continue;

		  /* All the tests passed, record TEST_VAR as interesting.  */
		  VARRAY_PUSH_TREE (*vars_worklist, test_var);
		  bitmap_set_bit (vars, SSA_NAME_VERSION (test_var));
		}
	    }
	}
    }
}