Example #1
static void
tree_if_convert_cond_expr (struct loop *loop, tree stmt, tree cond,
			   block_stmt_iterator *bsi)
{
  tree c, c2;
  edge true_edge, false_edge;

  gcc_assert (TREE_CODE (stmt) == COND_EXPR);

  c = COND_EXPR_COND (stmt);

  extract_true_false_edges_from_block (bb_for_stmt (stmt),
 				       &true_edge, &false_edge);

  /* Add new condition into destination's predicate list.  */

  /* If 'c' is true then TRUE_EDGE is taken.  */
  add_to_dst_predicate_list (loop, true_edge, cond,
			     unshare_expr (c), bsi);

  /* If 'c' is false then FALSE_EDGE is taken.  */
  c2 = invert_truthvalue (unshare_expr (c));
  add_to_dst_predicate_list (loop, false_edge, cond, c2, bsi);

  /* Now this conditional statement is redundant. Remove it.
     But, do not remove exit condition! Update exit condition
     using new condition.  */
  if (!bb_with_exit_edge_p (loop, bb_for_stmt (stmt)))
    {
      bsi_remove (bsi, true);
      cond = NULL_TREE;
    }
  return;
}
void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
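Both functions above show the basic contract of unshare_expr: a GENERIC tree that is about to be referenced from a second place in the IL must be deep-copied first, because later passes fold and rewrite trees in place. In tree_if_convert_cond_expr the condition c is copied once per destination predicate list, and invert_truthvalue gets its own copy since it may rewrite subtrees of its argument. A minimal sketch of the idiom, using the tuple-era gimple_build_assign seen in later examples and hypothetical names t, lhs_a and lhs_b:

/* Sketch: emit the same GENERIC tree T into two statements.  Each
   emission gets a private copy, so folding one statement in place
   cannot corrupt the other.  */
stmt_a = gimple_build_assign (lhs_a, unshare_expr (t));
stmt_b = gimple_build_assign (lhs_b, unshare_expr (t));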
Example #3
void
insert_debug_temp_for_var_def (gimple stmt)
{
  gimple def_stmt = ((void *) 0);
  int usecount = 0;
  tree value = ((void *) 0);
  for (; arf ();)
    {
      if (!gimple_debug_bind_p (stmt))
        continue;
      if (usecount++)
        break;
      unsigned char no_value = 0;
      if (!gimple_bb (def_stmt))
        no_value = 1;
      if (!no_value)
        value = gimple_assign_rhs_to_tree (def_stmt);
    }
  if (value)
    {
      if ((tree_code_type[(int) (((value)->code))] == 42)
          || (usecount == 1 && (is_gimple_min_invariant (value))))
        value = unshare_expr (value);
    }
}
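The body above is a machine-reduced form (note the placeholder arf () loop and the bare 42), so its control flow is not meaningful; what it preserves is the final guard. In the full function the rule is roughly: a value may be duplicated into the debug bind if it is a constant, or if it is invariant and this was its only remaining use. A simplified sketch of that guard, under those assumptions:

/* Sketch, simplified: constants and single-use invariants may be
   duplicated into each debug bind, so bind a private copy.  */
if (CONSTANT_CLASS_P (value)
    || (usecount == 1 && is_gimple_min_invariant (value)))
  value = unshare_expr (value);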
Example #4
void
gimple_gen_ic_profiler (histogram_value value, unsigned tag, unsigned base)
{
  tree tmp1;
  gassign *stmt1, *stmt2, *stmt3;
  gimple stmt = value->hvalue.stmt;
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  tree ref_ptr = tree_coverage_counter_addr (tag, base);

  if ( (PARAM_VALUE (PARAM_INDIR_CALL_TOPN_PROFILE) &&
        tag == GCOV_COUNTER_V_INDIR) ||
       (!PARAM_VALUE (PARAM_INDIR_CALL_TOPN_PROFILE) &&
        tag == GCOV_COUNTER_ICALL_TOPNV))
    return;

  ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
				      true, NULL_TREE, true, GSI_SAME_STMT);

  /* Insert code:

    stmt1: __gcov_indirect_call_counters = get_relevant_counter_ptr ();
    stmt2: tmp1 = (void *) (indirect call argument value)
    stmt3: __gcov_indirect_call_callee = tmp1;
   */

  stmt1 = gimple_build_assign (ic_gcov_type_ptr_var, ref_ptr);
  tmp1 = make_temp_ssa_name (ptr_void, NULL, "PROF");
  stmt2 = gimple_build_assign (tmp1, unshare_expr (value->hvalue.value));
  stmt3 = gimple_build_assign (ic_void_ptr_var, gimple_assign_lhs (stmt2));

  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
}
void
gimple_gen_ic_profiler (histogram_value value, unsigned tag, unsigned base)
{
  tree tmp1;
  gimple stmt1, stmt2, stmt3;
  gimple stmt = value->hvalue.stmt;
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  tree ref_ptr = tree_coverage_counter_addr (tag, base);

  ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
				      true, NULL_TREE, true, GSI_SAME_STMT);

  /* Insert code:

    __gcov_indirect_call_counters = get_relevant_counter_ptr ();
    __gcov_indirect_call_callee = (void *) indirect call argument;
   */

  tmp1 = create_tmp_reg (ptr_void, "PROF");
  stmt1 = gimple_build_assign (ic_gcov_type_ptr_var, ref_ptr);
  stmt2 = gimple_build_assign (tmp1, unshare_expr (value->hvalue.value));
  gimple_assign_set_lhs (stmt2, make_ssa_name (tmp1, stmt2));
  stmt3 = gimple_build_assign (ic_void_ptr_var, gimple_assign_lhs (stmt2));

  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
}
Example #6
static bool
forward_propagate_addr_into_variable_array_index (tree offset,
						  tree def_rhs,
						  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one. In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861). */
  if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
    {
      if (is_gimple_assign (offset_def)
	  && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
	return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
	return false;

      /* The RHS of the statement which defines OFFSET must be a
	 multiplication of an object by the size of the array elements. 
	 This implicitly verifies that the size of the array elements
	 is constant.  */
     offset = gimple_assign_rhs1 (offset_def);
     if (gimple_assign_rhs_code (offset_def) != MULT_EXPR
	 || TREE_CODE (gimple_assign_rhs2 (offset_def)) != INTEGER_CST
	 || !simple_cst_equal (gimple_assign_rhs2 (offset_def),
			       TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
	return false;

      /* The first operand to the MULT_EXPR is the desired index.  */
      index = offset;
    }

  /* Replace the pointer addition with array indexing.  */
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
  use_stmt = gsi_stmt (*use_stmt_gsi);
  TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
    = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
static bool
forward_propagate_addr_into_variable_array_index (tree offset, tree lhs,
						  tree stmt, tree use_stmt)
{
  tree index;

  /* The offset must be defined by a simple MODIFY_EXPR statement.  */
  if (TREE_CODE (offset) != MODIFY_EXPR)
    return false;

  /* The RHS of the statement which defines OFFSET must be a gimple
     cast of another SSA_NAME.  */
  offset = TREE_OPERAND (offset, 1);
  if (!is_gimple_cast (offset))
    return false;

  offset = TREE_OPERAND (offset, 0);
  if (TREE_CODE (offset) != SSA_NAME)
    return false;

  /* Get the defining statement of the offset before type
     conversion.  */
  offset = SSA_NAME_DEF_STMT (offset);

  /* The statement which defines OFFSET before type conversion
     must be a simple MODIFY_EXPR.  */
  if (TREE_CODE (offset) != MODIFY_EXPR)
    return false;

  /* The RHS of the statement which defines OFFSET must be a
     multiplication of an object by the size of the array elements. 
     This implicitly verifies that the size of the array elements
     is constant.  */
  offset = TREE_OPERAND (offset, 1);
  if (TREE_CODE (offset) != MULT_EXPR
      || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
      || !simple_cst_equal (TREE_OPERAND (offset, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (lhs)))))
    return false;

  /* The first operand to the MULT_EXPR is the desired index.  */
  index = TREE_OPERAND (offset, 0);

  /* Replace the pointer addition with array indexing.  */
  TREE_OPERAND (use_stmt, 1) = unshare_expr (TREE_OPERAND (stmt, 1));
  TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (use_stmt, 1), 0), 1) = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
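The step both versions share is the grafting sequence at the end: a copy of the address expression becomes the new RHS, and the index slot inside that copy is then overwritten. Distilled from the first version (a sketch, not a drop-in):

/* Install a private copy of DEF_RHS, then rewrite the ARRAY_REF index
   inside the copy.  Without unshare_expr, this in-place write would
   also mutate the tree still attached to the defining statement.  */
gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
use_stmt = gsi_stmt (*use_stmt_gsi);
TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1) = index;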
Example #8
static void
issue_prefetch_ref (struct mem_ref *ref, unsigned unroll_factor, unsigned ahead)
{
  HOST_WIDE_INT delta;
  tree addr, addr_base, write_p, local;
  gimple prefetch;
  gimple_stmt_iterator bsi;
  unsigned n_prefetches, ap;
  bool nontemporal = ref->reuse_distance >= L2_CACHE_SIZE_BYTES;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Issued%s prefetch for %p.\n",
	     nontemporal ? " nontemporal" : "",
	     (void *) ref);

  bsi = gsi_for_stmt (ref->stmt);

  n_prefetches = ((unroll_factor + ref->prefetch_mod - 1)
		  / ref->prefetch_mod);
  addr_base = build_fold_addr_expr_with_type (ref->mem, ptr_type_node);
  addr_base = force_gimple_operand_gsi (&bsi, unshare_expr (addr_base),
					true, NULL, true, GSI_SAME_STMT);
  write_p = ref->write_p ? integer_one_node : integer_zero_node;
  local = build_int_cst (integer_type_node, nontemporal ? 0 : 3);

  for (ap = 0; ap < n_prefetches; ap++)
    {
      /* Determine the address to prefetch.  */
      delta = (ahead + ap * ref->prefetch_mod) * ref->group->step;
      addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
			  addr_base, size_int (delta));
      addr = force_gimple_operand_gsi (&bsi, unshare_expr (addr), true, NULL,
				       true, GSI_SAME_STMT);

      /* Create the prefetch instruction.  */
      prefetch = gimple_build_call (built_in_decls[BUILT_IN_PREFETCH],
				    3, addr, write_p, local);
      gsi_insert_before (&bsi, prefetch, GSI_SAME_STMT);
    }
}
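Here unshare_expr guards gimplification: fold_build2 may hand back an existing node (for instance addr_base itself if delta folds away), and force_gimple_operand_gsi rewrites its argument while splitting it into statements. Copying first keeps addr_base intact for the next iteration. The per-iteration step, isolated:

/* Build this iteration's address, then gimplify a private copy of
   whatever tree the folder returned; ADDR_BASE stays untouched for
   the next prefetch.  */
addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
                    addr_base, size_int (delta));
addr = force_gimple_operand_gsi (&bsi, unshare_expr (addr), true, NULL,
                                 true, GSI_SAME_STMT);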
Example #9
void
propagate_tree_value (tree *op_p, tree val)
{
  gcc_checking_assert (!(TREE_CODE (val) == SSA_NAME
			 && *op_p
			 && TREE_CODE (*op_p) == SSA_NAME
			 && !may_propagate_copy (*op_p, val)));

  if (TREE_CODE (val) == SSA_NAME)
    *op_p = val;
  else
    *op_p = unshare_expr (val);
}
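This helper (and its older twin in Example #11 below) encodes the propagation rule in four lines: an SSA_NAME has a unique definition and may safely be referenced from any number of use sites, while every other tree must be copied per use. A usage sketch with two hypothetical tree slots op_a and op_b:

/* Propagating one non-SSA value VAL into two slots yields two
   independent trees; only an SSA_NAME would be shared.  */
propagate_tree_value (&op_a, val);
propagate_tree_value (&op_b, val);
/* Afterwards op_a != op_b as nodes, though structurally equal.  */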
Example #10
static void
issue_prefetch_ref (struct mem_ref *ref, unsigned unroll_factor, unsigned ahead)
{
    HOST_WIDE_INT delta;
    tree addr, addr_base, prefetch, params, write_p;
    block_stmt_iterator bsi;
    unsigned n_prefetches, ap;

    if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Issued prefetch for %p.\n", (void *) ref);

    bsi = bsi_for_stmt (ref->stmt);

    n_prefetches = ((unroll_factor + ref->prefetch_mod - 1)
                    / ref->prefetch_mod);
    addr_base = build_fold_addr_expr_with_type (ref->mem, ptr_type_node);
    addr_base = force_gimple_operand_bsi (&bsi, unshare_expr (addr_base), true, NULL);

    for (ap = 0; ap < n_prefetches; ap++)
    {
        /* Determine the address to prefetch.  */
        delta = (ahead + ap * ref->prefetch_mod) * ref->group->step;
        addr = fold_build2 (PLUS_EXPR, ptr_type_node,
                            addr_base, build_int_cst (ptr_type_node, delta));
        addr = force_gimple_operand_bsi (&bsi, unshare_expr (addr), true, NULL);

        /* Create the prefetch instruction.  */
        write_p = ref->write_p ? integer_one_node : integer_zero_node;
        params = tree_cons (NULL_TREE, addr,
                            tree_cons (NULL_TREE, write_p, NULL_TREE));

        prefetch = build_function_call_expr (built_in_decls[BUILT_IN_PREFETCH],
                                             params);
        bsi_insert_before (&bsi, prefetch, BSI_SAME_STMT);
    }
}
Example #11
static void
replace_exp_1 (use_operand_p op_p, tree val,
    	       bool for_propagation ATTRIBUTE_UNUSED)
{
#if defined ENABLE_CHECKING
  tree op = USE_FROM_PTR (op_p);

  gcc_assert (!(for_propagation
		&& TREE_CODE (op) == SSA_NAME
		&& TREE_CODE (val) == SSA_NAME
		&& !may_propagate_copy (op, val)));
#endif

  if (TREE_CODE (val) == SSA_NAME)
    SET_USE (op_p, val);
  else
    SET_USE (op_p, unshare_expr (val));
}
Example #12
File: c-fold.c Project: 0day-ci/gcc
tree
decl_constant_value_for_optimization (tree exp)
{
  tree ret;

  if (!optimize
      || !VAR_P (exp)
      || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
      || DECL_MODE (exp) == BLKmode)
    return exp;

  ret = decl_constant_value (exp);
  /* Avoid unwanted tree sharing between the initializer and current
     function's body where the tree can be modified e.g. by the
     gimplifier.  */
  if (ret != exp && TREE_STATIC (exp))
    ret = unshare_expr (ret);
  return ret;
}
Example #13
void
gimple_gen_edge_profiler (int edgeno, edge e)
{
  tree ref, one;
  gimple stmt1, stmt2, stmt3;

  /* We share one temporary variable declaration per function.  This
     gets re-set in tree_profiling.  */
  if (gcov_type_tmp_var == NULL_TREE)
    gcov_type_tmp_var = create_tmp_reg (gcov_type_node, "PROF_edge_counter");

  if (PROFILE_GEN_EDGE_ATOMIC)
    ref = tree_coverage_counter_addr (GCOV_COUNTER_ARCS, edgeno);
  else
    ref = tree_coverage_counter_ref (GCOV_COUNTER_ARCS, edgeno);

  one = build_int_cst (gcov_type_node, 1);
  if (PROFILE_GEN_EDGE_ATOMIC)
    {
      /* __atomic_fetch_add (&counter, 1, MEMMODEL_RELAXED); */
      stmt3 = gimple_build_call (builtin_decl_explicit (
                                   GCOV_TYPE_ATOMIC_FETCH_ADD),
                                 3, ref, one,
                                 build_int_cst (integer_type_node,
                                   MEMMODEL_RELAXED));
      find_referenced_vars_in (stmt3);
    }
  else
    {
      stmt1 = gimple_build_assign (gcov_type_tmp_var, ref);
      gimple_assign_set_lhs (stmt1, make_ssa_name (gcov_type_tmp_var, stmt1));
      find_referenced_vars_in (stmt1);
      stmt2 = gimple_build_assign_with_ops (PLUS_EXPR, gcov_type_tmp_var,
            				gimple_assign_lhs (stmt1), one);
      gimple_assign_set_lhs (stmt2, make_ssa_name (gcov_type_tmp_var, stmt2));
      stmt3 = gimple_build_assign (unshare_expr (ref), gimple_assign_lhs (stmt2));

      gsi_insert_on_edge (e, stmt1);
      gsi_insert_on_edge (e, stmt2);
    }
  gsi_insert_on_edge (e, stmt3);
}
Example #14
void
gimple_gen_edge_profiler (int edgeno, edge e)
{
  tree ref, one, gcov_type_tmp_var;
  gassign *stmt1, *stmt2, *stmt3;

  ref = tree_coverage_counter_ref (GCOV_COUNTER_ARCS, edgeno);
  one = build_int_cst (gcov_type_node, 1);
  gcov_type_tmp_var = make_temp_ssa_name (gcov_type_node,
					  NULL, "PROF_edge_counter");
  stmt1 = gimple_build_assign (gcov_type_tmp_var, ref);
  gcov_type_tmp_var = make_temp_ssa_name (gcov_type_node,
					  NULL, "PROF_edge_counter");
  stmt2 = gimple_build_assign (gcov_type_tmp_var, PLUS_EXPR,
			       gimple_assign_lhs (stmt1), one);
  stmt3 = gimple_build_assign (unshare_expr (ref), gimple_assign_lhs (stmt2));
  gsi_insert_on_edge (e, stmt1);
  gsi_insert_on_edge (e, stmt2);
  gsi_insert_on_edge (e, stmt3);
}
void
gimple_gen_edge_profiler (int edgeno, edge e)
{
  tree ref, one;
  gimple stmt1, stmt2, stmt3;

  /* We share one temporary variable declaration per function.  This
     gets re-set in tree_profiling.  */
  if (gcov_type_tmp_var == NULL_TREE)
    gcov_type_tmp_var = create_tmp_reg (gcov_type_node, "PROF_edge_counter");
  ref = tree_coverage_counter_ref (GCOV_COUNTER_ARCS, edgeno);
  one = build_int_cst (gcov_type_node, 1);
  stmt1 = gimple_build_assign (gcov_type_tmp_var, ref);
  gimple_assign_set_lhs (stmt1, make_ssa_name (gcov_type_tmp_var, stmt1));
  stmt2 = gimple_build_assign_with_ops (PLUS_EXPR, gcov_type_tmp_var,
					gimple_assign_lhs (stmt1), one);
  gimple_assign_set_lhs (stmt2, make_ssa_name (gcov_type_tmp_var, stmt2));
  stmt3 = gimple_build_assign (unshare_expr (ref), gimple_assign_lhs (stmt2));
  gsi_insert_on_edge (e, stmt1);
  gsi_insert_on_edge (e, stmt2);
  gsi_insert_on_edge (e, stmt3);
}
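All three edge-profiler variants (Examples #13 and #14) keep one invariant: the counter reference ref occurs twice, first as the RHS of the load and then as the LHS of the store, and only the second occurrence is unshared. The load may own the original tree; the store must not alias it. Distilled, with the increment elided and hypothetical names tmp and newval:

stmt1 = gimple_build_assign (tmp, ref);                    /* tmp = counter */
/* ... build NEWVAL = tmp + 1 ... */
stmt3 = gimple_build_assign (unshare_expr (ref), newval);  /* counter = newval */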
Example #16
static bool
analyze_ref (struct loop *loop, tree *ref_p, tree *base,
	     HOST_WIDE_INT *step, HOST_WIDE_INT *delta,
	     gimple stmt)
{
  struct ar_data ar_data;
  tree off;
  HOST_WIDE_INT bit_offset;
  tree ref = *ref_p;

  *step = 0;
  *delta = 0;

  /* First strip off the component references.  Ignore bitfields.  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (ref, 1)))
    ref = TREE_OPERAND (ref, 0);

  *ref_p = ref;

  for (; TREE_CODE (ref) == COMPONENT_REF; ref = TREE_OPERAND (ref, 0))
    {
      off = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
      bit_offset = TREE_INT_CST_LOW (off);
      gcc_assert (bit_offset % BITS_PER_UNIT == 0);
      
      *delta += bit_offset / BITS_PER_UNIT;
    }

  *base = unshare_expr (ref);
  ar_data.loop = loop;
  ar_data.stmt = stmt;
  ar_data.step = step;
  ar_data.delta = delta;
  return for_each_index (base, idx_analyze_ref, &ar_data);
}
Example #17
tree
ubsan_instrument_division (location_t loc, tree op0, tree op1)
{
  tree t, tt;
  tree type = TREE_TYPE (op0);

  /* At this point both operands should have the same type,
     because they are already converted to RESULT_TYPE.
     Use TYPE_MAIN_VARIANT since typedefs can confuse us.  */
  gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (op0))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (op1)));

  op0 = unshare_expr (op0);
  op1 = unshare_expr (op1);

  if (TREE_CODE (type) == INTEGER_TYPE
      && (flag_sanitize & SANITIZE_DIVIDE))
    t = fold_build2 (EQ_EXPR, boolean_type_node,
		     op1, build_int_cst (type, 0));
  else if (TREE_CODE (type) == REAL_TYPE
	   && (flag_sanitize & SANITIZE_FLOAT_DIVIDE))
    t = fold_build2 (EQ_EXPR, boolean_type_node,
		     op1, build_real (type, dconst0));
  else
    return NULL_TREE;

  /* We check INT_MIN / -1 only for signed types.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && (flag_sanitize & SANITIZE_DIVIDE)
      && !TYPE_UNSIGNED (type))
    {
      tree x;
      tt = fold_build2 (EQ_EXPR, boolean_type_node, unshare_expr (op1),
			build_int_cst (type, -1));
      x = fold_build2 (EQ_EXPR, boolean_type_node, op0,
		       TYPE_MIN_VALUE (type));
      x = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, x, tt);
      t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, x);
    }

  /* If the condition was folded to 0, no need to instrument
     this expression.  */
  if (integer_zerop (t))
    return NULL_TREE;

  /* In case we have a SAVE_EXPR in a conditional context, we need to
     make sure it gets evaluated before the condition.  */
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), unshare_expr (op0), t);
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), unshare_expr (op1), t);
  if (flag_sanitize_undefined_trap_on_error)
    tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_overflow_data", 1, &loc,
				     ubsan_type_descriptor (type), NULL_TREE,
				     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      enum built_in_function bcode
	= (flag_sanitize_recover & SANITIZE_DIVIDE)
	  ? BUILT_IN_UBSAN_HANDLE_DIVREM_OVERFLOW
	  : BUILT_IN_UBSAN_HANDLE_DIVREM_OVERFLOW_ABORT;
      tt = builtin_decl_explicit (bcode);
      op0 = unshare_expr (op0);
      op1 = unshare_expr (op1);
      tt = build_call_expr_loc (loc, tt, 3, data, ubsan_encode_value (op0),
				ubsan_encode_value (op1));
    }
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, void_node);

  return t;
}
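The division instrumenter copies per occurrence: op0 and op1 are unshared once on entry, and each further syntactic appearance in the expression being assembled gets another copy. The kernel of that discipline, abbreviated from the code above:

/* OP1 already appears in T; its second appearance in the INT_MIN / -1
   check must be a distinct tree.  */
t  = fold_build2 (EQ_EXPR, boolean_type_node, op1,
                  build_int_cst (type, 0));
tt = fold_build2 (EQ_EXPR, boolean_type_node, unshare_expr (op1),
                  build_int_cst (type, -1));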
Example #18
tree
ubsan_instrument_shift (location_t loc, enum tree_code code,
			tree op0, tree op1)
{
  tree t, tt = NULL_TREE;
  tree type0 = TREE_TYPE (op0);
  tree type1 = TREE_TYPE (op1);
  if (!INTEGRAL_TYPE_P (type0))
    return NULL_TREE;

  tree op1_utype = unsigned_type_for (type1);
  HOST_WIDE_INT op0_prec = TYPE_PRECISION (type0);
  tree uprecm1 = build_int_cst (op1_utype, op0_prec - 1);

  op0 = unshare_expr (op0);
  op1 = unshare_expr (op1);

  t = fold_convert_loc (loc, op1_utype, op1);
  t = fold_build2 (GT_EXPR, boolean_type_node, t, uprecm1);

  /* If this is not a signed operation, don't perform overflow checks.
     Also punt on bit-fields.  */
  if (TYPE_OVERFLOW_WRAPS (type0)
      || GET_MODE_BITSIZE (TYPE_MODE (type0)) != TYPE_PRECISION (type0)
      || (flag_sanitize & SANITIZE_SHIFT_BASE) == 0)
    ;

  /* For signed x << y, in C99/C11, the following:
     (unsigned) x >> (uprecm1 - y)
     if non-zero, is undefined.  */
  else if (code == LSHIFT_EXPR && flag_isoc99 && cxx_dialect < cxx11)
    {
      tree x = fold_build2 (MINUS_EXPR, op1_utype, uprecm1,
			    fold_convert (op1_utype, unshare_expr (op1)));
      tt = fold_convert_loc (loc, unsigned_type_for (type0), op0);
      tt = fold_build2 (RSHIFT_EXPR, TREE_TYPE (tt), tt, x);
      tt = fold_build2 (NE_EXPR, boolean_type_node, tt,
			build_int_cst (TREE_TYPE (tt), 0));
    }

  /* For signed x << y, in C++11 and later, the following:
     x < 0 || ((unsigned) x >> (uprecm1 - y))
     if > 1, is undefined.  */
  else if (code == LSHIFT_EXPR && cxx_dialect >= cxx11)
    {
      tree x = fold_build2 (MINUS_EXPR, op1_utype, uprecm1,
			    fold_convert (op1_utype, unshare_expr (op1)));
      tt = fold_convert_loc (loc, unsigned_type_for (type0),
			     unshare_expr (op0));
      tt = fold_build2 (RSHIFT_EXPR, TREE_TYPE (tt), tt, x);
      tt = fold_build2 (GT_EXPR, boolean_type_node, tt,
			build_int_cst (TREE_TYPE (tt), 1));
      x = fold_build2 (LT_EXPR, boolean_type_node, unshare_expr (op0),
		       build_int_cst (type0, 0));
      tt = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, x, tt);
    }

  /* If the condition was folded to 0, no need to instrument
     this expression.  */
  if (integer_zerop (t) && (tt == NULL_TREE || integer_zerop (tt)))
    return NULL_TREE;

  /* In case we have a SAVE_EXPR in a conditional context, we need to
     make sure it gets evaluated before the condition.  */
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), unshare_expr (op0), t);

  enum sanitize_code recover_kind = SANITIZE_SHIFT_EXPONENT;
  tree else_t = void_node;
  if (tt)
    {
      if ((flag_sanitize & SANITIZE_SHIFT_EXPONENT) == 0)
	{
	  t = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, t);
	  t = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, t, tt);
	  recover_kind = SANITIZE_SHIFT_BASE;
	}
      else
	{
	  if (flag_sanitize_undefined_trap_on_error
	      || ((!(flag_sanitize_recover & SANITIZE_SHIFT_EXPONENT))
		  == (!(flag_sanitize_recover & SANITIZE_SHIFT_BASE))))
	    t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, tt);
	  else
	    else_t = tt;
	}
    }

  if (flag_sanitize_undefined_trap_on_error)
    tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_shift_data", 1, &loc,
				     ubsan_type_descriptor (type0),
				     ubsan_type_descriptor (type1), NULL_TREE,
				     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);

      enum built_in_function bcode
	= (flag_sanitize_recover & recover_kind)
	  ? BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS
	  : BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS_ABORT;
      tt = builtin_decl_explicit (bcode);
      op0 = unshare_expr (op0);
      op1 = unshare_expr (op1);
      tt = build_call_expr_loc (loc, tt, 3, data, ubsan_encode_value (op0),
				ubsan_encode_value (op1));
      if (else_t != void_node)
	{
	  bcode = (flag_sanitize_recover & SANITIZE_SHIFT_BASE)
		  ? BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS
		  : BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS_ABORT;
	  tree else_tt = builtin_decl_explicit (bcode);
	  op0 = unshare_expr (op0);
	  op1 = unshare_expr (op1);
	  else_tt = build_call_expr_loc (loc, else_tt, 3, data,
					 ubsan_encode_value (op0),
					 ubsan_encode_value (op1));
	  else_t = fold_build3 (COND_EXPR, void_type_node, else_t,
				else_tt, void_node);
	}
    }
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, else_t);

  return t;
}
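Both ubsan instrumenters also lean on the COMPOUND_EXPR trick flagged in their comments: if an operand contains a SAVE_EXPR and the whole expression lands in a conditional context, a copy of the operand is sequenced ahead of the condition so the SAVE_EXPR is evaluated first. In isolation:

/* Evaluate (a copy of) OP0 first; any SAVE_EXPR inside it is thereby
   initialized before the condition T re-reads it.  */
t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), unshare_expr (op0), t);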
Example #19
static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
			       gimple_stmt_iterator *use_stmt_gsi,
			       bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  tree *rhsp, *lhsp;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
	  || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
	 do the propagation to the current statement.  In particular
	 we can end up with a conversion needed for a non-invariant
	 address which we cannot do in a single statement.  */
      if (!single_use_p
	  || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
	      && !is_gimple_min_invariant (def_rhs)))
	return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
	gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
	gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS. 
     ADDR_EXPR will not appear on the LHS.  */
  lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is an INDIRECT_REF using NAME.  If so, 
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == INDIRECT_REF
      && TREE_OPERAND (lhs, 0) == name
      && may_propagate_address_into_dereference (def_rhs, lhs)
      && (lhsp != gimple_assign_lhs_ptr (use_stmt)
	  || useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
					TREE_TYPE (rhs))))
    {
      *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);

      /* Continue propagating into the RHS if this was not the only use.  */
      if (single_use_p)
	return true;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhsp = gimple_assign_rhs1_ptr (use_stmt);
  while (handled_component_p (*rhsp)
	 || TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF
      && TREE_OPERAND (rhs, 0) == name
      && may_propagate_address_into_dereference (def_rhs, rhs))
    {
      *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so, 
     propagate the ADDR_EXPR into the use of NAME and try to
     create a VCE and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF
      && TREE_OPERAND (rhs, 0) == name
      && TYPE_SIZE (TREE_TYPE (rhs))
      && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
      /* Function decls should not be used for VCE either as it could be a
         function descriptor that we want and not the actual function code.  */
      && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
      /* We should not convert volatile loads to non volatile loads. */
      && !TYPE_VOLATILE (TREE_TYPE (rhs))
      && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
      && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
			  TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0)) 
   {
     tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
     new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
     if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
       {
	 /* If we have folded the VIEW_CONVERT_EXPR then the result is only
	    valid if we can replace the whole rhs of the use statement.  */
	 if (rhs != gimple_assign_rhs1 (use_stmt))
	   return false;
	 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, NULL,
					     true, GSI_NEW_STMT);
	 gimple_assign_set_rhs1 (use_stmt, new_rhs);
	 tidy_after_forward_propagate_addr (use_stmt);
	 return true;
       }
     /* If the defining rhs comes from an indirect reference, then do not
        convert into a VIEW_CONVERT_EXPR.  */
     def_rhs_base = TREE_OPERAND (def_rhs, 0);
     while (handled_component_p (def_rhs_base))
       def_rhs_base = TREE_OPERAND (def_rhs_base, 0);
     if (!INDIRECT_REF_P (def_rhs_base))
       {
	 /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
	    reference.  Place it there and fold the thing.  */
	 *rhsp = new_rhs;
	 fold_stmt_inplace (use_stmt);
	 tidy_after_forward_propagate_addr (use_stmt);
	 return true;
       }
   }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do. */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if (TREE_CODE (array_ref) != ARRAY_REF
      || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
      || !integer_zerop (TREE_OPERAND (array_ref, 1)))
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Try to optimize &x[0] p+ C where C is a multiple of the size
     of the elements in X into &x[C/element size].  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = maybe_fold_stmt_addition (gimple_expr_type (use_stmt),
					       array_ref, rhs2);
      if (new_rhs)
	{
	  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
	  use_stmt = gsi_stmt (*use_stmt_gsi);
	  update_stmt (use_stmt);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return true;
	}
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
	 different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
							     use_stmt_gsi);
  return false;
}
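Every replacement in this function copies def_rhs (or its dereferenced operand) before splicing it into use_stmt, because def_rhs still hangs off the defining statement and the very next step is an in-place fold. The recurring two-step, distilled:

/* Splice a private copy into the use, then fold the use in place; the
   defining statement's tree is left untouched.  */
gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
fold_stmt_inplace (use_stmt);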
Example #20
static bool
forward_propagate_addr_expr (tree stmt)
{
  int stmt_loop_depth = bb_for_stmt (stmt)->loop_depth;
  tree name = TREE_OPERAND (stmt, 0);
  use_operand_p imm_use;
  tree use_stmt, lhs, rhs, array_ref;

  /* We require that the SSA_NAME holding the result of the ADDR_EXPR
     be used only once.  That may be overly conservative in that we
     could propagate into multiple uses.  However, that would effectively
     be un-cseing the ADDR_EXPR, which is probably not what we want.  */
  single_imm_use (name, &imm_use, &use_stmt);
  if (!use_stmt)
    return false;

  /* If the use is not in a simple assignment statement, then
     there is nothing we can do.  */
  if (TREE_CODE (use_stmt) != MODIFY_EXPR)
    return false;

  /* If the use is in a deeper loop nest, then we do not want
     to propagate the ADDR_EXPR into the loop as that is likely
     adding expression evaluations into the loop.  */
  if (bb_for_stmt (use_stmt)->loop_depth > stmt_loop_depth)
    return false;

  /* Strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS. 
     ADDR_EXPR will not appear on the LHS.  */
  lhs = TREE_OPERAND (use_stmt, 0);
  while (TREE_CODE (lhs) == COMPONENT_REF || TREE_CODE (lhs) == ARRAY_REF)
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is an INDIRECT_REF using NAME.  If so, 
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == INDIRECT_REF && TREE_OPERAND (lhs, 0) == name)
    {
      /* This should always succeed in creating gimple, so there is
	 no need to save enough state to undo this propagation.  */
      TREE_OPERAND (lhs, 0) = unshare_expr (TREE_OPERAND (stmt, 1));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* Trivial case.  The use statement could be a trivial copy.  We
     go ahead and handle that case here since it's trivial and
     removes the need to run copy-prop before this pass to get
     the best results.  Also note that by handling this case here
     we can catch some cascading effects, ie the single use is
     in a copy, and the copy is used later by a single INDIRECT_REF
     for example.  */
  if (TREE_CODE (lhs) == SSA_NAME && TREE_OPERAND (use_stmt, 1) == name)
    {
      TREE_OPERAND (use_stmt, 1) = unshare_expr (TREE_OPERAND (stmt, 1));
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = TREE_OPERAND (use_stmt, 1);
  while (TREE_CODE (rhs) == COMPONENT_REF
	 || TREE_CODE (rhs) == ARRAY_REF
	 || TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so, 
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF && TREE_OPERAND (rhs, 0) == name)
    {
      /* This should always succeed in creating gimple, so there is
         no need to save enough state to undo this propagation.  */
      TREE_OPERAND (rhs, 0) = unshare_expr (TREE_OPERAND (stmt, 1));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (TREE_OPERAND (stmt, 1), 0);
  if (TREE_CODE (array_ref) != ARRAY_REF
      || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
      || !integer_zerop (TREE_OPERAND (array_ref, 1)))
    return false;

  /* If the use of the ADDR_EXPR is not a PLUS_EXPR, there
     is nothing to do. */
  if (TREE_CODE (rhs) != PLUS_EXPR)
    return false;

  /* Try to optimize &x[0] + C where C is a multiple of the size
     of the elements in X into &x[C/element size].  */
  if (TREE_OPERAND (rhs, 0) == name
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
    {
      tree orig = unshare_expr (rhs);
      TREE_OPERAND (rhs, 0) = unshare_expr (TREE_OPERAND (stmt, 1));

      /* If folding succeeds, then we have just exposed new variables
	 in USE_STMT which will need to be renamed.  If folding fails,
	 then we need to put everything back the way it was.  */
      if (fold_stmt_inplace (use_stmt))
	{
	  tidy_after_forward_propagate_addr (use_stmt);
	  return true;
	}
      else
	{
	  TREE_OPERAND (use_stmt, 1) = orig;
	  update_stmt (use_stmt);
	  return false;
	}
    }

  /* Try to optimize &x[0] + OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_OPERAND (rhs, 0) == name
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
	 different type than their operands.  */
      && lang_hooks.types_compatible_p (TREE_TYPE (name), TREE_TYPE (rhs)))
    {
      tree offset_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
      return forward_propagate_addr_into_variable_array_index (offset_stmt, lhs,
							       stmt, use_stmt);
    }
	      
  /* Same as the previous case, except the operands of the PLUS_EXPR
     were reversed.  */
  if (TREE_OPERAND (rhs, 1) == name
      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
	 different type than their operands.  */
      && lang_hooks.types_compatible_p (TREE_TYPE (name), TREE_TYPE (rhs)))
    {
      tree offset_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
      return forward_propagate_addr_into_variable_array_index (offset_stmt, lhs,
							       stmt, use_stmt);
    }
  return false;
}
Example #21
static int
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  int did_something = 0;

  do {
    tree tmp = NULL_TREE;
    tree cond = gimple_assign_rhs1 (stmt);
    tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
    gimple def_stmt;
    bool single_use0_p = false, single_use1_p = false;

    /* We can do tree combining on SSA_NAME and comparison expressions.  */
    if (COMPARISON_CLASS_P (cond)
	&& TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
      {
	/* For comparisons use the first operand, that is likely to
	   simplify comparisons against constants.  */
	name = TREE_OPERAND (cond, 0);
	def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
	if (def_stmt && can_propagate_from (def_stmt))
	  {
	    tree op1 = TREE_OPERAND (cond, 1);
	    rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
	    tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
					  rhs0, op1, !single_use0_p);
	  }
	/* If that wasn't successful, try the second operand.  */
	if (tmp == NULL_TREE
	    && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
	  {
	    tree op0 = TREE_OPERAND (cond, 0);
	    name = TREE_OPERAND (cond, 1);
	    def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
	    if (!def_stmt || !can_propagate_from (def_stmt))
	      return did_something;

	    rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
	    tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
					  op0, rhs1, !single_use1_p);
	  }
	/* If that wasn't successful either, try both operands.  */
	if (tmp == NULL_TREE
	    && rhs0 != NULL_TREE
	    && rhs1 != NULL_TREE)
	  tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
					rhs0, fold_convert (TREE_TYPE (rhs0),
							    rhs1),
					!(single_use0_p && single_use1_p));
      }
    else if (TREE_CODE (cond) == SSA_NAME)
      {
	name = cond;
	def_stmt = get_prop_source_stmt (name, true, NULL);
	if (!def_stmt || !can_propagate_from (def_stmt))
	  return did_something;

	rhs0 = gimple_assign_rhs1 (def_stmt);
	tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs0,
				      build_int_cst (TREE_TYPE (rhs0), 0),
				      false);
      }

    if (tmp)
      {
	if (dump_file && tmp)
	  {
	    fprintf (dump_file, "  Replaced '");
	    print_generic_expr (dump_file, cond, 0);
	    fprintf (dump_file, "' with '");
	    print_generic_expr (dump_file, tmp, 0);
	    fprintf (dump_file, "'\n");
	  }

	gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
	stmt = gsi_stmt (*gsi_p);
	update_stmt (stmt);

	/* Remove defining statements.  */
	remove_prop_source_from_use (name, NULL);

	if (is_gimple_min_invariant (tmp))
	  did_something = 2;
	else if (did_something == 0)
	  did_something = 1;

	/* Continue combining.  */
	continue;
      }

    break;
  } while (1);

  return did_something;
}
static bool
forward_propagate_addr_into_variable_array_index (tree offset,
						  tree def_rhs,
						  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree tmp;

  tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one. In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861). */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
	  && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
	return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
	return false;

      /* The RHS of the statement which defines OFFSET must be a
	 multiplication of an object by the size of the array elements.
	 This implicitly verifies that the size of the array elements
	 is constant.  */
     if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
       {
	 /* The first operand to the MULT_EXPR is the desired index.  */
	 index = gimple_assign_rhs1 (offset_def);
       }
     /* If we have idx * tunit + CST * tunit re-associate that.  */
     else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
	       || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
	      && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
					       gimple_assign_rhs2 (offset_def),
					       tunit)) != NULL_TREE)
       {
	 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
	 if (is_gimple_assign (offset_def2)
	     && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
	     && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
	     && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
	   {
	     index = fold_build2 (gimple_assign_rhs_code (offset_def),
				  TREE_TYPE (offset),
				  gimple_assign_rhs1 (offset_def2), tmp);
	   }
	 else
	   return false;
       }
     else
	return false;
    }

  /* Replace the pointer addition with array indexing.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
				    true, GSI_SAME_STMT);
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
  use_stmt = gsi_stmt (*use_stmt_gsi);
  TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
    = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
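In both condition combiners (this one and forward_propagate_into_gimple_cond in Example #25), the combined tree tmp comes out of the fold machinery and may share structure with trees still referenced by other statements, so an unshared copy is what gets installed. The install step, isolated:

/* Install an unshared copy of the combined condition, then rebuild
   the statement.  */
gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
update_stmt (gsi_stmt (*gsi_p));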
Example #23
static void
mf_build_check_statement_for (tree base, tree limit,
                              block_stmt_iterator *instr_bsi,
                              location_t *locus, tree dirflag)
{
  tree_stmt_iterator head, tsi;
  block_stmt_iterator bsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = bb_for_stmt (bsi_stmt (*instr_bsi));
  bsi = *instr_bsi;
  bsi_prev (&bsi);
  if (! bsi_end_p (bsi))
    e = split_block (cond_bb, bsi_stmt (bsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Build our local variables.  */
  mf_elem = create_tmp_var (mf_cache_structptr_type, "__mf_elem");
  mf_base = create_tmp_var (mf_uintptr_type, "__mf_base");
  mf_limit = create_tmp_var (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  t = build2 (MODIFY_EXPR, void_type_node, mf_base,
              convert (mf_uintptr_type, unshare_expr (base)));
  SET_EXPR_LOCUS (t, locus);
  gimplify_to_stmt_list (&t);
  head = tsi_start (t);
  tsi = tsi_last (t);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = build2 (MODIFY_EXPR, void_type_node, mf_limit,
              convert (mf_uintptr_type, unshare_expr (limit)));
  SET_EXPR_LOCUS (t, locus);
  gimplify_to_stmt_list (&t);
  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              (flag_mudflap_threads ? mf_cache_shift_decl : mf_cache_shift_decl_l));
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              (flag_mudflap_threads ? mf_cache_mask_decl : mf_cache_mask_decl_l));
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = build2 (MODIFY_EXPR, void_type_node, mf_elem, t);
  SET_EXPR_LOCUS (t, locus);
  gimplify_to_stmt_list (&t);
  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem_high < __mf_limit))
        {
          __mf_check ();
          ... and only if single-threaded:
          __mf_lookup_shift_1 = f...;
          __mf_lookup_mask_l = ...;
        }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low  > __mf_base'.  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
        1) u <--  '__mf_elem->high'
        2) v <--  '__mf_limit'.

     Then build 'u <-- (u < v)'.  */

  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TREE_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  cond = create_tmp_var (boolean_type_node, "__mf_unlikely_cond");
  t = build2 (MODIFY_EXPR, boolean_type_node, cond, t);
  gimplify_to_stmt_list (&t);
  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  t = build3 (COND_EXPR, void_type_node, cond,
              build1 (GOTO_EXPR, void_type_node, tree_block_label (then_bb)),
              build1 (GOTO_EXPR, void_type_node, tree_block_label (join_bb)));
  SET_EXPR_LOCUS (t, locus);
  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem_high < __mf_limit))

     The lowered GIMPLE tree representing this code is in the statement
     list starting at 'head'.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  bsi = bsi_last (cond_bb);
  for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi))
    bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_CONTINUE_LINKING);

  /*  Now build up the body of the cache-miss handling:

     __mf_check();
     refresh *_l vars.

     This is the body of the conditional.  */
  
  u = tree_cons (NULL_TREE,
                 mf_file_function_line_tree (locus == NULL ? UNKNOWN_LOCATION
                                             : *locus),
                 NULL_TREE);
  u = tree_cons (NULL_TREE, dirflag, u);
  /* NB: we pass the overall [base..limit] range to mf_check.  */
  u = tree_cons (NULL_TREE, 
                 fold_build2 (PLUS_EXPR, integer_type_node,
			      fold_build2 (MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
			      integer_one_node),
                 u);
  u = tree_cons (NULL_TREE, mf_base, u);
  t = build_function_call_expr (mf_check_fndecl, u);
  gimplify_to_stmt_list (&t);
  head = tsi_start (t);
  tsi = tsi_last (t);

  if (! flag_mudflap_threads)
    {
      t = build2 (MODIFY_EXPR, void_type_node,
                  mf_cache_shift_decl_l, mf_cache_shift_decl);
      tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

      t = build2 (MODIFY_EXPR, void_type_node,
                  mf_cache_mask_decl_l, mf_cache_mask_decl);
      tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
    }

  /* Insert the check code in the THEN block.  */
  bsi = bsi_start (then_bb);
  for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi))
    bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_CONTINUE_LINKING);

  *instr_bsi = bsi_start (join_bb);
  bsi_next (instr_bsi);
}
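Note what gets unshared in the mudflap check builder: only base and limit, the caller-supplied address expressions, since the caller still owns those trees and the same expressions are emitted again at the instrumented access itself; everything built locally is fresh and needs no copy. The two guarded conversions, abbreviated from the function above:

/* Convert private copies of the caller's trees; BASE and LIMIT remain
   intact at the original access site.  */
t = build2 (MODIFY_EXPR, void_type_node, mf_base,
            convert (mf_uintptr_type, unshare_expr (base)));
t = build2 (MODIFY_EXPR, void_type_node, mf_limit,
            convert (mf_uintptr_type, unshare_expr (limit)));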
Example #24
static tree
expand_cilk_sync (void)
{
  tree frame = cfun->cilk_frame_decl;

  /* Cilk_sync is converted to the following code:

     sf.pedigree = sf.worker->pedigree;
     if (frame.flags & CILK_FRAME_UNSYNCHED)
     {
        __cilkrts_save_fp_state (&sf);
        if (!builtin_setjmp (sf.ctx))
	    __cilkrts_sync (&sf); 
	else 
	   if (sf.flags & CILK_FRAME_EXCEPTING) 
	     __cilkrts_rethrow (&sf); 
      }
      sf.worker->pedigree.rank = sf.worker->pedigree.rank + 1;  */

  tree flags = cilk_dot (frame, CILK_TI_FRAME_FLAGS, false);
  
  tree unsynched = fold_build2 (BIT_AND_EXPR, TREE_TYPE (flags), flags,
				build_int_cst (TREE_TYPE (flags),
					       CILK_FRAME_UNSYNCHED));

  unsynched = fold_build2 (NE_EXPR, TREE_TYPE (unsynched), unsynched,
			   build_int_cst (TREE_TYPE (unsynched), 0));

  tree frame_addr = build1 (ADDR_EXPR, cilk_frame_ptr_type_decl, frame);

  /* Check if exception (0x10) bit is set in the sf->flags.  */
  tree except_flag = fold_build2 (BIT_AND_EXPR, TREE_TYPE (flags), flags,
				  build_int_cst (TREE_TYPE (flags),
						 CILK_FRAME_EXCEPTING));
  except_flag = fold_build2 (NE_EXPR, TREE_TYPE (except_flag), except_flag,
			     build_int_cst (TREE_TYPE (except_flag), 0));

  /* If the exception flag is set then call the __cilkrts_rethrow (&sf).  */
  tree except_cond = fold_build3 (COND_EXPR, void_type_node, except_flag,
				  build_call_expr (cilk_rethrow_fndecl, 1,
						   frame_addr),
				  build_empty_stmt (EXPR_LOCATION (unsynched)));
  
  tree sync_expr = build_call_expr (cilk_sync_fndecl, 1, frame_addr);
  tree setjmp_expr = cilk_call_setjmp (frame);
  setjmp_expr = fold_build2 (EQ_EXPR, TREE_TYPE (setjmp_expr), setjmp_expr,
			     build_int_cst (TREE_TYPE (setjmp_expr), 0));
  
  setjmp_expr = fold_build3 (COND_EXPR, void_type_node, setjmp_expr,
			     sync_expr, except_cond);
  tree sync_list = alloc_stmt_list ();
  append_to_statement_list (build_call_expr (cilk_save_fp_fndecl, 1,
					     frame_addr), &sync_list);
  append_to_statement_list (setjmp_expr, &sync_list);
  tree sync = fold_build3 (COND_EXPR, void_type_node, unsynched, sync_list,
			   build_empty_stmt (EXPR_LOCATION (unsynched)));
  tree parent_pedigree = cilk_dot (frame, CILK_TI_FRAME_PEDIGREE, false);
  tree worker = cilk_dot (frame, CILK_TI_FRAME_WORKER, false);
  tree worker_pedigree = cilk_arrow (worker, CILK_TI_WORKER_PEDIGREE, false);
  tree assign_pedigree = fold_build2 (MODIFY_EXPR, void_type_node,
				      parent_pedigree, worker_pedigree);
  tree w_ped_rank = cilk_dot (unshare_expr (worker_pedigree), 
			      CILK_TI_PEDIGREE_RANK, false);
  tree incr_ped_rank = fold_build2 (PLUS_EXPR, TREE_TYPE (w_ped_rank),
				    w_ped_rank,
				    build_one_cst (TREE_TYPE (w_ped_rank)));
  incr_ped_rank = fold_build2 (MODIFY_EXPR, void_type_node, w_ped_rank,
			       incr_ped_rank);
  tree ret_sync_exp = alloc_stmt_list ();
  append_to_statement_list (assign_pedigree, &ret_sync_exp);
  append_to_statement_list (sync, &ret_sync_exp);
  append_to_statement_list (incr_ped_rank, &ret_sync_exp);
  return ret_sync_exp;
}
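Here worker_pedigree feeds two consumers: it is stored whole into the parent pedigree, and its rank field is then read and written for the increment. The second consumer receives a copy so the MODIFY_EXPR keeps sole ownership of the original tree; the relevant pair is:

tree assign_pedigree = fold_build2 (MODIFY_EXPR, void_type_node,
                                    parent_pedigree, worker_pedigree);
tree w_ped_rank = cilk_dot (unshare_expr (worker_pedigree),
                            CILK_TI_PEDIGREE_RANK, false);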
Example #25
static void
mf_build_check_statement_for (tree base, tree limit,
                              gimple_stmt_iterator *instr_gsi,
                              location_t location, tree dirflag)
{
  gimple_stmt_iterator gsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;
  gimple g;
  gimple_seq seq, stmts;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
  gsi = *instr_gsi;
  gsi_prev (&gsi);
  if (! gsi_end_p (gsi))
    e = split_block (cond_bb, gsi_stmt (gsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (then_bb, cond_bb->loop_father);

  /* Build our local variables.  */
  mf_elem = create_tmp_reg (mf_cache_structptr_type, "__mf_elem");
  mf_base = create_tmp_reg (mf_uintptr_type, "__mf_base");
  mf_limit = create_tmp_reg (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  seq = NULL;
  t = fold_convert_loc (location, mf_uintptr_type,
			unshare_expr (base));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_base, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = fold_convert_loc (location, mf_uintptr_type,
			unshare_expr (limit));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_limit, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              flag_mudflap_threads ? mf_cache_shift_decl
	       : mf_cache_shift_decl_l);
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              flag_mudflap_threads ? mf_cache_mask_decl
	       : mf_cache_mask_decl_l);
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_elem, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem_high < __mf_limit))
        {
          __mf_check ();
          ... and only if single-threaded:
          __mf_lookup_shift_1 = f...;
          __mf_lookup_mask_l = ...;
        }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low  > __mf_base'.  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
        1) u <--  '__mf_elem->high'
        2) v <--  '__mf_limit'.

     Then build 'u <-- (u < v)'.  */

  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  cond = create_tmp_reg (boolean_type_node, "__mf_unlikely_cond");
  g = gimple_build_assign  (cond, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
			 NULL_TREE);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem_high < __mf_limit))

     The lowered GIMPLE tree representing this code is in the statement
     list starting at 'head'.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /*  Now build up the body of the cache-miss handling:

     __mf_check();
     refresh *_l vars.

     This is the body of the conditional.  */

  seq = NULL;
  /* u is a string, so it is already a gimple value.  */
  u = mf_file_function_line_tree (location);
  /* NB: we pass the overall [base..limit] range to mf_check.  */
  v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
		   fold_build2_loc (location,
				MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
		   build_int_cst (mf_uintptr_type, 1));
  v = force_gimple_operand (v, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
  gimple_seq_add_stmt (&seq, g);

  if (! flag_mudflap_threads)
    {
      if (stmt_ends_bb_p (g))
	{
	  gsi = gsi_start_bb (then_bb);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	  e = split_block (then_bb, g);
	  then_bb = e->dest;
	  seq = NULL;
	}

      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
      gimple_seq_add_stmt (&seq, g);

      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
      gimple_seq_add_stmt (&seq, g);
    }

  /* Insert the check code in the THEN block.  */
  gsi = gsi_start_bb (then_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  *instr_gsi = gsi_start_bb (join_bb);
}
static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  int did_something = 0;
  location_t loc = gimple_location (stmt);

  do {
    tree tmp = NULL_TREE;
    tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
    gimple def_stmt;
    bool single_use0_p = false, single_use1_p = false;
    enum tree_code code = gimple_cond_code (stmt);

    /* We can do tree combining on SSA_NAME and comparison expressions.  */
    if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison
        && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
      {
	/* For comparisons use the first operand, that is likely to
	   simplify comparisons against constants.  */
	name = gimple_cond_lhs (stmt);
	def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
	if (def_stmt && can_propagate_from (def_stmt))
	  {
	    tree op1 = gimple_cond_rhs (stmt);
	    rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
	    tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
					  op1, !single_use0_p);
	  }
	/* If that wasn't successful, try the second operand.  */
	if (tmp == NULL_TREE
	    && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
	  {
	    tree op0 = gimple_cond_lhs (stmt);
	    name = gimple_cond_rhs (stmt);
	    def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
	    if (!def_stmt || !can_propagate_from (def_stmt))
	      return did_something;

	    rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
	    tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
					  rhs1, !single_use1_p);
	  }
	/* If that wasn't successful either, try both operands.  */
	if (tmp == NULL_TREE
	    && rhs0 != NULL_TREE
	    && rhs1 != NULL_TREE)
	  tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
					fold_convert_loc (loc,
							  TREE_TYPE (rhs0),
							  rhs1),
					!(single_use0_p && single_use1_p));
      }

    if (tmp)
      {
	if (dump_file && tmp)
	  {
            tree cond = build2 (gimple_cond_code (stmt),
				boolean_type_node,
				gimple_cond_lhs (stmt),
				gimple_cond_rhs (stmt));
	    fprintf (dump_file, "  Replaced '");
	    print_generic_expr (dump_file, cond, 0);
	    fprintf (dump_file, "' with '");
	    print_generic_expr (dump_file, tmp, 0);
	    fprintf (dump_file, "'\n");
	  }

        gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
	update_stmt (stmt);

	/* Remove defining statements.  */
	remove_prop_source_from_use (name, NULL);

	if (is_gimple_min_invariant (tmp))
	  did_something = 2;
	else if (did_something == 0)
	  did_something = 1;

	/* Continue combining.  */
	continue;
      }

    break;
  } while (1);

  return did_something;
}
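For reference, the helper all of these examples call is itself tiny: unshare_expr is a walk_tree pass whose callback replaces each copyable node with a duplicate. A sketch of the gimplify.c definition (the callback's handling of special nodes has varied a little across releases):

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}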