Example 1
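/* Genericize an EH_SPEC_BLOCK: rewrite *STMT_P as an EH filter whose
   failure action is a call through call_unexpected_node, and set
   TREE_NO_WARNING on the result so later passes do not warn about the
   compiler-generated code.  */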
static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
Example 2
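/* Read the tree_base flag bits of EXPR from bitpack BP.  The layout
   mirrors the writer, pack_ts_base_value_fields (Example 6); flags that
   do not apply to this kind of node are still read and discarded so the
   bitpack position stays in sync.  */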
static inline void
unpack_ts_base_value_fields (struct bitpack_d *bp, tree expr)
{
  /* Note that the code for EXPR has already been unpacked to create EXPR in
     streamer_alloc_tree.  */
  if (!TYPE_P (expr))
    {
      TREE_SIDE_EFFECTS (expr) = (unsigned) bp_unpack_value (bp, 1);
      TREE_CONSTANT (expr) = (unsigned) bp_unpack_value (bp, 1);
      TREE_READONLY (expr) = (unsigned) bp_unpack_value (bp, 1);

      /* TREE_PUBLIC is used on types to indicate that the type
	 has a TYPE_CACHED_VALUES vector.  This is not streamed out,
	 so we skip it here.  */
      TREE_PUBLIC (expr) = (unsigned) bp_unpack_value (bp, 1);
    }
  else
    bp_unpack_value (bp, 4);
  TREE_ADDRESSABLE (expr) = (unsigned) bp_unpack_value (bp, 1);
  TREE_THIS_VOLATILE (expr) = (unsigned) bp_unpack_value (bp, 1);
  if (DECL_P (expr))
    DECL_UNSIGNED (expr) = (unsigned) bp_unpack_value (bp, 1);
  else if (TYPE_P (expr))
    TYPE_UNSIGNED (expr) = (unsigned) bp_unpack_value (bp, 1);
  else
    bp_unpack_value (bp, 1);
  TREE_ASM_WRITTEN (expr) = (unsigned) bp_unpack_value (bp, 1);
  if (TYPE_P (expr))
    TYPE_ARTIFICIAL (expr) = (unsigned) bp_unpack_value (bp, 1);
  else
    TREE_NO_WARNING (expr) = (unsigned) bp_unpack_value (bp, 1);
  TREE_NOTHROW (expr) = (unsigned) bp_unpack_value (bp, 1);
  TREE_STATIC (expr) = (unsigned) bp_unpack_value (bp, 1);
  if (TREE_CODE (expr) != TREE_BINFO)
    TREE_PRIVATE (expr) = (unsigned) bp_unpack_value (bp, 1);
  else
    bp_unpack_value (bp, 1);
  TREE_PROTECTED (expr) = (unsigned) bp_unpack_value (bp, 1);
  TREE_DEPRECATED (expr) = (unsigned) bp_unpack_value (bp, 1);
  if (TYPE_P (expr))
    {
      if (AGGREGATE_TYPE_P (expr))
	TYPE_REVERSE_STORAGE_ORDER (expr) = (unsigned) bp_unpack_value (bp, 1);
      else
	TYPE_SATURATING (expr) = (unsigned) bp_unpack_value (bp, 1);
      TYPE_ADDR_SPACE (expr) = (unsigned) bp_unpack_value (bp, 8);
    }
  else if (TREE_CODE (expr) == BIT_FIELD_REF || TREE_CODE (expr) == MEM_REF)
    {
      REF_REVERSE_STORAGE_ORDER (expr) = (unsigned) bp_unpack_value (bp, 1);
      bp_unpack_value (bp, 8);
    }
  else if (TREE_CODE (expr) == SSA_NAME)
    {
      SSA_NAME_IS_DEFAULT_DEF (expr) = (unsigned) bp_unpack_value (bp, 1);
      bp_unpack_value (bp, 8);
    }
  else
    bp_unpack_value (bp, 9);
}
Example 3
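/* Gimplify an EXPR_STMT node.  Since gimplification may nullify the
   statement, warn beforehand about statements with no effect, unless
   TREE_NO_WARNING is set on the statement.  */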
static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && (extra_warnings || warn_unused_value))
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (0, "statement with no effect");
	}
      else if (warn_unused_value)
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
Example 4
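/* Build a new VAR_DECL named NAME of type TYPE at VAR's source
   location, copying VAR's flags (including TREE_NO_WARNING), context
   and attributes.  */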
tree
copy_var_decl (tree var, tree name, tree type)
{
  tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);

  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
  DECL_CONTEXT (copy) = DECL_CONTEXT (var);
  TREE_NO_WARNING (copy) = TREE_NO_WARNING (var);
  TREE_USED (copy) = 1;
  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
  DECL_ATTRIBUTES (copy) = DECL_ATTRIBUTES (var);

  return copy;
}
Example 5
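/* Create a temporary variable of TYPE, optionally named after PREFIX.
   Anonymous temporaries are marked TREE_NO_WARNING so no diagnostics
   are issued for them.  */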
tree
gfc_create_var_np (tree type, const char *prefix)
{
  tree t;
  
  t = create_tmp_var_raw (type, prefix);

  /* No warnings for anonymous variables.  */
  if (prefix == NULL)
    TREE_NO_WARNING (t) = 1;

  return t;
}
Example 6
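/* Write the tree_base flag bits of EXPR into bitpack BP; the reader is
   unpack_ts_base_value_fields (Example 2).  Flags that are meaningless
   for this kind of node are streamed as zero so the bit layout stays
   fixed.  */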
static void
pack_ts_base_value_fields (struct bitpack_d *bp, tree expr)
{
  bp_pack_value (bp, TREE_CODE (expr), 16);
  if (!TYPE_P (expr))
    {
      bp_pack_value (bp, TREE_SIDE_EFFECTS (expr), 1);
      bp_pack_value (bp, TREE_CONSTANT (expr), 1);
      bp_pack_value (bp, TREE_READONLY (expr), 1);

      /* TREE_PUBLIC is used on types to indicate that the type
	 has a TYPE_CACHED_VALUES vector.  This is not streamed out,
	 so we skip it here.  */
      bp_pack_value (bp, TREE_PUBLIC (expr), 1);
    }
  else
    bp_pack_value (bp, 0, 4);
  bp_pack_value (bp, TREE_ADDRESSABLE (expr), 1);
  bp_pack_value (bp, TREE_THIS_VOLATILE (expr), 1);
  if (DECL_P (expr))
    bp_pack_value (bp, DECL_UNSIGNED (expr), 1);
  else if (TYPE_P (expr))
    bp_pack_value (bp, TYPE_UNSIGNED (expr), 1);
  else
    bp_pack_value (bp, 0, 1);
  /* We write debug info two times, do not confuse the second one.
     The only relevant TREE_ASM_WRITTEN use is on SSA names.  */
  bp_pack_value (bp, (TREE_CODE (expr) != SSA_NAME
		      ? 0 : TREE_ASM_WRITTEN (expr)), 1);
  if (TYPE_P (expr))
    bp_pack_value (bp, TYPE_ARTIFICIAL (expr), 1);
  else
    bp_pack_value (bp, TREE_NO_WARNING (expr), 1);
  bp_pack_value (bp, TREE_NOTHROW (expr), 1);
  bp_pack_value (bp, TREE_STATIC (expr), 1);
  if (TREE_CODE (expr) != TREE_BINFO)
    bp_pack_value (bp, TREE_PRIVATE (expr), 1);
  bp_pack_value (bp, TREE_PROTECTED (expr), 1);
  bp_pack_value (bp, TREE_DEPRECATED (expr), 1);
  if (TYPE_P (expr))
    {
      bp_pack_value (bp, TYPE_SATURATING (expr), 1);
      bp_pack_value (bp, TYPE_ADDR_SPACE (expr), 8);
    }
  else if (TREE_CODE (expr) == SSA_NAME)
    bp_pack_value (bp, SSA_NAME_IS_DEFAULT_DEF (expr), 1);
  else
    bp_pack_value (bp, 0, 1);
}
Example 7
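/* Finish a catch block.  In the handler of a constructor's or
   destructor's function-try-block, emit the implicit rethrow and mark
   it TREE_NO_WARNING so no diagnostics are issued for it.  */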
void
expand_end_catch_block (void)
{
  if (! doing_eh ())
    return;

  /* The exception being handled is rethrown if control reaches the end of
     a handler of the function-try-block of a constructor or destructor.  */
  if (in_function_try_handler
      && (DECL_CONSTRUCTOR_P (current_function_decl)
	  || DECL_DESTRUCTOR_P (current_function_decl)))
    {
      tree rethrow = build_throw (NULL_TREE);
      TREE_NO_WARNING (rethrow) = true;
      finish_expr_stmt (rethrow);
    }
}
Example 8
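/* Instrument OP, of dynamic type TYPE, with an IFN_UBSAN_VPTR check
   for the undefined behavior sanitizer.  For pointer downcasts the
   vptr is only loaded when OP is non-null; that compiler-generated
   comparison gets TREE_NO_WARNING so it does not trigger
   -Wnonnull-compare.  */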
static tree
cp_ubsan_instrument_vptr (location_t loc, tree op, tree type, bool is_addr,
			  enum ubsan_null_ckind ckind)
{
  type = TYPE_MAIN_VARIANT (type);
  const char *mangled = mangle_type_string (type);
  hashval_t str_hash1 = htab_hash_string (mangled);
  hashval_t str_hash2 = iterative_hash (mangled, strlen (mangled), 0);
  tree str_hash = wide_int_to_tree (uint64_type_node,
				    wi::uhwi (((uint64_t) str_hash1 << 32)
					      | str_hash2, 64));
  if (!is_addr)
    op = build_fold_addr_expr_loc (loc, op);
  op = save_expr (op);
  tree vptr = fold_build3_loc (loc, COMPONENT_REF,
			       TREE_TYPE (TYPE_VFIELD (type)),
			       build_fold_indirect_ref_loc (loc, op),
			       TYPE_VFIELD (type), NULL_TREE);
  vptr = fold_convert_loc (loc, pointer_sized_int_node, vptr);
  vptr = fold_convert_loc (loc, uint64_type_node, vptr);
  if (ckind == UBSAN_DOWNCAST_POINTER)
    {
      tree cond = build2_loc (loc, NE_EXPR, boolean_type_node, op,
			      build_zero_cst (TREE_TYPE (op)));
      /* This is a compiler generated comparison, don't emit
	 e.g. -Wnonnull-compare warning for it.  */
      TREE_NO_WARNING (cond) = 1;
      vptr = build3_loc (loc, COND_EXPR, uint64_type_node, cond,
			 vptr, build_int_cst (uint64_type_node, 0));
    }
  tree ti_decl = get_tinfo_decl (type);
  mark_used (ti_decl);
  tree ptype = build_pointer_type (type);
  tree call
    = build_call_expr_internal_loc (loc, IFN_UBSAN_VPTR,
				    void_type_node, 5, op, vptr, str_hash,
				    build_address (ti_decl),
				    build_int_cst (ptype, ckind));
  TREE_SIDE_EFFECTS (call) = 1;
  return fold_build2 (COMPOUND_EXPR, TREE_TYPE (op), call, op);
}
Example 9
File: cvt.c  Project: h4ck3rm1k3/gcc
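/* Convert EXPR to void in the context described by the string IMPLICIT
   (NULL for an explicit cast).  Diagnoses values that are computed but
   never used, except where TREE_NO_WARNING is set on the expression.  */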
tree
convert_to_void (tree expr, const char *implicit, tsubst_flags_t complain)
{
  if (expr == error_mark_node
      || TREE_TYPE (expr) == error_mark_node)
    return error_mark_node;
  if (!TREE_TYPE (expr))
    return expr;
  if (invalid_nonstatic_memfn_p (expr, complain))
    return error_mark_node;
  if (TREE_CODE (expr) == PSEUDO_DTOR_EXPR)
    {
      if (complain & tf_error)
        error ("pseudo-destructor is not called");
      return error_mark_node;
    }
  if (VOID_TYPE_P (TREE_TYPE (expr)))
    return expr;
  switch (TREE_CODE (expr))
    {
    case COND_EXPR:
      {
	/* The two parts of a cond expr might be separate lvalues.  */
	tree op1 = TREE_OPERAND (expr,1);
	tree op2 = TREE_OPERAND (expr,2);
	tree new_op1 = convert_to_void
	  (op1, (implicit && !TREE_SIDE_EFFECTS (op2)
		 ? "second operand of conditional" : NULL), complain);
	tree new_op2 = convert_to_void
	  (op2, (implicit && !TREE_SIDE_EFFECTS (op1)
		 ? "third operand of conditional" : NULL), complain);

	expr = build3 (COND_EXPR, TREE_TYPE (new_op1),
		       TREE_OPERAND (expr, 0), new_op1, new_op2);
	break;
      }

    case COMPOUND_EXPR:
      {
	/* The second part of a compound expr contains the value.  */
	tree op1 = TREE_OPERAND (expr,1);
	tree new_op1 = convert_to_void
	  (op1, (implicit && !TREE_NO_WARNING (expr)
		 ? "right-hand operand of comma" : NULL), complain);

	if (new_op1 != op1)
	  {
	    tree t = build2 (COMPOUND_EXPR, TREE_TYPE (new_op1),
			     TREE_OPERAND (expr, 0), new_op1);
	    expr = t;
	  }

	break;
      }

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
      /* These have already decayed to rvalue.  */
      break;

    case CALL_EXPR:   /* We have a special meaning for volatile void fn().  */
      break;

    case INDIRECT_REF:
      {
	tree type = TREE_TYPE (expr);
	int is_reference = TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0)))
			   == REFERENCE_TYPE;
	int is_volatile = TYPE_VOLATILE (type);
	int is_complete = COMPLETE_TYPE_P (complete_type (type));

	/* Can't load the value if we don't know the type.  */
	if (is_volatile && !is_complete)
          {
            if (complain & tf_warning)
              warning (0, "object of incomplete type %qT will not be accessed in %s",
                       type, implicit ? implicit : "void context");
          }
	/* Don't load the value if this is an implicit dereference, or if
	   the type needs to be handled by ctors/dtors.  */
	else if (is_volatile && (is_reference || TREE_ADDRESSABLE (type)))
          {
            if (complain & tf_warning)
              warning (0, "object of type %qT will not be accessed in %s",
                       TREE_TYPE (TREE_OPERAND (expr, 0)),
                       implicit ? implicit : "void context");
          }
	if (is_reference || !is_volatile || !is_complete || TREE_ADDRESSABLE (type))
	  expr = TREE_OPERAND (expr, 0);

	break;
      }

    case VAR_DECL:
      {
	/* External variables might be incomplete.  */
	tree type = TREE_TYPE (expr);
	int is_complete = COMPLETE_TYPE_P (complete_type (type));

	if (TYPE_VOLATILE (type) && !is_complete && (complain & tf_warning))
	  warning (0, "object %qE of incomplete type %qT will not be accessed in %s",
		   expr, type, implicit ? implicit : "void context");
	break;
      }

    case TARGET_EXPR:
      /* Don't bother with the temporary object returned from a function if
	 we don't use it and don't need to destroy it.  We'll still
	 allocate space for it in expand_call or declare_return_variable,
	 but we don't need to track it through all the tree phases.  */
      if (TARGET_EXPR_IMPLICIT_P (expr)
	  && TYPE_HAS_TRIVIAL_DESTRUCTOR (TREE_TYPE (expr)))
	{
	  tree init = TARGET_EXPR_INITIAL (expr);
	  if (TREE_CODE (init) == AGGR_INIT_EXPR
	      && !AGGR_INIT_VIA_CTOR_P (init))
	    {
	      tree fn = AGGR_INIT_EXPR_FN (init);
	      expr = build_call_array (TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))),
				       fn,
				       aggr_init_expr_nargs (init),
				       AGGR_INIT_EXPR_ARGP (init));
	    }
	}
      break;

    default:;
    }
  {
    tree probe = expr;

    if (TREE_CODE (probe) == ADDR_EXPR)
      probe = TREE_OPERAND (expr, 0);
    if (type_unknown_p (probe))
      {
	/* [over.over] enumerates the places where we can take the address
	   of an overloaded function, and this is not one of them.  */
	if (complain & tf_error)
	  error ("%s cannot resolve address of overloaded function",
		 implicit ? implicit : "void cast");
	else
	  return error_mark_node;
	expr = void_zero_node;
      }
    else if (implicit && probe == expr && is_overloaded_fn (probe))
      {
	/* Only warn when there is no &.  */
	if (complain & tf_warning)
	  warning (OPT_Waddress, "%s is a reference, not call, to function %qE",
		   implicit, expr);
	if (TREE_CODE (expr) == COMPONENT_REF)
	  expr = TREE_OPERAND (expr, 0);
      }
  }

  if (expr != error_mark_node && !VOID_TYPE_P (TREE_TYPE (expr)))
    {
      if (implicit
	  && warn_unused_value
	  && !TREE_NO_WARNING (expr)
	  && !processing_template_decl)
	{
	  /* The middle end does not warn about expressions that have
	     been explicitly cast to void, so we must do so here.  */
	  if (!TREE_SIDE_EFFECTS (expr)) {
            if (complain & tf_warning)
              warning (OPT_Wunused_value, "%s has no effect", implicit);
          }
	  else
	    {
	      tree e;
	      enum tree_code code;
	      enum tree_code_class tclass;

	      e = expr;
	      /* We might like to warn about (say) "(int) f()", as the
		 cast has no effect, but the compiler itself will
		 generate implicit conversions under some
		 circumstances.  (For example a block copy will be
		 turned into a call to "__builtin_memcpy", with a
		 conversion of the return value to an appropriate
		 type.)  So, to avoid false positives, we strip
		 conversions.  Do not use STRIP_NOPs because it will
		 not strip conversions to "void", as that is not a
		 mode-preserving conversion.  */
	      while (TREE_CODE (e) == NOP_EXPR)
		e = TREE_OPERAND (e, 0);

	      code = TREE_CODE (e);
	      tclass = TREE_CODE_CLASS (code);
	      if ((tclass == tcc_comparison
		   || tclass == tcc_unary
		   || (tclass == tcc_binary
		       && !(code == MODIFY_EXPR
			    || code == INIT_EXPR
			    || code == PREDECREMENT_EXPR
			    || code == PREINCREMENT_EXPR
			    || code == POSTDECREMENT_EXPR
			    || code == POSTINCREMENT_EXPR)))
                  && (complain & tf_warning))
		warning (OPT_Wunused_value, "value computed is not used");
	    }
	}
      expr = build1 (CONVERT_EXPR, void_type_node, expr);
    }
  if (! TREE_SIDE_EFFECTS (expr))
    expr = void_zero_node;
  return expr;
}
Example 10
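/* Parse a postfix expression (apparently a reduced variant of GCC's C
   parser).  The disabled block shows where TREE_NO_WARNING would be set
   on a parenthesized assignment so that later code does not warn about
   it.  */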
static struct c_expr
c_parser_postfix_expression (c_parser *parser)
{
  struct c_expr expr, e1, e2, e3;
  struct c_type_name *t1, *t2;
  switch (c_parser_peek_token (parser)->type)
    {
    case T_DIGITS:
    case CPP_NUMBER:
    case CPP_CHAR:
    case CPP_WCHAR:
      expr.value = c_parser_peek_token (parser)->value;
      expr.original_code = ERROR_MARK;
      c_parser_consume_token (parser);
      break;
    case T_STRING:
    case CPP_STRING:
    case CPP_WSTRING:
      expr.value = c_parser_peek_token (parser)->value;
      expr.original_code = STRING_CST;
      c_parser_consume_token (parser);
      break;
    case T_NAME:
    case CPP_NAME:
      {
	tree id = c_parser_peek_token (parser)->value;
	location_t loc = c_parser_peek_token (parser)->location;
	c_parser_consume_token (parser);
	expr.value = id;
	expr.original_code = ERROR_MARK;
      }
      break;
    case '(':
    case CPP_OPEN_PAREN:
      /* A parenthesized expression, statement expression or compound
	 literal.  */
	{
	  /* A parenthesized expression.  */
	  c_parser_consume_token (parser);
	  expr = c_parser_expression (parser);
#if 0
	  if (TREE_CODE (expr.value) == MODIFY_EXPR)
	    TREE_NO_WARNING (expr.value) = 1;
	  expr.original_code = ERROR_MARK;
#endif
	  c_parser_skip_until_found (parser, CPP_CLOSE_PAREN,
				     "expected %<)%>");
	}
      break;
    case CPP_KEYWORD:
      switch (c_parser_peek_token (parser)->keyword)
	{
	default:
#if 0
	  c_parser_error (parser, "expected expression");
	  expr.value = error_mark_node;
#endif
	  expr.original_code = ERROR_MARK;
	  break;
	}
      break;
      /* Else fall through to report error.  */
    default:
#if 0
      c_parser_error (parser, "expected expression");
      expr.value = error_mark_node;
#endif
      expr.original_code = ERROR_MARK;
      break;
    }
  return c_parser_postfix_expression_after_primary (parser, expr);
}
Example 11
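/* Loop header copying pass: duplicate loop headers so that loops take
   do-while form.  COND_EXPRs in the copied blocks get TREE_NO_WARNING
   to suppress spurious -Wstrict-overflow warnings (see the comment
   above the warn_strict_overflow block).  */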
static unsigned int
copy_loop_headers (void)
{
  struct loops *loops;
  unsigned i;
  struct loop *loop;
  basic_block header;
  edge exit, entry;
  basic_block *bbs, *copied_bbs;
  unsigned n_bbs;
  unsigned bbs_size;

  loops = loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES);
  if (!loops)
    return 0;

#ifdef ENABLE_CHECKING
  verify_loop_structure (loops);
#endif

  bbs = XNEWVEC (basic_block, n_basic_blocks);
  copied_bbs = XNEWVEC (basic_block, n_basic_blocks);
  bbs_size = n_basic_blocks;

  for (i = 1; i < loops->num; i++)
    {
      /* Copy at most 20 insns.  */
      int limit = 20;

      loop = loops->parray[i];
      if (!loop)
	continue;
      header = loop->header;

      /* If the loop is already a do-while style one (either because it was
	 written as such, or because jump threading transformed it into one),
	 we might be in fact peeling the first iteration of the loop.  This
	 in general is not a good idea.  */
      if (do_while_loop_p (loop))
	continue;

      /* Iterate the header copying up to limit; this takes care of the cases
	 like while (a && b) {...}, where we want to have both of the conditions
	 copied.  TODO -- handle while (a || b) - like cases, by not requiring
	 the header to have just a single successor and copying up to
	 postdominator.  */

      exit = NULL;
      n_bbs = 0;
      while (should_duplicate_loop_header_p (header, loop, &limit))
	{
	  /* Find a successor of header that is inside a loop; i.e. the new
	     header after the condition is copied.  */
	  if (flow_bb_inside_loop_p (loop, EDGE_SUCC (header, 0)->dest))
	    exit = EDGE_SUCC (header, 0);
	  else
	    exit = EDGE_SUCC (header, 1);
	  bbs[n_bbs++] = header;
	  gcc_assert (bbs_size > n_bbs);
	  header = exit->dest;
	}

      if (!exit)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Duplicating header of the loop %d up to edge %d->%d.\n",
		 loop->num, exit->src->index, exit->dest->index);

      /* Ensure that the header will have just the latch as a predecessor
	 inside the loop.  */
      if (!single_pred_p (exit->dest))
	exit = single_pred_edge (loop_split_edge_with (exit, NULL));

      entry = loop_preheader_edge (loop);

      if (!tree_duplicate_sese_region (entry, exit, bbs, n_bbs, copied_bbs))
	{
	  if (dump_file)
	    fprintf (dump_file, "Duplication failed.\n");
	  continue;
	}

      /* If the loop has the form "for (i = j; i < j + 10; i++)" then
	 this copying can introduce a case where we rely on undefined
	 signed overflow to eliminate the preheader condition, because
	 we assume that "j < j + 10" is true.  We don't want to warn
	 about that case for -Wstrict-overflow, because in general we
	 don't warn about overflow involving loops.  Prevent the
	 warning by setting TREE_NO_WARNING.  */
      if (warn_strict_overflow > 0)
	{
	  unsigned int i;

	  for (i = 0; i < n_bbs; ++i)
	    {
	      tree last;

	      last = last_stmt (copied_bbs[i]);
	      if (TREE_CODE (last) == COND_EXPR)
		TREE_NO_WARNING (last) = 1;
	    }
	}

      /* Ensure that the latch and the preheader are simple (we know that they
	 are not now, since there was the loop exit condition).  */
      loop_split_edge_with (loop_preheader_edge (loop), NULL);
      loop_split_edge_with (loop_latch_edge (loop), NULL);
    }

  free (bbs);
  free (copied_bbs);

  loop_optimizer_finalize (loops);
  return 0;
}
Example 12
File: c-fold.c  Project: 0day-ci/gcc
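/* Fully fold EXPR for the C front end, recording in
   *MAYBE_CONST_OPERANDS and *MAYBE_CONST_ITSELF whether it may appear
   in a constant expression.  TREE_NO_WARNING is saved up front and
   reapplied to the folded result so suppressed warnings stay
   suppressed.  */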
static tree
c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands,
		       bool *maybe_const_itself, bool for_int_const)
{
  tree ret = expr;
  enum tree_code code = TREE_CODE (expr);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  location_t loc = EXPR_LOCATION (expr);
  tree op0, op1, op2, op3;
  tree orig_op0, orig_op1, orig_op2;
  bool op0_const = true, op1_const = true, op2_const = true;
  bool op0_const_self = true, op1_const_self = true, op2_const_self = true;
  bool nowarning = TREE_NO_WARNING (expr);
  bool unused_p;
  source_range old_range;

  /* Constants, declarations, statements, errors, SAVE_EXPRs and
     anything else not counted as an expression cannot usefully be
     folded further at this point.  */
  if (!IS_EXPR_CODE_CLASS (kind)
      || kind == tcc_statement
      || code == SAVE_EXPR)
    return expr;

  if (IS_EXPR_CODE_CLASS (kind))
    old_range = EXPR_LOCATION_RANGE (expr);

  /* Operands of variable-length expressions (function calls) have
     already been folded, as have __builtin_* function calls, and such
     expressions cannot occur in constant expressions.  */
  if (kind == tcc_vl_exp)
    {
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;
    }

  if (code == C_MAYBE_CONST_EXPR)
    {
      tree pre = C_MAYBE_CONST_EXPR_PRE (expr);
      tree inner = C_MAYBE_CONST_EXPR_EXPR (expr);
      if (C_MAYBE_CONST_EXPR_NON_CONST (expr))
	*maybe_const_operands = false;
      if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr))
	{
	  *maybe_const_itself = false;
	  inner = c_fully_fold_internal (inner, in_init, maybe_const_operands,
					 maybe_const_itself, true);
	}
      if (pre && !in_init)
	ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner);
      else
	ret = inner;
      goto out;
    }

  /* Assignment, increment, decrement, function call and comma
     operators, and statement expressions, cannot occur in constant
     expressions if evaluated / outside of sizeof.  (Function calls
     were handled above, though VA_ARG_EXPR is treated like a function
     call here, and statement expressions are handled through
     C_MAYBE_CONST_EXPR to avoid folding inside them.)  */
  switch (code)
    {
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case COMPOUND_EXPR:
      *maybe_const_operands = false;
      break;

    case VA_ARG_EXPR:
    case TARGET_EXPR:
    case BIND_EXPR:
    case OBJ_TYPE_REF:
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;

    default:
      break;
    }

  /* Fold individual tree codes as appropriate.  */
  switch (code)
    {
    case COMPOUND_LITERAL_EXPR:
      /* Any non-constancy will have been marked in a containing
	 C_MAYBE_CONST_EXPR; there is no more folding to do here.  */
      goto out;

    case COMPONENT_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      if (op0 != orig_op0)
	ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2);
      if (ret != expr)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      goto out;

    case ARRAY_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op3 = TREE_OPERAND (expr, 3);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op1 = decl_constant_value_for_optimization (op1);
      if (op0 != orig_op0 || op1 != orig_op1)
	ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3);
      if (ret != expr)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      ret = fold (ret);
      goto out;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case POINTER_PLUS_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case COMPLEX_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      /* Binary operations evaluating both arguments (increment and
	 decrement are binary internally in GCC).  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      if (code != MODIFY_EXPR
	  && code != PREDECREMENT_EXPR
	  && code != PREINCREMENT_EXPR
	  && code != POSTDECREMENT_EXPR
	  && code != POSTINCREMENT_EXPR)
	op0 = decl_constant_value_for_optimization (op0);
      /* The RHS of a MODIFY_EXPR was fully folded when building that
	 expression for the sake of conversion warnings.  */
      if (code != MODIFY_EXPR)
	op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				     maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op1 = decl_constant_value_for_optimization (op1);

      if (for_int_const && (TREE_CODE (op0) != INTEGER_CST
			    || TREE_CODE (op1) != INTEGER_CST))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      if (TREE_OVERFLOW_P (ret)
	  && !TREE_OVERFLOW_P (op0)
	  && !TREE_OVERFLOW_P (op1))
	overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret);
      if (code == LSHIFT_EXPR
	  && TREE_CODE (orig_op0) != INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	  && TREE_CODE (op0) == INTEGER_CST
	  && c_inhibit_evaluation_warnings == 0
	  && tree_int_cst_sgn (op0) < 0)
	warning_at (loc, OPT_Wshift_negative_value,
		    "left shift of negative value");
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
	  && c_inhibit_evaluation_warnings == 0)
	{
	  if (tree_int_cst_sgn (op1) < 0)
	    warning_at (loc, OPT_Wshift_count_negative,
			(code == LSHIFT_EXPR
			 ? G_("left shift count is negative")
			 : G_("right shift count is negative")));
	  else if (compare_tree_int (op1,
				     TYPE_PRECISION (TREE_TYPE (orig_op0)))
		   >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			(code == LSHIFT_EXPR
			 ? G_("left shift count >= width of type")
			 : G_("right shift count >= width of type")));
	}
      if (code == LSHIFT_EXPR
	  /* If either OP0 has been folded to INTEGER_CST...  */
	  && ((TREE_CODE (orig_op0) != INTEGER_CST
	       && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	       && TREE_CODE (op0) == INTEGER_CST)
	      /* ...or if OP1 has been folded to INTEGER_CST...  */
	      || (TREE_CODE (orig_op1) != INTEGER_CST
		  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
		  && TREE_CODE (op1) == INTEGER_CST))
	  && c_inhibit_evaluation_warnings == 0)
	/* ...then maybe we can detect an overflow.  */
	maybe_warn_shift_overflow (loc, op0, op1);
      if ((code == TRUNC_DIV_EXPR
	   || code == CEIL_DIV_EXPR
	   || code == FLOOR_DIV_EXPR
	   || code == EXACT_DIV_EXPR
	   || code == TRUNC_MOD_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE)
	warn_for_div_by_zero (loc, op1);
      goto out;

    case INDIRECT_REF:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    CASE_CONVERT:
    case ADDR_SPACE_CONVERT_EXPR:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case ADDR_EXPR:
    case CONJ_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Unary operations.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      if (code != ADDR_EXPR && code != REALPART_EXPR && code != IMAGPART_EXPR)
	op0 = decl_constant_value_for_optimization (op0);

      if (for_int_const && TREE_CODE (op0) != INTEGER_CST)
	goto out;

      /* ??? Cope with user tricks that amount to offsetof.  The middle-end is
	 not prepared to deal with them if they occur in initializers.  */
      if (op0 != orig_op0
	  && code == ADDR_EXPR
	  && (op1 = get_base_address (op0)) != NULL_TREE
	  && INDIRECT_REF_P (op1)
	  && TREE_CONSTANT (TREE_OPERAND (op1, 0)))
	ret = fold_convert_loc (loc, TREE_TYPE (expr), fold_offsetof_1 (op0));
      else if (op0 != orig_op0 || in_init)
	ret = in_init
	  ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0)
	  : fold_build1_loc (loc, code, TREE_TYPE (expr), op0);
      else
	ret = fold (expr);
      if (code == INDIRECT_REF
	  && ret != expr
	  && INDIRECT_REF_P (ret))
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      switch (code)
	{
	case FIX_TRUNC_EXPR:
	case FLOAT_EXPR:
	CASE_CONVERT:
	  /* Don't warn about explicit conversions.  We will already
	     have warned about suspect implicit conversions.  */
	  break;

	default:
	  if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0))
	    overflow_warning (EXPR_LOCATION (expr), ret);
	  break;
	}
      goto out;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Binary operations not necessarily evaluating both
	 arguments.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op0);

      unused_p = (op0 == (code == TRUTH_ANDIF_EXPR
			  ? truthvalue_false_node
			  : truthvalue_true_node));
      c_disable_warnings (unused_p);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (unused_p);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Require OP1 be an INTEGER_CST only if it's evaluated.  */
	      || (!unused_p && TREE_CODE (op1) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_itself &= op1_const_self;
      goto out;

    case COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const);

      STRIP_TYPE_NOPS (op0);
      c_disable_warnings (op0 == truthvalue_false_node);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (op0 == truthvalue_false_node);

      c_disable_warnings (op0 == truthvalue_true_node);
      op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op2);
      c_enable_warnings (op0 == truthvalue_true_node);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Only the evaluated operand must be an INTEGER_CST.  */
	      || (op0 == truthvalue_true_node
		  ? TREE_CODE (op1) != INTEGER_CST
		  : TREE_CODE (op2) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_itself &= op1_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_operands &= op2_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_itself &= op2_const_self;
      goto out;

    case VEC_COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op2 = c_fully_fold_internal (op2, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op2);

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      goto out;

    case EXCESS_PRECISION_EXPR:
      /* Each case where an operand with excess precision may be
	 encountered must remove the EXCESS_PRECISION_EXPR around
	 inner operands and possibly put one around the whole
	 expression or possibly convert to the semantic type (which
	 c_fully_fold does); we cannot tell at this stage which is
	 appropriate in any particular case.  */
      gcc_unreachable ();

    default:
      /* Various codes may appear through folding built-in functions
	 and their arguments.  */
      goto out;
    }

 out:
  /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
     have been done by this point, so remove them again.  */
  nowarning |= TREE_NO_WARNING (ret);
  STRIP_TYPE_NOPS (ret);
  if (nowarning && !TREE_NO_WARNING (ret))
    {
      if (!CAN_HAVE_LOCATION_P (ret))
	ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
    }
  if (ret != expr)
    {
      protected_set_expr_location (ret, loc);
      if (IS_EXPR_CODE_CLASS (kind))
	set_source_range (ret, old_range.m_start, old_range.m_finish);
    }
  return ret;
}
Example 13
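/* A later version of convert_to_void (compare Example 9) in which the
   conversion context is an impl_conv_void enumerator rather than a
   string, allowing context-specific diagnostics; the TREE_NO_WARNING
   checks again suppress unused-value warnings.  */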
tree
convert_to_void (tree expr, impl_conv_void implicit, tsubst_flags_t complain)
{
  if (expr == error_mark_node
      || TREE_TYPE (expr) == error_mark_node)
    return error_mark_node;

  if (implicit == ICV_CAST)
    mark_exp_read (expr);
  else
    {
      tree exprv = expr;

      while (TREE_CODE (exprv) == COMPOUND_EXPR)
	exprv = TREE_OPERAND (exprv, 1);
      if (DECL_P (exprv)
	  || handled_component_p (exprv)
	  || TREE_CODE (exprv) == INDIRECT_REF)
	/* Expr is not being 'used' here, otherwise we would have
	   called mark_{rl}value_use here, which would have in turn
	   called mark_exp_read.  Rather, we call mark_exp_read directly
	   to avoid some warnings when
	   -Wunused-but-set-{variable,parameter} is in effect.  */
	mark_exp_read (exprv);
    }

  if (!TREE_TYPE (expr))
    return expr;
  if (invalid_nonstatic_memfn_p (expr, complain))
    return error_mark_node;
  if (TREE_CODE (expr) == PSEUDO_DTOR_EXPR)
    {
      if (complain & tf_error)
        error ("pseudo-destructor is not called");
      return error_mark_node;
    }
  if (VOID_TYPE_P (TREE_TYPE (expr)))
    return expr;
  switch (TREE_CODE (expr))
    {
    case COND_EXPR:
      {
	/* The two parts of a cond expr might be separate lvalues.  */
	tree op1 = TREE_OPERAND (expr,1);
	tree op2 = TREE_OPERAND (expr,2);
	bool side_effects = TREE_SIDE_EFFECTS (op1) || TREE_SIDE_EFFECTS (op2);
	tree new_op1, new_op2;
	if (implicit != ICV_CAST && !side_effects)
	  {
	    new_op1 = convert_to_void (op1, ICV_SECOND_OF_COND, complain);
	    new_op2 = convert_to_void (op2, ICV_THIRD_OF_COND, complain);
	  }
	else
	  {
	    new_op1 = convert_to_void (op1, ICV_CAST, complain);
	    new_op2 = convert_to_void (op2, ICV_CAST, complain);
	  }

	expr = build3 (COND_EXPR, TREE_TYPE (new_op1),
		       TREE_OPERAND (expr, 0), new_op1, new_op2);
	break;
      }

    case COMPOUND_EXPR:
      {
	/* The second part of a compound expr contains the value.  */
	tree op1 = TREE_OPERAND (expr,1);
	tree new_op1;
	if (implicit != ICV_CAST && !TREE_NO_WARNING (expr))
	  new_op1 = convert_to_void (op1, ICV_RIGHT_OF_COMMA, complain);
	else
	  new_op1 = convert_to_void (op1, ICV_CAST, complain);

	if (new_op1 != op1)
	  {
	    tree t = build2 (COMPOUND_EXPR, TREE_TYPE (new_op1),
			     TREE_OPERAND (expr, 0), new_op1);
	    expr = t;
	  }

	break;
      }

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
      /* These have already decayed to rvalue.  */
      break;

    case CALL_EXPR:   /* We have a special meaning for volatile void fn().  */
      break;

    case INDIRECT_REF:
      {
	tree type = TREE_TYPE (expr);
	int is_reference = TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0)))
			   == REFERENCE_TYPE;
	int is_volatile = TYPE_VOLATILE (type);
	int is_complete = COMPLETE_TYPE_P (complete_type (type));

	/* Can't load the value if we don't know the type.  */
	if (is_volatile && !is_complete)
          {
            if (complain & tf_warning)
	      switch (implicit)
		{
	      	  case ICV_CAST:
		    warning (0, "conversion to void will not access "
				"object of incomplete type %qT", type);
		    break;
		  case ICV_SECOND_OF_COND:
		    warning (0, "indirection will not access object of "
				"incomplete type %qT in second operand "
				"of conditional expression", type);
		    break;
		  case ICV_THIRD_OF_COND:
		    warning (0, "indirection will not access object of "
				"incomplete type %qT in third operand "
				"of conditional expression", type);
		    break;
		  case ICV_RIGHT_OF_COMMA:
		    warning (0, "indirection will not access object of "
				"incomplete type %qT in right operand of "
				"comma operator", type);
		    break;
		  case ICV_LEFT_OF_COMMA:
		    warning (0, "indirection will not access object of "
				"incomplete type %qT in left operand of "
				"comma operator", type);
		    break;
		  case ICV_STATEMENT:
		    warning (0, "indirection will not access object of "
				"incomplete type %qT in statement", type);
		     break;
		  case ICV_THIRD_IN_FOR:
		    warning (0, "indirection will not access object of "
				"incomplete type %qT in for increment "
				"expression", type);
		    break;
		  default:
		    gcc_unreachable ();
		}
          }
	/* Don't load the value if this is an implicit dereference, or if
	   the type needs to be handled by ctors/dtors.  */
	else if (is_volatile && is_reference)
          {
            if (complain & tf_warning)
	      switch (implicit)
		{
	      	  case ICV_CAST:
		    warning (0, "conversion to void will not access "
				"object of type %qT", type);
		    break;
		  case ICV_SECOND_OF_COND:
		    warning (0, "implicit dereference will not access object "
				"of type %qT in second operand of "
				"conditional expression", type);
		    break;
		  case ICV_THIRD_OF_COND:
		    warning (0, "implicit dereference will not access object "
		  	      	"of type %qT in third operand of "
				"conditional expression", type);
		    break;
		  case ICV_RIGHT_OF_COMMA:
		    warning (0, "implicit dereference will not access object "
		    		"of type %qT in right operand of "
				"comma operator", type);
		    break;
		  case ICV_LEFT_OF_COMMA:
		    warning (0, "implicit dereference will not access object "
		    		"of type %qT in left operand of comma operator",
			     type);
		    break;
		  case ICV_STATEMENT:
		    warning (0, "implicit dereference will not access object "
		     		"of type %qT in statement",  type);
		     break;
		  case ICV_THIRD_IN_FOR:
		    warning (0, "implicit dereference will not access object "
		    		"of type %qT in for increment expression",
			     type);
		    break;
		  default:
		    gcc_unreachable ();
		}
          }
	else if (is_volatile && TREE_ADDRESSABLE (type))
	  {
	    if (complain & tf_warning)
	      switch (implicit)
		{
	      	  case ICV_CAST:
		    warning (0, "conversion to void will not access "
				"object of non-trivially-copyable type %qT",
			     type);
		    break;
		  case ICV_SECOND_OF_COND:
		    warning (0, "indirection will not access object of "
				"non-trivially-copyable type %qT in second "
				"operand of conditional expression", type);
		    break;
		  case ICV_THIRD_OF_COND:
		    warning (0, "indirection will not access object of "
		  	      	"non-trivially-copyable type %qT in third "
				"operand of conditional expression", type);
		    break;
		  case ICV_RIGHT_OF_COMMA:
		    warning (0, "indirection will not access object of "
		    		"non-trivially-copyable type %qT in right "
				"operand of comma operator", type);
		    break;
		  case ICV_LEFT_OF_COMMA:
		    warning (0, "indirection will not access object of "
		    		"non-trivially-copyable type %qT in left "
				"operand of comma operator", type);
		    break;
		  case ICV_STATEMENT:
		    warning (0, "indirection will not access object of "
		     		"non-trivially-copyable type %qT in statement",
			      type);
		     break;
		  case ICV_THIRD_IN_FOR:
		    warning (0, "indirection will not access object of "
		    		"non-trivially-copyable type %qT in for "
				"increment expression", type);
		    break;
		  default:
		    gcc_unreachable ();
		}
	  }
	if (is_reference || !is_volatile || !is_complete || TREE_ADDRESSABLE (type))
          {
            /* Emit a warning (if enabled) when the "effect-less" INDIRECT_REF
               operation is stripped off. Note that we don't warn about
               - an expression with TREE_NO_WARNING set. (For an example of
                 such expressions, see build_over_call in call.c.)
               - automatic dereferencing of references, since the user cannot
                 control it. (See also warn_if_unused_value() in stmt.c.)  */
            if (warn_unused_value
		&& implicit != ICV_CAST
                && (complain & tf_warning)
                && !TREE_NO_WARNING (expr)
                && !is_reference)
              warning (OPT_Wunused_value, "value computed is not used");
            expr = TREE_OPERAND (expr, 0);
          }

	break;
      }

    case VAR_DECL:
      {
	/* External variables might be incomplete.  */
	tree type = TREE_TYPE (expr);
	int is_complete = COMPLETE_TYPE_P (complete_type (type));

	if (TYPE_VOLATILE (type) && !is_complete && (complain & tf_warning))
	  switch (implicit)
	    {
	      case ICV_CAST:
		warning (0, "conversion to void will not access "
			    "object %qE of incomplete type %qT", expr, type);
		break;
	      case ICV_SECOND_OF_COND:
	        warning (0, "variable %qE of incomplete type %qT will not "
			    "be accessed in second operand of "
			    "conditional expression", expr, type);
		break;
	      case ICV_THIRD_OF_COND:
	        warning (0, "variable %qE of incomplete type %qT will not "
			    "be accessed in third operand of "
			    "conditional expression", expr, type);
		break;
	      case ICV_RIGHT_OF_COMMA:
	        warning (0, "variable %qE of incomplete type %qT will not "
			    "be accessed in right operand of comma operator",
			 expr, type);
		break;
	      case ICV_LEFT_OF_COMMA:
	        warning (0, "variable %qE of incomplete type %qT will not "
			    "be accessed in left operand of comma operator",
			 expr, type);
		break;
	      case ICV_STATEMENT:
	        warning (0, "variable %qE of incomplete type %qT will not "
		            "be accessed in statement", expr, type);
		break;
	      case ICV_THIRD_IN_FOR:
	        warning (0, "variable %qE of incomplete type %qT will not "
			    "be accessed in for increment expression",
		         expr, type);
		break;
	      default:
	        gcc_unreachable ();
	    }

	break;
      }

    case TARGET_EXPR:
      /* Don't bother with the temporary object returned from a function if
	 we don't use it and don't need to destroy it.  We'll still
	 allocate space for it in expand_call or declare_return_variable,
	 but we don't need to track it through all the tree phases.  */
      if (TARGET_EXPR_IMPLICIT_P (expr)
	  && TYPE_HAS_TRIVIAL_DESTRUCTOR (TREE_TYPE (expr)))
	{
	  tree init = TARGET_EXPR_INITIAL (expr);
	  if (TREE_CODE (init) == AGGR_INIT_EXPR
	      && !AGGR_INIT_VIA_CTOR_P (init))
	    {
	      tree fn = AGGR_INIT_EXPR_FN (init);
	      expr = build_call_array_loc (input_location,
					   TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))),
					   fn,
					   aggr_init_expr_nargs (init),
					   AGGR_INIT_EXPR_ARGP (init));
	    }
	}
      break;

    default:;
    }
  expr = resolve_nondeduced_context (expr);
  {
    tree probe = expr;

    if (TREE_CODE (probe) == ADDR_EXPR)
      probe = TREE_OPERAND (expr, 0);
    if (type_unknown_p (probe))
      {
	/* [over.over] enumerates the places where we can take the address
	   of an overloaded function, and this is not one of them.  */
	if (complain & tf_error)
	  switch (implicit)
	    {
	      case ICV_CAST:
		error ("conversion to void "
		       "cannot resolve address of overloaded function");
		break;
	      case ICV_SECOND_OF_COND:
		error ("second operand of conditional expression "
		       "cannot resolve address of overloaded function");
		break;
	      case ICV_THIRD_OF_COND:
		error ("third operand of conditional expression "
		       "cannot resolve address of overloaded function");
		break;
	      case ICV_RIGHT_OF_COMMA:
		error ("right operand of comma operator "
		       "cannot resolve address of overloaded function");
		break;
	      case ICV_LEFT_OF_COMMA:
		error ("left operand of comma operator "
		       "cannot resolve address of overloaded function");
		break;
	      case ICV_STATEMENT:
		error ("statement "
		       "cannot resolve address of overloaded function");
		break;
	      case ICV_THIRD_IN_FOR:
		error ("for increment expression "
		       "cannot resolve address of overloaded function");
		break;
	    }
	else
	  return error_mark_node;
	expr = void_zero_node;
      }
    else if (implicit != ICV_CAST && probe == expr && is_overloaded_fn (probe))
      {
	/* Only warn when there is no &.  */
	if (complain & tf_warning)
	  switch (implicit)
	    {
	      case ICV_SECOND_OF_COND:
	        warning (OPT_Waddress,
			 "second operand of conditional expression "
			 "is a reference, not call, to function %qE", expr);
		break;
	      case ICV_THIRD_OF_COND:
	        warning (OPT_Waddress,
			 "third operand of conditional expression "
			 "is a reference, not call, to function %qE", expr);
		break;
	      case ICV_RIGHT_OF_COMMA:
	        warning (OPT_Waddress,
			 "right operand of comma operator "
			 "is a reference, not call, to function %qE", expr);
		break;
	      case ICV_LEFT_OF_COMMA:
	        warning (OPT_Waddress,
			 "left operand of comma operator "
			 "is a reference, not call, to function %qE", expr);
		break;
	      case ICV_STATEMENT:
	        warning (OPT_Waddress,
			 "statement is a reference, not call, to function %qE",
			 expr);
		break;
	      case ICV_THIRD_IN_FOR:
	        warning (OPT_Waddress,
			 "for increment expression "
			 "is a reference, not call, to function %qE", expr);
		break;
	      default:
	        gcc_unreachable ();
	    }

	if (TREE_CODE (expr) == COMPONENT_REF)
	  expr = TREE_OPERAND (expr, 0);
      }
  }

  if (expr != error_mark_node && !VOID_TYPE_P (TREE_TYPE (expr)))
    {
      if (implicit != ICV_CAST
	  && warn_unused_value
	  && !TREE_NO_WARNING (expr)
	  && !processing_template_decl)
	{
	  /* The middle end does not warn about expressions that have
	     been explicitly cast to void, so we must do so here.  */
	  if (!TREE_SIDE_EFFECTS (expr)) {
            if (complain & tf_warning)
	      switch (implicit)
		{
		  case ICV_SECOND_OF_COND:
		    warning (OPT_Wunused_value,
			     "second operand of conditional expression has no effect");
		    break;
		  case ICV_THIRD_OF_COND:
		    warning (OPT_Wunused_value,
		    	     "third operand of conditional expression has no effect");
		    break;
		  case ICV_RIGHT_OF_COMMA:
		    warning (OPT_Wunused_value,
		    	     "right operand of comma operator has no effect");
		    break;
		  case ICV_LEFT_OF_COMMA:
		    warning (OPT_Wunused_value,
		    	     "left operand of comma operator has no effect");
		    break;
		  case ICV_STATEMENT:
		    warning (OPT_Wunused_value,
		    	     "statement has no effect");
		    break;
		  case ICV_THIRD_IN_FOR:
		    warning (OPT_Wunused_value,
		    	     "for increment expression has no effect");
		    break;
		  default:
		    gcc_unreachable ();
		}
          }
	  else
	    {
	      tree e;
	      enum tree_code code;
	      enum tree_code_class tclass;

	      e = expr;
	      /* We might like to warn about (say) "(int) f()", as the
		 cast has no effect, but the compiler itself will
		 generate implicit conversions under some
		 circumstances.  (For example a block copy will be
		 turned into a call to "__builtin_memcpy", with a
		 conversion of the return value to an appropriate
		 type.)  So, to avoid false positives, we strip
		 conversions.  Do not use STRIP_NOPs because it will
		 not strip conversions to "void", as that is not a
		 mode-preserving conversion.  */
	      while (TREE_CODE (e) == NOP_EXPR)
		e = TREE_OPERAND (e, 0);

	      code = TREE_CODE (e);
	      tclass = TREE_CODE_CLASS (code);
	      if ((tclass == tcc_comparison
		   || tclass == tcc_unary
		   || (tclass == tcc_binary
		       && !(code == MODIFY_EXPR
			    || code == INIT_EXPR
			    || code == PREDECREMENT_EXPR
			    || code == PREINCREMENT_EXPR
			    || code == POSTDECREMENT_EXPR
			    || code == POSTINCREMENT_EXPR)))
                  && (complain & tf_warning))
		warning (OPT_Wunused_value, "value computed is not used");
	    }
	}
      expr = build1 (CONVERT_EXPR, void_type_node, expr);
    }
  if (! TREE_SIDE_EFFECTS (expr))
    expr = void_zero_node;
  return expr;
}