Example 1
static void
pack_ts_function_decl_value_fields (struct bitpack_d *bp, tree expr)
{
  /* For normal/md builtins we only write the class and code, so they
     should never be handled here.  */
  gcc_assert (!streamer_handle_as_builtin_p (expr));

  bp_pack_enum (bp, built_in_class, BUILT_IN_LAST,
		DECL_BUILT_IN_CLASS (expr));
  bp_pack_value (bp, DECL_STATIC_CONSTRUCTOR (expr), 1);
  bp_pack_value (bp, DECL_STATIC_DESTRUCTOR (expr), 1);
  bp_pack_value (bp, DECL_UNINLINABLE (expr), 1);
  bp_pack_value (bp, DECL_POSSIBLY_INLINED (expr), 1);
  bp_pack_value (bp, DECL_IS_NOVOPS (expr), 1);
  bp_pack_value (bp, DECL_IS_RETURNS_TWICE (expr), 1);
  bp_pack_value (bp, DECL_IS_MALLOC (expr), 1);
  bp_pack_value (bp, DECL_IS_OPERATOR_NEW (expr), 1);
  bp_pack_value (bp, DECL_DECLARED_INLINE_P (expr), 1);
  bp_pack_value (bp, DECL_STATIC_CHAIN (expr), 1);
  bp_pack_value (bp, DECL_NO_INLINE_WARNING_P (expr), 1);
  bp_pack_value (bp, DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (expr), 1);
  bp_pack_value (bp, DECL_NO_LIMIT_STACK (expr), 1);
  bp_pack_value (bp, DECL_DISREGARD_INLINE_LIMITS (expr), 1);
  bp_pack_value (bp, DECL_PURE_P (expr), 1);
  bp_pack_value (bp, DECL_LOOPING_CONST_OR_PURE_P (expr), 1);
  if (DECL_BUILT_IN_CLASS (expr) != NOT_BUILT_IN)
    bp_pack_value (bp, DECL_FUNCTION_CODE (expr), 11);
  if (DECL_STATIC_DESTRUCTOR (expr))
    bp_pack_var_len_unsigned (bp, DECL_FINI_PRIORITY (expr));
}
Example 2
static bool
ipcp_versionable_function_p (struct cgraph_node *node)
{
  struct cgraph_edge *edge;

  /* There are a number of generic reasons functions cannot be versioned.  We
     also cannot remove parameters if there are type attributes such as fnspec
     present.  */
  if (!node->local.versionable
      || TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  /* Removing arguments doesn't work if the function takes varargs
     or uses __builtin_apply_args.  */
  for (edge = node->callees; edge; edge = edge->next_callee)
    {
      tree t = edge->callee->decl;
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
	  && (DECL_FUNCTION_CODE (t) == BUILT_IN_APPLY_ARGS
	     || DECL_FUNCTION_CODE (t) == BUILT_IN_VA_START))
	return false;
    }

  return true;
}
Example 3
static tree
pass_through_call (tree call)
{
  tree callee = get_callee_fndecl (call);
  tree arglist = TREE_OPERAND (call, 1);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
	if (arglist)
	  return TREE_VALUE (arglist);
	break;
      default:
	break;
      }

  return NULL_TREE;
}
Example 4
enum built_in_function
is_cilkplus_reduce_builtin (tree fndecl)
{
  if (!fndecl)
    return BUILT_IN_NONE;
  if (TREE_CODE (fndecl) == ADDR_EXPR)
    fndecl = TREE_OPERAND (fndecl, 0);

  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_CILKPLUS_SEC_REDUCE_ADD:
      case BUILT_IN_CILKPLUS_SEC_REDUCE_MUL:
      case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO:
      case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO:
      case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX:
      case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN:
      case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND:
      case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND:
      case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_NONZERO:
      case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_NONZERO:
      case BUILT_IN_CILKPLUS_SEC_REDUCE:
      case BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING:
	return DECL_FUNCTION_CODE (fndecl);
      default:
	break;
      }

  return BUILT_IN_NONE;
}
Example 5
/* Recognize special cases of builtins that are by themselves not pure or const
   but the function using them is.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
			tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	case BUILT_IN_RETURN:
	case BUILT_IN_UNREACHABLE:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_EH_POINTER:
	case BUILT_IN_EH_FILTER:
	case BUILT_IN_UNWIND_RESUME:
	case BUILT_IN_CXA_END_CLEANUP:
	case BUILT_IN_EH_COPY_VALUES:
	case BUILT_IN_FRAME_ADDRESS:
	case BUILT_IN_APPLY:
	case BUILT_IN_APPLY_ARGS:
	  *looping = false;
	  *state = IPA_CONST;
	  return true;
	case BUILT_IN_PREFETCH:
	  *looping = true;
	  *state = IPA_CONST;
	  return true;
	default:
	  break;
      }
  return false;
}
Example 6
static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
	if (gimple_call_num_args (call) >= 1)
	  return gimple_call_arg (call, 0);
	break;
      default:
	break;
      }

  return NULL_TREE;
}
Example 7
static unsigned
stmt_cost (tree stmt)
{
  tree rhs;
  unsigned cost = 1;

  /* Always try to create possibilities for unswitching.  */
  if (TREE_CODE (stmt) == COND_EXPR)
    return LIM_EXPENSIVE;

  rhs = GENERIC_TREE_OPERAND (stmt, 1);

  /* Hoisting memory references out should almost surely be a win.  */
  if (stmt_references_memory_p (stmt))
    cost += 20;

  switch (TREE_CODE (rhs))
    {
    case CALL_EXPR:
      /* We should be hoisting calls if possible.  */

      /* Unless the call is a builtin_constant_p; this always folds to a
	 constant, so moving it is useless.  */
      rhs = get_callee_fndecl (rhs);
      if (rhs
	  && DECL_BUILT_IN_CLASS (rhs) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (rhs) == BUILT_IN_CONSTANT_P)
	return 0;

      cost += 20;
      break;

    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      /* Division and multiplication are usually expensive.  */
      cost += 20;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      cost += 20;
      break;

    default:
      break;
    }

  return cost;
}
Example 8
static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
				 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	arg1 = 0;
      default:
	break;
      }

  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int)gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
	fold_convert (sizetype, gimple_call_arg (call, arg1)),
	fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
Example 9
static void
unpack_ts_function_decl_value_fields (struct bitpack_d *bp, tree expr)
{
  DECL_BUILT_IN_CLASS (expr) = bp_unpack_enum (bp, built_in_class,
					       BUILT_IN_LAST);
  DECL_STATIC_CONSTRUCTOR (expr) = (unsigned) bp_unpack_value (bp, 1);
  DECL_STATIC_DESTRUCTOR (expr) = (unsigned) bp_unpack_value (bp, 1);
  DECL_UNINLINABLE (expr) = (unsigned) bp_unpack_value (bp, 1);
  DECL_POSSIBLY_INLINED (expr) = (unsigned) bp_unpack_value (bp, 1);
  DECL_IS_NOVOPS (expr) = (unsigned) bp_unpack_value (bp, 1);
  DECL_IS_RETURNS_TWICE (expr) = (unsigned) bp_unpack_value (bp, 1);
  DECL_IS_MALLOC (expr) = (unsigned) bp_unpack_value (bp, 1);
  DECL_IS_OPERATOR_NEW (expr) = (unsigned) bp_unpack_value (bp, 1);
  DECL_DECLARED_INLINE_P (expr) = (unsigned) bp_unpack_value (bp, 1);
  DECL_STATIC_CHAIN (expr) = (unsigned) bp_unpack_value (bp, 1);
  DECL_NO_INLINE_WARNING_P (expr) = (unsigned) bp_unpack_value (bp, 1);
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (expr)
    			= (unsigned) bp_unpack_value (bp, 1);
  DECL_NO_LIMIT_STACK (expr) = (unsigned) bp_unpack_value (bp, 1);
  DECL_DISREGARD_INLINE_LIMITS (expr) = (unsigned) bp_unpack_value (bp, 1);
  DECL_PURE_P (expr) = (unsigned) bp_unpack_value (bp, 1);
  DECL_LOOPING_CONST_OR_PURE_P (expr) = (unsigned) bp_unpack_value (bp, 1);
  if (DECL_BUILT_IN_CLASS (expr) != NOT_BUILT_IN)
    {
      DECL_FUNCTION_CODE (expr) = (enum built_in_function) bp_unpack_value (bp,
	                                                                    11);
      if (DECL_BUILT_IN_CLASS (expr) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (expr) >= END_BUILTINS)
	fatal_error ("machine independent builtin code out of range");
      else if (DECL_BUILT_IN_CLASS (expr) == BUILT_IN_MD)
	{
          tree result = targetm.builtin_decl (DECL_FUNCTION_CODE (expr), true);
	  if (!result || result == error_mark_node)
	    fatal_error ("target specific builtin not available");
	}
    }
  if (DECL_STATIC_DESTRUCTOR (expr))
    {
      priority_type p;
      p = (priority_type) bp_unpack_var_len_unsigned (bp);
      SET_DECL_FINI_PRIORITY (expr, p);
    }
}
Example 10
bool
is_sec_implicit_index_fn (tree fndecl)
{
  if (TREE_CODE (fndecl) == ADDR_EXPR)
    fndecl = TREE_OPERAND (fndecl, 0);

  return
    (TREE_CODE (fndecl) == FUNCTION_DECL
     && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
     && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CILKPLUS_SEC_IMPLICIT_INDEX);
}
Example 11
static unsigned HOST_WIDE_INT
alloc_object_size (tree call, int object_size_type)
{
  tree callee, arglist, a, bytes = NULL_TREE;
  unsigned int arg_mask = 0;

  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  callee = get_callee_fndecl (call);
  arglist = TREE_OPERAND (call, 1);
  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
	arg_mask = 1;
	break;
      /*
      case BUILT_IN_REALLOC:
	arg_mask = 2;
	break;
	*/
      case BUILT_IN_CALLOC:
	arg_mask = 3;
	break;
      default:
	break;
      }

  for (a = arglist; arg_mask && a; arg_mask >>= 1, a = TREE_CHAIN (a))
    if (arg_mask & 1)
      {
	tree arg = TREE_VALUE (a);

	if (TREE_CODE (arg) != INTEGER_CST)
	  break;

	if (! bytes)
	  bytes = fold_convert (sizetype, arg);
	else
	  bytes = size_binop (MULT_EXPR, bytes,
			      fold_convert (sizetype, arg));
      }

  if (! arg_mask && bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
Example 12
static bool
is_call_dce_candidate (gimple call)
{
  tree fn;
  enum built_in_function fnc;

  /* Only potentially dead calls are considered.  */
  if (gimple_call_lhs (call))
    return false;

  fn = gimple_call_fndecl (call);
  if (!fn
      || !DECL_BUILT_IN (fn)
      || (DECL_BUILT_IN_CLASS (fn) != BUILT_IN_NORMAL))
    return false;

  fnc = DECL_FUNCTION_CODE (fn);
  switch (fnc)
    {
    /* Trig functions.  */
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ASIN):
    /* Hyperbolic functions.  */
    CASE_FLT_FN (BUILT_IN_ACOSH):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_COSH):
    CASE_FLT_FN (BUILT_IN_SINH):
    /* Log functions.  */
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    /* Exp functions.  */
    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_POW10):
    /* Sqrt.  */
    CASE_FLT_FN (BUILT_IN_SQRT):
      return check_builtin_call (call);
    /* Special one: two argument pow.  */
    case BUILT_IN_POW:
      return check_pow (call);
    default:
      break;
    }

  return false;
}
Example 13
static void
pack_ts_function_decl_value_fields (struct bitpack_d *bp, tree expr)
{
  bp_pack_enum (bp, built_in_class, BUILT_IN_LAST,
		DECL_BUILT_IN_CLASS (expr));
  bp_pack_value (bp, DECL_STATIC_CONSTRUCTOR (expr), 1);
  bp_pack_value (bp, DECL_STATIC_DESTRUCTOR (expr), 1);
  bp_pack_value (bp, DECL_UNINLINABLE (expr), 1);
  bp_pack_value (bp, DECL_POSSIBLY_INLINED (expr), 1);
  bp_pack_value (bp, DECL_IS_NOVOPS (expr), 1);
  bp_pack_value (bp, DECL_IS_RETURNS_TWICE (expr), 1);
  bp_pack_value (bp, DECL_IS_MALLOC (expr), 1);
  bp_pack_value (bp, DECL_IS_OPERATOR_NEW (expr), 1);
  bp_pack_value (bp, DECL_DECLARED_INLINE_P (expr), 1);
  bp_pack_value (bp, DECL_STATIC_CHAIN (expr), 1);
  bp_pack_value (bp, DECL_NO_INLINE_WARNING_P (expr), 1);
  bp_pack_value (bp, DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (expr), 1);
  bp_pack_value (bp, DECL_NO_LIMIT_STACK (expr), 1);
  bp_pack_value (bp, DECL_DISREGARD_INLINE_LIMITS (expr), 1);
  bp_pack_value (bp, DECL_PURE_P (expr), 1);
  bp_pack_value (bp, DECL_LOOPING_CONST_OR_PURE_P (expr), 1);
  if (DECL_BUILT_IN_CLASS (expr) != NOT_BUILT_IN)
    bp_pack_value (bp, DECL_FUNCTION_CODE (expr), 12);
}
Example 14
static tree
install_builtin (const char *name, tree fntype, enum built_in_function code,
                 bool publish)
{
  tree fndecl = build_fn_decl (name, fntype);
  DECL_BUILT_IN_CLASS (fndecl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (fndecl) = code;
  if (publish)
    {
      tree t = lang_hooks.decls.pushdecl (fndecl);
      if (t)
        fndecl = t;
    }
  set_builtin_decl (code, fndecl, true);
  return fndecl;
}
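A usage sketch for install_builtin, not from the original sources: the wrapper name install_example_builtins is assumed, and the snippet only shows the kind of call site such a helper expects, registering the real __builtin_trap builtin.
/* Sketch only (wrapper name assumed): build a "void (void)" type and
   register __builtin_trap under its machine-independent function code.
   Assumes it runs during front-end initialization, after the global
   type nodes exist and pushdecl is usable.  */
static void
install_example_builtins (void)
{
  tree void_ftype = build_function_type_list (void_type_node, NULL_TREE);
  tree decl = install_builtin ("__builtin_trap", void_ftype,
			       BUILT_IN_TRAP, true);

  /* __builtin_trap does not return and cannot throw.  */
  TREE_THIS_VOLATILE (decl) = 1;
  TREE_NOTHROW (decl) = 1;
}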
Example 15
/* Define a single builtin.  */
static void
define_builtin (enum built_in_function val,
		const char *name,
		tree type,
		const char *libname,
		int flags)
{
  tree decl;

  decl = build_decl (BUILTINS_LOCATION,
		     FUNCTION_DECL, get_identifier (name), type);
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  SET_DECL_ASSEMBLER_NAME (decl, get_identifier (libname));
  pushdecl (decl);
  DECL_BUILT_IN_CLASS (decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (decl) = val;
  set_call_expr_flags (decl, flags);

  set_builtin_decl (val, decl, true);
}
Example 16
unsigned int get_decl_hash(const_tree decl, const char *decl_name)
{
	struct decl_hash decl_hash_data;
	enum tree_code code = TREE_CODE(decl);

	gcc_assert(code == FIELD_DECL || code == FUNCTION_DECL || code == VAR_DECL);

	// skip builtins, e.g. __builtin_constant_p
	if (code == FUNCTION_DECL && (DECL_BUILT_IN(decl) || DECL_BUILT_IN_CLASS(decl) == BUILT_IN_NORMAL))
		return NO_HASH;

	decl_hash_data.fn_name = decl_name;
	decl_hash_data.decl = decl;
	decl_hash_data.context = get_decl_context(decl);
	if (!decl_hash_data.context)
		return NO_HASH;
	decl_hash_data.tree_codes_len = 0;

	set_decl_codes(&decl_hash_data);
	gcc_assert(decl_hash_data.tree_codes_len != 0);
	set_hash(&decl_hash_data);
	return decl_hash_data.hash;
}
Example 17
static bool
lto_symtab_merge_p (tree prevailing, tree decl)
{
  if (TREE_CODE (prevailing) != TREE_CODE (decl))
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file, "Not merging decls; "
		 "TREE_CODE mismatch\n");
      return false;
    }
  gcc_checking_assert (TREE_CHAIN (prevailing) == TREE_CHAIN (decl));
  
  if (TREE_CODE (prevailing) == FUNCTION_DECL)
    {
      if (DECL_BUILT_IN (prevailing) != DECL_BUILT_IN (decl))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "DECL_BUILT_IN mismatch\n");
	  return false;
	}
      if (DECL_BUILT_IN (prevailing)
	  && (DECL_BUILT_IN_CLASS (prevailing) != DECL_BUILT_IN_CLASS (decl)
	      || DECL_FUNCTION_CODE (prevailing) != DECL_FUNCTION_CODE (decl)))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "DECL_BUILT_IN_CLASS or CODE mismatch\n");
	  return false;
	}
    }

  /* FIXME: after MPX is removed, use flags_from_decl_or_type
     function instead.  PR lto/85248.  */
  if (DECL_ATTRIBUTES (prevailing) != DECL_ATTRIBUTES (decl))
    {
      tree prev_attr = lookup_attribute ("error", DECL_ATTRIBUTES (prevailing));
      tree attr = lookup_attribute ("error", DECL_ATTRIBUTES (decl));
      if ((prev_attr == NULL) != (attr == NULL)
	  || (prev_attr && !attribute_value_equal (prev_attr, attr)))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "error attribute mismatch\n");
	  return false;
	}

      prev_attr = lookup_attribute ("warning", DECL_ATTRIBUTES (prevailing));
      attr = lookup_attribute ("warning", DECL_ATTRIBUTES (decl));
      if ((prev_attr == NULL) != (attr == NULL)
	  || (prev_attr && !attribute_value_equal (prev_attr, attr)))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "warning attribute mismatch\n");
	  return false;
	}

      prev_attr = lookup_attribute ("noreturn", DECL_ATTRIBUTES (prevailing));
      attr = lookup_attribute ("noreturn", DECL_ATTRIBUTES (decl));
      if ((prev_attr == NULL) != (attr == NULL))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "noreturn attribute mismatch\n");
	  return false;
	}
    }
  return true;
}
Example 18
static bool
lto_symtab_merge_p (tree prevailing, tree decl)
{
  if (TREE_CODE (prevailing) != TREE_CODE (decl))
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file, "Not merging decls; "
		 "TREE_CODE mismatch\n");
      return false;
    }
  gcc_checking_assert (TREE_CHAIN (prevailing) == TREE_CHAIN (decl));
  
  if (TREE_CODE (prevailing) == FUNCTION_DECL)
    {
      if (DECL_BUILT_IN (prevailing) != DECL_BUILT_IN (decl))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "DECL_BUILT_IN mismatch\n");
	  return false;
	}
      if (DECL_BUILT_IN (prevailing)
	  && (DECL_BUILT_IN_CLASS (prevailing) != DECL_BUILT_IN_CLASS (decl)
	      || DECL_FUNCTION_CODE (prevailing) != DECL_FUNCTION_CODE (decl)))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "DECL_BUILT_IN_CLASS or CODE mismatch\n");
	  return false;
	}
    }
  if (DECL_ATTRIBUTES (prevailing) != DECL_ATTRIBUTES (decl))
    {
      tree prev_attr = lookup_attribute ("error", DECL_ATTRIBUTES (prevailing));
      tree attr = lookup_attribute ("error", DECL_ATTRIBUTES (decl));
      if ((prev_attr == NULL) != (attr == NULL)
	  || (prev_attr
	      && TREE_VALUE (TREE_VALUE (prev_attr))
		 != TREE_VALUE (TREE_VALUE (attr))))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "error attribute mismatch\n");
	  return false;
	}

      prev_attr = lookup_attribute ("warning", DECL_ATTRIBUTES (prevailing));
      attr = lookup_attribute ("warning", DECL_ATTRIBUTES (decl));
      if ((prev_attr == NULL) != (attr == NULL)
	  || (prev_attr
	      && TREE_VALUE (TREE_VALUE (prev_attr))
		 != TREE_VALUE (TREE_VALUE (attr))))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "warning attribute mismatch\n");
	  return false;
	}
    }
  return true;
}
Example 19
void
print_node (FILE *file, const char *prefix, tree node, int indent)
{
    int hash;
    struct bucket *b;
    machine_mode mode;
    enum tree_code_class tclass;
    int len;
    int i;
    expanded_location xloc;
    enum tree_code code;

    if (node == 0)
        return;

    code = TREE_CODE (node);
    tclass = TREE_CODE_CLASS (code);

    /* Don't get too deep in nesting.  If the user wants to see deeper,
       it is easy to use the address of a lowest-level node
       as an argument in another call to debug_tree.  */

    if (indent > 24)
    {
        print_node_brief (file, prefix, node, indent);
        return;
    }

    if (indent > 8 && (tclass == tcc_type || tclass == tcc_declaration))
    {
        print_node_brief (file, prefix, node, indent);
        return;
    }

    /* It is unsafe to look at any other fields of an ERROR_MARK node.  */
    if (code == ERROR_MARK)
    {
        print_node_brief (file, prefix, node, indent);
        return;
    }

    /* Allow this function to be called if the table is not there.  */
    if (table)
    {
        hash = ((uintptr_t) node) % HASH_SIZE;

        /* If node is in the table, just mention its address.  */
        for (b = table[hash]; b; b = b->next)
            if (b->node == node)
            {
                print_node_brief (file, prefix, node, indent);
                return;
            }

        /* Add this node to the table.  */
        b = XNEW (struct bucket);
        b->node = node;
        b->next = table[hash];
        table[hash] = b;
    }

    /* Indent to the specified column, since this is the long form.  */
    indent_to (file, indent);

    /* Print the slot this node is in, and its code, and address.  */
    fprintf (file, "%s <%s", prefix, get_tree_code_name (code));
    dump_addr (file, " ", node);

    /* Print the name, if any.  */
    if (tclass == tcc_declaration)
    {
        if (DECL_NAME (node))
            fprintf (file, " %s", IDENTIFIER_POINTER (DECL_NAME (node)));
        else if (code == LABEL_DECL
                 && LABEL_DECL_UID (node) != -1)
        {
            if (dump_flags & TDF_NOUID)
                fprintf (file, " L.xxxx");
            else
                fprintf (file, " L.%d", (int) LABEL_DECL_UID (node));
        }
        else
        {
            if (dump_flags & TDF_NOUID)
                fprintf (file, " %c.xxxx", code == CONST_DECL ? 'C' : 'D');
            else
                fprintf (file, " %c.%u", code == CONST_DECL ? 'C' : 'D',
                         DECL_UID (node));
        }
    }
    else if (tclass == tcc_type)
    {
        if (TYPE_NAME (node))
        {
            if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
                fprintf (file, " %s", IDENTIFIER_POINTER (TYPE_NAME (node)));
            else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
                     && DECL_NAME (TYPE_NAME (node)))
                fprintf (file, " %s",
                         IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (node))));
        }
    }
    if (code == IDENTIFIER_NODE)
        fprintf (file, " %s", IDENTIFIER_POINTER (node));

    if (code == INTEGER_CST)
    {
        if (indent <= 4)
            print_node_brief (file, "type", TREE_TYPE (node), indent + 4);
    }
    else if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
        print_node (file, "type", TREE_TYPE (node), indent + 4);
        if (TREE_TYPE (node))
            indent_to (file, indent + 3);
    }

    if (!TYPE_P (node) && TREE_SIDE_EFFECTS (node))
        fputs (" side-effects", file);

    if (TYPE_P (node) ? TYPE_READONLY (node) : TREE_READONLY (node))
        fputs (" readonly", file);
    if (TYPE_P (node) && TYPE_ATOMIC (node))
        fputs (" atomic", file);
    if (!TYPE_P (node) && TREE_CONSTANT (node))
        fputs (" constant", file);
    else if (TYPE_P (node) && TYPE_SIZES_GIMPLIFIED (node))
        fputs (" sizes-gimplified", file);

    if (TYPE_P (node) && !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (node)))
        fprintf (file, " address-space-%d", TYPE_ADDR_SPACE (node));

    if (TREE_ADDRESSABLE (node))
        fputs (" addressable", file);
    if (TREE_THIS_VOLATILE (node))
        fputs (" volatile", file);
    if (TREE_ASM_WRITTEN (node))
        fputs (" asm_written", file);
    if (TREE_USED (node))
        fputs (" used", file);
    if (TREE_NOTHROW (node))
        fputs (" nothrow", file);
    if (TREE_PUBLIC (node))
        fputs (" public", file);
    if (TREE_PRIVATE (node))
        fputs (" private", file);
    if (TREE_PROTECTED (node))
        fputs (" protected", file);
    if (TREE_STATIC (node))
        fputs (code == CALL_EXPR ? " must-tail-call" : " static", file);
    if (TREE_DEPRECATED (node))
        fputs (" deprecated", file);
    if (TREE_VISITED (node))
        fputs (" visited", file);

    if (code != TREE_VEC && code != INTEGER_CST && code != SSA_NAME)
    {
        if (TREE_LANG_FLAG_0 (node))
            fputs (" tree_0", file);
        if (TREE_LANG_FLAG_1 (node))
            fputs (" tree_1", file);
        if (TREE_LANG_FLAG_2 (node))
            fputs (" tree_2", file);
        if (TREE_LANG_FLAG_3 (node))
            fputs (" tree_3", file);
        if (TREE_LANG_FLAG_4 (node))
            fputs (" tree_4", file);
        if (TREE_LANG_FLAG_5 (node))
            fputs (" tree_5", file);
        if (TREE_LANG_FLAG_6 (node))
            fputs (" tree_6", file);
    }

    /* DECL_ nodes have additional attributes.  */

    switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
        if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
        {
            if (DECL_UNSIGNED (node))
                fputs (" unsigned", file);
            if (DECL_IGNORED_P (node))
                fputs (" ignored", file);
            if (DECL_ABSTRACT_P (node))
                fputs (" abstract", file);
            if (DECL_EXTERNAL (node))
                fputs (" external", file);
            if (DECL_NONLOCAL (node))
                fputs (" nonlocal", file);
        }
        if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
        {
            if (DECL_WEAK (node))
                fputs (" weak", file);
            if (DECL_IN_SYSTEM_HEADER (node))
                fputs (" in_system_header", file);
        }
        if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL)
                && code != LABEL_DECL
                && code != FUNCTION_DECL
                && DECL_REGISTER (node))
            fputs (" regdecl", file);

        if (code == TYPE_DECL && TYPE_DECL_SUPPRESS_DEBUG (node))
            fputs (" suppress-debug", file);

        if (code == FUNCTION_DECL
                && DECL_FUNCTION_SPECIFIC_TARGET (node))
            fputs (" function-specific-target", file);
        if (code == FUNCTION_DECL
                && DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node))
            fputs (" function-specific-opt", file);
        if (code == FUNCTION_DECL && DECL_DECLARED_INLINE_P (node))
            fputs (" autoinline", file);
        if (code == FUNCTION_DECL && DECL_BUILT_IN (node))
            fputs (" built-in", file);
        if (code == FUNCTION_DECL && DECL_STATIC_CHAIN (node))
            fputs (" static-chain", file);
        if (TREE_CODE (node) == FUNCTION_DECL && decl_is_tm_clone (node))
            fputs (" tm-clone", file);

        if (code == FIELD_DECL && DECL_PACKED (node))
            fputs (" packed", file);
        if (code == FIELD_DECL && DECL_BIT_FIELD (node))
            fputs (" bit-field", file);
        if (code == FIELD_DECL && DECL_NONADDRESSABLE_P (node))
            fputs (" nonaddressable", file);

        if (code == LABEL_DECL && EH_LANDING_PAD_NR (node))
            fprintf (file, " landing-pad:%d", EH_LANDING_PAD_NR (node));

        if (code == VAR_DECL && DECL_IN_TEXT_SECTION (node))
            fputs (" in-text-section", file);
        if (code == VAR_DECL && DECL_IN_CONSTANT_POOL (node))
            fputs (" in-constant-pool", file);
        if (code == VAR_DECL && DECL_COMMON (node))
            fputs (" common", file);
        if (code == VAR_DECL && DECL_THREAD_LOCAL_P (node))
        {
            fputs (" ", file);
            fputs (tls_model_names[DECL_TLS_MODEL (node)], file);
        }

        if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
        {
            if (DECL_VIRTUAL_P (node))
                fputs (" virtual", file);
            if (DECL_PRESERVE_P (node))
                fputs (" preserve", file);
            if (DECL_LANG_FLAG_0 (node))
                fputs (" decl_0", file);
            if (DECL_LANG_FLAG_1 (node))
                fputs (" decl_1", file);
            if (DECL_LANG_FLAG_2 (node))
                fputs (" decl_2", file);
            if (DECL_LANG_FLAG_3 (node))
                fputs (" decl_3", file);
            if (DECL_LANG_FLAG_4 (node))
                fputs (" decl_4", file);
            if (DECL_LANG_FLAG_5 (node))
                fputs (" decl_5", file);
            if (DECL_LANG_FLAG_6 (node))
                fputs (" decl_6", file);
            if (DECL_LANG_FLAG_7 (node))
                fputs (" decl_7", file);

            mode = DECL_MODE (node);
            fprintf (file, " %s", GET_MODE_NAME (mode));
        }

        if ((code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
                && DECL_BY_REFERENCE (node))
            fputs (" passed-by-reference", file);

        if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS)  && DECL_DEFER_OUTPUT (node))
            fputs (" defer-output", file);


        xloc = expand_location (DECL_SOURCE_LOCATION (node));
        fprintf (file, " file %s line %d col %d", xloc.file, xloc.line,
                 xloc.column);

        if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
        {
            print_node (file, "size", DECL_SIZE (node), indent + 4);
            print_node (file, "unit size", DECL_SIZE_UNIT (node), indent + 4);

            if (code != FUNCTION_DECL || DECL_BUILT_IN (node))
                indent_to (file, indent + 3);

            if (DECL_USER_ALIGN (node))
                fprintf (file, " user");

            fprintf (file, " align %d", DECL_ALIGN (node));
            if (code == FIELD_DECL)
                fprintf (file, " offset_align " HOST_WIDE_INT_PRINT_UNSIGNED,
                         DECL_OFFSET_ALIGN (node));

            if (code == FUNCTION_DECL && DECL_BUILT_IN (node))
            {
                if (DECL_BUILT_IN_CLASS (node) == BUILT_IN_MD)
                    fprintf (file, " built-in BUILT_IN_MD %d", DECL_FUNCTION_CODE (node));
                else
                    fprintf (file, " built-in %s:%s",
                             built_in_class_names[(int) DECL_BUILT_IN_CLASS (node)],
                             built_in_names[(int) DECL_FUNCTION_CODE (node)]);
            }
        }
        if (code == FIELD_DECL)
        {
            print_node (file, "offset", DECL_FIELD_OFFSET (node), indent + 4);
            print_node (file, "bit offset", DECL_FIELD_BIT_OFFSET (node),
                        indent + 4);
            if (DECL_BIT_FIELD_TYPE (node))
                print_node (file, "bit_field_type", DECL_BIT_FIELD_TYPE (node),
                            indent + 4);
        }

        print_node_brief (file, "context", DECL_CONTEXT (node), indent + 4);

        if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
        {
            print_node_brief (file, "attributes",
                              DECL_ATTRIBUTES (node), indent + 4);
            if (code != PARM_DECL)
                print_node_brief (file, "initial", DECL_INITIAL (node),
                                  indent + 4);
        }
        if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
        {
            print_node_brief (file, "abstract_origin",
                              DECL_ABSTRACT_ORIGIN (node), indent + 4);
        }
        if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
        {
            print_node (file, "result", DECL_RESULT_FLD (node), indent + 4);
        }

        lang_hooks.print_decl (file, node, indent);

        if (DECL_RTL_SET_P (node))
        {
            indent_to (file, indent + 4);
            print_rtl (file, DECL_RTL (node));
        }

        if (code == PARM_DECL)
        {
            print_node (file, "arg-type", DECL_ARG_TYPE (node), indent + 4);

            if (DECL_INCOMING_RTL (node) != 0)
            {
                indent_to (file, indent + 4);
                fprintf (file, "incoming-rtl ");
                print_rtl (file, DECL_INCOMING_RTL (node));
            }
        }
        else if (code == FUNCTION_DECL
                 && DECL_STRUCT_FUNCTION (node) != 0)
        {
            print_node (file, "arguments", DECL_ARGUMENTS (node), indent + 4);
            indent_to (file, indent + 4);
            dump_addr (file, "struct-function ", DECL_STRUCT_FUNCTION (node));
        }

        if ((code == VAR_DECL || code == PARM_DECL)
                && DECL_HAS_VALUE_EXPR_P (node))
            print_node (file, "value-expr", DECL_VALUE_EXPR (node), indent + 4);

        /* Print the decl chain only if decl is at second level.  */
        if (indent == 4)
            print_node (file, "chain", TREE_CHAIN (node), indent + 4);
        else
            print_node_brief (file, "chain", TREE_CHAIN (node), indent + 4);
        break;

    case tcc_type:
        if (TYPE_UNSIGNED (node))
            fputs (" unsigned", file);

        if (TYPE_NO_FORCE_BLK (node))
            fputs (" no-force-blk", file);

        if (TYPE_STRING_FLAG (node))
            fputs (" string-flag", file);

        if (TYPE_NEEDS_CONSTRUCTING (node))
            fputs (" needs-constructing", file);

        if ((code == RECORD_TYPE
                || code == UNION_TYPE
                || code == QUAL_UNION_TYPE
                || code == ARRAY_TYPE)
                && TYPE_REVERSE_STORAGE_ORDER (node))
            fputs (" reverse-storage-order", file);

        /* The transparent-union flag is used for different things in
        different nodes.  */
        if ((code == UNION_TYPE || code == RECORD_TYPE)
                && TYPE_TRANSPARENT_AGGR (node))
            fputs (" transparent-aggr", file);
        else if (code == ARRAY_TYPE
                 && TYPE_NONALIASED_COMPONENT (node))
            fputs (" nonaliased-component", file);

        if (TYPE_PACKED (node))
            fputs (" packed", file);

        if (TYPE_RESTRICT (node))
            fputs (" restrict", file);

        if (TYPE_LANG_FLAG_0 (node))
            fputs (" type_0", file);
        if (TYPE_LANG_FLAG_1 (node))
            fputs (" type_1", file);
        if (TYPE_LANG_FLAG_2 (node))
            fputs (" type_2", file);
        if (TYPE_LANG_FLAG_3 (node))
            fputs (" type_3", file);
        if (TYPE_LANG_FLAG_4 (node))
            fputs (" type_4", file);
        if (TYPE_LANG_FLAG_5 (node))
            fputs (" type_5", file);
        if (TYPE_LANG_FLAG_6 (node))
            fputs (" type_6", file);
        if (TYPE_LANG_FLAG_7 (node))
            fputs (" type_7", file);

        mode = TYPE_MODE (node);
        fprintf (file, " %s", GET_MODE_NAME (mode));

        print_node (file, "size", TYPE_SIZE (node), indent + 4);
        print_node (file, "unit size", TYPE_SIZE_UNIT (node), indent + 4);
        indent_to (file, indent + 3);

        if (TYPE_USER_ALIGN (node))
            fprintf (file, " user");

        fprintf (file, " align %d symtab %d alias set " HOST_WIDE_INT_PRINT_DEC,
                 TYPE_ALIGN (node), TYPE_SYMTAB_ADDRESS (node),
                 (HOST_WIDE_INT) TYPE_ALIAS_SET (node));

        if (TYPE_STRUCTURAL_EQUALITY_P (node))
            fprintf (file, " structural equality");
        else
            dump_addr (file, " canonical type ", TYPE_CANONICAL (node));

        print_node (file, "attributes", TYPE_ATTRIBUTES (node), indent + 4);

        if (INTEGRAL_TYPE_P (node) || code == REAL_TYPE
                || code == FIXED_POINT_TYPE)
        {
            fprintf (file, " precision %d", TYPE_PRECISION (node));
            print_node_brief (file, "min", TYPE_MIN_VALUE (node), indent + 4);
            print_node_brief (file, "max", TYPE_MAX_VALUE (node), indent + 4);
        }

        if (code == ENUMERAL_TYPE)
            print_node (file, "values", TYPE_VALUES (node), indent + 4);
        else if (code == ARRAY_TYPE)
            print_node (file, "domain", TYPE_DOMAIN (node), indent + 4);
        else if (code == VECTOR_TYPE)
            fprintf (file, " nunits %d", (int) TYPE_VECTOR_SUBPARTS (node));
        else if (code == RECORD_TYPE
                 || code == UNION_TYPE
                 || code == QUAL_UNION_TYPE)
            print_node (file, "fields", TYPE_FIELDS (node), indent + 4);
        else if (code == FUNCTION_TYPE
                 || code == METHOD_TYPE)
        {
            if (TYPE_METHOD_BASETYPE (node))
                print_node_brief (file, "method basetype",
                                  TYPE_METHOD_BASETYPE (node), indent + 4);
            print_node (file, "arg-types", TYPE_ARG_TYPES (node), indent + 4);
        }
        else if (code == OFFSET_TYPE)
            print_node_brief (file, "basetype", TYPE_OFFSET_BASETYPE (node),
                              indent + 4);

        if (TYPE_CONTEXT (node))
            print_node_brief (file, "context", TYPE_CONTEXT (node), indent + 4);

        lang_hooks.print_type (file, node, indent);

        if (TYPE_POINTER_TO (node) || TREE_CHAIN (node))
            indent_to (file, indent + 3);

        print_node_brief (file, "pointer_to_this", TYPE_POINTER_TO (node),
                          indent + 4);
        print_node_brief (file, "reference_to_this", TYPE_REFERENCE_TO (node),
                          indent + 4);
        print_node_brief (file, "chain", TREE_CHAIN (node), indent + 4);
        break;

    case tcc_expression:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_reference:
    case tcc_statement:
    case tcc_vl_exp:
        if (code == BIND_EXPR)
        {
            print_node (file, "vars", TREE_OPERAND (node, 0), indent + 4);
            print_node (file, "body", TREE_OPERAND (node, 1), indent + 4);
            print_node (file, "block", TREE_OPERAND (node, 2), indent + 4);
            break;
        }
        if (code == CALL_EXPR)
        {
            call_expr_arg_iterator iter;
            tree arg;
            print_node (file, "fn", CALL_EXPR_FN (node), indent + 4);
            print_node (file, "static_chain", CALL_EXPR_STATIC_CHAIN (node),
                        indent + 4);
            i = 0;
            FOR_EACH_CALL_EXPR_ARG (arg, iter, node)
            {
                char temp[10];
                sprintf (temp, "arg %d", i);
                print_node (file, temp, arg, indent + 4);
                i++;
            }
        }
Example 20
static gimple
vect_recog_pow_pattern (gimple last_stmt, tree *type_in, tree *type_out)
{
  tree fn, base, exp = NULL;
  gimple stmt;
  tree var;

  if (!is_gimple_call (last_stmt) || gimple_call_lhs (last_stmt) == NULL)
    return NULL;

  fn = gimple_call_fndecl (last_stmt);
  if (fn == NULL_TREE || DECL_BUILT_IN_CLASS (fn) != BUILT_IN_NORMAL)
   return NULL;

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_POWIF:
    case BUILT_IN_POWI:
    case BUILT_IN_POWF:
    case BUILT_IN_POW:
      base = gimple_call_arg (last_stmt, 0);
      exp = gimple_call_arg (last_stmt, 1);
      if (TREE_CODE (exp) != REAL_CST
	  && TREE_CODE (exp) != INTEGER_CST)
        return NULL;
      break;

    default:
      return NULL;
    }

  /* We now have a pow or powi builtin function call with a constant
     exponent.  */

  *type_out = NULL_TREE;

  /* Catch squaring.  */
  if ((host_integerp (exp, 0)
       && tree_low_cst (exp, 0) == 2)
      || (TREE_CODE (exp) == REAL_CST
          && REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst2)))
    {
      *type_in = TREE_TYPE (base);

      var = vect_recog_temp_ssa_var (TREE_TYPE (base), NULL);
      stmt = gimple_build_assign_with_ops (MULT_EXPR, var, base, base);
      SSA_NAME_DEF_STMT (var) = stmt;
      return stmt;
    }

  /* Catch square root.  */
  if (TREE_CODE (exp) == REAL_CST
      && REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconsthalf))
    {
      tree newfn = mathfn_built_in (TREE_TYPE (base), BUILT_IN_SQRT);
      *type_in = get_vectype_for_scalar_type (TREE_TYPE (base));
      if (*type_in)
	{
	  gimple stmt = gimple_build_call (newfn, 1, base);
	  if (vectorizable_function (stmt, *type_in, *type_in)
	      != NULL_TREE)
	    {
	      var = vect_recog_temp_ssa_var (TREE_TYPE (base), stmt);
	      gimple_call_set_lhs (stmt, var);
	      return stmt;
	    }
	}
    }

  return NULL;
}
Example 21
static void
check_call (funct_state local, gimple call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
  				     && stmt_can_throw_external (call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
          local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
            local->looping = true;
	    break;
	  default:
	    break;
	  }
    }

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t == current_function_decl)
    {
      if (dump_file)
        fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits. */
  else if (!ipa)
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
	}
      if (possibly_throws_externally)
        {
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
	      	       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
          local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping);
    }
  /* Direct functions calls are handled by IPA propagation.  */
}
Example 22
/* Find memcpy, mempcpy, memmove and memset calls, perform
   checks before call and then call no_chk version of
   functions.  We do it on O2 to enable inlining of these
   functions during expand.

   Also try to find memcpy, mempcpy, memmove and memset calls
   which are known to not write pointers to memory and use
   faster function versions for them.  */
static void
chkp_optimize_string_function_calls (void)
{
    basic_block bb;

    if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Searching for replaceable string function calls...\n");

    FOR_EACH_BB_FN (bb, cfun)
    {
        gimple_stmt_iterator i;

        for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
            gimple *stmt = gsi_stmt (i);
            tree fndecl;

            if (gimple_code (stmt) != GIMPLE_CALL
                    || !gimple_call_with_bounds_p (stmt))
                continue;

            fndecl = gimple_call_fndecl (stmt);

            if (!fndecl || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
                continue;

            if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMCPY_CHKP
                    || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHKP
                    || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMMOVE_CHKP
                    || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHKP)
            {
                tree dst = gimple_call_arg (stmt, 0);
                tree dst_bnd = gimple_call_arg (stmt, 1);
                bool is_memset = DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHKP;
                tree size = gimple_call_arg (stmt, is_memset ? 3 : 4);
                tree fndecl_nochk;
                gimple_stmt_iterator j;
                basic_block check_bb;
                address_t size_val;
                int sign;
                bool known;

                /* We may replace call with corresponding __chkp_*_nobnd
                call in case destination pointer base type is not
                 void or pointer.  */
                if (POINTER_TYPE_P (TREE_TYPE (dst))
                        && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst)))
                        && !chkp_type_has_pointer (TREE_TYPE (TREE_TYPE (dst))))
                {
                    tree fndecl_nobnd
                        = chkp_get_nobnd_fndecl (DECL_FUNCTION_CODE (fndecl));

                    if (fndecl_nobnd)
                        fndecl = fndecl_nobnd;
                }

                fndecl_nochk = chkp_get_nochk_fndecl (DECL_FUNCTION_CODE (fndecl));

                if (fndecl_nochk)
                    fndecl = fndecl_nochk;

                if (fndecl != gimple_call_fndecl (stmt))
                {
                    if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                        fprintf (dump_file, "Replacing call: ");
                        print_gimple_stmt (dump_file, stmt, 0,
                                           TDF_VOPS|TDF_MEMSYMS);
                    }

                    gimple_call_set_fndecl (stmt, fndecl);

                    if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                        fprintf (dump_file, "With a new call: ");
                        print_gimple_stmt (dump_file, stmt, 0,
                                           TDF_VOPS|TDF_MEMSYMS);
                    }
                }

                /* If there is no nochk version of function then
                do nothing.  Otherwise insert checks before
                 the call.  */
                if (!fndecl_nochk)
                    continue;

                /* If size passed to call is known and > 0
                then we may insert checks unconditionally.  */
                size_val.pol.create (0);
                chkp_collect_value (size, size_val);
                known = chkp_is_constant_addr (size_val, &sign);
                size_val.pol.release ();

                /* If we are not sure size is not zero then we have
                to perform runtime check for size and perform
                 checks only when size is not zero.  */
                if (!known)
                {
                    gimple *check = gimple_build_cond (NE_EXPR,
                                                       size,
                                                       size_zero_node,
                                                       NULL_TREE,
                                                       NULL_TREE);

                    /* Split block before string function call.  */
                    gsi_prev (&i);
                    check_bb = insert_cond_bb (bb, gsi_stmt (i), check);

                    /* Set position for checks.  */
                    j = gsi_last_bb (check_bb);

                    /* The block was split and therefore we
                       need to set the iterator to its end.  */
                    i = gsi_last_bb (bb);
                }
                /* If size is known to be zero then no checks
                should be performed.  */
                else if (!sign)
                    continue;
                else
                    j = i;

                size = size_binop (MINUS_EXPR, size, size_one_node);
                if (!is_memset)
                {
                    tree src = gimple_call_arg (stmt, 2);
                    tree src_bnd = gimple_call_arg (stmt, 3);

                    chkp_check_mem_access (src, fold_build_pointer_plus (src, size),
                                           src_bnd, j, gimple_location (stmt),
                                           integer_zero_node);
                }

                chkp_check_mem_access (dst, fold_build_pointer_plus (dst, size),
                                       dst_bnd, j, gimple_location (stmt),
                                       integer_one_node);

            }
        }
    }
}
Example 23
static void
check_call (funct_state local, tree call_expr) 
{
  int flags = call_expr_flags(call_expr);
  tree operand_list = TREE_OPERAND (call_expr, 1);
  tree operand;
  tree callee_t = get_callee_fndecl (call_expr);
  struct cgraph_node* callee;
  enum availability avail = AVAIL_NOT_AVAILABLE;

  for (operand = operand_list;
       operand != NULL_TREE;
       operand = TREE_CHAIN (operand))
    {
      tree argument = TREE_VALUE (operand);
      check_rhs_var (local, argument);
    }
  
  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information. 
  
     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */  
  if (callee_t)
    {
      callee = cgraph_node(callee_t);
      avail = cgraph_function_body_availability (callee);

      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	local->pure_const_state = IPA_NEITHER;

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    local->pure_const_state = IPA_NEITHER;
	    break;
	  default:
	    break;
	  }
    }

  /* The callee is either unknown (indirect call) or there is just no
     scannable code for it (external call) .  We look to see if there
     are any bits available for the callee (such as by declaration or
     because it is builtin) and process solely on the basis of those
     bits. */
  if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
    {
      if (flags & ECF_PURE) 
	{
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
      else 
	local->pure_const_state = IPA_NEITHER;
    }
  else
    {
      /* We have the code and we will scan it for the effects. */
      if (flags & ECF_PURE) 
	{
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
}
Example 24
tree
aarch64_builtin_vectorized_function (tree fndecl, tree type_out, tree type_in)
{
  machine_mode in_mode, out_mode;
  int in_n, out_n;

  if (TREE_CODE (type_out) != VECTOR_TYPE
      || TREE_CODE (type_in) != VECTOR_TYPE)
    return NULL_TREE;

  out_mode = TYPE_MODE (TREE_TYPE (type_out));
  out_n = TYPE_VECTOR_SUBPARTS (type_out);
  in_mode = TYPE_MODE (TREE_TYPE (type_in));
  in_n = TYPE_VECTOR_SUBPARTS (type_in);

#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
#define AARCH64_FIND_FRINT_VARIANT(N) \
  (AARCH64_CHECK_BUILTIN_MODE (2, D) \
    ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2df] \
    : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
	? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v4sf] \
	: (AARCH64_CHECK_BUILTIN_MODE (2, S) \
	   ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2sf] \
	   : NULL_TREE)))
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
      switch (fn)
	{
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Fmode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_FLOOR:
	case BUILT_IN_FLOORF:
	  return AARCH64_FIND_FRINT_VARIANT (floor);
	case BUILT_IN_CEIL:
	case BUILT_IN_CEILF:
	  return AARCH64_FIND_FRINT_VARIANT (ceil);
	case BUILT_IN_TRUNC:
	case BUILT_IN_TRUNCF:
	  return AARCH64_FIND_FRINT_VARIANT (btrunc);
	case BUILT_IN_ROUND:
	case BUILT_IN_ROUNDF:
	  return AARCH64_FIND_FRINT_VARIANT (round);
	case BUILT_IN_NEARBYINT:
	case BUILT_IN_NEARBYINTF:
	  return AARCH64_FIND_FRINT_VARIANT (nearbyint);
	case BUILT_IN_SQRT:
	case BUILT_IN_SQRTF:
	  return AARCH64_FIND_FRINT_VARIANT (sqrt);
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == SImode && out_n == C \
   && in_mode == N##Imode && in_n == C)
        case BUILT_IN_CLZ:
          {
            if (AARCH64_CHECK_BUILTIN_MODE (4, S))
              return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_clzv4si];
            return NULL_TREE;
          }
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOOR:
	case BUILT_IN_IFLOORF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LCEIL:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEIL:
	case BUILT_IN_ICEILF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LROUND:
	case BUILT_IN_IROUNDF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_BSWAP16:
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Imode && in_n == C)
	  if (AARCH64_CHECK_BUILTIN_MODE (4, H))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4hi];
	  else if (AARCH64_CHECK_BUILTIN_MODE (8, H))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv8hi];
	  else
	    return NULL_TREE;
	case BUILT_IN_BSWAP32:
	  if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2si];
	  else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4si];
	  else
	    return NULL_TREE;
	case BUILT_IN_BSWAP64:
	  if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2di];
	  else
	    return NULL_TREE;
	default:
	  return NULL_TREE;
	}
    }

  return NULL_TREE;
}
Esempio n. 25
0
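/* Lower the statement pointed to by GSI: attach DATA->block to it, lower
   any nested sequences (binds, try/finally, EH else, transactions),
   rewrite calls to __builtin_setjmp, and keep DATA->cannot_fallthru up to
   date so callers know whether control can reach the next statement.  */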
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
Esempio n. 26
0
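/* Extract an operation code and operands from STMT (a GIMPLE_ASSIGN, a
   GIMPLE_CALL to an implicitly available normal builtin, or a
   GIMPLE_COND), valueizing them through TOP_VALUEIZE/VALUEIZE, and hand
   them to gimple_resimplify1/2/3 via *RCODE and OPS; any statements
   needed by the simplification are appended to SEQ.  Returns true if the
   statement was simplified (or at least one operand was valueized).  */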
bool
gimple_simplify (gimple *stmt,
		 code_helper *rcode, tree *ops,
		 gimple_seq *seq,
		 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree type = TREE_TYPE (gimple_assign_lhs (stmt));
	switch (gimple_assign_rhs_class (stmt))
	  {
	  case GIMPLE_SINGLE_RHS:
	    if (code == REALPART_EXPR
		|| code == IMAGPART_EXPR
		|| code == VIEW_CONVERT_EXPR)
	      {
		tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
		bool valueized = false;
		op0 = do_valueize (op0, top_valueize, valueized);
		*rcode = code;
		ops[0] = op0;
		return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
			|| valueized);
	      }
	    else if (code == BIT_FIELD_REF)
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		tree op0 = TREE_OPERAND (rhs1, 0);
		bool valueized = false;
		op0 = do_valueize (op0, top_valueize, valueized);
		*rcode = code;
		ops[0] = op0;
		ops[1] = TREE_OPERAND (rhs1, 1);
		ops[2] = TREE_OPERAND (rhs1, 2);
		return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
			|| valueized);
	      }
	    else if (code == SSA_NAME
		     && top_valueize)
	      {
		tree op0 = gimple_assign_rhs1 (stmt);
		tree valueized = top_valueize (op0);
		if (!valueized || op0 == valueized)
		  return false;
		ops[0] = valueized;
		*rcode = TREE_CODE (op0);
		return true;
	      }
	    break;
	  case GIMPLE_UNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      bool valueized = false;
	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
	      *rcode = code;
	      ops[0] = rhs1;
	      return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
		      || valueized);
	    }
	  case GIMPLE_BINARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      bool valueized = false;
	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
	      rhs2 = do_valueize (rhs2, top_valueize, valueized);
	      *rcode = code;
	      ops[0] = rhs1;
	      ops[1] = rhs2;
	      return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
		      || valueized);
	    }
	  case GIMPLE_TERNARY_RHS:
	    {
	      bool valueized = false;
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      /* If this is a [VEC_]COND_EXPR first try to simplify an
		 embedded GENERIC condition.  */
	      if (code == COND_EXPR
		  || code == VEC_COND_EXPR)
		{
		  if (COMPARISON_CLASS_P (rhs1))
		    {
		      tree lhs = TREE_OPERAND (rhs1, 0);
		      tree rhs = TREE_OPERAND (rhs1, 1);
		      lhs = do_valueize (lhs, top_valueize, valueized);
		      rhs = do_valueize (rhs, top_valueize, valueized);
		      code_helper rcode2 = TREE_CODE (rhs1);
		      tree ops2[3] = {};
		      ops2[0] = lhs;
		      ops2[1] = rhs;
		      if ((gimple_resimplify2 (seq, &rcode2, TREE_TYPE (rhs1),
					       ops2, valueize)
			   || valueized)
			  && rcode2.is_tree_code ())
			{
			  valueized = true;
			  if (TREE_CODE_CLASS ((enum tree_code)rcode2)
			      == tcc_comparison)
			    rhs1 = build2 (rcode2, TREE_TYPE (rhs1),
					   ops2[0], ops2[1]);
			  else if (rcode2 == SSA_NAME
				   || rcode2 == INTEGER_CST)
			    rhs1 = ops2[0];
			  else
			    valueized = false;
			}
		    }
		}
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      tree rhs3 = gimple_assign_rhs3 (stmt);
	      rhs1 = do_valueize (rhs1, top_valueize, valueized);
	      rhs2 = do_valueize (rhs2, top_valueize, valueized);
	      rhs3 = do_valueize (rhs3, top_valueize, valueized);
	      *rcode = code;
	      ops[0] = rhs1;
	      ops[1] = rhs2;
	      ops[2] = rhs3;
	      return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
		      || valueized);
	    }
	  default:
	    gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_CALL:
      /* ???  This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
	  && gimple_call_num_args (stmt) >= 1
	  && gimple_call_num_args (stmt) <= 3)
	{
	  tree fn = gimple_call_fn (stmt);
	  /* ???  Internal function support missing.  */
	  if (!fn)
	    return false;
	  bool valueized = false;
	  fn = do_valueize (fn, top_valueize, valueized);
	  if (TREE_CODE (fn) != ADDR_EXPR
	      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
	    return false;

	  tree decl = TREE_OPERAND (fn, 0);
	  if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
	      || !builtin_decl_implicit (DECL_FUNCTION_CODE (decl))
	      || !gimple_builtin_call_types_compatible_p (stmt, decl))
	    return false;

	  tree type = TREE_TYPE (gimple_call_lhs (stmt));
	  *rcode = DECL_FUNCTION_CODE (decl);
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      ops[i] = do_valueize (arg, top_valueize, valueized);
	    }
	  switch (gimple_call_num_args (stmt))
	    {
	    case 1:
	      return (gimple_resimplify1 (seq, rcode, type, ops, valueize)
		      || valueized);
	    case 2:
	      return (gimple_resimplify2 (seq, rcode, type, ops, valueize)
		      || valueized);
	    case 3:
	      return (gimple_resimplify3 (seq, rcode, type, ops, valueize)
		      || valueized);
	    default:
	      gcc_unreachable ();
	    }
	}
      break;

    case GIMPLE_COND:
      {
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	bool valueized = false;
	lhs = do_valueize (lhs, top_valueize, valueized);
	rhs = do_valueize (rhs, top_valueize, valueized);
	*rcode = gimple_cond_code (stmt);
	ops[0] = lhs;
	ops[1] = rhs;
        return (gimple_resimplify2 (seq, rcode,
				    boolean_type_node, ops, valueize)
		|| valueized);
      }

    default:
      break;
    }

  return false;
}
Esempio n. 27
0
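/* Older variant of the dispatcher above, taking a single VALUEIZE
   callback: operands that are SSA names are valueized inline before
   being passed to gimple_resimplify1/2/3.  */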
bool
gimple_simplify (gimple stmt,
		 code_helper *rcode, tree *ops,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree type = TREE_TYPE (gimple_assign_lhs (stmt));
	switch (gimple_assign_rhs_class (stmt))
	  {
	  case GIMPLE_SINGLE_RHS:
	    if (code == REALPART_EXPR
		|| code == IMAGPART_EXPR
		|| code == VIEW_CONVERT_EXPR)
	      {
		tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
		if (valueize && TREE_CODE (op0) == SSA_NAME)
		  {
		    tree tem = valueize (op0);
		    if (tem)
		      op0 = tem;
		  }
		*rcode = code;
		ops[0] = op0;
		return gimple_resimplify1 (seq, rcode, type, ops, valueize);
	      }
	    else if (code == BIT_FIELD_REF)
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		tree op0 = TREE_OPERAND (rhs1, 0);
		if (valueize && TREE_CODE (op0) == SSA_NAME)
		  {
		    tree tem = valueize (op0);
		    if (tem)
		      op0 = tem;
		  }
		*rcode = code;
		ops[0] = op0;
		ops[1] = TREE_OPERAND (rhs1, 1);
		ops[2] = TREE_OPERAND (rhs1, 2);
		return gimple_resimplify3 (seq, rcode, type, ops, valueize);
	      }
	    else if (code == SSA_NAME
		     && valueize)
	      {
		tree op0 = gimple_assign_rhs1 (stmt);
		tree valueized = valueize (op0);
		if (!valueized || op0 == valueized)
		  return false;
		ops[0] = valueized;
		*rcode = TREE_CODE (op0);
		return true;
	      }
	    break;
	  case GIMPLE_UNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      if (valueize && TREE_CODE (rhs1) == SSA_NAME)
		{
		  tree tem = valueize (rhs1);
		  if (tem)
		    rhs1 = tem;
		}
	      *rcode = code;
	      ops[0] = rhs1;
	      return gimple_resimplify1 (seq, rcode, type, ops, valueize);
	    }
	  case GIMPLE_BINARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      if (valueize && TREE_CODE (rhs1) == SSA_NAME)
		{
		  tree tem = valueize (rhs1);
		  if (tem)
		    rhs1 = tem;
		}
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (valueize && TREE_CODE (rhs2) == SSA_NAME)
		{
		  tree tem = valueize (rhs2);
		  if (tem)
		    rhs2 = tem;
		}
	      *rcode = code;
	      ops[0] = rhs1;
	      ops[1] = rhs2;
	      return gimple_resimplify2 (seq, rcode, type, ops, valueize);
	    }
	  case GIMPLE_TERNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      if (valueize && TREE_CODE (rhs1) == SSA_NAME)
		{
		  tree tem = valueize (rhs1);
		  if (tem)
		    rhs1 = tem;
		}
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (valueize && TREE_CODE (rhs2) == SSA_NAME)
		{
		  tree tem = valueize (rhs2);
		  if (tem)
		    rhs2 = tem;
		}
	      tree rhs3 = gimple_assign_rhs3 (stmt);
	      if (valueize && TREE_CODE (rhs3) == SSA_NAME)
		{
		  tree tem = valueize (rhs3);
		  if (tem)
		    rhs3 = tem;
		}
	      *rcode = code;
	      ops[0] = rhs1;
	      ops[1] = rhs2;
	      ops[2] = rhs3;
	      return gimple_resimplify3 (seq, rcode, type, ops, valueize);
	    }
	  default:
	    gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_CALL:
      /* ???  This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	{
	  tree fn = gimple_call_fn (stmt);
	  /* ???  Internal function support missing.  */
	  if (!fn)
	    return false;
	  if (valueize && TREE_CODE (fn) == SSA_NAME)
	    {
	      tree tem = valueize (fn);
	      if (tem)
		fn = tem;
	    }
	  if (!fn
	      || TREE_CODE (fn) != ADDR_EXPR
	      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL
	      || DECL_BUILT_IN_CLASS (TREE_OPERAND (fn, 0)) != BUILT_IN_NORMAL
	      || !builtin_decl_implicit (DECL_FUNCTION_CODE (TREE_OPERAND (fn, 0)))
	      || !gimple_builtin_call_types_compatible_p (stmt,
							  TREE_OPERAND (fn, 0)))
	    return false;

	  tree decl = TREE_OPERAND (fn, 0);
	  tree type = TREE_TYPE (gimple_call_lhs (stmt));
	  switch (gimple_call_num_args (stmt))
	    {
	    case 1:
	      {
		tree arg1 = gimple_call_arg (stmt, 0);
		if (valueize && TREE_CODE (arg1) == SSA_NAME)
		  {
		    tree tem = valueize (arg1);
		    if (tem)
		      arg1 = tem;
		  }
		*rcode = DECL_FUNCTION_CODE (decl);
		ops[0] = arg1;
		return gimple_resimplify1 (seq, rcode, type, ops, valueize);
	      }
	    case 2:
	      {
		tree arg1 = gimple_call_arg (stmt, 0);
		if (valueize && TREE_CODE (arg1) == SSA_NAME)
		  {
		    tree tem = valueize (arg1);
		    if (tem)
		      arg1 = tem;
		  }
		tree arg2 = gimple_call_arg (stmt, 1);
		if (valueize && TREE_CODE (arg2) == SSA_NAME)
		  {
		    tree tem = valueize (arg2);
		    if (tem)
		      arg2 = tem;
		  }
		*rcode = DECL_FUNCTION_CODE (decl);
		ops[0] = arg1;
		ops[1] = arg2;
		return gimple_resimplify2 (seq, rcode, type, ops, valueize);
	      }
	    case 3:
	      {
		tree arg1 = gimple_call_arg (stmt, 0);
		if (valueize && TREE_CODE (arg1) == SSA_NAME)
		  {
		    tree tem = valueize (arg1);
		    if (tem)
		      arg1 = tem;
		  }
		tree arg2 = gimple_call_arg (stmt, 1);
		if (valueize && TREE_CODE (arg2) == SSA_NAME)
		  {
		    tree tem = valueize (arg2);
		    if (tem)
		      arg2 = tem;
		  }
		tree arg3 = gimple_call_arg (stmt, 2);
		if (valueize && TREE_CODE (arg3) == SSA_NAME)
		  {
		    tree tem = valueize (arg3);
		    if (tem)
		      arg3 = tem;
		  }
		*rcode = DECL_FUNCTION_CODE (decl);
		ops[0] = arg1;
		ops[1] = arg2;
		ops[2] = arg3;
		return gimple_resimplify3 (seq, rcode, type, ops, valueize);
	      }
	    default:
	      return false;
	    }
	}
      break;

    case GIMPLE_COND:
      {
	tree lhs = gimple_cond_lhs (stmt);
	if (valueize && TREE_CODE (lhs) == SSA_NAME)
	  {
	    tree tem = valueize (lhs);
	    if (tem)
	      lhs = tem;
	  }
	tree rhs = gimple_cond_rhs (stmt);
	if (valueize && TREE_CODE (rhs) == SSA_NAME)
	  {
	    tree tem = valueize (rhs);
	    if (tem)
	      rhs = tem;
	  }
	*rcode = gimple_cond_code (stmt);
	ops[0] = lhs;
	ops[1] = rhs;
        return gimple_resimplify2 (seq, rcode, boolean_type_node, ops, valueize);
      }

    default:
      break;
    }

  return false;
}
Esempio n. 28
0
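/* Generate RTL to evaluate EXP and jump to IF_TRUE_LABEL if the value is
   nonzero or to IF_FALSE_LABEL if it is zero; either label may be
   NULL_RTX, meaning fall through in that case.  Many tree codes
   (conversions, single-bit tests, comparisons, logical operators,
   __builtin_expect calls) get specialized expansions; the default case
   expands EXP normally and compares the result against zero.  */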
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while ((TREE_CODE (exp0) == NOP_EXPR
                  || TREE_CODE (exp0) == CONVERT_EXPR
                  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
                                       << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cst_type (argtype, mask)),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
        break;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (fold_convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);
        
        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      exp = build2 (NE_EXPR, TREE_TYPE (exp),
                    TREE_OPERAND (exp, 0),
                    TREE_OPERAND (exp, 1));
      /* FALLTHRU */
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);
        
        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
           && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
          /* If the target doesn't provide either UNORDERED or ORDERED
             comparisons, canonicalize on UNORDERED for the library.  */
          || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;
      case LTGT_EXPR:
        /* It is ok for LTGT_EXPR to trap when the result is unordered,
           so expand to (a < b) || (a > b).  */
        rcode1 = LTGT;
        tcode1 = LT_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
            if (if_true_label == 0)
              drop_through_label = if_true_label = gen_label_rtx ();
              
            cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
            cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
            do_jump (cmp0, 0, if_true_label);
            do_jump (cmp1, if_false_label, if_true_label);
          }
      }
      break;

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_false_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_true_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

      /* Special case:
          __builtin_expect (<test>, 0)        and
          __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }
 
      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
Esempio n. 29
0
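/* Later revision of the lower_stmt shown earlier: it additionally handles
   GIMPLE_DEBUG marker statements, lowers __builtin_posix_memalign, covers
   more OMP statement kinds, and uses as_a <> statement casts.  */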
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (cfun->debug_nonbind_markers);
      /* We can't possibly have debug bind stmts before lowering, we
	 first emit them when entering SSA.  */
      gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
      /* Propagate fallthruness.  */
      /* If the function (e.g. from PCH) had debug stmts, but they're
	 disabled for this compilation, remove them.  */
      if (!MAY_HAVE_DEBUG_MARKER_STMTS)
	gsi_remove (gsi, true);
      else
	gsi_next (gsi);
      return;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
Esempio n. 30
-1
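/* Emit a minimal stream record for the builtin function decl EXPR: its
   built-in class, its function code, and, if the decl has a
   user-supplied assembler name, that name with the leading '*' added by
   set_builtin_user_assembler_name stripped.  */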
void
streamer_write_builtin (struct output_block *ob, tree expr)
{
  gcc_assert (streamer_handle_as_builtin_p (expr));

  if (DECL_BUILT_IN_CLASS (expr) == BUILT_IN_MD
      && !targetm.builtin_decl)
    sorry ("tree bytecode streams do not support machine specific builtin "
	   "functions on this target");

  streamer_write_record_start (ob, LTO_builtin_decl);
  streamer_write_enum (ob->main_stream, built_in_class, BUILT_IN_LAST,
		       DECL_BUILT_IN_CLASS (expr));
  streamer_write_uhwi (ob, DECL_FUNCTION_CODE (expr));

  if (DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* When the assembler name of a builtin gets a user name,
	 the new name is always prefixed with '*' by
	 set_builtin_user_assembler_name.  So, to prevent the
	 reader side from adding a second '*', we omit it here.  */
      const char *str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (expr));
      if (strlen (str) > 1 && str[0] == '*')
	streamer_write_string (ob, ob->main_stream, &str[1], true);
      else
	streamer_write_string (ob, ob->main_stream, NULL, true);
    }
  else
    streamer_write_string (ob, ob->main_stream, NULL, true);
}