Example no. 1
static void
add_subscript_strides (tree access_fn, unsigned stride,
		       HOST_WIDE_INT *strides, unsigned n, struct loop *loop)
{
  struct loop *aloop;
  tree step;
  HOST_WIDE_INT astep;
  unsigned min_depth = loop_depth (loop) - n;

  while (TREE_CODE (access_fn) == POLYNOMIAL_CHREC)
    {
      aloop = get_chrec_loop (access_fn);
      step = CHREC_RIGHT (access_fn);
      access_fn = CHREC_LEFT (access_fn);

      if ((unsigned) loop_depth (aloop) <= min_depth)
	continue;

      if (host_integerp (step, 0))
	astep = tree_low_cst (step, 0);
      else
	astep = L1_CACHE_LINE_SIZE;

      strides[n - 1 - loop_depth (loop) + loop_depth (aloop)] += astep * stride;
    }
}
Example no. 2
static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

  stack_vars_num++;
}
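A note on the allocation pattern: the XRESIZEVEC call amortizes copies by growing the array geometrically, starting at 32 slots and growing by 3/2 on overflow. A minimal standalone sketch of the same policy in plain C (names invented here, error handling elided):

#include <stdlib.h>

struct var_entry { void *decl; long size; };

static struct var_entry *vars;
static size_t vars_num, vars_alloc;

static void
add_var_sketch (void *decl, long size)
{
  if (vars_num >= vars_alloc)
    {
      /* Same policy as above: start at 32, then grow by 3/2.  */
      vars_alloc = vars_alloc ? vars_alloc * 3 / 2 : 32;
      vars = realloc (vars, vars_alloc * sizeof *vars);
    }
  vars[vars_num].decl = decl;
  vars[vars_num].size = size;
  vars_num++;
}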
Example no. 3
static unsigned short
get_ubsan_type_info_for_type (tree type)
{
  gcc_assert (TYPE_SIZE (type) && host_integerp (TYPE_SIZE (type), 1));
  int prec = exact_log2 (tree_low_cst (TYPE_SIZE (type), 1));
  gcc_assert (prec != -1);
  return (prec << 1) | !TYPE_UNSIGNED (type);
}
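The return value packs two facts into one short: bit 0 is 1 for signed types, and the remaining bits hold log2 of the bit width (TYPE_SIZE is in bits). A standalone sketch of the same encoding with sample values (not GCC code; __builtin_ctz stands in for exact_log2 on a power of two):

#include <stdio.h>

static unsigned short
type_info_sketch (unsigned bits, int is_unsigned)
{
  int prec = __builtin_ctz (bits);   /* exact_log2 for a power of two */
  return (unsigned short) ((prec << 1) | !is_unsigned);
}

int
main (void)
{
  printf ("%u\n", type_info_sketch (32, 0));  /* signed 32-bit: 11 */
  printf ("%u\n", type_info_sketch (64, 1));  /* unsigned 64-bit: 12 */
  return 0;
}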
Example no. 4
static tree
compute_object_offset (tree expr, tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
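A worked instance of what the recursion computes: for EXPR == s.c[2] relative to VAR == s, the COMPONENT_REF case contributes the field offset and the ARRAY_REF case contributes index times element size. A user-level illustration in ordinary C (not GCC internals):

#include <stdio.h>
#include <stddef.h>

struct S { int a; char c[8]; };

int
main (void)
{
  /* COMPONENT_REF part + ARRAY_REF part; 4 + 2 = 6 on ABIs with
     a 4-byte int.  */
  size_t off = offsetof (struct S, c) + 2 * sizeof (char);
  printf ("%zu\n", off);
  return 0;
}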
Example no. 5
static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
				 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	arg1 = 0;
      default:
	break;
      }

  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int)gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
	fold_convert (sizetype, gimple_call_arg (call, arg1)),
	fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
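For reference, this is the user-level attribute the code parses; alloc_size argument positions are 1-based, which is why 1 is subtracted when converting them to call-argument indices:

#include <stddef.h>

/* bytes = arg1 * arg2, as in the calloc case above.  */
void *my_calloc (size_t nmemb, size_t size)
  __attribute__ ((alloc_size (1, 2)));

/* bytes = arg1, as in the malloc case above.  */
void *my_malloc (size_t size)
  __attribute__ ((alloc_size (1)));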
Example no. 6
/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
    offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
  else
    offset_limit = -1;
  offset_limit /= 2;
}
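Either branch leaves OFFSET_LIMIT at half the unsigned range of the host integer: when the maximum of sizetype does not fit, -1 wraps to the all-ones value before the division. A standalone check of the arithmetic (assuming a 64-bit unsigned HOST_WIDE_INT):

#include <stdio.h>

int
main (void)
{
  unsigned long long offset_limit = -1;  /* the "else" branch above */
  offset_limit /= 2;
  printf ("%llx\n", offset_limit);       /* 7fffffffffffffff */
  return 0;
}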
Example no. 7
static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
  align = get_decl_align_unit (var);
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}
Example no. 8
Uint
UI_From_gnu (tree Input)
{
  tree gnu_type = TREE_TYPE (Input), gnu_base, gnu_temp;
  /* UI_Base is defined so that 5 Uint digits is sufficient to hold the
     largest possible signed 64-bit value.  */
  const int Max_For_Dint = 5;
  int v[Max_For_Dint], i;
  Vector_Template temp;
  Int_Vector vec;

#if HOST_BITS_PER_WIDE_INT == 64
  /* On 64-bit hosts, host_integerp tells whether the input fits in a
     signed 64-bit integer.  Then a truncation tells whether it fits
     in a signed 32-bit integer.  */
  if (host_integerp (Input, 0))
    {
      HOST_WIDE_INT hw_input = TREE_INT_CST_LOW (Input);
      if (hw_input == (int) hw_input)
	return UI_From_Int (hw_input);
    }
  else
    return No_Uint;
#else
  /* On 32-bit hosts, host_integerp tells whether the input fits in a
     signed 32-bit integer.  Then a sign test tells whether it fits
     in a signed 64-bit integer.  */
  if (host_integerp (Input, 0))
    return UI_From_Int (TREE_INT_CST_LOW (Input));
  else if (TREE_INT_CST_HIGH (Input) < 0
	   && TYPE_UNSIGNED (gnu_type)
	   && !(TREE_CODE (gnu_type) == INTEGER_TYPE
		&& TYPE_IS_SIZETYPE (gnu_type)))
    return No_Uint;
#endif

  gnu_base = build_int_cst (gnu_type, UI_Base);
  gnu_temp = Input;

  for (i = Max_For_Dint - 1; i >= 0; i--)
    {
      v[i] = tree_low_cst (fold_build1 (ABS_EXPR, gnu_type,
					fold_build2 (TRUNC_MOD_EXPR, gnu_type,
						     gnu_temp, gnu_base)),
			   0);
      gnu_temp = fold_build2 (TRUNC_DIV_EXPR, gnu_type, gnu_temp, gnu_base);
    }

  temp.Low_Bound = 1, temp.High_Bound = Max_For_Dint;
  vec.Array = v, vec.Bounds = &temp;
  return Vector_To_Uint (vec, tree_int_cst_sgn (Input) < 0);
}
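The digit loop above fills V from the least significant end by repeated TRUNC_MOD/TRUNC_DIV; ABS_EXPR makes each digit non-negative even for a negative input, and the sign is reattached by Vector_To_Uint. A standalone sketch of the same loop in base 10 (plain C, names invented here):

#include <stdio.h>
#include <stdlib.h>

int
main (void)
{
  long long temp = -90210;
  int v[5];

  for (int i = 5 - 1; i >= 0; i--)
    {
      v[i] = abs ((int) (temp % 10));  /* ABS_EXPR of TRUNC_MOD_EXPR */
      temp /= 10;                      /* TRUNC_DIV_EXPR */
    }

  for (int i = 0; i < 5; i++)
    printf ("%d", v[i]);               /* prints 90210 */
  printf ("\n");
  return 0;
}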
Example no. 9
static void
create_temp_arrays (struct switch_conv_info *info)
{
  int i;

  info->default_values = XCNEWVEC (tree, info->phi_count * 3);
  info->constructors = XCNEWVEC (VEC (constructor_elt, gc) *, info->phi_count);
  info->target_inbound_names = info->default_values + info->phi_count;
  info->target_outbound_names = info->target_inbound_names + info->phi_count;
  for (i = 0; i < info->phi_count; i++)
    info->constructors[i]
      = VEC_alloc (constructor_elt, gc, tree_low_cst (info->range_size, 1) + 1);
}
Example no. 10
static void
pp_c_character_constant (c_pretty_printer *pp, tree c)
{
  tree type = TREE_TYPE (c);
  if (type == wchar_type_node)
    pp_character (pp, 'L');
  pp_quote (pp);
  if (host_integerp (c, TYPE_UNSIGNED (type)))
    pp_c_char (pp, tree_low_cst (c, TYPE_UNSIGNED (type)));
  else
    pp_scalar (pp, "\\x%x", (unsigned) TREE_INT_CST_LOW (c));
  pp_quote (pp);
}
Example no. 11
void
pp_c_direct_abstract_declarator (c_pretty_printer *pp, tree t)
{
  switch (TREE_CODE (t))
    {
    case POINTER_TYPE:
      /* APPLE LOCAL blocks */
    case BLOCK_POINTER_TYPE:
      pp_abstract_declarator (pp, t);
      break;

    case FUNCTION_TYPE:
      pp_c_parameter_type_list (pp, t);
      pp_direct_abstract_declarator (pp, TREE_TYPE (t));
      break;

    case ARRAY_TYPE:
      pp_c_left_bracket (pp);
      if (TYPE_DOMAIN (t) && TYPE_MAX_VALUE (TYPE_DOMAIN (t)))
	{
	  tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (t));
	  tree type = TREE_TYPE (maxval);

	  if (host_integerp (maxval, 0))
	    pp_wide_integer (pp, tree_low_cst (maxval, 0) + 1);
	  else
	    pp_expression (pp, fold (build2 (PLUS_EXPR, type, maxval,
					     build_int_cst (type, 1))));
	}
      pp_c_right_bracket (pp);
      pp_direct_abstract_declarator (pp, TREE_TYPE (t));
      break;

    case IDENTIFIER_NODE:
    case VOID_TYPE:
    case BOOLEAN_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case RECORD_TYPE:
    case UNION_TYPE:
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
    case TYPE_DECL:
      break;

    default:
      pp_unsupported_tree (pp, t);
      break;
    }
}
Example no. 12
static unsigned HOST_WIDE_INT
alloc_object_size (tree call, int object_size_type)
{
  tree callee, arglist, a, bytes = NULL_TREE;
  unsigned int arg_mask = 0;

  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  callee = get_callee_fndecl (call);
  arglist = TREE_OPERAND (call, 1);
  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
	arg_mask = 1;
	break;
      /*
      case BUILT_IN_REALLOC:
	arg_mask = 2;
	break;
	*/
      case BUILT_IN_CALLOC:
	arg_mask = 3;
	break;
      default:
	break;
      }

  for (a = arglist; arg_mask && a; arg_mask >>= 1, a = TREE_CHAIN (a))
    if (arg_mask & 1)
      {
	tree arg = TREE_VALUE (a);

	if (TREE_CODE (arg) != INTEGER_CST)
	  break;

	if (! bytes)
	  bytes = fold_convert (sizetype, arg);
	else
	  bytes = size_binop (MULT_EXPR, bytes,
			      fold_convert (sizetype, arg));
      }

  if (! arg_mask && bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
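The ARG_MASK walk is a compact way to select which arguments count toward the size: bit i selects argument i, so calloc's mask of 3 multiplies its two arguments together. A standalone sketch of the loop (plain C; starting the product at 1 gives the same result as the first-argument initialization above):

#include <stdio.h>

int
main (void)
{
  unsigned long args[] = { 12, 8 };   /* as if calloc (12, 8) */
  unsigned mask = 3;                  /* the BUILT_IN_CALLOC case above */
  unsigned long bytes = 1;

  for (unsigned i = 0; mask; mask >>= 1, i++)
    if (mask & 1)
      bytes *= args[i];

  printf ("%lu\n", bytes);            /* 96 */
  return 0;
}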
Example no. 13
static unsigned int instrument_assignments_plugin_exec(void)
{
#ifdef DEBUG
    fprintf(stderr, "* Inspecting function `%s'\n", FN_NAME);
#endif

    basic_block bb;
    FOR_EACH_BB(bb) {
        gimple_stmt_iterator gsi;
        for (gsi = gsi_start_bb(bb) ; !gsi_end_p(gsi) ; gsi_next(&gsi)) {
            gimple curr_stmt = gsi_stmt(gsi);
            tree lhs = gimple_get_lhs(curr_stmt);

            // We only care about assignments to “real” variables – i.e. not
            // variable versions that were created as part of the transformation
            // to SSA form.
            if (gimple_code(curr_stmt) == GIMPLE_ASSIGN
                    && lhs != NULL_TREE && TREE_CODE(lhs) != SSA_NAME && DECL_P(lhs)) {

                tree attrlist = DECL_ATTRIBUTES(lhs);
                tree attr = lookup_attribute("instrument", attrlist);

                // the princess is in another castle
                if (attr == NULL_TREE) continue;

                // read the variable id that was passed to the `instrument'
                // attribute
                const_tree arg = TREE_VALUE(TREE_VALUE(attr));
                tree var_id = build_int_cst(NULL_TREE, tree_low_cst (arg, 1));

#ifdef DEBUG
                fprintf(stderr, "  > found assignment to instrumented variable `%s': \n\t", get_name(lhs));
                print_gimple_stmt(stderr, curr_stmt, 0, 0);
                fprintf(stderr, "\tbase address of `%s' is %p\n", get_name(lhs), get_base_address(lhs));
#endif

                // insert our instrumentation function before the current
                // statement and pass along the rhs (i.e. the new value)
                tree rhs = gimple_op(curr_stmt, 1);
                insert_instrumentation_fn(curr_stmt, var_id, rhs);
            }
        }
    }
#ifdef DEBUG
    fprintf(stderr, "\n");
#endif

    return 0;
}
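For context, user code that this pass would pick up looks like the following, assuming the plugin registers an "instrument" attribute taking one integer id (the value read via TREE_VALUE (TREE_VALUE (attr)) above); the attribute usage here is hypothetical:

/* Hypothetical usage of the plugin's attribute.  */
int counter __attribute__ ((instrument (42)));

void
tick (void)
{
  counter = counter + 1;   /* an assignment the pass would instrument */
}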
Example no. 14
static void
create_temp_arrays (void)
{
  int i;

  info.default_values = (tree *) xcalloc (info.phi_count, sizeof (tree));
  info.constructors = (VEC (constructor_elt, gc) **) xcalloc (info.phi_count,
							      sizeof (tree));
  info.target_inbound_names = (tree *) xcalloc (info.phi_count, sizeof (tree));
  info.target_outbound_names = (tree *) xcalloc (info.phi_count,
						 sizeof (tree));

  for (i = 0; i < info.phi_count; i++)
    info.constructors[i]
      = VEC_alloc (constructor_elt, gc, tree_low_cst (info.range_size, 1) + 1);
}
Example no. 15
static bool
check_range (gimple swtch)
{
  tree min_case, max_case;
  unsigned int branch_num = gimple_switch_num_labels (swtch);
  tree range_max;

  /* The gimplifier has already sorted the cases by CASE_LOW and ensured there
     is a default label which is the last in the vector.  */

  min_case = gimple_switch_label (swtch, 1);
  info.range_min = CASE_LOW (min_case);

  gcc_assert (branch_num > 1);
  gcc_assert (CASE_LOW (gimple_switch_label (swtch, 0)) == NULL_TREE);
  max_case = gimple_switch_label (swtch, branch_num - 1);
  if (CASE_HIGH (max_case) != NULL_TREE)
    range_max = CASE_HIGH (max_case);
  else
    range_max = CASE_LOW (max_case);

  gcc_assert (info.range_min);
  gcc_assert (range_max);

  info.range_size = int_const_binop (MINUS_EXPR, range_max, info.range_min, 0);

  gcc_assert (info.range_size);
  if (!host_integerp (info.range_size, 1))
    {
      info.reason = "index range way too large or otherwise unusable.\n";
      return false;
    }

  if ((unsigned HOST_WIDE_INT) tree_low_cst (info.range_size, 1)
      > ((unsigned) branch_num * SWITCH_CONVERSION_BRANCH_RATIO))
    {
      info.reason = "the maximum range-branch ratio exceeded.\n";
      return false;
    }

  return true;
}
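A worked instance of the final ratio test, assuming the customary SWITCH_CONVERSION_BRANCH_RATIO of 8 (the macro's value is not shown in this snippet):

#include <stdio.h>

int
main (void)
{
  unsigned ratio = 8;       /* assumed value of the macro */
  unsigned branch_num = 3;  /* two cases plus the default label */

  /* Range 0..100 with 3 labels: 100 > 24, conversion rejected.  */
  printf ("%d\n", 100 > branch_num * ratio);
  /* Range 0..20 with 3 labels: 20 <= 24, conversion allowed.  */
  printf ("%d\n", 20 > branch_num * ratio);
  return 0;
}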
Example no. 16
static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the 
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
Example no. 17
static bool
try_unroll_loop_completely (struct loop *loop,
			    edge exit, tree niter,
			    enum unroll_level ul,
			    HOST_WIDE_INT maxiter,
			    location_t locus)
{
  unsigned HOST_WIDE_INT n_unroll, ninsns, max_unroll, unr_insns;
  gimple cond;
  struct loop_size size;
  bool n_unroll_found = false;
  edge edge_to_cancel = NULL;

  /* See if we proved the number of iterations to be a low constant.

     EXIT is an edge that will be removed in all but the last iteration of
     the loop.

     EDGE_TO_CANCEL is an edge that will be removed from the last iteration
     of the unrolled sequence and is expected to make the final loop not
     rolling.

     If the number of executions of the loop is determined by a standard
     induction variable test, then EXIT and EDGE_TO_CANCEL are the two
     edges leaving the iv test.  */
  if (host_integerp (niter, 1))
    {
      n_unroll = tree_low_cst (niter, 1);
      n_unroll_found = true;
      edge_to_cancel = EDGE_SUCC (exit->src, 0);
      if (edge_to_cancel == exit)
	edge_to_cancel = EDGE_SUCC (exit->src, 1);
    }
  /* We do not know the number of iterations and thus we cannot eliminate
     the EXIT edge.  */
  else
    exit = NULL;

  /* See if we can improve our estimate by using recorded loop bounds.  */
  if (maxiter >= 0
      && (!n_unroll_found || (unsigned HOST_WIDE_INT)maxiter < n_unroll))
    {
      n_unroll = maxiter;
      n_unroll_found = true;
      /* The loop terminates before the IV variable test, so we cannot
	 remove it in the last iteration.  */
      edge_to_cancel = NULL;
    }

  if (!n_unroll_found)
    return false;

  max_unroll = PARAM_VALUE (PARAM_MAX_COMPLETELY_PEEL_TIMES);
  if (n_unroll > max_unroll)
    return false;

  if (!edge_to_cancel)
    edge_to_cancel = loop_edge_to_cancel (loop);

  if (n_unroll)
    {
      sbitmap wont_exit;
      edge e;
      unsigned i;
      bool large;
      vec<edge> to_remove = vNULL;
      if (ul == UL_SINGLE_ITER)
	return false;

      large = tree_estimate_loop_size
		 (loop, exit, edge_to_cancel, &size,
		  PARAM_VALUE (PARAM_MAX_COMPLETELY_PEELED_INSNS));
      ninsns = size.overall;
      if (large)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Not unrolling loop %d: it is too large.\n",
		     loop->num);
	  return false;
	}

      unr_insns = estimated_unrolled_size (&size, n_unroll);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  Loop size: %d\n", (int) ninsns);
	  fprintf (dump_file, "  Estimated size after unrolling: %d\n",
		   (int) unr_insns);
	}

      /* If the code is going to shrink, we don't need to be extra cautious
	 on guessing if the unrolling is going to be profitable.  */
      if (unr_insns
	  /* If there is IV variable that will become constant, we save
	     one instruction in the loop prologue we do not account
	     otherwise.  */
	  <= ninsns + (size.constant_iv != false))
	;
      /* We unroll only inner loops, because we do not consider it profitable
	 otherwise.  We can still cancel the loopback edge of a loop that does
	 not roll; this is always a good idea.  */
      else if (ul == UL_NO_GROWTH)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Not unrolling loop %d: size would grow.\n",
		     loop->num);
	  return false;
	}
      /* Outer loops tend to be less interesting candidates for complete
	 unrolling unless we can do a lot of propagation into the inner loop
	 body.  For now we disable outer loop unrolling when the code would
	 grow.  */
      else if (loop->inner)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Not unrolling loop %d: "
		     "it is not innermost and code would grow.\n",
		     loop->num);
	  return false;
	}
      /* If there is a call on a hot path through the loop, then
	 there is most probably not much to optimize.  */
      else if (size.num_non_pure_calls_on_hot_path)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Not unrolling loop %d: "
		     "contains call and code would grow.\n",
		     loop->num);
	  return false;
	}
      /* If there is a pure/const call in the loop, then we
	 can still optimize the unrolled loop body if it contains
	 some other interesting code than the calls and the code
	 storing or accumulating the return value.  */
      else if (size.num_pure_calls_on_hot_path
	       /* One IV increment, one test, one ivtmp store and
		  one useful stmt.  That is about the minimal loop
		  doing a pure call.  */
	       && (size.non_call_stmts_on_hot_path
		   <= 3 + size.num_pure_calls_on_hot_path))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Not unrolling loop %d: "
		     "contains just pure calls and code would grow.\n",
		     loop->num);
	  return false;
	}
      /* Complete unrolling is a major win when control flow is removed and
	 one big basic block is created.  If the loop contains control flow
	 the optimization may still be a win because of eliminating the loop
	 overhead, but it also may blow the branch predictor tables.
	 Limit the number of branches on the hot path through the peeled
	 sequence.  */
      else if (size.num_branches_on_hot_path * (int)n_unroll
	       > PARAM_VALUE (PARAM_MAX_PEEL_BRANCHES))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Not unrolling loop %d: "
		     " number of branches on hot path in the unrolled sequence"
		     " reach --param max-peel-branches limit.\n",
		     loop->num);
	  return false;
	}
      else if (unr_insns
	       > (unsigned) PARAM_VALUE (PARAM_MAX_COMPLETELY_PEELED_INSNS))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Not unrolling loop %d: "
		     "(--param max-completely-peeled-insns limit reached).\n",
		     loop->num);
	  return false;
	}

      initialize_original_copy_tables ();
      wont_exit = sbitmap_alloc (n_unroll + 1);
      bitmap_ones (wont_exit);
      bitmap_clear_bit (wont_exit, 0);

      if (!gimple_duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
						 n_unroll, wont_exit,
						 exit, &to_remove,
						 DLTHE_FLAG_UPDATE_FREQ
						 | DLTHE_FLAG_COMPLETTE_PEEL))
	{
          free_original_copy_tables ();
	  free (wont_exit);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Failed to duplicate the loop\n");
	  return false;
	}

      FOR_EACH_VEC_ELT (to_remove, i, e)
	{
	  bool ok = remove_path (e);
	  gcc_assert (ok);
	}

      to_remove.release ();
      free (wont_exit);
      free_original_copy_tables ();
    }
Example no. 18
static unsigned
self_reuse_distance (data_reference_p dr, unsigned *loop_sizes, unsigned n,
		     struct loop *loop)
{
  tree stride, access_fn;
  HOST_WIDE_INT *strides, astride;
  VEC (tree, heap) *access_fns;
  tree ref = DR_REF (dr);
  unsigned i, ret = ~0u;

  /* In the following example:

     for (i = 0; i < N; i++)
       for (j = 0; j < N; j++)
         use (a[j][i]);
     the same cache line is accessed each N steps (except if the change from
     i to i + 1 crosses the boundary of the cache line).  Thus, for self-reuse,
     we cannot rely purely on the results of the data dependence analysis.

     Instead, we compute the stride of the reference in each loop, and
     consider the innermost loop in which the stride is smaller than the
     cache line.  */

  strides = XCNEWVEC (HOST_WIDE_INT, n);
  access_fns = DR_ACCESS_FNS (dr);

  for (i = 0; VEC_iterate (tree, access_fns, i, access_fn); i++)
    {
      /* Keep track of the reference corresponding to the subscript, so that we
	 know its stride.  */
      while (handled_component_p (ref) && TREE_CODE (ref) != ARRAY_REF)
	ref = TREE_OPERAND (ref, 0);
      
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  stride = TYPE_SIZE_UNIT (TREE_TYPE (ref));
	  if (host_integerp (stride, 1))
	    astride = tree_low_cst (stride, 1);
	  else
	    astride = L1_CACHE_LINE_SIZE;

	  ref = TREE_OPERAND (ref, 0);
	}
      else
	astride = 1;

      add_subscript_strides (access_fn, astride, strides, n, loop);
    }

  for (i = n; i-- > 0; )
    {
      unsigned HOST_WIDE_INT s;

      s = strides[i] < 0 ?  -strides[i] : strides[i];

      if (s < (unsigned) L1_CACHE_LINE_SIZE
	  && (loop_sizes[i]
	      > (unsigned) (L1_CACHE_SIZE_BYTES / NONTEMPORAL_FRACTION)))
	{
	  ret = loop_sizes[i];
	  break;
	}
    }

  free (strides);
  return ret;
}
Example no. 19
static void
emit_case_bit_tests (gimple swtch, tree index_expr,
                     tree minval, tree range)
{
    struct case_bit_test test[MAX_CASE_BIT_TESTS];
    unsigned int i, j, k;
    unsigned int count;

    basic_block switch_bb = gimple_bb (swtch);
    basic_block default_bb, new_default_bb, new_bb;
    edge default_edge;
    bool update_dom = dom_info_available_p (CDI_DOMINATORS);

    vec<basic_block> bbs_to_fix_dom = vNULL;

    tree index_type = TREE_TYPE (index_expr);
    tree unsigned_index_type = unsigned_type_for (index_type);
    unsigned int branch_num = gimple_switch_num_labels (swtch);

    gimple_stmt_iterator gsi;
    gimple shift_stmt;

    tree idx, tmp, csui;
    tree word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
    tree word_mode_zero = fold_convert (word_type_node, integer_zero_node);
    tree word_mode_one = fold_convert (word_type_node, integer_one_node);

    memset (&test, 0, sizeof (test));

    /* Get the edge for the default case.  */
    tmp = gimple_switch_default_label (swtch);
    default_bb = label_to_block (CASE_LABEL (tmp));
    default_edge = find_edge (switch_bb, default_bb);

    /* Go through all case labels, and collect the case labels, profile
       counts, and other information we need to build the branch tests.  */
    count = 0;
    for (i = 1; i < branch_num; i++)
    {
        unsigned int lo, hi;
        tree cs = gimple_switch_label (swtch, i);
        tree label = CASE_LABEL (cs);
        edge e = find_edge (switch_bb, label_to_block (label));
        for (k = 0; k < count; k++)
            if (e == test[k].target_edge)
                break;

        if (k == count)
        {
            gcc_checking_assert (count < MAX_CASE_BIT_TESTS);
            test[k].hi = 0;
            test[k].lo = 0;
            test[k].target_edge = e;
            test[k].label = label;
            test[k].bits = 1;
            count++;
        }
        else
            test[k].bits++;

        lo = tree_low_cst (int_const_binop (MINUS_EXPR,
                                            CASE_LOW (cs), minval),
                           1);
        if (CASE_HIGH (cs) == NULL_TREE)
            hi = lo;
        else
            hi = tree_low_cst (int_const_binop (MINUS_EXPR,
                                                CASE_HIGH (cs), minval),
                               1);

        for (j = lo; j <= hi; j++)
            if (j >= HOST_BITS_PER_WIDE_INT)
                test[k].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
            else
                test[k].lo |= (HOST_WIDE_INT) 1 << j;
    }

    qsort (test, count, sizeof(*test), case_bit_test_cmp);

    /* We generate two jumps to the default case label.
       Split the default edge, so that we don't have to do any PHI node
       updating.  */
    new_default_bb = split_edge (default_edge);

    if (update_dom)
    {
        bbs_to_fix_dom.create (10);
        bbs_to_fix_dom.quick_push (switch_bb);
        bbs_to_fix_dom.quick_push (default_bb);
        bbs_to_fix_dom.quick_push (new_default_bb);
    }

    /* Now build the test-and-branch code.  */

    gsi = gsi_last_bb (switch_bb);

    /* idx = (unsigned)x - minval.  */
    idx = fold_convert (unsigned_index_type, index_expr);
    idx = fold_build2 (MINUS_EXPR, unsigned_index_type, idx,
                       fold_convert (unsigned_index_type, minval));
    idx = force_gimple_operand_gsi (&gsi, idx,
                                    /*simple=*/true, NULL_TREE,
                                    /*before=*/true, GSI_SAME_STMT);

    /* if (idx > range) goto default */
    range = force_gimple_operand_gsi (&gsi,
                                      fold_convert (unsigned_index_type, range),
                                      /*simple=*/true, NULL_TREE,
                                      /*before=*/true, GSI_SAME_STMT);
    tmp = fold_build2 (GT_EXPR, boolean_type_node, idx, range);
    new_bb = hoist_edge_and_branch_if_true (&gsi, tmp, default_edge, update_dom);
    if (update_dom)
        bbs_to_fix_dom.quick_push (new_bb);
    gcc_assert (gimple_bb (swtch) == new_bb);
    gsi = gsi_last_bb (new_bb);

    /* Any blocks dominated by the GIMPLE_SWITCH, but that are not successors
       of NEW_BB, are still immediately dominated by SWITCH_BB.  Make it so.  */
    if (update_dom)
    {
        vec<basic_block> dom_bbs;
        basic_block dom_son;

        dom_bbs = get_dominated_by (CDI_DOMINATORS, new_bb);
        FOR_EACH_VEC_ELT (dom_bbs, i, dom_son)
        {
            edge e = find_edge (new_bb, dom_son);
            if (e && single_pred_p (e->dest))
                continue;
            set_immediate_dominator (CDI_DOMINATORS, dom_son, switch_bb);
            bbs_to_fix_dom.safe_push (dom_son);
        }
        dom_bbs.release ();
    }
Example no. 20
static unsigned HOST_WIDE_INT
addr_object_size (tree ptr, int object_size_type)
{
  tree pt_var;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  if (REFERENCE_CLASS_P (pt_var))
    pt_var = get_base_address (pt_var);

  if (pt_var
      && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
      && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
      && (unsigned HOST_WIDE_INT)
	 tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1) < offset_limit)
    {
      tree bytes;

      if (pt_var != TREE_OPERAND (ptr, 0))
	{
	  tree var;

	  if (object_size_type & 1)
	    {
	      var = TREE_OPERAND (ptr, 0);

	      while (var != pt_var
		      && TREE_CODE (var) != BIT_FIELD_REF
		      && TREE_CODE (var) != COMPONENT_REF
		      && TREE_CODE (var) != ARRAY_REF
		      && TREE_CODE (var) != ARRAY_RANGE_REF
		      && TREE_CODE (var) != REALPART_EXPR
		      && TREE_CODE (var) != IMAGPART_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
		var = TREE_OPERAND (var, 0);
	      if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
		  || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
		  || tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)),
				      TYPE_SIZE_UNIT (TREE_TYPE (var))))
		var = pt_var;
	    }
	  else
	    var = pt_var;

	  bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
	  if (bytes != error_mark_node)
	    {
	      if (TREE_CODE (bytes) == INTEGER_CST
		  && tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes))
		bytes = size_zero_node;
	      else
		bytes = size_binop (MINUS_EXPR,
				    TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes);
	    }
	}
      else
	bytes = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));

      if (host_integerp (bytes, 1))
	return tree_low_cst (bytes, 1);
    }

  return unknown[object_size_type];
}
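What this computes is observable from user code through __builtin_object_size: for a pointer into a known object, mode 0 returns the bytes remaining up to the end of the object, which is the size_binop (MINUS_EXPR, ...) above. For example:

#include <stdio.h>

char buf[10];

int
main (void)
{
  /* 10 - 4 = 6 bytes remain from &buf[4] to the end of buf.  */
  printf ("%zu\n", __builtin_object_size (&buf[4], 0));
  return 0;
}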
Example no. 21
void
use_thunk (tree thunk_fndecl, bool emit_p)
{
    tree a, t, function, alias;
    tree virtual_offset;
    HOST_WIDE_INT fixed_offset, virtual_value;
    bool this_adjusting = DECL_THIS_THUNK_P (thunk_fndecl);

    /* We should have called finish_thunk to give it a name.  */
    gcc_assert (DECL_NAME (thunk_fndecl));

    /* We should never be using an alias, always refer to the
       aliased thunk.  */
    gcc_assert (!THUNK_ALIAS (thunk_fndecl));

    if (TREE_ASM_WRITTEN (thunk_fndecl))
        return;

    function = THUNK_TARGET (thunk_fndecl);
    if (DECL_RESULT (thunk_fndecl))
        /* We already turned this thunk into an ordinary function.
           There's no need to process this thunk again.  */
        return;

    if (DECL_THUNK_P (function))
        /* The target is itself a thunk, process it now.  */
        use_thunk (function, emit_p);

    /* Thunks are always addressable; they only appear in vtables.  */
    TREE_ADDRESSABLE (thunk_fndecl) = 1;

    /* Figure out what function is being thunked to.  It's referenced in
       this translation unit.  */
    TREE_ADDRESSABLE (function) = 1;
    mark_used (function);
    if (!emit_p)
        return;

    if (TARGET_USE_LOCAL_THUNK_ALIAS_P (function))
        alias = make_alias_for_thunk (function);
    else
        alias = function;

    fixed_offset = THUNK_FIXED_OFFSET (thunk_fndecl);
    virtual_offset = THUNK_VIRTUAL_OFFSET (thunk_fndecl);

    if (virtual_offset)
    {
        if (!this_adjusting)
            virtual_offset = BINFO_VPTR_FIELD (virtual_offset);
        virtual_value = tree_low_cst (virtual_offset, /*pos=*/0);
        gcc_assert (virtual_value);
    }
    else
        virtual_value = 0;

    /* And, if we need to emit the thunk, it's used.  */
    mark_used (thunk_fndecl);
    /* This thunk is actually defined.  */
    DECL_EXTERNAL (thunk_fndecl) = 0;
    /* The linkage of the function may have changed.  FIXME in linkage
       rewrite.  */
    TREE_PUBLIC (thunk_fndecl) = TREE_PUBLIC (function);
    DECL_VISIBILITY (thunk_fndecl) = DECL_VISIBILITY (function);
    DECL_VISIBILITY_SPECIFIED (thunk_fndecl)
        = DECL_VISIBILITY_SPECIFIED (function);
    if (DECL_ONE_ONLY (function))
        make_decl_one_only (thunk_fndecl);

    if (flag_syntax_only)
    {
        TREE_ASM_WRITTEN (thunk_fndecl) = 1;
        return;
    }

    push_to_top_level ();

    if (TARGET_USE_LOCAL_THUNK_ALIAS_P (function)
            && targetm.have_named_sections)
    {
        resolve_unique_section (function, 0, flag_function_sections);

        if (DECL_SECTION_NAME (function) != NULL && DECL_ONE_ONLY (function))
        {
            resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

            /* Output the thunk into the same section as function.  */
            DECL_SECTION_NAME (thunk_fndecl) = DECL_SECTION_NAME (function);
        }
    }

    /* The back-end expects DECL_INITIAL to contain a BLOCK, so we
       create one.  */
    DECL_INITIAL (thunk_fndecl) = make_node (BLOCK);

    /* Set up cloned argument trees for the thunk.  */
    t = NULL_TREE;
    for (a = DECL_ARGUMENTS (function); a; a = TREE_CHAIN (a))
    {
        tree x = copy_node (a);
        TREE_CHAIN (x) = t;
        DECL_CONTEXT (x) = thunk_fndecl;
        SET_DECL_RTL (x, NULL_RTX);
        DECL_HAS_VALUE_EXPR_P (x) = 0;
        t = x;
    }
    a = nreverse (t);
    DECL_ARGUMENTS (thunk_fndecl) = a;
    BLOCK_VARS (DECL_INITIAL (thunk_fndecl)) = a;

    if (this_adjusting
            && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
                    virtual_value, alias))
    {
        const char *fnname;
        current_function_decl = thunk_fndecl;
        DECL_RESULT (thunk_fndecl)
            = build_decl (RESULT_DECL, 0, integer_type_node);
        fnname = XSTR (XEXP (DECL_RTL (thunk_fndecl), 0), 0);
        init_function_start (thunk_fndecl);
        current_function_is_thunk = 1;
        assemble_start_function (thunk_fndecl, fnname);

        targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
                                         fixed_offset, virtual_value, alias);

        assemble_end_function (thunk_fndecl, fnname);
        init_insn_lengths ();
        current_function_decl = 0;
        cfun = 0;
        TREE_ASM_WRITTEN (thunk_fndecl) = 1;
    }
    else
    {
        /* If this is a covariant thunk, or we don't have the necessary
           code for efficient thunks, generate a thunk function that
           just makes a call to the real function.  Unfortunately, this
           doesn't work for varargs.  */

        if (varargs_function_p (function))
            error ("generic thunk code fails for method %q#D which uses %<...%>",
                   function);

        DECL_RESULT (thunk_fndecl) = NULL_TREE;

        start_preparsed_function (thunk_fndecl, NULL_TREE, SF_PRE_PARSED);
        /* We don't bother with a body block for thunks.  */

        /* There's no need to check accessibility inside the thunk body.  */
        push_deferring_access_checks (dk_no_check);

        t = a;
        if (this_adjusting)
            t = thunk_adjust (t, /*this_adjusting=*/1,
                              fixed_offset, virtual_offset);

        /* Build up the call to the real function.  */
        t = tree_cons (NULL_TREE, t, NULL_TREE);
        for (a = TREE_CHAIN (a); a; a = TREE_CHAIN (a))
            t = tree_cons (NULL_TREE, a, t);
        t = nreverse (t);
        t = build_call (alias, t);
        CALL_FROM_THUNK_P (t) = 1;

        if (VOID_TYPE_P (TREE_TYPE (t)))
            finish_expr_stmt (t);
        else
        {
            if (!this_adjusting)
            {
                tree cond = NULL_TREE;

                if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
                {
                    /* If the return type is a pointer, we need to
                       protect against NULL.  We know there will be an
                       adjustment, because that's why we're emitting a
                       thunk.  */
                    t = save_expr (t);
                    cond = cp_convert (boolean_type_node, t);
                }

                t = thunk_adjust (t, /*this_adjusting=*/0,
                                  fixed_offset, virtual_offset);
                if (cond)
                    t = build3 (COND_EXPR, TREE_TYPE (t), cond, t,
                                cp_convert (TREE_TYPE (t), integer_zero_node));
            }
            if (IS_AGGR_TYPE (TREE_TYPE (t)))
                t = build_cplus_new (TREE_TYPE (t), t);
            finish_return_stmt (t);
        }

        /* Since we want to emit the thunk, we explicitly mark its name as
           referenced.  */
        mark_decl_referenced (thunk_fndecl);

        /* But we don't want debugging information about it.  */
        DECL_IGNORED_P (thunk_fndecl) = 1;

        /* Re-enable access control.  */
        pop_deferring_access_checks ();

        thunk_fndecl = finish_function (0);
        tree_lowering_passes (thunk_fndecl);
        expand_body (thunk_fndecl);
    }

    pop_from_top_level ();
}
Example no. 22
tree
make_thunk (tree function, bool this_adjusting,
            tree fixed_offset, tree virtual_offset)
{
    HOST_WIDE_INT d;
    tree thunk;

    gcc_assert (TREE_CODE (function) == FUNCTION_DECL);
    /* We can have `this' thunks to covariant thunks, but not vice versa.  */
    gcc_assert (!DECL_THIS_THUNK_P (function));
    gcc_assert (!DECL_RESULT_THUNK_P (function) || this_adjusting);

    /* Scale the VIRTUAL_OFFSET to be in terms of bytes.  */
    if (this_adjusting && virtual_offset)
        virtual_offset
            = size_binop (MULT_EXPR,
                          virtual_offset,
                          convert (ssizetype,
                                   TYPE_SIZE_UNIT (vtable_entry_type)));

    d = tree_low_cst (fixed_offset, 0);

    /* See if we already have the thunk in question.  For this_adjusting
       thunks VIRTUAL_OFFSET will be an INTEGER_CST, for covariant thunks it
       will be a BINFO.  */
    for (thunk = DECL_THUNKS (function); thunk; thunk = TREE_CHAIN (thunk))
        if (DECL_THIS_THUNK_P (thunk) == this_adjusting
                && THUNK_FIXED_OFFSET (thunk) == d
                && !virtual_offset == !THUNK_VIRTUAL_OFFSET (thunk)
                && (!virtual_offset
                    || (this_adjusting
                        ? tree_int_cst_equal (THUNK_VIRTUAL_OFFSET (thunk),
                                              virtual_offset)
                        : THUNK_VIRTUAL_OFFSET (thunk) == virtual_offset)))
            return thunk;

    /* All thunks must be created before FUNCTION is actually emitted;
       the ABI requires that all thunks be emitted together with the
       function to which they transfer control.  */
    gcc_assert (!TREE_ASM_WRITTEN (function));
    /* Likewise, we can only be adding thunks to a function declared in
       the class currently being laid out.  */
    gcc_assert (TYPE_SIZE (DECL_CONTEXT (function))
                && TYPE_BEING_DEFINED (DECL_CONTEXT (function)));

    thunk = build_decl (FUNCTION_DECL, NULL_TREE, TREE_TYPE (function));
    DECL_LANG_SPECIFIC (thunk) = DECL_LANG_SPECIFIC (function);
    cxx_dup_lang_specific_decl (thunk);
    DECL_THUNKS (thunk) = NULL_TREE;

    DECL_CONTEXT (thunk) = DECL_CONTEXT (function);
    TREE_READONLY (thunk) = TREE_READONLY (function);
    TREE_THIS_VOLATILE (thunk) = TREE_THIS_VOLATILE (function);
    TREE_PUBLIC (thunk) = TREE_PUBLIC (function);
    SET_DECL_THUNK_P (thunk, this_adjusting);
    THUNK_TARGET (thunk) = function;
    THUNK_FIXED_OFFSET (thunk) = d;
    THUNK_VIRTUAL_OFFSET (thunk) = virtual_offset;
    THUNK_ALIAS (thunk) = NULL_TREE;

    /* The thunk itself is not a constructor or destructor, even if
       the thing it is thunking to is.  */
    DECL_INTERFACE_KNOWN (thunk) = 1;
    DECL_NOT_REALLY_EXTERN (thunk) = 1;
    DECL_SAVED_FUNCTION_DATA (thunk) = NULL;
    DECL_DESTRUCTOR_P (thunk) = 0;
    DECL_CONSTRUCTOR_P (thunk) = 0;
    DECL_EXTERNAL (thunk) = 1;
    DECL_ARTIFICIAL (thunk) = 1;
    /* Even if this thunk is a member of a local class, we don't
       need a static chain.  */
    DECL_NO_STATIC_CHAIN (thunk) = 1;
    /* The THUNK is not a pending inline, even if the FUNCTION is.  */
    DECL_PENDING_INLINE_P (thunk) = 0;
    DECL_INLINE (thunk) = 0;
    DECL_DECLARED_INLINE_P (thunk) = 0;
    /* Nor has it been deferred.  */
    DECL_DEFERRED_FN (thunk) = 0;
    /* Nor is it a template instantiation.  */
    DECL_USE_TEMPLATE (thunk) = 0;
    DECL_TEMPLATE_INFO (thunk) = NULL;

    /* Add it to the list of thunks associated with FUNCTION.  */
    TREE_CHAIN (thunk) = DECL_THUNKS (function);
    DECL_THUNKS (function) = thunk;

    return thunk;
}
Example no. 23
static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
					    object_size_type & ~1);
	}
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      if (sz != unknown[object_size_type])
	{
	  double_int dsz = double_int::from_uhwi (sz) - mem_ref_offset (pt_var);
	  if (dsz.is_negative ())
	    sz = 0;
	  else if (dsz.fits_uhwi ())
	    sz = dsz.to_uhwi ();
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && host_integerp (DECL_SIZE_UNIT (pt_var), 1)
	   && (unsigned HOST_WIDE_INT)
	        tree_low_cst (DECL_SIZE_UNIT (pt_var), 1) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	   && (unsigned HOST_WIDE_INT)
	      tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return unknown[object_size_type];
      else
	var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
Example no. 24
unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
{
  return tree_low_cst (t, 1);
}
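This wrapper marks the API migration: host_integerp (t, 1) became tree_fits_uhwi_p (t), and tree_low_cst (t, 1) became tree_to_uhwi (t). The guarded pattern used throughout the examples above therefore reads, in the newer spelling (a GCC-internal fragment, not a standalone program):

  unsigned HOST_WIDE_INT size;
  if (tree_fits_uhwi_p (t))       /* was: host_integerp (t, 1) */
    size = tree_to_uhwi (t);      /* was: tree_low_cst (t, 1) */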
Example no. 25
static int
plain_type_1 (tree type, int level)
{
  if (type == 0)
    type = void_type_node;
  else if (type == error_mark_node)
    type = integer_type_node;
  else
    type = TYPE_MAIN_VARIANT (type);

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case NULLPTR_TYPE:
      return T_VOID;
    case BOOLEAN_TYPE:
    case INTEGER_TYPE:
      {
	int size = int_size_in_bytes (type) * BITS_PER_UNIT;

	/* Carefully distinguish all the standard types of C,
	   without messing up if the language is not C.
	   Note that we check only for the names that contain spaces;
	   other names might occur by coincidence in other languages.  */
	if (TYPE_NAME (type) != 0
	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	    && DECL_NAME (TYPE_NAME (type)) != 0
	    && TREE_CODE (DECL_NAME (TYPE_NAME (type))) == IDENTIFIER_NODE)
	  {
	    const char *const name
	      = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));

	    if (!strcmp (name, "char"))
	      return T_CHAR;
	    if (!strcmp (name, "unsigned char"))
	      return T_UCHAR;
	    if (!strcmp (name, "signed char"))
	      return T_CHAR;
	    if (!strcmp (name, "int"))
	      return T_INT;
	    if (!strcmp (name, "unsigned int"))
	      return T_UINT;
	    if (!strcmp (name, "short int"))
	      return T_SHORT;
	    if (!strcmp (name, "short unsigned int"))
	      return T_USHORT;
	    if (!strcmp (name, "long int"))
	      return T_LONG;
	    if (!strcmp (name, "long unsigned int"))
	      return T_ULONG;
	  }

	if (size == INT_TYPE_SIZE)
	  return (TYPE_UNSIGNED (type) ? T_UINT : T_INT);
	if (size == CHAR_TYPE_SIZE)
	  return (TYPE_UNSIGNED (type) ? T_UCHAR : T_CHAR);
	if (size == SHORT_TYPE_SIZE)
	  return (TYPE_UNSIGNED (type) ? T_USHORT : T_SHORT);
	if (size == LONG_TYPE_SIZE)
	  return (TYPE_UNSIGNED (type) ? T_ULONG : T_LONG);
	if (size == LONG_LONG_TYPE_SIZE)	/* better than nothing */
	  return (TYPE_UNSIGNED (type) ? T_ULONG : T_LONG);
	return 0;
      }

    case REAL_TYPE:
      {
	int precision = TYPE_PRECISION (type);
	if (precision == FLOAT_TYPE_SIZE)
	  return T_FLOAT;
	if (precision == DOUBLE_TYPE_SIZE)
	  return T_DOUBLE;
#ifdef EXTENDED_SDB_BASIC_TYPES
	if (precision == LONG_DOUBLE_TYPE_SIZE)
	  return T_LNGDBL;
#else
	if (precision == LONG_DOUBLE_TYPE_SIZE)
	  return T_DOUBLE;	/* better than nothing */
#endif
	return 0;
      }

    case ARRAY_TYPE:
      {
	int m;
	if (level >= 6)
	  return T_VOID;
	else
	  m = plain_type_1 (TREE_TYPE (type), level+1);
	if (sdb_n_dims < SDB_MAX_DIM)
	  sdb_dims[sdb_n_dims++]
	    = (TYPE_DOMAIN (type)
	       && TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != 0
	       && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
	       && host_integerp (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), 0)
	       && host_integerp (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), 0)
	       ? (tree_low_cst (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), 0)
		  - tree_low_cst (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), 0) + 1)
	       : 0);

	return PUSH_DERIVED_LEVEL (DT_ARY, m);
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ENUMERAL_TYPE:
      {
	const char *tag;
#ifdef SDB_ALLOW_FORWARD_REFERENCES
	sdbout_record_type_name (type);
#endif
#ifndef SDB_ALLOW_UNKNOWN_REFERENCES
	if ((TREE_ASM_WRITTEN (type) && KNOWN_TYPE_TAG (type) != 0)
#ifdef SDB_ALLOW_FORWARD_REFERENCES
	    || TYPE_MODE (type) != VOIDmode
#endif
	    )
#endif
	  {
	    /* Output the referenced structure tag name
	       only if the .def has already been finished.
	       At least on 386, the Unix assembler
	       cannot handle forward references to tags.  */
	    /* But the 88100, it requires them, sigh...  */
	    /* And the MIPS requires unknown refs as well...  */
	    tag = KNOWN_TYPE_TAG (type);
	    PUT_SDB_TAG (tag);
	    /* These 3 lines used to follow the close brace.
	       However, a size of 0 without a tag implies a tag of 0,
	       so if we don't know a tag, we can't mention the size.  */
	    sdb_type_size = int_size_in_bytes (type);
	    if (sdb_type_size < 0)
	      sdb_type_size = 0;
	  }
	return ((TREE_CODE (type) == RECORD_TYPE) ? T_STRUCT
		: (TREE_CODE (type) == UNION_TYPE) ? T_UNION
		: (TREE_CODE (type) == QUAL_UNION_TYPE) ? T_UNION
		: T_ENUM);
      }
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	int m;
	if (level >= 6)
	  return T_VOID;
	else
	  m = plain_type_1 (TREE_TYPE (type), level+1);
	return PUSH_DERIVED_LEVEL (DT_PTR, m);
      }
    case FUNCTION_TYPE:
    case METHOD_TYPE:
      {
	int m;
	if (level >= 6)
	  return T_VOID;
	else
	  m = plain_type_1 (TREE_TYPE (type), level+1);
	return PUSH_DERIVED_LEVEL (DT_FCN, m);
      }
    default:
      return 0;
    }
}
Example no. 26
static void
sdbout_one_type (tree type)
{
  if (current_function_decl != NULL_TREE
      && DECL_SECTION_NAME (current_function_decl) != NULL_TREE)
    ; /* Don't change section amid function.  */
  else
    switch_to_section (text_section);

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ENUMERAL_TYPE:
      type = TYPE_MAIN_VARIANT (type);
      /* Don't output a type twice.  */
      if (TREE_ASM_WRITTEN (type))
	/* James said test TREE_ASM_BEING_WRITTEN here.  */
	return;

      /* Output nothing if type is not yet defined.  */
      if (!COMPLETE_TYPE_P (type))
	return;

      TREE_ASM_WRITTEN (type) = 1;

      /* This is reputed to cause trouble with the following case,
	 but perhaps checking TYPE_SIZE above will fix it.  */

      /* Here is a testcase:

	struct foo {
	  struct badstr *bbb;
	} forwardref;

	typedef struct intermediate {
	  int aaaa;
	} intermediate_ref;

	typedef struct badstr {
	  int ccccc;
	} badtype;   */

      /* This change, which ought to make better output,
	 used to make the COFF assembler unhappy.
	 Changes involving KNOWN_TYPE_TAG may fix the problem.  */
      /* Before really doing anything, output types we want to refer to.  */
      /* Note that in version 1 the following two lines
	 are not used if forward references are in use.  */
      if (TREE_CODE (type) != ENUMERAL_TYPE)
	sdbout_field_types (type);

      /* Output a structure type.  */
      {
	int size = int_size_in_bytes (type);
	int member_scl = 0;
	tree tem;

	/* Record the type tag, but not in its permanent place just yet.  */
	sdbout_record_type_name (type);

	PUT_SDB_DEF (KNOWN_TYPE_TAG (type));

	switch (TREE_CODE (type))
	  {
	  case UNION_TYPE:
	  case QUAL_UNION_TYPE:
	    PUT_SDB_SCL (C_UNTAG);
	    PUT_SDB_TYPE (T_UNION);
	    member_scl = C_MOU;
	    break;

	  case RECORD_TYPE:
	    PUT_SDB_SCL (C_STRTAG);
	    PUT_SDB_TYPE (T_STRUCT);
	    member_scl = C_MOS;
	    break;

	  case ENUMERAL_TYPE:
	    PUT_SDB_SCL (C_ENTAG);
	    PUT_SDB_TYPE (T_ENUM);
	    member_scl = C_MOE;
	    break;

	  default:
	    break;
	  }

	PUT_SDB_SIZE (size);
	PUT_SDB_ENDEF;

	/* Print out the base class information with fields
	   named after the types they hold.  */
	/* This is only relevant to aggregate types.  TYPE_BINFO is used
	   for other purposes in an ENUMERAL_TYPE, so we must exclude that
	   case.  */
	if (TREE_CODE (type) != ENUMERAL_TYPE && TYPE_BINFO (type))
	  {
	    int i;
	    tree binfo, child;

	    for (binfo = TYPE_BINFO (type), i = 0;
		 BINFO_BASE_ITERATE (binfo, i, child); i++)
	      {
		tree child_type = BINFO_TYPE (child);
		tree child_type_name;

		if (TYPE_NAME (child_type) == 0)
		  continue;
		if (TREE_CODE (TYPE_NAME (child_type)) == IDENTIFIER_NODE)
		  child_type_name = TYPE_NAME (child_type);
		else if (TREE_CODE (TYPE_NAME (child_type)) == TYPE_DECL)
		  {
		    child_type_name = DECL_NAME (TYPE_NAME (child_type));
		    if (child_type_name && template_name_p (child_type_name))
		      child_type_name
			= DECL_ASSEMBLER_NAME (TYPE_NAME (child_type));
		  }
		else
		  continue;

		PUT_SDB_DEF (IDENTIFIER_POINTER (child_type_name));
		PUT_SDB_INT_VAL (tree_low_cst (BINFO_OFFSET (child), 0));
		PUT_SDB_SCL (member_scl);
		sdbout_type (BINFO_TYPE (child));
		PUT_SDB_ENDEF;
	      }
	  }

	/* Output the individual fields.  */

	if (TREE_CODE (type) == ENUMERAL_TYPE)
	  {
	    for (tem = TYPE_VALUES (type); tem; tem = TREE_CHAIN (tem))
	      {
	        tree value = TREE_VALUE (tem);

	        if (TREE_CODE (value) == CONST_DECL)
	          value = DECL_INITIAL (value);

	        if (host_integerp (value, 0))
		  {
		    PUT_SDB_DEF (IDENTIFIER_POINTER (TREE_PURPOSE (tem)));
		    PUT_SDB_INT_VAL (tree_low_cst (value, 0));
		    PUT_SDB_SCL (C_MOE);
		    PUT_SDB_TYPE (T_MOE);
		    PUT_SDB_ENDEF;
		  }
	      }
	  }
	else			/* record or union type */
	  for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	    /* Output the name, type, position (in bits), size (in bits)
	       of each field.  */

	    /* Omit here the nameless fields that are used to skip bits.
	       Also omit fields with variable size or position.
	       Also omit non FIELD_DECL nodes that GNU C++ may put here.  */
	    if (TREE_CODE (tem) == FIELD_DECL
		&& DECL_NAME (tem)
		&& DECL_SIZE (tem)
		&& host_integerp (DECL_SIZE (tem), 1)
		&& host_integerp (bit_position (tem), 0))
	      {
		const char *name;

		name = IDENTIFIER_POINTER (DECL_NAME (tem));
		PUT_SDB_DEF (name);
		if (DECL_BIT_FIELD_TYPE (tem))
		  {
		    PUT_SDB_INT_VAL (int_bit_position (tem));
		    PUT_SDB_SCL (C_FIELD);
		    sdbout_type (DECL_BIT_FIELD_TYPE (tem));
		    PUT_SDB_SIZE (tree_low_cst (DECL_SIZE (tem), 1));
		  }
		else
		  {
		    PUT_SDB_INT_VAL (int_bit_position (tem) / BITS_PER_UNIT);
		    PUT_SDB_SCL (member_scl);
		    sdbout_type (TREE_TYPE (tem));
		  }
		PUT_SDB_ENDEF;
	      }
	/* Output the end of a structure, union, or enumeral definition.  */

	PUT_SDB_PLAIN_DEF ("eos");
	PUT_SDB_INT_VAL (size);
	PUT_SDB_SCL (C_EOS);
	PUT_SDB_TAG (KNOWN_TYPE_TAG (type));
	PUT_SDB_SIZE (size);
	PUT_SDB_ENDEF;
	break;
      }

    default:
      break;
    }
}
Example no. 27
bool
cp_dump_tree (void* dump_info, tree t)
{
  enum tree_code code;
  dump_info_p di = (dump_info_p) dump_info;

  /* Figure out what kind of node this is.  */
  code = TREE_CODE (t);

  if (DECL_P (t))
    {
      if (DECL_LANG_SPECIFIC (t) && DECL_LANGUAGE (t) != lang_cplusplus)
	dump_string (di, language_to_string (DECL_LANGUAGE (t)));
    }

  switch (code)
    {
    case IDENTIFIER_NODE:
      if (IDENTIFIER_OPNAME_P (t))
	{
	  dump_string (di, "operator");
	  return true;
	}
      else if (IDENTIFIER_TYPENAME_P (t))
	{
	  dump_child ("tynm", TREE_TYPE (t));
	  return true;
	}
      break;

    case OFFSET_TYPE:
      dump_string (di, "ptrmem");
      dump_child ("ptd", TYPE_PTRMEM_POINTED_TO_TYPE (t));
      dump_child ("cls", TYPE_PTRMEM_CLASS_TYPE (t));
      return true;

    case RECORD_TYPE:
      if (TYPE_PTRMEMFUNC_P (t))
	{
	  dump_string (di, "ptrmem");
	  dump_child ("ptd", TYPE_PTRMEM_POINTED_TO_TYPE (t));
	  dump_child ("cls", TYPE_PTRMEM_CLASS_TYPE (t));
	  return true;
	}
      /* Fall through.  */

    case UNION_TYPE:
      /* Is it a type used as a base? */
      if (TYPE_CONTEXT (t) && TREE_CODE (TYPE_CONTEXT (t)) == TREE_CODE (t)
	  && CLASSTYPE_AS_BASE (TYPE_CONTEXT (t)) == t)
	{
	  dump_child ("bfld", TYPE_CONTEXT (t));
	  return true;
	}
      
      if (! IS_AGGR_TYPE (t))
	break;

      dump_child ("vfld", TYPE_VFIELD (t));
      if (CLASSTYPE_TEMPLATE_SPECIALIZATION (t))
	dump_string (di, "spec");

      if (!dump_flag (di, TDF_SLIM, t) && TYPE_BINFO (t))
	{
	  int i;
	  tree binfo;
	  tree base_binfo;
	  
	  for (binfo = TYPE_BINFO (t), i = 0;
	       BINFO_BASE_ITERATE (binfo, i, base_binfo); ++i)
	    {
	      dump_child ("base", BINFO_TYPE (base_binfo));
	      if (BINFO_VIRTUAL_P (base_binfo)) 
		dump_string (di, "virtual");
	      dump_access (di, base_binfo);
	    }
	}
      break;

    case FIELD_DECL:
      dump_access (di, t);
      if (DECL_MUTABLE_P (t))
        dump_string(di, "mutable");
      break;

    case VAR_DECL:
      if (TREE_CODE (CP_DECL_CONTEXT (t)) == RECORD_TYPE)
        dump_access (di, t);
      if (TREE_STATIC (t) && !TREE_PUBLIC (t))
        dump_string (di, "static");
      break; 

    case FUNCTION_DECL:
      if (!DECL_THUNK_P (t))
	{
	  if (DECL_OVERLOADED_OPERATOR_P (t))
	    {
	      dump_string (di, "operator");
	      dump_op (di, t);
	    }
	  if (DECL_FUNCTION_MEMBER_P (t)) 
	    {
	      dump_string (di, "member");
	      dump_access (di, t);
	    }
          if (DECL_PURE_VIRTUAL_P (t))
            dump_string (di, "pure");
          if (DECL_VIRTUAL_P (t))
            dump_string (di, "virtual");
	  if (DECL_CONSTRUCTOR_P (t))
	    dump_string (di, "constructor");
	  if (DECL_DESTRUCTOR_P (t))
	    dump_string (di, "destructor");
	  if (DECL_CONV_FN_P (t))
	    dump_string (di, "conversion");
	  if (DECL_GLOBAL_CTOR_P (t))
	    dump_string (di, "global init");
	  if (DECL_GLOBAL_DTOR_P (t))
	    dump_string (di, "global fini");
	  if (DECL_FRIEND_PSEUDO_TEMPLATE_INSTANTIATION (t))
	    dump_string (di, "pseudo tmpl");
	}
      else
	{
	  tree virt = THUNK_VIRTUAL_OFFSET (t);
	  
	  dump_string (di, "thunk");
	  if (DECL_THIS_THUNK_P (t))
	    dump_string (di, "this adjusting");
	  else
	    {
	      dump_string (di, "result adjusting");
	      if (virt)
		virt = BINFO_VPTR_FIELD (virt);
	    }
	  dump_int (di, "fixd", THUNK_FIXED_OFFSET (t));
	  if (virt)
	    dump_int (di, "virt", tree_low_cst (virt, 0));
	  dump_child ("fn", DECL_INITIAL (t));
	}
      break;

    case NAMESPACE_DECL:
      if (DECL_NAMESPACE_ALIAS (t))
	dump_child ("alis", DECL_NAMESPACE_ALIAS (t));
      else if (!dump_flag (di, TDF_SLIM, t))
	dump_child ("dcls", cp_namespace_decls (t));
      break;

    case TEMPLATE_DECL:
      dump_child ("rslt", DECL_TEMPLATE_RESULT (t));
      dump_child ("inst", DECL_TEMPLATE_INSTANTIATIONS (t));
      dump_child ("spcs", DECL_TEMPLATE_SPECIALIZATIONS (t));
      dump_child ("prms", DECL_TEMPLATE_PARMS (t));
      break;

    case OVERLOAD:
      dump_child ("crnt", OVL_CURRENT (t));
      dump_child ("chan", OVL_CHAIN (t));
      break;

    case TRY_BLOCK:
      dump_stmt (di, t);
      if (CLEANUP_P (t))
	dump_string (di, "cleanup");
      dump_child ("body", TRY_STMTS (t));
      dump_child ("hdlr", TRY_HANDLERS (t));
      break;

    case EH_SPEC_BLOCK:
      dump_stmt (di, t);
      dump_child ("body", EH_SPEC_STMTS (t));
      dump_child ("raises", EH_SPEC_RAISES (t));
      break;

    case PTRMEM_CST:
      dump_child ("clas", PTRMEM_CST_CLASS (t));
      dump_child ("mbr", PTRMEM_CST_MEMBER (t));
      break;

    case THROW_EXPR:
      /* These nodes are unary, but do not have code class `1'.  */
      dump_child ("op 0", TREE_OPERAND (t, 0));
      break;

    case AGGR_INIT_EXPR:
      dump_int (di, "ctor", AGGR_INIT_VIA_CTOR_P (t));
      dump_child ("fn", TREE_OPERAND (t, 0));
      dump_child ("args", TREE_OPERAND (t, 1));
      dump_child ("decl", TREE_OPERAND (t, 2));
      break;
      
    case HANDLER:
      dump_stmt (di, t);
      dump_child ("parm", HANDLER_PARMS (t));
      dump_child ("body", HANDLER_BODY (t));
      break;

    case MUST_NOT_THROW_EXPR:
      dump_stmt (di, t);
      dump_child ("body", TREE_OPERAND (t, 0));
      break;

    case USING_STMT:
      dump_stmt (di, t);
      dump_child ("nmsp", USING_STMT_NAMESPACE (t));
      break;

    case CLEANUP_STMT:
      dump_stmt (di, t);
      dump_child ("decl", CLEANUP_DECL (t));
      dump_child ("expr", CLEANUP_EXPR (t));
      dump_child ("body", CLEANUP_BODY (t));
      break;

    case IF_STMT:
      dump_stmt (di, t);
      dump_child ("cond", IF_COND (t));
      dump_child ("then", THEN_CLAUSE (t));
      dump_child ("else", ELSE_CLAUSE (t));
      break;

    default:
      break;
    }

  return c_dump_tree (di, t);
}
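As a rough illustration (hypothetical input, not part of the dumper itself), a hierarchy like the following exercises the RECORD_TYPE arm above: each direct base produces a "base" child, "virtual" is appended for virtual bases, and dump_access records the access specifier.

struct A { virtual ~A (); };
struct B : virtual A { };   /* "base" child plus the "virtual" string  */
struct C : private B { };   /* "base" child with private access        */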
Example no. 28
/* Define NAME with value TYPE size_unit.  */
static void
builtin_define_type_sizeof (const char *name, tree type)
{
  builtin_define_with_int_value (name,
				 tree_low_cst (TYPE_SIZE_UNIT (type), 1));
}
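A typical call site, sketched from the way GCC's c_cpp_builtins uses this helper (the exact set of macros defined varies between versions):

  builtin_define_type_sizeof ("__SIZEOF_INT__", integer_type_node);
  builtin_define_type_sizeof ("__SIZEOF_LONG__", long_integer_type_node);
  builtin_define_type_sizeof ("__SIZEOF_POINTER__", ptr_type_node);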
Example no. 29
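/* Try to detect pow/powi builtin calls with constant exponent 2 or 0.5
   in LAST_STMT and replace them with a multiplication or a sqrt call
   the vectorizer can handle.  Returns the replacement statement on
   success, NULL otherwise.  */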
static gimple
vect_recog_pow_pattern (gimple last_stmt, tree *type_in, tree *type_out)
{
  tree fn, base, exp = NULL;
  gimple stmt;
  tree var;

  if (!is_gimple_call (last_stmt) || gimple_call_lhs (last_stmt) == NULL)
    return NULL;

  fn = gimple_call_fndecl (last_stmt);
  if (fn == NULL_TREE || DECL_BUILT_IN_CLASS (fn) != BUILT_IN_NORMAL)
    return NULL;

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_POWIF:
    case BUILT_IN_POWI:
    case BUILT_IN_POWF:
    case BUILT_IN_POW:
      base = gimple_call_arg (last_stmt, 0);
      exp = gimple_call_arg (last_stmt, 1);
      if (TREE_CODE (exp) != REAL_CST
	  && TREE_CODE (exp) != INTEGER_CST)
        return NULL;
      break;

    default:
      return NULL;
    }

  /* We now have a pow or powi builtin function call with a constant
     exponent.  */

  *type_out = NULL_TREE;

  /* Catch squaring.  */
  if ((host_integerp (exp, 0)
       && tree_low_cst (exp, 0) == 2)
      || (TREE_CODE (exp) == REAL_CST
          && REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst2)))
    {
      *type_in = TREE_TYPE (base);

      var = vect_recog_temp_ssa_var (TREE_TYPE (base), NULL);
      stmt = gimple_build_assign_with_ops (MULT_EXPR, var, base, base);
      SSA_NAME_DEF_STMT (var) = stmt;
      return stmt;
    }

  /* Catch square root.  */
  if (TREE_CODE (exp) == REAL_CST
      && REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconsthalf))
    {
      tree newfn = mathfn_built_in (TREE_TYPE (base), BUILT_IN_SQRT);
      *type_in = get_vectype_for_scalar_type (TREE_TYPE (base));
      if (*type_in)
	{
	  gimple stmt = gimple_build_call (newfn, 1, base);
	  if (vectorizable_function (stmt, *type_in, *type_in)
	      != NULL_TREE)
	    {
	      var = vect_recog_temp_ssa_var (TREE_TYPE (base), stmt);
	      gimple_call_set_lhs (stmt, var);
	      return stmt;
	    }
	}
    }

  return NULL;
}
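In source-level terms (the rewrite itself happens on GIMPLE, and the names below are only illustrative), the two recognized shapes are:

  y = pow (x, 2.0);   /* becomes:  patt = x * x;     */
  z = pow (x, 0.5);   /* becomes:  patt = sqrt (x);  */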
Example no. 30
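/* Output the thunk THUNK_FNDECL, which adjusts `this' (or the result)
   by a fixed and possibly a vtable-indirect offset before transferring
   to THUNK_TARGET.  When EMIT_P is false, only mark the target as
   used.  */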
void
use_thunk (tree thunk_fndecl, bool emit_p)
{
  tree a, t, function, alias;
  tree virtual_offset;
  HOST_WIDE_INT fixed_offset, virtual_value;
  bool this_adjusting = DECL_THIS_THUNK_P (thunk_fndecl);

  /* We should have called finish_thunk to give it a name.  */
  gcc_assert (DECL_NAME (thunk_fndecl));

  /* We should never be using an alias, always refer to the
     aliased thunk.  */
  gcc_assert (!THUNK_ALIAS (thunk_fndecl));

  if (TREE_ASM_WRITTEN (thunk_fndecl))
    return;

  function = THUNK_TARGET (thunk_fndecl);
  if (DECL_RESULT (thunk_fndecl))
    /* We already turned this thunk into an ordinary function.
       There's no need to process this thunk again.  */
    return;

  if (DECL_THUNK_P (function))
    /* The target is itself a thunk, process it now.  */
    use_thunk (function, emit_p);

  /* Thunks are always addressable; they only appear in vtables.  */
  TREE_ADDRESSABLE (thunk_fndecl) = 1;

  /* Figure out what function is being thunked to.  It's referenced in
     this translation unit.  */
  TREE_ADDRESSABLE (function) = 1;
  mark_used (function);
  if (!emit_p)
    return;

  if (0 && TARGET_USE_LOCAL_THUNK_ALIAS_P (function))
   alias = make_alias_for_thunk (function);
  else
   alias = function;

  fixed_offset = THUNK_FIXED_OFFSET (thunk_fndecl);
  virtual_offset = THUNK_VIRTUAL_OFFSET (thunk_fndecl);

  if (virtual_offset)
    {
      if (!this_adjusting)
	virtual_offset = BINFO_VPTR_FIELD (virtual_offset);
      virtual_value = tree_low_cst (virtual_offset, /*pos=*/0);
      gcc_assert (virtual_value);
    }
  else
    virtual_value = 0;

  /* And, if we need to emit the thunk, it's used.  */
  mark_used (thunk_fndecl);
  /* This thunk is actually defined.  */
  DECL_EXTERNAL (thunk_fndecl) = 0;
  /* The linkage of the function may have changed.  FIXME in linkage
     rewrite.  */
  TREE_PUBLIC (thunk_fndecl) = TREE_PUBLIC (function);
  DECL_VISIBILITY (thunk_fndecl) = DECL_VISIBILITY (function);
  DECL_VISIBILITY_SPECIFIED (thunk_fndecl)
    = DECL_VISIBILITY_SPECIFIED (function);
  if (DECL_ONE_ONLY (function) || DECL_WEAK (function))
    make_decl_one_only (thunk_fndecl, cxx_comdat_group (thunk_fndecl));

  if (flag_syntax_only)
    {
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      return;
    }

  push_to_top_level ();

  if (TARGET_USE_LOCAL_THUNK_ALIAS_P (function)
      && targetm.have_named_sections)
    {
      resolve_unique_section (function, 0, flag_function_sections);

      if (DECL_SECTION_NAME (function) != NULL && DECL_ONE_ONLY (function))
	{
	  resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

	  /* Output the thunk into the same section as function.  */
	  DECL_SECTION_NAME (thunk_fndecl) = DECL_SECTION_NAME (function);
	}
    }

  /* Set up cloned argument trees for the thunk.  */
  t = NULL_TREE;
  for (a = DECL_ARGUMENTS (function); a; a = TREE_CHAIN (a))
    {
      tree x = copy_node (a);
      TREE_CHAIN (x) = t;
      DECL_CONTEXT (x) = thunk_fndecl;
      SET_DECL_RTL (x, NULL_RTX);
      DECL_HAS_VALUE_EXPR_P (x) = 0;
      t = x;
    }
  a = nreverse (t);
  DECL_ARGUMENTS (thunk_fndecl) = a;
  TREE_ASM_WRITTEN (thunk_fndecl) = 1;
  cgraph_add_thunk (thunk_fndecl, function,
		    this_adjusting, fixed_offset, virtual_value,
		    virtual_offset, alias);

  if (!this_adjusting
      || !targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					       virtual_value, alias))
    {
      /* If this is a covariant thunk, or we don't have the necessary
	 code for efficient thunks, generate a thunk function that
	 just makes a call to the real function.  Unfortunately, this
	 doesn't work for varargs.  */

      if (varargs_function_p (function))
	error ("generic thunk code fails for method %q#D which uses %<...%>",
	       function);
    }

  pop_from_top_level ();
}
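For context, a hedged sketch of where such a thunk arises (hypothetical classes): under multiple inheritance, the entry for C::g in B's vtable must adjust `this' from the B subobject back to the full C object before transferring to C::g; that is precisely a this-adjusting thunk with a constant fixed_offset, and virtual bases additionally require a virtual_offset fetched through the vtable.

struct A { virtual void f (); };
struct B { virtual void g (); };
struct C : A, B
{
  void g ();   /* B's vtable slot holds a this-adjusting thunk to C::g  */
};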