Example #1
0
static VEC (tree, gc) *
shared_vuses_from_stmt (tree stmt)
{
  ssa_op_iter iter;
  tree vuse;

  if (!stmt)
    return NULL;

  VEC_truncate (tree, shared_lookup_vuses, 0);

  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VUSE)
    VEC_safe_push (tree, gc, shared_lookup_vuses, vuse);

  if (VEC_length (tree, shared_lookup_vuses) > 1)
    sort_vuses (shared_lookup_vuses);

  return shared_lookup_vuses;
}
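Example #1 shows the shared-scratch-vector idiom: one static vector is truncated to length zero, refilled, and sorted only when it holds more than one element. Below is a minimal standalone sketch of the same idiom in plain C; the scratch array, shared_values, and cmp_int are illustrative stand-ins, not GCC's VEC API.

#include <stdio.h>
#include <stdlib.h>

/* Shared scratch buffer, reused across calls like shared_lookup_vuses.  */
static int scratch[16];
static size_t scratch_len;

static int
cmp_int (const void *a, const void *b)
{
  return *(const int *) a - *(const int *) b;
}

/* Refill the shared buffer from SRC, sorting only when needed.  */
static int *
shared_values (const int *src, size_t n)
{
  size_t i;

  scratch_len = 0;                       /* "VEC_truncate (..., 0)"  */
  for (i = 0; i < n && i < 16; i++)
    scratch[scratch_len++] = src[i];     /* "VEC_safe_push"  */

  if (scratch_len > 1)                   /* sort only if it matters  */
    qsort (scratch, scratch_len, sizeof scratch[0], cmp_int);

  return scratch;
}

int
main (void)
{
  int vals[] = { 3, 1, 2 };
  int *v = shared_values (vals, 3);
  for (size_t i = 0; i < scratch_len; i++)
    printf ("%d\n", v[i]);
  return 0;
}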
Example #2
0
static void
ftrace_debug (const struct btrace_function *bfun, const char *prefix)
{
  const char *fun, *file;
  unsigned int ibegin, iend;
  int lbegin, lend, level;

  fun = ftrace_print_function_name (bfun);
  file = ftrace_print_filename (bfun);
  level = bfun->level;

  lbegin = bfun->lbegin;
  lend = bfun->lend;

  ibegin = bfun->insn_offset;
  iend = ibegin + VEC_length (btrace_insn_s, bfun->insn);

  DEBUG_FTRACE ("%s: fun = %s, file = %s, level = %d, lines = [%d; %d], "
		"insn = [%u; %u)", prefix, fun, file, level, lbegin, lend,
		ibegin, iend);
}
Example #3
0
static void
record_btrace_call_history_range (ULONGEST from, ULONGEST to, int flags)
{
  struct btrace_thread_info *btinfo;
  struct cleanup *uiout_cleanup;
  struct ui_out *uiout;
  unsigned int last, begin, end;

  uiout = current_uiout;
  uiout_cleanup = make_cleanup_ui_out_tuple_begin_end (uiout,
						       "func history");
  btinfo = require_btrace ();
  last = VEC_length (btrace_func_s, btinfo->ftrace);

  begin = (unsigned int) from;
  end = (unsigned int) to;

  DEBUG ("func-history (0x%x): [%u; %u[", flags, begin, end);

  /* Check for wrap-arounds.  */
  if (begin != from || end != to)
    error (_("Bad range."));

  if (end <= begin)
    error (_("Bad range."));

  if (last <= begin)
    error (_("Range out of bounds."));

  /* Truncate the range, if necessary.  */
  if (last < end)
    end = last;

  btrace_func_history (btinfo, uiout, begin, end, flags);

  btinfo->func_iterator.begin = begin;
  btinfo->func_iterator.end = end;

  do_cleanups (uiout_cleanup);
}
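The casts to unsigned int followed by the `begin != from` comparison are the whole wrap-around check: a ULONGEST value that does not survive the narrowing round trip is rejected. A standalone sketch of that round-trip idiom; narrow_range is an illustrative helper, not GDB code.

#include <stdio.h>
#include <stdint.h>

/* Reject a 64-bit range that does not fit in unsigned int, using the
   same narrowing round-trip check applied to FROM and TO above.  */
static int
narrow_range (uint64_t from, uint64_t to,
	      unsigned int *begin, unsigned int *end)
{
  *begin = (unsigned int) from;
  *end = (unsigned int) to;

  /* If narrowing lost bits, comparison with the original fails.  */
  if (*begin != from || *end != to)
    return -1;			/* "Bad range."  */
  if (*end <= *begin)
    return -1;
  return 0;
}

int
main (void)
{
  unsigned int b, e;
  printf ("%d\n", narrow_range (1, 10, &b, &e));                 /* 0  */
  printf ("%d\n", narrow_range (1, UINT64_C (1) << 40, &b, &e)); /* -1 */
  return 0;
}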
Example #4
0
static void *
vec_stack_o_reserve_1 (void *vec, int reserve, size_t vec_offset,
		       size_t elt_size, bool exact MEM_STAT_DECL)
{
  bool found;
  unsigned int ix;
  void *newvec;

  found = false;
  for (ix = VEC_length (void_p, stack_vecs); ix > 0; --ix)
    {
      if (VEC_index (void_p, stack_vecs, ix - 1) == vec)
	{
	  VEC_unordered_remove (void_p, stack_vecs, ix - 1);
	  found = true;
	  break;
	}
    }

  if (!found)
    {
      /* VEC is already on the heap.  */
      return vec_heap_o_reserve_1 (vec, reserve, vec_offset, elt_size,
				   exact PASS_MEM_STAT);
    }

  /* Move VEC to the heap.  */
  reserve += ((struct vec_prefix *) vec)->num;
  newvec = vec_heap_o_reserve_1 (NULL, reserve, vec_offset, elt_size,
				 exact PASS_MEM_STAT);
  if (newvec && vec)
    {
      ((struct vec_prefix *) newvec)->num = ((struct vec_prefix *) vec)->num;
      memcpy (((struct vec_prefix *) newvec)->vec,
	      ((struct vec_prefix *) vec)->vec,
	      ((struct vec_prefix *) vec)->num * elt_size);
    }
  return newvec;
}
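The move-to-heap branch above copies the element count out of the old vector's prefix and then the raw payload. A simplified standalone sketch of that move, with struct prefix standing in for struct vec_prefix and move_to_heap as an illustrative name:

#include <stdlib.h>
#include <string.h>

/* Simplified stand-in for struct vec_prefix: a length header followed
   by the element payload.  */
struct prefix
{
  unsigned num;
  char data[];			/* NUM * elt_size bytes  */
};

/* Copy VEC (NUM elements of ELT_SIZE bytes each) into a heap block
   with room for RESERVE extra elements, mirroring the "move VEC to
   the heap" branch above.  */
static struct prefix *
move_to_heap (const struct prefix *vec, unsigned reserve, size_t elt_size)
{
  unsigned total = reserve + (vec ? vec->num : 0);
  struct prefix *newvec = malloc (sizeof *newvec + total * elt_size);

  if (newvec && vec)
    {
      newvec->num = vec->num;
      memcpy (newvec->data, vec->data, vec->num * elt_size);
    }
  else if (newvec)
    newvec->num = 0;
  return newvec;
}

int
main (void)
{
  struct prefix *v = move_to_heap (NULL, 4, sizeof (int));
  free (v);
  return 0;
}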
Example #5
0
static void
btrace_compute_ftrace (struct btrace_thread_info *btinfo,
		       VEC (btrace_block_s) *btrace)
{
  struct btrace_function *begin, *end;
  struct gdbarch *gdbarch;
  unsigned int blk;
  int level;

  DEBUG ("compute ftrace");

  gdbarch = target_gdbarch ();
  begin = btinfo->begin;
  end = btinfo->end;
  level = begin != NULL ? -btinfo->level : INT_MAX;
  blk = VEC_length (btrace_block_s, btrace);

  while (blk != 0)
    {
      btrace_block_s *block;
      CORE_ADDR pc;

      blk -= 1;

      block = VEC_index (btrace_block_s, btrace, blk);
      pc = block->begin;

      for (;;)
	{
	  int size;
Example #6
0
/* Internal helper function to build a Python Tuple from a GDB Vector.
   A line table entry can have multiple PCs for a given source line.
   Construct a Tuple of all entries for the given source line, LINE
   from the line table VEC.  Construct one line table entry object per
   address.  */

static PyObject *
build_line_table_tuple_from_pcs (int line, VEC (CORE_ADDR) *vec)
{
  int vec_len = 0;
  PyObject *tuple;
  CORE_ADDR pc;
  int i;

  vec_len = VEC_length (CORE_ADDR, vec);
  if (vec_len < 1)
    Py_RETURN_NONE;

  tuple = PyTuple_New (vec_len);

  if (tuple == NULL)
    return NULL;

  for (i = 0; VEC_iterate (CORE_ADDR, vec, i, pc); ++i)
    {
      PyObject *obj = build_linetable_entry (line, pc);

      if (obj == NULL)
	{
	  Py_DECREF (tuple);
Example #7
0
                expand_this = C_CPP_HASHNODE (__vector_keyword);
        }
    }
    return expand_this;
}

/* Target hook for resolve_overloaded_builtin ().  Returns a function call
   tree if we can resolve the overloaded builtin.  */
tree
spu_resolve_overloaded_builtin (location_t loc, tree fndecl, void *passed_args)
{
#define SCALAR_TYPE_P(t) (INTEGRAL_TYPE_P (t) \
			  || SCALAR_FLOAT_TYPE_P (t) \
			  || POINTER_TYPE_P (t))
    VEC(tree,gc) *fnargs = (VEC(tree,gc) *) passed_args;
    unsigned int nargs = VEC_length (tree, fnargs);
    int new_fcode, fcode = DECL_FUNCTION_CODE (fndecl) - END_BUILTINS;
    struct spu_builtin_description *desc;
    tree match = NULL_TREE;

    /* The vector types are not available if the backend is not initialized.  */
    gcc_assert (!flag_preprocess_only);

    desc = &spu_builtins[fcode];
    if (desc->type != B_OVERLOAD)
        return NULL_TREE;

    /* Compare the signature of each internal builtin function with the
       function arguments until a match is found. */

    for (new_fcode = fcode + 1; spu_builtins[new_fcode].type == B_INTERNAL;
Example #8
0
/* Walk the decls we marked as necessary and see if they reference new
   variables or functions and add them into the worklists.  */
bool
varpool_analyze_pending_decls (void)
{
  bool changed = false;

  timevar_push (TV_VARPOOL);
  while (varpool_first_unanalyzed_node)
    {
      struct varpool_node *node = varpool_first_unanalyzed_node, *next;
      tree decl = node->decl;
      bool analyzed = node->analyzed;

      varpool_first_unanalyzed_node->analyzed = true;

      varpool_first_unanalyzed_node = varpool_first_unanalyzed_node->next_needed;

      /* When reading back varpool at LTO time, we re-construct the queue in
	 order to get the "needed" list right by inserting all needed nodes
	 into varpool.  We however don't want to re-analyze already analyzed
	 nodes.  */
      if (!analyzed)
	{
	  gcc_assert (!in_lto_p || cgraph_function_flags_ready);
          /* Compute the alignment early so function body expanders are
	     already informed about increased alignment.  */
          align_variable (decl, 0);
	}
      if (node->alias && node->alias_of)
	{
	  struct varpool_node *tgt = varpool_node (node->alias_of);
          struct varpool_node *n;

	  for (n = tgt; n && n->alias;
	       n = n->analyzed ? varpool_alias_aliased_node (n) : NULL)
	    if (n == node)
	      {
		error ("variable %q+D part of alias cycle", node->decl);
		node->alias = false;
		continue;
	      }
	  if (!VEC_length (ipa_ref_t, node->ref_list.references))
	    ipa_record_reference (NULL, node, NULL, tgt, IPA_REF_ALIAS, NULL);
	  /* The C++ FE sometimes changes linkage flags after producing
	     same-body aliases.  */
	  if (node->extra_name_alias)
	    {
	      DECL_WEAK (node->decl) = DECL_WEAK (node->alias_of);
	      TREE_PUBLIC (node->decl) = TREE_PUBLIC (node->alias_of);
	      DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (node->alias_of);
	      DECL_VISIBILITY (node->decl) = DECL_VISIBILITY (node->alias_of);
	      if (TREE_PUBLIC (node->decl))
		{
		  DECL_COMDAT (node->decl) = DECL_COMDAT (node->alias_of);
		  DECL_COMDAT_GROUP (node->decl) = DECL_COMDAT_GROUP (node->alias_of);
		  if (DECL_ONE_ONLY (node->alias_of) && !node->same_comdat_group)
		    {
		      node->same_comdat_group = tgt;
		      if (!tgt->same_comdat_group)
			tgt->same_comdat_group = node;
		      else
			{
			  struct varpool_node *n;
			  for (n = tgt->same_comdat_group;
			       n->same_comdat_group != tgt;
			       n = n->same_comdat_group)
			    ;
			  n->same_comdat_group = node;
			}
		    }
		}
	    }
	}
      else if (DECL_INITIAL (decl))
	record_references_in_initializer (decl, analyzed);
      if (node->same_comdat_group)
	{
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    varpool_mark_needed_node (next);
	}
      changed = true;
    }
  timevar_pop (TV_VARPOOL);
  return changed;
}
Example #9
0
void
streamer_tree_cache_append (struct streamer_tree_cache_d *cache, tree t)
{
  unsigned ix = VEC_length (tree, cache->nodes);
  streamer_tree_cache_insert_1 (cache, t, &ix, false);
}
Example #10
0
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
           && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
          htab_find_with_hash (cp_function_chain->extern_decl_map,
                               &in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                             : TRY_FINALLY_EXPR,
                      void_type_node,
                      CLEANUP_BODY (stmt),
                      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its BLOCK_VARS chained
         list.  */
      if (wtd->bind_expr_stack)
        {
          int i;
          for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
                                                     wtd->bind_expr_stack, i))))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          DECL_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
Example #11
0
int
btrace_data_append (struct btrace_data *dst,
		    const struct btrace_data *src)
{
  switch (src->format)
    {
    case BTRACE_FORMAT_NONE:
      return 0;

    case BTRACE_FORMAT_BTS:
      switch (dst->format)
	{
	default:
	  return -1;

	case BTRACE_FORMAT_NONE:
	  dst->format = BTRACE_FORMAT_BTS;
	  dst->variant.bts.blocks = NULL;

	  /* Fall-through.  */
	case BTRACE_FORMAT_BTS:
	  {
	    unsigned int blk;

	    /* We copy blocks in reverse order to have the oldest block at
	       index zero.  */
	    blk = VEC_length (btrace_block_s, src->variant.bts.blocks);
	    while (blk != 0)
	      {
		btrace_block_s *block;

		block = VEC_index (btrace_block_s, src->variant.bts.blocks,
				   --blk);

		VEC_safe_push (btrace_block_s, dst->variant.bts.blocks, block);
	      }
	  }
	}
      return 0;

    case BTRACE_FORMAT_PT:
      switch (dst->format)
	{
	default:
	  return -1;

	case BTRACE_FORMAT_NONE:
	  dst->format = BTRACE_FORMAT_PT;
	  dst->variant.pt.data = NULL;
	  dst->variant.pt.size = 0;

	  /* Fall-through.  */
	case BTRACE_FORMAT_PT:
	  {
	    gdb_byte *data;
	    size_t size;

	    size = src->variant.pt.size + dst->variant.pt.size;
	    data = (gdb_byte *) xmalloc (size);

	    memcpy (data, dst->variant.pt.data, dst->variant.pt.size);
	    memcpy (data + dst->variant.pt.size, src->variant.pt.data,
		    src->variant.pt.size);

	    xfree (dst->variant.pt.data);

	    dst->variant.pt.data = data;
	    dst->variant.pt.size = size;
	  }
	}
      return 0;
    }

  internal_error (__FILE__, __LINE__, _("Unknown branch trace format."));
}
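The BTRACE_FORMAT_PT branch concatenates raw buffers: allocate the combined size, copy the old data, append the new, free the old block, and install the result. A standalone sketch of that pattern using plain malloc instead of GDB's xmalloc; buffer_append is an illustrative name, and it adds the failure check that xmalloc makes unnecessary.

#include <stdlib.h>
#include <string.h>

/* Append SRC (SRC_SIZE bytes) to *DST (*DST_SIZE bytes), replacing
   the old buffer, as in the BTRACE_FORMAT_PT branch above.  Returns
   0 on success, -1 on allocation failure.  */
static int
buffer_append (unsigned char **dst, size_t *dst_size,
	       const unsigned char *src, size_t src_size)
{
  size_t size = *dst_size + src_size;
  unsigned char *data = malloc (size);

  if (data == NULL)
    return -1;

  if (*dst_size > 0)
    memcpy (data, *dst, *dst_size);
  memcpy (data + *dst_size, src, src_size);
  free (*dst);

  *dst = data;
  *dst_size = size;
  return 0;
}

int
main (void)
{
  unsigned char *buf = NULL;
  size_t len = 0;

  buffer_append (&buf, &len, (const unsigned char *) "ab", 2);
  buffer_append (&buf, &len, (const unsigned char *) "cd", 2);
  free (buf);
  return 0;
}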
Example #12
0
tree
ubsan_create_data (const char *name, location_t loc, ...)
{
    va_list args;
    tree ret, t;
    tree fields[3];
    VEC(tree, gc) *saved_args = NULL;
    size_t i = 0;

    /* Firstly, create a pointer to type descriptor type.  */
    tree td_type = ubsan_type_descriptor_type ();
    TYPE_READONLY (td_type) = 1;
    td_type = build_pointer_type (td_type);

    /* Create the structure type.  */
    ret = make_node (RECORD_TYPE);
    if (loc != UNKNOWN_LOCATION)
    {
        fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
                                ubsan_source_location_type ());
        DECL_CONTEXT (fields[i]) = ret;
        i++;
    }

    va_start (args, loc);
    for (t = va_arg (args, tree); t != NULL_TREE;
            i++, t = va_arg (args, tree))
    {
        gcc_checking_assert (i < 3);
        /* Save the tree argument for later use.  */
        VEC_safe_push (tree, gc, saved_args, t);
        fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
                                td_type);
        DECL_CONTEXT (fields[i]) = ret;
        if (i)
            DECL_CHAIN (fields[i - 1]) = fields[i];
    }
    TYPE_FIELDS (ret) = fields[0];
    TYPE_NAME (ret) = get_identifier (name);
    layout_type (ret);
    va_end (args);

    /* Now, fill in the type.  */
    char tmp_name[32];
    static unsigned int ubsan_var_id_num;
    ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_data", ubsan_var_id_num++);
    tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
                           ret);
    TREE_STATIC (var) = 1;
    TREE_PUBLIC (var) = 0;
    DECL_ARTIFICIAL (var) = 1;
    DECL_IGNORED_P (var) = 1;
    DECL_EXTERNAL (var) = 0;

    VEC(constructor_elt, gc) *v;
    v = VEC_alloc (constructor_elt, gc, i);

    tree ctor = build_constructor (ret, v);

    /* If desirable, set the __ubsan_source_location element.  */
    if (loc != UNKNOWN_LOCATION)
        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, ubsan_source_location (loc));

    size_t nelts = VEC_length (tree, saved_args);
    for (i = 0; i < nelts; i++)
    {
        t = VEC_index (tree, saved_args, i);
        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
    }

    TREE_CONSTANT (ctor) = 1;
    TREE_STATIC (ctor) = 1;
    DECL_INITIAL (var) = ctor;
    rest_of_decl_compilation (var, 1, 0);

    return var;
}
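ubsan_create_data scans its variadic arguments until a NULL_TREE sentinel. The same sentinel-terminated varargs idiom in standalone form; count_args is illustrative.

#include <stdarg.h>
#include <stdio.h>

/* Count the string arguments up to the terminating NULL, the same
   sentinel convention ubsan_create_data uses for its tree args.  */
static int
count_args (const char *first, ...)
{
  va_list args;
  int n = 0;
  const char *t;

  va_start (args, first);
  for (t = first; t != NULL; t = va_arg (args, const char *))
    n++;
  va_end (args);
  return n;
}

int
main (void)
{
  printf ("%d\n", count_args ("a", "b", "c", (const char *) NULL));
  return 0;
}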
Example #13
0
static void
determine_loop_nest_reuse (struct loop *loop, struct mem_ref_group *refs,
			   bool no_other_refs)
{
  struct loop *nest, *aloop;
  VEC (data_reference_p, heap) *datarefs = NULL;
  VEC (ddr_p, heap) *dependences = NULL;
  struct mem_ref_group *gr;
  struct mem_ref *ref, *refb;
  VEC (loop_p, heap) *vloops = NULL;
  unsigned *loop_data_size;
  unsigned i, j, n;
  unsigned volume, dist, adist;
  HOST_WIDE_INT vol;
  data_reference_p dr;
  ddr_p dep;

  if (loop->inner)
    return;

  /* Find the outermost loop of the loop nest of loop (we require that
     there are no sibling loops inside the nest).  */
  nest = loop;
  while (1)
    {
      aloop = loop_outer (nest);

      if (aloop == current_loops->tree_root
	  || aloop->inner->next)
	break;

      nest = aloop;
    }

  /* For each loop, determine the amount of data accessed in each iteration.
     We use this to estimate whether the reference is evicted from the
     cache before its reuse.  */
  find_loop_nest (nest, &vloops);
  n = VEC_length (loop_p, vloops);
  loop_data_size = XNEWVEC (unsigned, n);
  volume = volume_of_references (refs);
  i = n;
  while (i-- != 0)
    {
      loop_data_size[i] = volume;
      /* Bound the volume by the L2 cache size, since above this bound,
	 all dependence distances are equivalent.  */
      if (volume > L2_CACHE_SIZE_BYTES)
	continue;

      aloop = VEC_index (loop_p, vloops, i);
      vol = estimated_loop_iterations_int (aloop, false);
      if (vol < 0)
	vol = expected_loop_iterations (aloop);
      volume *= vol;
    }

  /* Prepare the references in the form suitable for data dependence
     analysis.  We ignore unanalyzable data references (the results
     are used just as a heuristics to estimate temporality of the
     references, hence we do not need to worry about correctness).  */
  for (gr = refs; gr; gr = gr->next)
    for (ref = gr->refs; ref; ref = ref->next)
      {
	dr = create_data_ref (nest, ref->mem, ref->stmt, !ref->write_p);

	if (dr)
	  {
	    ref->reuse_distance = volume;
	    dr->aux = ref;
	    VEC_safe_push (data_reference_p, heap, datarefs, dr);
	  }
	else
	  no_other_refs = false;
      }

  for (i = 0; VEC_iterate (data_reference_p, datarefs, i, dr); i++)
    {
      dist = self_reuse_distance (dr, loop_data_size, n, loop);
      ref = (struct mem_ref *) dr->aux;
      if (ref->reuse_distance > dist)
	ref->reuse_distance = dist;

      if (no_other_refs)
	ref->independent_p = true;
    }

  compute_all_dependences (datarefs, &dependences, vloops, true);

  for (i = 0; VEC_iterate (ddr_p, dependences, i, dep); i++)
    {
      if (DDR_ARE_DEPENDENT (dep) == chrec_known)
	continue;

      ref = (struct mem_ref *) DDR_A (dep)->aux;
      refb = (struct mem_ref *) DDR_B (dep)->aux;

      if (DDR_ARE_DEPENDENT (dep) == chrec_dont_know
	  || DDR_NUM_DIST_VECTS (dep) == 0)
	{
	  /* If the dependence cannot be analyzed, assume that there might be
	     a reuse.  */
	  dist = 0;
      
	  ref->independent_p = false;
	  refb->independent_p = false;
	}
      else
	{
	  /* The distance vectors are normalized to be always lexicographically
	     positive, hence we cannot tell just from them whether DDR_A comes
	     before DDR_B or vice versa.  However, it is not important,
	     anyway -- if DDR_A is close to DDR_B, then it is either reused in
	     DDR_B (and it is not nontemporal), or it reuses the value of DDR_B
	     in cache (and marking it as nontemporal would not affect
	     anything).  */

	  dist = volume;
	  for (j = 0; j < DDR_NUM_DIST_VECTS (dep); j++)
	    {
	      adist = volume_of_dist_vector (DDR_DIST_VECT (dep, j),
					     loop_data_size, n);

	      /* If this is a dependence in the innermost loop (i.e., the
		 distances in all superloops are zero) and it is not
		 the trivial self-dependence with distance zero, record that
		 the references are not completely independent.  */
	      if (lambda_vector_zerop (DDR_DIST_VECT (dep, j), n - 1)
		  && (ref != refb
		      || DDR_DIST_VECT (dep, j)[n-1] != 0))
		{
		  ref->independent_p = false;
		  refb->independent_p = false;
		}

	      /* Ignore accesses closer than
		 L1_CACHE_SIZE_BYTES / NONTEMPORAL_FRACTION,
		 so that we use nontemporal prefetches, e.g. if a single memory
		 location is accessed several times in a single iteration of
		 the loop.  */
	      if (adist < L1_CACHE_SIZE_BYTES / NONTEMPORAL_FRACTION)
		continue;

	      if (adist < dist)
		dist = adist;
	    }
	}

      if (ref->reuse_distance > dist)
	ref->reuse_distance = dist;
      if (refb->reuse_distance > dist)
	refb->reuse_distance = dist;
    }

  free_dependence_relations (dependences);
  free_data_refs (datarefs);
  free (loop_data_size);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Reuse distances:\n");
      for (gr = refs; gr; gr = gr->next)
	for (ref = gr->refs; ref; ref = ref->next)
	  fprintf (dump_file, " ref %p distance %u\n",
		   (void *) ref, ref->reuse_distance);
    }
}
Example #14
0
/* Find list of values for that we want to measure histograms.  */
static void
rtl_find_values_to_profile (histogram_values *values)
{
  rtx insn;
  unsigned i, libcall_level;

  life_analysis (NULL, PROP_DEATH_NOTES);

  *values = VEC_alloc (histogram_value, 0);
  libcall_level = 0;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	libcall_level++;

      /* Do not instrument values inside libcalls (we are going to split block
	 due to instrumentation, and libcall blocks should be local to a single
	 basic block).  */
      if (!libcall_level)
	insn_values_to_profile (insn, values);

      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	{
	  gcc_assert (libcall_level > 0);
	  libcall_level--;
	}
    }
  gcc_assert (libcall_level == 0);

  for (i = 0; i < VEC_length (histogram_value, *values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, *values, i);

      switch (hist->type)
	{
	case HIST_TYPE_INTERVAL:
	  if (dump_file)
	    fprintf (dump_file,
		     "Interval counter for insn %d, range %d -- %d.\n",
		     INSN_UID ((rtx)hist->insn),
		     hist->hdata.intvl.int_start,
		     (hist->hdata.intvl.int_start
		      + hist->hdata.intvl.steps - 1));
	  hist->n_counters = hist->hdata.intvl.steps +
		  (hist->hdata.intvl.may_be_less ? 1 : 0) +
		  (hist->hdata.intvl.may_be_more ? 1 : 0);
	  break;

	case HIST_TYPE_POW2:
	  if (dump_file)
	    fprintf (dump_file,
		     "Pow2 counter for insn %d.\n",
		     INSN_UID ((rtx)hist->insn));
	  hist->n_counters 
		= GET_MODE_BITSIZE (hist->mode)
		  +  (hist->hdata.pow2.may_be_other ? 1 : 0);
	  break;

	case HIST_TYPE_SINGLE_VALUE:
	  if (dump_file)
	    fprintf (dump_file,
		     "Single value counter for insn %d.\n",
		     INSN_UID ((rtx)hist->insn));
	  hist->n_counters = 3;
	  break;

	case HIST_TYPE_CONST_DELTA:
	  if (dump_file)
	    fprintf (dump_file,
		     "Constant delta counter for insn %d.\n",
		     INSN_UID ((rtx)hist->insn));
	  hist->n_counters = 4;
	  break;

	default:
	  abort ();
	}
    }
  allocate_reg_info (max_reg_num (), FALSE, FALSE);
}
Example #15
0
static void
btrace_compute_ftrace_bts (struct thread_info *tp,
			   const struct btrace_data_bts *btrace)
{
  struct btrace_thread_info *btinfo;
  struct btrace_function *begin, *end;
  struct gdbarch *gdbarch;
  unsigned int blk, ngaps;
  int level;

  gdbarch = target_gdbarch ();
  btinfo = &tp->btrace;
  begin = btinfo->begin;
  end = btinfo->end;
  ngaps = btinfo->ngaps;
  level = begin != NULL ? -btinfo->level : INT_MAX;
  blk = VEC_length (btrace_block_s, btrace->blocks);

  while (blk != 0)
    {
      btrace_block_s *block;
      CORE_ADDR pc;

      blk -= 1;

      block = VEC_index (btrace_block_s, btrace->blocks, blk);
      pc = block->begin;

      for (;;)
	{
	  struct btrace_insn insn;
	  int size;

	  /* We should hit the end of the block.  Warn if we went too far.  */
	  if (block->end < pc)
	    {
	      /* Indicate the gap in the trace - unless we're at the
		 beginning.  */
	      if (begin != NULL)
		{
		  warning (_("Recorded trace may be corrupted around %s."),
			   core_addr_to_string_nz (pc));

		  end = ftrace_new_gap (end, BDE_BTS_OVERFLOW);
		  ngaps += 1;
		}
	      break;
	    }

	  end = ftrace_update_function (end, pc);
	  if (begin == NULL)
	    begin = end;

	  /* Maintain the function level offset.
	     For all but the last block, we do it here.  */
	  if (blk != 0)
	    level = min (level, end->level);

	  size = 0;
	  TRY
	    {
	      size = gdb_insn_length (gdbarch, pc);
	    }
	  CATCH (error, RETURN_MASK_ERROR)
	    {
	    }
	  END_CATCH

	  insn.pc = pc;
	  insn.size = size;
	  insn.iclass = ftrace_classify_insn (gdbarch, pc);

	  ftrace_update_insns (end, &insn);

	  /* We're done once we pushed the instruction at the end.  */
	  if (block->end == pc)
	    break;

	  /* We can't continue if we fail to compute the size.  */
	  if (size <= 0)
	    {
	      warning (_("Recorded trace may be incomplete around %s."),
		       core_addr_to_string_nz (pc));

	      /* Indicate the gap in the trace.  We just added INSN so we're
		 not at the beginning.  */
	      end = ftrace_new_gap (end, BDE_BTS_INSN_SIZE);
	      ngaps += 1;

	      break;
	    }

	  pc += size;

	  /* Maintain the function level offset.
	     For the last block, we do it here to not consider the last
	     instruction.
	     Since the last instruction corresponds to the current instruction
	     and is not really part of the execution history, it shouldn't
	     affect the level.  */
	  if (blk == 0)
	    level = min (level, end->level);
	}
    }

  btinfo->begin = begin;
  btinfo->end = end;
  btinfo->ngaps = ngaps;

  /* LEVEL is the minimal function level of all btrace function segments.
     Define the global level offset to -LEVEL so all function levels are
     normalized to start at zero.  */
  btinfo->level = -level;
}
Example #16
0
	      pos = int_const_binop (PLUS_EXPR, pos, integer_one_node, 0);
	    } while (!tree_int_cst_lt (high, pos)
		     && tree_int_cst_lt (low, pos));
	  j++;
	}
    }
}

/* If all values in the constructor vector are the same, return the value.
   Otherwise return NULL_TREE.  Not supposed to be called for empty
   vectors.  */

static tree
constructor_contains_same_values_p (VEC (constructor_elt, gc) *vec)
{
  int i, len = VEC_length (constructor_elt, vec);
  tree prev = NULL_TREE;

  for (i = 0; i < len; i++)
    {
      constructor_elt *elt = VEC_index (constructor_elt, vec, i);

      if (!prev)
	prev = elt->value;
      else if (!operand_equal_p (elt->value, prev, OEP_ONLY_CONST))
	return NULL_TREE;
    }
  return prev;
}

/* Create an appropriate array type and declaration and assemble a static array
Example #17
0
static int
btrace_stitch_bts (struct btrace_data_bts *btrace, struct thread_info *tp)
{
  struct btrace_thread_info *btinfo;
  struct btrace_function *last_bfun;
  struct btrace_insn *last_insn;
  btrace_block_s *first_new_block;

  btinfo = &tp->btrace;
  last_bfun = btinfo->end;
  gdb_assert (last_bfun != NULL);
  gdb_assert (!VEC_empty (btrace_block_s, btrace->blocks));

  /* If the existing trace ends with a gap, we just glue the traces
     together.  We need to drop the last (i.e. chronologically first) block
     of the new trace, though, since we can't fill in the start address.  */
  if (VEC_empty (btrace_insn_s, last_bfun->insn))
    {
      VEC_pop (btrace_block_s, btrace->blocks);
      return 0;
    }

  /* Beware that block trace starts with the most recent block, so the
     chronologically first block in the new trace is the last block in
     the new trace's block vector.  */
  first_new_block = VEC_last (btrace_block_s, btrace->blocks);
  last_insn = VEC_last (btrace_insn_s, last_bfun->insn);

  /* If the current PC at the end of the block is the same as in our current
     trace, there are two explanations:
       1. we executed the instruction and some branch brought us back.
       2. we have not made any progress.
     In the first case, the delta trace vector should contain at least two
     entries.
     In the second case, the delta trace vector should contain exactly one
     entry for the partial block containing the current PC.  Remove it.  */
  if (first_new_block->end == last_insn->pc
      && VEC_length (btrace_block_s, btrace->blocks) == 1)
    {
      VEC_pop (btrace_block_s, btrace->blocks);
      return 0;
    }

  DEBUG ("stitching %s to %s", ftrace_print_insn_addr (last_insn),
	 core_addr_to_string_nz (first_new_block->end));

  /* Do a simple sanity check to make sure we don't accidentally end up
     with a bad block.  This should not occur in practice.  */
  if (first_new_block->end < last_insn->pc)
    {
      warning (_("Error while trying to read delta trace.  Falling back to "
		 "a full read."));
      return -1;
    }

  /* We adjust the last block to start at the end of our current trace.  */
  gdb_assert (first_new_block->begin == 0);
  first_new_block->begin = last_insn->pc;

  /* We simply pop the last insn so we can insert it again as part of
     the normal branch trace computation.
     Since instruction iterators are based on indices in the instructions
     vector, we don't leave any pointers dangling.  */
  DEBUG ("pruning insn at %s for stitching",
	 ftrace_print_insn_addr (last_insn));

  VEC_pop (btrace_insn_s, last_bfun->insn);

  /* The instructions vector may become empty temporarily if this has
     been the only instruction in this function segment.
     This violates the invariant but will be remedied shortly by
     btrace_compute_ftrace when we add the new trace.  */

  /* The only case where this would hurt is if the entire trace consisted
     of just that one instruction.  If we remove it, we might turn the now
     empty btrace function segment into a gap.  But we don't want gaps at
     the beginning.  To avoid this, we remove the entire old trace.  */
  if (last_bfun == btinfo->begin && VEC_empty (btrace_insn_s, last_bfun->insn))
    btrace_clear (tp);

  return 0;
}
Example #18
0
static void print_expr(tree node, int depth, tree target)
{
  static struct expr_level* prev = NULL;

  if(find_expr(prev, node))
    {
      out_printf("%s(loop) 0,0\n", space(depth));
      return;
    }

  struct expr_level curr_level;

  curr_level.current = node;
  curr_level.prev = prev;
  prev = &curr_level;

  int num_args = 0;
  char* id = NULL;

  #define DEFTREECODE(_id, _name, _flags, _num_args) case _id: num_args = _num_args; id = #_id; break;

  switch(TREE_CODE(node))
    {
      #include <tree.def>
    default:
      break;
    }

  #undef DEFTREECODE

  unsigned int i;

  switch(TREE_CODE(node))
    {
    case CALL_EXPR:
      {
        int special_args = TREE_INT_CST_LOW(TREE_OPERAND(node,0));
        int elements = print_params(node, depth, false, id, target);
        out_printf("%s%s %d,%d,%p\n", space(depth), id, elements, special_args, node);
      }
      break;

    case CONSTRUCTOR:
      {
        int special_args = VEC_length(constructor_elt, CONSTRUCTOR_ELTS(node));
        int elements = print_params(node, depth, false, id, target);
        out_printf("%s%s %d,%d,%p\n", space(depth), id, elements, special_args, node);
      }
      break;

    default:
      {
        int elements = print_params(node, depth, false, id,target);
        out_printf("%s%s %d,%d,%p\n", space(depth), id, elements, num_args, node);
      }
    }

  print_params(node, depth, true, id,target);
    
  switch(TREE_CODE(node))
    {
    case CONSTRUCTOR:
      {
        tree index, val;
 
        FOR_EACH_CONSTRUCTOR_ELT(CONSTRUCTOR_ELTS(node), i, index, val)
          {
            out_printf("%sCONSTRUCTOR_ELT 0,2\n", space(depth));
            print_expr(index, depth + 1, target);
            print_expr(val, depth + 1, target);
          }
      }
      break;

    default:
      break;
    }
Example #19
0
static void
ctf_write_uploaded_tp (struct trace_file_writer *self,
		       struct uploaded_tp *tp)
{
  struct ctf_trace_file_writer *writer
    = (struct ctf_trace_file_writer *) self;
  int32_t int32;
  int64_t int64;
  uint32_t u32;
  const gdb_byte zero = 0;
  int a;
  char *act;

  /* Event Id.  */
  int32 = CTF_EVENT_ID_TP_DEF;
  ctf_save_align_write (&writer->tcs, (gdb_byte *) &int32, 4, 4);

  /* address */
  int64 = tp->addr;
  ctf_save_align_write (&writer->tcs, (gdb_byte *) &int64, 8, 8);

  /* traceframe_usage */
  int64 = tp->traceframe_usage;
  ctf_save_align_write (&writer->tcs, (gdb_byte *) &int64, 8, 8);

  /* number */
  ctf_save_write_int32 (&writer->tcs, tp->number);

  /* enabled */
  ctf_save_write_int32 (&writer->tcs, tp->enabled);

  /* step */
  ctf_save_write_int32 (&writer->tcs, tp->step);

  /* pass */
  ctf_save_write_int32 (&writer->tcs, tp->pass);

  /* hit_count */
  ctf_save_write_int32 (&writer->tcs, tp->hit_count);

  /* type */
  ctf_save_write_int32 (&writer->tcs, tp->type);

  /* condition  */
  if (tp->cond != NULL)
    ctf_save_write (&writer->tcs, (gdb_byte *) tp->cond, strlen (tp->cond));
  ctf_save_write (&writer->tcs, &zero, 1);

  /* actions */
  u32 = VEC_length (char_ptr, tp->actions);
  ctf_save_align_write (&writer->tcs, (gdb_byte *) &u32, 4, 4);
  for (a = 0; VEC_iterate (char_ptr, tp->actions, a, act); ++a)
    ctf_save_write (&writer->tcs, (gdb_byte *) act, strlen (act) + 1);

  /* step_actions */
  u32 = VEC_length (char_ptr, tp->step_actions);
  ctf_save_align_write (&writer->tcs, (gdb_byte *) &u32, 4, 4);
  for (a = 0; VEC_iterate (char_ptr, tp->step_actions, a, act); ++a)
    ctf_save_write (&writer->tcs, (gdb_byte *) act, strlen (act) + 1);

  /* at_string */
  if (tp->at_string != NULL)
    ctf_save_write (&writer->tcs, (gdb_byte *) tp->at_string,
		    strlen (tp->at_string));
  ctf_save_write (&writer->tcs, &zero, 1);

  /* cond_string */
  if (tp->cond_string != NULL)
    ctf_save_write (&writer->tcs, (gdb_byte *) tp->cond_string,
		    strlen (tp->cond_string));
  ctf_save_write (&writer->tcs, &zero, 1);

  /* cmd_strings */
  u32 = VEC_length (char_ptr, tp->cmd_strings);
  ctf_save_align_write (&writer->tcs, (gdb_byte *) &u32, 4, 4);
  for (a = 0; VEC_iterate (char_ptr, tp->cmd_strings, a, act); ++a)
    ctf_save_write (&writer->tcs, (gdb_byte *) act, strlen (act) + 1);

}
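The writer uses two framing conventions: optional strings are written with an unconditional NUL terminator (so an absent string reads back as empty), and string arrays are a 32-bit count followed by NUL-terminated entries. A standalone sketch of both, omitting the alignment handling of ctf_save_align_write and writing the count in host byte order; write_opt_string and write_string_array are illustrative names.

#include <stdio.h>
#include <string.h>
#include <stdint.h>

/* Write S as an optional NUL-terminated field: the terminator is
   written even when S is NULL, so an absent string reads back as "".  */
static void
write_opt_string (FILE *f, const char *s)
{
  if (s != NULL)
    fwrite (s, 1, strlen (s), f);
  fputc ('\0', f);
}

/* Write a counted string array: a 32-bit count (host byte order for
   this demo), then each entry NUL-terminated, mirroring the
   "actions" blocks above.  */
static void
write_string_array (FILE *f, const char *const *v, uint32_t n)
{
  uint32_t i;

  fwrite (&n, sizeof n, 1, f);
  for (i = 0; i < n; i++)
    fwrite (v[i], 1, strlen (v[i]) + 1, f);
}

int
main (void)
{
  const char *acts[] = { "collect $regs", "end" };
  FILE *f = fopen ("tp.bin", "wb");

  if (f == NULL)
    return 1;
  write_opt_string (f, NULL);
  write_string_array (f, acts, 2);
  fclose (f);
  return 0;
}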
Example #20
0
VEC (char_ptr) *
location_completer (struct cmd_list_element *ignore, 
		    const char *text, const char *word)
{
  int n_syms, n_files, ix;
  VEC (char_ptr) *fn_list = NULL;
  VEC (char_ptr) *list = NULL;
  const char *p;
  int quote_found = 0;
  int quoted = *text == '\'' || *text == '"';
  int quote_char = '\0';
  const char *colon = NULL;
  char *file_to_match = NULL;
  const char *symbol_start = text;
  const char *orig_text = text;
  size_t text_len;

  /* Do we have an unquoted colon, as in "break foo.c:bar"?  */
  for (p = text; *p != '\0'; ++p)
    {
      if (*p == '\\' && p[1] == '\'')
	p++;
      else if (*p == '\'' || *p == '"')
	{
	  quote_found = *p;
	  quote_char = *p++;
	  while (*p != '\0' && *p != quote_found)
	    {
	      if (*p == '\\' && p[1] == quote_found)
		p++;
	      p++;
	    }

	  if (*p == quote_found)
	    quote_found = 0;
	  else
	    break;		/* Hit the end of text.  */
	}
#if HAVE_DOS_BASED_FILE_SYSTEM
      /* If we have a DOS-style absolute file name at the beginning of
	 TEXT, and the colon after the drive letter is the only colon
	 we found, pretend the colon is not there.  */
      else if (p < text + 3 && *p == ':' && p == text + 1 + quoted)
	;
#endif
      else if (*p == ':' && !colon)
	{
	  colon = p;
	  symbol_start = p + 1;
	}
      else if (strchr (current_language->la_word_break_characters(), *p))
	symbol_start = p + 1;
    }

  if (quoted)
    text++;
  text_len = strlen (text);

  /* Where is the file name?  */
  if (colon)
    {
      char *s;

      file_to_match = (char *) xmalloc (colon - text + 1);
      strncpy (file_to_match, text, colon - text + 1);
      /* Remove trailing colons and quotes from the file name.  */
      for (s = file_to_match + (colon - text);
	   s > file_to_match;
	   s--)
	if (*s == ':' || *s == quote_char)
	  *s = '\0';
    }
  /* If the text includes a colon, they want completion only on a
     symbol name after the colon.  Otherwise, we need to complete on
     symbols as well as on files.  */
  if (colon)
    {
      list = make_file_symbol_completion_list (symbol_start, word,
					       file_to_match);
      xfree (file_to_match);
    }
  else
    {
      list = make_symbol_completion_list (symbol_start, word);
      /* If text includes characters which cannot appear in a file
	 name, they cannot be asking for completion on files.  */
      if (strcspn (text, 
		   gdb_completer_file_name_break_characters) == text_len)
	fn_list = make_source_files_completion_list (text, text);
    }

  n_syms = VEC_length (char_ptr, list);
  n_files = VEC_length (char_ptr, fn_list);

  /* Catenate fn_list[] onto the end of list[].  */
  if (!n_syms)
    {
      VEC_free (char_ptr, list); /* Paranoia.  */
      list = fn_list;
      fn_list = NULL;
    }
  else
    {
      char *fn;

      for (ix = 0; VEC_iterate (char_ptr, fn_list, ix, fn); ++ix)
	VEC_safe_push (char_ptr, list, fn);
      VEC_free (char_ptr, fn_list);
    }

  if (n_syms && n_files)
    {
      /* Nothing.  */
    }
  else if (n_files)
    {
      char *fn;

      /* If we only have file names as possible completion, we should
	 bring them in sync with what rl_complete expects.  The
	 problem is that if the user types "break /foo/b TAB", and the
	 possible completions are "/foo/bar" and "/foo/baz"
	 rl_complete expects us to return "bar" and "baz", without the
	 leading directories, as possible completions, because `word'
	 starts at the "b".  But we ignore the value of `word' when we
	 call make_source_files_completion_list above (because that
	 would not DTRT when the completion results in both symbols
	 and file names), so make_source_files_completion_list returns
	 the full "/foo/bar" and "/foo/baz" strings.  This produces
	 wrong results when, e.g., there's only one possible
	 completion, because rl_complete will prepend "/foo/" to each
	 candidate completion.  The loop below removes that leading
	 part.  */
      for (ix = 0; VEC_iterate (char_ptr, list, ix, fn); ++ix)
	{
	  memmove (fn, fn + (word - text),
		   strlen (fn) + 1 - (word - text));
	}
    }
  else if (!n_syms)
    {
      /* No completions at all.  As the final resort, try completing
	 on the entire text as a symbol.  */
      list = make_symbol_completion_list (orig_text, word);
    }

  return list;
}
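The memmove in the file-name branch strips the leading directory part in place, keeping the terminating NUL. The same one-liner isolated; strip_prefix is an illustrative name.

#include <stdio.h>
#include <string.h>

/* Strip the first OFFSET bytes of FN in place, keeping the trailing
   NUL, exactly as the memmove in the completer above.  */
static void
strip_prefix (char *fn, size_t offset)
{
  memmove (fn, fn + offset, strlen (fn) + 1 - offset);
}

int
main (void)
{
  char fn[] = "/foo/bar";
  strip_prefix (fn, 5);	/* as if `word' started at the "b"  */
  puts (fn);		/* prints "bar"  */
  return 0;
}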
Example #21
0
int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;
  tree block = NULL;
  VEC(gimple, heap) *bind_expr_stack = NULL;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p, pre_p, post_p);
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	 BLOCK, and append an IMPORTED_DECL to its BLOCK_VARS chained
	 list.  */

      bind_expr_stack = gimple_bind_expr_stack ();
      if (bind_expr_stack)
	{
	  int i;
	  for (i = VEC_length (gimple, bind_expr_stack) - 1; i >= 0; i--)
	    if ((block = gimple_bind_block (VEC_index (gimple,
						       bind_expr_stack,
						       i))))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (*expr_p, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (*expr_p, 0);
	  TREE_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GIMPLE.  */
      *expr_p = NULL;
      ret = GS_ALL_DONE;
      break;

    case FOR_STMT:
      gimplify_for_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case WHILE_STMT:
      gimplify_while_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case DO_STMT:
      gimplify_do_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case SWITCH_STMT:
      gimplify_switch_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case OMP_FOR:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case CONTINUE_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_CONTINUE, NOT_TAKEN));
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_continue)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case BREAK_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_break)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    default:
      ret = c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
Example #22
0
static void
record_btrace_call_history (int size, int flags)
{
  struct btrace_thread_info *btinfo;
  struct cleanup *uiout_cleanup;
  struct ui_out *uiout;
  unsigned int context, last, begin, end;

  uiout = current_uiout;
  uiout_cleanup = make_cleanup_ui_out_tuple_begin_end (uiout,
						       "insn history");
  btinfo = require_btrace ();
  last = VEC_length (btrace_func_s, btinfo->ftrace);

  context = abs (size);
  begin = btinfo->func_iterator.begin;
  end = btinfo->func_iterator.end;

  DEBUG ("func-history (0x%x): %d, prev: [%u; %u[", flags, size, begin, end);

  if (context == 0)
    error (_("Bad record function-call-history-size."));

  /* We start at the end.  */
  if (end < begin)
    {
      /* Truncate the context, if necessary.  */
      context = min (context, last);

      end = last;
      begin = end - context;
    }
  else if (size < 0)
    {
      if (begin == 0)
	{
	  printf_unfiltered (_("At the start of the branch trace record.\n"));

	  btinfo->func_iterator.end = 0;
	  return;
	}

      /* Truncate the context, if necessary.  */
      context = min (context, begin);

      end = begin;
      begin -= context;
    }
  else
    {
      if (end == last)
	{
	  printf_unfiltered (_("At the end of the branch trace record.\n"));

	  btinfo->func_iterator.begin = last;
	  return;
	}

      /* Truncate the context, if necessary.  */
      context = min (context, last - end);

      begin = end;
      end += context;
    }

  btrace_func_history (btinfo, uiout, begin, end, flags);

  btinfo->func_iterator.begin = begin;
  btinfo->func_iterator.end = end;

  do_cleanups (uiout_cleanup);
}
Example #23
0
}

void
normalize_mem_ranges (VEC(mem_range_s) *ranges)
{
    /* This function must not use any VEC operation on RANGES that
       reallocates the memory block as that invalidates the RANGES
       pointer, which callers expect to remain valid.  */

    if (!VEC_empty (mem_range_s, ranges))
    {
        struct mem_range *ra, *rb;
        int a, b;

        qsort (VEC_address (mem_range_s, ranges),
               VEC_length (mem_range_s, ranges),
               sizeof (mem_range_s),
               compare_mem_ranges);

        a = 0;
        ra = VEC_index (mem_range_s, ranges, a);
        for (b = 1; VEC_iterate (mem_range_s, ranges, b, rb); b++)
        {
            /* If mem_range B overlaps or is adjacent to mem_range A,
               merge them.  */
            if (rb->start <= ra->start + ra->length)
            {
                ra->length = max (ra->length,
                                  (rb->start - ra->start) + rb->length);
                continue;		/* next b, same a */
            }
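normalize_mem_ranges sorts the ranges by start address and then merges each range that overlaps or is adjacent to its predecessor, all in place so the vector is never reallocated. A standalone sketch of that sort-and-merge pass; merge_ranges and struct range are illustrative.

#include <stdio.h>
#include <stdlib.h>

struct range { unsigned start, length; };

static int
compare_ranges (const void *pa, const void *pb)
{
  const struct range *a = pa, *b = pb;
  return (a->start > b->start) - (a->start < b->start);
}

/* Sort R and merge overlapping or adjacent entries in place,
   returning the new count, the same pass normalize_mem_ranges does
   without reallocating the vector.  */
static int
merge_ranges (struct range *r, int n)
{
  int a = 0, b;

  if (n == 0)
    return 0;
  qsort (r, n, sizeof r[0], compare_ranges);
  for (b = 1; b < n; b++)
    {
      /* If range B overlaps or is adjacent to range A, merge them.  */
      if (r[b].start <= r[a].start + r[a].length)
	{
	  unsigned end = r[b].start + r[b].length;
	  if (end > r[a].start + r[a].length)
	    r[a].length = end - r[a].start;
	}
      else
	r[++a] = r[b];
    }
  return a + 1;
}

int
main (void)
{
  struct range r[] = { { 10, 5 }, { 0, 4 }, { 4, 2 } };
  int n = merge_ranges (r, 3);
  for (int i = 0; i < n; i++)
    printf ("[%u, %u)\n", r[i].start, r[i].start + r[i].length);
  return 0;
}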
Example #24
0
void
walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
{
  void *bd = NULL;
  basic_block dest;
  block_stmt_iterator bsi;
  bool is_interesting;
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks * 2);
  int sp = 0;

  while (true)
    {
      /* Don't worry about unreachable blocks.  */
      if (EDGE_COUNT (bb->preds) > 0
	  || bb == ENTRY_BLOCK_PTR
	  || bb == EXIT_BLOCK_PTR)
	{
	  /* If block BB is not interesting to the caller, then none of the
	     callbacks that walk the statements in BB are going to be
	     executed.  */
	  is_interesting = walk_data->interesting_blocks == NULL
	                   || TEST_BIT (walk_data->interesting_blocks,
					bb->index);

	  /* Callback to initialize the local data structure.  */
	  if (walk_data->initialize_block_local_data)
	    {
	      bool recycled;

	      /* First get some local data, reusing any local data pointer we may
	         have saved.  */
	      if (VEC_length (void_p, walk_data->free_block_data) > 0)
		{
		  bd = VEC_pop (void_p, walk_data->free_block_data);
		  recycled = 1;
		}
	      else
		{
		  bd = xcalloc (1, walk_data->block_local_data_size);
		  recycled = 0;
		}

	      /* Push the local data into the local data stack.  */
	      VEC_safe_push (void_p, heap, walk_data->block_data_stack, bd);

	      /* Call the initializer.  */
	      walk_data->initialize_block_local_data (walk_data, bb,
						      recycled);

	    }

	  /* Callback for operations to execute before we have walked the
	     dominator children, but before we walk statements.  */
	  if (walk_data->before_dom_children_before_stmts)
	    (*walk_data->before_dom_children_before_stmts) (walk_data, bb);

	  /* Statement walk before walking dominator children.  */
	  if (is_interesting && walk_data->before_dom_children_walk_stmts)
	    {
	      if (walk_data->walk_stmts_backward)
		for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
		  (*walk_data->before_dom_children_walk_stmts) (walk_data, bb,
								bsi);
	      else
		for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
		  (*walk_data->before_dom_children_walk_stmts) (walk_data, bb,
								bsi);
	    }

	  /* Callback for operations to execute before we have walked the
	     dominator children, and after we walk statements.  */
	  if (walk_data->before_dom_children_after_stmts)
	    (*walk_data->before_dom_children_after_stmts) (walk_data, bb);

	  /* Mark the current BB to be popped out of the recursion stack
	     once its children are processed.  */
	  worklist[sp++] = bb;
	  worklist[sp++] = NULL;

	  for (dest = first_dom_son (walk_data->dom_direction, bb);
	       dest; dest = next_dom_son (walk_data->dom_direction, dest))
	    worklist[sp++] = dest;
	}
      /* NULL is used to signal a pop operation in the recursion stack.  */
      while (sp > 0 && !worklist[sp - 1])
	{
	  --sp;
	  bb = worklist[--sp];
	  is_interesting = walk_data->interesting_blocks == NULL
	                   || TEST_BIT (walk_data->interesting_blocks,
				        bb->index);
	  /* Callback for operations to execute after we have walked the
	     dominator children, but before we walk statements.  */
	  if (walk_data->after_dom_children_before_stmts)
	    (*walk_data->after_dom_children_before_stmts) (walk_data, bb);

	  /* Statement walk after walking dominator children.  */
	  if (is_interesting && walk_data->after_dom_children_walk_stmts)
	    {
	      if (walk_data->walk_stmts_backward)
		for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
		  (*walk_data->after_dom_children_walk_stmts) (walk_data, bb,
							       bsi);
	      else
		for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
		  (*walk_data->after_dom_children_walk_stmts) (walk_data, bb,
							       bsi);
	    }

	  /* Callback for operations to execute after we have walked the
	     dominator children and after we have walked statements.  */
	  if (walk_data->after_dom_children_after_stmts)
	    (*walk_data->after_dom_children_after_stmts) (walk_data, bb);

	  if (walk_data->initialize_block_local_data)
	    {
	      /* And finally pop the record off the block local data stack.  */
	      bd = VEC_pop (void_p, walk_data->block_data_stack);
	      /* And save the block data so that we can re-use it.  */
	      VEC_safe_push (void_p, heap, walk_data->free_block_data, bd);
	    }
	}
      if (sp)
	bb = worklist[--sp];
      else
	break;
    }
  free (worklist);
}
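walk_dominator_tree replaces recursion with an explicit worklist: each block is pushed followed by a NULL sentinel and then its children, and popping the sentinel triggers the post-order callbacks for the block beneath it. A standalone sketch of the sentinel technique on a small binary tree; struct node and the fixed-size stack are illustrative (32 slots is ample for this demo).

#include <stdio.h>

struct node
{
  int id;
  struct node *kids[2];
};

/* Pre/post-order walk with an explicit stack: each node is pushed,
   then a NULL sentinel, then its children; popping the sentinel
   triggers the post-order visit, as in walk_dominator_tree.  */
static void
walk (struct node *root)
{
  struct node *worklist[32];
  int sp = 0;

  worklist[sp++] = root;
  while (sp > 0)
    {
      struct node *n = worklist[--sp];

      if (n == NULL)
	{
	  n = worklist[--sp];
	  printf ("post %d\n", n->id);
	  continue;
	}

      printf ("pre  %d\n", n->id);
      worklist[sp++] = n;	/* remember for the post-order visit  */
      worklist[sp++] = NULL;	/* sentinel marking the pop  */
      for (int i = 0; i < 2; i++)
	if (n->kids[i])
	  worklist[sp++] = n->kids[i];
    }
}

int
main (void)
{
  struct node b = { 1, { NULL, NULL } }, c = { 2, { NULL, NULL } };
  struct node a = { 0, { &b, &c } };
  walk (&a);
  return 0;
}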
static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  /* Prepend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
      VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
      ++num;
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    mark_sym_for_renaming (gimple_vdef (stmt));

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  gimple_set_def_ops (stmt, new_list.next);
}
static void
dequeue_and_dump (dump_info_p di)
{
  dump_queue_p dq;
  splay_tree_node stn;
  dump_node_info_p dni;
  tree t;
  unsigned int index;
  enum tree_code code;
  enum tree_code_class code_class;
  const char* code_name;

  /* Get the next node from the queue.  */
  dq = di->queue;
  stn = dq->node;
  t = (tree) stn->key;
  dni = (dump_node_info_p) stn->value;
  index = dni->index;

  /* Remove the node from the queue, and put it on the free list.  */
  di->queue = dq->next;
  if (!di->queue)
    di->queue_end = 0;
  dq->next = di->free_list;
  di->free_list = dq;

  /* Print the node index.  */
  dump_index (di, index);
  /* And the type of node this is.  */
  if (dni->binfo_p)
    code_name = "binfo";
  else
    code_name = tree_code_name[(int) TREE_CODE (t)];
  fprintf (di->stream, "%-16s ", code_name);
  di->column = 25;

  /* Figure out what kind of node this is.  */
  code = TREE_CODE (t);
  code_class = TREE_CODE_CLASS (code);

  /* Although BINFOs are TREE_VECs, we dump them specially so as to be
     more informative.  */
  if (dni->binfo_p)
    {
      unsigned ix;
      tree base;
      VEC(tree,gc) *accesses = BINFO_BASE_ACCESSES (t);

      dump_child ("type", BINFO_TYPE (t));

      if (BINFO_VIRTUAL_P (t))
	dump_string_field (di, "spec", "virt");

      dump_int (di, "bases", BINFO_N_BASE_BINFOS (t));
      for (ix = 0; BINFO_BASE_ITERATE (t, ix, base); ix++)
	{
	  tree access = (accesses ? VEC_index (tree, accesses, ix)
			 : access_public_node);
	  const char *string = NULL;

	  if (access == access_public_node)
	    string = "pub";
	  else if (access == access_protected_node)
	    string = "prot";
	  else if (access == access_private_node)
	    string = "priv";
	  else
	    gcc_unreachable ();

	  dump_string_field (di, "accs", string);
	  queue_and_dump_index (di, "binf", base, DUMP_BINFO);
	}

      goto done;
    }

  /* We can knock off a bunch of expression nodes in exactly the same
     way.  */
  if (IS_EXPR_CODE_CLASS (code_class))
    {
      /* If we're dumping children, dump them now.  */
      queue_and_dump_type (di, t);

      switch (code_class)
	{
	case tcc_unary:
	  dump_child ("op 0", TREE_OPERAND (t, 0));
	  break;

	case tcc_binary:
	case tcc_comparison:
	  dump_child ("op 0", TREE_OPERAND (t, 0));
	  dump_child ("op 1", TREE_OPERAND (t, 1));
	  break;

	case tcc_expression:
	case tcc_reference:
	case tcc_statement:
	case tcc_vl_exp:
	  /* These nodes are handled explicitly below.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else if (DECL_P (t))
    {
      expanded_location xloc;
      /* All declarations have names.  */
      if (DECL_NAME (t))
	dump_child ("name", DECL_NAME (t));
      if (DECL_ASSEMBLER_NAME_SET_P (t)
	  && DECL_ASSEMBLER_NAME (t) != DECL_NAME (t))
	dump_child ("mngl", DECL_ASSEMBLER_NAME (t));
      if (DECL_ABSTRACT_ORIGIN (t))
        dump_child ("orig", DECL_ABSTRACT_ORIGIN (t));
      /* And types.  */
      queue_and_dump_type (di, t);
      dump_child ("scpe", DECL_CONTEXT (t));
      /* And a source position.  */
      xloc = expand_location (DECL_SOURCE_LOCATION (t));
      if (xloc.file)
	{
	  const char *filename = lbasename (xloc.file);

	  dump_maybe_newline (di);
	  fprintf (di->stream, "srcp: %s:%-6d ", filename,
		   xloc.line);
	  di->column += 6 + strlen (filename) + 8;
	}
      /* And any declaration can be compiler-generated.  */
      if (CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_DECL_COMMON)
	  && DECL_ARTIFICIAL (t))
	dump_string_field (di, "note", "artificial");
      if (DECL_CHAIN (t) && !dump_flag (di, TDF_SLIM, NULL))
	dump_child ("chain", DECL_CHAIN (t));
    }
  else if (code_class == tcc_type)
    {
      /* All types have qualifiers.  */
      int quals = lang_hooks.tree_dump.type_quals (t);

      if (quals != TYPE_UNQUALIFIED)
	{
	  fprintf (di->stream, "qual: %c%c%c     ",
		   (quals & TYPE_QUAL_CONST) ? 'c' : ' ',
		   (quals & TYPE_QUAL_VOLATILE) ? 'v' : ' ',
		   (quals & TYPE_QUAL_RESTRICT) ? 'r' : ' ');
	  di->column += 14;
	}

      /* All types have associated declarations.  */
      dump_child ("name", TYPE_NAME (t));

      /* All types have a main variant.  */
      if (TYPE_MAIN_VARIANT (t) != t)
	dump_child ("unql", TYPE_MAIN_VARIANT (t));

      /* And sizes.  */
      dump_child ("size", TYPE_SIZE (t));

      /* All types have alignments.  */
      dump_int (di, "algn", TYPE_ALIGN (t));
    }
  else if (code_class == tcc_constant)
    /* All constants can have types.  */
    queue_and_dump_type (di, t);

  /* Give the language-specific code a chance to print something.  If
     it's completely taken care of things, don't bother printing
     anything more ourselves.  */
  if (lang_hooks.tree_dump.dump_tree (di, t))
    goto done;

  /* Now handle the various kinds of nodes.  */
  switch (code)
    {
      int i;

    case IDENTIFIER_NODE:
      dump_string_field (di, "strg", IDENTIFIER_POINTER (t));
      dump_int (di, "lngt", IDENTIFIER_LENGTH (t));
      break;

    case TREE_LIST:
      dump_child ("purp", TREE_PURPOSE (t));
      dump_child ("valu", TREE_VALUE (t));
      dump_child ("chan", TREE_CHAIN (t));
      break;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator it;
	for (i = 0, it = tsi_start (t); !tsi_end_p (it); tsi_next (&it), i++)
	  {
	    char buffer[32];
	    sprintf (buffer, "%u", i);
	    dump_child (buffer, tsi_stmt (it));
	  }
      }
      break;

    case TREE_VEC:
      dump_int (di, "lngt", TREE_VEC_LENGTH (t));
      for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
	{
	  char buffer[32];
	  sprintf (buffer, "%u", i);
	  dump_child (buffer, TREE_VEC_ELT (t, i));
	}
      break;

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      dump_int (di, "prec", TYPE_PRECISION (t));
      dump_string_field (di, "sign", TYPE_UNSIGNED (t) ? "unsigned": "signed");
      dump_child ("min", TYPE_MIN_VALUE (t));
      dump_child ("max", TYPE_MAX_VALUE (t));

      if (code == ENUMERAL_TYPE)
	dump_child ("csts", TYPE_VALUES (t));
      break;

    case REAL_TYPE:
      dump_int (di, "prec", TYPE_PRECISION (t));
      break;

    case FIXED_POINT_TYPE:
      dump_int (di, "prec", TYPE_PRECISION (t));
      dump_string_field (di, "sign", TYPE_UNSIGNED (t) ? "unsigned": "signed");
      dump_string_field (di, "saturating",
			 TYPE_SATURATING (t) ? "saturating": "non-saturating");
      break;

    case POINTER_TYPE:
      dump_child ("ptd", TREE_TYPE (t));
      break;

    case REFERENCE_TYPE:
      dump_child ("refd", TREE_TYPE (t));
      break;

    case METHOD_TYPE:
      dump_child ("clas", TYPE_METHOD_BASETYPE (t));
      /* Fall through.  */

    case FUNCTION_TYPE:
      dump_child ("retn", TREE_TYPE (t));
      dump_child ("prms", TYPE_ARG_TYPES (t));
      break;

    case ARRAY_TYPE:
      dump_child ("elts", TREE_TYPE (t));
      dump_child ("domn", TYPE_DOMAIN (t));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      if (TREE_CODE (t) == RECORD_TYPE)
	dump_string_field (di, "tag", "struct");
      else
	dump_string_field (di, "tag", "union");

      dump_child ("flds", TYPE_FIELDS (t));
      dump_child ("fncs", TYPE_METHODS (t));
      queue_and_dump_index (di, "binf", TYPE_BINFO (t),
			    DUMP_BINFO);
      break;

    case CONST_DECL:
      dump_child ("cnst", DECL_INITIAL (t));
      break;

    case DEBUG_EXPR_DECL:
      dump_int (di, "-uid", DEBUG_TEMP_UID (t));
      /* Fall through.  */

    case VAR_DECL:
    case PARM_DECL:
    case FIELD_DECL:
    case RESULT_DECL:
      if (TREE_CODE (t) == PARM_DECL)
	dump_child ("argt", DECL_ARG_TYPE (t));
      else
	dump_child ("init", DECL_INITIAL (t));
      dump_child ("size", DECL_SIZE (t));
      dump_int (di, "algn", DECL_ALIGN (t));

      if (TREE_CODE (t) == FIELD_DECL)
	{
	  if (DECL_FIELD_OFFSET (t))
	    dump_child ("bpos", bit_position (t));
	}
      else if (TREE_CODE (t) == VAR_DECL
	       || TREE_CODE (t) == PARM_DECL)
	{
	  dump_int (di, "used", TREE_USED (t));
	  if (DECL_REGISTER (t))
	    dump_string_field (di, "spec", "register");
	}
      break;

    case FUNCTION_DECL:
      dump_child ("args", DECL_ARGUMENTS (t));
      if (DECL_EXTERNAL (t))
	dump_string_field (di, "body", "undefined");
      if (TREE_PUBLIC (t))
	dump_string_field (di, "link", "extern");
      else
	dump_string_field (di, "link", "static");
      if (DECL_SAVED_TREE (t) && !dump_flag (di, TDF_SLIM, t))
	dump_child ("body", DECL_SAVED_TREE (t));
      break;

    case INTEGER_CST:
      if (TREE_INT_CST_HIGH (t))
	dump_int (di, "high", TREE_INT_CST_HIGH (t));
      dump_int (di, "low", TREE_INT_CST_LOW (t));
      break;

    case STRING_CST:
      fprintf (di->stream, "strg: %-7s ", TREE_STRING_POINTER (t));
      dump_int (di, "lngt", TREE_STRING_LENGTH (t));
      break;

    case REAL_CST:
      dump_real (di, "valu", TREE_REAL_CST_PTR (t));
      break;

    case FIXED_CST:
      dump_fixed (di, "valu", TREE_FIXED_CST_PTR (t));
      break;

    case TRUTH_NOT_EXPR:
    case ADDR_EXPR:
    case INDIRECT_REF:
    case CLEANUP_POINT_EXPR:
    case SAVE_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* These nodes are unary, but do not have code class `1'.  */
      dump_child ("op 0", TREE_OPERAND (t, 0));
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case INIT_EXPR:
    case MODIFY_EXPR:
    case COMPOUND_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
      /* These nodes are binary, but do not have code class `2'.  */
      dump_child ("op 0", TREE_OPERAND (t, 0));
      dump_child ("op 1", TREE_OPERAND (t, 1));
      break;

    case COMPONENT_REF:
      dump_child ("op 0", TREE_OPERAND (t, 0));
      dump_child ("op 1", TREE_OPERAND (t, 1));
      dump_child ("op 2", TREE_OPERAND (t, 2));
      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      dump_child ("op 0", TREE_OPERAND (t, 0));
      dump_child ("op 1", TREE_OPERAND (t, 1));
      dump_child ("op 2", TREE_OPERAND (t, 2));
      dump_child ("op 3", TREE_OPERAND (t, 3));
      break;

    case COND_EXPR:
      dump_child ("op 0", TREE_OPERAND (t, 0));
      dump_child ("op 1", TREE_OPERAND (t, 1));
      dump_child ("op 2", TREE_OPERAND (t, 2));
      break;

    case TRY_FINALLY_EXPR:
      dump_child ("op 0", TREE_OPERAND (t, 0));
      dump_child ("op 1", TREE_OPERAND (t, 1));
      break;

    case CALL_EXPR:
      {
	int i = 0;
	tree arg;
	call_expr_arg_iterator iter;
	dump_child ("fn", CALL_EXPR_FN (t));
	FOR_EACH_CALL_EXPR_ARG (arg, iter, t)
	  {
	    char buffer[32];
	    sprintf (buffer, "%u", i);
	    dump_child (buffer, arg);
	    i++;
	  }
      }
      break;

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT cnt;
	tree index, value;
	dump_int (di, "lngt", VEC_length (constructor_elt,
					  CONSTRUCTOR_ELTS (t)));
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), cnt, index, value)
	  {
	    dump_child ("idx", index);
	    dump_child ("val", value);
	  }
      }
      break;

    default:
      /* There are no additional fields to print.  */
      break;
    }

 done:
  if (dump_flag (di, TDF_ADDRESS, NULL))
    dump_pointer (di, "addr", (void *) t);

  /* Terminate the line.  */
  fprintf (di->stream, "\n");
}
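/* A hedged sketch of the driver loop around dequeue_and_dump, modeled on
   GCC's dump_node; the `queue' helper, which assigns a node its index and
   links it into di->queue, is assumed here since only its callers appear
   in this excerpt.  */

static void
dump_whole_tree (dump_info_p di, tree root)
{
  /* Queue the root node; dequeue_and_dump queues children as it visits
     them, so one seed is enough.  */
  queue (di, root, DUMP_NONE);

  /* Drain the queue: each node is printed once, then its queue entry is
     recycled onto di->free_list by dequeue_and_dump.  */
  while (di->queue)
    dequeue_and_dump (di);
}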
Example #28
0
rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_double_int_const (tree_to_double_int (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    off = immed_double_int_const
	    (double_int_sext (tree_to_double_int (addr->offset),
			      TYPE_PRECISION (TREE_TYPE (addr->offset))),
	     pointer_mode);
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index
	  >= VEC_length (mem_addr_template, mem_addr_template_list))
	VEC_safe_grow_cleared (mem_addr_template, gc, mem_addr_template_list,
			       templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = VEC_index (mem_addr_template, mem_addr_template_list, templ_index);
      if (!templ->ref)
	{
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st ? const0_rtx : NULL_RTX,
			off ? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
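/* A minimal sketch (hypothetical; not GCC's actual TEMPL_IDX macro) of the
   cache keying used for mem_addr_template_list above: pack the address
   space together with one presence bit per address part, so that all
   addresses of the same shape map to a single cached RTL template.  */

static unsigned int
templ_index (unsigned int as, int sym_p, int base_p, int idx_p,
	     int step_p, int off_p)
{
  unsigned int ix = as;

  ix = (ix << 1) | (sym_p != 0);	/* Symbol present?  */
  ix = (ix << 1) | (base_p != 0);	/* Base register present?  */
  ix = (ix << 1) | (idx_p != 0);	/* Index register present?  */
  ix = (ix << 1) | (step_p != 0);	/* Step present?  */
  ix = (ix << 1) | (off_p != 0);	/* Offset present?  */
  return ix;
}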
Example #29
0
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (TREE_CODE (stmt) == VAR_DECL
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	  && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	else if (wtd->omp_ctx != NULL)
	  {
	    /* A private clause doesn't cause any references to the
	       variable in outer contexts, so avoid calling
	       omp_cxx_notice_variable for it.  */
	    struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	    wtd->omp_ctx = NULL;
	    cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			  data, NULL);
	    wtd->omp_ctx = old;
	    *walk_subtrees = 0;
	  }
	break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed; tail-recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (TREE_CODE (decl) == VAR_DECL
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	 BLOCK, and append an IMPORTED_DECL to its BLOCK_VARS
	 chained list.  */
      if (wtd->bind_expr_stack)
	{
	  int i;
	  for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
						     wtd->bind_expr_stack, i))))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_LASTPRIVATE:
	    decl = OMP_CLAUSE_DECL (c);
	    if (decl == error_mark_node || !omp_var_to_track (decl))
	      break;
	    n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	    if (n != NULL)
	      break;
	    splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			       ? OMP_CLAUSE_DEFAULT_SHARED
			       : OMP_CLAUSE_DEFAULT_PRIVATE);
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
		&& omp_ctx.outer)
	      omp_cxx_notice_variable (omp_ctx.outer, decl);
	    break;
	  case OMP_CLAUSE_DEFAULT:
	    if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
	      omp_ctx.default_shared = true;
	    /* Fall through.  */
	  default:
	    break;
	  }
      cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
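/* A hedged sketch of how a walk over cp_genericize_r is seeded, modeled on
   GCC's cp_genericize_tree; the exact initialization of cp_genericize_data
   is an assumption based on the fields used above.  */

static void
genericize_body (tree *t_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = pointer_set_create ();	/* Visited-node set.  */
  wtd.bind_expr_stack = NULL;		/* Enclosing BIND_EXPRs, filled during the walk.  */
  wtd.omp_ctx = NULL;			/* No enclosing OpenMP context yet.  */
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  pointer_set_destroy (wtd.p_set);
  VEC_free (tree, heap, wtd.bind_expr_stack);
}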