Example #1
static bool
mark_address (gimple stmt, tree addr, void *data)
{
  addr = get_base_address (addr);
  if (TREE_CODE (addr) == FUNCTION_DECL)
    {
      struct cgraph_node *node = cgraph_get_create_node (addr);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((struct cgraph_node *)data, NULL,
			    node, NULL,
			    IPA_REF_ADDR, stmt);
    }
  else if (addr && TREE_CODE (addr) == VAR_DECL
	   && (TREE_STATIC (addr) || DECL_EXTERNAL (addr)))
    {
      struct varpool_node *vnode = varpool_node (addr);
      int walk_subtrees;

      if (lang_hooks.callgraph.analyze_expr)
	lang_hooks.callgraph.analyze_expr (&addr, &walk_subtrees);
      varpool_mark_needed_node (vnode);
      ipa_record_reference ((struct cgraph_node *)data, NULL,
			    NULL, vnode,
			    IPA_REF_ADDR, stmt);
    }

  return false;
}
Example #2
static bool
mark_load (gimple stmt, tree t, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == FUNCTION_DECL)
    {
      /* ??? This can happen on platforms with descriptors when these are
	 directly manipulated in the code.  Pretend that it's an address.  */
      struct cgraph_node *node = cgraph_get_create_node (t);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((struct cgraph_node *)data, NULL,
			    node, NULL,
			    IPA_REF_ADDR, stmt);
    }
  else if (t && TREE_CODE (t) == VAR_DECL
	   && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *vnode = varpool_node (t);
      int walk_subtrees;

      if (lang_hooks.callgraph.analyze_expr)
	lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
      varpool_mark_needed_node (vnode);
      ipa_record_reference ((struct cgraph_node *)data, NULL,
			    NULL, vnode,
			    IPA_REF_LOAD, stmt);
    }
  return false;
}
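The two snippets above use the older, six-argument form of ipa_record_reference, while most of the later examples use the newer form that takes symtab_node pointers. As a rough sketch of the two signatures, inferred only from the call sites collected on this page rather than quoted from any particular GCC header (the return type is assumed to be struct ipa_ref * in both cases; only the newer call sites, such as ipa_clone_ref and ipa_maybe_record_reference below, actually use it):

/* Older interface: the referring and referred symbols are passed as separate
   cgraph_node/varpool_node pairs, with NULL in the unused slot.  */
struct ipa_ref *
ipa_record_reference (struct cgraph_node *referring_node,
		      struct varpool_node *referring_varpool_node,
		      struct cgraph_node *referred_node,
		      struct varpool_node *referred_varpool_node,
		      enum ipa_ref_use use_type, gimple stmt);

/* Newer interface: functions and variables share the symtab_node base type,
   so a single pointer per side is enough.  */
struct ipa_ref *
ipa_record_reference (symtab_node *referring_node,
		      symtab_node *referred_node,
		      enum ipa_ref_use use_type, gimple stmt);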
Example #3
static bool
mark_load (gimple stmt, tree t, tree, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == FUNCTION_DECL)
    {
      /* ??? This can happen on platforms with descriptors when these are
	 directly manipulated in the code.  Pretend that it's an address.  */
      struct cgraph_node *node = cgraph_get_create_node (t);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((symtab_node *)data,
			    node,
			    IPA_REF_ADDR, stmt);
    }
  else if (t && TREE_CODE (t) == VAR_DECL
	   && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *vnode = varpool_node_for_decl (t);

      ipa_record_reference ((symtab_node *)data,
			    vnode,
			    IPA_REF_LOAD, stmt);
    }
  return false;
}
Example #4
static tree
record_reference (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  tree decl;
  struct record_reference_ctx *ctx = (struct record_reference_ctx *)data;

  t = canonicalize_constructor_val (t, NULL);
  if (!t)
    t = *tp;
  else if (t != *tp)
    *tp = t;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case FUNCTION_DECL:
      gcc_unreachable ();
      break;

    case FDESC_EXPR:
    case ADDR_EXPR:
      /* Record dereferences to the functions.  This makes the
	 functions reachable unconditionally.  */
      decl = get_base_var (*tp);
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  struct cgraph_node *node = cgraph_get_create_node (decl);
	  if (!ctx->only_vars)
	    cgraph_mark_address_taken_node (node);
	  ipa_record_reference (ctx->varpool_node,
				node,
			        IPA_REF_ADDR, NULL);
	}

      if (TREE_CODE (decl) == VAR_DECL)
	{
	  varpool_node *vnode = varpool_node_for_decl (decl);
	  ipa_record_reference (ctx->varpool_node,
				vnode,
				IPA_REF_ADDR, NULL);
	}
      *walk_subtrees = 0;
      break;

    default:
      /* Save some cycles by not walking types and declarations as we
	 won't find anything useful there anyway.  */
      if (IS_TYPE_OR_DECL_P (*tp))
	{
	  *walk_subtrees = 0;
	  break;
	}
      break;
    }

  return NULL_TREE;
}
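record_reference above is a walk_tree callback; the context type it casts data to is not included on this page. A minimal reconstruction of it, inferred purely from the two fields the callback dereferences (and therefore an assumption, not a quote from cgraphbuild.c), might look like:

/* Hypothetical reconstruction of the walker context used by record_reference:
   the varpool node whose initializer is being scanned, and a flag that
   suppresses cgraph_mark_address_taken_node when only variable references
   are of interest.  */
struct record_reference_ctx
{
  bool only_vars;			/* Do not mark function addresses taken.  */
  struct varpool_node *varpool_node;	/* Owner of the initializer being walked.  */
};

The varpool examples further down call record_references_in_initializer, which would presumably fill in such a context for a variable's DECL_INITIAL and hand it to walk_tree together with record_reference.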
Example #5
static void
record_eh_tables (struct cgraph_node *node, struct function *fun)
{
  eh_region i;

  if (DECL_FUNCTION_PERSONALITY (node->decl))
    {
      tree per_decl = DECL_FUNCTION_PERSONALITY (node->decl);
      struct cgraph_node *per_node = cgraph_get_create_node (per_decl);

      ipa_record_reference (node, per_node, IPA_REF_ADDR, NULL);
      cgraph_mark_address_taken_node (per_node);
    }

  i = fun->eh->region_tree;
  if (!i)
    return;

  while (1)
    {
      switch (i->type)
	{
	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  break;

	case ERT_TRY:
	  {
	    eh_catch c;
	    for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
	      record_type_list (node, c->type_list);
	  }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  record_type_list (node, i->u.allowed.type_list);
	  break;
	}
      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
Example #6
void
ipa_clone_referring (symtab_node dest_node,
		    struct ipa_ref_list *src)
{
  struct ipa_ref *ref;
  int i;
  for (i = 0; ipa_ref_list_referring_iterate (src, i, ref); i++)
    ipa_record_reference (ref->referring,
			  dest_node,
			  ref->use, ref->stmt);
}
Example #7
static bool
mark_address (gimple stmt, tree addr, tree, void *data)
{
  addr = get_base_address (addr);
  if (TREE_CODE (addr) == FUNCTION_DECL)
    {
      struct cgraph_node *node = cgraph_get_create_node (addr);
      cgraph_mark_address_taken_node (node);
      ipa_record_reference ((symtab_node *)data,
			    node,
			    IPA_REF_ADDR, stmt);
    }
  else if (addr && TREE_CODE (addr) == VAR_DECL
	   && (TREE_STATIC (addr) || DECL_EXTERNAL (addr)))
    {
      varpool_node *vnode = varpool_node_for_decl (addr);

      ipa_record_reference ((symtab_node *)data,
			    vnode,
			    IPA_REF_ADDR, stmt);
    }

  return false;
}
Example #8
static bool
mark_store (gimple stmt, tree t, tree, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *vnode = varpool_node_for_decl (t);

      ipa_record_reference ((symtab_node *)data,
			    vnode,
			    IPA_REF_STORE, stmt);
     }
  return false;
}
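The mark_load, mark_store, and mark_address callbacks shown in these examples all share the signature expected by GCC's gimple operand walker. As a minimal sketch of the glue that runs them over a single statement (the helper name here is invented; the build_cgraph_edges example below calls an equivalent routine named ipa_record_stmt_references), assuming the walk_stmt_load_store_addr_ops interface from the same GCC generation as the four-argument callbacks:

/* Sketch: record every load, store and address reference made by STMT on
   behalf of NODE by letting the operand walker dispatch to the callbacks
   defined above.  */
static void
record_stmt_references (struct cgraph_node *node, gimple stmt)
{
  walk_stmt_load_store_addr_ops (stmt, node,
				 mark_load, mark_store, mark_address);
}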
Example #9
struct ipa_ref *
ipa_clone_ref (struct ipa_ref *ref,
	       symtab_node *dest_node,
	       gimple stmt)
{
  bool speculative = ref->speculative;
  unsigned int stmt_uid = ref->lto_stmt_uid;
  struct ipa_ref *ref2;

  ref2 = ipa_record_reference (dest_node,
			       ref->referred,
			       ref->use, stmt);
  ref2->speculative = speculative;
  ref2->lto_stmt_uid = stmt_uid;
  return ref2;
}
Example #10
struct ipa_ref *
ipa_maybe_record_reference (symtab_node *referring_node, tree val,
			    enum ipa_ref_use use_type, gimple stmt)
{
  STRIP_NOPS (val);
  if (TREE_CODE (val) != ADDR_EXPR)
    return NULL;
  val = get_base_var (val);
  if (val && (TREE_CODE (val) == FUNCTION_DECL
	       || TREE_CODE (val) == VAR_DECL))
    {
      symtab_node *referred = symtab_get_node (val);
      gcc_checking_assert (referred);
      return ipa_record_reference (referring_node, referred,
				   use_type, stmt);
    }
  return NULL;
}
Example #11
void
ipa_clone_referring (symtab_node *dest_node,
		    struct ipa_ref_list *src)
{
  struct ipa_ref *ref, *ref2;
  int i;
  for (i = 0; ipa_ref_list_referring_iterate (src, i, ref); i++)
    {
      bool speculative = ref->speculative;
      unsigned int stmt_uid = ref->lto_stmt_uid;

      ref2 = ipa_record_reference (ref->referring,
				   dest_node,
				   ref->use, ref->stmt);
      ref2->speculative = speculative;
      ref2->lto_stmt_uid = stmt_uid;
    }
}
Example #12
File: symtab.c Project: Roffi/gcc
bool
symtab_resolve_alias (symtab_node node, symtab_node target)
{
  symtab_node n;

  gcc_assert (!node->symbol.analyzed
	      && !vec_safe_length (node->symbol.ref_list.references));

  /* Never let cycles creep into the symbol table alias references;
     those would make alias walkers loop forever.  */
  for (n = target; n && n->symbol.alias;
       n = n->symbol.analyzed ? symtab_alias_target (n) : NULL)
    if (n == node)
       {
	 if (is_a <cgraph_node> (node))
           error ("function %q+D part of alias cycle", node->symbol.decl);
         else if (is_a <varpool_node> (node))
           error ("variable %q+D part of alias cycle", node->symbol.decl);
	 else
	   gcc_unreachable ();
	 node->symbol.alias = false;
	 return false;
       }

  /* "analyze" the node - i.e. mark the reference.  */
  node->symbol.definition = true;
  node->symbol.alias = true;
  node->symbol.analyzed = true;
  ipa_record_reference (node, target, IPA_REF_ALIAS, NULL);

  /* Alias targets become redundant after the alias is resolved into a
     reference.  We do not want to keep them around, or we would have to
     mind updating them when renaming symbols.  */
  node->symbol.alias_target = NULL;

  if (node->symbol.cpp_implicit_alias && cgraph_state >= CGRAPH_STATE_CONSTRUCTION)
    fixup_same_cpp_alias_visibility (node, target);

  /* If alias has address taken, so does the target.  */
  if (node->symbol.address_taken)
    symtab_alias_ultimate_target (target, NULL)->symbol.address_taken = true;
  return true;
}
Example #13
File: varpool.c Project: Lao16/gcc
void
varpool_analyze_node (struct varpool_node *node)
{
  tree decl = node->symbol.decl;

  /* When reading back varpool at LTO time, we re-construct the queue in order
     to have the "needed" list right by inserting all needed nodes into varpool.
     We, however, don't want to re-analyze already analyzed nodes.  */
  if (!node->analyzed)
    {
      gcc_assert (!in_lto_p || cgraph_function_flags_ready);
      /* Compute the alignment early so function body expanders are
	 already informed about increased alignment.  */
      align_variable (decl, 0);
    }
  if (node->alias && node->alias_of)
    {
      struct varpool_node *tgt = varpool_node_for_decl (node->alias_of);
      struct varpool_node *n;

      for (n = tgt; n && n->alias;
	   n = n->analyzed ? varpool_alias_aliased_node (n) : NULL)
	if (n == node)
	  {
	    error ("variable %q+D part of alias cycle", node->symbol.decl);
	    node->alias = false;
	    continue;
	  }
      if (!vec_safe_length (node->symbol.ref_list.references))
	ipa_record_reference ((symtab_node)node, (symtab_node)tgt, IPA_REF_ALIAS, NULL);
      if (node->extra_name_alias)
	{
	  DECL_WEAK (node->symbol.decl) = DECL_WEAK (node->alias_of);
	  DECL_EXTERNAL (node->symbol.decl) = DECL_EXTERNAL (node->alias_of);
	  DECL_VISIBILITY (node->symbol.decl) = DECL_VISIBILITY (node->alias_of);
	  fixup_same_cpp_alias_visibility ((symtab_node) node,
					   (symtab_node) tgt, node->alias_of);
	}
    }
  else if (DECL_INITIAL (decl))
    record_references_in_initializer (decl, node->analyzed);
  node->analyzed = true;
}
Example #14
static bool
mark_store (gimple stmt, tree t, void *data)
{
  t = get_base_address (t);
  if (t && TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *vnode = varpool_node (t);
      int walk_subtrees;

      if (lang_hooks.callgraph.analyze_expr)
	lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
      varpool_mark_needed_node (vnode);
      ipa_record_reference ((struct cgraph_node *)data, NULL,
			    NULL, vnode,
			    IPA_REF_STORE, stmt);
     }
  return false;
}
Example #15
static void
record_type_list (struct cgraph_node *node, tree list)
{
  for (; list; list = TREE_CHAIN (list))
    {
      tree type = TREE_VALUE (list);
      
      if (TYPE_P (type))
	type = lookup_type_for_runtime (type);
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (TREE_CODE (type) == VAR_DECL)
	    {
	      varpool_node *vnode = varpool_node_for_decl (type);
	      ipa_record_reference (node,
				    vnode,
				    IPA_REF_ADDR, NULL);
	    }
	}
    }
}
Example #16
static unsigned int
build_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  struct pointer_set_t *visited_nodes = pointer_set_create ();
  gimple_stmt_iterator gsi;
  tree decl;
  unsigned ix;

  /* Create the callgraph edges and record the nodes referenced by the
     function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl;

	  if (is_gimple_debug (stmt))
	    continue;

	  if (is_gimple_call (stmt))
	    {
	      int freq = compute_call_stmt_bb_frequency (current_function_decl,
							 bb);
	      decl = gimple_call_fndecl (stmt);
	      if (decl)
		cgraph_create_edge (node, cgraph_get_create_node (decl),
				    stmt, bb->count, freq);
	      else if (gimple_call_internal_p (stmt))
		;
	      else
		cgraph_create_indirect_edge (node, stmt,
					     gimple_call_flags (stmt),
					     bb->count, freq);
	    }
	  ipa_record_stmt_references (node, stmt);
	  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
	      && gimple_omp_parallel_child_fn (stmt))
	    {
	      tree fn = gimple_omp_parallel_child_fn (stmt);
	      ipa_record_reference (node,
				    cgraph_get_create_node (fn),
				    IPA_REF_ADDR, stmt);
	    }
	  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
	    {
	      tree fn = gimple_omp_task_child_fn (stmt);
	      if (fn)
		ipa_record_reference (node,
				      cgraph_get_create_node (fn),
				      IPA_REF_ADDR, stmt);
	      fn = gimple_omp_task_copy_fn (stmt);
	      if (fn)
		ipa_record_reference (node,
				      cgraph_get_create_node (fn),
				      IPA_REF_ADDR, stmt);
	    }
	}
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	ipa_record_stmt_references (node, gsi_stmt (gsi));
   }

  /* Look for initializers of constant variables and private statics.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (TREE_CODE (decl) == VAR_DECL
	&& (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
	&& !DECL_HAS_VALUE_EXPR_P (decl))
      varpool_finalize_decl (decl);
  record_eh_tables (node, cfun);

  pointer_set_destroy (visited_nodes);
  return 0;
}
Example #17
/* Walk the decls we marked as necessary and see if they reference new
   variables or functions and add them into the worklists.  */
bool
varpool_analyze_pending_decls (void)
{
  bool changed = false;

  timevar_push (TV_VARPOOL);
  while (varpool_first_unanalyzed_node)
    {
      struct varpool_node *node = varpool_first_unanalyzed_node, *next;
      tree decl = node->decl;
      bool analyzed = node->analyzed;

      varpool_first_unanalyzed_node->analyzed = true;

      varpool_first_unanalyzed_node = varpool_first_unanalyzed_node->next_needed;

      /* When reading back varpool at LTO time, we re-construct the queue in order
         to have the "needed" list right by inserting all needed nodes into varpool.
	 We, however, don't want to re-analyze already analyzed nodes.  */
      if (!analyzed)
	{
	  gcc_assert (!in_lto_p || cgraph_function_flags_ready);
          /* Compute the alignment early so function body expanders are
	     already informed about increased alignment.  */
          align_variable (decl, 0);
	}
      if (node->alias && node->alias_of)
	{
	  struct varpool_node *tgt = varpool_node (node->alias_of);
          struct varpool_node *n;

	  for (n = tgt; n && n->alias;
	       n = n->analyzed ? varpool_alias_aliased_node (n) : NULL)
	    if (n == node)
	      {
		error ("variable %q+D part of alias cycle", node->decl);
		node->alias = false;
		continue;
	      }
	  if (!VEC_length (ipa_ref_t, node->ref_list.references))
	    ipa_record_reference (NULL, node, NULL, tgt, IPA_REF_ALIAS, NULL);
	  /* The C++ FE sometimes changes linkage flags after producing same-body aliases.  */
	  if (node->extra_name_alias)
	    {
	      DECL_WEAK (node->decl) = DECL_WEAK (node->alias_of);
	      TREE_PUBLIC (node->decl) = TREE_PUBLIC (node->alias_of);
	      DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (node->alias_of);
	      DECL_VISIBILITY (node->decl) = DECL_VISIBILITY (node->alias_of);
	      if (TREE_PUBLIC (node->decl))
		{
		  DECL_COMDAT (node->decl) = DECL_COMDAT (node->alias_of);
		  DECL_COMDAT_GROUP (node->decl) = DECL_COMDAT_GROUP (node->alias_of);
		  if (DECL_ONE_ONLY (node->alias_of) && !node->same_comdat_group)
		    {
		      node->same_comdat_group = tgt;
		      if (!tgt->same_comdat_group)
			tgt->same_comdat_group = node;
		      else
			{
			  struct varpool_node *n;
			  for (n = tgt->same_comdat_group;
			       n->same_comdat_group != tgt;
			       n = n->same_comdat_group)
			    ;
			  n->same_comdat_group = node;
			}
		    }
		}
	    }
	}
      else if (DECL_INITIAL (decl))
	record_references_in_initializer (decl, analyzed);
      if (node->same_comdat_group)
	{
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    varpool_mark_needed_node (next);
	}
      changed = true;
    }
  timevar_pop (TV_VARPOOL);
  return changed;
}