Example #1
void
clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
		     bool update_original, int *overall_size)
{
  if (duplicate)
    {
      /* We may eliminate the need for the out-of-line copy to be output.
	 In that case just go ahead and re-use it.  This is not just a
	 memory optimization.  Making the offline copy of the function
	 disappear from the program will improve future decisions on
	 inlining.  */
      if (!e->callee->callers->next_caller
	  /* Recursive inlining never wants the master clone to
	     be overwritten.  */
	  && update_original
	  && can_remove_node_now_p (e->callee, e))
	{
	  /* TODO: When callee is in a comdat group, we could remove all of it,
	     including all inline clones inlined into it.  That would however
	     need small function inlining to register edge removal hook to
	     maintain the priority queue.

	     For now we keep the other functions in the group in the program until
	     cgraph_remove_unreachable_functions gets rid of them.  */
	  gcc_assert (!e->callee->global.inlined_to);
          symtab_dissolve_same_comdat_group_list ((symtab_node) e->callee);
	  if (e->callee->analyzed && !DECL_EXTERNAL (e->callee->symbol.decl))
	    {
	      if (overall_size)
	        *overall_size -= inline_summary (e->callee)->size;
	      nfunctions_inlined++;
	    }
	  duplicate = false;
	  e->callee->symbol.externally_visible = false;
          update_noncloned_frequencies (e->callee, e->frequency);
	}
      else
	{
	  struct cgraph_node *n;
	  n = cgraph_clone_node (e->callee, e->callee->symbol.decl,
				 e->count, e->frequency,
				 update_original, vNULL, true);
	  cgraph_redirect_edge_callee (e, n);
	}
    }
  else
    symtab_dissolve_same_comdat_group_list ((symtab_node) e->callee);

  if (e->caller->global.inlined_to)
    e->callee->global.inlined_to = e->caller->global.inlined_to;
  else
    e->callee->global.inlined_to = e->caller;

  /* Recursively clone all bodies.  */
  for (e = e->callee->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      clone_inlined_nodes (e, duplicate, update_original, overall_size);
}
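
The key decision in Example #1 is whether the callee's out-of-line body can simply be reused instead of making a fresh inline clone: that is only safe when the inlined edge is the callee's sole caller, recursive inlining does not need the master clone, and the node could otherwise be removed.  A minimal self-contained sketch of that predicate follows; the struct and field names are simplified stand-ins for the cgraph types, not the real GCC API.

#include <stdbool.h>

/* Hypothetical, simplified stand-ins for the cgraph_edge/cgraph_node
   fields the real check inspects.  */
struct callee_info
{
  bool has_single_caller;   /* e->callee->callers->next_caller == NULL */
  bool removable_now;       /* can_remove_node_now_p (e->callee, e) */
};

/* Mirror of the condition under which clone_inlined_nodes reuses the
   out-of-line body instead of creating a new inline clone.  */
static bool
reuse_offline_body_p (const struct callee_info *c, bool update_original)
{
  return c->has_single_caller
	 /* Recursive inlining must keep the master clone intact.  */
	 && update_original
	 && c->removable_now;
}
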
Example #2
static void
update_noncloned_frequencies (struct cgraph_node *node,
			      int freq_scale)
{
  struct cgraph_edge *e;

  /* We do not want to ignore a high loop nest after freq drops to 0.  */
  if (!freq_scale)
    freq_scale = 1;
  for (e = node->callees; e; e = e->next_callee)
    {
      e->frequency = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
      if (e->frequency > CGRAPH_FREQ_MAX)
        e->frequency = CGRAPH_FREQ_MAX;
      if (!e->inline_failed)
        update_noncloned_frequencies (e->callee, freq_scale);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      e->frequency = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
      if (e->frequency > CGRAPH_FREQ_MAX)
        e->frequency = CGRAPH_FREQ_MAX;
    }
}
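
The scaling done by update_noncloned_frequencies is plain integer arithmetic: each edge frequency is multiplied by freq_scale, divided by CGRAPH_FREQ_BASE, and clamped at CGRAPH_FREQ_MAX.  A standalone illustration is below; the constant values (1000 and 100000) match the usual cgraph.h definitions but are restated here only as an assumption.

#include <stdio.h>

/* Assumed values of the cgraph frequency constants; cgraph.h normally
   defines CGRAPH_FREQ_BASE as 1000 and CGRAPH_FREQ_MAX as 100 times that.  */
#define CGRAPH_FREQ_BASE 1000
#define CGRAPH_FREQ_MAX  100000

/* Scale an edge frequency the way update_noncloned_frequencies does:
   multiply in a wide type, divide by the base, clamp at the maximum.  */
static int
scale_frequency (int frequency, int freq_scale)
{
  /* A zero scale would erase loop-nest information, so treat it as 1.  */
  if (!freq_scale)
    freq_scale = 1;
  long long f = (long long) frequency * freq_scale / CGRAPH_FREQ_BASE;
  return f > CGRAPH_FREQ_MAX ? CGRAPH_FREQ_MAX : (int) f;
}

int
main (void)
{
  /* A call-site frequency of 600 scales an edge of 4000 down to 2400.  */
  printf ("%d\n", scale_frequency (4000, 600));
  /* Very hot recursive edges saturate at CGRAPH_FREQ_MAX.  */
  printf ("%d\n", scale_frequency (90000, 5000));
  return 0;
}
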
void
clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
		     bool update_original, int *overall_size, int freq_scale)
{
  struct cgraph_node *inlining_into;
  struct cgraph_edge *next;

  if (e->caller->global.inlined_to)
    inlining_into = e->caller->global.inlined_to;
  else
    inlining_into = e->caller;

  if (duplicate)
    {
      /* We may eliminate the need for the out-of-line copy to be output.
	 In that case just go ahead and re-use it.  This is not just a
	 memory optimization.  Making the offline copy of the function
	 disappear from the program will improve future decisions on
	 inlining.  */
      if (!e->callee->callers->next_caller
	  /* Recursive inlining never wants the master clone to
	     be overwritten.  */
	  && update_original
	  && can_remove_node_now_p (e->callee, e)
	  /* We cannot overwrite a master clone with non-inline clones
	     until after these clones are materialized.  */
	  && !master_clone_with_noninline_clones_p (e->callee))
	{
	  /* TODO: When callee is in a comdat group, we could remove all of it,
	     including all inline clones inlined into it.  That would however
	     need small function inlining to register edge removal hook to
	     maintain the priority queue.

	     For now we keep the other functions in the group in the program until
	     cgraph_remove_unreachable_functions gets rid of them.  */
	  gcc_assert (!e->callee->global.inlined_to);
	  e->callee->dissolve_same_comdat_group_list ();
	  if (e->callee->definition && !DECL_EXTERNAL (e->callee->decl))
	    {
	      if (overall_size)
	        *overall_size -= inline_summary (e->callee)->size;
	      nfunctions_inlined++;
	    }
	  duplicate = false;
	  e->callee->externally_visible = false;
          update_noncloned_frequencies (e->callee, e->frequency);
	}
      else
	{
	  struct cgraph_node *n;

	  if (freq_scale == -1)
	    freq_scale = e->frequency;
	  n = e->callee->create_clone (e->callee->decl,
				       MIN (e->count, e->callee->count),
				       freq_scale,
				       update_original, vNULL, true,
				       inlining_into,
				       NULL);
	  e->redirect_callee (n);
	}
    }
  else
    e->callee->dissolve_same_comdat_group_list ();

  e->callee->global.inlined_to = inlining_into;

  /* Recursively clone all bodies.  */
  for (e = e->callee->callees; e; e = next)
    {
      next = e->next_callee;
      if (!e->inline_failed)
        clone_inlined_nodes (e, duplicate, update_original, overall_size, freq_scale);
      if (e->speculative && !speculation_useful_p (e, true))
	{
	  e->resolve_speculation (NULL);
	  speculation_removed = true;
	}
    }
}
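
Unlike Example #1, the recursive walk in Example #2 saves next_callee before handling each edge, because resolving a useless speculation can remove the current edge from the callee list.  The same delete-safe iteration pattern, stripped of the cgraph specifics, is sketched below; the list type and helpers are illustrative only, not GCC interfaces.

#include <stdlib.h>

struct item
{
  int value;
  struct item *next;
};

/* Unlink NODE from the list headed at *HEAD and free it.  */
static void
remove_item (struct item **head, struct item *node)
{
  for (struct item **p = head; *p; p = &(*p)->next)
    if (*p == node)
      {
	*p = node->next;
	free (node);
	return;
      }
}

/* Walk the list and drop negative items.  The next pointer is captured
   before the loop body runs, so removing the current item is safe, just
   as clone_inlined_nodes saves next_callee before possibly resolving a
   speculative edge.  */
static void
drop_negative (struct item **head)
{
  struct item *next;
  for (struct item *it = *head; it; it = next)
    {
      next = it->next;
      if (it->value < 0)
	remove_item (head, it);
    }
}
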