Example #1
static void
build_single_def_use_links (void)
{
  /* We use the multiple definitions problem to compute our restricted
     use-def chains.  */
  df_set_flags (DF_EQ_NOTES);
  df_md_add_problem ();
  df_note_add_problem ();
  df_analyze ();
  df_maybe_reorganize_use_refs (DF_REF_ORDER_BY_INSN_WITH_NOTES);

  use_def_ref.create (DF_USES_TABLE_SIZE ());
  use_def_ref.safe_grow_cleared (DF_USES_TABLE_SIZE ());

  reg_defs.create (max_reg_num ());
  reg_defs.safe_grow_cleared (max_reg_num ());

  reg_defs_stack.create (n_basic_blocks_for_fn (cfun) * 10);
  local_md = BITMAP_ALLOC (NULL);
  local_lr = BITMAP_ALLOC (NULL);

  /* Walk the dominator tree looking for single reaching definitions
     dominating the uses.  This is similar to how SSA form is built.  */
  single_def_use_dom_walker (CDI_DOMINATORS)
    .walk (cfun->cfg->x_entry_block_ptr);

  BITMAP_FREE (local_lr);
  BITMAP_FREE (local_md);
  reg_defs.release ();
  reg_defs_stack.release ();
}
Example #2
/* Finalization of the RTL loop passes.  */
static unsigned int
rtl_loop_done (void)
{
  basic_block bb;

  if (current_loops)
    loop_optimizer_finalize (current_loops);

  free_dominance_info (CDI_DOMINATORS);

  /* Finalize layout changes.  */
  FOR_EACH_BB (bb)
    if (bb->next_bb != EXIT_BLOCK_PTR)
      bb->aux = bb->next_bb;
  cfg_layout_finalize ();

  cleanup_cfg (CLEANUP_EXPENSIVE);
  delete_trivially_dead_insns (get_insns (), max_reg_num ());
  reg_scan (get_insns (), max_reg_num ());
  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  current_loops = NULL;
  return 0;
}
Example #3
static void
rest_of_handle_web (void)
{
  web_main ();
  delete_trivially_dead_insns (get_insns (), max_reg_num ());
  cleanup_cfg (CLEANUP_EXPENSIVE);
  reg_scan (get_insns (), max_reg_num ());
}
Example #4
void
init_alias_analysis (void)
{
  unsigned int maxreg = max_reg_num ();
  (VEC_rtx_gc_safe_grow_cleared
   (&(reg_base_value), maxreg, "../../../gcc-4.6.0/gcc/alias.c", 2755,
    __FUNCTION__, arf ()));
}
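The body above appears to be a preprocessor-expanded fragment of init_alias_analysis from GCC 4.6's alias.c: the call to VEC_rtx_gc_safe_grow_cleared with literal file, line and __FUNCTION__ arguments is roughly what the VEC_safe_grow_cleared checking macro expands to. In unexpanded source, the growth of reg_base_value would look something like the sketch below (only this call, not the rest of the function):

  unsigned int maxreg = max_reg_num ();
  /* Grow the GC-allocated vector of base values so it has one
     zero-initialized slot per register.  */
  VEC_safe_grow_cleared (rtx, gc, reg_base_value, maxreg);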
Example #5
/* Run tracer.  */
static unsigned int
rest_of_handle_tracer (void)
{
  if (dump_file)
    dump_flow_info (dump_file, dump_flags);
  tracer (0);
  cleanup_cfg (CLEANUP_EXPENSIVE);
  reg_scan (get_insns (), max_reg_num ());
  return 0;
}
Example #6
void
regstat_init_n_sets_and_refs (void)
{
  unsigned int i;
  unsigned int max_regno = max_reg_num ();

  for (i = 0; i < max_regno; i++)
    {
      (regstat_n_sets_and_refs[i].sets = (df->def_regs[(i)]->n_refs));
      (regstat_n_sets_and_refs[i].refs =
       (df->use_regs[(i)]->n_refs) + REG_N_SETS (i));
    }
}
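The two assignments above look like the preprocessed form of the regstat accessor macros: SET_REG_N_SETS / SET_REG_N_REFS store into regstat_n_sets_and_refs, and the df->def_regs / df->use_regs reference counts correspond to DF_REG_DEF_COUNT / DF_REG_USE_COUNT. In unexpanded GCC source the same loop would read roughly:

  for (i = 0; i < max_regno; i++)
    {
      /* Record how many times pseudo I is set, then its total number
         of references (uses plus sets).  */
      SET_REG_N_SETS (i, DF_REG_DEF_COUNT (i));
      SET_REG_N_REFS (i, DF_REG_USE_COUNT (i) + REG_N_SETS (i));
    }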
Example #7
static bool
rtl_value_profile_transformations (void)
{
  rtx insn, next;
  int changed = false;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (!INSN_P (insn))
	continue;

      /* Scan for insn carrying a histogram.  */
      if (!find_reg_note (insn, REG_VALUE_PROFILE, 0))
	continue;

      /* Ignore cold areas -- we are growing the code.  */
      if (!maybe_hot_bb_p (BLOCK_FOR_INSN (insn)))
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Trying transformations on insn %d\n",
		   INSN_UID (insn));
	  print_rtl_single (dump_file, insn);
	}

      /* Transformations:  */
      if (flag_value_profile_transformations
	  && (mod_subtract_transform (insn)
	      || divmod_fixed_value_transform (insn)
	      || mod_pow2_value_transform (insn)))
	changed = true;
#ifdef HAVE_prefetch
      if (flag_speculative_prefetching
	  && speculative_prefetching_transform (insn))
	changed = true;
#endif
    }

  if (changed)
    {
      commit_edge_insertions ();
      allocate_reg_info (max_reg_num (), FALSE, FALSE);
    }

  return changed;
}
Example #8
File: web.c  Project: 0mp/freebsd
static void
web_main (void)
{
  struct df *df;
  struct web_entry *def_entry;
  struct web_entry *use_entry;
  unsigned int i;
  int max = max_reg_num ();
  char *used;

  df = df_init (DF_EQUIV_NOTES);
  df_chain_add_problem (df, DF_UD_CHAIN);
  df_analyze (df);
  df_reorganize_refs (&df->def_info);
  df_reorganize_refs (&df->use_info);

  def_entry = XCNEWVEC (struct web_entry, DF_DEFS_SIZE (df));
  use_entry = XCNEWVEC (struct web_entry, DF_USES_SIZE (df));
  used = XCNEWVEC (char, max);

  if (dump_file)
    df_dump (df, dump_file);

  /* Produce the web.  */
  for (i = 0; i < DF_USES_SIZE (df); i++)
    union_defs (df, DF_USES_GET (df, i), def_entry, use_entry, unionfind_union);

  /* Update the instruction stream, allocating new registers for split pseudos
     in progress.  */
  for (i = 0; i < DF_USES_SIZE (df); i++)
    replace_ref (DF_USES_GET (df, i), 
		 entry_register (use_entry + i, DF_USES_GET (df, i), used));
  for (i = 0; i < DF_DEFS_SIZE (df); i++)
    replace_ref (DF_DEFS_GET (df, i), 
		 entry_register (def_entry + i, DF_DEFS_GET (df, i), used));

  /* Dataflow information is corrupt here, but it can be easily updated
     by creating new entries for new registers and updates or calling
     df_insns_modify.  */
  free (def_entry);
  free (use_entry);
  free (used);
  df_finish (df);
  df = NULL;
}
Example #9
void
web_main (void)
{
  struct df *df;
  struct web_entry *def_entry;
  struct web_entry *use_entry;
  unsigned int i;
  int max = max_reg_num ();
  char *used;

  df = df_init ();
  df_analyze (df, 0, DF_UD_CHAIN | DF_EQUIV_NOTES);

  def_entry = xcalloc (df->n_defs, sizeof (struct web_entry));
  use_entry = xcalloc (df->n_uses, sizeof (struct web_entry));
  used = xcalloc (max, sizeof (char));

  if (dump_file)
    df_dump (df, DF_UD_CHAIN | DF_DU_CHAIN, dump_file);

  /* Produce the web.  */
  for (i = 0; i < df->n_uses; i++)
    union_defs (df, df->uses[i], def_entry, use_entry);

  /* Update the instruction stream, allocating new registers for split pseudos
     in progress.  */
  for (i = 0; i < df->n_uses; i++)
    replace_ref (df->uses[i], entry_register (use_entry + i, df->uses[i],
					      used));
  for (i = 0; i < df->n_defs; i++)
    replace_ref (df->defs[i], entry_register (def_entry + i, df->defs[i],
					      used));

  /* Dataflow information is corrupt here, but it can be easily updated
     by creating new entries for new registers and updates or calling
     df_insns_modify.  */
  free (def_entry);
  free (use_entry);
  free (used);
  df_finish (df);
}
Example #10
/* Spill pseudos which are assigned to hard registers in SET.  Add
   affected insns for processing in the subsequent constraint
   pass.  */
static void
spill_pseudos (HARD_REG_SET set)
{
  int i;
  bitmap_head to_process;
  rtx_insn *insn;

  if (hard_reg_set_empty_p (set))
    return;
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, "	   Spilling non-eliminable hard regs:");
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (set, i))
	  fprintf (lra_dump_file, " %d", i);
      fprintf (lra_dump_file, "\n");
    }
  bitmap_initialize (&to_process, &reg_obstack);
  for (i = FIRST_PSEUDO_REGISTER; i < max_reg_num (); i++)
    if (lra_reg_info[i].nrefs != 0 && reg_renumber[i] >= 0
	&& overlaps_hard_reg_set_p (set,
				    PSEUDO_REGNO_MODE (i), reg_renumber[i]))
      {
	if (lra_dump_file != NULL)
	  fprintf (lra_dump_file, "	 Spilling r%d(%d)\n",
		   i, reg_renumber[i]);
	reg_renumber[i] = -1;
	bitmap_ior_into (&to_process, &lra_reg_info[i].insn_bitmap);
      }
  IOR_HARD_REG_SET (lra_no_alloc_regs, set);
  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    if (bitmap_bit_p (&to_process, INSN_UID (insn)))
      {
	lra_push_insn (insn);
	lra_set_used_insn_alternative (insn, -1);
      }
  bitmap_clear (&to_process);
}
Example #11
/* The major function for aggressive pseudo coalescing of moves, done only
   if both pseudos were spilled and are not special reload pseudos.  */
bool
lra_coalesce (void)
{
  basic_block bb;
  rtx mv, set, insn, next, *sorted_moves;
  int i, mv_num, sregno, dregno;
  int coalesced_moves;
  int max_regno = max_reg_num ();
  bitmap_head involved_insns_bitmap;

  timevar_push (TV_LRA_COALESCE);

  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "\n********** Pseudos coalescing #%d: **********\n\n",
	     ++lra_coalesce_iter);
  first_coalesced_pseudo = XNEWVEC (int, max_regno);
  next_coalesced_pseudo = XNEWVEC (int, max_regno);
  for (i = 0; i < max_regno; i++)
    first_coalesced_pseudo[i] = next_coalesced_pseudo[i] = i;
  sorted_moves = XNEWVEC (rtx, get_max_uid ());
  mv_num = 0;
  /* Collect moves.  */
  coalesced_moves = 0;
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_SAFE (bb, insn, next)
	if (INSN_P (insn)
	    && (set = single_set (insn)) != NULL_RTX
	    && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set))
	    && (sregno = REGNO (SET_SRC (set))) >= FIRST_PSEUDO_REGISTER
	    && (dregno = REGNO (SET_DEST (set))) >= FIRST_PSEUDO_REGISTER
	    && mem_move_p (sregno, dregno)
	    && coalescable_pseudo_p (sregno) && coalescable_pseudo_p (dregno)
	    && ! side_effects_p (set)
	    && !(lra_intersected_live_ranges_p
		 (lra_reg_info[sregno].live_ranges,
		  lra_reg_info[dregno].live_ranges)))
	  sorted_moves[mv_num++] = insn;
    }
  qsort (sorted_moves, mv_num, sizeof (rtx), move_freq_compare_func);
  /* Coalesce copies, most frequently executed first.  */
  bitmap_initialize (&coalesced_pseudos_bitmap, &reg_obstack);
  bitmap_initialize (&involved_insns_bitmap, &reg_obstack);
  for (i = 0; i < mv_num; i++)
    {
      mv = sorted_moves[i];
      set = single_set (mv);
      lra_assert (set != NULL && REG_P (SET_SRC (set))
		  && REG_P (SET_DEST (set)));
      sregno = REGNO (SET_SRC (set));
      dregno = REGNO (SET_DEST (set));
      if (first_coalesced_pseudo[sregno] == first_coalesced_pseudo[dregno])
	{
	  coalesced_moves++;
	  if (lra_dump_file != NULL)
	    fprintf
	      (lra_dump_file, "      Coalescing move %i:r%d-r%d (freq=%d)\n",
	       INSN_UID (mv), sregno, dregno,
	       BLOCK_FOR_INSN (mv)->frequency);
	  /* We updated involved_insns_bitmap when doing the merge.  */
	}
      else if (!(lra_intersected_live_ranges_p
		 (lra_reg_info[first_coalesced_pseudo[sregno]].live_ranges,
		  lra_reg_info[first_coalesced_pseudo[dregno]].live_ranges)))
	{
	  coalesced_moves++;
	  if (lra_dump_file != NULL)
	    fprintf
	      (lra_dump_file,
	       "  Coalescing move %i:r%d(%d)-r%d(%d) (freq=%d)\n",
	       INSN_UID (mv), sregno, ORIGINAL_REGNO (SET_SRC (set)),
	       dregno, ORIGINAL_REGNO (SET_DEST (set)),
	       BLOCK_FOR_INSN (mv)->frequency);
	  bitmap_ior_into (&involved_insns_bitmap,
			   &lra_reg_info[sregno].insn_bitmap);
	  bitmap_ior_into (&involved_insns_bitmap,
			   &lra_reg_info[dregno].insn_bitmap);
	  merge_pseudos (sregno, dregno);
	}
    }
  bitmap_initialize (&used_pseudos_bitmap, &reg_obstack);
  FOR_EACH_BB (bb)
    {
      update_live_info (df_get_live_in (bb));
      update_live_info (df_get_live_out (bb));
      FOR_BB_INSNS_SAFE (bb, insn, next)
	if (INSN_P (insn)
	    && bitmap_bit_p (&involved_insns_bitmap, INSN_UID (insn)))
	  {
	    if (! substitute (&insn))
	      continue;
	    lra_update_insn_regno_info (insn);
	    if ((set = single_set (insn)) != NULL_RTX && set_noop_p (set))
	      {
		/* Coalesced move.  */
		if (lra_dump_file != NULL)
		  fprintf (lra_dump_file, "	 Removing move %i (freq=%d)\n",
			 INSN_UID (insn), BLOCK_FOR_INSN (insn)->frequency);
		lra_set_insn_deleted (insn);
	      }
	  }
    }
  bitmap_clear (&used_pseudos_bitmap);
  bitmap_clear (&involved_insns_bitmap);
  bitmap_clear (&coalesced_pseudos_bitmap);
  if (lra_dump_file != NULL && coalesced_moves != 0)
    fprintf (lra_dump_file, "Coalesced Moves = %d\n", coalesced_moves);
  free (sorted_moves);
  free (next_coalesced_pseudo);
  free (first_coalesced_pseudo);
  timevar_pop (TV_LRA_COALESCE);
  return coalesced_moves != 0;
}
Example #12
/* Find the list of values for which we want to measure histograms.  */
static void
rtl_find_values_to_profile (histogram_values *values)
{
  rtx insn;
  unsigned i, libcall_level;

  life_analysis (NULL, PROP_DEATH_NOTES);

  *values = VEC_alloc (histogram_value, 0);
  libcall_level = 0;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	libcall_level++;

      /* Do not instrument values inside libcalls (we are going to split block
	 due to instrumentation, and libcall blocks should be local to a single
	 basic block).  */
      if (!libcall_level)
	insn_values_to_profile (insn, values);

      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	{
	  gcc_assert (libcall_level > 0);
	  libcall_level--;
	}
    }
  gcc_assert (libcall_level == 0);

  for (i = 0; i < VEC_length (histogram_value, *values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, *values, i);

      switch (hist->type)
	{
	case HIST_TYPE_INTERVAL:
	  if (dump_file)
	    fprintf (dump_file,
		     "Interval counter for insn %d, range %d -- %d.\n",
		     INSN_UID ((rtx)hist->insn),
		     hist->hdata.intvl.int_start,
		     (hist->hdata.intvl.int_start
		      + hist->hdata.intvl.steps - 1));
	  hist->n_counters = hist->hdata.intvl.steps +
		  (hist->hdata.intvl.may_be_less ? 1 : 0) +
		  (hist->hdata.intvl.may_be_more ? 1 : 0);
	  break;

	case HIST_TYPE_POW2:
	  if (dump_file)
	    fprintf (dump_file,
		     "Pow2 counter for insn %d.\n",
		     INSN_UID ((rtx)hist->insn));
	  hist->n_counters 
		= GET_MODE_BITSIZE (hist->mode)
		  +  (hist->hdata.pow2.may_be_other ? 1 : 0);
	  break;

	case HIST_TYPE_SINGLE_VALUE:
	  if (dump_file)
	    fprintf (dump_file,
		     "Single value counter for insn %d.\n",
		     INSN_UID ((rtx)hist->insn));
	  hist->n_counters = 3;
	  break;

	case HIST_TYPE_CONST_DELTA:
	  if (dump_file)
	    fprintf (dump_file,
		     "Constant delta counter for insn %d.\n",
		     INSN_UID ((rtx)hist->insn));
	  hist->n_counters = 4;
	  break;

	default:
	  abort ();
	}
    }
  allocate_reg_info (max_reg_num (), FALSE, FALSE);
}