Example #1
static void
mark_all_labels (rtx f)
{
  rtx insn;

  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      FOR_EACH_BB (bb)
	{
	  /* In cfglayout mode, we don't bother with trivial next-insn
	     propagation of LABEL_REFs into JUMP_LABEL.  This will be
	     handled by other optimizers using better algorithms.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      gcc_assert (! INSN_DELETED_P (insn));
	      if (NONDEBUG_INSN_P (insn))
	        mark_jump_label (PATTERN (insn), insn, 0);
	    }

	  /* In cfglayout mode, there may be non-insns between the
	     basic blocks.  If those non-insns represent tablejump data,
	     they contain label references that we must record.  */
	  for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	    if (INSN_P (insn))
	      {
		gcc_assert (JUMP_TABLE_DATA_P (insn));
		mark_jump_label (PATTERN (insn), insn, 0);
	      }
	  for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	    if (INSN_P (insn))
	      {
		gcc_assert (JUMP_TABLE_DATA_P (insn));
		mark_jump_label (PATTERN (insn), insn, 0);
	      }
	}
    }
}
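Every use of PATTERN above is guarded by INSN_P or NONDEBUG_INSN_P, since notes, labels and barriers carry no pattern. A minimal sketch of that walk in isolation, assuming GCC's internal rtl.h/basic-block.h declarations; process_pattern is a hypothetical callback, not a GCC API:

/* Sketch, not GCC source: visit the PATTERN of every real insn in BB.  */
static void
visit_bb_patterns (basic_block bb, void (*process_pattern) (rtx, rtx))
{
  rtx insn;

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))		/* skip notes, labels, barriers */
      process_pattern (PATTERN (insn), insn);
}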
Example #2
static rtx
prev_insn_in_block (rtx insn)
{
  basic_block bb = BLOCK_FOR_INSN (insn);

  if (!bb)
    return NULL_RTX;

  while (insn != BB_HEAD (bb))
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
        return insn;
    }
  return NULL_RTX;
}
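The same scan works in the forward direction; a hedged mirror of the function above, using the same BLOCK_FOR_INSN/BB_END/NEXT_INSN accessors (a sketch, not GCC source):

/* Sketch: the forward counterpart of prev_insn_in_block above.  */
static rtx
next_insn_in_block (rtx insn)
{
  basic_block bb = BLOCK_FOR_INSN (insn);

  if (!bb)
    return NULL_RTX;

  while (insn != BB_END (bb))
    {
      insn = NEXT_INSN (insn);
      if (INSN_P (insn))
        return insn;
    }
  return NULL_RTX;
}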
Example #3
static bool
copy_bb_p (basic_block bb, int code_may_grow)
{
  int size = 0;
  int max_size = uncond_jump_length;
  rtx insn;
  int n_succ;
  edge e;

  if (!bb->frequency)
    return false;
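  /* Duplicate only blocks with more than one predecessor; with a
     single predecessor there is nothing to gain from copying.  */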
  if (!bb->pred || !bb->pred->pred_next)
    return false;
  if (!cfg_layout_can_duplicate_bb_p (bb))
    return false;

  /* Avoid duplicating blocks which have many successors (PR/13430).  */
  n_succ = 0;
  for (e = bb->succ; e; e = e->succ_next)
    {
      n_succ++;
      if (n_succ > 8)
	return false;
    }

  if (code_may_grow && maybe_hot_bb_p (bb))
    max_size *= 8;

  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	size += get_attr_length (insn);
    }

  if (size <= max_size)
    return true;

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file,
	       "Block %d can't be copied because its size = %d.\n",
	       bb->index, size);
    }

  return false;
}
Example #4
/* Emit a slim dump of X (an insn) to the file F, including any register
   note attached to the instruction.  */
void
dump_insn_slim (FILE *f, const_rtx x)
{
  char t[BUF_LEN + 32];
  rtx note;

  print_insn (t, x, 1);
  fputs (t, f);
  putc ('\n', f);
  if (INSN_P (x) && REG_NOTES (x))
    for (note = REG_NOTES (x); note; note = XEXP (note, 1))
      {
        print_value (t, XEXP (note, 0), 1);
	fprintf (f, "      %s: %s\n",
		 GET_REG_NOTE_NAME (REG_NOTE_KIND (note)), t);
      }
}
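A typical debugging use, sketched under the assumption that it runs inside a pass where the insn chain of the current function is available:

/* Sketch: dump the whole insn chain in slim form, e.g. to stderr.  */
static void
dump_all_insns_slim (FILE *f)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    dump_insn_slim (f, insn);
}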
Example #5
static bool
local_ref_killed_between_p (df_ref ref, rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn;

  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      df_ref def;
      if (!INSN_P (insn))
	continue;

      FOR_EACH_INSN_DEF (def, insn)
	if (DF_REF_REGNO (ref) == DF_REF_REGNO (def))
	  return true;
    }
  return false;
}
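A hedged usage sketch: fwprop-style code asks this predicate whether a def still reaches a use later in the same basic block. The wrapper name below is illustrative, not a GCC API:

/* Sketch: the value written by DEF_INSN reaches USE_INSN only if no
   intervening insn redefines the same register.  USE is the df_ref
   for the register read in USE_INSN.  */
static bool
def_reaches_use_p (df_ref use, rtx_insn *def_insn, rtx_insn *use_insn)
{
  return !local_ref_killed_between_p (use, def_insn, use_insn);
}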
Example #6
static void
print_insn_with_notes (pretty_printer *pp, const_rtx x)
{
  pp_string (pp, print_rtx_head);
  print_insn (pp, x, 1);
  pp_newline (pp);
  if (INSN_P (x) && REG_NOTES (x))
    for (rtx note = REG_NOTES (x); note; note = XEXP (note, 1))
      {
	pp_printf (pp, "%s      %s ", print_rtx_head,
		   GET_REG_NOTE_NAME (REG_NOTE_KIND (note)));
	if (GET_CODE (note) == INT_LIST)
	  pp_printf (pp, "%d", XINT (note, 0));
	else
	  print_pattern (pp, XEXP (note, 0), 1);
	pp_newline (pp);
      }
}
Example #7
void
init_target_units (void)
{
  rtx insn;
  int unit;

  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (! INSN_P (insn))
	continue;

      unit = insn_unit (insn);

      if (unit < 0)
	target_units |= ~unit;
      else
	target_units |= (1 << unit);
    }
}
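The sign trick above relies on insn_unit's encoding: a nonnegative result names one function unit, while a negative result is the bitwise complement of a mask of alternative units. A small decoding sketch with a worked value:

/* Sketch: decode insn_unit's result into a unit mask.  Units {0, 2}
   have mask 0b101 = 5, encoded as ~5 == -6, and ~(-6) recovers 5;
   a plain unit 3 yields 1 << 3 == 0b1000.  */
static int
unit_mask (int unit)
{
  return unit < 0 ? ~unit : 1 << unit;
}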
Example #8
static void
clear_regs_live_in_seq (HARD_REG_SET * regs, rtx insn, int length)
{
  basic_block bb;
  regset_head live;
  HARD_REG_SET hlive;
  rtx x;
  int i;

  /* Initialize liveness propagation.  */
  bb = BLOCK_FOR_INSN (insn);
  INIT_REG_SET (&live);
  bitmap_copy (&live, DF_LR_OUT (bb));
  df_simulate_initialize_backwards (bb, &live);

  /* Propagate until INSN is found.  */
  for (x = BB_END (bb); x != insn; x = PREV_INSN (x))
    df_simulate_one_insn_backwards (bb, x, &live);

  /* Clear registers live after INSN.  */
  renumbered_reg_set_to_hard_reg_set (&hlive, &live);
  AND_COMPL_HARD_REG_SET (*regs, hlive);

  /* Clear registers live in and before the sequence.  */
  for (i = 0; i < length;)
    {
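      /* I counts only real insns, so this backward walk covers exactly
	 LENGTH insns of the sequence, stepping over anything that is
	 not INSN_P.  */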
      rtx prev = PREV_INSN (x);
      df_simulate_one_insn_backwards (bb, x, &live);

      if (INSN_P (x))
        {
          renumbered_reg_set_to_hard_reg_set (&hlive, &live);
          AND_COMPL_HARD_REG_SET (*regs, hlive);
          i++;
        }

      x = prev;
    }

  /* Free unused data.  */
  CLEAR_REG_SET (&live);
}
Example #9
static unsigned int arm_pertask_ssp_rtl_execute(void)
{
	rtx_insn *insn;

	for (insn = get_insns(); insn; insn = NEXT_INSN(insn)) {
		const char *sym;
		rtx body;
		rtx mask, masked_sp;

		/*
		 * Find a SET insn involving a SYMBOL_REF to __stack_chk_guard
		 */
		if (!INSN_P(insn))
			continue;
		body = PATTERN(insn);
		if (GET_CODE(body) != SET ||
		    GET_CODE(SET_SRC(body)) != SYMBOL_REF)
			continue;
		sym = XSTR(SET_SRC(body), 0);
		if (strcmp(sym, "__stack_chk_guard"))
			continue;

		/*
		 * Replace the source of the SET insn with an expression that
		 * produces the address of the copy of the stack canary value
		 * stored in struct thread_info
		 */
		mask = GEN_INT(sext_hwi(sp_mask, GET_MODE_PRECISION(Pmode)));
		masked_sp = gen_reg_rtx(Pmode);

		emit_insn_before(gen_rtx_set(masked_sp,
					     gen_rtx_AND(Pmode,
							 stack_pointer_rtx,
							 mask)),
				 insn);

		SET_SRC(body) = gen_rtx_PLUS(Pmode, masked_sp,
					     GEN_INT(canary_offset));
	}
	return 0;
}
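In RTL terms, the rewrite performed by this Linux kernel plugin looks roughly as follows (a sketch of the before/after shapes; R and M stand for pseudo registers):

/*
 * before:  (set (reg R) (symbol_ref "__stack_chk_guard"))
 *
 * after:   (set (reg M) (and (reg sp) (const_int sp_mask)))
 *          (set (reg R) (plus (reg M) (const_int canary_offset)))
 *
 * so the canary is loaded relative to the masked stack pointer, i.e.
 * from the copy in struct thread_info, instead of from a global.
 */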
Example #10
static void
reload_cse_regs_1 (rtx first)
{
  rtx insn;
  rtx testreg = gen_rtx_REG (VOIDmode, -1);

  cselib_init (true);
  init_alias_analysis ();

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	reload_cse_simplify (insn, testreg);

      cselib_process_insn (insn);
    }

  /* Clean up.  */
  end_alias_analysis ();
  cselib_finish ();
}
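The cselib lifecycle here is the standard one: init, feed every insn in order, finish. A stripped-down sketch of just that skeleton:

/* Sketch: the minimal cselib driving loop used above.  Every insn in
   the chain is fed to cselib_process_insn, even ones failing INSN_P,
   since e.g. labels make cselib flush its table.  */
static void
walk_with_cselib (rtx first)
{
  rtx insn;

  cselib_init (true);
  for (insn = first; insn; insn = NEXT_INSN (insn))
    cselib_process_insn (insn);
  cselib_finish ();
}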
Example #11
bool
cfg_layout_can_duplicate_bb_p (basic_block bb)
{
  edge s;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
    return false;

  /* Duplicating fallthru block to exit would require adding a jump
     and splitting the real last BB.  */
  for (s = bb->succ; s; s = s->succ_next)
    if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
       return false;

  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing jump destination may be hoisted outside the basic block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = BB_HEAD (bb);
      while (1)
	{
	  if (INSN_P (insn) && (*targetm.cannot_copy_insn_p) (insn))
	    return false;
	  if (insn == BB_END (bb))
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  return true;
}
Example #12
edge
single_def_use_dom_walker::before_dom_children (basic_block bb)
{
  int bb_index = bb->index;
  struct df_md_bb_info *md_bb_info = df_md_get_bb_info (bb_index);
  struct df_lr_bb_info *lr_bb_info = df_lr_get_bb_info (bb_index);
  rtx_insn *insn;

  bitmap_copy (local_md, &md_bb_info->in);
  bitmap_copy (local_lr, &lr_bb_info->in);

  /* Push a marker for the leave_block callback.  */
  reg_defs_stack.safe_push (NULL);

  process_uses (df_get_artificial_uses (bb_index), DF_REF_AT_TOP);
  process_defs (df_get_artificial_defs (bb_index), DF_REF_AT_TOP);

  /* We don't call df_simulate_initialize_forwards, as it may overestimate
     the live registers if there are unused artificial defs.  We prefer
     liveness to be underestimated.  */

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
        unsigned int uid = INSN_UID (insn);
        process_uses (DF_INSN_UID_USES (uid), 0);
        process_uses (DF_INSN_UID_EQ_USES (uid), 0);
        process_defs (DF_INSN_UID_DEFS (uid), 0);
	df_simulate_one_insn_forwards (bb, insn, local_lr);
      }

  process_uses (df_get_artificial_uses (bb_index), 0);
  process_defs (df_get_artificial_defs (bb_index), 0);

  return NULL;
}
Example #13
/* The major function for aggressive coalescing of moves, applied only
   when both pseudos involved were spilled and neither is a special
   reload pseudo.  */
bool
lra_coalesce (void)
{
  basic_block bb;
  rtx mv, set, insn, next, *sorted_moves;
  int i, mv_num, sregno, dregno;
  int coalesced_moves;
  int max_regno = max_reg_num ();
  bitmap_head involved_insns_bitmap;

  timevar_push (TV_LRA_COALESCE);

  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "\n********** Pseudos coalescing #%d: **********\n\n",
	     ++lra_coalesce_iter);
  first_coalesced_pseudo = XNEWVEC (int, max_regno);
  next_coalesced_pseudo = XNEWVEC (int, max_regno);
  for (i = 0; i < max_regno; i++)
    first_coalesced_pseudo[i] = next_coalesced_pseudo[i] = i;
  sorted_moves = XNEWVEC (rtx, get_max_uid ());
  mv_num = 0;
  /* Collect moves.  */
  coalesced_moves = 0;
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_SAFE (bb, insn, next)
	if (INSN_P (insn)
	    && (set = single_set (insn)) != NULL_RTX
	    && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set))
	    && (sregno = REGNO (SET_SRC (set))) >= FIRST_PSEUDO_REGISTER
	    && (dregno = REGNO (SET_DEST (set))) >= FIRST_PSEUDO_REGISTER
	    && mem_move_p (sregno, dregno)
	    && coalescable_pseudo_p (sregno) && coalescable_pseudo_p (dregno)
	    && ! side_effects_p (set)
	    && !(lra_intersected_live_ranges_p
		 (lra_reg_info[sregno].live_ranges,
		  lra_reg_info[dregno].live_ranges)))
	  sorted_moves[mv_num++] = insn;
    }
  qsort (sorted_moves, mv_num, sizeof (rtx), move_freq_compare_func);
  /* Coalesced copies, most frequently executed first.	*/
  bitmap_initialize (&coalesced_pseudos_bitmap, &reg_obstack);
  bitmap_initialize (&involved_insns_bitmap, &reg_obstack);
  for (i = 0; i < mv_num; i++)
    {
      mv = sorted_moves[i];
      set = single_set (mv);
      lra_assert (set != NULL && REG_P (SET_SRC (set))
		  && REG_P (SET_DEST (set)));
      sregno = REGNO (SET_SRC (set));
      dregno = REGNO (SET_DEST (set));
      if (first_coalesced_pseudo[sregno] == first_coalesced_pseudo[dregno])
	{
	  coalesced_moves++;
	  if (lra_dump_file != NULL)
	    fprintf
	      (lra_dump_file, "      Coalescing move %i:r%d-r%d (freq=%d)\n",
	       INSN_UID (mv), sregno, dregno,
	       BLOCK_FOR_INSN (mv)->frequency);
	  /* We updated involved_insns_bitmap when doing the merge.  */
	}
      else if (!(lra_intersected_live_ranges_p
		 (lra_reg_info[first_coalesced_pseudo[sregno]].live_ranges,
		  lra_reg_info[first_coalesced_pseudo[dregno]].live_ranges)))
	{
	  coalesced_moves++;
	  if (lra_dump_file != NULL)
	    fprintf
	      (lra_dump_file,
	       "  Coalescing move %i:r%d(%d)-r%d(%d) (freq=%d)\n",
	       INSN_UID (mv), sregno, ORIGINAL_REGNO (SET_SRC (set)),
	       dregno, ORIGINAL_REGNO (SET_DEST (set)),
	       BLOCK_FOR_INSN (mv)->frequency);
	  bitmap_ior_into (&involved_insns_bitmap,
			   &lra_reg_info[sregno].insn_bitmap);
	  bitmap_ior_into (&involved_insns_bitmap,
			   &lra_reg_info[dregno].insn_bitmap);
	  merge_pseudos (sregno, dregno);
	}
    }
  bitmap_initialize (&used_pseudos_bitmap, &reg_obstack);
  FOR_EACH_BB (bb)
    {
      update_live_info (df_get_live_in (bb));
      update_live_info (df_get_live_out (bb));
      FOR_BB_INSNS_SAFE (bb, insn, next)
	if (INSN_P (insn)
	    && bitmap_bit_p (&involved_insns_bitmap, INSN_UID (insn)))
	  {
	    if (! substitute (&insn))
	      continue;
	    lra_update_insn_regno_info (insn);
	    if ((set = single_set (insn)) != NULL_RTX && set_noop_p (set))
	      {
		/* Coalesced move.  */
		if (lra_dump_file != NULL)
		  fprintf (lra_dump_file, "	 Removing move %i (freq=%d)\n",
			 INSN_UID (insn), BLOCK_FOR_INSN (insn)->frequency);
		lra_set_insn_deleted (insn);
	      }
	  }
    }
  bitmap_clear (&used_pseudos_bitmap);
  bitmap_clear (&involved_insns_bitmap);
  bitmap_clear (&coalesced_pseudos_bitmap);
  if (lra_dump_file != NULL && coalesced_moves != 0)
    fprintf (lra_dump_file, "Coalesced Moves = %d\n", coalesced_moves);
  free (sorted_moves);
  free (next_coalesced_pseudo);
  free (first_coalesced_pseudo);
  timevar_pop (TV_LRA_COALESCE);
  return coalesced_moves != 0;
}
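merge_pseudos itself is not shown above. first_coalesced_pseudo[] holds each pseudo's class representative and next_coalesced_pseudo[] links every class into a ring, so a merge is a relabel plus a splice of two rings. A hedged sketch of that operation (not the lra-coalesce.c source):

/* Sketch: merge the coalesce classes of R1 and R2 by relabeling R2's
   ring to R1's representative and splicing the circular lists.  */
static void
merge_pseudos_sketch (int r1, int r2)
{
  int first = first_coalesced_pseudo[r1];
  int r, tmp;

  for (r = next_coalesced_pseudo[r2]; ; r = next_coalesced_pseudo[r])
    {
      first_coalesced_pseudo[r] = first;
      if (r == r2)
	break;
    }
  tmp = next_coalesced_pseudo[first];
  next_coalesced_pseudo[first] = next_coalesced_pseudo[r2];
  next_coalesced_pseudo[r2] = tmp;
}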
Example #14
static void
collect_pattern_seqs (void)
{
  htab_iterator hti0, hti1, hti2;
  p_hash_bucket hash_bucket;
  p_hash_elem e0, e1;
#if defined STACK_REGS || defined HAVE_cc0
  basic_block bb;
  bitmap_head dont_collect;

  /* Extra initialization step to ensure that no stack registers (if present)
     or cc0 code (if present) are live across abnormal edges.
     Set a flag in DONT_COLLECT for an insn if a stack register is live
     after the insn or the insn is cc0 setter or user.  */
  bitmap_initialize (&dont_collect, NULL);

#ifdef STACK_REGS
  FOR_EACH_BB (bb)
  {
    regset_head live;
    rtx insn;
    rtx prev;

    /* Initialize liveness propagation.  */
    INIT_REG_SET (&live);
    bitmap_copy (&live, DF_LR_OUT (bb));
    df_simulate_initialize_backwards (bb, &live);

    /* Propagate liveness info and mark insns where a stack reg is live.  */
    for (insn = BB_END (bb); ; insn = prev)
      {
	prev = PREV_INSN (insn);
	if (INSN_P (insn))
	  {
	    int reg;
	    for (reg = FIRST_STACK_REG; reg <= LAST_STACK_REG; reg++)
	      {
		if (REGNO_REG_SET_P (&live, reg))
		  {
		    bitmap_set_bit (&dont_collect, INSN_UID (insn));
		    break;
		  }
	      }
	    
	  }
	if (insn == BB_HEAD (bb))
	  break;
	df_simulate_one_insn_backwards (bb, insn, &live);
      }

    /* Free unused data.  */
    CLEAR_REG_SET (&live);
  }
#endif

#ifdef HAVE_cc0
  /* Mark CC0 setters and users as ineligible for collection into sequences.
     This is an over-conservative fix, since it is OK to include
     a cc0_setter, but only if we also include the corresponding cc0_user,
     and vice versa.  */
  FOR_EACH_BB (bb)
  {
    rtx insn;
    rtx next_tail;

    next_tail = NEXT_INSN (BB_END (bb));

    for (insn = BB_HEAD (bb); insn != next_tail; insn = NEXT_INSN (insn))
      {
	if (INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
	  bitmap_set_bit (&dont_collect, INSN_UID (insn));
      }
  }
#endif

#endif /* defined STACK_REGS || defined HAVE_cc0 */

  /* Initialize PATTERN_SEQS to empty.  */
  pattern_seqs = 0;

  /* Try to match every abstractable insn with every other insn in the same
     HASH_BUCKET.  */

  FOR_EACH_HTAB_ELEMENT (hash_buckets, hash_bucket, p_hash_bucket, hti0)
    if (htab_elements (hash_bucket->seq_candidates) > 1)
      FOR_EACH_HTAB_ELEMENT (hash_bucket->seq_candidates, e0, p_hash_elem, hti1)
        FOR_EACH_HTAB_ELEMENT (hash_bucket->seq_candidates, e1, p_hash_elem,
                               hti2)
          if (e0 != e1
#if defined STACK_REGS || defined HAVE_cc0
              && !bitmap_bit_p (&dont_collect, INSN_UID (e0->insn))
              && !bitmap_bit_p (&dont_collect, INSN_UID (e1->insn))
#endif
             )
            match_seqs (e0, e1);
#if defined STACK_REGS || defined HAVE_cc0
  /* Free unused data.  */
  bitmap_clear (&dont_collect);
#endif
}
Example #15
/* Find the values inside INSN for which we want to measure histograms
   for division/modulo optimization, and store them in VALUES.  */
static void
insn_divmod_values_to_profile (rtx insn, histogram_values *values)
{
  rtx set, set_src, op1, op2;
  enum machine_mode mode;
  histogram_value hist;

  if (!INSN_P (insn))
    return;

  set = single_set (insn);
  if (!set)
    return;

  mode = GET_MODE (SET_DEST (set));
  if (!INTEGRAL_MODE_P (mode))
    return;

  set_src = SET_SRC (set);
  switch (GET_CODE (set_src))
    {
    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      op1 = XEXP (set_src, 0);
      op2 = XEXP (set_src, 1);
      if (side_effects_p (op2))
	return;

      /* Check for a special case where the divisor is a power of 2.  */
      if ((GET_CODE (set_src) == UMOD) && !CONSTANT_P (op2))
	{
	  hist = ggc_alloc (sizeof (*hist));
	  hist->value = op2;
	  hist->seq = NULL_RTX;
	  hist->mode = mode;
	  hist->insn = insn;
	  hist->type = HIST_TYPE_POW2;
	  hist->hdata.pow2.may_be_other = 1;
	  VEC_safe_push (histogram_value, *values, hist);
	}

      /* Check whether the divisor is not in fact a constant.  */
      if (!CONSTANT_P (op2))
	{
	  hist = ggc_alloc (sizeof (*hist));
	  hist->value = op2;
	  hist->mode = mode;
	  hist->seq = NULL_RTX;
	  hist->insn = insn;
	  hist->type = HIST_TYPE_SINGLE_VALUE;
	  VEC_safe_push (histogram_value, *values, hist);
	}

      /* For mod, check whether it is not often a noop (or replaceable by
	 a few subtractions).  */
      if (GET_CODE (set_src) == UMOD && !side_effects_p (op1))
	{
	  rtx tmp;

	  hist = ggc_alloc (sizeof (*hist));
	  start_sequence ();
	  tmp = simplify_gen_binary (DIV, mode, copy_rtx (op1), copy_rtx (op2));
	  hist->value = force_operand (tmp, NULL_RTX);
	  hist->seq = get_insns ();
	  end_sequence ();
	  hist->mode = mode;
	  hist->insn = insn;
	  hist->type = HIST_TYPE_INTERVAL;
	  hist->hdata.intvl.int_start = 0;
	  hist->hdata.intvl.steps = 2;
	  hist->hdata.intvl.may_be_less = 1;
	  hist->hdata.intvl.may_be_more = 1;
	  VEC_safe_push (histogram_value, *values, hist);
	}
      return;

    default:
      return;
    }
}
Example #16
static void
mark_all_labels (rtx f)
{
  rtx insn;
  rtx prev_nonjump_insn = NULL;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	mark_jump_label (PATTERN (insn), insn, 0);

	/* If the previous non-jump insn sets something to a label,
	   something that this jump insn uses, make that label the primary
	   target of this insn if we don't yet have any.  That previous
	   insn must be a single_set and not refer to more than one label.
	   The jump insn must not refer to other labels as jump targets
	   and must be a plain (set (pc) ...), maybe in a parallel, and
	   may refer to the item being set only directly or as one of the
	   arms in an IF_THEN_ELSE.  */
	if (! INSN_DELETED_P (insn)
	    && JUMP_P (insn)
	    && JUMP_LABEL (insn) == NULL)
	  {
	    rtx label_note = NULL;
	    rtx pc = pc_set (insn);
	    rtx pc_src = pc != NULL ? SET_SRC (pc) : NULL;

	    if (prev_nonjump_insn != NULL)
	      label_note
		= find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

	    if (label_note != NULL && pc_src != NULL)
	      {
		rtx label_set = single_set (prev_nonjump_insn);
		rtx label_dest
		  = label_set != NULL ? SET_DEST (label_set) : NULL;

		if (label_set != NULL
		    /* The source must be the direct LABEL_REF, not a
		       PLUS, UNSPEC, IF_THEN_ELSE etc.  */
		    && GET_CODE (SET_SRC (label_set)) == LABEL_REF
		    && (rtx_equal_p (label_dest, pc_src)
			|| (GET_CODE (pc_src) == IF_THEN_ELSE
			    && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
				|| rtx_equal_p (label_dest,
						XEXP (pc_src, 2))))))

		  {
		    /* The CODE_LABEL referred to in the note must be the
		       CODE_LABEL in the LABEL_REF of the "set".  We can
		       conveniently use it for the marker function, which
		       requires a LABEL_REF wrapping.  */
		    gcc_assert (XEXP (label_note, 0)
				== XEXP (SET_SRC (label_set), 0));

		    mark_jump_label_1 (label_set, insn, false, true);
		    gcc_assert (JUMP_LABEL (insn)
				== XEXP (SET_SRC (label_set), 0));
		  }
	      }
	  }
	else if (! INSN_DELETED_P (insn))
	  prev_nonjump_insn = insn;
      }
    else if (LABEL_P (insn))
      prev_nonjump_insn = NULL;

  /* If we are in cfglayout mode, there may be non-insns between the
     basic blocks.  If those non-insns represent tablejump data, they
     contain label references that we must record.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      rtx insn;
      FOR_EACH_BB (bb)
	{
	  for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
	    if (INSN_P (insn))
	      {
		gcc_assert (JUMP_TABLE_DATA_P (insn));
		mark_jump_label (PATTERN (insn), insn, 0);
	      }

	  for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
	    if (INSN_P (insn))
	      {
		gcc_assert (JUMP_TABLE_DATA_P (insn));
		mark_jump_label (PATTERN (insn), insn, 0);
	      }
	}
    }
}
Example #17
static void
print_rtx (const_rtx in_rtx)
{
  int i = 0;
  int j;
  const char *format_ptr;
  int is_insn;

  if (sawclose)
    {
      if (flag_simple)
	fputc (' ', outfile);
      else
	fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, "");
      sawclose = 0;
    }

  if (in_rtx == 0)
    {
      fputs ("(nil)", outfile);
      sawclose = 1;
      return;
    }
  else if (GET_CODE (in_rtx) > NUM_RTX_CODE)
    {
       fprintf (outfile, "(??? bad code %d\n%s%*s)", GET_CODE (in_rtx),
		print_rtx_head, indent * 2, "");
       sawclose = 1;
       return;
    }

  is_insn = INSN_P (in_rtx);

  /* Print name of expression code.  */
  if (flag_simple && CONST_INT_P (in_rtx))
    fputc ('(', outfile);
  else
    fprintf (outfile, "(%s", GET_RTX_NAME (GET_CODE (in_rtx)));

  if (! flag_simple)
    {
      if (RTX_FLAG (in_rtx, in_struct))
	fputs ("/s", outfile);

      if (RTX_FLAG (in_rtx, volatil))
	fputs ("/v", outfile);

      if (RTX_FLAG (in_rtx, unchanging))
	fputs ("/u", outfile);

      if (RTX_FLAG (in_rtx, frame_related))
	fputs ("/f", outfile);

      if (RTX_FLAG (in_rtx, jump))
	fputs ("/j", outfile);

      if (RTX_FLAG (in_rtx, call))
	fputs ("/c", outfile);

      if (RTX_FLAG (in_rtx, return_val))
	fputs ("/i", outfile);

      /* Print REG_NOTE names for EXPR_LIST and INSN_LIST.  */
      if ((GET_CODE (in_rtx) == EXPR_LIST
	   || GET_CODE (in_rtx) == INSN_LIST
	   || GET_CODE (in_rtx) == INT_LIST)
	  && (int)GET_MODE (in_rtx) < REG_NOTE_MAX)
	fprintf (outfile, ":%s",
		 GET_REG_NOTE_NAME (GET_MODE (in_rtx)));

      /* For other rtl, print the mode if it's not VOID.  */
      else if (GET_MODE (in_rtx) != VOIDmode)
	fprintf (outfile, ":%s", GET_MODE_NAME (GET_MODE (in_rtx)));

#ifndef GENERATOR_FILE
      if (GET_CODE (in_rtx) == VAR_LOCATION)
	{
	  if (TREE_CODE (PAT_VAR_LOCATION_DECL (in_rtx)) == STRING_CST)
	    fputs (" <debug string placeholder>", outfile);
	  else
	    print_mem_expr (outfile, PAT_VAR_LOCATION_DECL (in_rtx));
	  fputc (' ', outfile);
	  print_rtx (PAT_VAR_LOCATION_LOC (in_rtx));
	  if (PAT_VAR_LOCATION_STATUS (in_rtx)
	      == VAR_INIT_STATUS_UNINITIALIZED)
	    fprintf (outfile, " [uninit]");
	  sawclose = 1;
	  i = GET_RTX_LENGTH (VAR_LOCATION);
	}
#endif
    }

#ifndef GENERATOR_FILE
  if (CONST_DOUBLE_AS_FLOAT_P (in_rtx))
    i = 5;
#endif

  /* Get the format string and skip the first elements if we have handled
     them already.  */
  format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx)) + i;
  for (; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
    switch (*format_ptr++)
      {
	const char *str;

      case 'T':
	str = XTMPL (in_rtx, i);
	goto string;

      case 'S':
      case 's':
	str = XSTR (in_rtx, i);
      string:

	if (str == 0)
	  fputs (" \"\"", outfile);
	else
	  fprintf (outfile, " (\"%s\")", str);
	sawclose = 1;
	break;

	/* 0 indicates a field for internal use that should not be printed.
	   An exception is the third field of a NOTE, where it indicates
	   that the field has several different valid contents.  */
      case '0':
	if (i == 1 && REG_P (in_rtx))
	  {
	    if (REGNO (in_rtx) != ORIGINAL_REGNO (in_rtx))
	      fprintf (outfile, " [%d]", ORIGINAL_REGNO (in_rtx));
	  }
#ifndef GENERATOR_FILE
	else if (i == 1 && GET_CODE (in_rtx) == SYMBOL_REF)
	  {
	    int flags = SYMBOL_REF_FLAGS (in_rtx);
	    if (flags)
	      fprintf (outfile, " [flags %#x]", flags);
	  }
	else if (i == 2 && GET_CODE (in_rtx) == SYMBOL_REF)
	  {
	    tree decl = SYMBOL_REF_DECL (in_rtx);
	    if (decl)
	      print_node_brief (outfile, "", decl, dump_flags);
	  }
#endif
	else if (i == 4 && NOTE_P (in_rtx))
	  {
	    switch (NOTE_KIND (in_rtx))
	      {
	      case NOTE_INSN_EH_REGION_BEG:
	      case NOTE_INSN_EH_REGION_END:
		if (flag_dump_unnumbered)
		  fprintf (outfile, " #");
		else
		  fprintf (outfile, " %d", NOTE_EH_HANDLER (in_rtx));
		sawclose = 1;
		break;

	      case NOTE_INSN_BLOCK_BEG:
	      case NOTE_INSN_BLOCK_END:
#ifndef GENERATOR_FILE
		dump_addr (outfile, " ", NOTE_BLOCK (in_rtx));
#endif
		sawclose = 1;
		break;

	      case NOTE_INSN_BASIC_BLOCK:
		{
#ifndef GENERATOR_FILE
		  basic_block bb = NOTE_BASIC_BLOCK (in_rtx);
		  if (bb != 0)
		    fprintf (outfile, " [bb %d]", bb->index);
#endif
		  break;
	        }

	      case NOTE_INSN_DELETED_LABEL:
	      case NOTE_INSN_DELETED_DEBUG_LABEL:
		{
		  const char *label = NOTE_DELETED_LABEL_NAME (in_rtx);
		  if (label)
		    fprintf (outfile, " (\"%s\")", label);
		  else
		    fprintf (outfile, " \"\"");
		}
		break;

	      case NOTE_INSN_SWITCH_TEXT_SECTIONS:
		{
#ifndef GENERATOR_FILE
		  basic_block bb = NOTE_BASIC_BLOCK (in_rtx);
		  if (bb != 0)
		    fprintf (outfile, " [bb %d]", bb->index);
#endif
		  break;
		}

	      case NOTE_INSN_VAR_LOCATION:
	      case NOTE_INSN_CALL_ARG_LOCATION:
#ifndef GENERATOR_FILE
		fputc (' ', outfile);
		print_rtx (NOTE_VAR_LOCATION (in_rtx));
#endif
		break;

	      case NOTE_INSN_CFI:
#ifndef GENERATOR_FILE
		fputc ('\n', outfile);
		output_cfi_directive (outfile, NOTE_CFI (in_rtx));
		fputc ('\t', outfile);
#endif
		break;

	      default:
		break;
	      }
	  }
	else if (i == 8 && JUMP_P (in_rtx) && JUMP_LABEL (in_rtx) != NULL)
	  {
	    /* Output the JUMP_LABEL reference.  */
	    fprintf (outfile, "\n%s%*s -> ", print_rtx_head, indent * 2, "");
	    if (GET_CODE (JUMP_LABEL (in_rtx)) == RETURN)
	      fprintf (outfile, "return");
	    else if (GET_CODE (JUMP_LABEL (in_rtx)) == SIMPLE_RETURN)
	      fprintf (outfile, "simple_return");
	    else
	      fprintf (outfile, "%d", INSN_UID (JUMP_LABEL (in_rtx)));
	  }
	else if (i == 0 && GET_CODE (in_rtx) == VALUE)
	  {
#ifndef GENERATOR_FILE
	    cselib_val *val = CSELIB_VAL_PTR (in_rtx);

	    fprintf (outfile, " %u:%u", val->uid, val->hash);
	    dump_addr (outfile, " @", in_rtx);
	    dump_addr (outfile, "/", (void*)val);
#endif
	  }
	else if (i == 0 && GET_CODE (in_rtx) == DEBUG_EXPR)
	  {
#ifndef GENERATOR_FILE
	    fprintf (outfile, " D#%i",
		     DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (in_rtx)));
#endif
	  }
	else if (i == 0 && GET_CODE (in_rtx) == ENTRY_VALUE)
	  {
	    indent += 2;
	    if (!sawclose)
	      fprintf (outfile, " ");
	    print_rtx (ENTRY_VALUE_EXP (in_rtx));
	    indent -= 2;
	  }
	break;

      case 'e':
      do_e:
	indent += 2;
	if (i == 7 && INSN_P (in_rtx))
	  /* Put REG_NOTES on their own line.  */
	  fprintf (outfile, "\n%s%*s",
		   print_rtx_head, indent * 2, "");
	if (!sawclose)
	  fprintf (outfile, " ");
	print_rtx (XEXP (in_rtx, i));
	indent -= 2;
	break;

      case 'E':
      case 'V':
	indent += 2;
	if (sawclose)
	  {
	    fprintf (outfile, "\n%s%*s",
		     print_rtx_head, indent * 2, "");
	    sawclose = 0;
	  }
	fputs (" [", outfile);
	if (NULL != XVEC (in_rtx, i))
	  {
	    indent += 2;
	    if (XVECLEN (in_rtx, i))
	      sawclose = 1;

	    for (j = 0; j < XVECLEN (in_rtx, i); j++)
	      print_rtx (XVECEXP (in_rtx, i, j));

	    indent -= 2;
	  }
	if (sawclose)
	  fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, "");

	fputs ("]", outfile);
	sawclose = 1;
	indent -= 2;
	break;

      case 'w':
	if (! flag_simple)
	  fprintf (outfile, " ");
	fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, XWINT (in_rtx, i));
	if (! flag_simple)
	  fprintf (outfile, " [" HOST_WIDE_INT_PRINT_HEX "]",
		   (unsigned HOST_WIDE_INT) XWINT (in_rtx, i));
	break;

      case 'i':
	if (i == 5 && INSN_P (in_rtx))
	  {
#ifndef GENERATOR_FILE
	    /*  Pretty-print insn locations.  Ignore scoping as it is mostly
		redundant with line number information and do not print anything
		when there is no location information available.  */
	    if (INSN_LOCATION (in_rtx) && insn_file (in_rtx))
	      fprintf(outfile, " %s:%i", insn_file (in_rtx), insn_line (in_rtx));
#endif
	  }
	else if (i == 6 && GET_CODE (in_rtx) == ASM_OPERANDS)
	  {
#ifndef GENERATOR_FILE
	    fprintf (outfile, " %s:%i",
		     LOCATION_FILE (ASM_OPERANDS_SOURCE_LOCATION (in_rtx)),
		     LOCATION_LINE (ASM_OPERANDS_SOURCE_LOCATION (in_rtx)));
#endif
	  }
	else if (i == 1 && GET_CODE (in_rtx) == ASM_INPUT)
	  {
#ifndef GENERATOR_FILE
	    fprintf (outfile, " %s:%i",
		     LOCATION_FILE (ASM_INPUT_SOURCE_LOCATION (in_rtx)),
		     LOCATION_LINE (ASM_INPUT_SOURCE_LOCATION (in_rtx)));
#endif
	  }
	else if (i == 6 && NOTE_P (in_rtx))
	  {
	    /* This field is only used for NOTE_INSN_DELETED_LABEL, and
	       other times often contains garbage from INSN->NOTE death.  */
	    if (NOTE_KIND (in_rtx) == NOTE_INSN_DELETED_LABEL
		|| NOTE_KIND (in_rtx) == NOTE_INSN_DELETED_DEBUG_LABEL)
	      fprintf (outfile, " %d",  XINT (in_rtx, i));
	  }
#if !defined(GENERATOR_FILE) && NUM_UNSPECV_VALUES > 0
	else if (i == 1
		 && GET_CODE (in_rtx) == UNSPEC_VOLATILE
		 && XINT (in_rtx, 1) >= 0
		 && XINT (in_rtx, 1) < NUM_UNSPECV_VALUES)
	  fprintf (outfile, " %s", unspecv_strings[XINT (in_rtx, 1)]);
#endif
#if !defined(GENERATOR_FILE) && NUM_UNSPEC_VALUES > 0
	else if (i == 1
		 && (GET_CODE (in_rtx) == UNSPEC
		     || GET_CODE (in_rtx) == UNSPEC_VOLATILE)
		 && XINT (in_rtx, 1) >= 0
		 && XINT (in_rtx, 1) < NUM_UNSPEC_VALUES)
	  fprintf (outfile, " %s", unspec_strings[XINT (in_rtx, 1)]);
#endif
	else
	  {
	    int value = XINT (in_rtx, i);
	    const char *name;

#ifndef GENERATOR_FILE
	    if (REG_P (in_rtx) && (unsigned) value < FIRST_PSEUDO_REGISTER)
	      fprintf (outfile, " %d %s", value, reg_names[value]);
	    else if (REG_P (in_rtx)
		     && (unsigned) value <= LAST_VIRTUAL_REGISTER)
	      {
		if (value == VIRTUAL_INCOMING_ARGS_REGNUM)
		  fprintf (outfile, " %d virtual-incoming-args", value);
		else if (value == VIRTUAL_STACK_VARS_REGNUM)
		  fprintf (outfile, " %d virtual-stack-vars", value);
		else if (value == VIRTUAL_STACK_DYNAMIC_REGNUM)
		  fprintf (outfile, " %d virtual-stack-dynamic", value);
		else if (value == VIRTUAL_OUTGOING_ARGS_REGNUM)
		  fprintf (outfile, " %d virtual-outgoing-args", value);
		else if (value == VIRTUAL_CFA_REGNUM)
		  fprintf (outfile, " %d virtual-cfa", value);
		else if (value == VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM)
		  fprintf (outfile, " %d virtual-preferred-stack-boundary",
			   value);
		else
		  fprintf (outfile, " %d virtual-reg-%d", value,
			   value-FIRST_VIRTUAL_REGISTER);
	      }
	    else
#endif
	      if (flag_dump_unnumbered
		     && (is_insn || NOTE_P (in_rtx)))
	      fputc ('#', outfile);
	    else
	      fprintf (outfile, " %d", value);

#ifndef GENERATOR_FILE
	    if (REG_P (in_rtx) && REG_ATTRS (in_rtx))
	      {
		fputs (" [", outfile);
		if (ORIGINAL_REGNO (in_rtx) != REGNO (in_rtx))
		  fprintf (outfile, "orig:%i", ORIGINAL_REGNO (in_rtx));
		if (REG_EXPR (in_rtx))
		  print_mem_expr (outfile, REG_EXPR (in_rtx));

		if (REG_OFFSET (in_rtx))
		  fprintf (outfile, "+" HOST_WIDE_INT_PRINT_DEC,
			   REG_OFFSET (in_rtx));
		fputs (" ]", outfile);
	      }
#endif

	    if (is_insn && &INSN_CODE (in_rtx) == &XINT (in_rtx, i)
		&& XINT (in_rtx, i) >= 0
		&& (name = get_insn_name (XINT (in_rtx, i))) != NULL)
	      fprintf (outfile, " {%s}", name);
	    sawclose = 0;
	  }
	break;

      /* Print NOTE_INSN names rather than integer codes.  */

      case 'n':
	fprintf (outfile, " %s", GET_NOTE_INSN_NAME (XINT (in_rtx, i)));
	sawclose = 0;
	break;

      case 'u':
	if (XEXP (in_rtx, i) != NULL)
	  {
	    rtx sub = XEXP (in_rtx, i);
	    enum rtx_code subc = GET_CODE (sub);

	    if (GET_CODE (in_rtx) == LABEL_REF)
	      {
		if (subc == NOTE
		    && NOTE_KIND (sub) == NOTE_INSN_DELETED_LABEL)
		  {
		    if (flag_dump_unnumbered)
		      fprintf (outfile, " [# deleted]");
		    else
		      fprintf (outfile, " [%d deleted]", INSN_UID (sub));
		    sawclose = 0;
		    break;
		  }

		if (subc != CODE_LABEL)
		  goto do_e;
	      }

	    if (flag_dump_unnumbered
		|| (flag_dump_unnumbered_links && (i == 1 || i == 2)
		    && (INSN_P (in_rtx) || NOTE_P (in_rtx)
			|| LABEL_P (in_rtx) || BARRIER_P (in_rtx))))
	      fputs (" #", outfile);
	    else
	      fprintf (outfile, " %d", INSN_UID (sub));
	  }
	else
	  fputs (" 0", outfile);
	sawclose = 0;
	break;

      case 't':
#ifndef GENERATOR_FILE
	if (i == 0 && GET_CODE (in_rtx) == DEBUG_IMPLICIT_PTR)
	  print_mem_expr (outfile, DEBUG_IMPLICIT_PTR_DECL (in_rtx));
	else if (i == 0 && GET_CODE (in_rtx) == DEBUG_PARAMETER_REF)
	  print_mem_expr (outfile, DEBUG_PARAMETER_REF_DECL (in_rtx));
	else
	  dump_addr (outfile, " ", XTREE (in_rtx, i));
#endif
	break;

      case '*':
	fputs (" Unknown", outfile);
	sawclose = 0;
	break;

      case 'B':
#ifndef GENERATOR_FILE
	if (XBBDEF (in_rtx, i))
	  fprintf (outfile, " %i", XBBDEF (in_rtx, i)->index);
#endif
	break;

      default:
	gcc_unreachable ();
      }

  switch (GET_CODE (in_rtx))
    {
#ifndef GENERATOR_FILE
    case MEM:
      if (__builtin_expect (final_insns_dump_p, false))
	fprintf (outfile, " [");
      else
	fprintf (outfile, " [" HOST_WIDE_INT_PRINT_DEC,
		 (HOST_WIDE_INT) MEM_ALIAS_SET (in_rtx));

      if (MEM_EXPR (in_rtx))
	print_mem_expr (outfile, MEM_EXPR (in_rtx));

      if (MEM_OFFSET_KNOWN_P (in_rtx))
	fprintf (outfile, "+" HOST_WIDE_INT_PRINT_DEC, MEM_OFFSET (in_rtx));

      if (MEM_SIZE_KNOWN_P (in_rtx))
	fprintf (outfile, " S" HOST_WIDE_INT_PRINT_DEC, MEM_SIZE (in_rtx));

      if (MEM_ALIGN (in_rtx) != 1)
	fprintf (outfile, " A%u", MEM_ALIGN (in_rtx));

      if (!ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (in_rtx)))
	fprintf (outfile, " AS%u", MEM_ADDR_SPACE (in_rtx));

      fputc (']', outfile);
      break;

    case CONST_DOUBLE:
      if (FLOAT_MODE_P (GET_MODE (in_rtx)))
	{
	  char s[60];

	  real_to_decimal (s, CONST_DOUBLE_REAL_VALUE (in_rtx),
			   sizeof (s), 0, 1);
	  fprintf (outfile, " %s", s);

	  real_to_hexadecimal (s, CONST_DOUBLE_REAL_VALUE (in_rtx),
			       sizeof (s), 0, 1);
	  fprintf (outfile, " [%s]", s);
	}
      break;
#endif

    case CODE_LABEL:
      fprintf (outfile, " [%d uses]", LABEL_NUSES (in_rtx));
      switch (LABEL_KIND (in_rtx))
	{
	  case LABEL_NORMAL: break;
	  case LABEL_STATIC_ENTRY: fputs (" [entry]", outfile); break;
	  case LABEL_GLOBAL_ENTRY: fputs (" [global entry]", outfile); break;
	  case LABEL_WEAK_ENTRY: fputs (" [weak entry]", outfile); break;
	  default: gcc_unreachable ();
	}
      break;

    default:
      break;
    }

  fputc (')', outfile);
  sawclose = 1;
}
Example #18
static void
combine_stack_adjustments_for_block (basic_block bb)
{
    HOST_WIDE_INT last_sp_adjust = 0;
    rtx last_sp_set = NULL_RTX;
    rtx last2_sp_set = NULL_RTX;
    struct csa_reflist *reflist = NULL;
    rtx insn, next, set;
    struct record_stack_refs_data data;
    bool end_of_block = false;

    for (insn = BB_HEAD (bb); !end_of_block ; insn = next)
    {
        end_of_block = insn == BB_END (bb);
        next = NEXT_INSN (insn);

        if (! INSN_P (insn))
            continue;

        set = single_set_for_csa (insn);
        if (set)
        {
            rtx dest = SET_DEST (set);
            rtx src = SET_SRC (set);

            /* Find constant additions to the stack pointer.  */
            if (dest == stack_pointer_rtx
                    && GET_CODE (src) == PLUS
                    && XEXP (src, 0) == stack_pointer_rtx
                    && CONST_INT_P (XEXP (src, 1)))
            {
                HOST_WIDE_INT this_adjust = INTVAL (XEXP (src, 1));

                /* If we've not seen an adjustment previously, record
                it now and continue.  */
                if (! last_sp_set)
                {
                    last_sp_set = insn;
                    last_sp_adjust = this_adjust;
                    continue;
                }

                /* If not all recorded refs can be adjusted, or the
                adjustment is now too large for a constant addition,
                 we cannot merge the two stack adjustments.

                 Also we need to be careful to not move stack pointer
                 such that we create stack accesses outside the allocated
                 area.  We can combine an allocation into the first insn,
                 or a deallocation into the second insn.  We cannot
                 combine an allocation followed by a deallocation.

                 The only somewhat frequent occurrence of the latter is when
                 a function allocates a stack frame but does not use it.
                 For this case, we would need to analyze rtl stream to be
                 sure that allocated area is really unused.  This means not
                 only checking the memory references, but also all registers
                 or global memory references possibly containing a stack
                 frame address.

                 Perhaps the best way to address this problem is to teach
                 gcc not to allocate stack for objects never used.  */

                /* Combine an allocation into the first instruction.  */
                if (STACK_GROWS_DOWNWARD ? this_adjust <= 0 : this_adjust >= 0)
                {
                    if (try_apply_stack_adjustment (last_sp_set, reflist,
                                                    last_sp_adjust + this_adjust,
                                                    this_adjust))
                    {
                        /* It worked!  */
                        maybe_move_args_size_note (last_sp_set, insn, false);
                        delete_insn (insn);
                        last_sp_adjust += this_adjust;
                        continue;
                    }
                }

                /* Otherwise we have a deallocation.  Do not combine with
                a previous allocation.  Combine into the second insn.  */
                else if (STACK_GROWS_DOWNWARD
                         ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
                {
                    if (try_apply_stack_adjustment (insn, reflist,
                                                    last_sp_adjust + this_adjust,
                                                    -last_sp_adjust))
                    {
                        /* It worked!  */
                        maybe_move_args_size_note (insn, last_sp_set, true);
                        delete_insn (last_sp_set);
                        last_sp_set = insn;
                        last_sp_adjust += this_adjust;
                        free_csa_reflist (reflist);
                        reflist = NULL;
                        continue;
                    }
                }

                /* Combination failed.  Restart processing from here.  If
                deallocation+allocation conspired to cancel, we can
                 delete the old deallocation insn.  */
                if (last_sp_set)
                {
                    if (last_sp_adjust == 0)
                    {
                        maybe_move_args_size_note (insn, last_sp_set, true);
                        delete_insn (last_sp_set);
                    }
                    else
                        last2_sp_set = last_sp_set;
                }
                free_csa_reflist (reflist);
                reflist = NULL;
                last_sp_set = insn;
                last_sp_adjust = this_adjust;
                continue;
            }

            /* Find a store with pre-(dec|inc)rement or pre-modify of exactly
               the previous adjustment and turn it into a simple store.  This
               is equivalent to anticipating the stack adjustment so this must
               be an allocation.  */
            if (MEM_P (dest)
                    && ((STACK_GROWS_DOWNWARD
                         ? (GET_CODE (XEXP (dest, 0)) == PRE_DEC
                            && last_sp_adjust
                            == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest)))
                         : (GET_CODE (XEXP (dest, 0)) == PRE_INC
                            && last_sp_adjust
                            == -(HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest))))
                        || ((STACK_GROWS_DOWNWARD
                             ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
                            && GET_CODE (XEXP (dest, 0)) == PRE_MODIFY
                            && GET_CODE (XEXP (XEXP (dest, 0), 1)) == PLUS
                            && XEXP (XEXP (XEXP (dest, 0), 1), 0)
                            == stack_pointer_rtx
                            && GET_CODE (XEXP (XEXP (XEXP (dest, 0), 1), 1))
                            == CONST_INT
                            && INTVAL (XEXP (XEXP (XEXP (dest, 0), 1), 1))
                            == -last_sp_adjust))
                    && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx
                    && !reg_mentioned_p (stack_pointer_rtx, src)
                    && memory_address_p (GET_MODE (dest), stack_pointer_rtx)
                    && try_apply_stack_adjustment (insn, reflist, 0,
                                                   -last_sp_adjust))
            {
                if (last2_sp_set)
                    maybe_move_args_size_note (last2_sp_set, last_sp_set, false);
                else
                    maybe_move_args_size_note (insn, last_sp_set, true);
                delete_insn (last_sp_set);
                free_csa_reflist (reflist);
                reflist = NULL;
                last_sp_set = NULL_RTX;
                last_sp_adjust = 0;
                continue;
            }
        }

        data.insn = insn;
        data.reflist = reflist;
        if (!CALL_P (insn) && last_sp_set
                && !for_each_rtx (&PATTERN (insn), record_stack_refs, &data))
        {
            reflist = data.reflist;
            continue;
        }
        reflist = data.reflist;

        /* Otherwise, we were not able to process the instruction.
        Do not continue collecting data across such a one.  */
        if (last_sp_set
                && (CALL_P (insn)
                    || reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))))
        {
            if (last_sp_set && last_sp_adjust == 0)
            {
                force_move_args_size_note (bb, last2_sp_set, last_sp_set);
                delete_insn (last_sp_set);
            }
            free_csa_reflist (reflist);
            reflist = NULL;
            last2_sp_set = NULL_RTX;
            last_sp_set = NULL_RTX;
            last_sp_adjust = 0;
        }
    }

    if (last_sp_set && last_sp_adjust == 0)
    {
        force_move_args_size_note (bb, last2_sp_set, last_sp_set);
        delete_insn (last_sp_set);
    }

    if (reflist)
        free_csa_reflist (reflist);
}
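Concretely, on a target whose stack grows downward, the basic merge above turns two adjacent adjustments into one; in RTL shapes (a sketch):

/*
   (set sp (plus sp (const_int -16)))   ;; allocation, becomes last_sp_set
   ...insns whose sp-relative refs (reflist) can be retargeted...
   (set sp (plus sp (const_int -8)))    ;; second allocation
     =>
   (set sp (plus sp (const_int -24)))   ;; folded into the first insn
*/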
Example #19
File: rtl.c Project: ochafik/gccxml
rtx
copy_rtx (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* A SCRATCH must be shared because each one represents a
         distinct value.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
        return orig;
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
         a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
          && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    RTX_FLAG (copy, frame_related) = 0;
  RTX_FLAG (copy, jump) = RTX_FLAG (orig, jump);
  RTX_FLAG (copy, call) = RTX_FLAG (orig, call);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_rtx (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }
  return copy;
}
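A common usage sketch: copy a pattern before editing it, so RTL shared with other insns is not modified behind their backs:

/* Sketch: edit a copy, not the (possibly shared) original; the copy
   can then be installed with e.g. validate_change (insn,
   &PATTERN (insn), new_pat, 0).  */
rtx new_pat = copy_rtx (PATTERN (insn));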
Example #20
static bool
doloop_optimize (struct loop *loop)
{
    enum machine_mode mode;
    rtx doloop_seq, doloop_pat, doloop_reg;
    rtx iterations, count;
    rtx iterations_max;
    rtx start_label;
    rtx condition;
    unsigned level, est_niter;
    struct niter_desc *desc;
    unsigned word_mode_size;
    unsigned HOST_WIDE_INT word_mode_max;

    if (dump_file)
        fprintf (dump_file, "Doloop: Processing loop %d.\n", loop->num);

    /* APPLE LOCAL begin lno */
    /* Ignore large loops.  */
    if (loop->ninsns > (unsigned) PARAM_VALUE (PARAM_MAX_DOLOOP_INSNS))
    {
        if (dump_file)
            fprintf (dump_file,
                     "Doloop: The loop is too large.\n");
        return false;
    }
    /* APPLE LOCAL end lno */

    iv_analysis_loop_init (loop);

    /* Find the simple exit of a LOOP.  */
    desc = get_simple_loop_desc (loop);

    /* Check that loop is a candidate for a low-overhead looping insn.  */
    if (!doloop_valid_p (loop, desc))
    {
        if (dump_file)
            fprintf (dump_file,
                     "Doloop: The loop is not suitable.\n");
        return false;
    }
    mode = desc->mode;

    est_niter = 3;
    if (desc->const_iter)
        est_niter = desc->niter;
    /* If the estimate on number of iterations is reliable (comes from profile
       feedback), use it.  Do not use it normally, since the expected number
       of iterations of an unrolled loop is 2.  */
    if (loop->header->count)
        est_niter = expected_loop_iterations (loop);

    if (est_niter < 3)
    {
        if (dump_file)
            fprintf (dump_file,
                     "Doloop: Too few iterations (%u) to be profitable.\n",
                     est_niter);
        return false;
    }

    count = copy_rtx (desc->niter_expr);
    iterations = desc->const_iter ? desc->niter_expr : const0_rtx;
    iterations_max = GEN_INT (desc->niter_max);
    level = get_loop_level (loop) + 1;

    /* Generate looping insn.  If the pattern FAILs then give up trying
       to modify the loop since there is some aspect the back-end does
       not like.  */
    start_label = block_label (desc->in_edge->dest);
    doloop_reg = gen_reg_rtx (mode);
    doloop_seq = gen_doloop_end (doloop_reg, iterations, iterations_max,
                                 GEN_INT (level), start_label);

    word_mode_size = GET_MODE_BITSIZE (word_mode);
    word_mode_max
        = ((unsigned HOST_WIDE_INT) 1 << (word_mode_size - 1) << 1) - 1;
    if (! doloop_seq
            && mode != word_mode
            /* Before trying a mode different from the one in which the number
               of iterations is computed, we must be sure that the number of
               iterations fits into the new mode.  */
            && (word_mode_size >= GET_MODE_BITSIZE (mode)
                || desc->niter_max <= word_mode_max))
    {
        if (word_mode_size > GET_MODE_BITSIZE (mode))
        {
            count = simplify_gen_unary (ZERO_EXTEND, word_mode,
                                        count, mode);
            iterations = simplify_gen_unary (ZERO_EXTEND, word_mode,
                                             iterations, mode);
            iterations_max = simplify_gen_unary (ZERO_EXTEND, word_mode,
                                                 iterations_max, mode);
        }
        else
        {
            count = lowpart_subreg (word_mode, count, mode);
            iterations = lowpart_subreg (word_mode, iterations, mode);
            iterations_max = lowpart_subreg (word_mode, iterations_max, mode);
        }
        PUT_MODE (doloop_reg, word_mode);
        doloop_seq = gen_doloop_end (doloop_reg, iterations, iterations_max,
                                     GEN_INT (level), start_label);
    }
    if (! doloop_seq)
    {
        if (dump_file)
            fprintf (dump_file,
                     "Doloop: Target unwilling to use doloop pattern!\n");
        return false;
    }

    /* If multiple instructions were created, the last must be the
       jump instruction.  Also, a raw define_insn may yield a plain
       pattern.  */
    doloop_pat = doloop_seq;
    if (INSN_P (doloop_pat))
    {
        while (NEXT_INSN (doloop_pat) != NULL_RTX)
            doloop_pat = NEXT_INSN (doloop_pat);
        if (JUMP_P (doloop_pat))
            doloop_pat = PATTERN (doloop_pat);
        else
            doloop_pat = NULL_RTX;
    }

    if (! doloop_pat
            || ! (condition = doloop_condition_get (doloop_pat)))
    {
        if (dump_file)
            fprintf (dump_file, "Doloop: Unrecognizable doloop pattern!\n");
        return false;
    }

    doloop_modify (loop, desc, doloop_seq, condition, count);
    return true;
}
Example #21
void
print_pattern (pretty_printer *pp, const_rtx x, int verbose)
{
  if (! x)
    {
      pp_string (pp, "(nil)");
      return;
    }

  switch (GET_CODE (x))
    {
    case SET:
      print_value (pp, SET_DEST (x), verbose);
      pp_equal (pp);
      print_value (pp, SET_SRC (x), verbose);
      break;
    case RETURN:
    case SIMPLE_RETURN:
    case EH_RETURN:
      pp_string (pp, GET_RTX_NAME (GET_CODE (x)));
      break;
    case CALL:
      print_exp (pp, x, verbose);
      break;
    case CLOBBER:
    case USE:
      pp_printf (pp, "%s ", GET_RTX_NAME (GET_CODE (x)));
      print_value (pp, XEXP (x, 0), verbose);
      break;
    case VAR_LOCATION:
      pp_string (pp, "loc ");
      print_value (pp, PAT_VAR_LOCATION_LOC (x), verbose);
      break;
    case COND_EXEC:
      pp_left_paren (pp);
      if (GET_CODE (COND_EXEC_TEST (x)) == NE
	  && XEXP (COND_EXEC_TEST (x), 1) == const0_rtx)
	print_value (pp, XEXP (COND_EXEC_TEST (x), 0), verbose);
      else if (GET_CODE (COND_EXEC_TEST (x)) == EQ
	       && XEXP (COND_EXEC_TEST (x), 1) == const0_rtx)
	{
	  pp_exclamation (pp);
	  print_value (pp, XEXP (COND_EXEC_TEST (x), 0), verbose);
	}
      else
	print_value (pp, COND_EXEC_TEST (x), verbose);
      pp_string (pp, ") ");
      print_pattern (pp, COND_EXEC_CODE (x), verbose);
      break;
    case PARALLEL:
      {
	int i;

	pp_left_brace (pp);
	for (i = 0; i < XVECLEN (x, 0); i++)
	  {
	    print_pattern (pp, XVECEXP (x, 0, i), verbose);
	    pp_semicolon (pp);
	  }
	pp_right_brace (pp);
      }
      break;
    case SEQUENCE:
      {
	pp_string (pp, "sequence{");
	if (INSN_P (XVECEXP (x, 0, 0)))
	  {
	    /* Print the sequence insns indented.  */
	    const char * save_print_rtx_head = print_rtx_head;
	    char indented_print_rtx_head[32];

	    pp_newline (pp);
	    gcc_assert (strlen (print_rtx_head) < sizeof (indented_print_rtx_head) - 4);
	    snprintf (indented_print_rtx_head,
		      sizeof (indented_print_rtx_head),
		      "%s     ", print_rtx_head);
	    print_rtx_head = indented_print_rtx_head;
	    for (int i = 0; i < XVECLEN (x, 0); i++)
	      print_insn_with_notes (pp, XVECEXP (x, 0, i));
	    pp_printf (pp, "%s      ", save_print_rtx_head);
	    print_rtx_head = save_print_rtx_head;
	  }
	else
	  {
	    for (int i = 0; i < XVECLEN (x, 0); i++)
	      {
		print_pattern (pp, XVECEXP (x, 0, i), verbose);
		pp_semicolon (pp);
	      }
	  }
	pp_right_brace (pp);
      }
      break;
    case ASM_INPUT:
      pp_printf (pp, "asm {%s}", XSTR (x, 0));
      break;
    case ADDR_VEC:
      /* Fall through.  */
    case ADDR_DIFF_VEC:
      print_value (pp, XEXP (x, 0), verbose);
      break;
    case TRAP_IF:
      pp_string (pp, "trap_if ");
      print_value (pp, TRAP_CONDITION (x), verbose);
      break;
    case UNSPEC:
    case UNSPEC_VOLATILE:
      /* Fallthru -- leave UNSPECs to print_exp.  */
    default:
      print_value (pp, x, verbose);
    }
}				/* print_pattern */
Example #22
static basic_block
create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
{
  edge eg;
  edge_iterator ei;
  basic_block pre_exit;

  /* The only non-call predecessor at this stage is a block with a
     fallthrough edge; there can be at most one, but there could be
     none at all, e.g. when exit is called.  */
  pre_exit = 0;
  FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (eg->flags & EDGE_FALLTHRU)
      {
	basic_block src_bb = eg->src;
	rtx_insn *last_insn;
	rtx ret_reg;

	gcc_assert (!pre_exit);
	/* If this function returns a value at the end, we have to
	   insert the final mode switch before the return value copy
	   to its hard register.  */
	if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) == 1
	    && NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
	    && GET_CODE (PATTERN (last_insn)) == USE
	    && GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
	  {
	    int ret_start = REGNO (ret_reg);
	    int nregs = hard_regno_nregs[ret_start][GET_MODE (ret_reg)];
	    int ret_end = ret_start + nregs;
	    bool short_block = false;
	    bool multi_reg_return = false;
	    bool forced_late_switch = false;
	    rtx_insn *before_return_copy;

	    do
	      {
		rtx_insn *return_copy = PREV_INSN (last_insn);
		rtx return_copy_pat, copy_reg;
		int copy_start, copy_num;
		int j;

		if (NONDEBUG_INSN_P (return_copy))
		  {
		    /* When using SJLJ exceptions, the call to the
		       unregister function is inserted between the
		       clobber of the return value and the copy.
		       We do not want to split the block before this
		       or any other call; if we have not found the
		       copy yet, the copy must have been deleted.  */
		    if (CALL_P (return_copy))
		      {
			short_block = true;
			break;
		      }
		    return_copy_pat = PATTERN (return_copy);
		    switch (GET_CODE (return_copy_pat))
		      {
		      case USE:
			/* Skip USEs of multiple return registers.
			   __builtin_apply pattern is also handled here.  */
			if (GET_CODE (XEXP (return_copy_pat, 0)) == REG
			    && (targetm.calls.function_value_regno_p
				(REGNO (XEXP (return_copy_pat, 0)))))
			  {
			    multi_reg_return = true;
			    last_insn = return_copy;
			    continue;
			  }
			break;

		      case ASM_OPERANDS:
			/* Skip barrier insns.  */
			if (!MEM_VOLATILE_P (return_copy_pat))
			  break;

			/* Fall through.  */

		      case ASM_INPUT:
		      case UNSPEC_VOLATILE:
			last_insn = return_copy;
			continue;

		      default:
			break;
		      }

		    /* If the return register is not (in its entirety)
		       likely spilled, the return copy might be
		       partially or completely optimized away.  */
		    return_copy_pat = single_set (return_copy);
		    if (!return_copy_pat)
		      {
			return_copy_pat = PATTERN (return_copy);
			if (GET_CODE (return_copy_pat) != CLOBBER)
			  break;
			else if (!optimize)
			  {
			    /* This might be (clobber (reg [<result>]))
			       when not optimizing.  Then check if
			       the previous insn is the clobber for
			       the return register.  */
			    copy_reg = SET_DEST (return_copy_pat);
			    if (GET_CODE (copy_reg) == REG
				&& !HARD_REGISTER_NUM_P (REGNO (copy_reg)))
			      {
				if (INSN_P (PREV_INSN (return_copy)))
				  {
				    return_copy = PREV_INSN (return_copy);
				    return_copy_pat = PATTERN (return_copy);
				    if (GET_CODE (return_copy_pat) != CLOBBER)
				      break;
				  }
			      }
			  }
		      }
		    copy_reg = SET_DEST (return_copy_pat);
		    if (GET_CODE (copy_reg) == REG)
		      copy_start = REGNO (copy_reg);
		    else if (GET_CODE (copy_reg) == SUBREG
			     && GET_CODE (SUBREG_REG (copy_reg)) == REG)
		      copy_start = REGNO (SUBREG_REG (copy_reg));
		    else
		      {
			/* When control reaches the end of a non-void
			   function, there are no return copy insns at all.
			   This avoids an ICE on such an invalid function.  */
			if (ret_start + nregs == ret_end)
			  short_block = true;
			break;
		      }
		    if (!targetm.calls.function_value_regno_p (copy_start))
		      copy_num = 0;
		    else
		      copy_num
			= hard_regno_nregs[copy_start][GET_MODE (copy_reg)];

		    /* If the return register is not likely spilled, as is
		       the case for floating point on SH4, then it might
		       be set by an arithmetic operation that needs a
		       different mode than the exit block.  */
		    for (j = n_entities - 1; j >= 0; j--)
		      {
			int e = entity_map[j];
			int mode =
			  targetm.mode_switching.needed (e, return_copy);

			if (mode != num_modes[e]
			    && mode != targetm.mode_switching.exit (e))
			  break;
		      }
		    if (j >= 0)
		      {
			/* __builtin_return emits a sequence of loads to all
			   return registers.  One of them might require
			   another mode than MODE_EXIT, even if it is
			   unrelated to the return value, so we want to put
			   the final mode switch after it.  */
			if (multi_reg_return
			    && targetm.calls.function_value_regno_p
			        (copy_start))
			  forced_late_switch = true;

			/* For the SH4, floating point loads depend on fpscr,
			   thus we might need to put the final mode switch
			   after the return value copy.  That is still OK,
			   because a floating point return value does not
			   conflict with address reloads.  */
			if (copy_start >= ret_start
			    && copy_start + copy_num <= ret_end
			    && OBJECT_P (SET_SRC (return_copy_pat)))
			  forced_late_switch = true;
			break;
		      }
		    if (copy_num == 0)
		      {
			last_insn = return_copy;
			continue;
		      }

		    if (copy_start >= ret_start
			&& copy_start + copy_num <= ret_end)
		      nregs -= copy_num;
		    else if (!multi_reg_return
			     || !targetm.calls.function_value_regno_p
				 (copy_start))
		      break;
		    last_insn = return_copy;
		  }
		/* ??? Exception handling can lead to the return value
		   copy being already separated from the return value use,
		   as in unwind-dw2.c.
		   Similarly, conditionally returning without a value,
		   and conditionally using builtin_return can lead to an
		   isolated use.  */
		if (return_copy == BB_HEAD (src_bb))
		  {
		    short_block = true;
		    break;
		  }
		last_insn = return_copy;
	      }
	    while (nregs);

	    /* If we didn't see a full return value copy, verify that there
	       is a plausible reason for this.  If some, but not all, of the
	       return register is likely spilled, we can expect that there
	       is a copy for the likely spilled part.  */
	    gcc_assert (!nregs
			|| forced_late_switch
			|| short_block
			|| !(targetm.class_likely_spilled_p
			     (REGNO_REG_CLASS (ret_start)))
			|| (nregs
			    != hard_regno_nregs[ret_start][GET_MODE (ret_reg)])
			/* For multi-hard-register floating point
			   values, sometimes the likely-spilled part
			   is ordinarily copied first, then the other
			   part is set with an arithmetic operation.
			   This doesn't actually cause reload
			   failures, so let it pass.  */
			|| (GET_MODE_CLASS (GET_MODE (ret_reg)) != MODE_INT
			    && nregs != 1));

	    if (!NOTE_INSN_BASIC_BLOCK_P (last_insn))
	      {
		before_return_copy
		  = emit_note_before (NOTE_INSN_DELETED, last_insn);
		/* Instructions preceding LAST_INSN in the same block might
		   require a different mode than MODE_EXIT, so if we might
		   have such instructions, keep them in a separate block
		   from pre_exit.  */
		src_bb = split_block (src_bb,
				      PREV_INSN (before_return_copy))->dest;
	      }
	    else
	      before_return_copy = last_insn;
	    pre_exit = split_block (src_bb, before_return_copy)->src;
	  }
	else
	  {
	    pre_exit = split_edge (eg);
	  }
      }

  return pre_exit;
}
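
The inner loop of create_pre_exit is, at its core, a backward counting walk: starting from the final USE of the return register, each copy insn subtracts its registers from the outstanding count, and the walk stops early (the short_block case) at the block head or at an unrelated insn. A self-contained sketch of just that counting walk, with a hypothetical copy_insn record standing in for the RTL chain (find_return_copies is an invented name, not a GCC function):

#include <stdio.h>

/* Hypothetical stand-in for an insn that copies part of the
   return value: it sets hard registers [start, start + num).  */
struct copy_insn
{
  int start, num;
  struct copy_insn *prev;
};

/* Walk backwards from LAST until the copies seen cover all of
   [ret_start, ret_start + nregs); return the earliest such copy,
   or NULL if the walk stops before the value is fully covered
   (the short-block case above).  */
static struct copy_insn *
find_return_copies (struct copy_insn *last, int ret_start, int nregs)
{
  int ret_end = ret_start + nregs;
  struct copy_insn *insn = last;

  while (nregs > 0 && insn)
    {
      if (insn->start >= ret_start && insn->start + insn->num <= ret_end)
	nregs -= insn->num;	/* this copy covers part of the value */
      else
	break;			/* unrelated insn: stop, like the breaks above */
      if (nregs == 0)
	return insn;
      insn = insn->prev;
    }
  return NULL;
}

int
main (void)
{
  /* Two copies building up a two-register return value.  */
  struct copy_insn c0 = { 0, 1, NULL };
  struct copy_insn c1 = { 1, 1, &c0 };
  struct copy_insn *first = find_return_copies (&c1, 0, 2);
  printf ("split before copy of r%d\n", first ? first->start : -1);
  return 0;
}

Splitting the block just before the insn this returns corresponds to the split_block call at the end of the real function.
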
Exemplo n.º 23
0
void
optimize_sibling_and_tail_recursive_calls (void)
{
  rtx insn, insns;
  basic_block alternate_exit = EXIT_BLOCK_PTR;
  bool no_sibcalls_this_function = false;
  bool successful_replacement = false;
  bool replaced_call_placeholder = false;
  edge e;

  insns = get_insns ();

  cleanup_cfg (CLEANUP_PRE_SIBCALL | CLEANUP_PRE_LOOP);

  /* If there are no basic blocks, then there is nothing to do.  */
  if (n_basic_blocks == 0)
    return;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at the exit of the function, which means
     that we cannot do any sibcall transformations.  */
  if (USING_SJLJ_EXCEPTIONS && current_function_has_exception_handlers ())
    no_sibcalls_this_function = true;

  return_value_pseudo = NULL_RTX;

  /* Find the exit block.

     It is possible that we have blocks which can reach the exit block
     directly.  However, most of the time a block will jump (or fall) into
     block N_BASIC_BLOCKS - 1, which in turn falls into the exit block.  */
  for (e = EXIT_BLOCK_PTR->pred;
       e && alternate_exit == EXIT_BLOCK_PTR;
       e = e->pred_next)
    {
      rtx insn;

      if (e->dest != EXIT_BLOCK_PTR || e->succ_next != NULL)
	continue;

      /* Walk forwards through the last normal block and see if it
	 does nothing except fall into the exit block.  */
      for (insn = BB_HEAD (EXIT_BLOCK_PTR->prev_bb);
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  rtx set;
	  /* This should only happen once, at the start of this block.  */
	  if (GET_CODE (insn) == CODE_LABEL)
	    continue;

	  if (GET_CODE (insn) == NOTE)
	    continue;

	  if (GET_CODE (insn) == INSN
	      && GET_CODE (PATTERN (insn)) == USE)
	    continue;

	  /* Exit block also may contain copy from pseudo containing
	     return value to hard register.  */
	  if (GET_CODE (insn) == INSN
	      && (set = single_set (insn))
	      && SET_DEST (set) == current_function_return_rtx
	      && REG_P (SET_SRC (set))
	      && !return_value_pseudo)
	    {
	      return_value_pseudo = SET_SRC (set);
	      continue;
	    }

	  break;
	}

      /* If INSN is zero, then the search walked all the way through the
	 block without hitting anything interesting.  This block is a
	 valid alternate exit block.  */
      if (insn == NULL)
	alternate_exit = e->src;
      else
	return_value_pseudo = NULL;
    }

  /* If the function uses ADDRESSOF, we can't (easily) determine
     at this point if the value will end up on the stack.  */
  no_sibcalls_this_function |= sequence_uses_addressof (insns);

  /* Walk the insn chain and find any CALL_PLACEHOLDER insns.  We need to
     select one of the insn sequences attached to each CALL_PLACEHOLDER.

     The different sequences represent different ways to implement the call,
     i.e., tail recursion, sibling call or normal call.

     Since we do not create nested CALL_PLACEHOLDERs, the scan
     continues with the insn that was after a replaced CALL_PLACEHOLDER;
     we don't rescan the replacement insns.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CALL_INSN
	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  int sibcall = (XEXP (PATTERN (insn), 1) != NULL_RTX);
	  int tailrecursion = (XEXP (PATTERN (insn), 2) != NULL_RTX);
	  basic_block call_block = BLOCK_FOR_INSN (insn);

	  /* alloca (until we have stack slot life analysis) inhibits
	     sibling call optimizations, but not tail recursion.  The same
	     applies if we use varargs or stdarg, since they may implicitly
	     take the address of an argument.  */
	  if (current_function_calls_alloca || current_function_stdarg)
	    sibcall = 0;

	  /* See if there are any reasons we can't perform either sibling or
	     tail call optimizations.  We must be careful with stack slots
	     which are live at potential optimization sites.  */
	  if (no_sibcalls_this_function
	      /* ??? Overly conservative.  */
	      || frame_offset
	      /* Any function that calls setjmp might have longjmp called from
		 any called function.  ??? We really should represent this
		 properly in the CFG so that this needn't be special cased.  */
	      || current_function_calls_setjmp
	      /* Can't if more than one successor or single successor is not
		 exit block.  These two tests prevent tail call optimization
		 in the presence of active exception handlers.  */
	      || call_block->succ == NULL
	      || call_block->succ->succ_next != NULL
	      || (call_block->succ->dest != EXIT_BLOCK_PTR
		  && call_block->succ->dest != alternate_exit)
	      /* If this call doesn't end the block, there are operations at
		 the end of the block which we must execute after returning.  */
	      || ! call_ends_block_p (insn, BB_END (call_block)))
	    sibcall = 0, tailrecursion = 0;

	  /* Select a set of insns to implement the call and emit them.
	     Tail recursion is the most efficient, so select it over
	     a tail/sibling call.  */

	  if (sibcall || tailrecursion)
	    successful_replacement = true;
	  replaced_call_placeholder = true;

	  replace_call_placeholder (insn,
				    tailrecursion != 0
				      ? sibcall_use_tail_recursion
				      : sibcall != 0
					 ? sibcall_use_sibcall
					 : sibcall_use_normal);
	}
    }

  if (successful_replacement)
    {
      rtx insn;
      tree arg;

      /* A sibling call sequence invalidates any REG_EQUIV notes made for
	 this function's incoming arguments.

	 At the start of RTL generation we know the only REG_EQUIV notes
	 in the rtl chain are those for incoming arguments, so we can safely
	 flush any REG_EQUIV note.

	 This is (slight) overkill.  We could keep track of the highest
	 argument we clobber and be more selective in removing notes, but it
	 does not seem to be worth the effort.  */
      purge_reg_equiv_notes ();

      /* A sibling call sequence also may invalidate RTX_UNCHANGING_P
	 flag of some incoming arguments MEM RTLs, because it can write into
	 those slots.  We clear all those bits now.

	 This is (slight) overkill; we could keep track of which arguments
	 we actually write into.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    purge_mem_unchanging_flag (PATTERN (insn));
	}

      /* Similarly, invalidate RTX_UNCHANGING_P for any incoming
	 arguments passed in registers.  */
      for (arg = DECL_ARGUMENTS (current_function_decl);
	   arg;
	   arg = TREE_CHAIN (arg))
	{
	  if (REG_P (DECL_RTL (arg)))
	    RTX_UNCHANGING_P (DECL_RTL (arg)) = false;
	}
    }

  /* There may have been NOTE_INSN_BLOCK_{BEGIN,END} notes in the
     CALL_PLACEHOLDER alternatives that we didn't emit.  Rebuild the
     lexical block tree to correspond to the notes that still exist.  */
  if (replaced_call_placeholder)
    reorder_blocks ();

  /* This information will be invalid after inline expansion.  Kill it now.  */
  free_basic_block_vars (0);
  free_EXPR_LIST_list (&tail_recursion_label_list);
}
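
The gating logic above is a series of independent vetoes: alloca and stdarg kill sibling calls only, while setjmp, a nonzero frame offset, and the wrong CFG shape kill both sibling calls and tail recursion, with tail recursion preferred when both survive. A condensed sketch of that decision, with a hypothetical call_site summary struct standing in for the real cfun and CFG queries:

#include <stdbool.h>

/* Hypothetical summary of one CALL_PLACEHOLDER site.  */
struct call_site
{
  bool calls_alloca, is_stdarg;		/* veto sibling calls only */
  bool calls_setjmp, has_frame_slots;	/* veto both optimizations */
  bool single_succ_is_exit;		/* required CFG shape */
  bool call_ends_block;
};

enum call_kind { CALL_NORMAL, CALL_SIBCALL, CALL_TAILREC };

/* Mirror the veto chain in optimize_sibling_and_tail_recursive_calls:
   start from what the placeholder offers, strike out what the site
   forbids, and prefer tail recursion over a sibling call.  */
static enum call_kind
choose_call_kind (const struct call_site *s, bool want_sibcall,
		  bool want_tailrec)
{
  if (s->calls_alloca || s->is_stdarg)
    want_sibcall = false;
  if (s->calls_setjmp || s->has_frame_slots
      || !s->single_succ_is_exit || !s->call_ends_block)
    want_sibcall = want_tailrec = false;
  if (want_tailrec)
    return CALL_TAILREC;
  return want_sibcall ? CALL_SIBCALL : CALL_NORMAL;
}

The surviving choice is what the real pass feeds to replace_call_placeholder at the end of the loop.
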
Exemplo n.º 24
0
static void
print_rtx (rtx in_rtx)
{
  int i = 0;
  int j;
  const char *format_ptr;
  int is_insn;

  if (sawclose)
    {
      if (flag_simple)
        fputc (' ', outfile);
      else
        fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, "");
      sawclose = 0;
    }

  if (in_rtx == 0)
    {
      fputs ("(nil)", outfile);
      sawclose = 1;
      return;
    }
  else if (GET_CODE (in_rtx) > NUM_RTX_CODE)
    {
      fprintf (outfile, "(??? bad code %d)\n", GET_CODE (in_rtx));
      sawclose = 1;
      return;
    }

  is_insn = INSN_P (in_rtx);

  /* When printing in VCG format we write INSNs, NOTE, LABEL, and BARRIER
     in separate nodes and therefore have to handle them specially here.  */
  if (dump_for_graph
      && (is_insn || NOTE_P (in_rtx)
          || LABEL_P (in_rtx) || BARRIER_P (in_rtx)))
    {
      i = 3;
      indent = 0;
    }
  else
    {
      /* Print name of expression code.  */
      if (flag_simple && GET_CODE (in_rtx) == CONST_INT)
        fputc ('(', outfile);
      else
        fprintf (outfile, "(%s", GET_RTX_NAME (GET_CODE (in_rtx)));

      if (! flag_simple)
        {
          if (RTX_FLAG (in_rtx, in_struct))
            fputs ("/s", outfile);

          if (RTX_FLAG (in_rtx, volatil))
            fputs ("/v", outfile);

          if (RTX_FLAG (in_rtx, unchanging))
            fputs ("/u", outfile);

          if (RTX_FLAG (in_rtx, frame_related))
            fputs ("/f", outfile);

          if (RTX_FLAG (in_rtx, jump))
            fputs ("/j", outfile);

          if (RTX_FLAG (in_rtx, call))
            fputs ("/c", outfile);

          if (RTX_FLAG (in_rtx, return_val))
            fputs ("/i", outfile);

          /* Print REG_NOTE names for EXPR_LIST and INSN_LIST.  */
          if (GET_CODE (in_rtx) == EXPR_LIST
              || GET_CODE (in_rtx) == INSN_LIST)
            fprintf (outfile, ":%s",
                     GET_REG_NOTE_NAME (GET_MODE (in_rtx)));

          /* For other rtl, print the mode if it's not VOID.  */
          else if (GET_MODE (in_rtx) != VOIDmode)
            fprintf (outfile, ":%s", GET_MODE_NAME (GET_MODE (in_rtx)));
        }
    }

#ifndef GENERATOR_FILE
  if (GET_CODE (in_rtx) == CONST_DOUBLE && FLOAT_MODE_P (GET_MODE (in_rtx)))
    i = 5;
#endif

  /* Get the format string and skip the first elements if we have handled
     them already.  */
  format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx)) + i;
  for (; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
    switch (*format_ptr++)
      {
        const char *str;

      case 'T':
        str = XTMPL (in_rtx, i);
        goto string;

      case 'S':
      case 's':
        str = XSTR (in_rtx, i);
      string:

        if (str == 0)
          fputs (dump_for_graph ? " \\\"\\\"" : " \"\"", outfile);
        else
          {
            if (dump_for_graph)
              fprintf (outfile, " (\\\"%s\\\")", str);
            else
              fprintf (outfile, " (\"%s\")", str);
          }
        sawclose = 1;
        break;

        /* 0 indicates a field for internal use that should not be printed.
           An exception is the third field of a NOTE, where it indicates
           that the field has several different valid contents.  */
      case '0':
        if (i == 1 && REG_P (in_rtx))
          {
            if (REGNO (in_rtx) != ORIGINAL_REGNO (in_rtx))
              fprintf (outfile, " [%d]", ORIGINAL_REGNO (in_rtx));
          }
#ifndef GENERATOR_FILE
        else if (i == 1 && GET_CODE (in_rtx) == SYMBOL_REF)
          {
            int flags = SYMBOL_REF_FLAGS (in_rtx);
            if (flags)
              fprintf (outfile, " [flags 0x%x]", flags);
          }
        else if (i == 2 && GET_CODE (in_rtx) == SYMBOL_REF)
          {
            tree decl = SYMBOL_REF_DECL (in_rtx);
            if (decl)
              print_node_brief (outfile, "", decl, 0);
          }
#endif
        else if (i == 4 && NOTE_P (in_rtx))
          {
            switch (NOTE_LINE_NUMBER (in_rtx))
              {
              case NOTE_INSN_EH_REGION_BEG:
              case NOTE_INSN_EH_REGION_END:
                if (flag_dump_unnumbered)
                  fprintf (outfile, " #");
                else
                  fprintf (outfile, " %d", NOTE_EH_HANDLER (in_rtx));
                sawclose = 1;
                break;

              case NOTE_INSN_BLOCK_BEG:
              case NOTE_INSN_BLOCK_END:
#ifndef GENERATOR_FILE
                dump_addr (outfile, " ", NOTE_BLOCK (in_rtx));
#endif
                sawclose = 1;
                break;

              case NOTE_INSN_BASIC_BLOCK:
                {
#ifndef GENERATOR_FILE
                  basic_block bb = NOTE_BASIC_BLOCK (in_rtx);
                  if (bb != 0)
                    fprintf (outfile, " [bb %d]", bb->index);
#endif
                  break;
                }

              case NOTE_INSN_EXPECTED_VALUE:
                indent += 2;
                if (!sawclose)
                  fprintf (outfile, " ");
                print_rtx (NOTE_EXPECTED_VALUE (in_rtx));
                indent -= 2;
                break;

              case NOTE_INSN_DELETED_LABEL:
                {
                  const char *label = NOTE_DELETED_LABEL_NAME (in_rtx);
                  if (label)
                    fprintf (outfile, " (\"%s\")", label);
                  else
                    fprintf (outfile, " \"\"");
                }
                break;

              case NOTE_INSN_SWITCH_TEXT_SECTIONS:
                {
#ifndef GENERATOR_FILE
                  basic_block bb = NOTE_BASIC_BLOCK (in_rtx);
                  if (bb != 0)
                    fprintf (outfile, " [bb %d]", bb->index);
#endif
                  break;
                }
                
              case NOTE_INSN_VAR_LOCATION:
#ifndef GENERATOR_FILE
                fprintf (outfile, " (");
                print_mem_expr (outfile, NOTE_VAR_LOCATION_DECL (in_rtx));
                fprintf (outfile, " ");
                print_rtx (NOTE_VAR_LOCATION_LOC (in_rtx));
                fprintf (outfile, ")");
#endif
                break;

              default:
                {
                  const char * const str = X0STR (in_rtx, i);

                  if (NOTE_LINE_NUMBER (in_rtx) < 0)
                    ;
                  else if (str == 0)
                    fputs (dump_for_graph ? " \\\"\\\"" : " \"\"", outfile);
                  else
                    {
                      if (dump_for_graph)
                        fprintf (outfile, " (\\\"%s\\\")", str);
                      else
                        fprintf (outfile, " (\"%s\")", str);
                    }
                  break;
                }
              }
          }
        break;

      case 'e':
      do_e:
        indent += 2;
        if (!sawclose)
          fprintf (outfile, " ");
        print_rtx (XEXP (in_rtx, i));
        indent -= 2;
        break;

      case 'E':
      case 'V':
        indent += 2;
        if (sawclose)
          {
            fprintf (outfile, "\n%s%*s",
                     print_rtx_head, indent * 2, "");
            sawclose = 0;
          }
        fputs (" [", outfile);
        if (NULL != XVEC (in_rtx, i))
          {
            indent += 2;
            if (XVECLEN (in_rtx, i))
              sawclose = 1;

            for (j = 0; j < XVECLEN (in_rtx, i); j++)
              print_rtx (XVECEXP (in_rtx, i, j));

            indent -= 2;
          }
        if (sawclose)
          fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, "");

        fputs ("]", outfile);
        sawclose = 1;
        indent -= 2;
        break;

      case 'w':
        if (! flag_simple)
          fprintf (outfile, " ");
        fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, XWINT (in_rtx, i));
        if (! flag_simple)
          fprintf (outfile, " [" HOST_WIDE_INT_PRINT_HEX "]",
                   XWINT (in_rtx, i));
        break;

      case 'i':
        if (i == 4 && INSN_P (in_rtx))
          {
#ifndef GENERATOR_FILE
            /*  Pretty-print insn locators.  Ignore scoping as it is mostly
                redundant with line number information and do not print anything
                when there is no location information available.  */
            if (INSN_LOCATOR (in_rtx) && insn_file (in_rtx))
              fprintf (outfile, " %s:%i", insn_file (in_rtx), insn_line (in_rtx));
#endif
          }
        else if (i == 6 && NOTE_P (in_rtx))
          {
            /* This field is only used for NOTE_INSN_DELETED_LABEL, and
               other times often contains garbage from INSN->NOTE death.  */
            if (NOTE_LINE_NUMBER (in_rtx) == NOTE_INSN_DELETED_LABEL)
              fprintf (outfile, " %d",  XINT (in_rtx, i));
          }
        else
          {
            int value = XINT (in_rtx, i);
            const char *name;

#ifndef GENERATOR_FILE
            if (REG_P (in_rtx) && value < FIRST_PSEUDO_REGISTER)
              fprintf (outfile, " %d %s", REGNO (in_rtx),
                       reg_names[REGNO (in_rtx)]);
            else if (REG_P (in_rtx)
                     && value <= LAST_VIRTUAL_REGISTER)
              {
                if (value == VIRTUAL_INCOMING_ARGS_REGNUM)
                  fprintf (outfile, " %d virtual-incoming-args", value);
                else if (value == VIRTUAL_STACK_VARS_REGNUM)
                  fprintf (outfile, " %d virtual-stack-vars", value);
                else if (value == VIRTUAL_STACK_DYNAMIC_REGNUM)
                  fprintf (outfile, " %d virtual-stack-dynamic", value);
                else if (value == VIRTUAL_OUTGOING_ARGS_REGNUM)
                  fprintf (outfile, " %d virtual-outgoing-args", value);
                else if (value == VIRTUAL_CFA_REGNUM)
                  fprintf (outfile, " %d virtual-cfa", value);
                else
                  fprintf (outfile, " %d virtual-reg-%d", value,
                           value-FIRST_VIRTUAL_REGISTER);
              }
            else
#endif
              if (flag_dump_unnumbered
                  && (is_insn || NOTE_P (in_rtx)))
              fputc ('#', outfile);
            else
              fprintf (outfile, " %d", value);

#ifndef GENERATOR_FILE
            if (REG_P (in_rtx) && REG_ATTRS (in_rtx))
              {
                fputs (" [", outfile);
                if (ORIGINAL_REGNO (in_rtx) != REGNO (in_rtx))
                  fprintf (outfile, "orig:%i", ORIGINAL_REGNO (in_rtx));
                if (REG_EXPR (in_rtx))
                  print_mem_expr (outfile, REG_EXPR (in_rtx));

                if (REG_OFFSET (in_rtx))
                  fprintf (outfile, "+" HOST_WIDE_INT_PRINT_DEC,
                           REG_OFFSET (in_rtx));
                fputs (" ]", outfile);
              }
#endif

            if (is_insn && &INSN_CODE (in_rtx) == &XINT (in_rtx, i)
                && XINT (in_rtx, i) >= 0
                && (name = get_insn_name (XINT (in_rtx, i))) != NULL)
              fprintf (outfile, " {%s}", name);
            sawclose = 0;
          }
        break;

      /* Print NOTE_INSN names rather than integer codes.  */

      case 'n':
        if (XINT (in_rtx, i) >= (int) NOTE_INSN_BIAS
            && XINT (in_rtx, i) < (int) NOTE_INSN_MAX)
          fprintf (outfile, " %s", GET_NOTE_INSN_NAME (XINT (in_rtx, i)));
        else
          fprintf (outfile, " %d", XINT (in_rtx, i));
        sawclose = 0;
        break;

      case 'u':
        if (XEXP (in_rtx, i) != NULL)
          {
            rtx sub = XEXP (in_rtx, i);
            enum rtx_code subc = GET_CODE (sub);

            if (GET_CODE (in_rtx) == LABEL_REF)
              {
                if (subc == NOTE
                    && NOTE_LINE_NUMBER (sub) == NOTE_INSN_DELETED_LABEL)
                  {
                    if (flag_dump_unnumbered)
                      fprintf (outfile, " [# deleted]");
                    else
                      fprintf (outfile, " [%d deleted]", INSN_UID (sub));
                    sawclose = 0;
                    break;
                  }

                if (subc != CODE_LABEL)
                  goto do_e;
              }

            if (flag_dump_unnumbered)
              fputs (" #", outfile);
            else
              fprintf (outfile, " %d", INSN_UID (sub));
          }
        else
          fputs (" 0", outfile);
        sawclose = 0;
        break;

      case 'b':
#ifndef GENERATOR_FILE
        if (XBITMAP (in_rtx, i) == NULL)
          fputs (" {null}", outfile);
        else
          bitmap_print (outfile, XBITMAP (in_rtx, i), " {", "}");
#endif
        sawclose = 0;
        break;

      case 't':
#ifndef GENERATOR_FILE
        dump_addr (outfile, " ", XTREE (in_rtx, i));
#endif
        break;

      case '*':
        fputs (" Unknown", outfile);
        sawclose = 0;
        break;

      case 'B':
#ifndef GENERATOR_FILE
        if (XBBDEF (in_rtx, i))
          fprintf (outfile, " %i", XBBDEF (in_rtx, i)->index);
#endif
        break;

      default:
        gcc_unreachable ();
      }

  switch (GET_CODE (in_rtx))
    {
#ifndef GENERATOR_FILE
    case MEM:
      fprintf (outfile, " [" HOST_WIDE_INT_PRINT_DEC, MEM_ALIAS_SET (in_rtx));

      if (MEM_EXPR (in_rtx))
        print_mem_expr (outfile, MEM_EXPR (in_rtx));

      if (MEM_OFFSET (in_rtx))
        fprintf (outfile, "+" HOST_WIDE_INT_PRINT_DEC,
                 INTVAL (MEM_OFFSET (in_rtx)));

      if (MEM_SIZE (in_rtx))
        fprintf (outfile, " S" HOST_WIDE_INT_PRINT_DEC,
                 INTVAL (MEM_SIZE (in_rtx)));

      if (MEM_ALIGN (in_rtx) != 1)
        fprintf (outfile, " A%u", MEM_ALIGN (in_rtx));

      fputc (']', outfile);
      break;

    case CONST_DOUBLE:
      if (FLOAT_MODE_P (GET_MODE (in_rtx)))
        {
          char s[60];

          real_to_decimal (s, CONST_DOUBLE_REAL_VALUE (in_rtx),
                           sizeof (s), 0, 1);
          fprintf (outfile, " %s", s);

          real_to_hexadecimal (s, CONST_DOUBLE_REAL_VALUE (in_rtx),
                               sizeof (s), 0, 1);
          fprintf (outfile, " [%s]", s);
        }
      break;
#endif

    case CODE_LABEL:
      fprintf (outfile, " [%d uses]", LABEL_NUSES (in_rtx));
      switch (LABEL_KIND (in_rtx))
        {
          case LABEL_NORMAL: break;
          case LABEL_STATIC_ENTRY: fputs (" [entry]", outfile); break;
          case LABEL_GLOBAL_ENTRY: fputs (" [global entry]", outfile); break;
          case LABEL_WEAK_ENTRY: fputs (" [weak entry]", outfile); break;
          default: gcc_unreachable ();
        }
      break;

    default:
      break;
    }

  if (dump_for_graph
      && (is_insn || NOTE_P (in_rtx)
          || LABEL_P (in_rtx) || BARRIER_P (in_rtx)))
    sawclose = 0;
  else
    {
      fputc (')', outfile);
      sawclose = 1;
    }
}
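
Almost everything print_rtx does is table-driven: GET_RTX_FORMAT supplies one format letter per operand, and the big switch renders each field accordingly. A miniature of that scheme with a hypothetical two-code table (mini_format, mini_name and mini_rtx are invented; only the 'e' and 'i' letters echo the real format characters):

#include <stdio.h>

/* Hypothetical miniature: each code has a format string, one
   letter per operand ('i' = int field, 'e' = sub-expression).  */
enum mini_code { MINI_REG, MINI_PLUS };
static const char *const mini_format[] = { "i", "ee" };
static const char *const mini_name[]   = { "reg", "plus" };

struct mini_rtx
{
  enum mini_code code;
  int ival;			/* used by 'i' fields */
  struct mini_rtx *ops[2];	/* used by 'e' fields */
};

static void
mini_print (FILE *f, const struct mini_rtx *x)
{
  const char *fmt = mini_format[x->code];
  fprintf (f, "(%s", mini_name[x->code]);
  for (int i = 0; fmt[i]; i++)
    switch (fmt[i])
      {
      case 'i':
	fprintf (f, " %d", x->ival);
	break;
      case 'e':
	fputc (' ', f);
	mini_print (f, x->ops[i]);
	break;
      }
  fputc (')', f);
}

int
main (void)
{
  struct mini_rtx r0 = { MINI_REG, 0, { 0, 0 } };
  struct mini_rtx r1 = { MINI_REG, 1, { 0, 0 } };
  struct mini_rtx sum = { MINI_PLUS, 0, { &r0, &r1 } };
  mini_print (stdout, &sum);	/* prints "(plus (reg 0) (reg 1))" */
  putchar ('\n');
  return 0;
}
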
Exemplo n.º 25
0
static int
optimize_mode_switching (void)
{
  int e;
  basic_block bb;
  bool need_commit = false;
  static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
  int entity_map[N_ENTITIES];
  struct bb_info *bb_info[N_ENTITIES];
  int i, j;
  int n_entities = 0;
  int max_num_modes = 0;
  bool emitted ATTRIBUTE_UNUSED = false;
  basic_block post_entry = 0;
  basic_block pre_exit = 0;
  struct edge_list *edge_list = 0;

  /* These bitmaps are used for the LCM algorithm.  */
  sbitmap *kill, *del, *insert, *antic, *transp, *comp;
  sbitmap *avin, *avout;

  for (e = N_ENTITIES - 1; e >= 0; e--)
    if (OPTIMIZE_MODE_SWITCHING (e))
      {
	int entry_exit_extra = 0;

	/* Create the list of segments within each basic block.
	   If NORMAL_MODE is defined, allow for two extra
	   blocks split from the entry and exit block.  */
	if (targetm.mode_switching.entry && targetm.mode_switching.exit)
	  entry_exit_extra = 3;

	bb_info[n_entities]
	  = XCNEWVEC (struct bb_info,
		      last_basic_block_for_fn (cfun) + entry_exit_extra);
	entity_map[n_entities++] = e;
	if (num_modes[e] > max_num_modes)
	  max_num_modes = num_modes[e];
      }

  if (! n_entities)
    return 0;

  /* Make sure if MODE_ENTRY is defined MODE_EXIT is defined.  */
  gcc_assert ((targetm.mode_switching.entry && targetm.mode_switching.exit)
	      || (!targetm.mode_switching.entry
		  && !targetm.mode_switching.exit));

  if (targetm.mode_switching.entry && targetm.mode_switching.exit)
    {
      /* Split the edge from the entry block, so that we can note that
	 NORMAL_MODE is supplied there.  */
      post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
      pre_exit = create_pre_exit (n_entities, entity_map, num_modes);
    }

  df_analyze ();

  /* Create the bitmap vectors.  */
  antic = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				n_entities * max_num_modes);
  transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				 n_entities * max_num_modes);
  comp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
			       n_entities * max_num_modes);
  avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
			       n_entities * max_num_modes);
  avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				n_entities * max_num_modes);
  kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
			       n_entities * max_num_modes);

  bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
  bitmap_vector_clear (antic, last_basic_block_for_fn (cfun));
  bitmap_vector_clear (comp, last_basic_block_for_fn (cfun));

  for (j = n_entities - 1; j >= 0; j--)
    {
      int e = entity_map[j];
      int no_mode = num_modes[e];
      struct bb_info *info = bb_info[j];
      rtx_insn *insn;

      /* Determine the mode (if any) that the first use of entity E needs.
	 This will be the mode that is anticipatable for this block.
	 Also compute the initial transparency settings.  */
      FOR_EACH_BB_FN (bb, cfun)
	{
	  struct seginfo *ptr;
	  int last_mode = no_mode;
	  bool any_set_required = false;
	  HARD_REG_SET live_now;

	  info[bb->index].mode_out = info[bb->index].mode_in = no_mode;

	  REG_SET_TO_HARD_REG_SET (live_now, df_get_live_in (bb));

	  /* Pretend the mode is clobbered across abnormal edges.  */
	  {
	    edge_iterator ei;
	    edge eg;
	    FOR_EACH_EDGE (eg, ei, bb->preds)
	      if (eg->flags & EDGE_COMPLEX)
		break;
	    if (eg)
	      {
		rtx_insn *ins_pos = BB_HEAD (bb);
		if (LABEL_P (ins_pos))
		  ins_pos = NEXT_INSN (ins_pos);
		gcc_assert (NOTE_INSN_BASIC_BLOCK_P (ins_pos));
		if (ins_pos != BB_END (bb))
		  ins_pos = NEXT_INSN (ins_pos);
		ptr = new_seginfo (no_mode, ins_pos, bb->index, live_now);
		add_seginfo (info + bb->index, ptr);
		for (i = 0; i < no_mode; i++)
		  clear_mode_bit (transp[bb->index], j, i);
	      }
	  }

	  FOR_BB_INSNS (bb, insn)
	    {
	      if (INSN_P (insn))
		{
		  int mode = targetm.mode_switching.needed (e, insn);
		  rtx link;

		  if (mode != no_mode && mode != last_mode)
		    {
		      any_set_required = true;
		      last_mode = mode;
		      ptr = new_seginfo (mode, insn, bb->index, live_now);
		      add_seginfo (info + bb->index, ptr);
		      for (i = 0; i < no_mode; i++)
			clear_mode_bit (transp[bb->index], j, i);
		    }

		  if (targetm.mode_switching.after)
		    last_mode = targetm.mode_switching.after (e, last_mode,
							      insn);

		  /* Update LIVE_NOW.  */
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_DEAD)
		      reg_dies (XEXP (link, 0), &live_now);

		  note_stores (PATTERN (insn), reg_becomes_live, &live_now);
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_UNUSED)
		      reg_dies (XEXP (link, 0), &live_now);
		}
	    }

	  info[bb->index].computing = last_mode;
	  /* Check for blocks without ANY mode requirements.
	     N.B. because of MODE_AFTER, last_mode might still
	     be different from no_mode, in which case we need to
	     mark the block as nontransparent.  */
	  if (!any_set_required)
	    {
	      ptr = new_seginfo (no_mode, BB_END (bb), bb->index, live_now);
	      add_seginfo (info + bb->index, ptr);
	      if (last_mode != no_mode)
		for (i = 0; i < no_mode; i++)
		  clear_mode_bit (transp[bb->index], j, i);
	    }
	}
      if (targetm.mode_switching.entry && targetm.mode_switching.exit)
	{
	  int mode = targetm.mode_switching.entry (e);

	  info[post_entry->index].mode_out =
	    info[post_entry->index].mode_in = no_mode;
	  if (pre_exit)
	    {
	      info[pre_exit->index].mode_out =
		info[pre_exit->index].mode_in = no_mode;
	    }

	  if (mode != no_mode)
	    {
	      bb = post_entry;

	      /* By always making this nontransparent, we save
		 an extra check in make_preds_opaque.  We also
		 need this to avoid confusing pre_edge_lcm when
		 antic is cleared but transp and comp are set.  */
	      for (i = 0; i < no_mode; i++)
		clear_mode_bit (transp[bb->index], j, i);

	      /* Insert a fake computing definition of MODE into entry
		 blocks which compute no mode. This represents the mode on
		 entry.  */
	      info[bb->index].computing = mode;

	      if (pre_exit)
		info[pre_exit->index].seginfo->mode =
		  targetm.mode_switching.exit (e);
	    }
	}

      /* Set the anticipatable and computing arrays.  */
      for (i = 0; i < no_mode; i++)
	{
	  int m = targetm.mode_switching.priority (entity_map[j], i);

	  FOR_EACH_BB_FN (bb, cfun)
	    {
	      if (info[bb->index].seginfo->mode == m)
		set_mode_bit (antic[bb->index], j, m);

	      if (info[bb->index].computing == m)
		set_mode_bit (comp[bb->index], j, m);
	    }
	}
    }

  /* Calculate the optimal locations for placing
     mode switches to modes with priority I.  */

  FOR_EACH_BB_FN (bb, cfun)
    bitmap_not (kill[bb->index], transp[bb->index]);

  edge_list = pre_edge_lcm_avs (n_entities * max_num_modes, transp, comp, antic,
				kill, avin, avout, &insert, &del);

  for (j = n_entities - 1; j >= 0; j--)
    {
      int no_mode = num_modes[entity_map[j]];

      /* Insert all mode sets that have been inserted by lcm.  */

      for (int ed = NUM_EDGES (edge_list) - 1; ed >= 0; ed--)
	{
	  edge eg = INDEX_EDGE (edge_list, ed);

	  eg->aux = (void *)(intptr_t)-1;

	  for (i = 0; i < no_mode; i++)
	    {
	      int m = targetm.mode_switching.priority (entity_map[j], i);
	      if (mode_bit_p (insert[ed], j, m))
		{
		  eg->aux = (void *)(intptr_t)m;
		  break;
		}
	    }
	}

      FOR_EACH_BB_FN (bb, cfun)
	{
	  struct bb_info *info = bb_info[j];
	  int last_mode = no_mode;

	  /* Initialize mode-out availability for bb.  */
	  for (i = 0; i < no_mode; i++)
	    if (mode_bit_p (avout[bb->index], j, i))
	      {
		if (last_mode == no_mode)
		  last_mode = i;
		if (last_mode != i)
		  {
		    last_mode = no_mode;
		    break;
		  }
	      }
	  info[bb->index].mode_out = last_mode;

	  /* Initialize mode-in availability for bb.  */
	  last_mode = no_mode;
	  for (i = 0; i < no_mode; i++)
	    if (mode_bit_p (avin[bb->index], j, i))
	      {
		if (last_mode == no_mode)
		  last_mode = i;
		if (last_mode != i)
		  {
		    last_mode = no_mode;
		    break;
		  }
	      }
	  info[bb->index].mode_in = last_mode;

	  for (i = 0; i < no_mode; i++)
	    if (mode_bit_p (del[bb->index], j, i))
	      info[bb->index].seginfo->mode = no_mode;
	}

      /* Now output the remaining mode sets in all the segments.  */

      /* In case no mode was inserted, the mode information on the edges
	 might not be complete.
	 Update the mode info on the edges and commit pending mode sets.  */
      need_commit |= commit_mode_sets (edge_list, entity_map[j], bb_info[j]);

      /* Reset modes for next entity.  */
      clear_aux_for_edges ();

      FOR_EACH_BB_FN (bb, cfun)
	{
	  struct seginfo *ptr, *next;
	  int cur_mode = bb_info[j][bb->index].mode_in;

	  for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next)
	    {
	      next = ptr->next;
	      if (ptr->mode != no_mode)
		{
		  rtx_insn *mode_set;

		  rtl_profile_for_bb (bb);
		  start_sequence ();

		  targetm.mode_switching.emit (entity_map[j], ptr->mode,
					       cur_mode, ptr->regs_live);
		  mode_set = get_insns ();
		  end_sequence ();

		  /* Modes kill each other inside a basic block.  */
		  cur_mode = ptr->mode;

		  /* Insert MODE_SET only if it is nonempty.  */
		  if (mode_set != NULL_RTX)
		    {
		      emitted = true;
		      if (NOTE_INSN_BASIC_BLOCK_P (ptr->insn_ptr))
			/* We need to emit the insns in a FIFO-like manner,
			   i.e. the first to be emitted at our insertion
			   point ends up first in the instruction stream.
			   Because we made sure that NOTE_INSN_BASIC_BLOCK is
			   only used for initially empty basic blocks, we
			   can achieve this by appending at the end of
			   the block.  */
			emit_insn_after
			  (mode_set, BB_END (NOTE_BASIC_BLOCK (ptr->insn_ptr)));
		      else
			emit_insn_before (mode_set, ptr->insn_ptr);
		    }

		  default_rtl_profile ();
		}

	      free (ptr);
	    }
	}

      free (bb_info[j]);
    }

  free_edge_list (edge_list);

  /* Finished. Free up all the things we've allocated.  */
  sbitmap_vector_free (del);
  sbitmap_vector_free (insert);
  sbitmap_vector_free (kill);
  sbitmap_vector_free (antic);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);
  sbitmap_vector_free (avin);
  sbitmap_vector_free (avout);

  if (need_commit)
    commit_edge_insertions ();

  if (targetm.mode_switching.entry && targetm.mode_switching.exit)
    cleanup_cfg (CLEANUP_NO_INSN_DEL);
  else if (!need_commit && !emitted)
    return 0;

  return 1;
}
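
The LCM vectors above are sized n_entities * max_num_modes, so each (entity, mode) pair owns one bit per basic block; that is what the set_mode_bit/clear_mode_bit/mode_bit_p helpers index. A minimal sketch of that packing, assuming the j * max_num_modes + i layout those helpers imply (the toy row array and function names are invented, not GCC's sbitmap API):

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

/* One row of a toy bitmap: enough bits for
   N_ENTITIES * MAX_NUM_MODES (entity, mode) pairs.  */
#define MAX_NUM_MODES 4
#define N_ENTITIES    2
static unsigned char row[(N_ENTITIES * MAX_NUM_MODES + 7) / 8];

/* Pack (entity, mode) into one bit index, as the helpers imply.  */
static int
mode_index (int entity, int mode)
{
  return entity * MAX_NUM_MODES + mode;
}

static void
set_mode_bit (int entity, int mode)
{
  int b = mode_index (entity, mode);
  row[b / 8] |= 1u << (b % 8);
}

static bool
mode_bit_p (int entity, int mode)
{
  int b = mode_index (entity, mode);
  return (row[b / 8] >> (b % 8)) & 1;
}

int
main (void)
{
  memset (row, 0, sizeof row);
  set_mode_bit (1, 2);		/* entity 1 needs mode 2 here */
  printf ("%d %d\n", mode_bit_p (1, 2), mode_bit_p (0, 2));  /* 1 0 */
  return 0;
}

With this layout, making a block nontransparent for every mode of entity j, as the pass does above, is just a loop over i clearing bit (j, i).
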
Exemplo n.º 26
0
static void
initialize_uninitialized_regs (void)
{
  basic_block bb;
  bitmap already_genned = BITMAP_ALLOC (NULL);

  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

  df_analyze ();

  FOR_EACH_BB (bb)
    {
      rtx insn;
      bitmap lr = DF_LR_IN (bb);
      bitmap ur = DF_LIVE_IN (bb);
      bitmap_clear (already_genned);

      FOR_BB_INSNS (bb, insn)
	{
	  unsigned int uid = INSN_UID (insn);
	  df_ref *use_rec;
	  if (!INSN_P (insn))
	    continue;

	  for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
	    {
	      df_ref use = *use_rec;
	      unsigned int regno = DF_REF_REGNO (use);

	      /* Only do this for the pseudos.  */
	      if (regno < FIRST_PSEUDO_REGISTER)
		continue;

	      /* Do not generate multiple moves for the same regno.
		 This is common for sequences of subreg operations.
		 They would be deleted during combine but there is no
		 reason to churn the system.  */
	      if (bitmap_bit_p (already_genned, regno))
		continue;

	      /* A use is MUST-uninitialized if it reaches the top of
		 the block from inside the block (the LR test) and no
		 def for it reaches the top of the block from outside
		 the block (the UR test).  */
	      if (bitmap_bit_p (lr, regno)
		  && (!bitmap_bit_p (ur, regno)))
		{
		  rtx move_insn;
		  rtx reg = DF_REF_REAL_REG (use);

		  bitmap_set_bit (already_genned, regno); 

		  start_sequence ();
		  emit_move_insn (reg, CONST0_RTX (GET_MODE (reg)));
		  move_insn = get_insns ();
		  end_sequence ();
		  emit_insn_before (move_insn, insn);
		  if (dump_file)
		    fprintf (dump_file,
			     "adding initialization in %s of reg %d in block %d for insn %d.\n",
			     current_function_name (), regno, bb->index, uid);
		}
	    }
	}
    }