Example #1
File: sese.c  Project: zh9118/gcc
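/* Build the liveouts of REGION: visit every basic block of the current
   function to collect the names that are live out of REGION, then prune
   liveouts that are only kept alive by debug statements.  */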
static void
sese_build_liveouts (sese region, bitmap liveouts)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    sese_build_liveouts_bb (region, liveouts, bb);
  if (MAY_HAVE_DEBUG_STMTS)
    FOR_EACH_BB_FN (bb, cfun)
      sese_reset_debug_liveouts_bb (region, liveouts, bb);
}
Example #2
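/* Combine consecutive stack adjustments in every basic block of the
   current function.  */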
static void
combine_stack_adjustments (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    combine_stack_adjustments_for_block (bb);
}
Example #3
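/* Commit all pending statement insertions on edges: first on the single
   successor edge of the entry block, then on every outgoing edge of every
   basic block.  */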
void
gsi_commit_edge_inserts (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  gsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			      NULL);

  FOR_EACH_BB_FN (bb, cfun)
    FOR_EACH_EDGE (e, ei, bb->succs)
      gsi_commit_one_edge_insert (e, NULL);
}
Example #4
File: lcm.c  Project: krnowak/gcc
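/* Compute the insertion and deletion sets for edge-based LCM:
   DEL[bb] = ANTLOC[bb] & ~LATERIN[bb] for each basic block, and
   INSERT[e] = LATER[e] & ~LATERIN[succ(e)] for each edge.  */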
static void
compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc,
		       sbitmap *later, sbitmap *laterin, sbitmap *insert,
		       sbitmap *del)
{
  int x;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    bitmap_and_compl (del[bb->index], antloc[bb->index],
			laterin[bb->index]);

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    {
      basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);

      if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
	bitmap_and_compl (insert[x], later[x],
			  laterin[last_basic_block_for_fn (cfun)]);
      else
	bitmap_and_compl (insert[x], later[x], laterin[b->index]);
    }
}
Example #5
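/* Find all insns that need a particular mode set for some entity and
   insert the required mode switches at the places computed by LCM.
   Return nonzero if any changes were made.  */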
static int
optimize_mode_switching (void)
{
  int e;
  basic_block bb;
  bool need_commit = false;
  static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
  int entity_map[N_ENTITIES];
  struct bb_info *bb_info[N_ENTITIES];
  int i, j;
  int n_entities = 0;
  int max_num_modes = 0;
  bool emitted ATTRIBUTE_UNUSED = false;
  basic_block post_entry = 0;
  basic_block pre_exit = 0;
  struct edge_list *edge_list = 0;

  /* These bitmaps are used for the LCM algorithm.  */
  sbitmap *kill, *del, *insert, *antic, *transp, *comp;
  sbitmap *avin, *avout;

  for (e = N_ENTITIES - 1; e >= 0; e--)
    if (OPTIMIZE_MODE_SWITCHING (e))
      {
	int entry_exit_extra = 0;

	/* Create the list of segments within each basic block.
	   If NORMAL_MODE is defined, allow for two extra
	   blocks split from the entry and exit block.  */
	if (targetm.mode_switching.entry && targetm.mode_switching.exit)
	  entry_exit_extra = 3;

	bb_info[n_entities]
	  = XCNEWVEC (struct bb_info,
		      last_basic_block_for_fn (cfun) + entry_exit_extra);
	entity_map[n_entities++] = e;
	if (num_modes[e] > max_num_modes)
	  max_num_modes = num_modes[e];
      }

  if (! n_entities)
    return 0;

  /* Make sure that if MODE_ENTRY is defined, MODE_EXIT is defined as well.  */
  gcc_assert ((targetm.mode_switching.entry && targetm.mode_switching.exit)
	      || (!targetm.mode_switching.entry
		  && !targetm.mode_switching.exit));

  if (targetm.mode_switching.entry && targetm.mode_switching.exit)
    {
      /* Split the edge from the entry block, so that we can note that
	 NORMAL_MODE is supplied there.  */
      post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
      pre_exit = create_pre_exit (n_entities, entity_map, num_modes);
    }

  df_analyze ();

  /* Create the bitmap vectors.  */
  antic = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				n_entities * max_num_modes);
  transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				 n_entities * max_num_modes);
  comp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
			       n_entities * max_num_modes);
  avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
			       n_entities * max_num_modes);
  avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				n_entities * max_num_modes);
  kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
			       n_entities * max_num_modes);

  bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
  bitmap_vector_clear (antic, last_basic_block_for_fn (cfun));
  bitmap_vector_clear (comp, last_basic_block_for_fn (cfun));

  for (j = n_entities - 1; j >= 0; j--)
    {
      int e = entity_map[j];
      int no_mode = num_modes[e];
      struct bb_info *info = bb_info[j];
      rtx_insn *insn;

      /* Determine what mode the first use (if any) of entity E needs.
	 This will be the mode that is anticipatable for this block.
	 Also compute the initial transparency settings.  */
      FOR_EACH_BB_FN (bb, cfun)
	{
	  struct seginfo *ptr;
	  int last_mode = no_mode;
	  bool any_set_required = false;
	  HARD_REG_SET live_now;

	  info[bb->index].mode_out = info[bb->index].mode_in = no_mode;

	  REG_SET_TO_HARD_REG_SET (live_now, df_get_live_in (bb));

	  /* Pretend the mode is clobbered across abnormal edges.  */
	  {
	    edge_iterator ei;
	    edge eg;
	    FOR_EACH_EDGE (eg, ei, bb->preds)
	      if (eg->flags & EDGE_COMPLEX)
		break;
	    if (eg)
	      {
		rtx_insn *ins_pos = BB_HEAD (bb);
		if (LABEL_P (ins_pos))
		  ins_pos = NEXT_INSN (ins_pos);
		gcc_assert (NOTE_INSN_BASIC_BLOCK_P (ins_pos));
		if (ins_pos != BB_END (bb))
		  ins_pos = NEXT_INSN (ins_pos);
		ptr = new_seginfo (no_mode, ins_pos, bb->index, live_now);
		add_seginfo (info + bb->index, ptr);
		for (i = 0; i < no_mode; i++)
		  clear_mode_bit (transp[bb->index], j, i);
	      }
	  }

	  FOR_BB_INSNS (bb, insn)
	    {
	      if (INSN_P (insn))
		{
		  int mode = targetm.mode_switching.needed (e, insn);
		  rtx link;

		  if (mode != no_mode && mode != last_mode)
		    {
		      any_set_required = true;
		      last_mode = mode;
		      ptr = new_seginfo (mode, insn, bb->index, live_now);
		      add_seginfo (info + bb->index, ptr);
		      for (i = 0; i < no_mode; i++)
			clear_mode_bit (transp[bb->index], j, i);
		    }

		  if (targetm.mode_switching.after)
		    last_mode = targetm.mode_switching.after (e, last_mode,
							      insn);

		  /* Update LIVE_NOW.  */
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_DEAD)
		      reg_dies (XEXP (link, 0), &live_now);

		  note_stores (PATTERN (insn), reg_becomes_live, &live_now);
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_UNUSED)
		      reg_dies (XEXP (link, 0), &live_now);
		}
	    }

	  info[bb->index].computing = last_mode;
	  /* Check for blocks without ANY mode requirements.
	     N.B. because of MODE_AFTER, last_mode might still
	     be different from no_mode, in which case we need to
	     mark the block as nontransparent.  */
	  if (!any_set_required)
	    {
	      ptr = new_seginfo (no_mode, BB_END (bb), bb->index, live_now);
	      add_seginfo (info + bb->index, ptr);
	      if (last_mode != no_mode)
		for (i = 0; i < no_mode; i++)
		  clear_mode_bit (transp[bb->index], j, i);
	    }
	}
      if (targetm.mode_switching.entry && targetm.mode_switching.exit)
	{
	  int mode = targetm.mode_switching.entry (e);

	  info[post_entry->index].mode_out =
	    info[post_entry->index].mode_in = no_mode;
	  if (pre_exit)
	    {
	      info[pre_exit->index].mode_out =
		info[pre_exit->index].mode_in = no_mode;
	    }

	  if (mode != no_mode)
	    {
	      bb = post_entry;

	      /* By always making this nontransparent, we save
		 an extra check in make_preds_opaque.  We also
		 need this to avoid confusing pre_edge_lcm when
		 antic is cleared but transp and comp are set.  */
	      for (i = 0; i < no_mode; i++)
		clear_mode_bit (transp[bb->index], j, i);

	      /* Insert a fake computing definition of MODE into entry
		 blocks which compute no mode. This represents the mode on
		 entry.  */
	      info[bb->index].computing = mode;

	      if (pre_exit)
		info[pre_exit->index].seginfo->mode =
		  targetm.mode_switching.exit (e);
	    }
	}

      /* Set the anticipatable and computing arrays.  */
      for (i = 0; i < no_mode; i++)
	{
	  int m = targetm.mode_switching.priority (entity_map[j], i);

	  FOR_EACH_BB_FN (bb, cfun)
	    {
	      if (info[bb->index].seginfo->mode == m)
		set_mode_bit (antic[bb->index], j, m);

	      if (info[bb->index].computing == m)
		set_mode_bit (comp[bb->index], j, m);
	    }
	}
    }

  /* Calculate the optimal locations for placing the
     mode switches to modes with priority I.  */

  FOR_EACH_BB_FN (bb, cfun)
    bitmap_not (kill[bb->index], transp[bb->index]);

  edge_list = pre_edge_lcm_avs (n_entities * max_num_modes, transp, comp, antic,
				kill, avin, avout, &insert, &del);

  for (j = n_entities - 1; j >= 0; j--)
    {
      int no_mode = num_modes[entity_map[j]];

      /* Insert all mode sets that have been inserted by lcm.  */

      for (int ed = NUM_EDGES (edge_list) - 1; ed >= 0; ed--)
	{
	  edge eg = INDEX_EDGE (edge_list, ed);

	  eg->aux = (void *)(intptr_t)-1;

	  for (i = 0; i < no_mode; i++)
	    {
	      int m = targetm.mode_switching.priority (entity_map[j], i);
	      if (mode_bit_p (insert[ed], j, m))
		{
		  eg->aux = (void *)(intptr_t)m;
		  break;
		}
	    }
	}

      FOR_EACH_BB_FN (bb, cfun)
	{
	  struct bb_info *info = bb_info[j];
	  int last_mode = no_mode;

	  /* Initialize the mode-out availability for bb.  */
	  for (i = 0; i < no_mode; i++)
	    if (mode_bit_p (avout[bb->index], j, i))
	      {
		if (last_mode == no_mode)
		  last_mode = i;
		if (last_mode != i)
		  {
		    last_mode = no_mode;
		    break;
		  }
	      }
	  info[bb->index].mode_out = last_mode;

	  /* Initialize the mode-in availability for bb.  */
	  last_mode = no_mode;
	  for (i = 0; i < no_mode; i++)
	    if (mode_bit_p (avin[bb->index], j, i))
	      {
		if (last_mode == no_mode)
		  last_mode = i;
		if (last_mode != i)
		  {
		    last_mode = no_mode;
		    break;
		  }
	      }
	  info[bb->index].mode_in = last_mode;

	  for (i = 0; i < no_mode; i++)
	    if (mode_bit_p (del[bb->index], j, i))
	      info[bb->index].seginfo->mode = no_mode;
	}

      /* Now output the remaining mode sets in all the segments.  */

      /* In case no mode was inserted, the mode information on the edges
	 might not be complete.
	 Update the mode info on the edges and commit the pending mode sets.  */
      need_commit |= commit_mode_sets (edge_list, entity_map[j], bb_info[j]);

      /* Reset modes for next entity.  */
      clear_aux_for_edges ();

      FOR_EACH_BB_FN (bb, cfun)
	{
	  struct seginfo *ptr, *next;
	  int cur_mode = bb_info[j][bb->index].mode_in;

	  for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next)
	    {
	      next = ptr->next;
	      if (ptr->mode != no_mode)
		{
		  rtx_insn *mode_set;

		  rtl_profile_for_bb (bb);
		  start_sequence ();

		  targetm.mode_switching.emit (entity_map[j], ptr->mode,
					       cur_mode, ptr->regs_live);
		  mode_set = get_insns ();
		  end_sequence ();

		  /* modes kill each other inside a basic block.  */
		  cur_mode = ptr->mode;

		  /* Insert MODE_SET only if it is nonempty.  */
		  if (mode_set != NULL_RTX)
		    {
		      emitted = true;
		      if (NOTE_INSN_BASIC_BLOCK_P (ptr->insn_ptr))
			/* We need to emit the insns in a FIFO-like manner,
			   i.e. the first to be emitted at our insertion
			   point ends up first in the instruction stream.
			   Because we made sure that NOTE_INSN_BASIC_BLOCK is
			   only used for initially empty basic blocks, we
			   can achieve this by appending at the end of
			   the block.  */
			emit_insn_after
			  (mode_set, BB_END (NOTE_BASIC_BLOCK (ptr->insn_ptr)));
		      else
			emit_insn_before (mode_set, ptr->insn_ptr);
		    }

		  default_rtl_profile ();
		}

	      free (ptr);
	    }
	}

      free (bb_info[j]);
    }

  free_edge_list (edge_list);

  /* Finished. Free up all the things we've allocated.  */
  sbitmap_vector_free (del);
  sbitmap_vector_free (insert);
  sbitmap_vector_free (kill);
  sbitmap_vector_free (antic);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);
  sbitmap_vector_free (avin);
  sbitmap_vector_free (avout);

  if (need_commit)
    commit_edge_insertions ();

  if (targetm.mode_switching.entry && targetm.mode_switching.exit)
    cleanup_cfg (CLEANUP_NO_INSN_DEL);
  else if (!need_commit && !emitted)
    return 0;

  return 1;
}
Example #6
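/* Fix up the loop tree of the current function after CFG changes: remove
   loops that no longer exist, rediscover the remaining loop structure, and
   record blocks whose loop depth changed in CHANGED_BBS (if non-NULL).
   Return the number of newly discovered loops.  */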
unsigned
fix_loop_structure (bitmap changed_bbs)
{
  basic_block bb;
  int record_exits = 0;
  struct loop *loop;
  unsigned old_nloops, i;

  timevar_push (TV_LOOP_INIT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "fix_loop_structure: fixing up loops for function\n");

  /* We need exact and fast dominance info to be available.  */
  gcc_assert (dom_info_state (CDI_DOMINATORS) == DOM_OK);

  if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
    {
      release_recorded_exits (cfun);
      record_exits = LOOPS_HAVE_RECORDED_EXITS;
    }

  /* Remember the depth of the blocks in the loop hierarchy, so that we can
     recognize blocks whose loop nesting relationship has changed.  */
  if (changed_bbs)
    FOR_EACH_BB_FN (bb, cfun)
      bb->aux = (void *) (size_t) loop_depth (bb->loop_father);

  /* Remove the dead loops from structures.  We start from the innermost
     loops, so that when we remove the loops, we know that the loops inside
     are preserved, and do not waste time relinking loops that will be
     removed later.  */
  FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
    {
      /* Detect the case that the loop is no longer present even though
         it wasn't marked for removal.
	 ???  If we do that we can get away with not marking loops for
	 removal at all.  And possibly avoid some spurious removals.  */
      if (loop->header
	  && bb_loop_header_p (loop->header))
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "fix_loop_structure: removing loop %d\n",
		 loop->num);

      while (loop->inner)
	{
	  struct loop *ploop = loop->inner;
	  flow_loop_tree_node_remove (ploop);
	  flow_loop_tree_node_add (loop_outer (loop), ploop);
	}

      /* Remove the loop.  */
      if (loop->header)
	loop->former_header = loop->header;
      else
	gcc_assert (loop->former_header != NULL);
      loop->header = NULL;
      flow_loop_tree_node_remove (loop);
    }

  /* Remember the number of loops so we can return how many new loops
     flow_loops_find discovered.  */
  old_nloops = number_of_loops (cfun);

  /* Re-compute loop structure in-place.  */
  flow_loops_find (current_loops);

  /* Mark the blocks whose loop has changed.  */
  if (changed_bbs)
    {
      FOR_EACH_BB_FN (bb, cfun)
	{
	  if ((void *) (size_t) loop_depth (bb->loop_father) != bb->aux)
	    bitmap_set_bit (changed_bbs, bb->index);

	  bb->aux = NULL;
	}
    }

  /* Finally free deleted loops.  */
  bool any_deleted = false;
  FOR_EACH_VEC_ELT (*get_loops (cfun), i, loop)
    if (loop && loop->header == NULL)
      {
	if (dump_file
	    && ((unsigned) loop->former_header->index
		< basic_block_info_for_fn (cfun)->length ()))
	  {
	    basic_block former_header
	      = BASIC_BLOCK_FOR_FN (cfun, loop->former_header->index);
	    /* If the old header still exists we want to check if the
	       original loop is re-discovered or the old header is now
	       part of a newly discovered loop.
	       In both cases we should have avoided removing the loop.  */
	    if (former_header == loop->former_header)
	      {
		if (former_header->loop_father->header == former_header)
		  fprintf (dump_file, "fix_loop_structure: rediscovered "
			   "removed loop %d as loop %d with old header %d\n",
			   loop->num, former_header->loop_father->num,
			   former_header->index);
		else if ((unsigned) former_header->loop_father->num
			 >= old_nloops)
		  fprintf (dump_file, "fix_loop_structure: header %d of "
			   "removed loop %d is part of the newly "
			   "discovered loop %d with header %d\n",
			   former_header->index, loop->num,
			   former_header->loop_father->num,
			   former_header->loop_father->header->index);
	      }
	  }
	(*get_loops (cfun))[i] = NULL;
	flow_loop_free (loop);
	any_deleted = true;
      }

  /* If we deleted loops then the cached scalar evolutions referring to
     those loops become invalid.  */
  if (any_deleted && scev_initialized_p ())
    scev_reset_htab ();

  loops_state_clear (LOOPS_NEED_FIXUP);

  /* Apply flags to loops.  */
  apply_loop_flags (current_loops->state | record_exits);

  checking_verify_loop_structure ();

  timevar_pop (TV_LOOP_INIT);

  return number_of_loops (cfun) - old_nloops;
}