Code Example #1
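/* Exit cfg_layout mode: restore the RTL CFG hooks, fix up the
   fall-through predecessor of the exit block, reorder the insn chain
   to match the block layout, and release the pool that backed the
   per-block rbi data.  */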
void
cfg_layout_finalize (void)
{
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  rtl_register_cfg_hooks ();
  fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  free_alloc_pool (cfg_layout_pool);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->rbi = NULL;

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
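The pool released above follows the usual alloc-pool lifecycle: create_alloc_pool sizes a pool for fixed-size objects, pool_alloc and pool_free recycle individual elements, and free_alloc_pool tears the whole pool down at once. Below is a minimal sketch of that lifecycle; the element type, pool name, and block size are illustrative assumptions, not taken from the examples on this page.

#include "alloc-pool.h"

/* Hypothetical fixed-size element, standing in for structures such as
   the per-block data released in the example above.  */
struct demo_elt
{
  int payload;
};

static void
demo_pool_lifecycle (void)
{
  alloc_pool pool;
  struct demo_elt *elt;

  /* Arguments: a name for debugging, the element size, and how many
     elements each underlying block should hold.  */
  pool = create_alloc_pool ("demo pool", sizeof (struct demo_elt), 64);

  elt = (struct demo_elt *) pool_alloc (pool);
  elt->payload = 42;
  pool_free (pool, elt);

  /* Release every block in one call; elements never returned with
     pool_free are reclaimed along with everything else.  */
  free_alloc_pool (pool);
}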
Code Example #2
/* Free the alloc_pool if it is empty, and zero *POOL in that case.  */
void
free_alloc_pool_if_empty (alloc_pool *pool)
{
  if ((*pool)->elts_free == (*pool)->elts_allocated)
    {
      free_alloc_pool (*pool);
      *pool = NULL;
    }
}
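Because the helper takes an alloc_pool * rather than an alloc_pool, it can clear the caller's handle when the pool really is freed. A short usage sketch, with my_pool as a hypothetical handle created elsewhere by create_alloc_pool:

static alloc_pool my_pool;	/* hypothetical pool handle */

static void
release_my_pool_if_done (void)
{
  /* Frees the pool only when elts_free == elts_allocated, i.e. when
     every pool_alloc has been matched by a pool_free.  */
  free_alloc_pool_if_empty (&my_pool);

  if (my_pool == NULL)
    {
      /* The pool was empty and is gone; the zeroed handle keeps later
	 code from touching it by mistake.  */
    }
}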
Code Example #3
File: journal.c  Project: snua12/zlomekfs
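/* Tear down the journal subsystem: under journal_mutex, warn about any
   entries still allocated from journal_pool, release the pool, then
   unlock and destroy the mutex.  */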
void cleanup_journal_c(void)
{
	zfsd_mutex_lock(&journal_mutex);
#ifdef ENABLE_CHECKING
	if (journal_pool->elts_free < journal_pool->elts_allocated)
		message(LOG_WARNING, FACILITY_MEMORY,
				"Memory leak (%u elements) in journal_pool.\n",
				journal_pool->elts_allocated - journal_pool->elts_free);
#endif
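	/* free_alloc_pool releases the pool's blocks wholesale, so any
	   entries still counted as live above are dropped with it.  */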
	free_alloc_pool(journal_pool);
	zfsd_mutex_unlock(&journal_mutex);
	zfsd_mutex_destroy(&journal_mutex);
}
Code Example #4
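/* Entry point of the hard-register copy propagation pass: walk the
   basic blocks, tracking the contents of hard registers in all_vd and
   propagating copies forward within each block.  */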
static unsigned int
copyprop_hardreg_forward (void)
{
  struct value_data *all_vd;
  basic_block bb;
  sbitmap visited;
  bool analyze_called = false;

  all_vd = XNEWVEC (struct value_data, last_basic_block);

  visited = sbitmap_alloc (last_basic_block);
  bitmap_clear (visited);

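  /* Changes to debug insns are queued rather than applied on the spot;
     back the queue nodes with a dedicated pool so they can all be
     released at once when the pass finishes.  */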
  if (MAY_HAVE_DEBUG_INSNS)
    debug_insn_changes_pool
      = create_alloc_pool ("debug insn changes pool",
			   sizeof (struct queued_debug_insn_change), 256);

  FOR_EACH_BB (bb)
    {
      bitmap_set_bit (visited, bb->index);

      /* If a block has a single predecessor that we've already
	 processed, begin with the value data that was live at
	 the end of the predecessor block.  */
      /* ??? Ought to use more intelligent queuing of blocks.  */
      if (single_pred_p (bb)
	  && bitmap_bit_p (visited, single_pred (bb)->index)
	  && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
	{
	  all_vd[bb->index] = all_vd[single_pred (bb)->index];
	  if (all_vd[bb->index].n_debug_insn_changes)
	    {
	      unsigned int regno;

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		{
		  if (all_vd[bb->index].e[regno].debug_insn_changes)
		    {
		      all_vd[bb->index].e[regno].debug_insn_changes = NULL;
		      if (--all_vd[bb->index].n_debug_insn_changes == 0)
			break;
		    }
		}
	    }
	}
      else
	init_value_data (all_vd + bb->index);

      copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      FOR_EACH_BB (bb)
	if (bitmap_bit_p (visited, bb->index)
	    && all_vd[bb->index].n_debug_insn_changes)
	  {
	    unsigned int regno;
	    bitmap live;

	    if (!analyze_called)
	      {
		df_analyze ();
		analyze_called = true;
	      }
	    live = df_get_live_out (bb);
	    for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	      if (all_vd[bb->index].e[regno].debug_insn_changes)
		{
		  if (REGNO_REG_SET_P (live, regno))
		    apply_debug_insn_changes (all_vd + bb->index, regno);
		  if (all_vd[bb->index].n_debug_insn_changes == 0)
		    break;
		}
	  }

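      /* Every queued debug-insn change has now been applied or
	 discarded; release all of the queue nodes with a single call.  */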
      free_alloc_pool (debug_insn_changes_pool);
    }

  sbitmap_free (visited);
  free (all_vd);
  return 0;
}