Example #1
static gimple *
free_temp_expr_table (temp_expr_table_p t)
{
  gimple *ret = NULL;

#ifdef ENABLE_CHECKING
  unsigned x;
  for (x = 0; x <= num_var_partitions (t->map); x++)
    gcc_assert (!t->kill_list[x]);
  for (x = 0; x < num_ssa_names + 1; x++)
    {
      gcc_assert (t->expr_decl_uids[x] == NULL);
      gcc_assert (t->partition_dependencies[x] == NULL);
    }
#endif

  BITMAP_FREE (t->partition_in_use);
  BITMAP_FREE (t->new_replaceable_dependencies);

  free (t->expr_decl_uids);
  free (t->kill_list);
  free (t->partition_dependencies);
  free (t->num_in_part);

  if (t->replaceable_expressions)
    ret = t->replaceable_expressions;

  free (t);
  return ret;
}
Example #2
File: fwprop.c  Project: ollie314/gcc
static void
build_single_def_use_links (void)
{
  /* We use the multiple definitions problem to compute our restricted
     use-def chains.  */
  df_set_flags (DF_EQ_NOTES);
  df_md_add_problem ();
  df_note_add_problem ();
  df_analyze ();
  df_maybe_reorganize_use_refs (DF_REF_ORDER_BY_INSN_WITH_NOTES);

  use_def_ref.create (DF_USES_TABLE_SIZE ());
  use_def_ref.safe_grow_cleared (DF_USES_TABLE_SIZE ());

  reg_defs.create (max_reg_num ());
  reg_defs.safe_grow_cleared (max_reg_num ());

  reg_defs_stack.create (n_basic_blocks_for_fn (cfun) * 10);
  local_md = BITMAP_ALLOC (NULL);
  local_lr = BITMAP_ALLOC (NULL);

  /* Walk the dominator tree looking for single reaching definitions
     dominating the uses.  This is similar to how SSA form is built.  */
  single_def_use_dom_walker (CDI_DOMINATORS)
    .walk (cfun->cfg->x_entry_block_ptr);

  BITMAP_FREE (local_lr);
  BITMAP_FREE (local_md);
  reg_defs.release ();
  reg_defs_stack.release ();
}
Example #3
static void 
process_replaceable (temp_expr_table_p tab, gimple stmt)
{
  tree var, def, basevar;
  int version;
  ssa_op_iter iter;
  bitmap def_vars, use_vars;

#ifdef ENABLE_CHECKING
  gcc_assert (is_replaceable_p (stmt));
#endif

  def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF);
  version = SSA_NAME_VERSION (def);
  basevar = SSA_NAME_VAR (def);
  def_vars = BITMAP_ALLOC (NULL);

  bitmap_set_bit (def_vars, DECL_UID (basevar));

  /* Add this expression to the dependency list for each use partition.  */
  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
    {
      int var_version = SSA_NAME_VERSION (var);

      use_vars = tab->expr_decl_uids[var_version];
      add_dependence (tab, version, var);
      if (use_vars)
        {
	  bitmap_ior_into (def_vars, use_vars);
	  BITMAP_FREE (tab->expr_decl_uids[var_version]);
	}
      else
	bitmap_set_bit (def_vars, DECL_UID (SSA_NAME_VAR (var)));
    }
Example #4
static void
finished_with_expr (temp_expr_table_p tab, int version, bool free_expr)
{
  unsigned i;
  bitmap_iterator bi;

  /* Remove this expression from its dependent lists.  The partition dependence
     list is retained and transferred later to whoever uses this version.  */
  if (tab->partition_dependencies[version])
    {
      EXECUTE_IF_SET_IN_BITMAP (tab->partition_dependencies[version], 0, i, bi)
	remove_from_partition_kill_list (tab, i, version);
      BITMAP_FREE (tab->partition_dependencies[version]);
    }
  if (free_expr)
    BITMAP_FREE (tab->expr_decl_uids[version]);
}
Example #5
static inline void 
remove_from_partition_kill_list (temp_expr_table_p tab, int p, int version)
{
#ifdef ENABLE_CHECKING
  gcc_assert (tab->kill_list[p]);
#endif
  bitmap_clear_bit (tab->kill_list[p], version);
  if (bitmap_empty_p (tab->kill_list[p]))
    {
      bitmap_clear_bit (tab->partition_in_use, p);
      BITMAP_FREE (tab->kill_list[p]);
    }
}
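
Example #5 shows a lazy-free idiom: a per-partition bitmap is cleared bit by bit and released with BITMAP_FREE the moment it becomes empty, so a NULL pointer doubles as "no members".  A minimal sketch of the same pattern, using a hypothetical helper name (not part of the GCC API) and only the standard bitmap routines:

/* Hypothetical helper, assuming GCC's bitmap API (bitmap_clear_bit,
   bitmap_empty_p, BITMAP_FREE).  The per-key bitmap is freed as soon
   as it becomes empty; BITMAP_FREE also NULLs *SETP.  */
static inline void
clear_member (bitmap *setp, int member)
{
  if (!*setp)
    return;			/* Nothing recorded for this key.  */
  bitmap_clear_bit (*setp, member);
  if (bitmap_empty_p (*setp))
    BITMAP_FREE (*setp);
}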
Example #6
static void
tree_ssa_forward_propagate_single_use_vars (void)
{
  basic_block bb;
  varray_type vars_worklist, cond_worklist;

  vars = BITMAP_ALLOC (NULL);
  VARRAY_TREE_INIT (vars_worklist, 10, "VARS worklist");
  VARRAY_TREE_INIT (cond_worklist, 10, "COND worklist");

  /* Prime the COND_EXPR worklist by placing all the COND_EXPRs on the
     worklist.  */
  FOR_EACH_BB (bb)
    {
      tree last = last_stmt (bb);
      if (last && TREE_CODE (last) == COND_EXPR)
	VARRAY_PUSH_TREE (cond_worklist, last);
    }

  while (VARRAY_ACTIVE_SIZE (cond_worklist) > 0)
    {
      /* First get a list of all the interesting COND_EXPRs and potential
	 single use variables which feed those COND_EXPRs.  This will drain
	 COND_WORKLIST and initialize VARS_WORKLIST.  */
      record_single_argument_cond_exprs (cond_worklist, &vars_worklist, vars);

      if (VARRAY_ACTIVE_SIZE (vars_worklist) > 0)
	{
	  /* Now compute immediate uses for all the variables we care about.  */
	  compute_immediate_uses (TDFA_USE_OPS, need_imm_uses_for);

	  /* We've computed immediate uses, so we can/must clear the VARS
	     bitmap for the next iteration.  */
	  bitmap_clear (vars);

	  /* And optimize.  This will drain VARS_WORKLIST and initialize
	     COND_WORKLIST for the next iteration.  */
	  substitute_single_use_vars (&cond_worklist, vars_worklist);

	  /* We do not incrementally update the dataflow information
	     so we must free it here and recompute the necessary bits
	     on the next iteration.  If this turns out to be expensive,
	     methods for incrementally updating the dataflow are known.  */
	  free_df ();
	}
    }

  /* All done.  Clean up.  */
  BITMAP_FREE (vars);
}
Example #7
static basic_block
nearest_common_dominator_of_uses (def_operand_p def_p, bool *debug_stmts)
{
  tree var = DEF_FROM_PTR (def_p);
  bitmap blocks = BITMAP_ALLOC (NULL);
  basic_block commondom;
  unsigned int j;
  bitmap_iterator bi;
  imm_use_iterator imm_iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      gimple *usestmt = USE_STMT (use_p);
      basic_block useblock;

      if (gphi *phi = dyn_cast <gphi *> (usestmt))
	{
	  int idx = PHI_ARG_INDEX_FROM_USE (use_p);

	  useblock = gimple_phi_arg_edge (phi, idx)->src;
	}
      else if (is_gimple_debug (usestmt))
	{
	  *debug_stmts = true;
	  continue;
	}
      else
	{
	  useblock = gimple_bb (usestmt);
	}

      /* Short circuit. Nothing dominates the entry block.  */
      if (useblock == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  BITMAP_FREE (blocks);
	  return NULL;
	}
      bitmap_set_bit (blocks, useblock->index);
    }
Example #8
void
lto_bitmap_free (bitmap b)
{
  BITMAP_FREE (b);
}
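
Example #8 is the minimal case: BITMAP_FREE pairs with a BITMAP_ALLOC and, being a macro, also resets the freed variable to NULL.  A small illustrative sketch of the full lifecycle, assuming only the standard GCC bitmap routines (the function and variable names here are made up, not taken from any pass):

/* Illustrative allocate/use/free cycle.  BITMAP_ALLOC (NULL) allocates
   from the default bitmap obstack; BITMAP_FREE releases the bitmap and
   NULLs the pointer it is given.  */
static void
bitmap_lifecycle_sketch (void)
{
  bitmap live = BITMAP_ALLOC (NULL);

  bitmap_set_bit (live, 42);
  if (bitmap_bit_p (live, 42))
    bitmap_clear_bit (live, 42);

  BITMAP_FREE (live);		/* LIVE is NULL after this.  */
  gcc_assert (live == NULL);
}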
Example #9
unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (ptr, object_size_type);
  else if (TREE_CODE (ptr) == CALL_EXPR)
    {
      tree arg = pass_through_call (ptr);

      if (arg)
	return compute_builtin_object_size (arg, object_size_type);
      else
	return alloc_object_size (ptr, object_size_type);
    }
  else if (TREE_CODE (ptr) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (ptr))
	   && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
	{
	  struct object_size_info osi;
	  bitmap_iterator bi;
	  unsigned int i;

	  if (dump_file)
	    {
	      fprintf (dump_file, "Computing %s %sobject size for ",
		       (object_size_type & 2) ? "minimum" : "maximum",
		       (object_size_type & 1) ? "sub" : "");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, ":\n");
	    }

	  osi.visited = BITMAP_ALLOC (NULL);
	  osi.reexamine = BITMAP_ALLOC (NULL);
	  osi.object_size_type = object_size_type;
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;

	  /* First pass: walk UD chains, compute object sizes that
	     can be computed.  osi.reexamine bitmap at the end will
	     contain what variables were found in dependency cycles
	     and therefore need to be reexamined.  */
	  osi.pass = 0;
	  osi.changed = false;
	  collect_object_sizes_for (&osi, ptr);

	  /* Second pass: keep recomputing object sizes of variables
	     that need reexamination, until no object sizes are
	     increased or all object sizes are computed.  */
	  if (! bitmap_empty_p (osi.reexamine))
	    {
	      bitmap reexamine = BITMAP_ALLOC (NULL);

	      /* If looking for minimum instead of maximum object size,
		 detect cases where a pointer is increased in a loop.
		 Although even without this detection pass 2 would eventually
		 terminate, it could take a long time.  If a pointer is
		 increasing this way, we need to assume 0 object size.
		 E.g. p = &buf[0]; while (cond) p = p + 4;  */
	      if (object_size_type & 2)
		{
		  osi.depths = xcalloc (num_ssa_names, sizeof (unsigned int));
		  osi.stack = xmalloc (num_ssa_names * sizeof (unsigned int));
		  osi.tos = osi.stack;
		  osi.pass = 1;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      check_for_plus_in_loops (&osi, ssa_name (i));

		  free (osi.depths);
		  osi.depths = NULL;
		  free (osi.stack);
		  osi.stack = NULL;
		  osi.tos = NULL;
		}

	      do
		{
		  osi.pass = 2;
		  osi.changed = false;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      {
			collect_object_sizes_for (&osi, ssa_name (i));
			if (dump_file && (dump_flags & TDF_DETAILS))
			  {
			    fprintf (dump_file, "Reexamining ");
			    print_generic_expr (dump_file, ssa_name (i),
						dump_flags);
			    fprintf (dump_file, "\n");
			  }
		      }
		}
	      while (osi.changed);

	      BITMAP_FREE (reexamine);
	    }
	  EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	    bitmap_set_bit (computed[object_size_type], i);

	  /* Debugging dumps.  */
	  if (dump_file)
	    {
	      EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
		if (object_sizes[object_size_type][i]
		    != unknown[object_size_type])
		  {
		    print_generic_expr (dump_file, ssa_name (i),
					dump_flags);
		    fprintf (dump_file,
			     ": %s %sobject size "
			     HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			     (object_size_type & 2) ? "minimum" : "maximum",
			     (object_size_type & 1) ? "sub" : "",
			     object_sizes[object_size_type][i]);
		  }
	    }

	  BITMAP_FREE (osi.reexamine);
	  BITMAP_FREE (osi.visited);
	}
Example #10
/* Propagate the constant parameters found by ipcp_iterate_stage()
   to the function's code.  */
static void
ipcp_insert_stage (void)
{
  struct cgraph_node *node, *node1 = NULL;
  int i;
  VEC (cgraph_edge_p, heap) * redirect_callers;
  VEC (ipa_replace_map_p,gc)* replace_trees;
  int node_callers, count;
  tree parm_tree;
  struct ipa_replace_map *replace_param;
  fibheap_t heap;
  long overall_size = 0, new_size = 0;
  long max_new_size;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  if (dump_file)
    fprintf (dump_file, "\nIPA insert stage:\n\n");

  dead_nodes = BITMAP_ALLOC (NULL);

  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed)
      {
	if (node->count > max_count)
	  max_count = node->count;
	overall_size += node->local.inline_summary.self_size;
      }

  max_new_size = overall_size;
  if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
    max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
  max_new_size = max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;

  /* First collect all functions we proved to have constant arguments to
     heap.  */
  heap = fibheap_new ();
  for (node = cgraph_nodes; node; node = node->next)
    {
      struct ipa_node_params *info;
      /* Propagation of the constant is forbidden in certain conditions.  */
      if (!node->analyzed || !ipcp_node_modifiable_p (node))
	  continue;
      info = IPA_NODE_REF (node);
      if (ipa_is_called_with_var_arguments (info))
	continue;
      if (ipcp_const_param_count (node))
	node->aux = fibheap_insert (heap, ipcp_estimate_cloning_cost (node),
				    node);
     }

  /* Now clone in priority order until code size growth limits are met or
     heap is emptied.  */
  while (!fibheap_empty (heap))
    {
      struct ipa_node_params *info;
      int growth = 0;
      bitmap args_to_skip;
      struct cgraph_edge *cs;

      node = (struct cgraph_node *)fibheap_extract_min (heap);
      node->aux = NULL;
      if (dump_file)
	fprintf (dump_file, "considering function %s\n",
		 cgraph_node_name (node));

      growth = ipcp_estimate_growth (node);

      if (new_size + growth > max_new_size)
	break;
      if (growth
	  && optimize_function_for_size_p (DECL_STRUCT_FUNCTION (node->decl)))
	{
	  if (dump_file)
	    fprintf (dump_file, "Not versioning, cold code would grow");
	  continue;
	}

      info = IPA_NODE_REF (node);
      count = ipa_get_param_count (info);

      replace_trees = VEC_alloc (ipa_replace_map_p, gc, 1);

      if (node->local.can_change_signature)
	args_to_skip = BITMAP_GGC_ALLOC ();
      else
	args_to_skip = NULL;
      for (i = 0; i < count; i++)
	{
	  struct ipcp_lattice *lat = ipcp_get_lattice (info, i);
	  parm_tree = ipa_get_param (info, i);

	  /* We can proactively remove obviously unused arguments.  */
	  if (!ipa_is_param_used (info, i))
	    {
	      if (args_to_skip)
	        bitmap_set_bit (args_to_skip, i);
	      continue;
	    }

	  if (lat->type == IPA_CONST_VALUE)
	    {
	      replace_param =
		ipcp_create_replace_map (parm_tree, lat);
	      if (replace_param == NULL)
		break;
	      VEC_safe_push (ipa_replace_map_p, gc, replace_trees, replace_param);
	      if (args_to_skip)
	        bitmap_set_bit (args_to_skip, i);
	    }
	}
      if (i < count)
	{
	  if (dump_file)
	    fprintf (dump_file, "Not versioning, some parameters couldn't be replaced");
	  continue;
	}

      new_size += growth;

      /* Look if original function becomes dead after cloning.  */
      for (cs = node->callers; cs != NULL; cs = cs->next_caller)
	if (cs->caller == node || ipcp_need_redirect_p (cs))
	  break;
      if (!cs && cgraph_will_be_removed_from_program_if_no_direct_calls (node))
	bitmap_set_bit (dead_nodes, node->uid);

      /* Compute how many callers node has.  */
      node_callers = 0;
      for (cs = node->callers; cs != NULL; cs = cs->next_caller)
	node_callers++;
      redirect_callers = VEC_alloc (cgraph_edge_p, heap, node_callers);
      for (cs = node->callers; cs != NULL; cs = cs->next_caller)
	if (!cs->indirect_inlining_edge)
	  VEC_quick_push (cgraph_edge_p, redirect_callers, cs);

      /* Redirecting all the callers of the node to the
         new versioned node.  */
      node1 =
	cgraph_create_virtual_clone (node, redirect_callers, replace_trees,
				     args_to_skip, "constprop");
      args_to_skip = NULL;
      VEC_free (cgraph_edge_p, heap, redirect_callers);
      replace_trees = NULL;

      if (node1 == NULL)
	continue;
      ipcp_process_devirtualization_opportunities (node1);

      if (dump_file)
	fprintf (dump_file, "versioned function %s with growth %i, overall %i\n",
		 cgraph_node_name (node), (int)growth, (int)new_size);
      ipcp_init_cloned_node (node, node1);

      info = IPA_NODE_REF (node);
      for (i = 0; i < count; i++)
	{
	  struct ipcp_lattice *lat = ipcp_get_lattice (info, i);
	  if (lat->type == IPA_CONST_VALUE)
	    ipcp_discover_new_direct_edges (node1, i, lat->constant);
        }

      if (dump_file)
	dump_function_to_file (node1->decl, dump_file, dump_flags);

      for (cs = node->callees; cs; cs = cs->next_callee)
        if (cs->callee->aux)
	  {
	    fibheap_delete_node (heap, (fibnode_t) cs->callee->aux);
	    cs->callee->aux = fibheap_insert (heap,
	    				      ipcp_estimate_cloning_cost (cs->callee),
					      cs->callee);
	  }
    }

  while (!fibheap_empty (heap))
    {
      if (dump_file)
	fprintf (dump_file, "skipping function %s\n",
		 cgraph_node_name (node));
      node = (struct cgraph_node *) fibheap_extract_min (heap);
      node->aux = NULL;
    }
  fibheap_delete (heap);
  BITMAP_FREE (dead_nodes);
  ipcp_update_callgraph ();
  ipcp_update_profiling ();
}
Example #11
static void
propagate_bits (struct cgraph_node *x)
{
  ipa_reference_vars_info_t x_info = get_reference_vars_info_from_cgraph (x);
  ipa_reference_global_vars_info_t x_global = x_info->global;

  struct cgraph_edge *e;
  for (e = x->callees; e; e = e->next_callee) 
    {
      struct cgraph_node *y = e->callee;

      /* Only look at the master nodes and skip external nodes.  */
      y = cgraph_master_clone (y);
      if (y)
        {
          if (get_reference_vars_info_from_cgraph (y))
            {
              ipa_reference_vars_info_t y_info = get_reference_vars_info_from_cgraph (y);
              ipa_reference_global_vars_info_t y_global = y_info->global;
              
              if (x_global->statics_read
                  != all_module_statics)
                {
                  if (y_global->statics_read 
                      == all_module_statics)
                    {
                      BITMAP_FREE (x_global->statics_read);
                      x_global->statics_read 
                        = all_module_statics;
                    }
                  /* Skip bitmaps that are pointer equal to node's bitmap
                     (no reason to spin within the cycle).  */
                  else if (x_global->statics_read 
                           != y_global->statics_read)
                    bitmap_ior_into (x_global->statics_read,
                                     y_global->statics_read);
                }
              
              if (x_global->statics_written 
                  != all_module_statics)
                {
                  if (y_global->statics_written 
                      == all_module_statics)
                    {
                      BITMAP_FREE (x_global->statics_written);
                      x_global->statics_written 
                        = all_module_statics;
                    }
                  /* Skip bitmaps that are pointer equal to node's bitmap
                     (no reason to spin within the cycle).  */
                  else if (x_global->statics_written 
                           != y_global->statics_written)
                    bitmap_ior_into (x_global->statics_written,
                                     y_global->statics_written);
                }
            }
          else 
            {
              gcc_unreachable ();
            }
        }
    }
}
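
A recurring detail in example #11 is that before a per-node set is aliased to the shared all_module_statics bitmap, the old private bitmap must be released with BITMAP_FREE, otherwise it would leak on its obstack.  A minimal sketch of that idiom with a hypothetical helper (the helper name is illustrative, not GCC API):

/* Hypothetical helper: widen *SETP to the shared ALL_STATICS bitmap.
   The private bitmap is freed first; once the two pointers are equal,
   later merges can be skipped by pointer-equality checks like those
   in propagate_bits above.  */
static void
widen_to_all_statics (bitmap *setp, bitmap all_statics)
{
  if (*setp == all_statics)
    return;			/* Already sharing the global set.  */
  BITMAP_FREE (*setp);		/* Release the private copy.  */
  *setp = all_statics;
}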
Example #12
bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     unsigned HOST_WIDE_INT *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
      return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      if (tree_fits_shwi_p (offset)
		  && compute_builtin_object_size (ptr, object_size_type, psize))
		{
		  /* Return zero when the offset is out of bounds.  */
		  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
		  *psize = off < *psize ? *psize - off : 0;
		  return true;
		}
	    }
	}
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      if (num_ssa_names > object_sizes[object_size_type].length ())
	object_sizes[object_size_type].safe_grow (num_ssa_names);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %sobject size for ",
		   (object_size_type & 2) ? "minimum" : "maximum",
		   (object_size_type & 1) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
	 can be computed.  osi.reexamine bitmap at the end will
	 contain what variables were found in dependency cycles
	 and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & 2)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}