Example #1
static void
analyze_function (struct cgraph_node *fn)
{
  ipa_reference_vars_info_t info 
    = xcalloc (1, sizeof (struct ipa_reference_vars_info_d));
  ipa_reference_local_vars_info_t l
    = xcalloc (1, sizeof (struct ipa_reference_local_vars_info_d));
  tree decl = fn->decl;

  /* Add the info to the tree's annotation.  */
  get_function_ann (fn->decl)->reference_vars_info = info;

  info->local = l;
  l->statics_read = BITMAP_ALLOC (&ipa_obstack);
  l->statics_written = BITMAP_ALLOC (&ipa_obstack);

  if (dump_file)
    fprintf (dump_file, "\n local analysis of %s\n", cgraph_node_name (fn));
  
  {
    struct function *this_cfun = DECL_STRUCT_FUNCTION (decl);
    basic_block this_block;

    FOR_EACH_BB_FN (this_block, this_cfun)
      {
        block_stmt_iterator bsi;
        for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
          walk_tree (bsi_stmt_ptr (bsi), scan_for_static_refs, 
                     fn, visited_nodes);
      }
  }
Example #2
static void
build_single_def_use_links (void)
{
  /* We use the multiple definitions problem to compute our restricted
     use-def chains.  */
  df_set_flags (DF_EQ_NOTES);
  df_md_add_problem ();
  df_note_add_problem ();
  df_analyze ();
  df_maybe_reorganize_use_refs (DF_REF_ORDER_BY_INSN_WITH_NOTES);

  use_def_ref.create (DF_USES_TABLE_SIZE ());
  use_def_ref.safe_grow_cleared (DF_USES_TABLE_SIZE ());

  reg_defs.create (max_reg_num ());
  reg_defs.safe_grow_cleared (max_reg_num ());

  reg_defs_stack.create (n_basic_blocks_for_fn (cfun) * 10);
  local_md = BITMAP_ALLOC (NULL);
  local_lr = BITMAP_ALLOC (NULL);

  /* Walk the dominator tree looking for single reaching definitions
     dominating the uses.  This is similar to how SSA form is built.  */
  single_def_use_dom_walker (CDI_DOMINATORS)
    .walk (cfun->cfg->x_entry_block_ptr);

  BITMAP_FREE (local_lr);
  BITMAP_FREE (local_md);
  reg_defs.release ();
  reg_defs_stack.release ();
}
Example #3
static temp_expr_table_p
new_temp_expr_table (var_map map)
{
  temp_expr_table_p t;
  unsigned x;

  t = XNEW (struct temp_expr_table_d);
  t->map = map;

  t->partition_dependencies = XCNEWVEC (bitmap, num_ssa_names + 1);
  t->expr_decl_uids = XCNEWVEC (bitmap, num_ssa_names + 1);
  t->kill_list = XCNEWVEC (bitmap, num_var_partitions (map) + 1);

  t->partition_in_use = BITMAP_ALLOC (NULL);

  t->virtual_partition = num_var_partitions (map);
  t->new_replaceable_dependencies = BITMAP_ALLOC (NULL);

  t->replaceable_expressions = NULL;
  t->num_in_part = XCNEWVEC (int, num_var_partitions (map));
  for (x = 1; x < num_ssa_names; x++)
    {
      int p;
      tree name = ssa_name (x);
      if (!name)
        continue;
      p = var_to_partition (map, name);
      if (p != NO_PARTITION)
        t->num_in_part[p]++;
    }

  return t;
}
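new_temp_expr_table relies on XCNEWVEC zero-filling its bitmap tables, so every slot starts out NULL and the individual bitmaps are only allocated on first use (see make_dependent_on_partition and add_to_partition_kill_list below). A minimal sketch of that lazy-table pattern, with purely illustrative names:

/* Sketch: a zero-filled table of lazily allocated bitmaps.  DEP_TABLE,
   init_dep_table and record_dep are illustrative names only.  */
static bitmap *dep_table;

static void
init_dep_table (unsigned n)
{
  /* XCNEWVEC zero-fills, so every slot starts out as a NULL bitmap.  */
  dep_table = XCNEWVEC (bitmap, n);
}

static void
record_dep (unsigned slot, unsigned bit)
{
  if (!dep_table[slot])
    dep_table[slot] = BITMAP_ALLOC (NULL);
  bitmap_set_bit (dep_table[slot], bit);
}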
Example #4
static void
mark_nodes_having_upstream_mem_writes (struct graph *rdg)
{
  int v, x;
  bitmap seen = BITMAP_ALLOC (NULL);

  for (v = rdg->n_vertices - 1; v >= 0; v--)
    if (!bitmap_bit_p (seen, v))
      {
	unsigned i;
	VEC (int, heap) *nodes = VEC_alloc (int, heap, 3);

	graphds_dfs (rdg, &v, 1, &nodes, false, NULL);

	FOR_EACH_VEC_ELT (int, nodes, i, x)
	  {
	    if (!bitmap_set_bit (seen, x))
	      continue;

	    if (RDG_MEM_WRITE_STMT (rdg, x)
		|| predecessor_has_mem_write (rdg, &(rdg->vertices[x]))
		/* In anti dependences the read should occur before
		   the write; this is why both the read and the write
		   should be placed in the same partition.  */
		|| has_anti_dependence (&(rdg->vertices[x])))
	      {
		bitmap_set_bit (upstream_mem_writes, x);
	      }
	  }

	VEC_free (int, heap, nodes);
      }
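The early-continue on bitmap_set_bit above works because bitmap_set_bit returns true only when the bit was not already set, which makes a bitmap a cheap de-duplication filter for worklists. A small sketch of that idiom (illustrative names, modern vec API assumed):

/* Sketch: queue each id at most once by testing the boolean result of
   bitmap_set_bit.  QUEUED and WORK are illustrative names.  */
static void
queue_once (bitmap queued, vec<int> &work, int id)
{
  /* bitmap_set_bit returns true only if the bit was previously clear.  */
  if (bitmap_set_bit (queued, id))
    work.safe_push (id);
}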
Example #5
static void
verify_flow_insensitive_alias_info (void)
{
  tree var;
  bitmap visited = BITMAP_ALLOC (NULL);
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      size_t j;
      var_ann_t ann;
      VEC(tree,gc) *may_aliases;
      tree alias;

      ann = var_ann (var);
      may_aliases = ann->may_aliases;

      for (j = 0; VEC_iterate (tree, may_aliases, j, alias); j++)
	{
	  bitmap_set_bit (visited, DECL_UID (alias));

	  if (!may_be_aliased (alias))
	    {
	      error ("non-addressable variable inside an alias set");
	      debug_variable (alias);
	      goto err;
	    }
	}
    }
Example #6
static void 
process_replaceable (temp_expr_table_p tab, gimple stmt)
{
  tree var, def, basevar;
  int version;
  ssa_op_iter iter;
  bitmap def_vars, use_vars;

#ifdef ENABLE_CHECKING
  gcc_assert (is_replaceable_p (stmt));
#endif

  def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF);
  version = SSA_NAME_VERSION (def);
  basevar = SSA_NAME_VAR (def);
  def_vars = BITMAP_ALLOC (NULL);

  bitmap_set_bit (def_vars, DECL_UID (basevar));

  /* Add this expression to the dependency list for each use partition.  */
  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
    {
      int var_version = SSA_NAME_VERSION (var);

      use_vars = tab->expr_decl_uids[var_version];
      add_dependence (tab, version, var);
      if (use_vars)
        {
	  bitmap_ior_into (def_vars, use_vars);
	  BITMAP_FREE (tab->expr_decl_uids[var_version]);
	}
      else
	bitmap_set_bit (def_vars, DECL_UID (SSA_NAME_VAR (var)));
    }
Example #7
static inline void
make_dependent_on_partition (temp_expr_table_p tab, int version, int p)
{
  if (!tab->partition_dependencies[version])
    tab->partition_dependencies[version] = BITMAP_ALLOC (NULL);

  bitmap_set_bit (tab->partition_dependencies[version], p);
}
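Tables of bitmaps allocated lazily like this are usually torn down in one pass; BITMAP_FREE both frees the bitmap and nulls the pointer, so a NULL check per slot is all that is needed. A hedged sketch (the table name and size are illustrative, not taken from the sources above):

/* Sketch: bulk cleanup of a table of lazily allocated bitmaps.
   DEPS and NUM_SLOTS are illustrative names only.  */
static void
free_dep_table (bitmap *deps, unsigned num_slots)
{
  for (unsigned i = 0; i < num_slots; i++)
    if (deps[i])
      BITMAP_FREE (deps[i]);   /* Frees the bitmap and nulls the slot.  */
  free (deps);
}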
Example #8
bitmap
lto_bitmap_alloc (void)
{
  if (!lto_obstack_initialized)
    {
      bitmap_obstack_initialize (&lto_obstack);
      lto_obstack_initialized = true;
    }
  return BITMAP_ALLOC (&lto_obstack);
}
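lto_bitmap_alloc lazily initializes a dedicated bitmap_obstack and hands out bitmaps from it. The advantage of a private obstack is that everything allocated on it can be released in one call; a minimal sketch of that pattern, assuming the standard bitmap.h obstack API and using illustrative names:

/* Sketch: allocate several bitmaps on a private obstack and release
   them all at once.  MY_OBSTACK and obstack_bitmap_demo are
   illustrative names only.  */
static bitmap_obstack my_obstack;

static void
obstack_bitmap_demo (void)
{
  bitmap_obstack_initialize (&my_obstack);

  bitmap a = BITMAP_ALLOC (&my_obstack);
  bitmap b = BITMAP_ALLOC (&my_obstack);
  bitmap_set_bit (a, 1);
  bitmap_set_bit (b, 2);

  /* Releasing the obstack frees every bitmap allocated on it, so no
     per-bitmap BITMAP_FREE calls are needed.  */
  bitmap_obstack_release (&my_obstack);
}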
Example #9
static inline void
add_to_partition_kill_list (temp_expr_table_p tab, int p, int ver)
{
  if (!tab->kill_list[p])
    {
      tab->kill_list[p] = BITMAP_ALLOC (NULL);
      bitmap_set_bit (tab->partition_in_use, p);
    }
  bitmap_set_bit (tab->kill_list[p], ver);
}
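For completeness, the usual lifecycle of a bitmap allocated on the default obstack — allocate, populate, query, iterate, free — looks roughly like this (a sketch with illustrative names, not code from the files above):

/* Sketch of the default-obstack bitmap lifecycle.  LIVE and
   bitmap_lifecycle_demo are illustrative names only.  */
static void
bitmap_lifecycle_demo (void)
{
  bitmap live = BITMAP_ALLOC (NULL);   /* NULL selects the default obstack.  */
  bitmap_iterator bi;
  unsigned i;

  bitmap_set_bit (live, 3);
  bitmap_set_bit (live, 17);

  if (bitmap_bit_p (live, 3))
    EXECUTE_IF_SET_IN_BITMAP (live, 0, i, bi)
      fprintf (stderr, "bit %u is set\n", i);

  BITMAP_FREE (live);
}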
Example #10
static void
tree_ssa_forward_propagate_single_use_vars (void)
{
  basic_block bb;
  varray_type vars_worklist, cond_worklist;

  vars = BITMAP_ALLOC (NULL);
  VARRAY_TREE_INIT (vars_worklist, 10, "VARS worklist");
  VARRAY_TREE_INIT (cond_worklist, 10, "COND worklist");

  /* Prime the COND_EXPR worklist by placing all the COND_EXPRs on the
     worklist.  */
  FOR_EACH_BB (bb)
    {
      tree last = last_stmt (bb);
      if (last && TREE_CODE (last) == COND_EXPR)
	VARRAY_PUSH_TREE (cond_worklist, last);
    }

  while (VARRAY_ACTIVE_SIZE (cond_worklist) > 0)
    {
      /* First get a list of all the interesting COND_EXPRs and potential
	 single use variables which feed those COND_EXPRs.  This will drain
	 COND_WORKLIST and initialize VARS_WORKLIST.  */
      record_single_argument_cond_exprs (cond_worklist, &vars_worklist, vars);

      if (VARRAY_ACTIVE_SIZE (vars_worklist) > 0)
	{
	  /* Now compute immediate uses for all the variables we care about.  */
	  compute_immediate_uses (TDFA_USE_OPS, need_imm_uses_for);

	  /* We've computed immediate uses, so we can/must clear the VARS
	     bitmap for the next iteration.  */
	  bitmap_clear (vars);

	  /* And optimize.  This will drain VARS_WORKLIST and initialize
	     COND_WORKLIST for the next iteration.  */
	  substitute_single_use_vars (&cond_worklist, vars_worklist);

	  /* We do not incrementally update the dataflow information
	     so we must free it here and recompute the necessary bits
	     on the next iteration.  If this turns out to be expensive,
	     methods for incrementally updating the dataflow are known.  */
	  free_df ();
	}
    }

  /* All done.  Clean up.  */
  BITMAP_FREE (vars);
}
Example #11
/* The init routine for analyzing global static variable usage.  See
   comments at top for description.  */
static void 
ipa_init (void) 
{
  struct cgraph_node *node;
  memory_identifier_string = build_string(7, "memory");

  reference_vars_to_consider =
    splay_tree_new_ggc (splay_tree_compare_ints);

  bitmap_obstack_initialize (&ipa_obstack);
  module_statics_escape = BITMAP_ALLOC (&ipa_obstack);
  module_statics_written = BITMAP_ALLOC (&ipa_obstack);
  all_module_statics = BITMAP_ALLOC (&ipa_obstack);

  /* This will add NODE->DECL to the splay trees.  */
  for (node = cgraph_nodes; node; node = node->next)
    has_proper_scope_for_analysis (node->decl);

  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();
}
Example #12
/* Fix the callsites and the call graph after function cloning was done.  */
static void
ipcp_update_callgraph (void)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed && ipcp_node_is_clone (node))
      {
	bitmap args_to_skip = NULL;
	struct cgraph_node *orig_node = ipcp_get_orig_node (node);
        struct ipa_node_params *info = IPA_NODE_REF (orig_node);
        int i, count = ipa_get_param_count (info);
        struct cgraph_edge *cs, *next;

	if (node->local.can_change_signature)
	  {
	    args_to_skip = BITMAP_ALLOC (NULL);
	    for (i = 0; i < count; i++)
	      {
		struct ipcp_lattice *lat = ipcp_get_lattice (info, i);

		/* We can proactively remove obviously unused arguments.  */
		if (!ipa_is_param_used (info, i))
		  {
		    bitmap_set_bit (args_to_skip, i);
		    continue;
		  }

		if (lat->type == IPA_CONST_VALUE)
		  bitmap_set_bit (args_to_skip, i);
	      }
	  }
	for (cs = node->callers; cs; cs = next)
	  {
	    next = cs->next_caller;
	    if (!ipcp_node_is_clone (cs->caller) && ipcp_need_redirect_p (cs))
	      {
		if (dump_file)
		  fprintf (dump_file, "Redirecting edge %s/%i -> %s/%i "
			   "back to %s/%i.",
			   cgraph_node_name (cs->caller), cs->caller->uid,
			   cgraph_node_name (cs->callee), cs->callee->uid,
			   cgraph_node_name (orig_node), orig_node->uid);
		cgraph_redirect_edge_callee (cs, orig_node);
	      }
	  }
      }
}
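args_to_skip is just a bitmap of parameter positions, so any consumer can recover the skipped indices with bitmap_bit_p. A small sketch of such a consumer (dump_skipped_params is a hypothetical helper, not part of the GCC IPA-CP code):

/* Sketch: report the parameter indices recorded in ARGS_TO_SKIP.
   dump_skipped_params is a hypothetical helper.  */
static void
dump_skipped_params (FILE *f, bitmap args_to_skip, int param_count)
{
  for (int i = 0; i < param_count; i++)
    if (args_to_skip && bitmap_bit_p (args_to_skip, i))
      fprintf (f, "parameter %d will be skipped\n", i);
}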
Example #13
static void
add_dependence (temp_expr_table_p tab, int version, tree var)
{
  int i;
  bitmap_iterator bi;
  unsigned x;

  i = SSA_NAME_VERSION (var);
  if (version_to_be_replaced_p (tab, i))
    {
      if (!bitmap_empty_p (tab->new_replaceable_dependencies))
        {
	  /* Version will now be killed by a write to any partition the 
	     substituted expression would have been killed by.  */
	  EXECUTE_IF_SET_IN_BITMAP (tab->new_replaceable_dependencies, 0, x, bi)
	    add_to_partition_kill_list (tab, x, version);

	  /* Rather than set partition_dependencies and in_use lists bit by 
	     bit, simply OR in the new_replaceable_dependencies bits.  */
	  if (!tab->partition_dependencies[version])
	    tab->partition_dependencies[version] = BITMAP_ALLOC (NULL);
	  bitmap_ior_into (tab->partition_dependencies[version], 
			   tab->new_replaceable_dependencies);
	  bitmap_ior_into (tab->partition_in_use, 
			   tab->new_replaceable_dependencies);
	  /* It is only necessary to add these once.  */
	  bitmap_clear (tab->new_replaceable_dependencies);
	}
    }
  else
    {
      i = var_to_partition (tab->map, var);
#ifdef ENABLE_CHECKING
      gcc_assert (i != NO_PARTITION);
      gcc_assert (tab->num_in_part[i] != 0);
#endif
      /* Only dependencies on ssa_names which are coalesced with something need
         to be tracked.  Partitions containing only a single SSA_NAME
	 *cannot* have their value changed.  */
      if (tab->num_in_part[i] > 1)
        {
	  add_to_partition_kill_list (tab, i, version);
	  make_dependent_on_partition (tab, version, i);
	}
    }
}
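As the comment in add_dependence notes, whole sets are merged with bitmap_ior_into instead of setting bits one at a time, and the source set is cleared once it has been folded in. A self-contained sketch of that merge-and-clear step (illustrative names only):

/* Sketch: fold the pending set into the accumulated set, then reset
   the pending set for the next round.  Illustrative names only.  */
static void
merge_pending (bitmap accumulated, bitmap pending)
{
  if (!bitmap_empty_p (pending))
    {
      bitmap_ior_into (accumulated, pending);
      bitmap_clear (pending);
    }
}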
Example #14
DEBUG_FUNCTION void
verify_ssaname_freelists (struct function *fun)
{
  if (!gimple_in_ssa_p (fun))
    return;

  bitmap names_in_il = BITMAP_ALLOC (NULL);

  /* Walk the entire IL noting every SSA_NAME we see.  */
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      tree t;
      /* First note the result and arguments of PHI nodes.  */
      for (gphi_iterator gsi = gsi_start_phis (bb);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  t = gimple_phi_result (phi);
	  bitmap_set_bit (names_in_il, SSA_NAME_VERSION (t));

	  for (unsigned int i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      t = gimple_phi_arg_def (phi, i);
	      if (TREE_CODE (t) == SSA_NAME)
		bitmap_set_bit (names_in_il, SSA_NAME_VERSION (t));
	    }
	}

      /* Then note the operands of each statement.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  ssa_op_iter iter;
	  gimple *stmt = gsi_stmt (gsi);
	  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
	    bitmap_set_bit (names_in_il, SSA_NAME_VERSION (t));
	}
    }
Example #15
static basic_block
nearest_common_dominator_of_uses (def_operand_p def_p, bool *debug_stmts)
{
  tree var = DEF_FROM_PTR (def_p);
  bitmap blocks = BITMAP_ALLOC (NULL);
  basic_block commondom;
  unsigned int j;
  bitmap_iterator bi;
  imm_use_iterator imm_iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      gimple *usestmt = USE_STMT (use_p);
      basic_block useblock;

      if (gphi *phi = dyn_cast <gphi *> (usestmt))
	{
	  int idx = PHI_ARG_INDEX_FROM_USE (use_p);

	  useblock = gimple_phi_arg_edge (phi, idx)->src;
	}
      else if (is_gimple_debug (usestmt))
	{
	  *debug_stmts = true;
	  continue;
	}
      else
	{
	  useblock = gimple_bb (usestmt);
	}

      /* Short circuit. Nothing dominates the entry block.  */
      if (useblock == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  BITMAP_FREE (blocks);
	  return NULL;
	}
      bitmap_set_bit (blocks, useblock->index);
    }
Example #16
void
find_comparison_dom_walker::before_dom_children (basic_block bb)
{
  struct comparison *last_cmp;
  rtx_insn *insn, *next, *last_clobber;
  bool last_cmp_valid;
  bool need_purge = false;
  bitmap killed;

  killed = BITMAP_ALLOC (NULL);

  /* The last comparison that was made.  Will be reset to NULL
     once the flags are clobbered.  */
  last_cmp = NULL;

  /* True iff the last comparison has not been clobbered, nor
     have its inputs.  Used to eliminate duplicate compares.  */
  last_cmp_valid = false;

  /* The last insn that clobbered the flags, if that insn is of
     a form that may be valid for eliminating a following compare.
     To be reset to NULL once the flags are set otherwise.  */
  last_clobber = NULL;

  /* Propagate the last live comparison throughout the extended basic block. */
  if (single_pred_p (bb))
    {
      last_cmp = (struct comparison *) single_pred (bb)->aux;
      if (last_cmp)
	last_cmp_valid = last_cmp->inputs_valid;
    }

  for (insn = BB_HEAD (bb); insn; insn = next)
    {
      rtx src;

      next = (insn == BB_END (bb) ? NULL : NEXT_INSN (insn));
      if (!NONDEBUG_INSN_P (insn))
	continue;

      /* Compute the set of registers modified by this instruction.  */
      bitmap_clear (killed);
      df_simulate_find_defs (insn, killed);

      src = conforming_compare (insn);
      if (src)
	{
	  rtx eh_note = NULL;

	  if (cfun->can_throw_non_call_exceptions)
	    eh_note = find_reg_note (insn, REG_EH_REGION, NULL);

	  if (last_cmp_valid && can_eliminate_compare (src, eh_note, last_cmp))
	    {
	      if (eh_note)
		need_purge = true;
	      delete_insn (insn);
	      continue;
	    }

	  last_cmp = XCNEW (struct comparison);
	  last_cmp->insn = insn;
	  last_cmp->prev_clobber = last_clobber;
	  last_cmp->in_a = XEXP (src, 0);
	  last_cmp->in_b = XEXP (src, 1);
	  last_cmp->eh_note = eh_note;
	  last_cmp->orig_mode = GET_MODE (src);
	  all_compares.safe_push (last_cmp);

	  /* It's unusual, but be prepared for comparison patterns that
	     also clobber an input, or perhaps a scratch.  */
	  last_clobber = NULL;
	  last_cmp_valid = true;
	}

      /* Notice if this instruction kills the flags register.  */
      else if (bitmap_bit_p (killed, targetm.flags_regnum))
	{
	  /* See if this insn could be the "clobber" that eliminates
	     a future comparison.   */
	  last_clobber = (arithmetic_flags_clobber_p (insn) ? insn : NULL);

	  /* In either case, the previous compare is no longer valid.  */
	  last_cmp = NULL;
	  last_cmp_valid = false;
	}

      /* Notice if this instruction uses the flags register.  */
      else if (last_cmp)
	find_flags_uses_in_insn (last_cmp, insn);

      /* Notice if any of the inputs to the comparison have changed.  */
      if (last_cmp_valid
	  && (bitmap_bit_p (killed, REGNO (last_cmp->in_a))
	      || (REG_P (last_cmp->in_b)
		  && bitmap_bit_p (killed, REGNO (last_cmp->in_b)))))
	last_cmp_valid = false;
    }
Example #17
unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (ptr, object_size_type);
  else if (TREE_CODE (ptr) == CALL_EXPR)
    {
      tree arg = pass_through_call (ptr);

      if (arg)
	return compute_builtin_object_size (arg, object_size_type);
      else
	return alloc_object_size (ptr, object_size_type);
    }
  else if (TREE_CODE (ptr) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (ptr))
	   && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
	{
	  struct object_size_info osi;
	  bitmap_iterator bi;
	  unsigned int i;

	  if (dump_file)
	    {
	      fprintf (dump_file, "Computing %s %sobject size for ",
		       (object_size_type & 2) ? "minimum" : "maximum",
		       (object_size_type & 1) ? "sub" : "");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, ":\n");
	    }

	  osi.visited = BITMAP_ALLOC (NULL);
	  osi.reexamine = BITMAP_ALLOC (NULL);
	  osi.object_size_type = object_size_type;
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;

	  /* First pass: walk UD chains, compute object sizes that
	     can be computed.  osi.reexamine bitmap at the end will
	     contain what variables were found in dependency cycles
	     and therefore need to be reexamined.  */
	  osi.pass = 0;
	  osi.changed = false;
	  collect_object_sizes_for (&osi, ptr);

	  /* Second pass: keep recomputing object sizes of variables
	     that need reexamination, until no object sizes are
	     increased or all object sizes are computed.  */
	  if (! bitmap_empty_p (osi.reexamine))
	    {
	      bitmap reexamine = BITMAP_ALLOC (NULL);

	      /* If looking for minimum instead of maximum object size,
		 detect cases where a pointer is increased in a loop.
		 Although even without this detection pass 2 would eventually
		 terminate, it could take a long time.  If a pointer is
		 increasing this way, we need to assume 0 object size.
		 E.g. p = &buf[0]; while (cond) p = p + 4;  */
	      if (object_size_type & 2)
		{
		  osi.depths = xcalloc (num_ssa_names, sizeof (unsigned int));
		  osi.stack = xmalloc (num_ssa_names * sizeof (unsigned int));
		  osi.tos = osi.stack;
		  osi.pass = 1;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      check_for_plus_in_loops (&osi, ssa_name (i));

		  free (osi.depths);
		  osi.depths = NULL;
		  free (osi.stack);
		  osi.stack = NULL;
		  osi.tos = NULL;
		}

	      do
		{
		  osi.pass = 2;
		  osi.changed = false;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      {
			collect_object_sizes_for (&osi, ssa_name (i));
			if (dump_file && (dump_flags & TDF_DETAILS))
			  {
			    fprintf (dump_file, "Reexamining ");
			    print_generic_expr (dump_file, ssa_name (i),
						dump_flags);
			    fprintf (dump_file, "\n");
			  }
		      }
		}
	      while (osi.changed);

	      BITMAP_FREE (reexamine);
	    }
	  EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	    bitmap_set_bit (computed[object_size_type], i);

	  /* Debugging dumps.  */
	  if (dump_file)
	    {
	      EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
		if (object_sizes[object_size_type][i]
		    != unknown[object_size_type])
		  {
		    print_generic_expr (dump_file, ssa_name (i),
					dump_flags);
		    fprintf (dump_file,
			     ": %s %sobject size "
			     HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			     (object_size_type & 2) ? "minimum" : "maximum",
			     (object_size_type & 1) ? "sub" : "",
			     object_sizes[object_size_type][i]);
		  }
	    }

	  BITMAP_FREE (osi.reexamine);
	  BITMAP_FREE (osi.visited);
	}
Example #18
void
find_comparison_dom_walker::before_dom_children (basic_block bb)
{
  struct comparison *last_cmp;
  rtx insn, next, last_clobber;
  bool last_cmp_valid;
  bool need_purge = false;
  bitmap killed;

  killed = BITMAP_ALLOC (NULL);

  /* The last comparison that was made.  Will be reset to NULL
     once the flags are clobbered.  */
  last_cmp = NULL;

  /* True iff the last comparison has not been clobbered, nor
     have its inputs.  Used to eliminate duplicate compares.  */
  last_cmp_valid = false;

  /* The last insn that clobbered the flags, if that insn is of
     a form that may be valid for eliminating a following compare.
     To be reset to NULL once the flags are set otherwise.  */
  last_clobber = NULL;

  /* Propagate the last live comparison throughout the extended basic block. */
  if (single_pred_p (bb))
    {
      last_cmp = (struct comparison *) single_pred (bb)->aux;
      if (last_cmp)
	last_cmp_valid = last_cmp->inputs_valid;
    }

  for (insn = BB_HEAD (bb); insn; insn = next)
    {
      rtx src;

      next = (insn == BB_END (bb) ? NULL_RTX : NEXT_INSN (insn));
      if (!NONDEBUG_INSN_P (insn))
	continue;

      /* Compute the set of registers modified by this instruction.  */
      bitmap_clear (killed);
      df_simulate_find_defs (insn, killed);

      src = conforming_compare (insn);
      if (src)
	{
	  enum machine_mode src_mode = GET_MODE (src);
	  rtx eh_note = NULL;

	  if (flag_non_call_exceptions)
	    eh_note = find_reg_note (insn, REG_EH_REGION, NULL);

	  if (!last_cmp_valid)
	    goto dont_delete;

	  /* Take care that it's in the same EH region.  */
	  if (flag_non_call_exceptions
	      && !rtx_equal_p (eh_note, last_cmp->eh_note))
	    goto dont_delete;

	  /* Make sure the compare is redundant with the previous.  */
	  if (!rtx_equal_p (last_cmp->in_a, XEXP (src, 0))
	      || !rtx_equal_p (last_cmp->in_b, XEXP (src, 1)))
	    goto dont_delete;

	  /* New mode must be compatible with the previous compare mode.  */
	  {
	    enum machine_mode new_mode
	      = targetm.cc_modes_compatible (last_cmp->orig_mode, src_mode);
	    if (new_mode == VOIDmode)
	      goto dont_delete;

	    if (new_mode != last_cmp->orig_mode)
	      {
		rtx x, flags = gen_rtx_REG (src_mode, targetm.flags_regnum);

		/* Generate new comparison for substitution.  */
		x = gen_rtx_COMPARE (new_mode, XEXP (src, 0), XEXP (src, 1));
		x = gen_rtx_SET (VOIDmode, flags, x);

		if (!validate_change (last_cmp->insn,
				      &PATTERN (last_cmp->insn), x, false))
		  goto dont_delete;

		last_cmp->orig_mode = new_mode;
	      }
	  }

	  /* All tests and substitutions succeeded!  */
	  if (eh_note)
	    need_purge = true;
	  delete_insn (insn);
	  continue;

	dont_delete:
	  last_cmp = XCNEW (struct comparison);
	  last_cmp->insn = insn;
	  last_cmp->prev_clobber = last_clobber;
	  last_cmp->in_a = XEXP (src, 0);
	  last_cmp->in_b = XEXP (src, 1);
	  last_cmp->eh_note = eh_note;
	  last_cmp->orig_mode = src_mode;
	  all_compares.safe_push (last_cmp);

	  /* It's unusual, but be prepared for comparison patterns that
	     also clobber an input, or perhaps a scratch.  */
	  last_clobber = NULL;
	  last_cmp_valid = true;
	}

      /* Notice if this instruction kills the flags register.  */
      else if (bitmap_bit_p (killed, targetm.flags_regnum))
	{
	  /* See if this insn could be the "clobber" that eliminates
	     a future comparison.   */
	  last_clobber = (arithmetic_flags_clobber_p (insn) ? insn : NULL);

	  /* In either case, the previous compare is no longer valid.  */
	  last_cmp = NULL;
	  last_cmp_valid = false;
	  continue;
	}

      /* Notice if this instruction uses the flags register.  */
      else if (last_cmp)
	find_flags_uses_in_insn (last_cmp, insn);

      /* Notice if any of the inputs to the comparison have changed.  */
      if (last_cmp_valid
	  && (bitmap_bit_p (killed, REGNO (last_cmp->in_a))
	      || (REG_P (last_cmp->in_b)
		  && bitmap_bit_p (killed, REGNO (last_cmp->in_b)))))
	last_cmp_valid = false;
    }
Example #19
static void
initialize_uninitialized_regs (void)
{
  basic_block bb;
  bitmap already_genned = BITMAP_ALLOC (NULL);

  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

  df_analyze ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      bitmap lr = DF_LR_IN (bb);
      bitmap ur = DF_LIVE_IN (bb);
      bitmap_clear (already_genned);

      FOR_BB_INSNS (bb, insn)
	{
	  df_ref use;
	  if (!NONDEBUG_INSN_P (insn))
	    continue;

	  FOR_EACH_INSN_USE (use, insn)
	    {
	      unsigned int regno = DF_REF_REGNO (use);

	      /* Only do this for the pseudos.  */
	      if (regno < FIRST_PSEUDO_REGISTER)
		continue;

	      /* Do not generate multiple moves for the same regno.
		 This is common for sequences of subreg operations.
		 They would be deleted during combine but there is no
		 reason to churn the system.  */
	      if (bitmap_bit_p (already_genned, regno))
		continue;

	      /* A use is MUST uninitialized if it reaches the top of
		 the block from the inside of the block (the lr test)
		 and no def for it reaches the top of the block from
		 outside of the block (the ur test).  */
	      if (bitmap_bit_p (lr, regno)
		  && (!bitmap_bit_p (ur, regno)))
		{
		  rtx_insn *move_insn;
		  rtx reg = DF_REF_REAL_REG (use);

		  bitmap_set_bit (already_genned, regno);

		  start_sequence ();
		  emit_move_insn (reg, CONST0_RTX (GET_MODE (reg)));
		  move_insn = get_insns ();
		  end_sequence ();
		  emit_insn_before (move_insn, insn);
		  if (dump_file)
		    fprintf (dump_file,
			     "adding initialization in %s of reg %d at in block %d for insn %d.\n",
			     current_function_name (), regno, bb->index,
			     INSN_UID (insn));
		}
	    }
Example #20
/* Propagate the constant parameters found by ipcp_iterate_stage()
   to the function's code.  */
static void
ipcp_insert_stage (void)
{
  struct cgraph_node *node, *node1 = NULL;
  int i;
  VEC (cgraph_edge_p, heap) * redirect_callers;
  VEC (ipa_replace_map_p,gc)* replace_trees;
  int node_callers, count;
  tree parm_tree;
  struct ipa_replace_map *replace_param;
  fibheap_t heap;
  long overall_size = 0, new_size = 0;
  long max_new_size;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  if (dump_file)
    fprintf (dump_file, "\nIPA insert stage:\n\n");

  dead_nodes = BITMAP_ALLOC (NULL);

  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed)
      {
	if (node->count > max_count)
	  max_count = node->count;
	overall_size += node->local.inline_summary.self_size;
      }

  max_new_size = overall_size;
  if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
    max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
  max_new_size = max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;

  /* First collect all functions we proved to have constant arguments to
     heap.  */
  heap = fibheap_new ();
  for (node = cgraph_nodes; node; node = node->next)
    {
      struct ipa_node_params *info;
      /* Propagation of the constant is forbidden in certain conditions.  */
      if (!node->analyzed || !ipcp_node_modifiable_p (node))
	  continue;
      info = IPA_NODE_REF (node);
      if (ipa_is_called_with_var_arguments (info))
	continue;
      if (ipcp_const_param_count (node))
	node->aux = fibheap_insert (heap, ipcp_estimate_cloning_cost (node),
				    node);
     }

  /* Now clone in priority order until code size growth limits are met or
     heap is emptied.  */
  while (!fibheap_empty (heap))
    {
      struct ipa_node_params *info;
      int growth = 0;
      bitmap args_to_skip;
      struct cgraph_edge *cs;

      node = (struct cgraph_node *)fibheap_extract_min (heap);
      node->aux = NULL;
      if (dump_file)
	fprintf (dump_file, "considering function %s\n",
		 cgraph_node_name (node));

      growth = ipcp_estimate_growth (node);

      if (new_size + growth > max_new_size)
	break;
      if (growth
	  && optimize_function_for_size_p (DECL_STRUCT_FUNCTION (node->decl)))
	{
	  if (dump_file)
	    fprintf (dump_file, "Not versioning, cold code would grow");
	  continue;
	}

      info = IPA_NODE_REF (node);
      count = ipa_get_param_count (info);

      replace_trees = VEC_alloc (ipa_replace_map_p, gc, 1);

      if (node->local.can_change_signature)
	args_to_skip = BITMAP_GGC_ALLOC ();
      else
	args_to_skip = NULL;
      for (i = 0; i < count; i++)
	{
	  struct ipcp_lattice *lat = ipcp_get_lattice (info, i);
	  parm_tree = ipa_get_param (info, i);

	  /* We can proactively remove obviously unused arguments.  */
	  if (!ipa_is_param_used (info, i))
	    {
	      if (args_to_skip)
	        bitmap_set_bit (args_to_skip, i);
	      continue;
	    }

	  if (lat->type == IPA_CONST_VALUE)
	    {
	      replace_param =
		ipcp_create_replace_map (parm_tree, lat);
	      if (replace_param == NULL)
		break;
	      VEC_safe_push (ipa_replace_map_p, gc, replace_trees, replace_param);
	      if (args_to_skip)
	        bitmap_set_bit (args_to_skip, i);
	    }
	}
      if (i < count)
	{
	  if (dump_file)
	    fprintf (dump_file, "Not versioning, some parameters couldn't be replaced");
	  continue;
	}

      new_size += growth;

      /* Look if original function becomes dead after cloning.  */
      for (cs = node->callers; cs != NULL; cs = cs->next_caller)
	if (cs->caller == node || ipcp_need_redirect_p (cs))
	  break;
      if (!cs && cgraph_will_be_removed_from_program_if_no_direct_calls (node))
	bitmap_set_bit (dead_nodes, node->uid);

      /* Compute how many callers node has.  */
      node_callers = 0;
      for (cs = node->callers; cs != NULL; cs = cs->next_caller)
	node_callers++;
      redirect_callers = VEC_alloc (cgraph_edge_p, heap, node_callers);
      for (cs = node->callers; cs != NULL; cs = cs->next_caller)
	if (!cs->indirect_inlining_edge)
	  VEC_quick_push (cgraph_edge_p, redirect_callers, cs);

      /* Redirecting all the callers of the node to the
         new versioned node.  */
      node1 =
	cgraph_create_virtual_clone (node, redirect_callers, replace_trees,
				     args_to_skip, "constprop");
      args_to_skip = NULL;
      VEC_free (cgraph_edge_p, heap, redirect_callers);
      replace_trees = NULL;

      if (node1 == NULL)
	continue;
      ipcp_process_devirtualization_opportunities (node1);

      if (dump_file)
	fprintf (dump_file, "versioned function %s with growth %i, overall %i\n",
		 cgraph_node_name (node), (int)growth, (int)new_size);
      ipcp_init_cloned_node (node, node1);

      info = IPA_NODE_REF (node);
      for (i = 0; i < count; i++)
	{
	  struct ipcp_lattice *lat = ipcp_get_lattice (info, i);
	  if (lat->type == IPA_CONST_VALUE)
	    ipcp_discover_new_direct_edges (node1, i, lat->constant);
        }

      if (dump_file)
	dump_function_to_file (node1->decl, dump_file, dump_flags);

      for (cs = node->callees; cs; cs = cs->next_callee)
        if (cs->callee->aux)
	  {
	    fibheap_delete_node (heap, (fibnode_t) cs->callee->aux);
	    cs->callee->aux = fibheap_insert (heap,
	    				      ipcp_estimate_cloning_cost (cs->callee),
					      cs->callee);
	  }
    }

  while (!fibheap_empty (heap))
    {
      if (dump_file)
	fprintf (dump_file, "skipping function %s\n",
		 cgraph_node_name (node));
      node = (struct cgraph_node *) fibheap_extract_min (heap);
      node->aux = NULL;
    }
  fibheap_delete (heap);
  BITMAP_FREE (dead_nodes);
  ipcp_update_callgraph ();
  ipcp_update_profiling ();
}
Example #21
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static void
analyze_function (struct cgraph_node *fn)
{
  ipa_reference_vars_info_t info 
    = XCNEW (struct ipa_reference_vars_info_d);
  ipa_reference_local_vars_info_t l
    = XCNEW (struct ipa_reference_local_vars_info_d);
  tree decl = fn->decl;

  /* Add the info to the tree's annotation.  */
  get_function_ann (fn->decl)->reference_vars_info = info;

  info->local = l;
  l->statics_read = BITMAP_ALLOC (&ipa_obstack);
  l->statics_written = BITMAP_ALLOC (&ipa_obstack);

  if (dump_file)
    fprintf (dump_file, "\n local analysis of %s\n", cgraph_node_name (fn));
  
  {
    struct function *this_cfun = DECL_STRUCT_FUNCTION (decl);
    basic_block this_block;

    FOR_EACH_BB_FN (this_block, this_cfun)
      {
	block_stmt_iterator bsi;
	tree phi, op;
	use_operand_p use;
	ssa_op_iter iter;
Example #22
bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     unsigned HOST_WIDE_INT *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
      return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      if (tree_fits_shwi_p (offset)
		  && compute_builtin_object_size (ptr, object_size_type, psize))
		{
		  /* Return zero when the offset is out of bounds.  */
		  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
		  *psize = off < *psize ? *psize - off : 0;
		  return true;
		}
	    }
	}
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      if (num_ssa_names > object_sizes[object_size_type].length ())
	object_sizes[object_size_type].safe_grow (num_ssa_names);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %sobject size for ",
		   (object_size_type & 2) ? "minimum" : "maximum",
		   (object_size_type & 1) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
	 can be computed.  osi.reexamine bitmap at the end will
	 contain what variables were found in dependency cycles
	 and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & 2)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}
Example #23
extern void debug_ter (FILE *, temp_expr_table *);


/* Create a new TER table for MAP.  */

static temp_expr_table *
new_temp_expr_table (var_map map)
{
  temp_expr_table *t = XNEW (struct temp_expr_table);
  t->map = map;

  t->partition_dependencies = XCNEWVEC (bitmap, num_ssa_names + 1);
  t->expr_decl_uids = XCNEWVEC (bitmap, num_ssa_names + 1);
  t->kill_list = XCNEWVEC (bitmap, num_var_partitions (map) + 1);

  t->partition_in_use = BITMAP_ALLOC (&ter_bitmap_obstack);

  t->virtual_partition = num_var_partitions (map);
  t->new_replaceable_dependencies = BITMAP_ALLOC (&ter_bitmap_obstack);

  t->replaceable_expressions = NULL;
  t->num_in_part = XCNEWVEC (int, num_var_partitions (map));

  unsigned x;
  tree name;

  FOR_EACH_SSA_NAME (x, name, cfun)
    {
      int p;
      p = var_to_partition (map, name);
      if (p != NO_PARTITION)
Example #24
File: sese.c  Project: zh9118/gcc
    sese_build_liveouts_bb (region, liveouts, bb);
  if (MAY_HAVE_DEBUG_STMTS)
    FOR_EACH_BB_FN (bb, cfun)
      sese_reset_debug_liveouts_bb (region, liveouts, bb);
}

/* Builds a new SESE region from edges ENTRY and EXIT.  */

sese
new_sese (edge entry, edge exit)
{
  sese region = XNEW (struct sese_s);

  SESE_ENTRY (region) = entry;
  SESE_EXIT (region) = exit;
  SESE_LOOPS (region) = BITMAP_ALLOC (NULL);
  SESE_LOOP_NEST (region).create (3);
  SESE_ADD_PARAMS (region) = true;
  SESE_PARAMS (region).create (3);
  region->parameter_rename_map = new parameter_rename_map_t;

  return region;
}

/* Deletes REGION.  */

void
free_sese (sese region)
{
  if (SESE_LOOPS (region))
    SESE_LOOPS (region) = BITMAP_ALLOC (NULL);
Example #25
      /* Then note the operands of each statement.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  ssa_op_iter iter;
	  gimple *stmt = gsi_stmt (gsi);
	  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
	    bitmap_set_bit (names_in_il, SSA_NAME_VERSION (t));
	}
    }

  /* Now walk the free list noting what we find there and verifying
     there are no duplicates.  */
  bitmap names_in_freelists = BITMAP_ALLOC (NULL);
  if (FREE_SSANAMES (fun))
    {
      for (unsigned int i = 0; i < FREE_SSANAMES (fun)->length (); i++)
	{
	  tree t = (*FREE_SSANAMES (fun))[i];

	  /* Verify that the name is marked as being in the free list.  */
	  gcc_assert (SSA_NAME_IN_FREE_LIST (t));

	  /* Verify the name has not already appeared in the free list and
	     note it in the list of names found in the free list.  */
	  gcc_assert (!bitmap_bit_p (names_in_freelists, SSA_NAME_VERSION (t)));
	  bitmap_set_bit (names_in_freelists, SSA_NAME_VERSION (t));
	}
    }