Example #1
0
/* Return true if we should ignore the basic block for purposes of tracing.  */
static bool
ignore_bb_p (const_basic_block bb)
{
  if (bb->index < NUM_FIXED_BLOCKS)
    return true;
  if (optimize_bb_for_size_p (bb))
    return true;

  if (gimple *g = last_stmt (CONST_CAST_BB (bb)))
    {
      /* A transaction is a single entry multiple exit region.  It
	 must be duplicated in its entirety or not at all.  */
      if (gimple_code (g) == GIMPLE_TRANSACTION)
	return true;

      /* An IFN_UNIQUE call must be duplicated as part of its group,
	 or not at all.  */
      if (is_gimple_call (g)
	  && gimple_call_internal_p (g)
	  && gimple_call_internal_unique_p (g))
	return true;
    }

  return false;
}
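Every example in this collection is built on the same idiom: walk each basic block of the current function, iterate over its GIMPLE statements, and test each one with is_gimple_call. A minimal sketch of that skeleton, assuming the usual GCC plugin headers (the function name is hypothetical):

#include "gcc-plugin.h"
#include "tree.h"
#include "gimple.h"
#include "gimple-iterator.h"

/* Count the call statements in the current function (cfun).  */
static unsigned int
count_calls_in_cfun (void)
{
  basic_block bb;
  unsigned int ncalls = 0;

  FOR_EACH_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	 gsi_next (&gsi))
      if (is_gimple_call (gsi_stmt (gsi)))
	ncalls++;
  return ncalls;
}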
Example #2
0
static void
set_callers_may_not_allocate_frame (function *fn)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  tree called_fn_tree;
  function *called_fn;

  if (fn->cfg == NULL)
    return;

  FOR_EACH_BB_FN (bb, fn)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (is_gimple_call (stmt))
	    {
	      called_fn_tree = gimple_call_fndecl (stmt);
	      if (called_fn_tree != NULL)
	        {
	          called_fn = DECL_STRUCT_FUNCTION (called_fn_tree);
		  if (called_fn != NULL)
		    called_fn->machine->callers_may_not_allocate_frame = true;
	        }
            }
        }
    }
  return;
}
Example #3
0
static void
update_call_edge_frequencies (gimple_seq_node first, basic_block bb)
{
  struct cgraph_node *cfun_node = NULL;
  int bb_freq = 0;
  gimple_seq_node n;

  for (n = first; n ; n = n->next)
    if (is_gimple_call (n))
      {
	struct cgraph_edge *e;

	/* These function calls are expensive enough that we want
	   to avoid calling them if we never see any calls.  */
	if (cfun_node == NULL)
	  {
	    cfun_node = cgraph_node::get (current_function_decl);
	    bb_freq = (compute_call_stmt_bb_frequency
		       (current_function_decl, bb));
	  }

	e = cfun_node->get_edge (n);
	if (e != NULL)
	  e->frequency = bb_freq;
      }
}
Example #4
0
static unsigned int
execute_return_slot_opt (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool slot_opt_p;

	  if (is_gimple_call (stmt)
	      && gimple_call_lhs (stmt)
	      && !gimple_call_return_slot_opt_p (stmt)
	      && aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),
				    gimple_call_fndecl (stmt)))
	    {
	      /* Check if the location being assigned to is
	         clobbered by the call.  */
	      slot_opt_p = dest_safe_for_nrv_p (stmt);
	      gimple_call_set_return_slot_opt (stmt, slot_opt_p);
	    }
	}
    }

  return 0;
}
Example #5
0
static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_has_side_effects (stmt)
	  || gimple_uses_undefined_value_p (stmt)
	  || gimple_could_trap_p (stmt)
	  || gimple_vuse (stmt)
	  /* const calls don't match any of the above, yet they could
	     still have some side-effects - they could contain
	     gimple_could_trap_p statements, like floating point
	     exceptions or integer division by zero.  See PR70586.
	     FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
	     should handle this.  */
	  || is_gimple_call (stmt))
	return false;
    }

  return true;
}
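For illustration, a hedged sketch (hypothetical function) of source whose inner condition block this predicate rejects: the load of *p carries a virtual use and the division may trap, so neither statement can be evaluated speculatively.

/* The block computing the second operand of && fails
   bb_no_side_effects_p: "*p" has a gimple_vuse and "a / b"
   satisfies gimple_could_trap_p.  */
static int
rejected_inner_cond (int a, int b, int *p)
{
  return *p != 0 && a / b > 1;
}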
Example #6
0
static void
gen_shrink_wrap_conditions (gimple bi_call, vec<gimple> conds,
                            unsigned int *nconds)
{
  gimple call;
  tree fn;
  enum built_in_function fnc;

  gcc_assert (nconds && conds.exists ());
  gcc_assert (conds.length () == 0);
  gcc_assert (is_gimple_call (bi_call));

  call = bi_call;
  fn = gimple_call_fndecl (call);
  gcc_assert (fn && DECL_BUILT_IN (fn));
  fnc = DECL_FUNCTION_CODE (fn);
  *nconds = 0;

  if (fnc == BUILT_IN_POW)
    gen_conditions_for_pow (call, conds, nconds);
  else
    {
      tree arg;
      inp_domain domain = get_no_error_domain (fnc);
      *nconds = 0;
      arg = gimple_call_arg (bi_call, 0);
      gen_conditions_for_domain (arg, domain, conds, nconds);
    }

  return;
}
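For context, these conditions shrink-wrap a conditionally dead call to a math built-in, keeping the call only where it can still set errno or raise an exception. A hedged source-level sketch of the intended effect (function names and the log domain are illustrative):

#include <math.h>

/* Before: the result of log is unused, so the call is dead except
   for its errno/exception side effect.  */
void
before (double x)
{
  log (x);
}

/* After, conceptually: the call survives only outside the no-error
   input domain that get_no_error_domain computes for log.  */
void
after (double x)
{
  if (x <= 0.0)
    log (x);
}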
Example #7
0
static unsigned int execute_stackleak_tree_instrument(void)
{
	basic_block bb, entry_bb;
	bool prologue_instrumented = false, is_leaf = true;

	entry_bb = ENTRY_BLOCK_PTR_FOR_FN(cfun)->next_bb;

	// 1. loop through BBs and GIMPLE statements
	FOR_EACH_BB_FN(bb, cfun) {
		gimple_stmt_iterator gsi;

		for (gsi = gsi_start_bb(bb); !gsi_end_p(gsi); gsi_next(&gsi)) {
			gimple stmt;

			stmt = gsi_stmt(gsi);

			if (is_gimple_call(stmt))
				is_leaf = false;

			// gimple match: align 8 built-in BUILT_IN_NORMAL:BUILT_IN_ALLOCA attributes <tree_list 0xb7576450>
			if (!is_alloca(stmt))
				continue;

			// 2. insert stack overflow check before each __builtin_alloca call
			stackleak_check_alloca(&gsi);

			// 3. insert track call after each __builtin_alloca call
			stackleak_add_instrumentation(&gsi);
			if (bb == entry_bb)
				prologue_instrumented = true;
		}
	}

	// the full pass also instruments the function prologue when
	// !prologue_instrumented; this excerpt just returns
	return 0;
}
Example #8
0
static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
				 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	arg1 = 0;
      default:
	break;
      }

  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int)gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
	fold_convert (sizetype, gimple_call_arg (call, arg1)),
	fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
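The alloc_size attribute parsed above uses 1-based argument positions, which is why arg1 and arg2 are computed with a "-1". A hedged declaration-level example (hypothetical allocator):

#include <stddef.h>

/* Position 1 is the element count, position 2 the element size, so
   alloc_object_size can evaluate nmemb * size when both call
   arguments are INTEGER_CSTs.  */
void *my_calloc (size_t nmemb, size_t size)
  __attribute__ ((alloc_size (1, 2)));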
Example #9
0
static bool
should_duplicate_loop_header_p (basic_block header, struct loop *loop,
				int *limit)
{
  gimple_stmt_iterator bsi;
  gimple last;

  /* Do not copy one block more than once (we do not really want to do
     loop peeling here).  */
  if (header->aux)
    return false;

  /* Loop header copying usually increases size of the code.  This used not to
     be true, since quite often it is possible to verify that the condition is
     satisfied in the first iteration and therefore to eliminate it.  Jump
     threading handles these cases now.  */
  if (optimize_loop_for_size_p (loop))
    return false;

  gcc_assert (EDGE_COUNT (header->succs) > 0);
  if (single_succ_p (header))
    return false;
  if (flow_bb_inside_loop_p (loop, EDGE_SUCC (header, 0)->dest)
      && flow_bb_inside_loop_p (loop, EDGE_SUCC (header, 1)->dest))
    return false;

  /* If this is not the original loop header, we want it to have just
     one predecessor in order to match the && pattern.  */
  if (header != loop->header && !single_pred_p (header))
    return false;

  last = last_stmt (header);
  if (gimple_code (last) != GIMPLE_COND)
    return false;

  /* Approximately copy the conditions that used to be used in jump.c --
     at most 20 insns and no calls.  */
  for (bsi = gsi_start_bb (header); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      last = gsi_stmt (bsi);

      if (gimple_code (last) == GIMPLE_LABEL)
	continue;

      if (is_gimple_debug (last))
	continue;

      if (is_gimple_call (last))
	return false;

      *limit -= estimate_num_insns (last, &eni_size_weights);
      if (*limit < 0)
	return false;
    }

  return true;
}
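A hedged sketch of the kind of loop this predicate targets: the header and its single-predecessor successor each test one operand of a short-circuit condition (the "&& pattern" mentioned above), and copying that chain ahead of the loop often lets the entry test fold away.

/* Hypothetical loop: the header tests "i < n"; the following block,
   whose only predecessor is the header, tests "a[i] != 0".  */
static unsigned int
count_prefix (const int *a, unsigned int n)
{
  unsigned int i = 0;
  while (i < n && a[i] != 0)
    i++;
  return i;
}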
Example #10
0
static void
adjust_simduid_builtins (hash_table<simduid_to_vf> *htab)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  unsigned int vf = 1;
	  enum internal_fn ifn;
	  gimple stmt = gsi_stmt (i);
	  tree t;
	  if (!is_gimple_call (stmt)
	      || !gimple_call_internal_p (stmt))
	    continue;
	  ifn = gimple_call_internal_fn (stmt);
	  switch (ifn)
	    {
	    case IFN_GOMP_SIMD_LANE:
	    case IFN_GOMP_SIMD_VF:
	    case IFN_GOMP_SIMD_LAST_LANE:
	      break;
	    default:
	      continue;
	    }
	  tree arg = gimple_call_arg (stmt, 0);
	  gcc_assert (arg != NULL_TREE);
	  gcc_assert (TREE_CODE (arg) == SSA_NAME);
	  simduid_to_vf *p = NULL, data;
	  data.simduid = DECL_UID (SSA_NAME_VAR (arg));
	  if (htab)
	    {
	      p = htab->find (&data);
	      if (p)
		vf = p->vf;
	    }
	  switch (ifn)
	    {
	    case IFN_GOMP_SIMD_VF:
	      t = build_int_cst (unsigned_type_node, vf);
	      break;
	    case IFN_GOMP_SIMD_LANE:
	      t = build_int_cst (unsigned_type_node, 0);
	      break;
	    case IFN_GOMP_SIMD_LAST_LANE:
	      t = gimple_call_arg (stmt, 1);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  update_call_from_tree (&i, t);
	}
    }
}
Example #11
0
static unsigned int slimer_exec(void)
{
    basic_block bb;
    gimple stmt;
    gimple_stmt_iterator gsi;

    if (has_been_processed(cfun->decl))
      return 0;

    if (DECL_EXTERNAL(cfun->decl))
      return 0;

    if (get_identifier(get_name(cfun->decl)) == get_identifier("main"))
      insert_slimer_init();

    /* Go through the basic blocks of this function */
    FOR_EACH_BB(bb)
      for (gsi=gsi_start_bb(bb); !gsi_end_p(gsi); gsi_next(&gsi))
      {
          stmt = gsi_stmt(gsi);
          if (is_gimple_call(stmt) || is_gimple_assign(stmt))
          {
              /* If it's a call to a function we added already (junk or some
               * initialization functions), or a function we have previously
               * analyzed, avoid inserting junk data.
               */
              if (is_gimple_call(stmt) &&
                  !has_been_processed(gimple_call_fn(stmt)))
                continue;
              else if ((max_calls > 0) && ((rand() % 2) == 0))
              {
                  insert_call_to_junk_fn(stmt);
                  --max_calls;
              }
          }
      }

    /* Mark as being analyzed so we avoid trying to junkify it again */
    VEC_safe_push(tree, gc, analyized_fns, cfun->decl);
    return 0;
}
Example #12
0
static unsigned int
tree_call_cdce (void)
{
    basic_block bb;
    gimple_stmt_iterator i;
    bool something_changed = false;
    vec<gimple> cond_dead_built_in_calls = vNULL;
    FOR_EACH_BB (bb)
    {
        /* Collect dead call candidates.  */
        for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
            gimple stmt = gsi_stmt (i);
            if (is_gimple_call (stmt)
                    && is_call_dce_candidate (stmt))
            {
                if (dump_file && (dump_flags & TDF_DETAILS))
                {
                    fprintf (dump_file, "Found conditional dead call: ");
                    print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                    fprintf (dump_file, "\n");
                }
                if (!cond_dead_built_in_calls.exists ())
                    cond_dead_built_in_calls.create (64);
                cond_dead_built_in_calls.safe_push (stmt);
            }
        }
    }

    if (!cond_dead_built_in_calls.exists ())
        return 0;

    something_changed
        = shrink_wrap_conditional_dead_built_in_calls (cond_dead_built_in_calls);

    cond_dead_built_in_calls.release ();

    if (something_changed)
    {
        free_dominance_info (CDI_DOMINATORS);
        free_dominance_info (CDI_POST_DOMINATORS);
        /* As we introduced new control-flow we need to insert PHI-nodes
           for the call-clobbers of the remaining call.  */
        mark_virtual_operands_for_renaming (cfun);
        return TODO_update_ssa;
    }

    return 0;
}
Example #13
0
static struct mem_ref_group *
gather_memory_references (struct loop *loop, bool *no_other_refs)
{
  basic_block *body = get_loop_body_in_dom_order (loop);
  basic_block bb;
  unsigned i;
  gimple_stmt_iterator bsi;
  gimple stmt;
  tree lhs, rhs;
  struct mem_ref_group *refs = NULL;

  *no_other_refs = true;

  /* Scan the loop body in order, so that earlier references precede
     the later ones.  */
  for (i = 0; i < loop->num_nodes; i++)
    {
      bb = body[i];
      if (bb->loop_father != loop)
	continue;

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  stmt = gsi_stmt (bsi);

	  if (gimple_code (stmt) != GIMPLE_ASSIGN)
	    {
	      if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)
		  || (is_gimple_call (stmt)
		      && !(gimple_call_flags (stmt) & ECF_CONST)))
		*no_other_refs = false;
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);

	  if (REFERENCE_CLASS_P (rhs))
	    *no_other_refs &= gather_memory_references_ref (loop, &refs,
							    rhs, false, stmt);
	  if (REFERENCE_CLASS_P (lhs))
	    *no_other_refs &= gather_memory_references_ref (loop, &refs,
							    lhs, true, stmt);
	}
    }
  free (body);

  return refs;
}
Example #14
0
unsigned int
cgraph_edge::rebuild_edges (void)
{
  basic_block bb;
  cgraph_node *node = cgraph_node::get (current_function_decl);
  gimple_stmt_iterator gsi;

  node->remove_callees ();
  node->remove_all_references ();

  node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl;

	  if (is_gimple_call (stmt))
	    {
	      int freq = compute_call_stmt_bb_frequency (current_function_decl,
							 bb);
	      decl = gimple_call_fndecl (stmt);
	      if (decl)
		node->create_edge (cgraph_node::get_create (decl), stmt,
				   bb->count, freq);
	      else if (gimple_call_internal_p (stmt))
		;
	      else
		node->create_indirect_edge (stmt,
					    gimple_call_flags (stmt),
					    bb->count, freq);
	    }
	  node->record_stmt_references (stmt);
	}
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	node->record_stmt_references (gsi_stmt (gsi));
    }
  record_eh_tables (node, cfun);
  gcc_assert (!node->global.inlined_to);

  if (node->instrumented_version
      && !node->instrumentation_clone)
    node->create_reference (node->instrumented_version, IPA_REF_CHKP, NULL);

  return 0;
}
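The three branches above (known fndecl, internal call, indirect call) map onto easily written source. A hedged sketch covering the direct and indirect cases; internal calls (IFN_*) are compiler-generated and have no source form:

extern void callee (void);

void
caller (void (*fp) (void))
{
  callee ();   /* gimple_call_fndecl != NULL: create_edge          */
  fp ();       /* no fndecl and not internal: create_indirect_edge */
}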
Example #15
0
/*
 * find all C level function pointer dereferences and forcibly set the highest bit of the pointer
 */
static unsigned int execute_kernexec_fptr(void)
{
	basic_block bb;

	// 1. loop through BBs and GIMPLE statements
	FOR_EACH_BB(bb) {
		gimple_stmt_iterator gsi;

		for (gsi = gsi_start_bb(bb); !gsi_end_p(gsi); gsi_next(&gsi)) {
			// gimple match: h_1 = get_fptr (); D.2709_3 = h_1 (x_2(D));
			tree fn;
			gimple call_stmt;

			// is it a call ...
			call_stmt = gsi_stmt(gsi);
			if (!is_gimple_call(call_stmt))
				continue;
			fn = gimple_call_fn(call_stmt);
			if (TREE_CODE(fn) == ADDR_EXPR)
				continue;
			if (TREE_CODE(fn) != SSA_NAME)
				gcc_unreachable();

			// ... through a function pointer
			if (SSA_NAME_VAR(fn) != NULL_TREE) {
				fn = SSA_NAME_VAR(fn);
				if (TREE_CODE(fn) != VAR_DECL && TREE_CODE(fn) != PARM_DECL) {
					debug_tree(fn);
					gcc_unreachable();
				}
			}
			fn = TREE_TYPE(fn);
			if (TREE_CODE(fn) != POINTER_TYPE)
				continue;
			fn = TREE_TYPE(fn);
			if (TREE_CODE(fn) != FUNCTION_TYPE)
				continue;

			kernexec_instrument_fptr(&gsi);

//debug_tree(gimple_call_fn(call_stmt));
//print_gimple_stmt(stderr, call_stmt, 0, TDF_LINENO);
		}
	}

	return 0;
}
Example #16
0
static bool
is_leaf_function (function *fn)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  /* If we do not have a cfg for this function be conservative and assume
     it is not a leaf function.  */
  if (fn->cfg == NULL)
    return false;

  FOR_EACH_BB_FN (bb, fn)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      if (is_gimple_call (gsi_stmt (gsi)))
	return false;
  return true;
}
Example #17
0
unsigned int
rebuild_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  gimple_stmt_iterator gsi;

  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);

  node->count = ENTRY_BLOCK_PTR->count;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl;

	  if (is_gimple_call (stmt))
	    {
	      int freq = compute_call_stmt_bb_frequency (current_function_decl,
							 bb);
	      decl = gimple_call_fndecl (stmt);
	      if (decl)
		cgraph_create_edge (node, cgraph_get_create_node (decl), stmt,
				    bb->count, freq);
	      else
		cgraph_create_indirect_edge (node, stmt,
					     gimple_call_flags (stmt),
					     bb->count, freq);
	    }
	  walk_stmt_load_store_addr_ops (stmt, node, mark_load,
					 mark_store, mark_address);

	}
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
	walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
				       mark_load, mark_store, mark_address);
    }
  record_eh_tables (node, cfun);
  gcc_assert (!node->global.inlined_to);

  return 0;
}
Example #18
0
bool
may_propagate_copy_into_stmt (gimple dest, tree orig)
{
    tree type_d;
    tree type_o;

    /* If the statement is a switch or a single-rhs assignment,
       then the expression to be replaced by the propagation may
       be an SSA_NAME.  Fortunately, there is an explicit tree
       for the expression, so we delegate to may_propagate_copy.  */

    if (gimple_assign_single_p (dest))
        return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
    else if (gimple_code (dest) == GIMPLE_SWITCH)
        return may_propagate_copy (gimple_switch_index (dest), orig);

    /* In other cases, the expression is not materialized, so there
       is no destination to pass to may_propagate_copy.  On the other
       hand, the expression cannot be an SSA_NAME, so the analysis
       is much simpler.  */

    if (TREE_CODE (orig) == SSA_NAME
            && (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
                ||  TREE_CODE (SSA_NAME_VAR (orig)) == MEMORY_PARTITION_TAG))
        return false;

    if (is_gimple_assign (dest))
        type_d = TREE_TYPE (gimple_assign_lhs (dest));
    else if (gimple_code (dest) == GIMPLE_COND)
        type_d = boolean_type_node;
    else if (is_gimple_call (dest)
             && gimple_call_lhs (dest) != NULL_TREE)
        type_d = TREE_TYPE (gimple_call_lhs (dest));
    else
        gcc_unreachable ();

    type_o = TREE_TYPE (orig);

    if (!useless_type_conversion_p (type_d, type_o))
        return false;

    return true;
}
Example #19
0
static void
warn_self_assign (gimple stmt)
{
  tree rhs, lhs;

  /* Check assignment statement.  */
  if (gimple_assign_single_p (stmt))
    {
      rhs = get_real_ref_rhs (gimple_assign_rhs1 (stmt));
      if (!rhs)
        return;

      lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) == SSA_NAME)
        {
          lhs = SSA_NAME_VAR (lhs);
          if (!lhs || DECL_ARTIFICIAL (lhs))
            return;
        }

      compare_and_warn (stmt, lhs, rhs);
    }
  /* Check overloaded operator '=' (if enabled).  */
  else if (check_operator_eq && is_gimple_call (stmt))
    {
      tree fdecl = gimple_call_fndecl (stmt);
      if (fdecl && (DECL_NAME (fdecl) == maybe_get_identifier ("operator=")))
        {
          /* If 'operator=' takes reference operands, the arguments will be 
             ADDR_EXPR trees. In this case, just remove the address-taken
             operator before we compare the lhs and rhs.  */
          lhs = gimple_call_arg (stmt, 0);
          if (TREE_CODE (lhs) == ADDR_EXPR)
            lhs = TREE_OPERAND (lhs, 0);
          rhs = gimple_call_arg (stmt, 1);
          if (TREE_CODE (rhs) == ADDR_EXPR)
            rhs = TREE_OPERAND (rhs, 0);

          compare_and_warn (stmt, lhs, rhs);
        }
    }
}
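A hedged sketch of code the assignment branch flags (the operator= branch handles the analogous C++ case):

void
example (void)
{
  static int counter;   /* a static avoids an artificial SSA name */
  counter = counter;    /* lhs and rhs resolve to the same decl:
			   compare_and_warn reports self-assignment */
}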
Example #20
0
void
propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
{
    gimple stmt = gsi_stmt (*gsi);

    if (is_gimple_assign (stmt))
    {
        tree expr = NULL_TREE;
        if (gimple_assign_single_p (stmt))
            expr = gimple_assign_rhs1 (stmt);
        propagate_tree_value (&expr, val);
        gimple_assign_set_rhs_from_tree (gsi, expr);
        stmt = gsi_stmt (*gsi);
    }
    else if (gimple_code (stmt) == GIMPLE_COND)
    {
        tree lhs = NULL_TREE;
        tree rhs = fold_convert (TREE_TYPE (val), integer_zero_node);
        propagate_tree_value (&lhs, val);
        gimple_cond_set_code (stmt, NE_EXPR);
        gimple_cond_set_lhs (stmt, lhs);
        gimple_cond_set_rhs (stmt, rhs);
    }
    else if (is_gimple_call (stmt)
             && gimple_call_lhs (stmt) != NULL_TREE)
    {
        gimple new_stmt;

        tree expr = NULL_TREE;
        propagate_tree_value (&expr, val);
        new_stmt  = gimple_build_assign (gimple_call_lhs (stmt), expr);
        copy_virtual_operands (new_stmt, stmt);
        move_ssa_defining_stmt_for_defs (new_stmt, stmt);
        gsi_replace (gsi, new_stmt, false);
    }
    else if (gimple_code (stmt) == GIMPLE_SWITCH)
        propagate_tree_value (gimple_switch_index_ptr (stmt), val);
    else
        gcc_unreachable ();
}
Example #21
0
void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  timevar_push (TV_TREE_OPS);

  /* If the stmt is a noreturn call queue it to be processed by
     split_bbs_on_noreturn_calls during cfg cleanup.  */
  if (is_gimple_call (stmt)
      && gimple_call_noreturn_p (stmt))
    VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), stmt);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
Example #22
0
static bool
callees_functions_use_frame_header (function *fn)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  tree called_fn_tree;
  function *called_fn;

  if (fn->cfg == NULL)
    return true;

  FOR_EACH_BB_FN (bb, fn)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (is_gimple_call (stmt))
	    {
	      called_fn_tree = gimple_call_fndecl (stmt);
	      if (called_fn_tree != NULL)
	        {
	          called_fn = DECL_STRUCT_FUNCTION (called_fn_tree);
		  if (called_fn == NULL
		      || DECL_WEAK (called_fn_tree) 
		      || has_inlined_assembly (called_fn)
		      || !is_leaf_function (called_fn)
		      || !called_fn->machine->does_not_use_frame_header)
		    return true;
	        }
	      else
		return true;
            }
        }
    }
  return false;
}
Example #23
0
void
propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      tree expr = NULL_TREE;
      if (gimple_assign_single_p (stmt))
        expr = gimple_assign_rhs1 (stmt);
      propagate_tree_value (&expr, val);
      gimple_assign_set_rhs_from_tree (gsi, expr);
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    {
      tree lhs = NULL_TREE;
      tree rhs = build_zero_cst (TREE_TYPE (val));
      propagate_tree_value (&lhs, val);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_lhs (stmt, lhs);
      gimple_cond_set_rhs (stmt, rhs);
    }
  else if (is_gimple_call (stmt)
           && gimple_call_lhs (stmt) != NULL_TREE)
    {
      tree expr = NULL_TREE;
      bool res;
      propagate_tree_value (&expr, val);
      res = update_call_from_tree (gsi, expr);
      gcc_assert (res);
    }
  else if (gimple_code (stmt) == GIMPLE_SWITCH)
    propagate_tree_value (gimple_switch_index_ptr (stmt), val);
  else
    gcc_unreachable ();
}
Example #24
0
static unsigned int
build_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  struct pointer_set_t *visited_nodes = pointer_set_create ();
  gimple_stmt_iterator gsi;
  tree decl;
  unsigned ix;

  /* Create the callgraph edges and record the nodes referenced by the
     function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl;

	  if (is_gimple_debug (stmt))
	    continue;

	  if (is_gimple_call (stmt))
	    {
	      int freq = compute_call_stmt_bb_frequency (current_function_decl,
							 bb);
	      decl = gimple_call_fndecl (stmt);
	      if (decl)
		cgraph_create_edge (node, cgraph_get_create_node (decl),
				    stmt, bb->count, freq);
	      else if (gimple_call_internal_p (stmt))
		;
	      else
		cgraph_create_indirect_edge (node, stmt,
					     gimple_call_flags (stmt),
					     bb->count, freq);
	    }
	  ipa_record_stmt_references (node, stmt);
	  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
	      && gimple_omp_parallel_child_fn (stmt))
	    {
	      tree fn = gimple_omp_parallel_child_fn (stmt);
	      ipa_record_reference (node,
				    cgraph_get_create_node (fn),
				    IPA_REF_ADDR, stmt);
	    }
	  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
	    {
	      tree fn = gimple_omp_task_child_fn (stmt);
	      if (fn)
		ipa_record_reference (node,
				      cgraph_get_create_node (fn),
				      IPA_REF_ADDR, stmt);
	      fn = gimple_omp_task_copy_fn (stmt);
	      if (fn)
		ipa_record_reference (node,
				      cgraph_get_create_node (fn),
				      IPA_REF_ADDR, stmt);
	    }
	}
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	ipa_record_stmt_references (node, gsi_stmt (gsi));
   }

  /* Look for initializers of constant variables and private statics.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (TREE_CODE (decl) == VAR_DECL
	&& (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
	&& !DECL_HAS_VALUE_EXPR_P (decl))
      varpool_finalize_decl (decl);
  record_eh_tables (node, cfun);

  pointer_set_destroy (visited_nodes);
  return 0;
}
Example #25
0
unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
                            ENTRY_BLOCK_PTR->count);

  ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
  EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
                                       count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB (bb)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	     }

	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
        e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* We just processed all calls.  */
  if (cfun->gimple_df)
    vec_free (MODIFIED_NORETURN_CALLS (cfun));

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
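The no-successor fixup at the end corresponds to source like the following hedged sketch: a function declared noreturn that can in fact return. After inlining it, the inlined body ends in a block with no successors, which the pass terminates with a __builtin_unreachable () call.

static void __attribute__ ((noreturn))
buggy_exit (int code)
{
  if (code)
    __builtin_exit (code);
  /* control can fall off the end despite the noreturn attribute */
}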
Example #26
0
static unsigned int
tree_profiling (void)
{
  struct cgraph_node *node;

  /* This is a small-ipa pass that gets called only once, from
     cgraphunit.c:ipa_passes().  */
  gcc_assert (symtab->state == IPA_SSA);

  init_node_map (true);

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      if (!gimple_has_body_p (node->decl))
	continue;

      /* Don't profile functions produced for builtin stuff.  */
      if (DECL_SOURCE_LOCATION (node->decl) == BUILTINS_LOCATION)
	continue;

      /* Do not instrument extern inline functions when testing coverage.
	 While this is not perfectly consistent (early inlined extern inlines
	 will get accounted), the testsuite expects that.  */
      if (DECL_EXTERNAL (node->decl)
	  && flag_test_coverage)
	continue;

      push_cfun (DECL_STRUCT_FUNCTION (node->decl));

      /* Local pure-const may imply need to fixup the cfg.  */
      if (execute_fixup_cfg () & TODO_cleanup_cfg)
	cleanup_tree_cfg ();

      branch_prob ();

      if (! flag_branch_probabilities
	  && flag_profile_values)
	gimple_gen_ic_func_profiler ();

      if (flag_branch_probabilities
	  && flag_profile_values
	  && flag_value_profile_transformations)
	gimple_value_profile_transformations ();

      /* The above could hose dominator info.  Currently there is
	 none coming in, this is a safety valve.  It should be
	 easy to adjust it, if and when there is some.  */
      free_dominance_info (CDI_DOMINATORS);
      free_dominance_info (CDI_POST_DOMINATORS);
      pop_cfun ();
    }

  /* Drop pure/const flags from instrumented functions.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      if (!gimple_has_body_p (node->decl)
	  || !(!node->clone_of
	  || node->decl != node->clone_of->decl))
	continue;

      /* Don't profile functions produced for builtin stuff.  */
      if (DECL_SOURCE_LOCATION (node->decl) == BUILTINS_LOCATION)
	continue;

      node->set_const_flag (false, false);
      node->set_pure_flag (false, false);
    }

  /* Update call statements and rebuild the cgraph.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      basic_block bb;

      if (!gimple_has_body_p (node->decl)
	  || !(!node->clone_of
	  || node->decl != node->clone_of->decl))
	continue;

      /* Don't profile functions produced for builtin stuff.  */
      if (DECL_SOURCE_LOCATION (node->decl) == BUILTINS_LOCATION)
	continue;

      push_cfun (DECL_STRUCT_FUNCTION (node->decl));

      FOR_EACH_BB_FN (bb, cfun)
	{
	  gimple_stmt_iterator gsi;
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      if (is_gimple_call (stmt))
		update_stmt (stmt);
	    }
	}

      /* re-merge split blocks.  */
      cleanup_tree_cfg ();
      update_ssa (TODO_update_ssa);

      cgraph_edge::rebuild_edges ();

      pop_cfun ();
    }

  del_node_map ();
  return 0;
}
Example #27
0
static void
output_gimple_stmt (struct output_block *ob, gimple stmt)
{
  unsigned i;
  enum gimple_code code;
  enum LTO_tags tag;
  struct bitpack_d bp;
  histogram_value hist;

  /* Emit identifying tag.  */
  code = gimple_code (stmt);
  tag = lto_gimple_code_to_tag (code);
  streamer_write_record_start (ob, tag);

  /* Emit the tuple header.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_var_len_unsigned (&bp, gimple_num_ops (stmt));
  bp_pack_value (&bp, gimple_no_warning_p (stmt), 1);
  if (is_gimple_assign (stmt))
    bp_pack_value (&bp, gimple_assign_nontemporal_move_p (stmt), 1);
  bp_pack_value (&bp, gimple_has_volatile_ops (stmt), 1);
  hist = gimple_histogram_value (cfun, stmt);
  bp_pack_value (&bp, hist != NULL, 1);
  bp_pack_var_len_unsigned (&bp, stmt->gsbase.subcode);

  /* Emit location information for the statement.  */
  stream_output_location (ob, &bp, LOCATION_LOCUS (gimple_location (stmt)));
  streamer_write_bitpack (&bp);

  /* Emit the lexical block holding STMT.  */
  stream_write_tree (ob, gimple_block (stmt), true);

  /* Emit the operands.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_RESX:
      streamer_write_hwi (ob, gimple_resx_region (stmt));
      break;

    case GIMPLE_EH_MUST_NOT_THROW:
      stream_write_tree (ob, gimple_eh_must_not_throw_fndecl (stmt), true);
      break;

    case GIMPLE_EH_DISPATCH:
      streamer_write_hwi (ob, gimple_eh_dispatch_region (stmt));
      break;

    case GIMPLE_ASM:
      streamer_write_uhwi (ob, gimple_asm_ninputs (stmt));
      streamer_write_uhwi (ob, gimple_asm_noutputs (stmt));
      streamer_write_uhwi (ob, gimple_asm_nclobbers (stmt));
      streamer_write_uhwi (ob, gimple_asm_nlabels (stmt));
      streamer_write_string (ob, ob->main_stream, gimple_asm_string (stmt),
			     true);
      /* Fallthru  */

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN:
    case GIMPLE_SWITCH:
    case GIMPLE_LABEL:
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_DEBUG:
      for (i = 0; i < gimple_num_ops (stmt); i++)
	{
	  tree op = gimple_op (stmt, i);
	  tree *basep = NULL;
	  /* Wrap all uses of non-automatic variables inside MEM_REFs
	     so that we do not have to deal with type mismatches on
	     merged symbols during IL read in.  The first operand
	     of GIMPLE_DEBUG must be a decl, not MEM_REF, though.  */
	  if (op && (i || !is_gimple_debug (stmt)))
	    {
	      basep = &op;
	      while (handled_component_p (*basep))
		basep = &TREE_OPERAND (*basep, 0);
	      if (TREE_CODE (*basep) == VAR_DECL
		  && !auto_var_in_fn_p (*basep, current_function_decl)
		  && !DECL_REGISTER (*basep))
		{
		  bool volatilep = TREE_THIS_VOLATILE (*basep);
		  *basep = build2 (MEM_REF, TREE_TYPE (*basep),
				   build_fold_addr_expr (*basep),
				   build_int_cst (build_pointer_type
						  (TREE_TYPE (*basep)), 0));
		  TREE_THIS_VOLATILE (*basep) = volatilep;
		}
	      else
		basep = NULL;
	    }
	  stream_write_tree (ob, op, true);
	  /* Restore the original base if we wrapped it inside a MEM_REF.  */
	  if (basep)
	    *basep = TREE_OPERAND (TREE_OPERAND (*basep, 0), 0);
	}
      if (is_gimple_call (stmt))
	{
	  if (gimple_call_internal_p (stmt))
	    streamer_write_enum (ob->main_stream, internal_fn,
				 IFN_LAST, gimple_call_internal_fn (stmt));
	  else
	    stream_write_tree (ob, gimple_call_fntype (stmt), true);
	}
      break;

    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_TRANSACTION:
      gcc_assert (gimple_transaction_body (stmt) == NULL);
      stream_write_tree (ob, gimple_transaction_label (stmt), true);
      break;

    default:
      gcc_unreachable ();
    }
  if (hist)
    stream_out_histogram_value (ob, hist);
}
Example #28
0
static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
	 sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
	break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* The number of executions of the function has been reduced by the
     tail call.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), e->count,
		    EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
				first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
	 idx = 0, gsi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
	continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
Example #29
0
static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = stmt;
	  ass_var = gimple_call_lhs (stmt);
	  break;
	}

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens,
     in (e.g.) "*p = foo()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !DECL_BUILT_IN (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx ++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
	         must have a copyable type and the two arguments must have
	         reasonably equivalent types.  The latter requirement could be
	         relaxed if we emitted a suitable type conversion statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
					         TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that the phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition, however this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (gimple_clobber_p (stmt))
	continue;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign. */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
	return;

      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
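The m and a accumulators collected above are what allow statements between the call and the return. A hedged sketch of the classic case: the multiplication that follows the recursive call is recorded in tmp_m, so the call can still be turned into tail recursion.

/* process_assignment records "n" as the mult accumulator for the
   statement that multiplies the recursive call's result.  */
static int
factorial (int n)
{
  if (n <= 1)
    return 1;
  return n * factorial (n - 1);
}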
Example #30
0
void
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
{
    size_t i, num_ops;
    tree lhs;
    gimple_seq pre = NULL;
    gimple post_stmt = NULL;

    push_gimplify_context (gimple_in_ssa_p (cfun));

    switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
        gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
                       is_gimple_val, fb_rvalue);
        gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
                       is_gimple_val, fb_rvalue);
        break;
    case GIMPLE_SWITCH:
        gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
                       is_gimple_val, fb_rvalue);
        break;
    case GIMPLE_OMP_ATOMIC_LOAD:
        gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
                       is_gimple_val, fb_rvalue);
        break;
    case GIMPLE_ASM:
    {
        size_t i, noutputs = gimple_asm_noutputs (stmt);
        const char *constraint, **oconstraints;
        bool allows_mem, allows_reg, is_inout;

        oconstraints
            = (const char **) alloca ((noutputs) * sizeof (const char *));
        for (i = 0; i < noutputs; i++)
        {
            tree op = gimple_asm_output_op (stmt, i);
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
            oconstraints[i] = constraint;
            parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                                     &allows_reg, &is_inout);
            gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                           is_inout ? is_gimple_min_lval : is_gimple_lvalue,
                           fb_lvalue | fb_mayfail);
        }
        for (i = 0; i < gimple_asm_ninputs (stmt); i++)
        {
            tree op = gimple_asm_input_op (stmt, i);
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
                allows_reg = 0;
            if (!allows_reg && allows_mem)
                gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                               is_gimple_lvalue, fb_lvalue | fb_mayfail);
            else
                gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                               is_gimple_asm_val, fb_rvalue);
        }
    }
    break;
    default:
        /* NOTE: We start gimplifying operands from last to first to
           make sure that side-effects on the RHS of calls, assignments
           and ASMs are executed before the LHS.  The ordering is not
           important for other statements.  */
        num_ops = gimple_num_ops (stmt);
        for (i = num_ops; i > 0; i--)
        {
            tree op = gimple_op (stmt, i - 1);
            if (op == NULL_TREE)
                continue;
            if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
                gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
            else if (i == 2
                     && is_gimple_assign (stmt)
                     && num_ops == 2
                     && get_gimple_rhs_class (gimple_expr_code (stmt))
                     == GIMPLE_SINGLE_RHS)
                gimplify_expr (&op, &pre, NULL,
                               rhs_predicate_for (gimple_assign_lhs (stmt)),
                               fb_rvalue);
            else if (i == 2 && is_gimple_call (stmt))
            {
                if (TREE_CODE (op) == FUNCTION_DECL)
                    continue;
                gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
            }
            else
                gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
            gimple_set_op (stmt, i - 1, op);
        }

        lhs = gimple_get_lhs (stmt);
        /* If the LHS changed in a way that requires a simple RHS,
           create a temporary.  */
        if (lhs && !is_gimple_reg (lhs))
        {
            bool need_temp = false;

            if (is_gimple_assign (stmt)
                    && num_ops == 2
                    && get_gimple_rhs_class (gimple_expr_code (stmt))
                    == GIMPLE_SINGLE_RHS)
                gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
                               rhs_predicate_for (gimple_assign_lhs (stmt)),
                               fb_rvalue);
            else if (is_gimple_reg (lhs))
            {
                if (is_gimple_reg_type (TREE_TYPE (lhs)))
                {
                    if (is_gimple_call (stmt))
                    {
                        i = gimple_call_flags (stmt);
                        if ((i & ECF_LOOPING_CONST_OR_PURE)
                                || !(i & (ECF_CONST | ECF_PURE)))
                            need_temp = true;
                    }
                    if (stmt_can_throw_internal (stmt))
                        need_temp = true;
                }
            }
            else
            {
                if (is_gimple_reg_type (TREE_TYPE (lhs)))
                    need_temp = true;
                else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
                {
                    if (is_gimple_call (stmt))
                    {
                        tree fndecl = gimple_call_fndecl (stmt);

                        if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
                                && !(fndecl && DECL_RESULT (fndecl)
                                     && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
                            need_temp = true;
                    }
                    else
                        need_temp = true;
                }
            }
            if (need_temp)
            {
                tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
                if (gimple_in_ssa_p (cfun))
                    temp = make_ssa_name (temp, NULL);
                gimple_set_lhs (stmt, temp);
                post_stmt = gimple_build_assign (lhs, temp);
            }
        }
        break;
    }

    if (!gimple_seq_empty_p (pre))
        gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
    if (post_stmt)
        gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

    pop_gimplify_context (NULL);
}
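A hedged sketch of source that takes the need_temp path above: the call's LHS is a field rather than a register, so regimplification routes the call result through a fresh temporary (the post_stmt assignment).

struct s { int f; };
extern struct s g;
extern int get_value (void);

void
store_field (void)
{
  /* Regimplified as:  tmp = get_value ();  g.f = tmp;  */
  g.f = get_value ();
}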