static bool
gate_all_optimizations (void)
{
  return (optimize >= 1
	  /* Don't bother doing anything if the program has errors.
	     We still have to run the pass queue if we already went into SSA.  */
	  && (!(errorcount || sorrycount) || gimple_in_ssa_p (cfun)));
}
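The guard reads: run the GIMPLE optimization queue only when optimizing, and, once errors have been reported, only for functions that already went into SSA form — the queue cannot simply be skipped at that point, because the remaining SSA passes still have to run to tear the representation back down.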
Example #2
tree
force_gimple_operand_1 (tree expr, gimple_seq *stmts,
                        gimple_predicate gimple_test_f, tree var)
{
    enum gimplify_status ret;
    location_t saved_location;

    *stmts = NULL;

    /* gimple_test_f might be more strict than is_gimple_val, make
       sure we pass both.  Just checking gimple_test_f doesn't work
       because most gimple predicates do not work recursively.  */
    if (is_gimple_val (expr)
            && (*gimple_test_f) (expr))
        return expr;

    push_gimplify_context (gimple_in_ssa_p (cfun), true);
    saved_location = input_location;
    input_location = UNKNOWN_LOCATION;

    if (var)
    {
        if (gimple_in_ssa_p (cfun) && is_gimple_reg (var))
            var = make_ssa_name (var, NULL);
        expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
    }

    if (TREE_CODE (expr) != MODIFY_EXPR
            && TREE_TYPE (expr) == void_type_node)
    {
        gimplify_and_add (expr, stmts);
        expr = NULL_TREE;
    }
    else
    {
        ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
        gcc_assert (ret != GS_ERROR);
    }

    input_location = saved_location;
    pop_gimplify_context (NULL);

    return expr;
}
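A minimal sketch of a typical call site, assuming `expr` is a GENERIC tree built by the caller and `gsi` is a statement iterator positioned where the value is needed; both names are assumptions, not part of the function above:

/* Hedged sketch: EXPR and GSI are assumed to exist in the caller.
   Force EXPR into a form accepted by is_gimple_val, then insert any
   statements the gimplifier had to create before the current point.  */
gimple_seq stmts = NULL;
tree val = force_gimple_operand_1 (expr, &stmts, is_gimple_val, NULL_TREE);
if (!gimple_seq_empty_p (stmts))
  gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);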
Example #3
void
release_defs (gimple stmt)
{
  tree def;
  ssa_op_iter iter;

  /* Make sure that we are in SSA.  Otherwise, operand cache may point
     to garbage.  */
  gcc_assert (gimple_in_ssa_p (cfun));

  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    if (TREE_CODE (def) == SSA_NAME)
      release_ssa_name (def);
}
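release_defs is normally the last step when deleting a dead statement. A sketch of the usual pairing, assuming SSA form and an iterator `gsi` sitting on the statement to delete (`gsi` is an assumption of this sketch):

/* Hedged sketch: remove the dead statement at GSI and recycle its
   SSA names.  */
gimple stmt = gsi_stmt (gsi);
unlink_stmt_vdef (stmt);   /* Detach the virtual definition, if any.  */
gsi_remove (&gsi, true);   /* Unlink the statement from the IL.  */
release_defs (stmt);       /* Return its SSA names to the freelist.  */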
Example #4
DEBUG_FUNCTION void
verify_ssaname_freelists (struct function *fun)
{
  if (!gimple_in_ssa_p (fun))
    return;

  bitmap names_in_il = BITMAP_ALLOC (NULL);

  /* Walk the entire IL noting every SSA_NAME we see.  */
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      tree t;
      /* First note the result and arguments of PHI nodes.  */
      for (gphi_iterator gsi = gsi_start_phis (bb);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  t = gimple_phi_result (phi);
	  bitmap_set_bit (names_in_il, SSA_NAME_VERSION (t));

	  for (unsigned int i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      t = gimple_phi_arg_def (phi, i);
	      if (TREE_CODE (t) == SSA_NAME)
		bitmap_set_bit (names_in_il, SSA_NAME_VERSION (t));
	    }
	}

      /* Then note the operands of each statement.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  ssa_op_iter iter;
	  gimple *stmt = gsi_stmt (gsi);
	  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
	    bitmap_set_bit (names_in_il, SSA_NAME_VERSION (t));
	}
    }

  /* The remainder of the function (not shown in this excerpt) records
     the SSA names sitting on the FREE_SSANAMES free list in the same
     way and asserts that none of them also appears in NAMES_IN_IL.  */
  BITMAP_FREE (names_in_il);
}
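Unlike most of the examples here, this verifier receives the function to check as an explicit struct function * argument, so gimple_in_ssa_p is applied to fun rather than to the global cfun; if the function never entered SSA form there are no name freelists worth checking and the verifier returns immediately.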
Example #5
unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
                            ENTRY_BLOCK_PTR->count);

  ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
  EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
                                       count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB (bb)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
        e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* We just processed all calls.  */
  if (cfun->gimple_df)
    vec_free (MODIFIED_NORETURN_CALLS (cfun));

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
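gimple_in_ssa_p is consulted twice in this pass: once up front, to seed todo with TODO_verify_ssa whenever the function is in SSA form, and again when a const/pure call is cleaned up, where TODO_update_ssa has to be requested because update_stmt may have changed the statement's virtual operands.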
Example #6
void
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
{
    size_t i, num_ops;
    tree lhs;
    gimple_seq pre = NULL;
    gimple post_stmt = NULL;

    push_gimplify_context (gimple_in_ssa_p (cfun));

    switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
        gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
                       is_gimple_val, fb_rvalue);
        gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
                       is_gimple_val, fb_rvalue);
        break;
    case GIMPLE_SWITCH:
        gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
                       is_gimple_val, fb_rvalue);
        break;
    case GIMPLE_OMP_ATOMIC_LOAD:
        gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
                       is_gimple_val, fb_rvalue);
        break;
    case GIMPLE_ASM:
    {
        size_t i, noutputs = gimple_asm_noutputs (stmt);
        const char *constraint, **oconstraints;
        bool allows_mem, allows_reg, is_inout;

        oconstraints
            = (const char **) alloca ((noutputs) * sizeof (const char *));
        for (i = 0; i < noutputs; i++)
        {
            tree op = gimple_asm_output_op (stmt, i);
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
            oconstraints[i] = constraint;
            parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                                     &allows_reg, &is_inout);
            gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                           is_inout ? is_gimple_min_lval : is_gimple_lvalue,
                           fb_lvalue | fb_mayfail);
        }
        for (i = 0; i < gimple_asm_ninputs (stmt); i++)
        {
            tree op = gimple_asm_input_op (stmt, i);
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
                allows_reg = 0;
            if (!allows_reg && allows_mem)
                gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                               is_gimple_lvalue, fb_lvalue | fb_mayfail);
            else
                gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                               is_gimple_asm_val, fb_rvalue);
        }
    }
    break;
    default:
        /* NOTE: We start gimplifying operands from last to first to
           make sure that side-effects on the RHS of calls, assignments
           and ASMs are executed before the LHS.  The ordering is not
           important for other statements.  */
        num_ops = gimple_num_ops (stmt);
        for (i = num_ops; i > 0; i--)
        {
            tree op = gimple_op (stmt, i - 1);
            if (op == NULL_TREE)
                continue;
            if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
                gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
            else if (i == 2
                     && is_gimple_assign (stmt)
                     && num_ops == 2
                     && get_gimple_rhs_class (gimple_expr_code (stmt))
                     == GIMPLE_SINGLE_RHS)
                gimplify_expr (&op, &pre, NULL,
                               rhs_predicate_for (gimple_assign_lhs (stmt)),
                               fb_rvalue);
            else if (i == 2 && is_gimple_call (stmt))
            {
                if (TREE_CODE (op) == FUNCTION_DECL)
                    continue;
                gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
            }
            else
                gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
            gimple_set_op (stmt, i - 1, op);
        }

        lhs = gimple_get_lhs (stmt);
        /* If the LHS changed in a way that requires a simple RHS,
           create a temporary.  */
        if (lhs && !is_gimple_reg (lhs))
        {
            bool need_temp = false;

            if (is_gimple_assign (stmt)
                    && num_ops == 2
                    && get_gimple_rhs_class (gimple_expr_code (stmt))
                    == GIMPLE_SINGLE_RHS)
                gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
                               rhs_predicate_for (gimple_assign_lhs (stmt)),
                               fb_rvalue);
            else if (is_gimple_reg (lhs))
            {
                if (is_gimple_reg_type (TREE_TYPE (lhs)))
                {
                    if (is_gimple_call (stmt))
                    {
                        i = gimple_call_flags (stmt);
                        if ((i & ECF_LOOPING_CONST_OR_PURE)
                                || !(i & (ECF_CONST | ECF_PURE)))
                            need_temp = true;
                    }
                    if (stmt_can_throw_internal (stmt))
                        need_temp = true;
                }
            }
            else
            {
                if (is_gimple_reg_type (TREE_TYPE (lhs)))
                    need_temp = true;
                else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
                {
                    if (is_gimple_call (stmt))
                    {
                        tree fndecl = gimple_call_fndecl (stmt);

                        if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
                                && !(fndecl && DECL_RESULT (fndecl)
                                     && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
                            need_temp = true;
                    }
                    else
                        need_temp = true;
                }
            }
            if (need_temp)
            {
                tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
                if (gimple_in_ssa_p (cfun))
                    temp = make_ssa_name (temp, NULL);
                gimple_set_lhs (stmt, temp);
                post_stmt = gimple_build_assign (lhs, temp);
            }
        }
        break;
    }

    if (!gimple_seq_empty_p (pre))
        gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
    if (post_stmt)
        gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

    pop_gimplify_context (NULL);
}
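Both uses of gimple_in_ssa_p in this function serve the same purpose: push_gimplify_context is told whether freshly created temporaries should become SSA names, and the explicit make_ssa_name call upgrades the temporary LHS when a post-assignment has to be split off a statement whose result cannot stay in the original LHS.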
Example #7
unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  if (ENTRY_BLOCK_PTR->count)
    count_scale = (cgraph_node (current_function_decl)->count * REG_BR_PROB_BASE
    		   + ENTRY_BLOCK_PTR->count / 2) / ENTRY_BLOCK_PTR->count;
  else
    count_scale = REG_BR_PROB_BASE;

  ENTRY_BLOCK_PTR->count = cgraph_node (current_function_decl)->count;
  EXIT_BLOCK_PTR->count = (EXIT_BLOCK_PTR->count * count_scale
  			   + REG_BR_PROB_BASE / 2) / REG_BR_PROB_BASE;

  FOR_EACH_BB (bb)
    {
      bb->count = (bb->count * count_scale
		   + REG_BR_PROB_BASE / 2) / REG_BR_PROB_BASE;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;

	  if (decl
	      && gimple_call_flags (stmt) & (ECF_CONST
					     | ECF_PURE
					     | ECF_LOOPING_CONST_OR_PURE))
	    {
	      if (gimple_in_ssa_p (cfun))
		{
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		  mark_symbols_for_renaming (stmt);
		  update_stmt (stmt);
		}
	    }

	  maybe_clean_eh_stmt (stmt);
	}

      if (gimple_purge_dead_eh_edges (bb))
	todo |= TODO_cleanup_cfg;
      FOR_EACH_EDGE (e, ei, bb->succs)
        e->count = (e->count * count_scale
		    + REG_BR_PROB_BASE / 2) / REG_BR_PROB_BASE;
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  return todo;
}
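This is an earlier revision of the execute_fixup_cfg seen in Example #5: the profile-count scaling is spelled out by hand against REG_BR_PROB_BASE instead of going through apply_scale, and the SSA path additionally calls mark_symbols_for_renaming, a helper that later GCC releases dropped. The gimple_in_ssa_p checks play the same two roles in both versions.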
Example #8
bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  node->next_needed = NULL;
	  cgraph_finalize_function (fndecl, false);
	  cgraph_mark_reachable_node (node);
	  output = true;
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization has already started, do all the essential
	     transformations that have already been performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  compute_inline_parameters (node);
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes anyway
		 to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.pass.sub);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->output = 0;
	  cgraph_expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
      cgraph_call_function_insertion_hooks (node);
    }
  return output;
}
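In the CGRAPH_STATE_IPA_SSA case, gimple_in_ssa_p is checked on the new function's own DECL_STRUCT_FUNCTION rather than on cfun: a function inserted after IPA has started must be pushed through the early local pass list so that it reaches SSA form (or, when not optimizing, so that OMP gets expanded) before the whole-program transformations can be applied to it.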
Example #9
tree
maybe_push_res_to_seq (code_helper rcode, tree type, tree *ops,
		       gimple_seq *seq, tree res)
{
  if (rcode.is_tree_code ())
    {
      if (!res
	  && gimple_simplified_result_is_gimple_val (rcode, ops))
	return ops[0];
      if (mprts_hook)
	{
	  tree tem = mprts_hook (rcode, type, ops);
	  if (tem)
	    return tem;
	}
      if (!seq)
	return NULL_TREE;
      /* Play safe and do not allow abnormals to be mentioned in
         newly created statements.  */
      if ((TREE_CODE (ops[0]) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
	  || (ops[1]
	      && TREE_CODE (ops[1]) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
	  || (ops[2]
	      && TREE_CODE (ops[2]) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2]))
	  || (COMPARISON_CLASS_P (ops[0])
	      && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
		   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0],
								     0)))
		  || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0],
									1))))))
	return NULL_TREE;
      if (!res)
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	}
      maybe_build_generic_op (rcode, type, ops);
      gimple *new_stmt = gimple_build_assign (res, rcode,
					     ops[0], ops[1], ops[2]);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
  else
    {
      if (!seq)
	return NULL_TREE;
      combined_fn fn = rcode;
      /* Play safe and do not allow abnormals to be mentioned in
         newly created statements.  */
      unsigned nargs;
      for (nargs = 0; nargs < 3; ++nargs)
	{
	  if (!ops[nargs])
	    break;
	  if (TREE_CODE (ops[nargs]) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[nargs]))
	    return NULL_TREE;
	}
      gcc_assert (nargs != 0);
      gcall *new_stmt = NULL;
      if (internal_fn_p (fn))
	{
	  /* Generate the given function if we can.  */
	  internal_fn ifn = as_internal_fn (fn);
	  new_stmt = build_call_internal (ifn, type, nargs, ops);
	  if (!new_stmt)
	    return NULL_TREE;
	}
      else
	{
	  /* Find the function we want to call.  */
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  if (!decl)
	    return NULL;

	  /* We can't and should not emit calls to non-const functions.  */
	  if (!(flags_from_decl_or_type (decl) & ECF_CONST))
	    return NULL;

	  new_stmt = gimple_build_call (decl, nargs, ops[0], ops[1], ops[2]);
	}
      if (!res)
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	}
      gimple_call_set_lhs (new_stmt, res);
      gimple_seq_add_stmt_without_update (seq, new_stmt);
      return res;
    }
}
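Both arms of maybe_push_res_to_seq pick the result slot the same way: an SSA name when the function is in SSA form, a plain temporary register otherwise. A minimal sketch of that choice factored into a helper; make_result_slot is a hypothetical name, not a GCC function:

/* Hypothetical helper (not in GCC): create a result slot appropriate
   for the current IL state.  In SSA form the result must be a fresh
   SSA name; before SSA a plain temporary register suffices.  */
static tree
make_result_slot (tree type)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type);
  return create_tmp_reg (type);
}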