Example no. 1
0
bool
autofdo_source_profile::get_count_info (gimple *stmt, count_info *info) const
{
  if (LOCATION_LOCUS (gimple_location (stmt)) == cfun->function_end_locus)
    return false;

  inline_stack stack;
  get_inline_stack (gimple_location (stmt), &stack);
  if (stack.length () == 0)
    return false;
  function_instance *s = get_function_instance_by_inline_stack (stack);
  if (s == NULL)
    return false;
  return s->get_count_info (stack[0].second, info);
}
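A minimal usage sketch (hedged; afdo_source_profile is assumed to be the
singleton the auto-profile pass builds, and STMT/MAX_COUNT come from a caller
like Example no. 14 below):

  count_info info;
  /* Query the AutoFDO profile for one statement's execution count.  */
  if (afdo_source_profile->get_count_info (stmt, &info)
      && info.count > max_count)
    max_count = info.count;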
Example no. 2
0
static enum ssa_prop_result
copy_prop_visit_cond_stmt (gimple *stmt, edge *taken_edge_p)
{
  enum ssa_prop_result retval = SSA_PROP_VARYING;
  location_t loc = gimple_location (stmt);

  tree op0 = valueize_val (gimple_cond_lhs (stmt));
  tree op1 = valueize_val (gimple_cond_rhs (stmt));

  /* See if we can determine the predicate's value.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Trying to determine truth value of ");
      fprintf (dump_file, "predicate ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Fold COND and see whether we get a useful result.  */
  tree folded_cond = fold_binary_loc (loc, gimple_cond_code (stmt),
				      boolean_type_node, op0, op1);
  if (folded_cond)
    {
      basic_block bb = gimple_bb (stmt);
      *taken_edge_p = find_taken_edge (bb, folded_cond);
      if (*taken_edge_p)
	retval = SSA_PROP_INTERESTING;
    }

  if (dump_file && (dump_flags & TDF_DETAILS) && *taken_edge_p)
    fprintf (dump_file, "\nConditional will always take edge %d->%d\n",
	     (*taken_edge_p)->src->index, (*taken_edge_p)->dest->index);

  return retval;
}
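For intuition, a hedged sketch of the folding step in isolation: once both
operands valueize to constants, fold_binary_loc collapses the predicate to a
boolean constant and find_taken_edge can select the successor statically
(LOC and BB as in the function above):

  /* Illustrative only: 4 < 10 folds to boolean_true_node.  */
  tree folded = fold_binary_loc (loc, LT_EXPR, boolean_type_node,
				 build_int_cst (integer_type_node, 4),
				 build_int_cst (integer_type_node, 10));
  edge e = folded ? find_taken_edge (bb, folded) : NULL;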
Example no. 3
0
void
ubsan_expand_bounds_ifn (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  gcc_assert (gimple_call_num_args (stmt) == 3);

  /* Pick up the arguments of the UBSAN_BOUNDS call.  */
  tree type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 0)));
  tree index = gimple_call_arg (stmt, 1);
  tree orig_index_type = TREE_TYPE (index);
  tree bound = gimple_call_arg (stmt, 2);

  gimple_stmt_iterator gsi_orig = *gsi;

  /* Create condition "if (index > bound)".  */
  basic_block then_bb, fallthru_bb;
  gimple_stmt_iterator cond_insert_point
    = create_cond_insert_point (gsi, 0/*before_p*/, false, true,
				&then_bb, &fallthru_bb);
  index = fold_convert (TREE_TYPE (bound), index);
  index = force_gimple_operand_gsi (&cond_insert_point, index,
				    true/*simple_p*/, NULL_TREE,
				    false/*before*/, GSI_NEW_STMT);
  gimple g = gimple_build_cond (GT_EXPR, index, bound, NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);

  /* Generate __ubsan_handle_out_of_bounds call.  */
  *gsi = gsi_after_labels (then_bb);
  if (flag_sanitize_undefined_trap_on_error)
    g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data
	= ubsan_create_data ("__ubsan_out_of_bounds_data", &loc, NULL,
			     ubsan_type_descriptor (type, UBSAN_PRINT_ARRAY),
			     ubsan_type_descriptor (orig_index_type),
			     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      enum built_in_function bcode
	= flag_sanitize_recover
	  ? BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS
	  : BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS_ABORT;
      tree fn = builtin_decl_explicit (bcode);
      tree val = force_gimple_operand_gsi (gsi, ubsan_encode_value (index),
					   true, NULL_TREE, true,
					   GSI_SAME_STMT);
      g = gimple_build_call (fn, 2, data, val);
    }
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Get rid of the UBSAN_BOUNDS call from the IR.  */
  unlink_stmt_vdef (stmt);
  gsi_remove (&gsi_orig, true);

  /* Point GSI to next logical statement.  */
  *gsi = gsi_start_bb (fallthru_bb);
}
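Schematically, the expansion replaces the UBSAN_BOUNDS internal call with a
compare-and-branch plus a handler block; a rough sketch of the resulting
GIMPLE (temporary names are invented, exact dump form varies):

  index.0 = (bound_type) index;
  if (index.0 > bound)
    goto <then_bb>;
  else
    goto <fallthru_bb>;

  <then_bb>:
  __ubsan_handle_out_of_bounds (&data, index.1);

  <fallthru_bb>:
  ...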
Example no. 4
0
bool
gsi_replace (gimple_stmt_iterator *gsi, gimple *stmt, bool update_eh_info)
{
  gimple *orig_stmt = gsi_stmt (*gsi);
  bool require_eh_edge_purge = false;

  if (stmt == orig_stmt)
    return false;

  gcc_assert (!gimple_has_lhs (orig_stmt) || !gimple_has_lhs (stmt)
	      || gimple_get_lhs (orig_stmt) == gimple_get_lhs (stmt));

  gimple_set_location (stmt, gimple_location (orig_stmt));
  gimple_set_bb (stmt, gsi_bb (*gsi));

  /* Preserve EH region information from the original statement, if
     requested by the caller.  */
  if (update_eh_info)
    require_eh_edge_purge = maybe_clean_or_replace_eh_stmt (orig_stmt, stmt);

  gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);

  /* Free all the data flow information for ORIG_STMT.  */
  gimple_set_bb (orig_stmt, NULL);
  gimple_remove_stmt_histograms (cfun, orig_stmt);
  delink_stmt_imm_use (orig_stmt);

  gsi_set_stmt (gsi, stmt);
  gimple_set_modified (stmt, true);
  update_modified_stmt (stmt);
  return require_eh_edge_purge;
}
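A minimal caller sketch (hedged; OLD_STMT and NEW_STMT are assumed to have
been built elsewhere with matching lhs):

  gimple_stmt_iterator gsi = gsi_for_stmt (old_stmt);
  /* Swap the statements in place; if EH information changed, purge
     any EH edges that became dead.  */
  if (gsi_replace (&gsi, new_stmt, true))
    gimple_purge_dead_eh_edges (gimple_bb (new_stmt));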
Example no. 5
0
/* Transform
   1) Memory references.
*/
static void
mf_xform_statements (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block;
  enum gimple_rhs_class grhs_class;

  bb = ENTRY_BLOCK_PTR->next_bb;
  do
    {
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple s = gsi_stmt (i);

          /* Only a few GIMPLE statements can reference memory.  */
          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
	      mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
		  		 gimple_location (s), integer_one_node);
	      mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
		  		 gimple_location (s), integer_zero_node);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
				   gimple_location (s), integer_zero_node);
              break;

            case GIMPLE_RETURN:
              if (gimple_return_retval (s) != NULL_TREE)
                {
                  mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
				     gimple_location (s),
				     integer_zero_node);
                }
              break;

            default:
              ;
            }
        }
      bb = next;
    }
  while (bb && bb->index <= saved_last_basic_block);
}
Example no. 6
0
static void
instrument_member_call (gimple_stmt_iterator *iter)
{
  tree this_parm = gimple_call_arg (gsi_stmt (*iter), 0);
  tree kind = build_int_cst (unsigned_char_type_node, UBSAN_MEMBER_CALL);
  gimple g = gimple_build_call_internal (IFN_UBSAN_NULL, 2, this_parm, kind);
  gimple_set_location (g, gimple_location (gsi_stmt (*iter)));
  gsi_insert_before (iter, g, GSI_SAME_STMT);
}
Example no. 7
0
static void
instrument_mem_ref (tree t, gimple_stmt_iterator *iter, bool is_lhs)
{
  enum ubsan_null_ckind ikind = is_lhs ? UBSAN_STORE_OF : UBSAN_LOAD_OF;
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_TYPE (t))))
    ikind = UBSAN_MEMBER_ACCESS;
  tree kind = build_int_cst (unsigned_char_type_node, ikind);
  gimple g = gimple_build_call_internal (IFN_UBSAN_NULL, 2, t, kind);
  gimple_set_location (g, gimple_location (gsi_stmt (*iter)));
  gsi_insert_before (iter, g, GSI_SAME_STMT);
}
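In a GIMPLE dump, both helpers above show up as an internal call inserted
immediately before the instrumented statement, roughly:

  UBSAN_NULL (ptr_1, kind);
  /* original member call, load, or store follows here */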
Example no. 8
0
static bool
remove_exits_and_undefined_stmts (struct loop *loop, unsigned int npeeled)
{
  struct nb_iter_bound *elt;
  bool changed = false;

  for (elt = loop->bounds; elt; elt = elt->next)
    {
      /* If statement is known to be undefined after peeling, turn it
	 into unreachable (or trap when debugging experience is supposed
	 to be good).  */
      if (!elt->is_exit
	  && wi::ltu_p (elt->bound, npeeled))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (elt->stmt);
	  gcall *stmt = gimple_build_call
	      (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	  gimple_set_location (stmt, gimple_location (elt->stmt));
	  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
	  split_block (gimple_bb (stmt), stmt);
	  changed = true;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forced statement unreachable: ");
	      print_gimple_stmt (dump_file, elt->stmt, 0, 0);
	    }
	}
      /* If we know the exit will be taken after peeling, update.  */
      else if (elt->is_exit
	       && wi::leu_p (elt->bound, npeeled))
	{
	  basic_block bb = gimple_bb (elt->stmt);
	  edge exit_edge = EDGE_SUCC (bb, 0);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forced exit to be taken: ");
	      print_gimple_stmt (dump_file, elt->stmt, 0, 0);
	    }
	  if (!loop_exit_edge_p (loop, exit_edge))
	    exit_edge = EDGE_SUCC (bb, 1);
	  gcc_checking_assert (loop_exit_edge_p (loop, exit_edge));
	  gcond *cond_stmt = as_a <gcond *> (elt->stmt);
	  if (exit_edge->flags & EDGE_TRUE_VALUE)
	    gimple_cond_make_true (cond_stmt);
	  else
	    gimple_cond_make_false (cond_stmt);
	  update_stmt (cond_stmt);
	  changed = true;
	}
    }
  return changed;
}
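For reference, gimple_cond_make_true/false rewrite the condition operands in
place so the branch becomes trivially decidable; schematically (the exact
printed form varies by GCC version):

  if (i_1 < n_2)   -->   if (1 != 0)   /* gimple_cond_make_true  */
                   -->   if (0 != 0)   /* gimple_cond_make_false */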
Example no. 9
0
bool
autofdo_source_profile::update_inlined_ind_target (gcall *stmt,
                                                   count_info *info)
{
  if (LOCATION_LOCUS (gimple_location (stmt)) == cfun->function_end_locus)
    return false;

  count_info old_info;
  get_count_info (stmt, &old_info);
  gcov_type total = 0;
  for (icall_target_map::const_iterator iter = old_info.targets.begin ();
       iter != old_info.targets.end (); ++iter)
    total += iter->second;

  /* Program behavior changed, original promoted (and inlined) target is not
     hot any more. We will avoid promoting the original target.

     To check if original promoted target is still hot, we check the total
     count of the unpromoted targets (stored in old_info). If it is no less
     than half of the callsite count (stored in INFO), the original promoted
     target is considered not hot any more.  */
  if (total >= info->count / 2)
    return false;

  inline_stack stack;
  get_inline_stack (gimple_location (stmt), &stack);
  if (stack.length () == 0)
    return false;
  function_instance *s = get_function_instance_by_inline_stack (stack);
  if (s == NULL)
    return false;
  icall_target_map map;
  if (s->find_icall_target_map (stmt, &map) == 0)
    return false;
  for (icall_target_map::const_iterator iter = map.begin ();
       iter != map.end (); ++iter)
    info->targets[iter->first] = iter->second;
  return true;
}
Example no. 10
0
static unsigned
get_relative_location_for_stmt (gimple *stmt)
{
  location_t locus = gimple_location (stmt);
  if (LOCATION_LOCUS (locus) == UNKNOWN_LOCATION)
    return UNKNOWN_LOCATION;

  for (tree block = gimple_block (stmt); block && (TREE_CODE (block) == BLOCK);
       block = BLOCK_SUPERCONTEXT (block))
    if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (block)) != UNKNOWN_LOCATION)
      return get_combined_location (locus,
                                    get_function_decl_from_block (block));
  return get_combined_location (locus, current_function_decl);
}
Example no. 11
0
static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			    gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
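A hedged illustration of what rhs_to_tree yields for each RHS class (SSA
names invented):

  a_1 = b_2 + c_3;  /* GIMPLE_BINARY_RHS: fold_build2_loc of PLUS_EXPR  */
  d_4 = -e_5;       /* GIMPLE_UNARY_RHS:  build1 of NEGATE_EXPR         */
  f_6 = g_7;        /* GIMPLE_SINGLE_RHS: gimple_assign_rhs1 itself     */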
Example no. 12
0
static void
generate_memset_zero (gimple stmt, tree op0, tree nb_iter,
		      gimple_stmt_iterator bsi)
{
  tree addr_base, nb_bytes;
  bool res = false;
  gimple_seq stmt_list = NULL, stmts;
  gimple fn_call;
  tree mem, fn;
  struct data_reference *dr = XCNEW (struct data_reference);
  location_t loc = gimple_location (stmt);

  DR_STMT (dr) = stmt;
  DR_REF (dr) = op0;
  res = dr_analyze_innermost (dr);
  gcc_assert (res && stride_of_unit_type_p (DR_STEP (dr), TREE_TYPE (op0)));

  nb_bytes = build_size_arg_loc (loc, nb_iter, op0, &stmt_list);
  addr_base = size_binop_loc (loc, PLUS_EXPR, DR_OFFSET (dr), DR_INIT (dr));
  addr_base = fold_convert_loc (loc, sizetype, addr_base);

  /* Test for a negative stride, iterating over every element.  */
  if (integer_zerop (size_binop (PLUS_EXPR,
				 TYPE_SIZE_UNIT (TREE_TYPE (op0)),
				 fold_convert (sizetype, DR_STEP (dr)))))
    {
      addr_base = size_binop_loc (loc, MINUS_EXPR, addr_base,
				  fold_convert_loc (loc, sizetype, nb_bytes));
      addr_base = size_binop_loc (loc, PLUS_EXPR, addr_base,
				  TYPE_SIZE_UNIT (TREE_TYPE (op0)));
    }

  addr_base = fold_build2_loc (loc, POINTER_PLUS_EXPR,
			       TREE_TYPE (DR_BASE_ADDRESS (dr)),
			       DR_BASE_ADDRESS (dr), addr_base);
  mem = force_gimple_operand (addr_base, &stmts, true, NULL);
  gimple_seq_add_seq (&stmt_list, stmts);

  fn = build_fold_addr_expr (implicit_built_in_decls [BUILT_IN_MEMSET]);
  fn_call = gimple_build_call (fn, 3, mem, integer_zero_node, nb_bytes);
  gimple_seq_add_stmt (&stmt_list, fn_call);
  gsi_insert_seq_after (&bsi, stmt_list, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "generated memset zero\n");

  free_data_ref (dr);
}
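Conceptually this is the loop-distribution memset idiom; a hedged
source-level before/after sketch:

  /* before */                    /* after */
  for (i = 0; i < n; i++)         memset (a, 0, n * sizeof (a[0]));
    a[i] = 0;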
Example no. 13
0
gcov_type
autofdo_source_profile::get_callsite_total_count (
    struct cgraph_edge *edge) const
{
  inline_stack stack;
  stack.safe_push (std::make_pair (edge->callee->decl, 0));
  get_inline_stack (gimple_location (edge->call_stmt), &stack);

  function_instance *s = get_function_instance_by_inline_stack (stack);
  if (s == NULL
      || afdo_string_table->get_index (IDENTIFIER_POINTER (
             DECL_ASSEMBLER_NAME (edge->callee->decl))) != s->name ())
    return 0;

  return s->total_count ();
}
Example no. 14
0
static bool
afdo_set_bb_count (basic_block bb, const stmt_set &promoted)
{
  gimple_stmt_iterator gsi;
  edge e;
  edge_iterator ei;
  gcov_type max_count = 0;
  bool has_annotated = false;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      count_info info;
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_clobber_p (stmt) || is_gimple_debug (stmt))
        continue;
      if (afdo_source_profile->get_count_info (stmt, &info))
        {
          if (info.count > max_count)
            max_count = info.count;
          has_annotated = true;
          if (info.targets.size () > 0
              && promoted.find (stmt) == promoted.end ())
            afdo_vpt (&gsi, info.targets, false);
        }
    }

  if (!has_annotated)
    return false;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    afdo_source_profile->mark_annotated (gimple_location (gsi_stmt (gsi)));
  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi);
       gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();
      size_t i;
      for (i = 0; i < gimple_phi_num_args (phi); i++)
        afdo_source_profile->mark_annotated (gimple_phi_arg_location (phi, i));
    }
  FOR_EACH_EDGE (e, ei, bb->succs)
  afdo_source_profile->mark_annotated (e->goto_locus);

  bb->count = profile_count::from_gcov_type (max_count).afdo ();
  return true;
}
Example no. 15
0
static enum ssa_prop_result
copy_prop_visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  enum ssa_prop_result retval = SSA_PROP_VARYING;
  location_t loc = gimple_location (stmt);

  tree op0 = gimple_cond_lhs (stmt);
  tree op1 = gimple_cond_rhs (stmt);

  /* The only conditionals that we may be able to compute statically
     are predicates involving two SSA_NAMEs.  */
  if (TREE_CODE (op0) == SSA_NAME && TREE_CODE (op1) == SSA_NAME)
    {
      op0 = valueize_val (op0);
      op1 = valueize_val (op1);

      /* See if we can determine the predicate's value.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Trying to determine truth value of ");
	  fprintf (dump_file, "predicate ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}

      /* We can fold COND and get a useful result only when we have
	 the same SSA_NAME on both sides of a comparison operator.  */
      if (op0 == op1)
	{
	  tree folded_cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                          boolean_type_node, op0, op1);
	  if (folded_cond)
	    {
	      basic_block bb = gimple_bb (stmt);
	      *taken_edge_p = find_taken_edge (bb, folded_cond);
	      if (*taken_edge_p)
		retval = SSA_PROP_INTERESTING;
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS) && *taken_edge_p)
    fprintf (dump_file, "\nConditional will always take edge %d->%d\n",
	     (*taken_edge_p)->src->index, (*taken_edge_p)->dest->index);

  return retval;
}
Example no. 16
0
static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
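Schematically, the lowering merges identical returns into one labeled return
at the end of the function; a hedged before/after sketch:

  /* before */            /* after */
  if (p) return x;        if (p) goto <retlab>;
  ...                     ...
  return x;               <retlab>:
                          return x;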
Example no. 17
0
static void
compare_and_warn (gimple stmt, tree lhs, tree rhs)
{
  if (operand_equal_p (lhs, rhs, OEP_PURE_SAME))
    {
      location_t location;
      location = (gimple_has_location (stmt)
                  ? gimple_location (stmt)
                  : (DECL_P (lhs)
                     ? DECL_SOURCE_LOCATION (lhs)
                     : input_location));
      /* If LHS contains any tree node not currently supported by
         get_non_ssa_expr, simply emit a generic warning without
         specifying LHS in the message.  */
      lhs = get_non_ssa_expr (lhs);
      if (lhs)
        warning_at (location, 0, G_("%qE is assigned to itself"), lhs);
      else
        warning_at (location, 0, G_("self-assignment detected"));
    }
}
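A minimal input that takes the warning path above (assuming the pass is
enabled):

  int x = 0;
  x = x;   /* warning: 'x' is assigned to itself */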
Example no. 18
0
static unsigned int sancov_execute(void)
{
	basic_block bb;

	/* Remove this line when this plugin and kcov will be in the kernel.
	if (!strcmp(DECL_NAME_POINTER(current_function_decl), DECL_NAME_POINTER(sancov_fndecl)))
		return 0;
	*/

	FOR_EACH_BB_FN(bb, cfun) {
		const_gimple stmt;
		gcall *gcall;
		gimple_stmt_iterator gsi = gsi_after_labels(bb);

		if (gsi_end_p(gsi))
			continue;

		stmt = gsi_stmt(gsi);
		gcall = as_a_gcall(gimple_build_call(sancov_fndecl, 0));
		gimple_set_location(gcall, gimple_location(stmt));
		gsi_insert_before(&gsi, gcall, GSI_SAME_STMT);
	}

	return 0;
}
Example no. 19
0
/* Try to compare bounds value and address value
   used in the check CI.  If we can prove that the check
   always passes, then remove it.  */
static void
chkp_remove_check_if_pass (struct check_info *ci)
{
    int result = 0;

    if (dump_file && (dump_flags & TDF_DETAILS))
    {
        fprintf (dump_file, "Trying to remove check: ");
        print_gimple_stmt (dump_file, ci->stmt, 0, 0);
    }

    result = chkp_get_check_result (ci, ci->bounds);

    if (result == 1)
    {
        gimple_stmt_iterator i = gsi_for_stmt (ci->stmt);

        if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  action: delete check (always pass)\n");

        gsi_remove (&i, true);
        unlink_stmt_vdef (ci->stmt);
        release_defs (ci->stmt);
        ci->stmt = NULL;
    }
    else if (result == -1)
    {
        if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  action: keep check (always fail)\n");
        warning_at (gimple_location (ci->stmt), OPT_Wchkp,
                    "memory access check always fail");
    }
    else if (result == 0)
    {
        if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  action: keep check (cannot compute result)\n");
    }
}
Example no. 20
0
static void
set_location_for_edge (edge e)
{
  if (e->goto_locus)
    {
      set_curr_insn_source_location (e->goto_locus);
      set_curr_insn_block (e->goto_block);
    }
  else
    {
      basic_block bb = e->src;
      gimple_stmt_iterator gsi;

      do
	{
	  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      if (is_gimple_debug (stmt))
		continue;
	      if (gimple_has_location (stmt) || gimple_block (stmt))
		{
		  set_curr_insn_source_location (gimple_location (stmt));
		  set_curr_insn_block (gimple_block (stmt));
		  return;
		}
	    }
	  /* Nothing found in this basic block.  Make a half-assed attempt
	     to continue with another block.  */
	  if (single_pred_p (bb))
	    bb = single_pred (bb);
	  else
	    bb = e->src;
	}
      while (bb != e->src);
    }
}
Example no. 21
0
bool
autofdo_source_profile::update_inlined_ind_target (gcall *stmt,
                                                   count_info *info)
{
  if (dump_file)
    {
      fprintf (dump_file, "Checking indirect call -> direct call ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  if (LOCATION_LOCUS (gimple_location (stmt)) == cfun->function_end_locus)
    {
      if (dump_file)
	fprintf (dump_file, " good locus\n");
      return false;
    }

  count_info old_info;
  get_count_info (stmt, &old_info);
  gcov_type total = 0;
  for (icall_target_map::const_iterator iter = old_info.targets.begin ();
       iter != old_info.targets.end (); ++iter)
    total += iter->second;

  /* Program behavior changed, original promoted (and inlined) target is not
     hot any more. We will avoid promoting the original target.

     To check if original promoted target is still hot, we check the total
     count of the unpromoted targets (stored in TOTAL). If a callsite count
     (stored in INFO) is smaller than half of the total count, the original
     promoted target is considered not hot any more.  */
  if (info->count < total / 2)
    {
      if (dump_file)
	fprintf (dump_file, " not hot anymore %ld < %ld",
		 (long)info->count,
		 (long)total /2);
      return false;
    }

  inline_stack stack;
  get_inline_stack (gimple_location (stmt), &stack);
  if (stack.length () == 0)
    {
      if (dump_file)
	fprintf (dump_file, " no inline stack\n");
      return false;
    }
  function_instance *s = get_function_instance_by_inline_stack (stack);
  if (s == NULL)
    {
      if (dump_file)
	fprintf (dump_file, " function not found in inline stack\n");
      return false;
    }
  icall_target_map map;
  if (s->find_icall_target_map (stmt, &map) == 0)
    {
      if (dump_file)
	fprintf (dump_file, " no target map\n");
      return false;
    }
  for (icall_target_map::const_iterator iter = map.begin ();
       iter != map.end (); ++iter)
    info->targets[iter->first] = iter->second;
  if (dump_file)
    fprintf (dump_file, " looks good\n");
  return true;
}
Example no. 22
0
void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
        }
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      machine_mode hmode
	= mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
	  && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
	{
	  machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type
	    = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

	  res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
	  rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
				     GET_MODE_PRECISION (mode), NULL_RTX, 0);
	  hipart = gen_lowpart (mode, hipart);
	  res = gen_lowpart (mode, res);
	  rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
				      GET_MODE_PRECISION (mode) - 1,
				      NULL_RTX, 0);
	  /* RES is low half of the double width result, HIPART
	     the high half.  There was overflow if
	     HIPART is different from RES < 0 ? -1 : 0.  */
	  emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
				   false, done_label, PROB_VERY_LIKELY);
	}
      else if (hmode != BLKmode
	       && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
	{
	  rtx_code_label *large_op0 = gen_label_rtx ();
	  rtx_code_label *small_op0_large_op1 = gen_label_rtx ();
	  rtx_code_label *one_small_one_large = gen_label_rtx ();
	  rtx_code_label *both_ops_large = gen_label_rtx ();
	  rtx_code_label *after_hipart_neg = gen_label_rtx ();
	  rtx_code_label *after_lopart_neg = gen_label_rtx ();
	  rtx_code_label *do_overflow = gen_label_rtx ();
	  rtx_code_label *hipart_different = gen_label_rtx ();

	  unsigned int hprec = GET_MODE_PRECISION (hmode);
	  rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
				      NULL_RTX, 0);
	  hipart0 = gen_lowpart (hmode, hipart0);
	  rtx lopart0 = gen_lowpart (hmode, op0);
	  rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
				       NULL_RTX, 0);
	  rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
				      NULL_RTX, 0);
	  hipart1 = gen_lowpart (hmode, hipart1);
	  rtx lopart1 = gen_lowpart (hmode, op1);
	  rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
				       NULL_RTX, 0);

	  res = gen_reg_rtx (mode);

	  /* True if op0 resp. op1 are known to be in the range of
	     halfstype.  */
	  bool op0_small_p = false;
	  bool op1_small_p = false;
	  /* True if op0 resp. op1 are known to have all zeros or all ones
	     in the upper half of bits, but are not known to be
	     op{0,1}_small_p.  */
	  bool op0_medium_p = false;
	  bool op1_medium_p = false;
	  /* -1 if op{0,1} is known to be negative, 0 if it is known to be
	     nonnegative, 1 if unknown.  */
	  int op0_sign = 1;
	  int op1_sign = 1;

	  if (TREE_CODE (arg0) == SSA_NAME)
	    {
	      wide_int arg0_min, arg0_max;
	      if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
		{
		  unsigned int mprec0 = wi::min_precision (arg0_min, SIGNED);
		  unsigned int mprec1 = wi::min_precision (arg0_max, SIGNED);
		  if (mprec0 <= hprec && mprec1 <= hprec)
		    op0_small_p = true;
		  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
		    op0_medium_p = true;
		  if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
		    op0_sign = 0;
		  else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
		    op0_sign = -1;
		}
	    }
	  if (TREE_CODE (arg1) == SSA_NAME)
	    {
	      wide_int arg1_min, arg1_max;
	      if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
		{
		  unsigned int mprec0 = wi::min_precision (arg1_min, SIGNED);
		  unsigned int mprec1 = wi::min_precision (arg1_max, SIGNED);
		  if (mprec0 <= hprec && mprec1 <= hprec)
		    op1_small_p = true;
		  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
		    op1_medium_p = true;
		  if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
		    op1_sign = 0;
		  else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
		    op1_sign = -1;
		}
	    }

	  int smaller_sign = 1;
	  int larger_sign = 1;
	  if (op0_small_p)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op1_sign;
	    }
	  else if (op1_small_p)
	    {
	      smaller_sign = op1_sign;
	      larger_sign = op0_sign;
	    }
	  else if (op0_sign == op1_sign)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op0_sign;
	    }

	  if (!op0_small_p)
	    emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
				     false, large_op0, PROB_UNLIKELY);

	  if (!op1_small_p)
	    emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
				     false, small_op0_large_op1,
				     PROB_UNLIKELY);

	  /* If both op0 and op1 are sign extended from hmode to mode,
	     the multiplication will never overflow.  We can do just one
	     hmode x hmode => mode widening multiplication.  */
	  if (GET_CODE (lopart0) == SUBREG)
	    {
	      SUBREG_PROMOTED_VAR_P (lopart0) = 1;
	      SUBREG_PROMOTED_SET (lopart0, 0);
	    }
	  if (GET_CODE (lopart1) == SUBREG)
	    {
	      SUBREG_PROMOTED_VAR_P (lopart1) = 1;
	      SUBREG_PROMOTED_SET (lopart1, 0);
	    }
	  tree halfstype = build_nonstandard_integer_type (hprec, 0);
	  ops.op0 = make_tree (halfstype, lopart0);
	  ops.op1 = make_tree (halfstype, lopart1);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type = TREE_TYPE (arg0);
	  rtx thisres
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, thisres);
	  emit_jump (done_label);

	  emit_label (small_op0_large_op1);

	  /* If op0 is sign extended from hmode to mode, but op1 is not,
	     just swap the arguments and handle it as op1 sign extended,
	     op0 not.  */
	  rtx larger = gen_reg_rtx (mode);
	  rtx hipart = gen_reg_rtx (hmode);
	  rtx lopart = gen_reg_rtx (hmode);
	  emit_move_insn (larger, op1);
	  emit_move_insn (hipart, hipart1);
	  emit_move_insn (lopart, lopart0);
	  emit_jump (one_small_one_large);

	  emit_label (large_op0);

	  if (!op1_small_p)
	    emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
				     false, both_ops_large, PROB_UNLIKELY);

	  /* If op1 is sign extended from hmode to mode, but op0 is not,
	     prepare larger, hipart and lopart pseudos and handle it together
	     with small_op0_large_op1.  */
	  emit_move_insn (larger, op0);
	  emit_move_insn (hipart, hipart0);
	  emit_move_insn (lopart, lopart1);

	  emit_label (one_small_one_large);

	  /* lopart is the low part of the operand that is sign extended
	     to mode, larger is the other operand, hipart is the
	     high part of larger and lopart0 and lopart1 are the low parts
	     of both operands.
	     We perform lopart0 * lopart1 and lopart * hipart widening
	     multiplications.  */
	  tree halfutype = build_nonstandard_integer_type (hprec, 1);
	  ops.op0 = make_tree (halfutype, lopart0);
	  ops.op1 = make_tree (halfutype, lopart1);
	  rtx lo0xlo1
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

	  ops.op0 = make_tree (halfutype, lopart);
	  ops.op1 = make_tree (halfutype, hipart);
	  rtx loxhi = gen_reg_rtx (mode);
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (loxhi, tem);

	  /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
	  if (larger_sign == 0)
	    emit_jump (after_hipart_neg);
	  else if (larger_sign != -1)
	    emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
				     false, after_hipart_neg, PROB_EVEN);

	  tem = convert_modes (mode, hmode, lopart, 1);
	  tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  emit_label (after_hipart_neg);

	  /* if (lopart < 0) loxhi -= larger;  */
	  if (smaller_sign == 0)
	    emit_jump (after_lopart_neg);
	  else if (smaller_sign != -1)
	    emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
				     false, after_lopart_neg, PROB_EVEN);

	  tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  emit_label (after_lopart_neg);

	  /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
	  tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  /* if (loxhi >> (bitsize / 2)
		 == (hmode) loxhi >> (bitsize / 2 - 1))  */
	  rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
					  NULL_RTX, 0);
	  hipartloxhi = gen_lowpart (hmode, hipartloxhi);
	  rtx lopartloxhi = gen_lowpart (hmode, loxhi);
	  rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
					   hprec - 1, NULL_RTX, 0);

	  emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
				   hmode, false, do_overflow,
				   PROB_VERY_UNLIKELY);

	  /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
	  rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
					   NULL_RTX, 1);
	  tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

	  tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
				     1, OPTAB_DIRECT);
	  if (tem != res)
	    emit_move_insn (res, tem);
	  emit_jump (done_label);

	  emit_label (both_ops_large);

	  /* If both operands are large (not sign extended from hmode),
	     then perform the full multiplication which will be the result
	     of the operation.  The only cases which don't overflow are
	     some cases where both hipart0 and highpart1 are 0 or -1.  */
	  ops.code = MULT_EXPR;
	  ops.op0 = make_tree (TREE_TYPE (arg0), op0);
	  ops.op1 = make_tree (TREE_TYPE (arg0), op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);

	  if (!op0_medium_p)
	    {
	      tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
					 NULL_RTX, 1, OPTAB_DIRECT);
	      emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
				       true, do_error, PROB_VERY_UNLIKELY);
	    }

	  if (!op1_medium_p)
	    {
	      tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
					 NULL_RTX, 1, OPTAB_DIRECT);
	      emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
				       true, do_error, PROB_VERY_UNLIKELY);
	    }

	  /* At this point hipart{0,1} are both in [-1, 0].  If they are the
	     same, overflow happened if res is negative, if they are different,
	     overflow happened if res is positive.  */
	  if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
	    emit_jump (hipart_different);
	  else if (op0_sign == 1 || op1_sign == 1)
	    emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
				     true, hipart_different, PROB_EVEN);

	  emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
				   do_error, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);

	  emit_label (hipart_different);

	  emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
				   do_error, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);

	  emit_label (do_overflow);

	  /* Overflow, do full multiplication and fallthru into do_error.  */
	  ops.op0 = make_tree (TREE_TYPE (arg0), op0);
	  ops.op1 = make_tree (TREE_TYPE (arg0), op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);
	}
      else
	{
	  ops.code = MULT_EXPR;
	  ops.type = TREE_TYPE (arg0);
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_jump (done_label);
	}
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
				     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
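The widening-multiply test above can be summarized in plain C; a hedged
sketch for 32-bit operands (mul_overflows_i32 is a hypothetical helper, and
two's complement is assumed):

  #include <stdint.h>

  static int
  mul_overflows_i32 (int32_t a, int32_t b)
  {
    int64_t res = (int64_t) a * b;        /* WIDEN_MULT_EXPR  */
    int32_t lo = (int32_t) res;           /* low half         */
    int32_t hi = (int32_t) (res >> 32);   /* high half        */
    /* Overflow iff HI differs from the sign extension of LO,
       i.e. from LO < 0 ? -1 : 0, exactly as in the comment above.  */
    return hi != (lo < 0 ? -1 : 0);
  }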
Example no. 23
0
static int
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  location_t loc = gimple_location (stmt);
  int did_something = 0;

  do {
    tree tmp = NULL_TREE;
    tree cond = gimple_assign_rhs1 (stmt);
    tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
    gimple def_stmt;
    bool single_use0_p = false, single_use1_p = false;

    /* We can do tree combining on SSA_NAME and comparison expressions.  */
    if (COMPARISON_CLASS_P (cond)
	&& TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
      {
	/* For comparisons use the first operand, that is likely to
	   simplify comparisons against constants.  */
	name = TREE_OPERAND (cond, 0);
	def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
	if (def_stmt && can_propagate_from (def_stmt))
	  {
	    tree op1 = TREE_OPERAND (cond, 1);
	    rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
	    tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
					  boolean_type_node,
					  rhs0, op1, !single_use0_p);
	  }
	/* If that wasn't successful, try the second operand.  */
	if (tmp == NULL_TREE
	    && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
	  {
	    tree op0 = TREE_OPERAND (cond, 0);
	    name = TREE_OPERAND (cond, 1);
	    def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
	    if (!def_stmt || !can_propagate_from (def_stmt))
	      return did_something;

	    rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
	    tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
					  boolean_type_node,
					  op0, rhs1, !single_use1_p);
	  }
	/* If that wasn't successful either, try both operands.  */
	if (tmp == NULL_TREE
	    && rhs0 != NULL_TREE
	    && rhs1 != NULL_TREE)
	  tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
					boolean_type_node,
					rhs0,
					fold_convert_loc (loc,
							  TREE_TYPE (rhs0),
							  rhs1),
					!(single_use0_p && single_use1_p));
      }
    else if (TREE_CODE (cond) == SSA_NAME)
      {
	name = cond;
	def_stmt = get_prop_source_stmt (name, true, NULL);
	if (!def_stmt || !can_propagate_from (def_stmt))
	  return did_something;

	rhs0 = gimple_assign_rhs1 (def_stmt);
	tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
				      build_int_cst (TREE_TYPE (rhs0), 0),
				      false);
      }

    if (tmp)
      {
	if (dump_file && tmp)
	  {
	    fprintf (dump_file, "  Replaced '");
	    print_generic_expr (dump_file, cond, 0);
	    fprintf (dump_file, "' with '");
	    print_generic_expr (dump_file, tmp, 0);
	    fprintf (dump_file, "'\n");
	  }

	gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
	stmt = gsi_stmt (*gsi_p);
	update_stmt (stmt);

	/* Remove defining statements.  */
	remove_prop_source_from_use (name, NULL);

	if (is_gimple_min_invariant (tmp))
	  did_something = 2;
	else if (did_something == 0)
	  did_something = 1;

	/* Continue combining.  */
	continue;
      }

    break;
  } while (1);

  return did_something;
}
Example no. 24
0
void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
        }
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx_code_label *sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove one of the arguments (for MINUS_EXPR only
	 the second operand, as subtraction is not commutative) is always
	 non-negative or always negative, we can do just one comparison
	 and conditional jump instead of 2 at runtime, 3 present in the
	 emitted code.  If one of the arguments is CONST_INT, all we
	 need is to make sure it is op1, then the first
	 emit_cmp_and_jump_insns will be just folded.  Otherwise try
	 to use range info if available.  */
      if (code == PLUS_EXPR && CONST_INT_P (op0))
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}
      else if (CONST_INT_P (op1))
	;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
	{
	  wide_int arg0_min, arg0_max;
	  if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
	    {
	      if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
		pos_neg = 1;
	      else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
		pos_neg = 2;
	    }
	  if (pos_neg != 3)
	    {
	      rtx tem = op0;
	      op0 = op1;
	      op1 = tem;
	    }
	}
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
	{
	  wide_int arg1_min, arg1_max;
	  if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
	    {
	      if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
		pos_neg = 1;
	      else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
		pos_neg = 2;
	    }
	}

      /* If the op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
	emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
				 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
	emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
				 NULL_RTX, mode, false, done_label,
				 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
	{
	  emit_jump (do_error);

	  emit_label (sub_check);
	}

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
	emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
				 NULL_RTX, mode, false, done_label,
				 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
				     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
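The unsigned-add-then-compare test can likewise be summarized in plain C; a
hedged sketch for 32-bit addition (add_overflows_i32 is a hypothetical
helper, two's complement assumed; MINUS_EXPR mirrors it with the comparisons
swapped):

  #include <stdint.h>

  static int
  add_overflows_i32 (int32_t a, int32_t b)
  {
    /* At the RTL level the addition is performed unsigned.  */
    int32_t res = (int32_t) ((uint32_t) a + (uint32_t) b);
    /* b >= 0: overflow iff res < a;  b < 0: overflow iff res > a.  */
    return b >= 0 ? res < a : res > a;
  }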
Example no. 25
0
static void
my_dump_gimple(gimple gnode, gimple_stmt_iterator *ptrgsi)
{
    int gcode;
    tree tnode;
    tree funcdecl;
    tree desc_node;
    tree ptr_desc_node;
    tree t;
    tree tmp_var;
    tree const_char_restrict_ptr_type_node;
    gimple tmp_gstmt;
    gimple new_gnode;
    const char *hellocstr = "Hello, GCC!\n";
    int i;
    struct c_binding *b;
    expanded_location xloc;

    /*
     * Extract the Gimple Code from a gimple node
     */

    gcode = gimple_code(gnode);

    /*
     * Get the line number of the corresponding
     * source code from a gimple node
     */
    if(gimple_has_location(gnode))
    {
        xloc = expand_location(gimple_location(gnode));
        printf("line %d:", xloc.line);
    }
    printf("\t\t\t\t%s\n", gimple_code_name[gcode]);
    
    switch(gcode)
    {
        case GIMPLE_ASSIGN:
            /*
             * Add a printf("Hello, GCC!\n"); statement
             * after the first assignment we encounter.
             * If yes equals 1, we have already added the
             * statement and do not need to add it again.
             */
            if(!yes)
            {
                /*
                 * Since printf is a builtin function, we need
                 * to get the function declaration using 
                 * built_in_decls[]. The index number can be
                 * found in gcc source gcc/builtins.def
                 */
                funcdecl = built_in_decls[BUILT_IN_PRINTF];

                if(funcdecl == NULL_TREE)
                {
                    printf("cannot find printf\n");
                }
                else
                {
                    /*
                     * In gimple, every statement is simplified into
                     * three-operand form, and our printf() statement
                     * is changed into the following two gimple statements:
                     *
                     * <D.XXX> = (const char * restrict) &"Hello, GCC!\n"[0]
                     * printf(<D.XXX>);
                     *
                     * Note that <D.XXX> is a temporary variable; we can
                     * actually use any name we like as long as there is
                     * no conflict.
                     */

                    /*
                     * Generate a STRING_CST, the value is "Hello, GCC!\n"
                     */
                    desc_node = build_string(strlen(hellocstr), hellocstr);

                    /*
                     * Two points to notice here:
                     * 1. A STRING_CST built by build_string() does
                     *    not have TREE_TYPE set, so we need to
                     *    set it manually.
                     * 2. build_string() adds a trailing '\0'
                     *    when building the STRING_CST, so we do
                     *    not need to take care of it.
                     */
                    TREE_TYPE(desc_node) = build_array_type(
                            char_type_node, 
                            build_index_type(
                                build_int_cst(NULL_TREE, 
                                    strlen(hellocstr))));

                    /*
                     * Define a const char * restrict type node
                     * here for the conversion.
                     * I'm not sure why we need to add a restrict
                     * attribute, but GCC really does this when
                     * converting a STRING_CST from AST to Gimple.
                     */
                    const_char_restrict_ptr_type_node = 
                        build_qualified_type(
                            build_pointer_type(
                                    build_qualified_type(
                                        char_type_node,
                                        TYPE_QUAL_CONST)),
                            TYPE_QUAL_RESTRICT);

                    /*
                     * In the AST, a use of a STRING_CST takes the form
                     * <ADDR_EXPR<STRING_CST>>, but when we turn to
                     * gimple, it becomes
                     * <ADDR_EXPR<ARRAY_REF<STRING_CST>>>.
                     * So we need to do a conversion here.
                     */

                    /*
                     * First wrap STRING_CST with ARRAY_REF
                     */
                    t = build4(ARRAY_REF, char_type_node, 
                            desc_node, build_int_cst(NULL_TREE, 0), 
                            NULL, NULL);

                    /*
                     * Second wrap ARRAY_REF with ADDR_EXPR
                     */
                    ptr_desc_node = build1(ADDR_EXPR, 
                            const_char_restrict_ptr_type_node,
                            t);

                    /*
                     * I'm not sure why we need to use fold_convert()
                     * here, but if we do not, the compilation
                     * does not succeed.
                     */
                    ptr_desc_node = fold_convert(
                            const_char_restrict_ptr_type_node, 
                            ptr_desc_node);

                    /*
                     * If is_gimple_min_invariant(ptr_desc_node)
                     * is true, we have built a correct argument; otherwise
                     * the argument is not suitable for a gimple call.
                     */
                    if(!is_gimple_min_invariant(ptr_desc_node))
                    {
                        printf("Something wrong with is_gimple_min_invariant\n");
                        return;
                    }

                    /*
                     * This requests a temporary variable
                     */
                    tmp_var = make_rename_temp(
                            const_char_restrict_ptr_type_node,
                            "plugin_var");

                    /*
                     * Build a gimple statement. Still remember that?
                     * <D.XXX> = (const char * restrict) "Hello, GCC!\n"
                     */
                    tmp_gstmt = gimple_build_assign(tmp_var, 
                                                    ptr_desc_node);

                    /*
                     * Check if the gimple statement is correct
                     */
                    if(!is_gimple_assign(tmp_gstmt))
                    {
                        printf("tmp_gstmt is invalid\n");
                    }

                    printf("Insert gimple statment:");
                    print_gimple_stmt(stdout, tmp_gstmt, 0,
                                TDF_DETAILS | TDF_VERBOSE | TDF_TREE);
                    /*
                     * Insert the gimple statement into the basic block
                     */
                    gsi_insert_after(ptrgsi, tmp_gstmt, 
                            GSI_NEW_STMT);

                    if(is_gimple_operand(tmp_var))
                    {

                        printf("begin to insert printf\n");
                        yes = 1;
                        printf("Insert gimple statment:");
                        /*
                         * Insert the gimple statement printf
                         * into the basic block
                         */
                        new_gnode = gimple_build_call(
                                funcdecl, 1, tmp_var);
                        print_gimple_stmt(stdout, new_gnode, 0, 0);
                        gsi_insert_after(ptrgsi, new_gnode, 
                                GSI_NEW_STMT); 
                    }
                    else
                    {
                        print_generic_stmt(stdout, ptr_desc_node, 
                                TDF_DETAILS | TDF_VERBOSE | TDF_TREE);
                        printf("Not Gimple Operands\n");
                    }

                    /*
                     * Since we have more than one consecutive statement
                     * to insert, we could actually build a gimple
                     * sequence, insert all statements into the sequence,
                     * and then insert the sequence into the basic block.
                     * This seems to be a better method.
                     */
                }
            }
            else
            {
            }
            break;
        default:
            break;
    }
}
Example no. 26
0
tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple *ret;
  tree tree_ret;
  gimple *stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
	input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
	return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (as_a <gbind *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (
				   as_a <gcatch *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
		             callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (eh_else_stmt),
				   callback_stmt, callback_op, wi);
	if (ret)
	  return wi->callback_result;
	ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (eh_else_stmt),
				   callback_stmt, callback_op, wi);
	if (ret)
	  return wi->callback_result;
      }
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
	                     wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
	                     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
		             callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (
				   as_a <gtransaction *> (stmt)),
			     callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
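A minimal, hedged sketch of a statement callback for the walker above
(count_calls_cb is an invented name; user data travels through WI->INFO):

  static tree
  count_calls_cb (gimple_stmt_iterator *gsi, bool *handled_ops,
		  struct walk_stmt_info *wi)
  {
    if (is_gimple_call (gsi_stmt (*gsi)))
      ++*(int *) wi->info;
    *handled_ops = true;	/* skip the operand walk for this stmt */
    return NULL_TREE;		/* NULL keeps the walk going */
  }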
Example no. 27
0
static void
output_gimple_stmt (struct output_block *ob, gimple stmt)
{
  unsigned i;
  enum gimple_code code;
  enum LTO_tags tag;
  struct bitpack_d bp;
  histogram_value hist;

  /* Emit identifying tag.  */
  code = gimple_code (stmt);
  tag = lto_gimple_code_to_tag (code);
  streamer_write_record_start (ob, tag);

  /* Emit the tuple header.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_var_len_unsigned (&bp, gimple_num_ops (stmt));
  bp_pack_value (&bp, gimple_no_warning_p (stmt), 1);
  if (is_gimple_assign (stmt))
    bp_pack_value (&bp, gimple_assign_nontemporal_move_p (stmt), 1);
  bp_pack_value (&bp, gimple_has_volatile_ops (stmt), 1);
  hist = gimple_histogram_value (cfun, stmt);
  bp_pack_value (&bp, hist != NULL, 1);
  bp_pack_var_len_unsigned (&bp, stmt->gsbase.subcode);

  /* Emit location information for the statement.  */
  stream_output_location (ob, &bp, LOCATION_LOCUS (gimple_location (stmt)));
  streamer_write_bitpack (&bp);

  /* Emit the lexical block holding STMT.  */
  stream_write_tree (ob, gimple_block (stmt), true);

  /* Emit the operands.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_RESX:
      streamer_write_hwi (ob, gimple_resx_region (stmt));
      break;

    case GIMPLE_EH_MUST_NOT_THROW:
      stream_write_tree (ob, gimple_eh_must_not_throw_fndecl (stmt), true);
      break;

    case GIMPLE_EH_DISPATCH:
      streamer_write_hwi (ob, gimple_eh_dispatch_region (stmt));
      break;

    case GIMPLE_ASM:
      streamer_write_uhwi (ob, gimple_asm_ninputs (stmt));
      streamer_write_uhwi (ob, gimple_asm_noutputs (stmt));
      streamer_write_uhwi (ob, gimple_asm_nclobbers (stmt));
      streamer_write_uhwi (ob, gimple_asm_nlabels (stmt));
      streamer_write_string (ob, ob->main_stream, gimple_asm_string (stmt),
			     true);
      /* Fallthru  */

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN:
    case GIMPLE_SWITCH:
    case GIMPLE_LABEL:
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_DEBUG:
      for (i = 0; i < gimple_num_ops (stmt); i++)
	{
	  tree op = gimple_op (stmt, i);
	  tree *basep = NULL;
	  /* Wrap all uses of non-automatic variables inside MEM_REFs
	     so that we do not have to deal with type mismatches on
	     merged symbols during IL read-in.  The first operand
	     of GIMPLE_DEBUG must be a decl, not MEM_REF, though.  */
	  if (op && (i || !is_gimple_debug (stmt)))
	    {
	      basep = &op;
	      while (handled_component_p (*basep))
		basep = &TREE_OPERAND (*basep, 0);
	      if (TREE_CODE (*basep) == VAR_DECL
		  && !auto_var_in_fn_p (*basep, current_function_decl)
		  && !DECL_REGISTER (*basep))
		{
		  bool volatilep = TREE_THIS_VOLATILE (*basep);
		  *basep = build2 (MEM_REF, TREE_TYPE (*basep),
				   build_fold_addr_expr (*basep),
				   build_int_cst (build_pointer_type
						  (TREE_TYPE (*basep)), 0));
		  TREE_THIS_VOLATILE (*basep) = volatilep;
		}
	      else
		basep = NULL;
	    }
	  stream_write_tree (ob, op, true);
	  /* Restore the original base if we wrapped it inside a MEM_REF.  */
	  if (basep)
	    *basep = TREE_OPERAND (TREE_OPERAND (*basep, 0), 0);
	}
      if (is_gimple_call (stmt))
	{
	  if (gimple_call_internal_p (stmt))
	    streamer_write_enum (ob->main_stream, internal_fn,
				 IFN_LAST, gimple_call_internal_fn (stmt));
	  else
	    stream_write_tree (ob, gimple_call_fntype (stmt), true);
	}
      break;

    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_TRANSACTION:
      gcc_assert (gimple_transaction_body (stmt) == NULL);
      stream_write_tree (ob, gimple_transaction_label (stmt), true);
      break;

    default:
      gcc_unreachable ();
    }
  if (hist)
    stream_out_histogram_value (ob, hist);
}
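The tuple header above relies on the streamer's bit-packing helpers. As a rough illustration of that idiom (the packed values below are made up, and OB is assumed to be an initialized output_block):

/* Sketch of the bitpack pattern used by output_gimple_stmt.  */
struct bitpack_d bp = bitpack_create (ob->main_stream);
bp_pack_var_len_unsigned (&bp, 3);	/* e.g. a small operand count */
bp_pack_value (&bp, 1, 1);		/* a one-bit flag */
streamer_write_bitpack (&bp);		/* flush the packed bits */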
Example no. 28
static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
	 sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
	break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* The number of executions of the function has been reduced by the
     tail call.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), e->count,
		    EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
				first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
	 idx = 0, gsi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
	continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
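Conceptually, eliminate_tail_call turns a self-recursive tail call into a back edge to the function entry, with the PHI nodes added above supplying the fresh argument values. At the source level the effect corresponds to the following sketch (the function names are illustrative):

/* Before: a self tail call.  */
static int
sum (int n, int acc)
{
  if (n == 0)
    return acc;
  return sum (n - 1, acc + n);	/* tail call */
}

/* After, conceptually: the call becomes a jump back to the start,
   and the parameters are updated like PHI arguments.  */
static int
sum_iter (int n, int acc)
{
  for (;;)
    {
      if (n == 0)
	return acc;
      int next_n = n - 1, next_acc = acc + n;
      n = next_n;
      acc = next_acc;
    }
}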
Example no. 29
void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
        }
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  At the RTL level the negation simply
	 wraps (two's complement), so overflow must be detected
	 separately.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
			       done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
				     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
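The expander above implements the rule that signed negation overflows exactly when the operand equals the most negative value of its type. A C-level sketch of the emitted check, with a hypothetical handler standing in for the ubsan runtime call (or trap):

#include <limits.h>

extern void handle_negate_overflow (int);	/* hypothetical hook */

/* Semantic sketch of the check for a 32-bit int.  */
static int
checked_neg (int x)
{
  if (x == INT_MIN)			/* -INT_MIN is not representable */
    handle_negate_overflow (x);
  return -x;				/* the RTL negation itself wraps */
}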
Example no. 30
static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
			       gimple_stmt_iterator *use_stmt_gsi,
			       bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  tree *rhsp, *lhsp;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separately, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
	  || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
	 do the propagation to the current statement.  In particular
	 we can end up needing a conversion of a non-invariant
	 address, which cannot be done in a single statement.  */
      if (!single_use_p
	  || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
	      && (!is_gimple_min_invariant (def_rhs)
		  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
		      && (TYPE_PRECISION (TREE_TYPE (lhs))
			  > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
	return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
	gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
	gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == INDIRECT_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      if (may_propagate_address_into_dereference (def_rhs, lhs)
	  && (lhsp != gimple_assign_lhs_ptr (use_stmt)
	      || useless_type_conversion_p
	           (TREE_TYPE (TREE_OPERAND (def_rhs, 0)), TREE_TYPE (rhs))))
	{
	  *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  fold_stmt_inplace (use_stmt);
	  tidy_after_forward_propagate_addr (use_stmt);

	  /* Continue propagating into the RHS if this was not the only use.  */
	  if (single_use_p)
	    return true;
	}
      else
	/* We can have a struct assignment dereferencing our name twice.
	   Note that we didn't propagate into the lhs to not falsely
	   claim we did when propagating into the rhs.  */
	res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhsp = gimple_assign_rhs1_ptr (use_stmt);
  while (handled_component_p (*rhsp)
	 || TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF
      && TREE_OPERAND (rhs, 0) == name
      && may_propagate_address_into_dereference (def_rhs, rhs))
    {
      *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return res;
    }

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and try to
     create a VCE and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF
      && TREE_OPERAND (rhs, 0) == name
      && TYPE_SIZE (TREE_TYPE (rhs))
      && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
      /* Function decls should not be used for VCE either, as the address
         may designate a function descriptor rather than the actual
         function code.  */
      && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
      /* We should not convert volatile loads to non-volatile loads.  */
      && !TYPE_VOLATILE (TREE_TYPE (rhs))
      && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
      && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
			  TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0)
      /* Make sure we only do TBAA compatible replacements.  */
      && get_alias_set (TREE_OPERAND (def_rhs, 0)) == get_alias_set (rhs))
   {
     tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
     new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
     if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
       {
	 /* If we have folded the VIEW_CONVERT_EXPR then the result is only
	    valid if we can replace the whole rhs of the use statement.  */
	 if (rhs != gimple_assign_rhs1 (use_stmt))
	   return false;
	 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, NULL,
					     true, GSI_NEW_STMT);
	 gimple_assign_set_rhs1 (use_stmt, new_rhs);
	 tidy_after_forward_propagate_addr (use_stmt);
	 return res;
       }
     /* If the defining rhs comes from an indirect reference, then do not
        convert into a VIEW_CONVERT_EXPR.  */
     def_rhs_base = TREE_OPERAND (def_rhs, 0);
     while (handled_component_p (def_rhs_base))
       def_rhs_base = TREE_OPERAND (def_rhs_base, 0);
     if (!INDIRECT_REF_P (def_rhs_base))
       {
	 /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
	    reference.  Place it there and fold the thing.  */
	 *rhsp = new_rhs;
	 fold_stmt_inplace (use_stmt);
	 tidy_after_forward_propagate_addr (use_stmt);
	 return res;
       }
   }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do. */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if (TREE_CODE (array_ref) != ARRAY_REF
      || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
      || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
     of the elements in X into &x[C1 + C2/element size].  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
	  				       TREE_TYPE (def_rhs),
					       def_rhs, rhs2);
      if (new_rhs)
	{
	  tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
	  new_rhs = unshare_expr (new_rhs);
	  if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
	    {
	      if (!is_gimple_min_invariant (new_rhs))
		new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
						    true, NULL_TREE,
						    true, GSI_SAME_STMT);
	      new_rhs = fold_convert (type, new_rhs);
	    }
	  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
	  use_stmt = gsi_stmt (*use_stmt_gsi);
	  update_stmt (use_stmt);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return true;
	}
    }

  /* Try to optimize &x[0] p+ OFFSET, where OFFSET is a multiplication
     of an index by the size of the array elements that has then been
     converted to the proper type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      && integer_zerop (TREE_OPERAND (array_ref, 1))
      && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
	 different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
							     use_stmt_gsi);
  return false;
}
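As a worked illustration of the constant-offset case handled above (the variable names are purely illustrative), propagating &x[C1] through a POINTER_PLUS_EXPR with constant byte offset C2 re-expresses the arithmetic as direct array indexing:

int x[10];

void
example (void)
{
  /* Before forwprop: arithmetic on the address of element 2.  */
  int *p = &x[2];	/* def_rhs = &x[2], C1 = 2 */
  int *q = p + 3;	/* POINTER_PLUS_EXPR, byte offset C2 = 12 */

  /* After the fold: &x[C1 + C2 / sizeof (int)] = &x[5].  */
  int *q_folded = &x[5];

  (void) q; (void) q_folded;
}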