Example #1
0
static bool
remove_exits_and_undefined_stmts (struct loop *loop, unsigned int npeeled)
{
  struct nb_iter_bound *elt;
  bool changed = false;

  for (elt = loop->bounds; elt; elt = elt->next)
    {
      /* If statement is known to be undefined after peeling, turn it
	 into unreachable (or trap when debugging experience is supposed
	 to be good).  */
      if (!elt->is_exit
	  && wi::ltu_p (elt->bound, npeeled))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (elt->stmt);
	  gcall *stmt = gimple_build_call
	      (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	  gimple_set_location (stmt, gimple_location (elt->stmt));
	  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
	  split_block (gimple_bb (stmt), stmt);
	  changed = true;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forced statement unreachable: ");
	      print_gimple_stmt (dump_file, elt->stmt, 0, 0);
	    }
	}
      /* If we know the exit will be taken after peeling, update.  */
      else if (elt->is_exit
	       && wi::leu_p (elt->bound, npeeled))
	{
	  basic_block bb = gimple_bb (elt->stmt);
	  edge exit_edge = EDGE_SUCC (bb, 0);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forced exit to be taken: ");
	      print_gimple_stmt (dump_file, elt->stmt, 0, 0);
	    }
	  if (!loop_exit_edge_p (loop, exit_edge))
	    exit_edge = EDGE_SUCC (bb, 1);
	  gcc_checking_assert (loop_exit_edge_p (loop, exit_edge));
	  gcond *cond_stmt = as_a <gcond *> (elt->stmt);
	  if (exit_edge->flags & EDGE_TRUE_VALUE)
	    gimple_cond_make_true (cond_stmt);
	  else
	    gimple_cond_make_false (cond_stmt);
	  update_stmt (cond_stmt);
	  changed = true;
	}
    }
  return changed;
}
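
The exit-forcing branch above can be read as a small idiom of its own: locate the controlling GIMPLE_COND, fold it to a constant, and let CFG cleanup remove the dead path. Below is a minimal sketch of that idiom, assuming the usual GCC internal headers and an older API level where last_stmt is still available; force_exit_taken is a hypothetical helper, not part of the pass above.

static void
force_exit_taken (struct loop *loop, edge exit)
{
  /* The exit is controlled by the GIMPLE_COND that ends its source block.  */
  gcond *cond = as_a <gcond *> (last_stmt (exit->src));
  gcc_checking_assert (loop_exit_edge_p (loop, exit));
  if (exit->flags & EDGE_TRUE_VALUE)
    gimple_cond_make_true (cond);
  else
    gimple_cond_make_false (cond);
  update_stmt (cond);
}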
Example #2
0
static unsigned int sancov_execute(void)
{
	basic_block bb;

	/* Remove this line once this plugin and kcov are in the kernel.
	if (!strcmp(DECL_NAME_POINTER(current_function_decl), DECL_NAME_POINTER(sancov_fndecl)))
		return 0;
	*/

	FOR_EACH_BB_FN(bb, cfun) {
		const_gimple stmt;
		gcall *gcall;
		gimple_stmt_iterator gsi = gsi_after_labels(bb);

		if (gsi_end_p(gsi))
			continue;

		stmt = gsi_stmt(gsi);
		gcall = as_a_gcall(gimple_build_call(sancov_fndecl, 0));
		gimple_set_location(gcall, gimple_location(stmt));
		gsi_insert_before(&gsi, gcall, GSI_SAME_STMT);
	}

	return 0;
}
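
Seen from the source level, the pass simply prepends a call to the coverage hook at the head of every basic block; the hook is whatever sancov_fndecl was built to refer to (typically __sanitizer_cov_trace_pc). The following is a conceptual sketch, not compiler output; do_work is a placeholder.

extern void __sanitizer_cov_trace_pc(void);
extern void do_work(int);

void example(int x)
{
	__sanitizer_cov_trace_pc();		/* entry block */
	if (x) {
		__sanitizer_cov_trace_pc();	/* then block */
		do_work(x);
	}
}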
Example #3
0
static gimple
input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in,
		   struct function *fn, enum LTO_tags tag)
{
  gimple stmt;
  enum gimple_code code;
  unsigned HOST_WIDE_INT num_ops;
  size_t i;
  struct bitpack_d bp;

  code = lto_tag_to_gimple_code (tag);

  /* Read the tuple header.  */
  bp = streamer_read_bitpack (ib);
  num_ops = bp_unpack_var_len_unsigned (&bp);
  stmt = gimple_alloc (code, num_ops);
  stmt->gsbase.no_warning = bp_unpack_value (&bp, 1);
  if (is_gimple_assign (stmt))
    stmt->gsbase.nontemporal_move = bp_unpack_value (&bp, 1);
  stmt->gsbase.has_volatile_ops = bp_unpack_value (&bp, 1);
  stmt->gsbase.subcode = bp_unpack_var_len_unsigned (&bp);

  /* Read location information.  */
  gimple_set_location (stmt, lto_input_location (ib, data_in));

  /* Read lexical block reference.  */
  gimple_set_block (stmt, stream_read_tree (ib, data_in));

  /* Read in all the operands.  */
  switch (code)
    {
    case GIMPLE_RESX:
      gimple_resx_set_region (stmt, streamer_read_hwi (ib));
      break;

    case GIMPLE_EH_MUST_NOT_THROW:
      gimple_eh_must_not_throw_set_fndecl (stmt, stream_read_tree (ib, data_in));
      break;

    case GIMPLE_EH_DISPATCH:
      gimple_eh_dispatch_set_region (stmt, streamer_read_hwi (ib));
      break;

    case GIMPLE_ASM:
      {
	/* FIXME lto.  Move most of this into a new gimple_asm_set_string().  */
	tree str;
	stmt->gimple_asm.ni = streamer_read_uhwi (ib);
	stmt->gimple_asm.no = streamer_read_uhwi (ib);
	stmt->gimple_asm.nc = streamer_read_uhwi (ib);
	stmt->gimple_asm.nl = streamer_read_uhwi (ib);
	str = streamer_read_string_cst (data_in, ib);
	stmt->gimple_asm.string = TREE_STRING_POINTER (str);
      }
      /* Fallthru  */

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN:
    case GIMPLE_SWITCH:
    case GIMPLE_LABEL:
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_DEBUG:
      for (i = 0; i < num_ops; i++)
	{
	  tree op = stream_read_tree (ib, data_in);
	  gimple_set_op (stmt, i, op);
	  if (!op)
	    continue;

	  /* Fixup FIELD_DECLs in COMPONENT_REFs, they are not handled
	     by decl merging.  */
	  if (TREE_CODE (op) == ADDR_EXPR)
	    op = TREE_OPERAND (op, 0);
	  while (handled_component_p (op))
	    {
	      if (TREE_CODE (op) == COMPONENT_REF)
		{
		  tree field, type, tem;
		  tree closest_match = NULL_TREE;
		  field = TREE_OPERAND (op, 1);
		  type = DECL_CONTEXT (field);
		  for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
		    {
		      if (TREE_CODE (tem) != FIELD_DECL)
			continue;
		      if (tem == field)
			break;
		      if (DECL_NONADDRESSABLE_P (tem)
			  == DECL_NONADDRESSABLE_P (field)
			  && gimple_compare_field_offset (tem, field))
			{
			  if (types_compatible_p (TREE_TYPE (tem),
						  TREE_TYPE (field)))
			    break;
			  else
			    closest_match = tem;
			}
		    }
		  /* In case of type mismatches across units we can fail
		     to unify some types and thus not find a proper
		     field-decl here.  */
		  if (tem == NULL_TREE)
		    {
		      /* Thus, emit an ODR violation warning.  */
		      if (warning_at (gimple_location (stmt), 0,
				      "use of type %<%E%> with two mismatching "
				      "declarations at field %<%E%>",
				      type, TREE_OPERAND (op, 1)))
			{
			  if (TYPE_FIELDS (type))
			    inform (DECL_SOURCE_LOCATION (TYPE_FIELDS (type)),
				    "original type declared here");
			  inform (DECL_SOURCE_LOCATION (TREE_OPERAND (op, 1)),
				  "field in mismatching type declared here");
			  if (TYPE_NAME (TREE_TYPE (field))
			      && (TREE_CODE (TYPE_NAME (TREE_TYPE (field)))
				  == TYPE_DECL))
			    inform (DECL_SOURCE_LOCATION
				      (TYPE_NAME (TREE_TYPE (field))),
				    "type of field declared here");
			  if (closest_match
			      && TYPE_NAME (TREE_TYPE (closest_match))
			      && (TREE_CODE (TYPE_NAME
				   (TREE_TYPE (closest_match))) == TYPE_DECL))
			    inform (DECL_SOURCE_LOCATION
				      (TYPE_NAME (TREE_TYPE (closest_match))),
				    "type of mismatching field declared here");
			}
		      /* And finally fixup the types.  */
		      TREE_OPERAND (op, 0)
			= build1 (VIEW_CONVERT_EXPR, type,
				  TREE_OPERAND (op, 0));
		    }
		  else
		    TREE_OPERAND (op, 1) = tem;
		}

	      op = TREE_OPERAND (op, 0);
	    }
	}
      if (is_gimple_call (stmt))
	{
	  if (gimple_call_internal_p (stmt))
	    gimple_call_set_internal_fn
	      (stmt, streamer_read_enum (ib, internal_fn, IFN_LAST));
	  else
	    gimple_call_set_fntype (stmt, stream_read_tree (ib, data_in));
	}
      break;

    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_TRANSACTION:
      gimple_transaction_set_label (stmt, stream_read_tree (ib, data_in));
      break;

    default:
      internal_error ("bytecode stream: unknown GIMPLE statement tag %s",
		      lto_tag_name (tag));
    }

  /* Update the properties of symbols, SSA names and labels associated
     with STMT.  */
  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	SSA_NAME_DEF_STMT (lhs) = stmt;
    }
  else if (code == GIMPLE_LABEL)
    gcc_assert (emit_label_in_global_context_p (gimple_label_label (stmt))
	        || DECL_CONTEXT (gimple_label_label (stmt)) == fn->decl);
  else if (code == GIMPLE_ASM)
    {
      unsigned i;

      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
	{
	  tree op = TREE_VALUE (gimple_asm_output_op (stmt, i));
	  if (TREE_CODE (op) == SSA_NAME)
	    SSA_NAME_DEF_STMT (op) = stmt;
	}
    }

  /* Reset alias information.  */
  if (code == GIMPLE_CALL)
    gimple_call_reset_alias_info (stmt);

  /* Mark the statement modified so its operand vectors can be filled in.  */
  gimple_set_modified (stmt, true);

  return stmt;
}
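
The tuple header read above must unpack fields in exactly the order the writer packed them. The sketch below is a hypothetical mirror of that ordering on the output side, using the bitpack helpers from data-streamer.h; it is paraphrased from the reader, not the actual gimple-streamer-out.c code.

static void
output_gimple_tuple_header_sketch (struct output_block *ob, gimple stmt)
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);
  bp_pack_var_len_unsigned (&bp, gimple_num_ops (stmt));
  bp_pack_value (&bp, gimple_no_warning_p (stmt), 1);
  if (is_gimple_assign (stmt))
    bp_pack_value (&bp, gimple_assign_nontemporal_move_p (stmt), 1);
  bp_pack_value (&bp, gimple_has_volatile_ops (stmt), 1);
  bp_pack_var_len_unsigned (&bp, stmt->gsbase.subcode);
  streamer_write_bitpack (&bp);
}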
static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
Example #5
0
static void
instrument_bool_enum_load (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree rhs = gimple_assign_rhs1 (stmt);
  tree type = TREE_TYPE (rhs);
  tree minv = NULL_TREE, maxv = NULL_TREE;

  if (TREE_CODE (type) == BOOLEAN_TYPE && (flag_sanitize & SANITIZE_BOOL))
    {
      minv = boolean_false_node;
      maxv = boolean_true_node;
    }
  else if (TREE_CODE (type) == ENUMERAL_TYPE
	   && (flag_sanitize & SANITIZE_ENUM)
	   && TREE_TYPE (type) != NULL_TREE
	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	   && (TYPE_PRECISION (TREE_TYPE (type))
	       < GET_MODE_PRECISION (TYPE_MODE (type))))
    {
      minv = TYPE_MIN_VALUE (TREE_TYPE (type));
      maxv = TYPE_MAX_VALUE (TREE_TYPE (type));
    }
  else
    return;

  int modebitsize = GET_MODE_BITSIZE (TYPE_MODE (type));
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  tree base = get_inner_reference (rhs, &bitsize, &bitpos, &offset, &mode,
				   &unsignedp, &volatilep, false);
  tree utype = build_nonstandard_integer_type (modebitsize, 1);

  if ((TREE_CODE (base) == VAR_DECL && DECL_HARD_REGISTER (base))
      || (bitpos % modebitsize) != 0
      || bitsize != modebitsize
      || GET_MODE_BITSIZE (TYPE_MODE (utype)) != modebitsize
      || TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
    return;

  location_t loc = gimple_location (stmt);
  tree ptype = build_pointer_type (TREE_TYPE (rhs));
  tree atype = reference_alias_ptr_type (rhs);
  gimple g = gimple_build_assign (make_ssa_name (ptype, NULL),
				  build_fold_addr_expr (rhs));
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  tree mem = build2 (MEM_REF, utype, gimple_assign_lhs (g),
		     build_int_cst (atype, 0));
  tree urhs = make_ssa_name (utype, NULL);
  g = gimple_build_assign (urhs, mem);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  minv = fold_convert (utype, minv);
  maxv = fold_convert (utype, maxv);
  if (!integer_zerop (minv))
    {
      g = gimple_build_assign_with_ops (MINUS_EXPR,
					make_ssa_name (utype, NULL),
					urhs, minv);
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  gimple_stmt_iterator gsi2 = *gsi;
  basic_block then_bb, fallthru_bb;
  *gsi = create_cond_insert_point (gsi, true, false, true,
				   &then_bb, &fallthru_bb);
  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
			 int_const_binop (MINUS_EXPR, maxv, minv),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  gimple_assign_set_rhs_with_ops (&gsi2, NOP_EXPR, urhs, NULL_TREE);
  update_stmt (stmt);

  gsi2 = gsi_after_labels (then_bb);
  if (flag_sanitize_undefined_trap_on_error)
    g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_invalid_value_data", &loc, NULL,
				     ubsan_type_descriptor (type), NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      enum built_in_function bcode
	= flag_sanitize_recover
	  ? BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE
	  : BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE_ABORT;
      tree fn = builtin_decl_explicit (bcode);

      tree val = force_gimple_operand_gsi (&gsi2, ubsan_encode_value (urhs),
					   true, NULL_TREE, true,
					   GSI_SAME_STMT);
      g = gimple_build_call (fn, 2, data, val);
    }
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi2, g, GSI_SAME_STMT);
}
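
At the source level, the instrumentation above reloads the value in an unsigned, mode-sized type and range-checks it before the original use. A conceptual sketch for the _Bool case follows; report_invalid_load is a hypothetical stand-in for the __ubsan_handle_load_invalid_value runtime entry point.

extern void report_invalid_load (void *data, unsigned long val);

_Bool load_bool_checked (_Bool *p, void *data)
{
  unsigned char u = *(unsigned char *) p;	/* urhs: reload in an unsigned type   */
  if (u > 1)					/* urhs - minv > maxv - minv (minv=0) */
    report_invalid_load (data, u);
  return (_Bool) u;				/* original assignment now uses urhs  */
}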
Example #6
0
static void
instrument_si_overflow (gimple_stmt_iterator gsi)
{
  gimple stmt = gsi_stmt (gsi);
  tree_code code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhstype = TREE_TYPE (lhs);
  tree a, b;
  gimple g;

  /* If this is not a signed operation, don't instrument anything here.
     Also punt on bit-fields.  */
  if (!INTEGRAL_TYPE_P (lhstype)
      || TYPE_OVERFLOW_WRAPS (lhstype)
      || GET_MODE_BITSIZE (TYPE_MODE (lhstype)) != TYPE_PRECISION (lhstype))
    return;

  switch (code)
    {
    case MINUS_EXPR:
    case PLUS_EXPR:
    case MULT_EXPR:
      /* Transform
	 i = u {+,-,*} 5;
	 into
	 i = UBSAN_CHECK_{ADD,SUB,MUL} (u, 5);  */
      a = gimple_assign_rhs1 (stmt);
      b = gimple_assign_rhs2 (stmt);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UBSAN_CHECK_ADD
				      : code == MINUS_EXPR
				      ? IFN_UBSAN_CHECK_SUB
				      : IFN_UBSAN_CHECK_MUL, 2, a, b);
      gimple_call_set_lhs (g, lhs);
      gsi_replace (&gsi, g, false);
      break;
    case NEGATE_EXPR:
      /* Represent i = -u;
	 as
	 i = UBSAN_CHECK_SUB (0, u);  */
      a = build_int_cst (lhstype, 0);
      b = gimple_assign_rhs1 (stmt);
      g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
      gimple_call_set_lhs (g, lhs);
      gsi_replace (&gsi, g, false);
      break;
    case ABS_EXPR:
      /* Transform i = ABS_EXPR<u>;
	 into
	 _N = UBSAN_CHECK_SUB (0, u);
	 i = ABS_EXPR<_N>;  */
      a = build_int_cst (lhstype, 0);
      b = gimple_assign_rhs1 (stmt);
      g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
      a = make_ssa_name (lhstype, NULL);
      gimple_call_set_lhs (g, a);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
      gimple_assign_set_rhs1 (stmt, a);
      update_stmt (stmt);
      break;
    default:
      break;
    }
}
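
The UBSAN_CHECK_* internal functions compute the arithmetic result and divert to the sanitizer runtime on overflow. Their semantics can be sketched with the related (but separate) __builtin_add_overflow builtin; a trap stands in for the runtime call that the internal function expands to later.

int checked_add (int a, int b)
{
  int r;
  if (__builtin_add_overflow (a, b, &r))	/* signed overflow detected          */
    __builtin_trap ();				/* stand-in for the sanitizer report */
  return r;					/* i = UBSAN_CHECK_ADD (a, b) otherwise */
}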
Example #7
0
void
ubsan_expand_null_ifn (gimple_stmt_iterator gsi)
{
  gimple stmt = gsi_stmt (gsi);
  location_t loc = gimple_location (stmt);
  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  tree ckind = gimple_call_arg (stmt, 1);

  basic_block cur_bb = gsi_bb (gsi);

  /* Split the original block holding the pointer dereference.  */
  edge e = split_block (cur_bb, stmt);

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  add_bb_to_loop (then_bb, cond_bb->loop_father);
  loops_state_set (LOOPS_NEED_FIXUP);

  /* Make an edge coming from the 'cond block' into the 'then block';
     this edge is unlikely taken, so set up the probability accordingly.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  e->probability = PROB_VERY_UNLIKELY;

  /* Connect 'then block' with the 'else block'.  This is needed
     as the ubsan routines we call in the 'then block' are not noreturn.
     The 'then block' only has one outgoing edge.  */
  make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE - PROB_VERY_UNLIKELY;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  /* Put the ubsan builtin call into the newly created BB.  */
  gimple g;
  if (flag_sanitize_undefined_trap_on_error)
    g = gimple_build_call (builtin_decl_implicit (BUILT_IN_TRAP), 0);
  else
    {
      enum built_in_function bcode
	= flag_sanitize_recover
	  ? BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH
	  : BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_ABORT;
      tree fn = builtin_decl_implicit (bcode);
      const struct ubsan_mismatch_data m
	= { build_zero_cst (pointer_sized_int_node), ckind };
      tree data
	= ubsan_create_data ("__ubsan_null_data", &loc, &m,
			     ubsan_type_descriptor (TREE_TYPE (ptr),
						    UBSAN_PRINT_POINTER),
			     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      g = gimple_build_call (fn, 2, data,
			     build_zero_cst (pointer_sized_int_node));
    }
  gimple_set_location (g, loc);
  gimple_stmt_iterator gsi2 = gsi_start_bb (then_bb);
  gsi_insert_after (&gsi2, g, GSI_NEW_STMT);

  /* Unlink the UBSAN_NULLs vops before replacing it.  */
  unlink_stmt_vdef (stmt);

  g = gimple_build_cond (EQ_EXPR, ptr, build_int_cst (TREE_TYPE (ptr), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);

  /* Replace the UBSAN_NULL with a GIMPLE_COND stmt.  */
  gsi_replace (&gsi, g, false);
}
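
The expansion above turns the UBSAN_NULL marker into an explicit compare-and-branch around the original dereference. Its control-flow shape, sketched at the source level (report_null stands in for the __ubsan_handle_type_mismatch call built in then_bb):

extern void report_null (void);

int deref_checked (int *ptr)
{
  if (ptr == 0)		/* cond_bb: new GIMPLE_COND, taken edge marked very unlikely */
    report_null ();	/* then_bb: sanitizer call, not noreturn, falls through      */
  return *ptr;		/* fallthru_bb: the original dereference                     */
}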
Example #8
0
static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If we had begin stmt markers from e.g. PCH, but this compilation
     doesn't want them, lower_stmt will have cleaned them up; we can
     now clear the flag that indicates we had them.  */
  if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
    {
      /* This counter need not be exact, but before lowering it will
	 most certainly be.  */
      gcc_assert (cfun->debug_marker_count == 0);
      cfun->debug_nonbind_markers = false;
    }

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
Example #9
0
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.  */
gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
	  tree unregister_fncall_param, register_fncall_param;

	  /* Variable-sized objects should have had their sizes
	     gimplified by the time we get here. */
	  size = fold_convert (size_type_node,
			       TYPE_SIZE_UNIT (TREE_TYPE (decl)));
	  gcc_assert (is_gimple_val (size));


          unregister_fncall_param =
	    mf_mark (build1 (ADDR_EXPR,
			     build_pointer_type (TREE_TYPE (decl)),
			     decl));
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
						 unregister_fncall_param,
						 size,
						 integer_three_node);


          variable_name = mf_varname_tree (decl);
          register_fncall_param =
	    mf_mark (build1 (ADDR_EXPR,
			     build_pointer_type (TREE_TYPE (decl)),
			     decl));
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
	                    "name") */
	  register_fncall = gimple_build_call (mf_register_fndecl, 4,
					       register_fncall_param,
					       size,
					       integer_three_node,
					       variable_name);


          /* Accumulate the two calls.  */
	  gimple_set_location (register_fncall, location);
	  gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.  */
          if (gsi_end_p (initially_stmts))
	    {
	      if (!mf_artificial (decl))
		warning (OPT_Wmudflap,
			 "mudflap cannot track %qE in stub function",
			 DECL_NAME (decl));
	    }
	  else
	    {
	      gsi_insert_before (&initially_stmts, register_fncall,
				 GSI_SAME_STMT);

	      /* Accumulate the FINALLY piece.  */
	      gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
	    }
          mf_mark (decl);
        }

      decl = DECL_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
    {
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = NULL;

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
   else
    return seq;
}
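
For each eligible automatic variable, the effect is a paired registration at function entry and an unregistration in a GIMPLE_TRY_FINALLY at exit. A source-level sketch follows; the runtime prototypes are paraphrased loosely, and the constant 3 corresponds to the integer_three_node / __MF_TYPE_STACK argument above.

extern void __mf_register (void *ptr, unsigned long sz, int type, const char *name);
extern void __mf_unregister (void *ptr, unsigned long sz, int type);

void with_tracked_local (void)
{
  int buf[16];
  __mf_register (&buf, sizeof (buf), 3 /* __MF_TYPE_STACK */, "buf");
  /* ... original function body, guarded by the TRY_FINALLY ... */
  __mf_unregister (&buf, sizeof (buf), 3 /* __MF_TYPE_STACK */);
}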
static void
use_internal_fn (gcall *call)
{
  /* We'll be inserting another call with the same arguments after the
     lhs has been set, so prevent any possible coalescing failure from
     having both values live at once.  See PR 71020.  */
  replace_abnormal_ssa_names (call);

  unsigned nconds = 0;
  auto_vec<gimple *, 12> conds;
  if (can_test_argument_range (call))
    {
      gen_shrink_wrap_conditions (call, conds, &nconds);
      gcc_assert (nconds != 0);
    }
  else
    gcc_assert (edom_only_function (call));

  internal_fn ifn = replacement_internal_fn (call);
  gcc_assert (ifn != IFN_LAST);

  /* Construct the new call, with the same arguments as the original one.  */
  auto_vec <tree, 16> args;
  unsigned int nargs = gimple_call_num_args (call);
  for (unsigned int i = 0; i < nargs; ++i)
    args.safe_push (gimple_call_arg (call, i));
  gcall *new_call = gimple_build_call_internal_vec (ifn, args);
  gimple_set_location (new_call, gimple_location (call));
  gimple_call_set_nothrow (new_call, gimple_call_nothrow_p (call));

  /* Transfer the LHS to the new call.  */
  tree lhs = gimple_call_lhs (call);
  gimple_call_set_lhs (new_call, lhs);
  gimple_call_set_lhs (call, NULL_TREE);
  SSA_NAME_DEF_STMT (lhs) = new_call;

  /* Insert the new call.  */
  gimple_stmt_iterator gsi = gsi_for_stmt (call);
  gsi_insert_before (&gsi, new_call, GSI_SAME_STMT);

  if (nconds == 0)
    {
      /* Skip the call if LHS == LHS.  If we reach here, EDOM is the only
	 valid errno value and it is used iff the result is NaN.  */
      conds.quick_push (gimple_build_cond (EQ_EXPR, lhs, lhs,
					   NULL_TREE, NULL_TREE));
      nconds++;

      /* Try replacing the original call with a direct assignment to
	 errno, via an internal function.  */
      if (set_edom_supported_p () && !stmt_ends_bb_p (call))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (call);
	  gcall *new_call = gimple_build_call_internal (IFN_SET_EDOM, 0);
	  gimple_set_vuse (new_call, gimple_vuse (call));
	  gimple_set_vdef (new_call, gimple_vdef (call));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_call)) = new_call;
	  gimple_set_location (new_call, gimple_location (call));
	  gsi_replace (&gsi, new_call, false);
	  call = new_call;
	}
    }

  shrink_wrap_one_built_in_call_with_conds (call, conds, nconds);
}
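
The net effect of the shrink-wrapping is that the errno-setting library call survives only on the path where it can actually set errno, while the internal function handles the common case. A conceptual sketch for sqrt; sqrt_no_errno is a hypothetical stand-in for the IFN_SQRT internal call.

#include <math.h>

extern double sqrt_no_errno (double);

double shrink_wrapped_sqrt (double x)
{
  double y = sqrt_no_errno (x);		/* new internal-fn call, always executed       */
  if (__builtin_isless (x, 0.0))	/* range test from gen_shrink_wrap_conditions  */
    y = sqrt (x);			/* original call kept only where errno matters */
  return y;
}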
Example #11
0
static gimple_seq
gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
{
  gimple top, entry, stmt;
  gimple_seq stmt_list, body_seq, incr_seq, exit_seq;
  tree cont_block, break_block;
  location_t stmt_locus;

  stmt_locus = input_location;
  stmt_list = NULL;
  body_seq = NULL;
  incr_seq = NULL;
  exit_seq = NULL;
  entry = NULL;

  break_block = begin_bc_block (bc_break);
  cont_block = begin_bc_block (bc_continue);

  /* If condition is zero don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      top = NULL;
      if (cond_is_first)
	{
	  stmt = gimple_build_goto (get_bc_label (bc_break));
	  gimple_set_location (stmt, stmt_locus);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }
  else
    {
      /* If we use a LOOP_EXPR here, we have to feed the whole thing
	 back through the main gimplifier to lower it.  Given that we
	 have to gimplify the loop body NOW so that we can resolve
	 break/continue stmts, seems easier to just expand to gotos.  */
      top = gimple_build_label (create_artificial_label ());

      /* If we have an exit condition, then we build an IF with gotos either
	 out of the loop, or to the top of it.  If there's no exit condition,
	 then we just build a jump back to the top.  */
      if (cond && !integer_nonzerop (cond))
	{
	  if (cond != error_mark_node)
	    { 
	      gimplify_expr (&cond, &exit_seq, NULL, is_gimple_val, fb_rvalue);
	      stmt = gimple_build_cond (NE_EXPR, cond,
					build_int_cst (TREE_TYPE (cond), 0),
					gimple_label_label (top),
					get_bc_label (bc_break));
	      gimple_seq_add_stmt (&exit_seq, stmt);
	    }

	  if (cond_is_first)
	    {
	      if (incr)
		{
		  entry = gimple_build_label (create_artificial_label ());
		  stmt = gimple_build_goto (gimple_label_label (entry));
		}
	      else
		stmt = gimple_build_goto (get_bc_label (bc_continue));
	      gimple_set_location (stmt, stmt_locus);
	      gimple_seq_add_stmt (&stmt_list, stmt);
	    }
	}
      else
	{
	  stmt = gimple_build_goto (gimple_label_label (top));
	  gimple_seq_add_stmt (&exit_seq, stmt);
	}
    }

  gimplify_stmt (&body, &body_seq);
  gimplify_stmt (&incr, &incr_seq);

  body_seq = finish_bc_block (bc_continue, cont_block, body_seq);

  gimple_seq_add_stmt (&stmt_list, top);
  gimple_seq_add_seq (&stmt_list, body_seq);
  gimple_seq_add_seq (&stmt_list, incr_seq);
  gimple_seq_add_stmt (&stmt_list, entry);
  gimple_seq_add_seq (&stmt_list, exit_seq);

  annotate_all_with_location (stmt_list, stmt_locus);

  return finish_bc_block (bc_break, break_block, stmt_list);
}
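
As the comment above notes, the loop is expanded directly into labels and gotos rather than a LOOP_EXPR. A hand-written C equivalent of the layout built for the cond-first, with-increment case (a sketch of the control flow, not generated output):

int sum_to (int n)
{
  int i = 0, sum = 0;
  goto entry;		/* initial jump: skip body and incr on the first pass */
 top:
  sum += i;		/* body_seq; a 'continue' would land just after it    */
  i++;			/* incr_seq                                           */
 entry:
  if (i < n)		/* exit_seq: conditional branch back to the top       */
    goto top;
  return sum;		/* the bc_break label would land here                 */
}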
Example #12
0
static gimple *
input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in,
		   enum LTO_tags tag)
{
  gimple *stmt;
  enum gimple_code code;
  unsigned HOST_WIDE_INT num_ops;
  size_t i;
  struct bitpack_d bp;
  bool has_hist;

  code = lto_tag_to_gimple_code (tag);

  /* Read the tuple header.  */
  bp = streamer_read_bitpack (ib);
  num_ops = bp_unpack_var_len_unsigned (&bp);
  stmt = gimple_alloc (code, num_ops);
  stmt->no_warning = bp_unpack_value (&bp, 1);
  if (is_gimple_assign (stmt))
    stmt->nontemporal_move = bp_unpack_value (&bp, 1);
  stmt->has_volatile_ops = bp_unpack_value (&bp, 1);
  has_hist = bp_unpack_value (&bp, 1);
  stmt->subcode = bp_unpack_var_len_unsigned (&bp);

  /* Read location information.  Caching here makes no sense until streamer
     cache can handle the following gimple_set_block.  */
  gimple_set_location (stmt, stream_input_location_now (&bp, data_in));

  /* Read lexical block reference.  */
  gimple_set_block (stmt, stream_read_tree (ib, data_in));

  /* Read in all the operands.  */
  switch (code)
    {
    case GIMPLE_RESX:
      gimple_resx_set_region (as_a <gresx *> (stmt),
			      streamer_read_hwi (ib));
      break;

    case GIMPLE_EH_MUST_NOT_THROW:
      gimple_eh_must_not_throw_set_fndecl (
	as_a <geh_mnt *> (stmt),
	stream_read_tree (ib, data_in));
      break;

    case GIMPLE_EH_DISPATCH:
      gimple_eh_dispatch_set_region (as_a <geh_dispatch *> (stmt),
				     streamer_read_hwi (ib));
      break;

    case GIMPLE_ASM:
      {
	/* FIXME lto.  Move most of this into a new gimple_asm_set_string().  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	tree str;
	asm_stmt->ni = streamer_read_uhwi (ib);
	asm_stmt->no = streamer_read_uhwi (ib);
	asm_stmt->nc = streamer_read_uhwi (ib);
	asm_stmt->nl = streamer_read_uhwi (ib);
	str = streamer_read_string_cst (data_in, ib);
	asm_stmt->string = TREE_STRING_POINTER (str);
      }
      /* Fallthru  */

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN:
    case GIMPLE_SWITCH:
    case GIMPLE_LABEL:
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_DEBUG:
      for (i = 0; i < num_ops; i++)
	{
	  tree *opp, op = stream_read_tree (ib, data_in);
	  gimple_set_op (stmt, i, op);
	  if (!op)
	    continue;

	  opp = gimple_op_ptr (stmt, i);
	  if (TREE_CODE (*opp) == ADDR_EXPR)
	    opp = &TREE_OPERAND (*opp, 0);
	  while (handled_component_p (*opp))
	    opp = &TREE_OPERAND (*opp, 0);
	  /* At LTO output time we wrap all global decls in MEM_REFs to
	     allow seamless replacement with prevailing decls.  Undo this
	     here if the prevailing decl allows for this.
	     ???  Maybe we should simply fold all stmts.  */
	  if (TREE_CODE (*opp) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (*opp, 0)) == ADDR_EXPR
	      && integer_zerop (TREE_OPERAND (*opp, 1))
	      && (TREE_THIS_VOLATILE (*opp)
		  == TREE_THIS_VOLATILE
		       (TREE_OPERAND (TREE_OPERAND (*opp, 0), 0)))
	      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*opp, 1)))
	      && (TREE_TYPE (*opp)
		  == TREE_TYPE (TREE_TYPE (TREE_OPERAND (*opp, 1))))
	      && (TREE_TYPE (*opp)
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*opp, 0), 0))))
	    *opp = TREE_OPERAND (TREE_OPERAND (*opp, 0), 0);
	}
      if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
	{
	  if (gimple_call_internal_p (call_stmt))
	    gimple_call_set_internal_fn
	      (call_stmt, streamer_read_enum (ib, internal_fn, IFN_LAST));
	  else
	    gimple_call_set_fntype (call_stmt, stream_read_tree (ib, data_in));
	}
      break;

    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_TRANSACTION:
      gimple_transaction_set_label (as_a <gtransaction *> (stmt),
				    stream_read_tree (ib, data_in));
      break;

    default:
      internal_error ("bytecode stream: unknown GIMPLE statement tag %s",
		      lto_tag_name (tag));
    }

  /* Update the properties of symbols, SSA names and labels associated
     with STMT.  */
  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	SSA_NAME_DEF_STMT (lhs) = stmt;
    }
  else if (code == GIMPLE_ASM)
    {
      gasm *asm_stmt = as_a <gasm *> (stmt);
      unsigned i;

      for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
	{
	  tree op = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
	  if (TREE_CODE (op) == SSA_NAME)
	    SSA_NAME_DEF_STMT (op) = stmt;
	}
    }

  /* Reset alias information.  */
  if (code == GIMPLE_CALL)
    gimple_call_reset_alias_info (as_a <gcall *> (stmt));

  /* Mark the statement modified so its operand vectors can be filled in.  */
  gimple_set_modified (stmt, true);
  if (has_hist)
    stream_in_histogram_value (ib, stmt);

  return stmt;
}
static bool
conditional_replacement (basic_block cond_bb, basic_block middle_bb,
			 edge e0, edge e1, gimple phi,
			 tree arg0, tree arg1)
{
  tree result;
  gimple stmt, new_stmt;
  tree cond;
  gimple_stmt_iterator gsi;
  edge true_edge, false_edge;
  tree new_var, new_var2;

  /* FIXME: Gimplification of complex type is too hard for now.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (arg1)) == COMPLEX_TYPE)
    return false;

  /* If the PHI arguments are the constants 0 and 1, convert the PHI
     into the conditional; otherwise give up.  */
  if ((integer_zerop (arg0) && integer_onep (arg1))
      || (integer_zerop (arg1) && integer_onep (arg0)))
    ;
  else
    return false;

  if (!empty_block_p (middle_bb))
    return false;

  /* At this point we know we have a GIMPLE_COND with two successors.
     One successor is BB, the other successor is an empty block which
     falls through into BB.

     There is a single PHI node at the join point (BB) and its arguments
     are constants (0, 1).

     So, given the condition COND, and the two PHI arguments, we can
     rewrite this PHI into non-branching code:

       dest = (COND) or dest = COND'

     We use the condition as-is if the argument associated with the
     true edge has the value one or the argument associated with the
     false edge has the value zero.  Note that those conditions are not
     the same since only one of the outgoing edges from the GIMPLE_COND
     will directly reach BB and thus be associated with an argument.  */

  stmt = last_stmt (cond_bb);
  result = PHI_RESULT (phi);

  /* To handle special cases like floating point comparison, it is easier and
     less error-prone to build a tree and gimplify it on the fly though it is
     less efficient.  */
  cond = fold_build2 (gimple_cond_code (stmt), boolean_type_node,
		      gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));

  /* We need to know which is the true edge and which is the false
     edge so that we know when to invert the condition below.  */
  extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
  if ((e0 == true_edge && integer_zerop (arg0))
      || (e0 == false_edge && integer_onep (arg0))
      || (e1 == true_edge && integer_zerop (arg1))
      || (e1 == false_edge && integer_onep (arg1)))
    cond = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (cond), cond);

  /* Insert our new statements at the end of conditional block before the
     COND_STMT.  */
  gsi = gsi_for_stmt (stmt);
  new_var = force_gimple_operand_gsi (&gsi, cond, true, NULL, true,
				      GSI_SAME_STMT);

  if (!useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (new_var)))
    {
      source_location locus_0, locus_1;

      new_var2 = create_tmp_var (TREE_TYPE (result), NULL);
      add_referenced_var (new_var2);
      new_stmt = gimple_build_assign_with_ops (CONVERT_EXPR, new_var2,
					       new_var, NULL);
      new_var2 = make_ssa_name (new_var2, new_stmt);
      gimple_assign_set_lhs (new_stmt, new_var2);
      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
      new_var = new_var2;

      /* Set the locus to the first argument, unless it doesn't have one.  */
      locus_0 = gimple_phi_arg_location (phi, 0);
      locus_1 = gimple_phi_arg_location (phi, 1);
      if (locus_0 == UNKNOWN_LOCATION)
        locus_0 = locus_1;
      gimple_set_location (new_stmt, locus_0);
    }

  replace_phi_edge_with_variable (cond_bb, e1, phi, new_var);

  /* Note that we optimized this PHI.  */
  return true;
}
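
The transformation is easiest to see at the source level: a diamond whose PHI merges the constants 0 and 1 collapses into the condition itself (possibly inverted). A before/after sketch:

int before (int a, int b)
{
  int x;
  if (a > b)	/* GIMPLE_COND in cond_bb         */
    x = 1;	/* PHI argument on one edge       */
  else
    x = 0;	/* PHI argument on the other edge */
  return x;	/* PHI result                     */
}

int after (int a, int b)
{
  return a > b;	/* dest = COND (or !COND if the constants are swapped) */
}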
Example #14
0
static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (data.return_statements.is_empty ()
	  || gimple_return_retval (data.return_statements.last().stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
Example #15
0
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.  */
gimple_seq
mx_register_decls (tree decl, gimple_seq seq, gimple stmt, location_t location, bool func_args)
{
    gimple_seq finally_stmts = NULL;
    gimple_stmt_iterator initially_stmts = gsi_start (seq);
    bool sframe_inserted = false;
    size_t front_rz_size, rear_rz_size;
    tree fsize, rsize, size;
    gimple uninit_fncall_front, uninit_fncall_rear, init_fncall_front, \
        init_fncall_rear, init_assign_stmt;
    tree fncall_param_front, fncall_param_rear;
    int map_ret;

    while (decl != NULL_TREE)
    {
        if ((mf_decl_eligible_p (decl) || TREE_CODE(TREE_TYPE(decl)) == ARRAY_TYPE)
                /* Not already processed.  */
                && ! mf_marked_p (decl)
                /* Automatic variable.  */
                && ! DECL_EXTERNAL (decl)
                && ! TREE_STATIC (decl)
                && get_name(decl))
        {
            DEBUGLOG("DEBUG Instrumenting %s is_complete_type %d\n", IDENTIFIER_POINTER(DECL_NAME(decl)), COMPLETE_TYPE_P(decl));

            /* construct a tree corresponding to the type struct{
               unsigned int rz_front[6U];
               original variable
               unsigned int rz_rear[6U];
               };
             */

            if (!sframe_inserted){
                gimple ensure_fn_call = gimple_build_call (lbc_ensure_sframe_bitmap_fndecl, 0);
                gimple_set_location (ensure_fn_call, location);
                gsi_insert_before (&initially_stmts, ensure_fn_call, GSI_SAME_STMT);

                sframe_inserted = true;
            }

            // Calculate the zone sizes
            size_t element_size = 0, request_size = 0;
            if (COMPLETE_TYPE_P(decl)){
                request_size = TREE_INT_CST_LOW(TYPE_SIZE_UNIT(TREE_TYPE(decl)));
                if (TREE_CODE(TREE_TYPE(decl)) == ARRAY_TYPE)
                    element_size = TREE_INT_CST_LOW(TYPE_SIZE_UNIT(TREE_TYPE(TREE_TYPE(decl))));
                else
                    element_size = request_size;
            }
            calculate_zone_sizes(element_size, request_size, /*global*/ false, COMPLETE_TYPE_P(decl), &front_rz_size, &rear_rz_size);
            DEBUGLOG("DEBUG *SIZES* req_size %u, ele_size %u, fsize %u, rsize %u\n", request_size, element_size, front_rz_size, rear_rz_size);
			
            tree struct_type = create_struct_type(decl, front_rz_size, rear_rz_size);
            tree struct_var = create_struct_var(struct_type, decl, location);
            declare_vars(struct_var, stmt, 0);

			/* Inserting into hashtable */
            PWord_t PV;
            JSLI(PV, decl_map, mf_varname_tree(decl));
            gcc_assert(PV);
            *PV = (PWord_t) struct_var;

            fsize = convert (unsigned_type_node, size_int(front_rz_size));
            gcc_assert (is_gimple_val (fsize));

            tree rz_front = TYPE_FIELDS(struct_type);
            fncall_param_front = mf_mark (build1 (ADDR_EXPR, ptr_type_node, build3 (COMPONENT_REF, TREE_TYPE(rz_front),
                                                      struct_var, rz_front, NULL_TREE)));
            uninit_fncall_front = gimple_build_call (lbc_uninit_front_rz_fndecl, 2, fncall_param_front, fsize);
            init_fncall_front = gimple_build_call (lbc_init_front_rz_fndecl, 2, fncall_param_front, fsize);
            gimple_set_location (init_fncall_front, location);
            gimple_set_location (uninit_fncall_front, location);

            // Incomplete types have only a front red zone
            if (COMPLETE_TYPE_P(decl)){
                rsize = convert (unsigned_type_node, size_int(rear_rz_size));
                gcc_assert (is_gimple_val (rsize));

                tree rz_rear = DECL_CHAIN(DECL_CHAIN(TYPE_FIELDS (struct_type)));
                fncall_param_rear = mf_mark (build1 (ADDR_EXPR, ptr_type_node, build3 (COMPONENT_REF, TREE_TYPE(rz_rear),
                                struct_var, rz_rear, NULL_TREE)));
                init_fncall_rear = gimple_build_call (lbc_init_rear_rz_fndecl, 2, fncall_param_rear, rsize);
                uninit_fncall_rear = gimple_build_call (lbc_uninit_rear_rz_fndecl, 2, fncall_param_rear, rsize);
                gimple_set_location (init_fncall_rear, location);
                gimple_set_location (uninit_fncall_rear, location);
            }

            // TODO Do I need this?
#if 0
            if (DECL_INITIAL(decl) != NULL_TREE){
                // This code never seems to get executed for something like int i = 10;
                // I'm not sure why, but looking at the tree dump it seems to be because
                // by the time it gets here, such statements have been split into two,
                // int i; and i = 10; respectively. I am leaving it in just in case.
                tree orig_var_type = DECL_CHAIN(TYPE_FIELDS (struct_type));
                tree orig_var_lval = mf_mark (build3 (COMPONENT_REF, TREE_TYPE(orig_var_type),
                                        struct_var, orig_var_type, NULL_TREE));
                init_assign_stmt = gimple_build_assign(orig_var_lval, DECL_INITIAL(decl));
                gimple_set_location (init_assign_stmt, location);
            }
#endif

            if (gsi_end_p (initially_stmts))
            {
                // TODO handle this
                if (!DECL_ARTIFICIAL (decl))
                    warning (OPT_Wmudflap,
                            "mudflap cannot track %qE in stub function",
                            DECL_NAME (decl));
            }
            else
            {
#if 0
                // Insert the declaration initializer
                if (DECL_INITIAL(decl) != NULL_TREE)
                    gsi_insert_before (&initially_stmts, init_assign_stmt, GSI_SAME_STMT);
#endif

                //gsi_insert_before (&initially_stmts, register_fncall, GSI_SAME_STMT);
                gsi_insert_before (&initially_stmts, init_fncall_front, GSI_SAME_STMT);
                if (COMPLETE_TYPE_P(decl))
                    gsi_insert_before (&initially_stmts, init_fncall_rear, GSI_SAME_STMT);

                /* Accumulate the FINALLY piece.  */
                //gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
                gimple_seq_add_stmt (&finally_stmts, uninit_fncall_front);
                if (COMPLETE_TYPE_P(decl))
                    gimple_seq_add_stmt (&finally_stmts, uninit_fncall_rear);

            }
            mf_mark (decl);
        }

        decl = DECL_CHAIN (decl);
    }

    /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
    if (finally_stmts != NULL)
    {
        gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
        gimple_seq new_seq = gimple_seq_alloc ();

        gimple_seq_add_stmt (&new_seq, stmt);
        return new_seq;
    }
    else
        return seq;
}
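
The comment inside the loop describes the wrapper type that create_struct_type builds around each tracked local. Spelled out as plain C for an int variable, assuming the 6-word red zones mentioned there (actual sizes come from calculate_zone_sizes, and the rear zone exists only for complete types):

struct wrapped_local
{
    unsigned int rz_front[6];   /* front red zone                      */
    int orig_var;               /* the original variable               */
    unsigned int rz_rear[6];    /* rear red zone, complete types only  */
};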
Example #16
0
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
Example #17
0
static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
		location_t location, tree dirflag)
{
	tree type, base=NULL_TREE, limit, addr, size, t, elt=NULL_TREE;
	tree temp, field, offset;
	bool check_red_flag = 0, instrumented = 0;
	tree fncall_param_val;
	gimple is_char_red_call;
	tree temp_instr, type_node;

    // TODO fix this to use our flag
	/* Don't instrument read operations.  */
	if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
		return;

	DEBUGLOG("TREE_CODE(t) = %s, mf_decl_eligible_p : %d\n", 
			tree_code_name[(int)TREE_CODE(*tp)], mf_decl_eligible_p(*tp));

	t = *tp;
	type = TREE_TYPE (t);

	if (type == error_mark_node)
		return;

	size = TYPE_SIZE_UNIT (type);

	/* Don't instrument marked nodes.  */
	if (mf_marked_p (t) && !mf_decl_eligible_p(t)){
		DEBUGLOG("Returning Here - 1\n");
		return;
	}

    if (TREE_CODE(t) == ADDR_EXPR || \
            TREE_CODE(t) == COMPONENT_REF || \
            TREE_CODE(t) == ARRAY_REF || \
            (TREE_CODE(t) == VAR_DECL && mf_decl_eligible_p(t)))
    {
        DEBUGLOG("------ INSTRUMENTING NODES ---------\n");
        temp = TREE_OPERAND(t, 0);

        if(temp && (TREE_CODE(temp) == STRING_CST || \
                TREE_CODE(temp) == FUNCTION_DECL)) // TODO Check this out? What do you do in this case?
            return;

        DEBUGLOG("TREE_CODE(temp) : %s\n", tree_code_name[(int)TREE_CODE(temp)]);

        if (TREE_CODE(t) == VAR_DECL)
            *tp = mf_walk_n_instrument(tp, &instrumented);
        else
            TREE_OPERAND(t,0) = mf_walk_n_instrument(&(TREE_OPERAND(t,0)), &instrumented);

        if (TREE_CODE(t) == ADDR_EXPR)
            return;
    } 

    DEBUGLOG("Pass2 derefs: entering deref section\n");

    type_node = NULL_TREE;
    //TODO move this to appropriate cases
    t = *tp;
	switch (TREE_CODE (t))
	{
		case ARRAY_REF:
		case COMPONENT_REF: // TODO check if following works for comp refs
			{ 
                DEBUGLOG("------ INSIDE CASE COMPONENT_REF  ---------\n");
                HOST_WIDE_INT bitsize, bitpos;
                tree inner, offset;
                int volatilep, unsignedp;
                enum machine_mode mode1;
                check_red_flag = 1; 
                inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
                        &mode1, &unsignedp, &volatilep, false);
                if (!offset)
                    offset = size_zero_node;
                offset = size_binop (PLUS_EXPR, offset,
                        size_int (bitpos / BITS_PER_UNIT));
                addr = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                        build1 (ADDR_EXPR, build_pointer_type(type), inner), offset);
                break; // TODO continue?
            }

		case INDIRECT_REF:
			DEBUGLOG("------ INSIDE CASE INDIRECT_REF  ---------\n");
			check_red_flag = 1;
			addr = TREE_OPERAND (t, 0);
            break; // TODO continue?

		case MEM_REF:
			DEBUGLOG("------ INSIDE CASE MEM_REF  ---------\n");
			check_red_flag = 1;
			addr = fold_build2_loc (location, POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (t, 0)),
					TREE_OPERAND (t, 0), fold_convert (sizetype, TREE_OPERAND (t, 1)));
            break;

		case TARGET_MEM_REF:
			DEBUGLOG("------ INSIDE CASE TARGET_MEM_REF  ---------\n");
			check_red_flag = 1;
			addr = tree_mem_ref_addr (ptr_type_node, t);
			break; // TODO do you want to do this case? find out what it does.

		case ARRAY_RANGE_REF:
			DEBUGLOG("------ INSIDE CASE ARRAY_RANGE_REF  ---------\n");
			DEBUGLOG("------ TODO not handled yet---------\n");
			return;

		case BIT_FIELD_REF:
			DEBUGLOG("------ INSIDE CASE BIT_FIELD_REF  ---------\n");
			DEBUGLOG("------ TODO not handled yet---------\n");
			return;

		default:
			DEBUGLOG("------ INSIDE CASE DEFAULT  ---------\n");
			if(mf_decl_eligible_p(t))
			{
                DEBUGLOG("Do you want to be here?\n");
                return;
				/*if((*tp = mx_xform_instrument_pass2(t)) == NULL_TREE){
					DEBUGLOG("Failed to set tree operand\n");
					return;
				}*/
			}
	}


    // Add the call to is_char_red
    if (check_red_flag) {
        DEBUGLOG("Entering is_char_red\n");
        fncall_param_val = fold_build2_loc (location, MEM_REF, ptr_type_node, addr, \
                            build_int_cst(build_pointer_type(type), 0));
        fncall_param_val = fold_convert_loc (location, unsigned_type_node, fncall_param_val);
        is_char_red_call = gimple_build_call (lbc_is_char_red_fndecl, 3, fncall_param_val, size, \
                            fold_convert_loc(location, ptr_type_node, addr));
        gimple_set_location (is_char_red_call, location);
        //debug_gimple_stmt(is_char_red_call);
        gsi_insert_before (iter, is_char_red_call, GSI_SAME_STMT);
        DEBUGLOG("Done with is_char_red\n");
    }
    DEBUGLOG("Exiting derefs \n");
}
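
Every instrumented dereference gains a preceding runtime check built from the three arguments computed above: the loaded value, the access size, and the address. A conceptual source-level sketch; is_char_red is a hypothetical prototype for the routine behind lbc_is_char_red_fndecl.

extern void is_char_red (unsigned int value, unsigned long size, void *addr);

int read_checked (int *p)
{
    is_char_red (*(unsigned int *) p, sizeof (*p), p);  /* value, size, address */
    return *p;                                          /* original dereference */
}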
Example #18
0
static void
mf_build_check_statement_for (tree base, tree limit,
                              gimple_stmt_iterator *instr_gsi,
                              location_t location, tree dirflag)
{
  gimple_stmt_iterator gsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;
  gimple g;
  gimple_seq seq, stmts;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
  gsi = *instr_gsi;
  gsi_prev (&gsi);
  if (! gsi_end_p (gsi))
    e = split_block (cond_bb, gsi_stmt (gsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that join_bb's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (then_bb, cond_bb->loop_father);

  /* Build our local variables.  */
  mf_elem = create_tmp_reg (mf_cache_structptr_type, "__mf_elem");
  mf_base = create_tmp_reg (mf_uintptr_type, "__mf_base");
  mf_limit = create_tmp_reg (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  seq = NULL;
  t = fold_convert_loc (location, mf_uintptr_type,
			unshare_expr (base));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_base, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = fold_convert_loc (location, mf_uintptr_type,
			unshare_expr (limit));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_limit, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              flag_mudflap_threads ? mf_cache_shift_decl
	       : mf_cache_shift_decl_l);
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              flag_mudflap_threads ? mf_cache_mask_decl
	       : mf_cache_mask_decl_l);
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_elem, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))
        {
          __mf_check ();
          ... and only if single-threaded:
          __mf_lookup_shift_l = ...;
          __mf_lookup_mask_l = ...;
        }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low  > __mf_base'.  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
        1) u <--  '__mf_elem->high'
        2) v <--  '__mf_limit'.

     Then build 'u <-- (u < v)'.  */

  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  cond = create_tmp_reg (boolean_type_node, "__mf_unlikely_cond");
  g = gimple_build_assign  (cond, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
			 NULL_TREE);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))

     The lowered GIMPLE sequence representing this code is in 'seq',
     built above.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /*  Now build up the body of the cache-miss handling:

     __mf_check();
     refresh *_l vars.

     This is the body of the conditional.  */

  seq = NULL;
  /* u is a string, so it is already a gimple value.  */
  u = mf_file_function_line_tree (location);
  /* NB: we pass the overall [base..limit] range to mf_check.  */
  v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
		   fold_build2_loc (location,
				MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
		   build_int_cst (mf_uintptr_type, 1));
  v = force_gimple_operand (v, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
  gimple_seq_add_stmt (&seq, g);

  if (! flag_mudflap_threads)
    {
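      /* If the call to __mf_check ends its basic block (e.g. because it
         can throw), flush the sequence built so far and keep emitting the
         cache-refresh assignments in the block that follows the call.  */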
      if (stmt_ends_bb_p (g))
	{
	  gsi = gsi_start_bb (then_bb);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	  e = split_block (then_bb, g);
	  then_bb = e->dest;
	  seq = NULL;
	}

      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
      gimple_seq_add_stmt (&seq, g);

      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
      gimple_seq_add_stmt (&seq, g);
    }

  /* Insert the check code in the THEN block.  */
  gsi = gsi_start_bb (then_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  *instr_gsi = gsi_start_bb (join_bb);
}
Example #19
0
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */ 
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

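  /* The LHS of the setjmp call may be an SSA name, which may only be
     defined once; the result is assigned on both the normal and the
     longjmp return paths, so build it in a temporary register and copy
     that into the original destination at the end.  */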
  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary.  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}