Example no. 1
static bool
stmt_may_generate_copy (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_PHI)
    return !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (stmt));

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  /* If the statement has volatile operands, it won't generate a
     useful copy.  */
  if (gimple_has_volatile_ops (stmt))
    return false;

  /* Statements with loads and/or stores will never generate a useful copy.  */
  if (gimple_vuse (stmt))
    return false;

  /* Otherwise, the only statements that generate useful copies are
     assignments whose RHS is just an SSA name that doesn't flow
     through abnormal edges.  */
  return ((gimple_assign_rhs_code (stmt) == SSA_NAME
	   && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
	  || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)));
}
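For illustration, a hedged sketch (constructed GIMPLE, not from the original source) of statements this predicate accepts or rejects:

/* Illustrative statements:
     x_2 = y_1;        <-- copy of an SSA name not in an abnormal PHI: true
     z_4 = y_1 + 1;    <-- RHS is an expression, not a bare copy/invariant: false
     w_6 = *p_5;       <-- statement has a VUSE (load): false  */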
Example no. 2
static bool
forward_propagate_addr_into_variable_array_index (tree offset,
						  tree def_rhs,
						  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the SSA name we
     came along, in case the element size is one.  In that case, however, we
     do not allow multiplications because they could be computing an index
     into a higher-level dimension (PR 37861).  */
  if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
    {
      if (is_gimple_assign (offset_def)
	  && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
	return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
	return false;

      /* The RHS of the statement which defines OFFSET must be a
	 multiplication of an object by the size of the array elements. 
	 This implicitly verifies that the size of the array elements
	 is constant.  */
     offset = gimple_assign_rhs1 (offset_def);
     if (gimple_assign_rhs_code (offset_def) != MULT_EXPR
	 || TREE_CODE (gimple_assign_rhs2 (offset_def)) != INTEGER_CST
	 || !simple_cst_equal (gimple_assign_rhs2 (offset_def),
			       TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
	return false;

      /* The first operand to the MULT_EXPR is the desired index.  */
      index = offset;
    }

  /* Replace the pointer addition with array indexing.  */
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
  use_stmt = gsi_stmt (*use_stmt_gsi);
  TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
    = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
     recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
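As a hedged sketch (statement names and the element size are invented), the rewrite performed by forward_propagate_addr_into_variable_array_index looks like this:

/* Before, with def_rhs == &a[0] and a 4-byte element type:
     off_3 = i_2 * 4;        <-- offset_def: MULT_EXPR by the element size
     p_5 = &a[0] + off_3;    <-- use_stmt: pointer addition
   After, the addition is rewritten as direct array indexing:
     p_5 = &a[i_2];  */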
Example no. 4
/* Check whether G is a potential conditional compare candidate.  */
static bool
ccmp_candidate_p (gimple *g)
{
  tree rhs = gimple_assign_rhs_to_tree (g);
  tree lhs, op0, op1;
  gimple *gs0, *gs1;
  enum tree_code tcode, tcode0, tcode1;
  tcode = TREE_CODE (rhs);

  if (tcode != BIT_AND_EXPR && tcode != BIT_IOR_EXPR)
    return false;

  lhs = gimple_assign_lhs (g);
  op0 = TREE_OPERAND (rhs, 0);
  op1 = TREE_OPERAND (rhs, 1);

  if ((TREE_CODE (op0) != SSA_NAME) || (TREE_CODE (op1) != SSA_NAME)
      || !has_single_use (lhs))
    return false;

  gs0 = get_gimple_for_ssa_name (op0);
  gs1 = get_gimple_for_ssa_name (op1);
  if (!gs0 || !gs1 || !is_gimple_assign (gs0) || !is_gimple_assign (gs1)
      /* g, gs0 and gs1 must be in the same basic block, since the current
	 stage is out-of-ssa.  We cannot guarantee correctness when forwarding
	 gs0 and gs1 into g without dataflow analysis.  */
      || gimple_bb (gs0) != gimple_bb (gs1)
      || gimple_bb (gs0) != gimple_bb (g))
    return false;

  if (!(INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (gs0)))
       || POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (gs0))))
      || !(INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (gs1)))
	   || POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (gs1)))))
    return false;

  tcode0 = gimple_assign_rhs_code (gs0);
  tcode1 = gimple_assign_rhs_code (gs1);
  if (TREE_CODE_CLASS (tcode0) == tcc_comparison
      && TREE_CODE_CLASS (tcode1) == tcc_comparison)
    return true;
  if (TREE_CODE_CLASS (tcode0) == tcc_comparison
      && ccmp_candidate_p (gs1))
    return true;
  else if (TREE_CODE_CLASS (tcode1) == tcc_comparison
	   && ccmp_candidate_p (gs0))
    return true;
  /* We skip ccmp_candidate_p (gs1) && ccmp_candidate_p (gs0) since
     there is no way to set the CC flag.  */
  return false;
}
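For illustration, a hedged example (constructed GIMPLE) of a sequence that ccmp_candidate_p would accept:

/* _1 = a_3 < b_4;
   _2 = c_5 > d_6;
   _7 = _1 & _2;    <-- G: BIT_AND_EXPR whose single-use result combines two
                        comparisons defined in the same basic block, so
                        ccmp_candidate_p (G) returns true.  */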
Example no. 5
static tree
rhs_to_tree (tree type, gimple stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2 (code, type, gimple_assign_rhs1 (stmt),
			gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
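A minimal caller sketch, assuming one wants the RHS of an assignment materialized as a foldable tree; the helper name is hypothetical and not part of the original source:

static tree
defining_rhs_as_tree (tree name)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (name);
  gcc_assert (is_gimple_assign (def_stmt));
  /* Rebuilds e.g. "a_1 + b_2", "(int) c_3" or a bare SSA name/constant
     in the type of the LHS.  */
  return rhs_to_tree (TREE_TYPE (gimple_assign_lhs (def_stmt)), def_stmt);
}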
Example no. 6
File: ubsan.c Project: didemoto/gcc
static void
instrument_si_overflow (gimple_stmt_iterator gsi)
{
  gimple stmt = gsi_stmt (gsi);
  tree_code code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhstype = TREE_TYPE (lhs);
  tree a, b;
  gimple g;

  /* If this is not a signed operation, don't instrument anything here.
     Also punt on bit-fields.  */
  if (!INTEGRAL_TYPE_P (lhstype)
      || TYPE_OVERFLOW_WRAPS (lhstype)
      || GET_MODE_BITSIZE (TYPE_MODE (lhstype)) != TYPE_PRECISION (lhstype))
    return;

  switch (code)
    {
    case MINUS_EXPR:
    case PLUS_EXPR:
    case MULT_EXPR:
      /* Transform
	 i = u {+,-,*} 5;
	 into
	 i = UBSAN_CHECK_{ADD,SUB,MUL} (u, 5);  */
      a = gimple_assign_rhs1 (stmt);
      b = gimple_assign_rhs2 (stmt);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UBSAN_CHECK_ADD
				      : code == MINUS_EXPR
				      ? IFN_UBSAN_CHECK_SUB
				      : IFN_UBSAN_CHECK_MUL, 2, a, b);
      gimple_call_set_lhs (g, lhs);
      gsi_replace (&gsi, g, false);
      break;
    case NEGATE_EXPR:
      /* Represent i = -u;
	 as
	 i = UBSAN_CHECK_SUB (0, u);  */
      a = build_int_cst (lhstype, 0);
      b = gimple_assign_rhs1 (stmt);
      g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
      gimple_call_set_lhs (g, lhs);
      gsi_replace (&gsi, g, false);
      break;
    default:
      break;
    }
}
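A hedged sketch of a driver loop that could apply this instrumentation to every assignment in a function; the loop is assumed for illustration and is not the ubsan.c pass body as excerpted here:

basic_block bb;
gimple_stmt_iterator gsi;

FOR_EACH_BB (bb)
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    if (is_gimple_assign (gsi_stmt (gsi)))
      /* Turns i = u + 5 into i = UBSAN_CHECK_ADD (u, 5), etc.  */
      instrument_si_overflow (gsi);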
Example no. 7
static enum ssa_prop_result
copy_prop_visit_stmt (gimple stmt, edge *taken_edge_p, tree *result_p)
{
  enum ssa_prop_result retval;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "\n");
    }

  if (gimple_assign_single_p (stmt)
      && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	  || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    {
      /* If the statement is a copy assignment, evaluate its RHS to
	 see if the lattice value of its output has changed.  */
      retval = copy_prop_visit_assignment (stmt, result_p);
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    {
      /* See if we can determine which edge goes out of a conditional
	 jump.  */
      retval = copy_prop_visit_cond_stmt (stmt, taken_edge_p);
    }
  else
    retval = SSA_PROP_VARYING;

  if (retval == SSA_PROP_VARYING)
    {
      tree def;
      ssa_op_iter i;

      /* Any other kind of statement is not interesting for constant
	 propagation and, therefore, not worth simulating.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "No interesting values produced.\n");

      /* The assignment is not a copy operation.  Don't visit this
	 statement again and mark all the definitions in the statement
	 to be copies of nothing.  */
      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_ALL_DEFS)
	set_copy_of_val (def, def);
    }

  return retval;
}
Example no. 8
bool
walk_stmt_load_store_addr_ops (gimple *stmt, void *data,
			       walk_stmt_load_store_addr_fn visit_load,
			       walk_stmt_load_store_addr_fn visit_store,
			       walk_stmt_load_store_addr_fn visit_addr)
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs, arg;
      if (visit_store)
	{
	  arg = gimple_assign_lhs (stmt);
	  lhs = get_base_loadstore (arg);
	  if (lhs)
	    ret |= visit_store (stmt, lhs, arg, data);
	}
      arg = gimple_assign_rhs1 (stmt);
      rhs = arg;
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), arg, data);
	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), arg,
			       data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), arg, data);
	  else if (TREE_CODE (rhs) == CONSTRUCTOR)
	    {
	      unsigned int ix;
	      tree val;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
		if (TREE_CODE (val) == ADDR_EXPR)
		  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), arg, data);
		else if (TREE_CODE (val) == OBJ_TYPE_REF
			 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
		  ret |= visit_addr (stmt,
				     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
						   0), arg, data);
	    }
          lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == TARGET_MEM_REF
              && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), lhs, data);
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, arg, data);
	}
    }
static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
	 there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
	{
	  if (!is_gimple_debug (use_stmt))
	    all = false;
	  continue;
	}

      /* If the use is in a deeper loop nest, then we do not want
	 to propagate the ADDR_EXPR into the loop as that is likely
	 adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth)
	{
	  all = false;
	  continue;
	}

      {
	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
						single_use_p);
	/* If the use has moved to a different statement adjust
	   the update machinery for the old statement too.  */
	if (use_stmt != gsi_stmt (gsi))
	  {
	    update_stmt (use_stmt);
	    use_stmt = gsi_stmt (gsi);
	  }

	update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	  && TREE_CODE (use_rhs) == SSA_NAME
	  && has_zero_uses (gimple_assign_lhs (use_stmt)))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  release_defs (use_stmt);
	  gsi_remove (&gsi, true);
	}
    }
Example no. 10
/* PREV is the CC flag from previous compares.  The function expands the
   next compare based on G, which is combined with the previous compare
   by CODE.  PREP_SEQ returns all insns to prepare operands for the
   compare.  GEN_SEQ returns all compare insns.  */
static rtx
expand_ccmp_next (gimple *g, enum tree_code code, rtx prev,
		  rtx *prep_seq, rtx *gen_seq)
{
  enum rtx_code rcode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (g)));

  gcc_assert (code == BIT_AND_EXPR || code == BIT_IOR_EXPR);

  rcode = get_rtx_code (gimple_assign_rhs_code (g), unsignedp);

  return targetm.gen_ccmp_next (prep_seq, gen_seq, prev, rcode,
				gimple_assign_rhs1 (g),
				gimple_assign_rhs2 (g),
				get_rtx_code (code, 0));
}
Example no. 11
void
backprop::optimize_assign (gassign *assign, tree lhs, const usage_info *info)
{
  switch (gimple_assign_rhs_code (assign))
    {
    case MULT_EXPR:
    case RDIV_EXPR:
      /* If the sign of the result doesn't matter, strip sign operations
	 from both inputs.  */
      if (info->flags.ignore_sign)
	replace_assign_rhs (assign, lhs,
			    strip_sign_op (gimple_assign_rhs1 (assign)),
			    strip_sign_op (gimple_assign_rhs2 (assign)),
			    NULL_TREE);
      break;

    case COND_EXPR:
      /* If the sign of A ? B : C doesn't matter, strip sign operations
	 from both B and C.  */
      if (info->flags.ignore_sign)
	replace_assign_rhs (assign, lhs,
			    NULL_TREE,
			    strip_sign_op (gimple_assign_rhs2 (assign)),
			    strip_sign_op (gimple_assign_rhs3 (assign)));
      break;

    default:
      break;
    }
}
Example no. 12
static tree
strip_sign_op_1 (tree rhs)
{
  if (TREE_CODE (rhs) != SSA_NAME)
    return NULL_TREE;

  gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (gassign *assign = dyn_cast <gassign *> (def_stmt))
    switch (gimple_assign_rhs_code (assign))
      {
      case ABS_EXPR:
      case NEGATE_EXPR:
	return gimple_assign_rhs1 (assign);

      default:
	break;
      }
  else if (gcall *call = dyn_cast <gcall *> (def_stmt))
    switch (gimple_call_combined_fn (call))
      {
      CASE_CFN_COPYSIGN:
	return gimple_call_arg (call, 0);

      default:
	break;
      }

  return NULL_TREE;
}
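A hedged illustration (constructed definitions) of what strip_sign_op_1 returns:

/* Given
     y_2 = ABS_EXPR <x_1>;                  strip_sign_op_1 (y_2) == x_1
     z_4 = -x_3;                            strip_sign_op_1 (z_4) == x_3
     w_6 = __builtin_copysign (a_5, b_7);   strip_sign_op_1 (w_6) == a_5
   any other definition yields NULL_TREE.  */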
Example no. 13
static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_copy_p (use_stmt)
	|| TREE_CODE (gimple_assign_lhs (use_stmt)) != SSA_NAME
	|| gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
Example no. 14
static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
	single_use = false;
	if (single_use_only)
	  return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
      return NULL;

    /* If name is not a simple copy destination, we found it.  */
    if (!gimple_assign_copy_p (def_stmt)
        || TREE_CODE (gimple_assign_rhs1 (def_stmt)) != SSA_NAME)
      {
	tree rhs;

	if (!single_use_only && single_use_p)
	  *single_use_p = single_use;

	/* We can look through pointer conversions in the search
	   for a useful stmt for the comparison folding.  */
	rhs = gimple_assign_rhs1 (def_stmt);
	if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
	    && TREE_CODE (rhs) == SSA_NAME
	    && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
	    && POINTER_TYPE_P (TREE_TYPE (rhs)))
	  name = rhs;
	else
	  return def_stmt;
      }
    else
      {
	/* Continue searching the def of the copy source name.  */
	name = gimple_assign_rhs1 (def_stmt);
      }
  } while (1);
}
static tree
lhs_of_dominating_assert (tree op, basic_block bb, gimple stmt)
{
  imm_use_iterator imm_iter;
  gimple use_stmt;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
    {
      use_stmt = USE_STMT (use_p);
      if (use_stmt != stmt
          && gimple_assign_single_p (use_stmt)
          && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ASSERT_EXPR
          && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == op
	  && dominated_by_p (CDI_DOMINATORS, bb, gimple_bb (use_stmt)))
	{
	  return gimple_assign_lhs (use_stmt);
	}
    }
Example no. 16
static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
	{
	  if (TYPE_PRECISION (TREE_TYPE (candidate))
	      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
	    return gimple_assign_rhs1 (def_stmt);
	}
    }

  return candidate;
}
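A hedged example (constructed GIMPLE) of the conversion this helper looks through:

/* With a single-use, non-widening conversion such as
     _2 = (unsigned char) i_1;
   (the precision of _2 does not exceed that of i_1),
   get_name_for_bit_test (_2) returns i_1; in every other case the
   candidate itself is returned.  */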
Example no. 17
/* Return whether USE_STMT is a floating-point division by DEF.  */
static inline bool
is_division_by (gimple use_stmt, tree def)
{
  return is_gimple_assign (use_stmt)
	 && gimple_assign_rhs_code (use_stmt) == RDIV_EXPR
	 && gimple_assign_rhs2 (use_stmt) == def
	 /* Do not recognize x / x as valid division, as we are getting
	    confused later by replacing all immediate uses x in such
	    a stmt.  */
	 && gimple_assign_rhs1 (use_stmt) != def;
}
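A minimal usage sketch, assuming one wants to count the divisions by DEF among its immediate uses; the helper name is hypothetical and not from the original source:

static unsigned
count_divisions_by (tree def)
{
  imm_use_iterator iter;
  gimple use_stmt;
  unsigned n = 0;

  /* Walk every statement that uses DEF and test it with is_division_by.  */
  FOR_EACH_IMM_USE_STMT (use_stmt, iter, def)
    if (is_division_by (use_stmt, def))
      n++;
  return n;
}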
Example no. 18
static tree
maybe_get_single_definition (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      gimple g = SSA_NAME_DEF_STMT (t);
      if (gimple_assign_single_p (g))
	return gimple_assign_rhs1 (g);
    }
  return NULL_TREE;
}
static bool
can_propagate_from (gimple def_stmt)
{
  use_operand_p use_p;
  ssa_op_iter iter;

  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
    if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
      return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we can not apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
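A hedged sketch of how a caller might guard forward propagation with this predicate; the wrapper name is hypothetical:

static bool
may_forward_propagate_name (tree name)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (name);
  /* Only single assignments with side-effect-free, non-load RHSes whose
     operands do not occur in abnormal PHIs are safe propagation sources.  */
  return is_gimple_assign (def_stmt) && can_propagate_from (def_stmt);
}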
// e.g., 3.8.2, 64, arch/x86/ia32/ia32_signal.c copy_siginfo_from_user32(): compat_ptr() u32 max
static bool skip_asm_cast(const_tree arg)
{
	gimple def_stmt = get_def_stmt(arg);

	if (!def_stmt || !gimple_assign_cast_p(def_stmt))
		return false;

	def_stmt = get_def_stmt(gimple_assign_rhs1(def_stmt));
	if (is_size_overflow_asm(def_stmt))
		return false;
	return def_stmt && gimple_code(def_stmt) == GIMPLE_ASM;
}
Example no. 21
static struct mem_ref_group *
gather_memory_references (struct loop *loop, bool *no_other_refs)
{
  basic_block *body = get_loop_body_in_dom_order (loop);
  basic_block bb;
  unsigned i;
  gimple_stmt_iterator bsi;
  gimple stmt;
  tree lhs, rhs;
  struct mem_ref_group *refs = NULL;

  *no_other_refs = true;

  /* Scan the loop body in order, so that the former references precede the
     later ones.  */
  for (i = 0; i < loop->num_nodes; i++)
    {
      bb = body[i];
      if (bb->loop_father != loop)
	continue;

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  stmt = gsi_stmt (bsi);

	  if (gimple_code (stmt) != GIMPLE_ASSIGN)
	    {
	      if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)
		  || (is_gimple_call (stmt)
		      && !(gimple_call_flags (stmt) & ECF_CONST)))
		*no_other_refs = false;
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);

	  if (REFERENCE_CLASS_P (rhs))
	    *no_other_refs &= gather_memory_references_ref (loop, &refs,
							    rhs, false, stmt);
	  if (REFERENCE_CLASS_P (lhs))
	    *no_other_refs &= gather_memory_references_ref (loop, &refs,
							    lhs, true, stmt);
	}
    }
  free (body);

  return refs;
}
Example no. 22
static void
instrument_null (gimple_stmt_iterator gsi, bool is_lhs)
{
  gimple stmt = gsi_stmt (gsi);
  tree t = is_lhs ? gimple_get_lhs (stmt) : gimple_assign_rhs1 (stmt);
  t = get_base_address (t);
  const enum tree_code code = TREE_CODE (t);
  if (code == MEM_REF
      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
    instrument_mem_ref (TREE_OPERAND (t, 0), &gsi, is_lhs);
  else if (code == ADDR_EXPR
	   && POINTER_TYPE_P (TREE_TYPE (t))
	   && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == METHOD_TYPE)
    instrument_member_call (&gsi);
}
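A hedged illustration (constructed statements) of what gets instrumented:

/* For a store such as
     *p_1 = 0;         <-- is_lhs == true, base is a MEM_REF of SSA_NAME p_1
   instrument_mem_ref emits a null check of p_1 before the access; a load
     x_2 = *p_1;       <-- is_lhs == false
   is handled the same way.  */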
Example no. 23
static bool
widened_name_p (tree name, gimple use_stmt, tree *half_type, gimple *def_stmt,
		bool check_sign)
{
  tree dummy;
  gimple dummy_gimple;
  loop_vec_info loop_vinfo;
  stmt_vec_info stmt_vinfo;
  tree type = TREE_TYPE (name);
  tree oprnd0;
  enum vect_def_type dt;
  tree def;

  stmt_vinfo = vinfo_for_stmt (use_stmt);
  loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);

  if (!vect_is_simple_use (name, loop_vinfo, NULL, def_stmt, &def, &dt))
    return false;

  if (dt != vect_internal_def
      && dt != vect_external_def && dt != vect_constant_def)
    return false;

  if (! *def_stmt)
    return false;

  if (!is_gimple_assign (*def_stmt))
    return false;

  if (gimple_assign_rhs_code (*def_stmt) != NOP_EXPR)
    return false;

  oprnd0 = gimple_assign_rhs1 (*def_stmt);

  *half_type = TREE_TYPE (oprnd0);
  if (!INTEGRAL_TYPE_P (type) || !INTEGRAL_TYPE_P (*half_type)
      || ((TYPE_UNSIGNED (type) != TYPE_UNSIGNED (*half_type)) && check_sign)
      || (TYPE_PRECISION (type) < (TYPE_PRECISION (*half_type) * 2)))
    return false;

  if (!vect_is_simple_use (oprnd0, loop_vinfo, NULL, &dummy_gimple, &dummy,
                           &dt))
    return false;

  return true;
}
Example no. 24
static tree
get_real_ref_rhs (tree expr)
{
  switch (TREE_CODE (expr))
    {
      case SSA_NAME:
        {
          /* Given a self-assign statement, say foo.x = foo.x,
             the IR (after SSA) looks like:

             D.1797_14 = foo.x;
             foo.x ={v} D.1797_14;

             So if the rhs EXPR is an SSA_NAME of a temp variable,
             e.g. D.1797_14, we need to grab the rhs of its SSA def
             statement (i.e. foo.x).  */
          tree vdecl = SSA_NAME_VAR (expr);
          if ((!vdecl || DECL_ARTIFICIAL (vdecl))
              && !gimple_nop_p (SSA_NAME_DEF_STMT (expr)))
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (expr);
              /* We are only interested in an assignment with a single
                 rhs operand because if it is not, the original assignment
                 will not possibly be a self-assignment.  */
              if (gimple_assign_single_p (def_stmt))
                return get_real_ref_rhs (gimple_assign_rhs1 (def_stmt));
              else
                return NULL_TREE;
            }
          else
            return vdecl;
        }
      case VAR_DECL:
      case PARM_DECL:
      case FIELD_DECL:
      case COMPONENT_REF:
      case MEM_REF:
      case ARRAY_REF:
        return expr;
      default:
        return NULL_TREE;
    }
}
bool
may_propagate_copy_into_stmt (gimple dest, tree orig)
{
    tree type_d;
    tree type_o;

    /* If the statement is a switch or a single-rhs assignment,
       then the expression to be replaced by the propagation may
       be an SSA_NAME.  Fortunately, there is an explicit tree
       for the expression, so we delegate to may_propagate_copy.  */

    if (gimple_assign_single_p (dest))
        return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
    else if (gimple_code (dest) == GIMPLE_SWITCH)
        return may_propagate_copy (gimple_switch_index (dest), orig);

    /* In other cases, the expression is not materialized, so there
       is no destination to pass to may_propagate_copy.  On the other
       hand, the expression cannot be an SSA_NAME, so the analysis
       is much simpler.  */

    if (TREE_CODE (orig) == SSA_NAME
            && (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
                ||  TREE_CODE (SSA_NAME_VAR (orig)) == MEMORY_PARTITION_TAG))
        return false;

    if (is_gimple_assign (dest))
        type_d = TREE_TYPE (gimple_assign_lhs (dest));
    else if (gimple_code (dest) == GIMPLE_COND)
        type_d = boolean_type_node;
    else if (is_gimple_call (dest)
             && gimple_call_lhs (dest) != NULL_TREE)
        type_d = TREE_TYPE (gimple_call_lhs (dest));
    else
        gcc_unreachable ();

    type_o = TREE_TYPE (orig);

    if (!useless_type_conversion_p (type_d, type_o))
        return false;

    return true;
}
Example no. 26
static void
warn_self_assign (gimple stmt)
{
  tree rhs, lhs;

  /* Check assignment statement.  */
  if (gimple_assign_single_p (stmt))
    {
      rhs = get_real_ref_rhs (gimple_assign_rhs1 (stmt));
      if (!rhs)
        return;

      lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) == SSA_NAME)
        {
          lhs = SSA_NAME_VAR (lhs);
          if (!lhs || DECL_ARTIFICIAL (lhs))
            return;
        }

      compare_and_warn (stmt, lhs, rhs);
    }
  /* Check overloaded operator '=' (if enabled).  */
  else if (check_operator_eq && is_gimple_call (stmt))
    {
      tree fdecl = gimple_call_fndecl (stmt);
      if (fdecl && (DECL_NAME (fdecl) == maybe_get_identifier ("operator=")))
        {
          /* If 'operator=' takes reference operands, the arguments will be 
             ADDR_EXPR trees. In this case, just remove the address-taken
             operator before we compare the lhs and rhs.  */
          lhs = gimple_call_arg (stmt, 0);
          if (TREE_CODE (lhs) == ADDR_EXPR)
            lhs = TREE_OPERAND (lhs, 0);
          rhs = gimple_call_arg (stmt, 1);
          if (TREE_CODE (rhs) == ADDR_EXPR)
            rhs = TREE_OPERAND (rhs, 0);

          compare_and_warn (stmt, lhs, rhs);
        }
    }
}
Example no. 27
static bool
recognize_bits_test (gimple cond, tree *name, tree *bits)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != NE_EXPR
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}
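As a hedged illustration (constructed GIMPLE), the pattern this recognizer matches:

/* _1 = x_3 & 4;
   if (_1 != 0)        <-- COND: NE_EXPR of a BIT_AND_EXPR result against zero
   On success *name is set to x_3 (possibly looked through a non-widening
   conversion by get_name_for_bit_test) and *bits to 4.  */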
void
propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
{
    gimple stmt = gsi_stmt (*gsi);

    if (is_gimple_assign (stmt))
    {
        tree expr = NULL_TREE;
        if (gimple_assign_single_p (stmt))
            expr = gimple_assign_rhs1 (stmt);
        propagate_tree_value (&expr, val);
        gimple_assign_set_rhs_from_tree (gsi, expr);
        stmt = gsi_stmt (*gsi);
    }
    else if (gimple_code (stmt) == GIMPLE_COND)
    {
        tree lhs = NULL_TREE;
        tree rhs = fold_convert (TREE_TYPE (val), integer_zero_node);
        propagate_tree_value (&lhs, val);
        gimple_cond_set_code (stmt, NE_EXPR);
        gimple_cond_set_lhs (stmt, lhs);
        gimple_cond_set_rhs (stmt, rhs);
    }
    else if (is_gimple_call (stmt)
             && gimple_call_lhs (stmt) != NULL_TREE)
    {
        gimple new_stmt;

        tree expr = NULL_TREE;
        propagate_tree_value (&expr, val);
        new_stmt  = gimple_build_assign (gimple_call_lhs (stmt), expr);
        copy_virtual_operands (new_stmt, stmt);
        move_ssa_defining_stmt_for_defs (new_stmt, stmt);
        gsi_replace (gsi, new_stmt, false);
    }
    else if (gimple_code (stmt) == GIMPLE_SWITCH)
        propagate_tree_value (gimple_switch_index_ptr (stmt), val);
    else
        gcc_unreachable ();
}
Example no. 29
static bool
remove_prop_source_from_use (tree name, gimple up_to_stmt)
{
  gimple_stmt_iterator gsi;
  gimple stmt;

  do {
    if (!has_zero_uses (name))
      return false;

    stmt = SSA_NAME_DEF_STMT (name);
    if (stmt == up_to_stmt)
      return true;

    gsi = gsi_for_stmt (stmt);
    release_defs (stmt);
    gsi_remove (&gsi, true);

    name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return false;
}
Example no. 30
static enum ssa_prop_result
copy_prop_visit_assignment (gimple stmt, tree *result_p)
{
  tree lhs, rhs;

  lhs = gimple_assign_lhs (stmt);
  rhs = valueize_val (gimple_assign_rhs1 (stmt));

  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* Straight copy between two SSA names.  First, make sure that
	 we can propagate the RHS into uses of LHS.  */
      if (!may_propagate_copy (lhs, rhs))
	return SSA_PROP_VARYING;

      *result_p = lhs;
      if (set_copy_of_val (*result_p, rhs))
	return SSA_PROP_INTERESTING;
      else
	return SSA_PROP_NOT_INTERESTING;
    }

  return SSA_PROP_VARYING;
}
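As a hedged example of the lattice update performed here (constructed statement):

/* For the copy
     x_2 = y_1;
   copy_prop_visit_assignment records copy-of (x_2) = y_1 and returns
   SSA_PROP_INTERESTING when that value changes; an RHS that cannot be
   propagated makes the result SSA_PROP_VARYING.  */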