Example #1
static bool
copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
{
  bool anything_changed = false;
  rtx insn;

  for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
    {
      int n_ops, i, alt, predicated;
      bool is_asm, any_replacements;
      rtx set;
      rtx link;
      bool replaced[MAX_RECOG_OPERANDS];
      bool changed = false;
      struct kill_set_value_data ksvd;

      if (!NONDEBUG_INSN_P (insn))
	{
	  if (DEBUG_INSN_P (insn))
	    {
	      rtx loc = INSN_VAR_LOCATION_LOC (insn);
	      if (!VAR_LOC_UNKNOWN_P (loc))
		replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
					   ALL_REGS, GET_MODE (loc),
					   ADDR_SPACE_GENERIC, insn, vd);
	    }

	  if (insn == BB_END (bb))
	    break;
	  else
	    continue;
	}

      set = single_set (insn);
      extract_insn (insn);
      if (! constrain_operands (1))
	fatal_insn_not_found (insn);
      preprocess_constraints ();
      alt = which_alternative;
      n_ops = recog_data.n_operands;
      is_asm = asm_noperands (PATTERN (insn)) >= 0;

      /* Simplify the code below by rewriting things to reflect
	 matching constraints.  Also promote OP_OUT to OP_INOUT
	 in predicated instructions.  */

      predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
      for (i = 0; i < n_ops; ++i)
	{
	  int matches = recog_op_alt[i][alt].matches;
	  if (matches >= 0)
	    recog_op_alt[i][alt].cl = recog_op_alt[matches][alt].cl;
	  if (matches >= 0 || recog_op_alt[i][alt].matched >= 0
	      || (predicated && recog_data.operand_type[i] == OP_OUT))
	    recog_data.operand_type[i] = OP_INOUT;
	}

      /* Apply changes to earlier DEBUG_INSNs if possible.  */
      if (vd->n_debug_insn_changes)
	note_uses (&PATTERN (insn), cprop_find_used_regs, vd);

      /* For each earlyclobber operand, zap the value data.  */
      for (i = 0; i < n_ops; i++)
	if (recog_op_alt[i][alt].earlyclobber)
	  kill_value (recog_data.operand[i], vd);

      /* Within asms, a clobber cannot overlap inputs or outputs.
	 I wouldn't think this were true for regular insns, but
	 scan_rtx treats them like that...  */
      note_stores (PATTERN (insn), kill_clobbered_value, vd);

      /* Kill all auto-incremented values.  */
      /* ??? REG_INC is useless, since stack pushes aren't done that way.  */
      for_each_rtx (&PATTERN (insn), kill_autoinc_value, vd);

      /* Kill all early-clobbered operands.  */
      for (i = 0; i < n_ops; i++)
	if (recog_op_alt[i][alt].earlyclobber)
	  kill_value (recog_data.operand[i], vd);

      /* If we have dead sets in the insn, then we need to note these as we
	 would clobbers.  */
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	{
	  if (REG_NOTE_KIND (link) == REG_UNUSED)
	    {
	      kill_value (XEXP (link, 0), vd);
	      /* Furthermore, if the insn looked like a single-set,
		 but the dead store kills the source value of that
		 set, then we can no longer use the plain move
		 special case below.  */
	      if (set
		  && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
		set = NULL;
	    }
	}

      /* Special-case plain move instructions, since we may well
	 be able to do the move from a different register class.  */
      if (set && REG_P (SET_SRC (set)))
	{
	  rtx src = SET_SRC (set);
	  unsigned int regno = REGNO (src);
	  enum machine_mode mode = GET_MODE (src);
	  unsigned int i;
	  rtx new_rtx;

	  /* If we are accessing SRC in some mode other than what we
	     set it in, make sure that the replacement is valid.  */
	  if (mode != vd->e[regno].mode)
	    {
	      if (hard_regno_nregs[regno][mode]
		  > hard_regno_nregs[regno][vd->e[regno].mode])
		goto no_move_special_case;

	      /* And likewise, if we are narrowing on big endian the transformation
		 is also invalid.  */
	      if (hard_regno_nregs[regno][mode]
		  < hard_regno_nregs[regno][vd->e[regno].mode]
		  && (GET_MODE_SIZE (vd->e[regno].mode) > UNITS_PER_WORD
		      ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
		goto no_move_special_case;
	    }

	  /* If the destination is also a register, try to find a source
	     register in the same class.  */
	  if (REG_P (SET_DEST (set)))
	    {
	      new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno), src, vd);
	      if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
		{
		  if (dump_file)
		    fprintf (dump_file,
			     "insn %u: replaced reg %u with %u\n",
			     INSN_UID (insn), regno, REGNO (new_rtx));
		  changed = true;
		  goto did_replacement;
		}
	      /* We need to re-extract as validate_change clobbers
		 recog_data.  */
	      extract_insn (insn);
	      if (! constrain_operands (1))
		fatal_insn_not_found (insn);
	      preprocess_constraints ();
	    }

	  /* Otherwise, try all valid registers and see if it's valid.  */
	  for (i = vd->e[regno].oldest_regno; i != regno;
	       i = vd->e[i].next_regno)
	    {
	      new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
				       mode, i, regno);
	      if (new_rtx != NULL_RTX)
		{
		  if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
		    {
		      ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
		      REG_ATTRS (new_rtx) = REG_ATTRS (src);
		      REG_POINTER (new_rtx) = REG_POINTER (src);
		      if (dump_file)
			fprintf (dump_file,
				 "insn %u: replaced reg %u with %u\n",
				 INSN_UID (insn), regno, REGNO (new_rtx));
		      changed = true;
		      goto did_replacement;
		    }
		  /* We need to re-extract as validate_change clobbers
		     recog_data.  */
		  extract_insn (insn);
		  if (! constrain_operands (1))
		    fatal_insn_not_found (insn);
		  preprocess_constraints ();
		}
	    }
	}
      no_move_special_case:

      any_replacements = false;

      /* For each input operand, replace a hard register with the
	 eldest live copy that's in an appropriate register class.  */
      for (i = 0; i < n_ops; i++)
	{
	  replaced[i] = false;

	  /* Don't scan match_operand here, since we've no reg class
	     information to pass down.  Any operands that we could
	     substitute in will be represented elsewhere.  */
	  if (recog_data.constraints[i][0] == '\0')
	    continue;

	  /* Don't replace in asms intentionally referencing hard regs.  */
	  if (is_asm && REG_P (recog_data.operand[i])
	      && (REGNO (recog_data.operand[i])
		  == ORIGINAL_REGNO (recog_data.operand[i])))
	    continue;

	  if (recog_data.operand_type[i] == OP_IN)
	    {
	      if (recog_op_alt[i][alt].is_address)
		replaced[i]
		  = replace_oldest_value_addr (recog_data.operand_loc[i],
					       recog_op_alt[i][alt].cl,
					       VOIDmode, ADDR_SPACE_GENERIC,
					       insn, vd);
	      else if (REG_P (recog_data.operand[i]))
		replaced[i]
		  = replace_oldest_value_reg (recog_data.operand_loc[i],
					      recog_op_alt[i][alt].cl,
					      insn, vd);
	      else if (MEM_P (recog_data.operand[i]))
		replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
							insn, vd);
	    }
	  else if (MEM_P (recog_data.operand[i]))
	    replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
						    insn, vd);

	  /* If we performed any replacement, update match_dups.  */
	  if (replaced[i])
	    {
	      int j;
	      rtx new_rtx;

	      new_rtx = *recog_data.operand_loc[i];
	      recog_data.operand[i] = new_rtx;
	      for (j = 0; j < recog_data.n_dups; j++)
		if (recog_data.dup_num[j] == i)
		  validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);

	      any_replacements = true;
	    }
	}

      if (any_replacements)
	{
	  if (! apply_change_group ())
	    {
	      for (i = 0; i < n_ops; i++)
		if (replaced[i])
		  {
		    rtx old = *recog_data.operand_loc[i];
		    recog_data.operand[i] = old;
		  }

	      if (dump_file)
		fprintf (dump_file,
			 "insn %u: reg replacements not verified\n",
			 INSN_UID (insn));
	    }
	  else
	    changed = true;
	}

    did_replacement:
      if (changed)
	{
	  anything_changed = true;

	  /* If something changed, perhaps further changes to earlier
	     DEBUG_INSNs can be applied.  */
	  if (vd->n_debug_insn_changes)
	    note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
	}

      ksvd.vd = vd;
      ksvd.ignore_set_reg = NULL_RTX;

      /* Clobber call-clobbered registers.  */
      if (CALL_P (insn))
	{
	  unsigned int set_regno = INVALID_REGNUM;
	  unsigned int set_nregs = 0;
	  unsigned int regno;
	  rtx exp;
	  hard_reg_set_iterator hrsi;

	  for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
	    {
	      rtx x = XEXP (exp, 0);
	      if (GET_CODE (x) == SET)
		{
		  rtx dest = SET_DEST (x);
		  kill_value (dest, vd);
		  set_value_regno (REGNO (dest), GET_MODE (dest), vd);
		  copy_value (dest, SET_SRC (x), vd);
		  ksvd.ignore_set_reg = dest;
		  set_regno = REGNO (dest);
		  set_nregs
		    = hard_regno_nregs[set_regno][GET_MODE (dest)];
		  break;
		}
	    }

	  EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
	    if (regno < set_regno || regno >= set_regno + set_nregs)
	      kill_value_regno (regno, 1, vd);

	  /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
	     of the SET isn't in regs_invalidated_by_call hard reg set,
	     but instead among CLOBBERs on the CALL_INSN, we could wrongly
	     assume the value in it is still live.  */
	  if (ksvd.ignore_set_reg)
	    note_stores (PATTERN (insn), kill_clobbered_value, vd);
	}

      /* Notice stores.  */
      note_stores (PATTERN (insn), kill_set_value, &ksvd);

      /* Notice copies.  */
      if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
	copy_value (SET_DEST (set), SET_SRC (set), vd);

      if (insn == BB_END (bb))
	break;
    }

  return anything_changed;
}
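A note on the loop over vd->e[regno].oldest_regno above: each value_data entry chains the registers that currently hold the same value, oldest copy first, through oldest_regno/next_regno. The following standalone sketch of that walk uses an invented entry struct, not GCC's real value_data:

/* Toy copy-chain walk in the style of the candidate loop above.
   The struct and register numbers are invented for illustration.  */
#include <stdio.h>

#define NREGS 8

struct entry { unsigned oldest_regno; unsigned next_regno; };

int main (void)
{
  struct entry e[NREGS];

  /* By default every register is a chain of one.  */
  for (unsigned r = 0; r < NREGS; r++)
    e[r].oldest_regno = e[r].next_regno = r;

  /* Pretend regs 2, 5 and 7 hold the same value; 2 is the oldest.  */
  e[2].oldest_regno = 2; e[2].next_regno = 5;
  e[5].oldest_regno = 2; e[5].next_regno = 7;
  e[7].oldest_regno = 2; e[7].next_regno = 7;

  /* Walk from the oldest copy toward the register the insn uses,
     trying each older copy as a replacement candidate.  */
  unsigned regno = 7;
  for (unsigned i = e[regno].oldest_regno; i != regno; i = e[i].next_regno)
    printf ("candidate replacement: reg %u\n", i);
  return 0;
}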
Example #2
static bool
propagate_rtx_1 (rtx *px, rtx old_rtx, rtx new_rtx, int flags)
{
  rtx x = *px, tem = NULL_RTX, op0, op1, op2;
  enum rtx_code code = GET_CODE (x);
  machine_mode mode = GET_MODE (x);
  machine_mode op_mode;
  bool can_appear = (flags & PR_CAN_APPEAR) != 0;
  bool valid_ops = true;

  if (!(flags & PR_HANDLE_MEM) && MEM_P (x) && !MEM_READONLY_P (x))
    {
      /* If unsafe, change MEMs to CLOBBERs or SCRATCHes (to preserve whether
	 they have side effects or not).  */
      *px = (side_effects_p (x)
	     ? gen_rtx_CLOBBER (GET_MODE (x), const0_rtx)
	     : gen_rtx_SCRATCH (GET_MODE (x)));
      return false;
    }

  /* If X is OLD_RTX, return NEW_RTX.  But not if replacing only within an
     address, and we are *not* inside one.  */
  if (x == old_rtx)
    {
      *px = new_rtx;
      return can_appear;
    }

  /* If this is an expression, try recursive substitution.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      op0 = XEXP (x, 0);
      op_mode = GET_MODE (op0);
      valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
      if (op0 == XEXP (x, 0))
	return true;
      tem = simplify_gen_unary (code, mode, op0, op_mode);
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
      valid_ops &= propagate_rtx_1 (&op1, old_rtx, new_rtx, flags);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return true;
      tem = simplify_gen_binary (code, mode, op0, op1);
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      op_mode = GET_MODE (op0) != VOIDmode ? GET_MODE (op0) : GET_MODE (op1);
      valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
      valid_ops &= propagate_rtx_1 (&op1, old_rtx, new_rtx, flags);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return true;
      tem = simplify_gen_relational (code, mode, op_mode, op0, op1);
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      op2 = XEXP (x, 2);
      op_mode = GET_MODE (op0);
      valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
      valid_ops &= propagate_rtx_1 (&op1, old_rtx, new_rtx, flags);
      valid_ops &= propagate_rtx_1 (&op2, old_rtx, new_rtx, flags);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1) && op2 == XEXP (x, 2))
	return true;
      if (op_mode == VOIDmode)
	op_mode = GET_MODE (op0);
      tem = simplify_gen_ternary (code, mode, op_mode, op0, op1, op2);
      break;

    case RTX_EXTRA:
      /* The only case we try to handle is a SUBREG.  */
      if (code == SUBREG)
	{
          op0 = XEXP (x, 0);
	  valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
          if (op0 == XEXP (x, 0))
	    return true;
	  tem = simplify_gen_subreg (mode, op0, GET_MODE (SUBREG_REG (x)),
				     SUBREG_BYTE (x));
	}
      break;

    case RTX_OBJ:
      if (code == MEM && x != new_rtx)
	{
	  rtx new_op0;
	  op0 = XEXP (x, 0);

	  /* There are some addresses that we cannot work on.  */
	  if (!can_simplify_addr (op0))
	    return true;

	  op0 = new_op0 = targetm.delegitimize_address (op0);
	  valid_ops &= propagate_rtx_1 (&new_op0, old_rtx, new_rtx,
					flags | PR_CAN_APPEAR);

	  /* Dismiss transformation that we do not want to carry on.  */
	  if (!valid_ops
	      || new_op0 == op0
	      || !(GET_MODE (new_op0) == GET_MODE (op0)
		   || GET_MODE (new_op0) == VOIDmode))
	    return true;

	  canonicalize_address (new_op0);

	  /* Copy propagations are always ok.  Otherwise check the costs.  */
	  if (!(REG_P (old_rtx) && REG_P (new_rtx))
	      && !should_replace_address (op0, new_op0, GET_MODE (x),
					  MEM_ADDR_SPACE (x),
					  flags & PR_OPTIMIZE_FOR_SPEED))
	    return true;

	  tem = replace_equiv_address_nv (x, new_op0);
	}

      else if (code == LO_SUM)
	{
          op0 = XEXP (x, 0);
          op1 = XEXP (x, 1);

	  /* The only simplification we do attempts to remove references to op0
	     or make it constant -- in both cases, op0's invalidity will not
	     make the result invalid.  */
	  propagate_rtx_1 (&op0, old_rtx, new_rtx, flags | PR_CAN_APPEAR);
	  valid_ops &= propagate_rtx_1 (&op1, old_rtx, new_rtx, flags);
          if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	    return true;

	  /* (lo_sum (high x) x) -> x  */
	  if (GET_CODE (op0) == HIGH && rtx_equal_p (XEXP (op0, 0), op1))
	    tem = op1;
	  else
	    tem = gen_rtx_LO_SUM (mode, op0, op1);

	  /* OP1 is likely not a legitimate address, otherwise there would have
	     been no LO_SUM.  We want it to disappear if it is invalid, return
	     false in that case.  */
	  return memory_address_p (mode, tem);
	}

      else if (code == REG)
	{
	  if (rtx_equal_p (x, old_rtx))
	    {
              *px = new_rtx;
              return can_appear;
	    }
	}
      break;

    default:
      break;
    }

  /* No change, no trouble.  */
  if (tem == NULL_RTX)
    return true;

  *px = tem;

  /* Allow replacements that simplify operations on a vector or complex
     value to a component.  The most prominent case is
     (subreg ([vec_]concat ...)).   */
  if (REG_P (tem) && !HARD_REGISTER_P (tem)
      && (VECTOR_MODE_P (GET_MODE (new_rtx))
	  || COMPLEX_MODE_P (GET_MODE (new_rtx)))
      && GET_MODE (tem) == GET_MODE_INNER (GET_MODE (new_rtx)))
    return true;

  /* The replacement we made so far is valid, if all of the recursive
     replacements were valid, or we could simplify everything to
     a constant.  */
  return valid_ops || can_appear || CONSTANT_P (tem);
}
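The switch above repeats one pattern per RTX class: recurse into each operand, return true when nothing changed, and otherwise rebuild the node through a simplifier. A toy standalone version of that substitute-and-simplify pattern, over an invented expression type rather than RTL:

/* Toy substitute-and-simplify mirroring the recursion above.
   The expr type is invented for illustration.  */
#include <stdio.h>

typedef struct expr { char op; int val; struct expr *l, *r; } expr;

static void
subst (expr **px, expr *old_e, expr *new_e)
{
  expr *x = *px;
  if (x == old_e)            /* direct hit: splice in the replacement */
    {
      *px = new_e;
      return;
    }
  if (x->op != '+')          /* leaves: nothing to recurse into */
    return;
  subst (&x->l, old_e, new_e);
  subst (&x->r, old_e, new_e);
  /* Fold constants, playing the role of simplify_gen_binary.  */
  if (x->l->op == 'c' && x->r->op == 'c')
    {
      x->op = 'c';
      x->val = x->l->val + x->r->val;
    }
}

int main (void)
{
  expr v = { 'v', 0, 0, 0 };        /* a "register" */
  expr c2 = { 'c', 2, 0, 0 };
  expr sum = { '+', 0, &v, &c2 };   /* v + 2 */
  expr c3 = { 'c', 3, 0, 0 };
  expr *root = &sum;

  subst (&root, &v, &c3);           /* propagate v = 3 */
  printf ("%d\n", root->val);       /* folds to 5 */
  return 0;
}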
Example #3
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
			 enum machine_mode mode, rtx size, rtx if_false_label,
			 rtx if_true_label, int prob)
{
  rtx tem;
  rtx dummy_label = NULL_RTX;
  rtx last;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
	  || code == ORDERED || code == UNORDERED
	  || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
	  || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
	  || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
	{
          tem = if_true_label;
          if_true_label = if_false_label;
          if_false_label = tem;
	  code = rcode;
	  prob = inv (prob);
	}
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
						 op0, op1)))
    {
      if (CONSTANT_P (tem))
	{
	  rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
		      ? if_false_label : if_true_label;
	  if (label)
	    emit_jump (label);
	  return;
	}

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
	{
	case LTU:
	  do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
					if_false_label, if_true_label, prob);
	  break;

	case LEU:
	  do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case GTU:
	  do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
					if_false_label, if_true_label, prob);
	  break;

	case GEU:
	  do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case LT:
	  do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
					if_false_label, if_true_label, prob);
	  break;

	case LE:
	  do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case GT:
	  do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
					if_false_label, if_true_label, prob);
	  break;

	case GE:
	  do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case EQ:
	  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
					 if_true_label, prob);
	  break;

	case NE:
	  do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
					 if_false_label, inv (prob));
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  && ! can_compare_p (code, mode, ccp_jump)
	  && can_compare_p (swap_condition (code), mode, ccp_jump))
	{
	  rtx tmp;
	  code = swap_condition (code);
	  tmp = op0;
	  op0 = op1;
	  op1 = tmp;
	}

      else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	       && ! can_compare_p (code, mode, ccp_jump)

	       /* Never split ORDERED and UNORDERED.  These must be implemented.  */
	       && (code != ORDERED && code != UNORDERED)

               /* Split a floating-point comparison if we can jump on other
	          conditions...  */
	       && (have_insn_for (COMPARE, mode)

	           /* ... or if there is no libcall for it.  */
	           || code_to_optab[code] == NULL))
        {
	  enum rtx_code first_code;
	  bool and_them = split_comparison (code, mode, &first_code, &code);

	  /* If there are no NaNs, the first comparison should always fall
	     through.  */
	  if (!HONOR_NANS (mode))
	    gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));

	  else
	    {
	      if (and_them)
		{
		  rtx dest_label;
		  /* If we only jump if true, just bypass the second jump.  */
		  if (! if_false_label)
		    {
		      if (! dummy_label)
		        dummy_label = gen_label_rtx ();
		      dest_label = dummy_label;
		    }
		  else
		    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
					   size, dest_label, NULL_RTX, prob);
		}
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
					 size, NULL_RTX, if_true_label, prob);
	    }
	}

      last = get_last_insn ();
      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			       if_true_label);
      if (prob != -1 && profile_status != PROFILE_ABSENT)
	{
	  for (last = NEXT_INSN (last);
	       last && NEXT_INSN (last);
	       last = NEXT_INSN (last))
	    if (JUMP_P (last))
	      break;
	  if (!last
	      || !JUMP_P (last)
	      || NEXT_INSN (last)
	      || !any_condjump_p (last))
	    {
	      if (dump_file)
		fprintf (dump_file, "Failed to add probability note\n");
	    }
	  else
	    {
	      gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
	      add_reg_note (last, REG_BR_PROB, GEN_INT (prob));
	    }
	}
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}
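inv above flips a branch probability whenever the jump sense is reversed. In GCC of this vintage, probabilities are integers scaled by REG_BR_PROB_BASE (10000). A minimal standalone sketch of the inversion; the guard for the -1 "no estimate" value is this sketch's own safety net, not necessarily part of the original helper:

/* Branch-probability inversion, standalone.  REG_BR_PROB_BASE matches
   GCC's scale; the helper itself is a sketch, not dojump.c's inv.  */
#include <stdio.h>

#define REG_BR_PROB_BASE 10000

static int
inv (int prob)
{
  return prob < 0 ? prob : REG_BR_PROB_BASE - prob;
}

int main (void)
{
  printf ("%d\n", inv (7500));  /* 2500: a 75%% branch becomes 25%% */
  printf ("%d\n", inv (-1));    /* -1: "no estimate" is preserved */
  return 0;
}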
Example #4
static void
combine_stack_adjustments_for_block (basic_block bb)
{
    HOST_WIDE_INT last_sp_adjust = 0;
    rtx last_sp_set = NULL_RTX;
    rtx last2_sp_set = NULL_RTX;
    struct csa_reflist *reflist = NULL;
    rtx insn, next, set;
    struct record_stack_refs_data data;
    bool end_of_block = false;

    for (insn = BB_HEAD (bb); !end_of_block ; insn = next)
    {
        end_of_block = insn == BB_END (bb);
        next = NEXT_INSN (insn);

        if (! INSN_P (insn))
            continue;

        set = single_set_for_csa (insn);
        if (set)
        {
            rtx dest = SET_DEST (set);
            rtx src = SET_SRC (set);

            /* Find constant additions to the stack pointer.  */
            if (dest == stack_pointer_rtx
                    && GET_CODE (src) == PLUS
                    && XEXP (src, 0) == stack_pointer_rtx
                    && CONST_INT_P (XEXP (src, 1)))
            {
                HOST_WIDE_INT this_adjust = INTVAL (XEXP (src, 1));

                /* If we've not seen an adjustment previously, record
                   it now and continue.  */
                if (! last_sp_set)
                {
                    last_sp_set = insn;
                    last_sp_adjust = this_adjust;
                    continue;
                }

                /* If not all recorded refs can be adjusted, or the
                   adjustment is now too large for a constant addition,
                   we cannot merge the two stack adjustments.

                   Also we need to be careful to not move the stack pointer
                   such that we create stack accesses outside the allocated
                   area.  We can combine an allocation into the first insn,
                   or a deallocation into the second insn.  We cannot
                   combine an allocation followed by a deallocation.

                   The only somewhat frequent occurrence of the latter is
                   when a function allocates a stack frame but does not use
                   it.  For this case, we would need to analyze the rtl
                   stream to be sure that the allocated area is really
                   unused.  This means not only checking the memory
                   references, but also all registers or global memory
                   references possibly containing a stack frame address.

                   Perhaps the best way to address this problem is to teach
                   gcc not to allocate stack for objects never used.  */

                /* Combine an allocation into the first instruction.  */
                if (STACK_GROWS_DOWNWARD ? this_adjust <= 0 : this_adjust >= 0)
                {
                    if (try_apply_stack_adjustment (last_sp_set, reflist,
                                                    last_sp_adjust + this_adjust,
                                                    this_adjust))
                    {
                        /* It worked!  */
                        maybe_move_args_size_note (last_sp_set, insn, false);
                        delete_insn (insn);
                        last_sp_adjust += this_adjust;
                        continue;
                    }
                }

                /* Otherwise we have a deallocation.  Do not combine with
                   a previous allocation.  Combine into the second insn.  */
                else if (STACK_GROWS_DOWNWARD
                         ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
                {
                    if (try_apply_stack_adjustment (insn, reflist,
                                                    last_sp_adjust + this_adjust,
                                                    -last_sp_adjust))
                    {
                        /* It worked!  */
                        maybe_move_args_size_note (insn, last_sp_set, true);
                        delete_insn (last_sp_set);
                        last_sp_set = insn;
                        last_sp_adjust += this_adjust;
                        free_csa_reflist (reflist);
                        reflist = NULL;
                        continue;
                    }
                }

                /* Combination failed.  Restart processing from here.  If
                   deallocation+allocation conspired to cancel, we can
                   delete the old deallocation insn.  */
                if (last_sp_set)
                {
                    if (last_sp_adjust == 0)
                    {
                        maybe_move_args_size_note (insn, last_sp_set, true);
                        delete_insn (last_sp_set);
                    }
                    else
                        last2_sp_set = last_sp_set;
                }
                free_csa_reflist (reflist);
                reflist = NULL;
                last_sp_set = insn;
                last_sp_adjust = this_adjust;
                continue;
            }

            /* Find a store with pre-(dec|inc)rement or pre-modify of exactly
               the previous adjustment and turn it into a simple store.  This
               is equivalent to anticipating the stack adjustment so this must
               be an allocation.  */
            if (MEM_P (dest)
                    && ((STACK_GROWS_DOWNWARD
                         ? (GET_CODE (XEXP (dest, 0)) == PRE_DEC
                            && last_sp_adjust
                            == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest)))
                         : (GET_CODE (XEXP (dest, 0)) == PRE_INC
                            && last_sp_adjust
                            == -(HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest))))
                        || ((STACK_GROWS_DOWNWARD
                             ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
                            && GET_CODE (XEXP (dest, 0)) == PRE_MODIFY
                            && GET_CODE (XEXP (XEXP (dest, 0), 1)) == PLUS
                            && XEXP (XEXP (XEXP (dest, 0), 1), 0)
                            == stack_pointer_rtx
                            && GET_CODE (XEXP (XEXP (XEXP (dest, 0), 1), 1))
                            == CONST_INT
                            && INTVAL (XEXP (XEXP (XEXP (dest, 0), 1), 1))
                            == -last_sp_adjust))
                    && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx
                    && !reg_mentioned_p (stack_pointer_rtx, src)
                    && memory_address_p (GET_MODE (dest), stack_pointer_rtx)
                    && try_apply_stack_adjustment (insn, reflist, 0,
                                                   -last_sp_adjust))
            {
                if (last2_sp_set)
                    maybe_move_args_size_note (last2_sp_set, last_sp_set, false);
                else
                    maybe_move_args_size_note (insn, last_sp_set, true);
                delete_insn (last_sp_set);
                free_csa_reflist (reflist);
                reflist = NULL;
                last_sp_set = NULL_RTX;
                last_sp_adjust = 0;
                continue;
            }
        }

        data.insn = insn;
        data.reflist = reflist;
        if (!CALL_P (insn) && last_sp_set
                && !for_each_rtx (&PATTERN (insn), record_stack_refs, &data))
        {
            reflist = data.reflist;
            continue;
        }
        reflist = data.reflist;

        /* Otherwise, we were not able to process the instruction.
           Do not continue collecting data across such a one.  */
        if (last_sp_set
                && (CALL_P (insn)
                    || reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))))
        {
            if (last_sp_set && last_sp_adjust == 0)
            {
                force_move_args_size_note (bb, last2_sp_set, last_sp_set);
                delete_insn (last_sp_set);
            }
            free_csa_reflist (reflist);
            reflist = NULL;
            last2_sp_set = NULL_RTX;
            last_sp_set = NULL_RTX;
            last_sp_adjust = 0;
        }
    }

    if (last_sp_set && last_sp_adjust == 0)
    {
        force_move_args_size_note (bb, last2_sp_set, last_sp_set);
        delete_insn (last_sp_set);
    }

    if (reflist)
        free_csa_reflist (reflist);
}
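The combination rules in the long comment above boil down to sign checks on the two adjustments. A standalone illustration, assuming a downward-growing stack (so an allocation is a negative adjustment):

/* Sign rules for merging two stack-pointer adjustments, assuming
   STACK_GROWS_DOWNWARD; purely illustrative arithmetic.  */
#include <stdio.h>
#include <stdbool.h>

static bool
can_merge (long last_adjust, long this_adjust)
{
  if (this_adjust <= 0)   /* second insn allocates: fold into the first */
    return true;
  if (last_adjust >= 0)   /* first insn deallocated: fold into the second */
    return true;
  return false;           /* allocation followed by deallocation */
}

int main (void)
{
  printf ("%d\n", can_merge (-16, -8)); /* 1: allocation after allocation */
  printf ("%d\n", can_merge (16, 8));   /* 1: two deallocations combine */
  printf ("%d\n", can_merge (-16, 8));  /* 0: alloc then dealloc is unsafe */
  return 0;
}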
Example #5
void
crx_print_operand (FILE * file, rtx x, int code)
{
  switch (code)
    {
    case 'p' :
      if (GET_CODE (x) == REG) {
	if (GET_MODE (x) == DImode || GET_MODE (x) == DFmode)
	  {
	    int regno = REGNO (x);
	    if (regno + 1 >= SP_REGNUM) abort ();
	    fprintf (file, "{%s, %s}", reg_names[regno], reg_names[regno + 1]);
	    return;
	  }
	else
	  {
	    if (REGNO (x) >= SP_REGNUM) abort ();
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	    return;
	  }
      }

    case 'd' :
	{
	  const char *crx_cmp_str;
	  switch (GET_CODE (x))
	    { /* MD: compare (reg, reg or imm) but CRX: cmp (reg or imm, reg)
	       * -> swap all non symmetric ops */
	    case EQ  : crx_cmp_str = "eq"; break;
	    case NE  : crx_cmp_str = "ne"; break;
	    case GT  : crx_cmp_str = "lt"; break;
	    case GTU : crx_cmp_str = "lo"; break;
	    case LT  : crx_cmp_str = "gt"; break;
	    case LTU : crx_cmp_str = "hi"; break;
	    case GE  : crx_cmp_str = "le"; break;
	    case GEU : crx_cmp_str = "ls"; break;
	    case LE  : crx_cmp_str = "ge"; break;
	    case LEU : crx_cmp_str = "hs"; break;
	    default : abort ();
	    }
	  fprintf (file, "%s", crx_cmp_str);
	  return;
	}

    case 'H':
      /* Print high part of a double precision value. */
      switch (GET_CODE (x))
	{
	case CONST_DOUBLE:
	  if (GET_MODE (x) == SFmode) abort ();
	  if (GET_MODE (x) == DFmode)
	    {
	      /* High part of a DF const. */
	      REAL_VALUE_TYPE r;
	      long l[2];

	      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	      REAL_VALUE_TO_TARGET_DOUBLE (r, l);

	      fprintf (file, "$0x%lx", l[1]);
	      return;
	    }

	  /* -- Fallthrough to handle DI consts -- */

	case CONST_INT:
	    {
	      rtx high, low;
	      split_double (x, &low, &high);
	      putc ('$', file);
	      output_addr_const (file, high);
	      return;
	    }

	case REG:
	  if (REGNO (x) + 1 >= FIRST_PSEUDO_REGISTER) abort ();
	  fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	  return;

	case MEM:
	  /* Adjust memory address to high part.  */
	    {
	      rtx adj_mem = x;
	      adj_mem = adjust_address (adj_mem, GET_MODE (adj_mem), 4);

	      output_memory_reference_mode = GET_MODE (adj_mem);
	      output_address (XEXP (adj_mem, 0));
	      return;
	    }

	default:
	  abort ();
	}

    case 'L':
      /* Print low part of a double precision value. */
      switch (GET_CODE (x))
	{
	case CONST_DOUBLE:
	  if (GET_MODE (x) == SFmode) abort ();
	  if (GET_MODE (x) == DFmode)
	    {
	      /* Low part of a DF const. */
	      REAL_VALUE_TYPE r;
	      long l[2];

	      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	      REAL_VALUE_TO_TARGET_DOUBLE (r, l);

	      fprintf (file, "$0x%lx", l[0]);
	      return;
	    }

	  /* -- Fallthrough to handle DI consts -- */

	case CONST_INT:
	    {
	      rtx high, low;
	      split_double (x, &low, &high);
	      putc ('$', file);
	      output_addr_const (file, low);
	      return;
	    }

	case REG:
	  fprintf (file, "%s", reg_names[REGNO (x)]);
	  return;

	case MEM:
	  output_memory_reference_mode = GET_MODE (x);
	  output_address (XEXP (x, 0));
	  return;

	default:
	  abort ();
	}

    case 0 : /* default */
      switch (GET_CODE (x))
	{
	case REG:
	  fprintf (file, "%s", reg_names[REGNO (x)]);
	  return;

	case MEM:
	  output_memory_reference_mode = GET_MODE (x);
	  output_address (XEXP (x, 0));
	  return;

	case CONST_DOUBLE:
	    {
	      REAL_VALUE_TYPE r;
	      long l;

	      /* Always use H and L for double precision - see above */
	      gcc_assert (GET_MODE (x) == SFmode);

	      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	      REAL_VALUE_TO_TARGET_SINGLE (r, l);

	      fprintf (file, "$0x%lx", l);
	      return;
	    }

	default:
	  putc ('$', file);
	  output_addr_const (file, x);
	  return;
	}

    default:
      output_operand_lossage ("invalid %%xn code");
    }

  abort ();
}
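The 'H' and 'L' cases split a 64-bit constant into two 32-bit words, which is what split_double produces on a 32-bit target. A standalone equivalent of the split, assuming 32-bit words:

/* Standalone equivalent of the 'H'/'L' word split performed via
   split_double above; 32-bit words assumed.  */
#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>

int main (void)
{
  int64_t v = -2;                                   /* 0xfffffffffffffffe */
  uint32_t low  = (uint32_t) v;                     /* 'L' part: 0xfffffffe */
  uint32_t high = (uint32_t) ((uint64_t) v >> 32);  /* 'H' part: 0xffffffff */
  printf ("$0x%" PRIx32 " $0x%" PRIx32 "\n", low, high);
  return 0;
}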
Example #6
static void
adjust_frame_related_expr (rtx last_sp_set, rtx insn,
			   HOST_WIDE_INT this_adjust)
{
  rtx note = find_reg_note (last_sp_set, REG_FRAME_RELATED_EXPR, NULL_RTX);
  rtx new_expr = NULL_RTX;

  if (note == NULL_RTX && RTX_FRAME_RELATED_P (insn))
    return;

  if (note
      && GET_CODE (XEXP (note, 0)) == SEQUENCE
      && XVECLEN (XEXP (note, 0), 0) >= 2)
    {
      rtx expr = XEXP (note, 0);
      rtx last = XVECEXP (expr, 0, XVECLEN (expr, 0) - 1);
      int i;

      if (GET_CODE (last) == SET
	  && RTX_FRAME_RELATED_P (last) == RTX_FRAME_RELATED_P (insn)
	  && SET_DEST (last) == stack_pointer_rtx
	  && GET_CODE (SET_SRC (last)) == PLUS
	  && XEXP (SET_SRC (last), 0) == stack_pointer_rtx
	  && CONST_INT_P (XEXP (SET_SRC (last), 1)))
	{
	  XEXP (SET_SRC (last), 1)
	    = GEN_INT (INTVAL (XEXP (SET_SRC (last), 1)) + this_adjust);
	  return;
	}

      new_expr = gen_rtx_SEQUENCE (VOIDmode,
				   rtvec_alloc (XVECLEN (expr, 0) + 1));
      for (i = 0; i < XVECLEN (expr, 0); i++)
	XVECEXP (new_expr, 0, i) = XVECEXP (expr, 0, i);
    }
  else
    {
      new_expr = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
      if (note)
	XVECEXP (new_expr, 0, 0) = XEXP (note, 0);
      else
	{
	  rtx expr = copy_rtx (single_set_for_csa (last_sp_set));

	  XEXP (SET_SRC (expr), 1)
	    = GEN_INT (INTVAL (XEXP (SET_SRC (expr), 1)) - this_adjust);
	  RTX_FRAME_RELATED_P (expr) = 1;
	  XVECEXP (new_expr, 0, 0) = expr;
	}
    }

  XVECEXP (new_expr, 0, XVECLEN (new_expr, 0) - 1)
    = copy_rtx (single_set_for_csa (insn));
  RTX_FRAME_RELATED_P (XVECEXP (new_expr, 0, XVECLEN (new_expr, 0) - 1))
    = RTX_FRAME_RELATED_P (insn);
  if (note)
    XEXP (note, 0) = new_expr;
  else
    add_reg_note (last_sp_set, REG_FRAME_RELATED_EXPR, new_expr);
}
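When the existing note cannot simply have its constant bumped, the function allocates a vector one element longer, copies the old elements, and appends the new set. The copy-and-append idiom, reduced to plain ints standing in for the rtx vector elements:

/* Copy-and-append, as used to grow the REG_FRAME_RELATED_EXPR
   SEQUENCE above; ints stand in for the rtx elements.  */
#include <stdio.h>
#include <stdlib.h>

int main (void)
{
  int old_vec[] = { 10, 20, 30 };
  int old_len = 3;

  int *new_vec = malloc ((old_len + 1) * sizeof *new_vec);
  if (!new_vec)
    return 1;
  for (int i = 0; i < old_len; i++)  /* the XVECEXP copy loop above */
    new_vec[i] = old_vec[i];
  new_vec[old_len] = 40;             /* the appended set */

  for (int i = 0; i <= old_len; i++)
    printf ("%d ", new_vec[i]);
  putchar ('\n');
  free (new_vec);
  return 0;
}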
Example #7
File: rtl.c Project: 0day-ci/gcc
int
rtx_equal_p (const_rtx x, const_rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == y)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* Some RTL can be compared nonrecursively.  */
  switch (code)
    {
    case REG:
      return (REGNO (x) == REGNO (y));

    case LABEL_REF:
      return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case DEBUG_EXPR:
    case VALUE:
    case SCRATCH:
    CASE_CONST_UNIQUE:
      return 0;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
	     == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
	     == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    {
#ifndef GENERATOR_FILE
	      /* Operand 6 of ASM_OPERANDS and operand 1 of ASM_INPUT are
		 source locations; ignore a difference in the ad-hoc
		 (block) part and compare only the underlying locus.  */
	      if (((code == ASM_OPERANDS && i == 6)
		   || (code == ASM_INPUT && i == 1))
		  && (LOCATION_LOCUS (XINT (x, i))
		      == LOCATION_LOCUS (XINT (y, i))))
		break;
#endif
	      return 0;
	    }
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_p (XVECEXP (x, i, j),  XVECEXP (y, i, j)) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_p (XEXP (x, i), XEXP (y, i)) == 0)
	    return 0;
	  break;

	case 'S':
	case 's':
	  if ((XSTR (x, i) || XSTR (y, i))
	      && (! XSTR (x, i) || ! XSTR (y, i)
		  || strcmp (XSTR (x, i), XSTR (y, i))))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
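rtx_equal_p is shape-first recursion: reject on differing code or mode, handle the flat cases, then compare children element by element. The same skeleton over a toy tagged tree (invented node type, not RTL):

/* Toy structural equality in the style of rtx_equal_p: compare the
   tag and payload first, then recurse over the children.  */
#include <stdio.h>

typedef struct node { int code; int value; struct node *kid[2]; } node;

static int
node_equal_p (const node *x, const node *y)
{
  if (x == y)
    return 1;                /* also covers two null children */
  if (!x || !y || x->code != y->code || x->value != y->value)
    return 0;
  for (int i = 0; i < 2; i++)
    if (!node_equal_p (x->kid[i], y->kid[i]))
      return 0;
  return 1;
}

int main (void)
{
  node c = { 'c', 7, { 0, 0 } };
  node a = { '+', 0, { &c, 0 } };
  node b = { '+', 0, { &c, 0 } };
  printf ("%d\n", node_equal_p (&a, &b));  /* 1: same shape and leaves */
  b.value = 1;
  printf ("%d\n", node_equal_p (&a, &b));  /* 0: payloads differ */
  return 0;
}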
Example #8
static bool
combine_set_extension (ext_cand *cand, rtx_insn *curr_insn, rtx *orig_set)
{
  rtx orig_src = SET_SRC (*orig_set);
  machine_mode orig_mode = GET_MODE (SET_DEST (*orig_set));
  rtx new_set;
  rtx cand_pat = PATTERN (cand->insn);

  /* If the extension's source/destination registers are not the same
     then we need to change the original load to reference the destination
     of the extension.  Then we need to emit a copy from that destination
     to the original destination of the load.  */
  rtx new_reg;
  bool copy_needed
    = (REGNO (SET_DEST (cand_pat)) != REGNO (XEXP (SET_SRC (cand_pat), 0)));
  if (copy_needed)
    new_reg = gen_rtx_REG (cand->mode, REGNO (SET_DEST (cand_pat)));
  else
    new_reg = gen_rtx_REG (cand->mode, REGNO (SET_DEST (*orig_set)));

#if 0
  /* Rethinking test.  Temporarily disabled.  */
  /* We're going to be widening the result of DEF_INSN, ensure that doing so
     doesn't change the number of hard registers needed for the result.  */
  if (HARD_REGNO_NREGS (REGNO (new_reg), cand->mode)
      != HARD_REGNO_NREGS (REGNO (SET_DEST (*orig_set)),
			   GET_MODE (SET_DEST (*orig_set))))
	return false;
#endif

  /* Merge constants by directly moving the constant into the register under
     some conditions.  Recall that RTL constants are sign-extended.  */
  if (GET_CODE (orig_src) == CONST_INT
      && HOST_BITS_PER_WIDE_INT >= GET_MODE_BITSIZE (cand->mode))
    {
      if (INTVAL (orig_src) >= 0 || cand->code == SIGN_EXTEND)
	new_set = gen_rtx_SET (new_reg, orig_src);
      else
	{
	  /* Zero-extend the negative constant by masking out the bits outside
	     the source mode.  */
	  rtx new_const_int
	    = gen_int_mode (INTVAL (orig_src) & GET_MODE_MASK (orig_mode),
			    GET_MODE (new_reg));
	  new_set = gen_rtx_SET (new_reg, new_const_int);
	}
    }
  else if (GET_MODE (orig_src) == VOIDmode)
    {
      /* This is mostly due to a call insn that should not be optimized.  */
      return false;
    }
  else if (GET_CODE (orig_src) == cand->code)
    {
      /* Here is a sequence of two extensions.  Try to merge them.  */
      rtx temp_extension
	= gen_rtx_fmt_e (cand->code, cand->mode, XEXP (orig_src, 0));
      rtx simplified_temp_extension = simplify_rtx (temp_extension);
      if (simplified_temp_extension)
        temp_extension = simplified_temp_extension;
      new_set = gen_rtx_SET (new_reg, temp_extension);
    }
  else if (GET_CODE (orig_src) == IF_THEN_ELSE)
    {
      /* Only IF_THEN_ELSE of phi-type copies are combined.  Otherwise,
         in general, IF_THEN_ELSE should not be combined.  */
      return false;
    }
  else
    {
      /* This is the normal case.  */
      rtx temp_extension
	= gen_rtx_fmt_e (cand->code, cand->mode, orig_src);
      rtx simplified_temp_extension = simplify_rtx (temp_extension);
      if (simplified_temp_extension)
        temp_extension = simplified_temp_extension;
      new_set = gen_rtx_SET (new_reg, temp_extension);
    }

  /* This change is a part of a group of changes.  Hence,
     validate_change will not try to commit the change.  */
  if (validate_change (curr_insn, orig_set, new_set, true)
      && update_reg_equal_equiv_notes (curr_insn, cand->mode, orig_mode,
				       cand->code))
    {
      if (dump_file)
        {
          fprintf (dump_file,
		   "Tentatively merged extension with definition %s:\n",
		   (copy_needed) ? "(copy needed)" : "");
          print_rtl_single (dump_file, curr_insn);
        }
      return true;
    }

  return false;
}
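The CONST_INT branch relies on RTL constants being stored sign-extended: zero-extending a negative narrow constant means masking it down to the source mode's width, which is what gen_int_mode with GET_MODE_MASK achieves above. The arithmetic, standalone, with 0xff playing GET_MODE_MASK of a byte-wide mode:

/* Zero-extending a sign-extended constant by masking, as in the
   CONST_INT branch above; the mask value is illustrative.  */
#include <stdio.h>

int main (void)
{
  long orig = -2;                   /* sign-extended narrow constant */
  long mask = 0xff;                 /* GET_MODE_MASK of the narrow mode */
  printf ("0x%lx\n", orig & mask);  /* 0xfe: the zero-extended value */
  return 0;
}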
Example #9
File: ree.c Project: woepaul/gcc
static bool
combine_set_extension (ext_cand *cand, rtx curr_insn, rtx *orig_set)
{
  rtx orig_src = SET_SRC (*orig_set);
  rtx new_reg = gen_rtx_REG (cand->mode, REGNO (SET_DEST (*orig_set)));
  rtx new_set;

  /* Merge constants by directly moving the constant into the register under
     some conditions.  Recall that RTL constants are sign-extended.  */
  if (GET_CODE (orig_src) == CONST_INT
      && HOST_BITS_PER_WIDE_INT >= GET_MODE_BITSIZE (cand->mode))
    {
      if (INTVAL (orig_src) >= 0 || cand->code == SIGN_EXTEND)
	new_set = gen_rtx_SET (VOIDmode, new_reg, orig_src);
      else
	{
	  /* Zero-extend the negative constant by masking out the bits outside
	     the source mode.  */
	  enum machine_mode src_mode = GET_MODE (SET_DEST (*orig_set));
	  rtx new_const_int
	    = GEN_INT (INTVAL (orig_src) & GET_MODE_MASK (src_mode));
	  new_set = gen_rtx_SET (VOIDmode, new_reg, new_const_int);
	}
    }
  else if (GET_MODE (orig_src) == VOIDmode)
    {
      /* This is mostly due to a call insn that should not be optimized.  */
      return false;
    }
  else if (GET_CODE (orig_src) == cand->code)
    {
      /* Here is a sequence of two extensions.  Try to merge them.  */
      rtx temp_extension
	= gen_rtx_fmt_e (cand->code, cand->mode, XEXP (orig_src, 0));
      rtx simplified_temp_extension = simplify_rtx (temp_extension);
      if (simplified_temp_extension)
        temp_extension = simplified_temp_extension;
      new_set = gen_rtx_SET (VOIDmode, new_reg, temp_extension);
    }
  else if (GET_CODE (orig_src) == IF_THEN_ELSE)
    {
      /* Only IF_THEN_ELSE of phi-type copies are combined.  Otherwise,
         in general, IF_THEN_ELSE should not be combined.  */
      return false;
    }
  else
    {
      /* This is the normal case.  */
      rtx temp_extension
	= gen_rtx_fmt_e (cand->code, cand->mode, orig_src);
      rtx simplified_temp_extension = simplify_rtx (temp_extension);
      if (simplified_temp_extension)
        temp_extension = simplified_temp_extension;
      new_set = gen_rtx_SET (VOIDmode, new_reg, temp_extension);
    }

  /* This change is a part of a group of changes.  Hence,
     validate_change will not try to commit the change.  */
  if (validate_change (curr_insn, orig_set, new_set, true))
    {
      if (dump_file)
        {
          fprintf (dump_file,
		   "Tentatively merged extension with definition:\n");
          print_rtl_single (dump_file, curr_insn);
        }
      return true;
    }

  return false;
}
Example #10
void
print_pattern (pretty_printer *pp, const_rtx x, int verbose)
{
  if (! x)
    {
      pp_string (pp, "(nil)");
      return;
    }

  switch (GET_CODE (x))
    {
    case SET:
      print_value (pp, SET_DEST (x), verbose);
      pp_equal (pp);
      print_value (pp, SET_SRC (x), verbose);
      break;
    case RETURN:
    case SIMPLE_RETURN:
    case EH_RETURN:
      pp_string (pp, GET_RTX_NAME (GET_CODE (x)));
      break;
    case CALL:
      print_exp (pp, x, verbose);
      break;
    case CLOBBER:
    case USE:
      pp_printf (pp, "%s ", GET_RTX_NAME (GET_CODE (x)));
      print_value (pp, XEXP (x, 0), verbose);
      break;
    case VAR_LOCATION:
      pp_string (pp, "loc ");
      print_value (pp, PAT_VAR_LOCATION_LOC (x), verbose);
      break;
    case COND_EXEC:
      pp_left_paren (pp);
      if (GET_CODE (COND_EXEC_TEST (x)) == NE
	  && XEXP (COND_EXEC_TEST (x), 1) == const0_rtx)
	print_value (pp, XEXP (COND_EXEC_TEST (x), 0), verbose);
      else if (GET_CODE (COND_EXEC_TEST (x)) == EQ
	       && XEXP (COND_EXEC_TEST (x), 1) == const0_rtx)
	{
	  pp_exclamation (pp);
	  print_value (pp, XEXP (COND_EXEC_TEST (x), 0), verbose);
	}
      else
	print_value (pp, COND_EXEC_TEST (x), verbose);
      pp_string (pp, ") ");
      print_pattern (pp, COND_EXEC_CODE (x), verbose);
      break;
    case PARALLEL:
      {
	int i;

	pp_left_brace (pp);
	for (i = 0; i < XVECLEN (x, 0); i++)
	  {
	    print_pattern (pp, XVECEXP (x, 0, i), verbose);
	    pp_semicolon (pp);
	  }
	pp_right_brace (pp);
      }
      break;
    case SEQUENCE:
      {
	pp_string (pp, "sequence{");
	if (INSN_P (XVECEXP (x, 0, 0)))
	  {
	    /* Print the sequence insns indented.  */
	    const char * save_print_rtx_head = print_rtx_head;
	    char indented_print_rtx_head[32];

	    pp_newline (pp);
	    gcc_assert (strlen (print_rtx_head) < sizeof (indented_print_rtx_head) - 4);
	    snprintf (indented_print_rtx_head,
		      sizeof (indented_print_rtx_head),
		      "%s     ", print_rtx_head);
	    print_rtx_head = indented_print_rtx_head;
	    for (int i = 0; i < XVECLEN (x, 0); i++)
	      print_insn_with_notes (pp, XVECEXP (x, 0, i));
	    pp_printf (pp, "%s      ", save_print_rtx_head);
	    print_rtx_head = save_print_rtx_head;
	  }
	else
	  {
	    for (int i = 0; i < XVECLEN (x, 0); i++)
	      {
		print_pattern (pp, XVECEXP (x, 0, i), verbose);
		pp_semicolon (pp);
	      }
	  }
	pp_right_brace (pp);
      }
      break;
    case ASM_INPUT:
      pp_printf (pp, "asm {%s}", XSTR (x, 0));
      break;
    case ADDR_VEC:
      /* Fall through.  */
    case ADDR_DIFF_VEC:
      print_value (pp, XEXP (x, 0), verbose);
      break;
    case TRAP_IF:
      pp_string (pp, "trap_if ");
      print_value (pp, TRAP_CONDITION (x), verbose);
      break;
    case UNSPEC:
    case UNSPEC_VOLATILE:
      /* Fallthru -- leave UNSPECs to print_exp.  */
    default:
      print_value (pp, x, verbose);
    }
}				/* print_pattern */
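The SEQUENCE case swaps in an indented copy of print_rtx_head so that nested insns line up, then restores the original for the caller. The save/indent/restore idiom, standalone with an invented prefix string:

/* Save/indent/restore of a shared line prefix, as print_pattern's
   SEQUENCE case does with print_rtx_head; the prefix is invented.  */
#include <stdio.h>

static const char *print_head = ";; ";

int main (void)
{
  const char *saved = print_head;
  char indented[32];

  snprintf (indented, sizeof (indented), "%s     ", print_head);
  print_head = indented;                 /* nested lines now indent */
  printf ("%sinner insn\n", print_head);

  print_head = saved;                    /* restore for the caller */
  printf ("%souter insn\n", print_head);
  return 0;
}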
Example #11
static void
print_exp (pretty_printer *pp, const_rtx x, int verbose)
{
  const char *st[4];
  const char *fun;
  rtx op[4];
  int i;

  fun = (char *) 0;
  for (i = 0; i < 4; i++)
    {
      st[i] = (char *) 0;
      op[i] = NULL_RTX;
    }

  switch (GET_CODE (x))
    {
    case PLUS:
      op[0] = XEXP (x, 0);
      if (CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) < 0)
	{
	  st[1] = "-";
	  op[1] = GEN_INT (-INTVAL (XEXP (x, 1)));
	}
      else
	{
	  st[1] = "+";
	  op[1] = XEXP (x, 1);
	}
      break;
    case LO_SUM:
      op[0] = XEXP (x, 0);
      st[1] = "+low(";
      op[1] = XEXP (x, 1);
      st[2] = ")";
      break;
    case MINUS:
      op[0] = XEXP (x, 0);
      st[1] = "-";
      op[1] = XEXP (x, 1);
      break;
    case COMPARE:
      fun = "cmp";
      op[0] = XEXP (x, 0);
      op[1] = XEXP (x, 1);
      break;
    case NEG:
      st[0] = "-";
      op[0] = XEXP (x, 0);
      break;
    case FMA:
      st[0] = "{";
      op[0] = XEXP (x, 0);
      st[1] = "*";
      op[1] = XEXP (x, 1);
      st[2] = "+";
      op[2] = XEXP (x, 2);
      st[3] = "}";
      break;
    case MULT:
      op[0] = XEXP (x, 0);
      st[1] = "*";
      op[1] = XEXP (x, 1);
      break;
    case DIV:
      op[0] = XEXP (x, 0);
      st[1] = "/";
      op[1] = XEXP (x, 1);
      break;
    case UDIV:
      fun = "udiv";
      op[0] = XEXP (x, 0);
      op[1] = XEXP (x, 1);
      break;
    case MOD:
      op[0] = XEXP (x, 0);
      st[1] = "%";
      op[1] = XEXP (x, 1);
      break;
    case UMOD:
      fun = "umod";
      op[0] = XEXP (x, 0);
      op[1] = XEXP (x, 1);
      break;
    case SMIN:
      fun = "smin";
      op[0] = XEXP (x, 0);
      op[1] = XEXP (x, 1);
      break;
    case SMAX:
      fun = "smax";
      op[0] = XEXP (x, 0);
      op[1] = XEXP (x, 1);
      break;
    case UMIN:
      fun = "umin";
      op[0] = XEXP (x, 0);
      op[1] = XEXP (x, 1);
      break;
    case UMAX:
      fun = "umax";
      op[0] = XEXP (x, 0);
      op[1] = XEXP (x, 1);
      break;
    case NOT:
      st[0] = "!";
      op[0] = XEXP (x, 0);
      break;
    case AND:
      op[0] = XEXP (x, 0);
      st[1] = "&";
      op[1] = XEXP (x, 1);
      break;
    case IOR:
      op[0] = XEXP (x, 0);
      st[1] = "|";
      op[1] = XEXP (x, 1);
      break;
    case XOR:
      op[0] = XEXP (x, 0);
      st[1] = "^";
      op[1] = XEXP (x, 1);
      break;
    case ASHIFT:
      op[0] = XEXP (x, 0);
      st[1] = "<<";
      op[1] = XEXP (x, 1);
      break;
    case LSHIFTRT:
      op[0] = XEXP (x, 0);
      st[1] = " 0>>";
      op[1] = XEXP (x, 1);
      break;
    case ASHIFTRT:
      op[0] = XEXP (x, 0);
      st[1] = ">>";
      op[1] = XEXP (x, 1);
      break;
    case ROTATE:
      op[0] = XEXP (x, 0);
      st[1] = "<-<";
      op[1] = XEXP (x, 1);
      break;
    case ROTATERT:
      op[0] = XEXP (x, 0);
      st[1] = ">->";
      op[1] = XEXP (x, 1);
      break;
    case NE:
      op[0] = XEXP (x, 0);
      st[1] = "!=";
      op[1] = XEXP (x, 1);
      break;
    case EQ:
      op[0] = XEXP (x, 0);
      st[1] = "==";
      op[1] = XEXP (x, 1);
      break;
    case GE:
      op[0] = XEXP (x, 0);
      st[1] = ">=";
      op[1] = XEXP (x, 1);
      break;
    case GT:
      op[0] = XEXP (x, 0);
      st[1] = ">";
      op[1] = XEXP (x, 1);
      break;
    case LE:
      op[0] = XEXP (x, 0);
      st[1] = "<=";
      op[1] = XEXP (x, 1);
      break;
    case LT:
      op[0] = XEXP (x, 0);
      st[1] = "<";
      op[1] = XEXP (x, 1);
      break;
    case SIGN_EXTRACT:
      fun = (verbose) ? "sign_extract" : "sxt";
      op[0] = XEXP (x, 0);
      op[1] = XEXP (x, 1);
      op[2] = XEXP (x, 2);
      break;
    case ZERO_EXTRACT:
      fun = (verbose) ? "zero_extract" : "zxt";
      op[0] = XEXP (x, 0);
      op[1] = XEXP (x, 1);
      op[2] = XEXP (x, 2);
      break;
    case SIGN_EXTEND:
      fun = (verbose) ? "sign_extend" : "sxn";
      op[0] = XEXP (x, 0);
      break;
    case ZERO_EXTEND:
      fun = (verbose) ? "zero_extend" : "zxn";
      op[0] = XEXP (x, 0);
      break;
    case FLOAT_EXTEND:
      fun = (verbose) ? "float_extend" : "fxn";
      op[0] = XEXP (x, 0);
      break;
    case TRUNCATE:
      fun = (verbose) ? "trunc" : "trn";
      op[0] = XEXP (x, 0);
      break;
    case FLOAT_TRUNCATE:
      fun = (verbose) ? "float_trunc" : "ftr";
      op[0] = XEXP (x, 0);
      break;
    case FLOAT:
      fun = (verbose) ? "float" : "flt";
      op[0] = XEXP (x, 0);
      break;
    case UNSIGNED_FLOAT:
      fun = (verbose) ? "uns_float" : "ufl";
      op[0] = XEXP (x, 0);
      break;
    case FIX:
      fun = "fix";
      op[0] = XEXP (x, 0);
      break;
    case UNSIGNED_FIX:
      fun = (verbose) ? "uns_fix" : "ufx";
      op[0] = XEXP (x, 0);
      break;
    case PRE_DEC:
      st[0] = "--";
      op[0] = XEXP (x, 0);
      break;
    case PRE_INC:
      st[0] = "++";
      op[0] = XEXP (x, 0);
      break;
    case POST_DEC:
      op[0] = XEXP (x, 0);
      st[1] = "--";
      break;
    case POST_INC:
      op[0] = XEXP (x, 0);
      st[1] = "++";
      break;
    case PRE_MODIFY:
      st[0] = "pre ";
      op[0] = XEXP (XEXP (x, 1), 0);
      st[1] = "+=";
      op[1] = XEXP (XEXP (x, 1), 1);
      break;
    case POST_MODIFY:
      st[0] = "post ";
      op[0] = XEXP (XEXP (x, 1), 0);
      st[1] = "+=";
      op[1] = XEXP (XEXP (x, 1), 1);
      break;
    case CALL:
      st[0] = "call ";
      op[0] = XEXP (x, 0);
      if (verbose)
	{
	  st[1] = " argc:";
	  op[1] = XEXP (x, 1);
	}
      break;
    case IF_THEN_ELSE:
      st[0] = "{(";
      op[0] = XEXP (x, 0);
      st[1] = ")?";
      op[1] = XEXP (x, 1);
      st[2] = ":";
      op[2] = XEXP (x, 2);
      st[3] = "}";
      break;
    case TRAP_IF:
      fun = "trap_if";
      op[0] = TRAP_CONDITION (x);
      break;
    case PREFETCH:
      fun = "prefetch";
      op[0] = XEXP (x, 0);
      op[1] = XEXP (x, 1);
      op[2] = XEXP (x, 2);
      break;
    case UNSPEC:
    case UNSPEC_VOLATILE:
      {
	pp_string (pp, "unspec");
	if (GET_CODE (x) == UNSPEC_VOLATILE)
	  pp_string (pp, "/v");
	pp_left_bracket (pp);
	for (i = 0; i < XVECLEN (x, 0); i++)
	  {
	    if (i != 0)
	      pp_comma (pp);
	    print_pattern (pp, XVECEXP (x, 0, i), verbose);
	  }
	pp_string (pp, "] ");
	pp_decimal_int (pp, XINT (x, 1));
      }
      break;
    default:
      {
	/* Most unhandled codes can be printed as pseudo-functions.  */
        if (GET_RTX_CLASS (GET_CODE (x)) == RTX_UNARY)
	  {
	    fun = GET_RTX_NAME (GET_CODE (x));
	    op[0] = XEXP (x, 0);
	  }
        else if (GET_RTX_CLASS (GET_CODE (x)) == RTX_COMPARE
		 || GET_RTX_CLASS (GET_CODE (x)) == RTX_COMM_COMPARE
		 || GET_RTX_CLASS (GET_CODE (x)) == RTX_BIN_ARITH
		 || GET_RTX_CLASS (GET_CODE (x)) == RTX_COMM_ARITH)
	  {
	    fun = GET_RTX_NAME (GET_CODE (x));
	    op[0] = XEXP (x, 0);
	    op[1] = XEXP (x, 1);
	  }
        else if (GET_RTX_CLASS (GET_CODE (x)) == RTX_TERNARY)
	  {
	    fun = GET_RTX_NAME (GET_CODE (x));
	    op[0] = XEXP (x, 0);
	    op[1] = XEXP (x, 1);
	    op[2] = XEXP (x, 2);
	  }
	else
	  /* Give up, just print the RTX name.  */
	  st[0] = GET_RTX_NAME (GET_CODE (x));
      }
      break;
    }

  /* Print this as a function?  */
  if (fun)
    {
      pp_string (pp, fun);
      pp_left_paren (pp);
    }

  for (i = 0; i < 4; i++)
    {
      if (st[i])
        pp_string (pp, st[i]);

      if (op[i])
	{
	  if (fun && i != 0)
	    pp_comma (pp);
	  print_value (pp, op[i], verbose);
	}
    }

  if (fun)
    pp_right_paren (pp);
}		/* print_exp */
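print_exp drives every operator case above through one loop: up to four literal strings interleaved with up to four operands, which covers prefix, infix, postfix, and function-style forms alike. A condensed standalone version using plain strings as operands:

/* The interleaved string/operand printing scheme of print_exp,
   reduced to plain strings for illustration.  */
#include <stdio.h>

int main (void)
{
  const char *st[4] = { 0 };
  const char *op[4] = { 0 };

  /* Encode "a - b" the way the MINUS case does.  */
  op[0] = "a";
  st[1] = "-";
  op[1] = "b";

  for (int i = 0; i < 4; i++)
    {
      if (st[i])
        fputs (st[i], stdout);
      if (op[i])
        fputs (op[i], stdout);
    }
  putchar ('\n');   /* prints "a-b" */
  return 0;
}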
Example #12
void
print_value (pretty_printer *pp, const_rtx x, int verbose)
{
  char tmp[1024];

  if (!x)
    {
      pp_string (pp, "(nil)");
      return;
    }
  switch (GET_CODE (x))
    {
    case CONST_INT:
      pp_scalar (pp, HOST_WIDE_INT_PRINT_HEX,
		 (unsigned HOST_WIDE_INT) INTVAL (x));
      break;
    case CONST_DOUBLE:
      if (FLOAT_MODE_P (GET_MODE (x)))
	{
	  real_to_decimal (tmp, CONST_DOUBLE_REAL_VALUE (x),
			   sizeof (tmp), 0, 1);
	  pp_string (pp, tmp);
	}
      else
	pp_printf (pp, "<%wx,%wx>",
		   (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x),
		   (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x));
      break;
    case CONST_FIXED:
      fixed_to_decimal (tmp, CONST_FIXED_VALUE (x), sizeof (tmp));
      pp_string (pp, tmp);
      break;
    case CONST_STRING:
      pp_printf (pp, "\"%s\"", XSTR (x, 0));
      break;
    case SYMBOL_REF:
      pp_printf (pp, "`%s'", XSTR (x, 0));
      break;
    case LABEL_REF:
      pp_printf (pp, "L%d", INSN_UID (XEXP (x, 0)));
      break;
    case CONST:
    case HIGH:
    case STRICT_LOW_PART:
      pp_printf (pp, "%s(", GET_RTX_NAME (GET_CODE (x)));
      print_value (pp, XEXP (x, 0), verbose);
      pp_right_paren (pp);
      break;
    case REG:
      if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	{
	  if (ISDIGIT (reg_names[REGNO (x)][0]))
	    pp_modulo (pp);
	  pp_string (pp, reg_names[REGNO (x)]);
	}
      else
	pp_printf (pp, "r%d", REGNO (x));
      if (verbose)
	pp_printf (pp, ":%s", GET_MODE_NAME (GET_MODE (x)));
      break;
    case SUBREG:
      print_value (pp, SUBREG_REG (x), verbose);
      pp_printf (pp, "#%d", SUBREG_BYTE (x));
      break;
    case SCRATCH:
    case CC0:
    case PC:
      pp_string (pp, GET_RTX_NAME (GET_CODE (x)));
      break;
    case MEM:
      pp_left_bracket (pp);
      print_value (pp, XEXP (x, 0), verbose);
      pp_right_bracket (pp);
      break;
    case DEBUG_EXPR:
      pp_printf (pp, "D#%i", DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x)));
      break;
    default:
      print_exp (pp, x, verbose);
      break;
    }
}				/* print_value */
Example #13
static void
mark_all_labels (rtx f)
{
  rtx insn;
  rtx prev_nonjump_insn = NULL;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn))
      {
	mark_jump_label (PATTERN (insn), insn, 0);

	/* If the previous non-jump insn sets something to a label,
	   something that this jump insn uses, make that label the primary
	   target of this insn if we don't yet have any.  That previous
	   insn must be a single_set and not refer to more than one label.
	   The jump insn must not refer to other labels as jump targets
	   and must be a plain (set (pc) ...), maybe in a parallel, and
	   may refer to the item being set only directly or as one of the
	   arms in an IF_THEN_ELSE.  */
	if (! INSN_DELETED_P (insn)
	    && JUMP_P (insn)
	    && JUMP_LABEL (insn) == NULL)
	  {
	    rtx label_note = NULL;
	    rtx pc = pc_set (insn);
	    rtx pc_src = pc != NULL ? SET_SRC (pc) : NULL;

	    if (prev_nonjump_insn != NULL)
	      label_note
		= find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);

	    if (label_note != NULL && pc_src != NULL)
	      {
		rtx label_set = single_set (prev_nonjump_insn);
		rtx label_dest
		  = label_set != NULL ? SET_DEST (label_set) : NULL;

		if (label_set != NULL
		    /* The source must be the direct LABEL_REF, not a
		       PLUS, UNSPEC, IF_THEN_ELSE etc.  */
		    && GET_CODE (SET_SRC (label_set)) == LABEL_REF
		    && (rtx_equal_p (label_dest, pc_src)
			|| (GET_CODE (pc_src) == IF_THEN_ELSE
			    && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
				|| rtx_equal_p (label_dest,
						XEXP (pc_src, 2))))))

		  {
		    /* The CODE_LABEL referred to in the note must be the
		       CODE_LABEL in the LABEL_REF of the "set".  We can
		       conveniently use it for the marker function, which
		       requires a LABEL_REF wrapping.  */
		    gcc_assert (XEXP (label_note, 0)
				== XEXP (SET_SRC (label_set), 0));

		    mark_jump_label_1 (label_set, insn, false, true);
		    gcc_assert (JUMP_LABEL (insn)
				== XEXP (SET_SRC (label_set), 0));
		  }
	      }
	  }
	else if (! INSN_DELETED_P (insn))
	  prev_nonjump_insn = insn;
      }
    else if (LABEL_P (insn))
      prev_nonjump_insn = NULL;

  /* If we are in cfglayout mode, there may be non-insns between the
     basic blocks.  If those non-insns represent tablejump data, they
     contain label references that we must record.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      rtx insn;
      FOR_EACH_BB (bb)
	{
	  for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
	    if (INSN_P (insn))
	      {
		gcc_assert (JUMP_TABLE_DATA_P (insn));
		mark_jump_label (PATTERN (insn), insn, 0);
	      }

	  for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
	    if (INSN_P (insn))
	      {
		gcc_assert (JUMP_TABLE_DATA_P (insn));
		mark_jump_label (PATTERN (insn), insn, 0);
	      }
	}
    }
}
PyObject *
get_operand_as_object(const_rtx in_rtx, int idx, char fmt)
{
    const char *str;

    /* The operand types are described in gcc/rtl.c */
    switch (fmt) {

    case 'T': /* pointer to a string, with special meaning */
        str = XTMPL (in_rtx, idx);
        goto string;

    case 'S': /* optional pointer to a string */
    case 's': /* a pointer to a string */
        str = XSTR (in_rtx, idx);
    string:
        return PyGccStringOrNone(str);

    case '0': /* unused, or used in a phase-dependent manner */
        Py_RETURN_NONE; /* for now */

    case 'e': /* pointer to an rtl expression */
        /* Nested expression: */
        return PyGccRtl_New(
                   gcc_private_make_rtl_insn(XEXP (in_rtx, idx)));

    case 'E':
    case 'V':
        /* Nested list of expressions */
        {
            /* Start with an empty list and append; preallocating with
               PyList_New(n) would leave n NULL slots that the appends
               below would not fill.  */
            PyObject *list = PyList_New(0);
            int j;
            if (!list) {
                return NULL;
            }
            for (j = 0; j < XVECLEN (in_rtx, idx); j++) {
                PyObject *item = PyGccRtl_New(
                                     gcc_private_make_rtl_insn(XVECEXP (in_rtx, idx, j)));
                if (!item) {
                    Py_DECREF(list);
                    return NULL;
                }
                if (-1 == PyList_Append(list, item)) {
                    Py_DECREF(item);
                    Py_DECREF(list);
                    return NULL;
                }
                Py_DECREF(item);
            }
            return list;
        }

    case 'w':
        return PyGccInt_FromLong(XWINT (in_rtx, idx));

    case 'i':
        return PyGccInt_FromLong(XINT (in_rtx, idx));

    case 'n':
        /* Return NOTE_INSN names rather than integer codes.  */
        return PyGccStringOrNone(GET_NOTE_INSN_NAME (XINT (in_rtx, idx)));

    case 'u': /* a pointer to another insn */
        Py_RETURN_NONE; /* for now */

    case 't':
        return PyGccTree_New(gcc_private_make_tree(XTREE (in_rtx, idx)));

    case '*':
        Py_RETURN_NONE; /* for now */

    case 'B':
        return PyGccBasicBlock_New(
            gcc_private_make_cfg_block(XBBDEF (in_rtx, idx)));

    default:
        gcc_unreachable ();
    }
}
Example #15
rtx
copy_rtx (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* A SCRATCH must be shared because each one represents a distinct value.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
        return orig;
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
         a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
          && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    RTX_FLAG (copy, frame_related) = 0;
  RTX_FLAG (copy, jump) = RTX_FLAG (orig, jump);
  RTX_FLAG (copy, call) = RTX_FLAG (orig, call);

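  /* Walk the operand format string: 'e' operands are deep-copied,
     'E'/'V' vectors get a fresh rtvec with each element deep-copied,
     and scalar slots keep the values shallow_copy_rtx already copied.  */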
  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_rtx (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }
  return copy;
}
Example #16
void
find_comparison_dom_walker::before_dom_children (basic_block bb)
{
  struct comparison *last_cmp;
  rtx insn, next, last_clobber;
  bool last_cmp_valid;
  bool need_purge = false;
  bitmap killed;

  killed = BITMAP_ALLOC (NULL);

  /* The last comparison that was made.  Will be reset to NULL
     once the flags are clobbered.  */
  last_cmp = NULL;

  /* True iff the last comparison has not been clobbered, nor
     have its inputs.  Used to eliminate duplicate compares.  */
  last_cmp_valid = false;

  /* The last insn that clobbered the flags, if that insn is of
     a form that may be valid for eliminating a following compare.
     To be reset to NULL once the flags are set otherwise.  */
  last_clobber = NULL;

  /* Propagate the last live comparison throughout the extended basic block. */
  if (single_pred_p (bb))
    {
      last_cmp = (struct comparison *) single_pred (bb)->aux;
      if (last_cmp)
	last_cmp_valid = last_cmp->inputs_valid;
    }

  for (insn = BB_HEAD (bb); insn; insn = next)
    {
      rtx src;

      next = (insn == BB_END (bb) ? NULL_RTX : NEXT_INSN (insn));
      if (!NONDEBUG_INSN_P (insn))
	continue;

      /* Compute the set of registers modified by this instruction.  */
      bitmap_clear (killed);
      df_simulate_find_defs (insn, killed);

      src = conforming_compare (insn);
      if (src)
	{
	  enum machine_mode src_mode = GET_MODE (src);
	  rtx eh_note = NULL;

	  if (flag_non_call_exceptions)
	    eh_note = find_reg_note (insn, REG_EH_REGION, NULL);

	  if (!last_cmp_valid)
	    goto dont_delete;

	  /* Take care that it's in the same EH region.  */
	  if (flag_non_call_exceptions
	      && !rtx_equal_p (eh_note, last_cmp->eh_note))
	    goto dont_delete;

	  /* Make sure the compare is redundant with the previous.  */
	  if (!rtx_equal_p (last_cmp->in_a, XEXP (src, 0))
	      || !rtx_equal_p (last_cmp->in_b, XEXP (src, 1)))
	    goto dont_delete;

	  /* New mode must be compatible with the previous compare mode.  */
	  {
	    enum machine_mode new_mode
	      = targetm.cc_modes_compatible (last_cmp->orig_mode, src_mode);
	    if (new_mode == VOIDmode)
	      goto dont_delete;

	    if (new_mode != last_cmp->orig_mode)
	      {
		rtx x, flags = gen_rtx_REG (src_mode, targetm.flags_regnum);

		/* Generate new comparison for substitution.  */
		x = gen_rtx_COMPARE (new_mode, XEXP (src, 0), XEXP (src, 1));
		x = gen_rtx_SET (VOIDmode, flags, x);

		if (!validate_change (last_cmp->insn,
				      &PATTERN (last_cmp->insn), x, false))
		  goto dont_delete;

		last_cmp->orig_mode = new_mode;
	      }
	  }

	  /* All tests and substitutions succeeded!  */
	  if (eh_note)
	    need_purge = true;
	  delete_insn (insn);
	  continue;

	dont_delete:
	  last_cmp = XCNEW (struct comparison);
	  last_cmp->insn = insn;
	  last_cmp->prev_clobber = last_clobber;
	  last_cmp->in_a = XEXP (src, 0);
	  last_cmp->in_b = XEXP (src, 1);
	  last_cmp->eh_note = eh_note;
	  last_cmp->orig_mode = src_mode;
	  all_compares.safe_push (last_cmp);

	  /* It's unusual, but be prepared for comparison patterns that
	     also clobber an input, or perhaps a scratch.  */
	  last_clobber = NULL;
	  last_cmp_valid = true;
	}

      /* Notice if this instruction kills the flags register.  */
      else if (bitmap_bit_p (killed, targetm.flags_regnum))
	{
	  /* See if this insn could be the "clobber" that eliminates
	     a future comparison.   */
	  last_clobber = (arithmetic_flags_clobber_p (insn) ? insn : NULL);

	  /* In either case, the previous compare is no longer valid.  */
	  last_cmp = NULL;
	  last_cmp_valid = false;
	  continue;
	}

      /* Notice if this instruction uses the flags register.  */
      else if (last_cmp)
	find_flags_uses_in_insn (last_cmp, insn);

      /* Notice if any of the inputs to the comparison have changed.  */
      if (last_cmp_valid
	  && (bitmap_bit_p (killed, REGNO (last_cmp->in_a))
	      || (REG_P (last_cmp->in_b)
		  && bitmap_bit_p (killed, REGNO (last_cmp->in_b)))))
	last_cmp_valid = false;
    }

  /* The pass continues past this loop (recording LAST_CMP on BB->aux
     for the rest of the extended basic block); that tail is not part
     of this excerpt, so only the minimal cleanup is reconstructed
     here.  */
  if (need_purge)
    purge_dead_edges (bb);

  BITMAP_FREE (killed);
}
Example #17
int
rtx_equal_p (rtx x, rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == y)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared nonrecursively.  */
  switch (code)
    {
    case REG:
      return (REGNO (x) == REGNO (y));

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case SCRATCH:
    case CONST_DOUBLE:
    case CONST_INT:
      return 0;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'n':
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'V':
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (rtx_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0)
              return 0;
          break;

        case 'e':
          if (rtx_equal_p (XEXP (x, i), XEXP (y, i)) == 0)
            return 0;
          break;

        case 'S':
        case 's':
          if ((XSTR (x, i) || XSTR (y, i))
              && (! XSTR (x, i) || ! XSTR (y, i)
                  || strcmp (XSTR (x, i), XSTR (y, i))))
            return 0;
          break;

        case 'u':
          /* These are just backpointers, so they don't matter.  */
          break;

        case '0':
        case 't':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          gcc_unreachable ();
        }
    }
  return 1;
}
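
The format-driven walk above is worth isolating as a pattern: the node's
code selects a format string, and a single loop dispatches on each element
kind.  Below is a minimal self-contained sketch of the same technique on a
toy expression type; every name in it (toy_node, toy_format, toy_equal_p)
is hypothetical and for illustration only, not a GCC API.

#include <stdio.h>
#include <string.h>

/* Toy node: the code selects which payload slots are meaningful.  */
struct toy_node
{
  int code;                 /* 0 = PLUS, 1 = CONST, 2 = SYM */
  long wide_val;            /* payload for a 'w' slot */
  const char *str;          /* payload for an 's' slot */
  struct toy_node *op[2];   /* payload for 'e' slots */
};

/* One format string per code, as GET_RTX_FORMAT provides for rtx codes:
   'e' = sub-expression, 'w' = wide integer, 's' = string.  */
static const char *const toy_format[] = { "ee", "w", "s" };

static int
toy_equal_p (const struct toy_node *x, const struct toy_node *y)
{
  const char *fmt;
  int i, e = 0;

  if (x == y)
    return 1;
  if (!x || !y || x->code != y->code)
    return 0;

  /* The code alone decides which slots are live and how to compare
     them, exactly as the fmt[i] switch does above.  */
  for (fmt = toy_format[x->code], i = 0; fmt[i]; i++)
    switch (fmt[i])
      {
      case 'w':
        if (x->wide_val != y->wide_val)
          return 0;
        break;
      case 's':
        if (strcmp (x->str, y->str) != 0)
          return 0;
        break;
      case 'e':
        if (!toy_equal_p (x->op[e], y->op[e]))
          return 0;
        e++;
        break;
      }
  return 1;
}

int
main (void)
{
  struct toy_node c1 = { 1, 42, 0, { 0, 0 } };
  struct toy_node c2 = { 1, 42, 0, { 0, 0 } };
  struct toy_node s  = { 2, 0, "x", { 0, 0 } };
  struct toy_node p1 = { 0, 0, 0, { &c1, &s } };
  struct toy_node p2 = { 0, 0, 0, { &c2, &s } };

  /* Structurally equal even though p1 and p2 are distinct objects.  */
  printf ("%d\n", toy_equal_p (&p1, &p2));   /* prints 1 */
  return 0;
}

The same dispatch shape drives copy_rtx above; only the per-element action
changes (compare versus deep copy) while the format table decides the
traversal.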
Example #18
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while ((TREE_CODE (exp0) == NOP_EXPR
                  || TREE_CODE (exp0) == CONVERT_EXPR
                  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
                                       << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cst_type (argtype, mask)),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
        break;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (fold_convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);
        
        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      exp = build2 (NE_EXPR, TREE_TYPE (exp),
                    TREE_OPERAND (exp, 0),
                    TREE_OPERAND (exp, 1));
      /* FALLTHRU */
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);
        
        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
           && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
          /* If the target doesn't provide either UNORDERED or ORDERED
             comparisons, canonicalize on UNORDERED for the library.  */
          || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

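    /* The braced block below shares its local declarations among the
       IEEE unordered-comparison cases nested inside it; case labels
       inside an inner block are unusual but valid C.  */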
    {
      enum rtx_code rcode1;
      enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;
      case LTGT_EXPR:
        /* It is ok for LTGT_EXPR to trap when the result is unordered,
           so expand to (a < b) || (a > b).  */
        rcode1 = LTGT;
        tcode1 = LT_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
            if (if_true_label == 0)
              drop_through_label = if_true_label = gen_label_rtx ();
              
            cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
            cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
            do_jump (cmp0, 0, if_true_label);
            do_jump (cmp1, if_false_label, if_true_label);
          }
      }
      break;

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_false_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_true_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

      /* Special case:
          __builtin_expect (<test>, 0)        and
          __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }
 
      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
Example #19
File: rtl.c Project: 0day-ci/gcc
rtx
copy_rtx (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* A SCRATCH must be shared because each one represents a distinct value.  */
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
	return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_rtx (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }
  return copy;
}
Example #20
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }


  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

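  /* If the target cannot compare this integer mode directly, expand
     the branch word by word; for the <= and >= variants the operands
     or label senses are swapped so that only greater-than and
     equality primitives are needed.  */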
  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label);
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                             if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
Example #21
static void
force_move_args_size_note (basic_block bb, rtx prev, rtx insn)
{
    rtx note, test, next_candidate, prev_candidate;

    /* If PREV exists, tail-call to the logic in the other function.  */
    if (prev)
    {
        maybe_move_args_size_note (prev, insn, false);
        return;
    }

    /* First, make sure there's anything that needs doing.  */
    note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
    if (note == NULL)
        return;

    /* We need to find a spot between the previous and next exception points
       where we can place the note and "properly" deallocate the arguments.  */
    next_candidate = prev_candidate = NULL;

    /* It is often the case that we have insns in the order:
           call
           add sp (previous deallocation)
           sub sp (align for next arglist)
           push arg
       and the add/sub cancel.  Therefore we begin by searching forward.  */

    test = insn;
    while ((test = next_active_insn_bb (bb, test)) != NULL)
    {
        /* Found an existing note: nothing to do.  */
        if (find_reg_note (test, REG_ARGS_SIZE, NULL_RTX))
            return;
        /* Found something that affects unwinding.  Stop searching.  */
        if (CALL_P (test) || !insn_nothrow_p (test))
            break;
        if (next_candidate == NULL)
            next_candidate = test;
    }

    test = insn;
    while ((test = prev_active_insn_bb (bb, test)) != NULL)
    {
        rtx tnote;
        /* Found a place that seems logical to adjust the stack.  */
        tnote = find_reg_note (test, REG_ARGS_SIZE, NULL_RTX);
        if (tnote)
        {
            XEXP (tnote, 0) = XEXP (note, 0);
            return;
        }
        if (prev_candidate == NULL)
            prev_candidate = test;
        /* Found something that affects unwinding.  Stop searching.  */
        if (CALL_P (test) || !insn_nothrow_p (test))
            break;
    }

    if (prev_candidate)
        test = prev_candidate;
    else if (next_candidate)
        test = next_candidate;
    else
    {
        /* ??? We *must* have a place, lest we ICE on the lost adjustment.
           Options are: dummy clobber insn, nop, or prevent the removal
           of the sp += 0 insn.  */
        /* TODO: Find another way to indicate to the dwarf2 code that we
           have not in fact lost an adjustment.  */
        test = emit_insn_before (gen_rtx_CLOBBER (VOIDmode, const0_rtx), insn);
    }
    add_reg_note (test, REG_ARGS_SIZE, XEXP (note, 0));
}
Example #22
static bool
find_mem (rtx *address_of_x)
{
  rtx x = *address_of_x;
  enum rtx_code code = GET_CODE (x);
  const char *const fmt = GET_RTX_FORMAT (code);
  int i;

  if (code == MEM && REG_P (XEXP (x, 0)))
    {
      /* Match with *reg0.  */
      mem_insn.mem_loc = address_of_x;
      mem_insn.reg0 = XEXP (x, 0);
      mem_insn.reg1_is_const = true;
      mem_insn.reg1_val = 0;
      mem_insn.reg1 = GEN_INT (0);
      if (find_inc (true))
	return true;
    }
  if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
      && REG_P (XEXP (XEXP (x, 0), 0)))
    {
      rtx reg1 = XEXP (XEXP (x, 0), 1);
      mem_insn.mem_loc = address_of_x;
      mem_insn.reg0 = XEXP (XEXP (x, 0), 0);
      mem_insn.reg1 = reg1;
      if (CONST_INT_P (reg1))
	{
	  mem_insn.reg1_is_const = true;
	  /* Match with *(reg0 + c) where c is a const. */
	  mem_insn.reg1_val = INTVAL (reg1);
	  if (find_inc (true))
	    return true;
	}
      else if (REG_P (reg1))
	{
	  /* Match with *(reg0 + reg1).  */
	  mem_insn.reg1_is_const = false;
	  if (find_inc (true))
	    return true;
	}
    }

  if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
    {
      /* If REG occurs inside a MEM used in a bit-field reference,
	 that is unacceptable.  */
      return false;
    }

  /* Time for some deep diving.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (find_mem (&XEXP (x, i)))
	    return true;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (find_mem (&XVECEXP (x, i, j)))
	      return true;
	}
    }
  return false;
}
enum crx_addrtype
crx_decompose_address (rtx addr, struct crx_address *out)
{
  rtx base = NULL_RTX, index = NULL_RTX, disp = NULL_RTX;
  rtx scale_rtx = NULL_RTX, side_effect = NULL_RTX;
  int scale = -1;
  
  enum crx_addrtype retval = CRX_INVALID;

  switch (GET_CODE (addr))
    {
    case CONST_INT:
      /* Absolute address (known at compile time) */
      retval = CRX_ABSOLUTE;
      disp = addr;
      if (!UNSIGNED_INT_FITS_N_BITS (INTVAL (disp), GET_MODE_BITSIZE (Pmode)))
	return CRX_INVALID;
      break;
      
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Absolute address (known at link time) */
      retval = CRX_ABSOLUTE;
      disp = addr;
      break;

    case REG:
    case SUBREG:
      /* Register relative address */
      retval = CRX_REG_REL;
      base = addr;
      break;

    case PLUS:
      switch (GET_CODE (XEXP (addr, 0)))
	{
	case REG:
	case SUBREG:
	  if (REG_P (XEXP (addr, 1)))
	    {
	      /* Scaled index with scale = 1 and disp. = 0 */
	      retval = CRX_SCALED_INDX;
	      base = XEXP (addr, 1);
	      index = XEXP (addr, 0); 
	      scale = 1;
	    }
	  else if (RTX_SIGNED_INT_FITS_N_BITS (XEXP (addr, 1), 28))
	    {
	      /* Register relative address and <= 28-bit disp. */
	      retval = CRX_REG_REL;
	      base = XEXP (addr, 0);
	      disp = XEXP (addr, 1);
	    }
	  else
	    return CRX_INVALID;
	  break;

	case PLUS:
	  /* Scaled index and <= 22-bit disp. */
	  retval = CRX_SCALED_INDX;
	  base = XEXP (XEXP (addr, 0), 1); 
	  disp = XEXP (addr, 1);
	  if (!RTX_SIGNED_INT_FITS_N_BITS (disp, 22))
	    return CRX_INVALID;
	  switch (GET_CODE (XEXP (XEXP (addr, 0), 0)))
	    {
	    case REG:
	      /* Scaled index with scale = 0 and <= 22-bit disp. */
	      index = XEXP (XEXP (addr, 0), 0); 
	      scale = 1;
	      break;
	      
	    case MULT:
	      /* Scaled index with scale >= 0 and <= 22-bit disp. */
	      index = XEXP (XEXP (XEXP (addr, 0), 0), 0); 
	      scale_rtx = XEXP (XEXP (XEXP (addr, 0), 0), 1); 
	      if ((scale = SCALE_FOR_INDEX_P (scale_rtx)) == -1)
		return CRX_INVALID;
	      break;

	    default:
	      return CRX_INVALID;
	    }
	  break;
	  
	case MULT:
	  /* Scaled index with scale >= 0 */
	  retval = CRX_SCALED_INDX;
	  base = XEXP (addr, 1); 
	  index = XEXP (XEXP (addr, 0), 0); 
	  scale_rtx = XEXP (XEXP (addr, 0), 1); 
	  /* Scaled index with scale >= 0 and <= 22-bit disp. */
	  if ((scale = SCALE_FOR_INDEX_P (scale_rtx)) == -1)
	    return CRX_INVALID;
	  break;

	default:
	  return CRX_INVALID;
	}
      break;

    case POST_INC:
    case POST_DEC:
      /* Simple post-increment */
      retval = CRX_POST_INC;
      base = XEXP (addr, 0);
      side_effect = addr;
      break;

    case POST_MODIFY:
      /* Generic post-increment with <= 12-bit disp. */
      retval = CRX_POST_INC;
      base = XEXP (addr, 0);
      side_effect = XEXP (addr, 1);
      if (base != XEXP (side_effect, 0))
	return CRX_INVALID;
      switch (GET_CODE (side_effect))
	{
	case PLUS:
	case MINUS:
	  disp = XEXP (side_effect, 1);
	  if (!RTX_SIGNED_INT_FITS_N_BITS (disp, 12))
	    return CRX_INVALID;
	  break;

	default:
	  /* CRX only supports PLUS and MINUS */
	  return CRX_INVALID;
	}
      break;

    default:
      return CRX_INVALID;
    }

  if (base && !crx_addr_reg_p (base)) return CRX_INVALID;
  if (index && !crx_addr_reg_p (index)) return CRX_INVALID;
  
  out->base = base;
  out->index = index;
  out->disp = disp;
  out->scale = scale;
  out->side_effect = side_effect;

  return retval;
}
Example #24
static bool
attempt_change (rtx new_addr, rtx inc_reg)
{
  /* There are four cases: For the two cases that involve an add
     instruction, we are going to have to delete the add and insert a
     mov.  We are going to assume that the mov is free.  This is
     fairly early in the backend and there are a lot of opportunities
     for removing that move later.  In particular, there is the case
     where the move may be dead, this is what dead code elimination
     passes are for.  The two cases where we have an inc insn will be
     handled mov free.  */

  basic_block bb = BLOCK_FOR_INSN (mem_insn.insn);
  rtx mov_insn = NULL;
  int regno;
  rtx mem = *mem_insn.mem_loc;
  enum machine_mode mode = GET_MODE (mem);
  rtx new_mem;
  int old_cost = 0;
  int new_cost = 0;
  bool speed = optimize_bb_for_speed_p (bb);

  PUT_MODE (mem_tmp, mode);
  XEXP (mem_tmp, 0) = new_addr;

  old_cost = (set_src_cost (mem, speed)
	      + set_rtx_cost (PATTERN (inc_insn.insn), speed));
  new_cost = set_src_cost (mem_tmp, speed);

  /* The first item of business is to see if this is profitable.  */
  if (old_cost < new_cost)
    {
      if (dump_file)
	fprintf (dump_file, "cost failure old=%d new=%d\n", old_cost, new_cost);
      return false;
    }

  /* Jump through a lot of hoops to keep the attributes up to date.  We
     do not want to call one of the change address variants that take
     an offset even though we know the offset in many cases.  These
     assume you are changing where the address is pointing by the
     offset.  */
  new_mem = replace_equiv_address_nv (mem, new_addr);
  if (! validate_change (mem_insn.insn, mem_insn.mem_loc, new_mem, 0))
    {
      if (dump_file)
	fprintf (dump_file, "validation failure\n");
      return false;
    }

  /* From here to the end of the function we are committed to the
     change, i.e. nothing fails.  Generate any necessary movs, move
     any regnotes, and fix up the reg_next_{use,inc_use,def}.  */
  switch (inc_insn.form)
    {
    case FORM_PRE_ADD:
      /* Replace the addition with a move.  Do it at the location of
	 the addition since the operand of the addition may change
	 before the memory reference.  */
      mov_insn = insert_move_insn_before (inc_insn.insn,
					  inc_insn.reg_res, inc_insn.reg0);
      move_dead_notes (mov_insn, inc_insn.insn, inc_insn.reg0);

      regno = REGNO (inc_insn.reg_res);
      reg_next_def[regno] = mov_insn;
      reg_next_use[regno] = NULL;
      regno = REGNO (inc_insn.reg0);
      reg_next_use[regno] = mov_insn;
      df_recompute_luids (bb);
      break;

    case FORM_POST_INC:
      regno = REGNO (inc_insn.reg_res);
      if (reg_next_use[regno] == reg_next_inc_use[regno])
	reg_next_inc_use[regno] = NULL;

      /* Fallthru.  */
    case FORM_PRE_INC:
      regno = REGNO (inc_insn.reg_res);
      reg_next_def[regno] = mem_insn.insn;
      reg_next_use[regno] = NULL;

      break;

    case FORM_POST_ADD:
      mov_insn = insert_move_insn_before (mem_insn.insn,
					  inc_insn.reg_res, inc_insn.reg0);
      move_dead_notes (mov_insn, inc_insn.insn, inc_insn.reg0);

      /* Do not move anything to the mov insn because the instruction
	 pointer for the main iteration has not yet hit that.  It is
	 still pointing to the mem insn. */
      regno = REGNO (inc_insn.reg_res);
      reg_next_def[regno] = mem_insn.insn;
      reg_next_use[regno] = NULL;

      regno = REGNO (inc_insn.reg0);
      reg_next_use[regno] = mem_insn.insn;
      if ((reg_next_use[regno] == reg_next_inc_use[regno])
	  || (reg_next_inc_use[regno] == inc_insn.insn))
	reg_next_inc_use[regno] = NULL;
      df_recompute_luids (bb);
      break;

    case FORM_last:
    default:
      gcc_unreachable ();
    }

  if (!inc_insn.reg1_is_const)
    {
      regno = REGNO (inc_insn.reg1);
      reg_next_use[regno] = mem_insn.insn;
      if ((reg_next_use[regno] == reg_next_inc_use[regno])
	  || (reg_next_inc_use[regno] == inc_insn.insn))
	reg_next_inc_use[regno] = NULL;
    }

  delete_insn (inc_insn.insn);

  if (dump_file && mov_insn)
    {
      fprintf (dump_file, "inserting mov ");
      dump_insn_slim (dump_file, mov_insn);
    }

  /* Record that this insn has an implicit side effect.  */
  add_reg_note (mem_insn.insn, REG_INC, inc_reg);

  if (dump_file)
    {
      fprintf (dump_file, "****success ");
      dump_insn_slim (dump_file, mem_insn.insn);
    }

  return true;
}
Example #25
static void
canonicalize_address (rtx x)
{
  for (;;)
    switch (GET_CODE (x))
      {
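      /* Rewrite (ashift X C) in place as (mult X (1 << C)), the
         canonical form for address arithmetic, then descend into X.  */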
      case ASHIFT:
        if (CONST_INT_P (XEXP (x, 1))
            && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x))
            && INTVAL (XEXP (x, 1)) >= 0)
	  {
	    HOST_WIDE_INT shift = INTVAL (XEXP (x, 1));
	    PUT_CODE (x, MULT);
	    XEXP (x, 1) = gen_int_mode (HOST_WIDE_INT_1 << shift,
					GET_MODE (x));
	  }

	x = XEXP (x, 0);
        break;

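      /* For a sum, canonicalize the left operand recursively, then
         continue iteratively with the right operand.  */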
      case PLUS:
        if (GET_CODE (XEXP (x, 0)) == PLUS
	    || GET_CODE (XEXP (x, 0)) == ASHIFT
	    || GET_CODE (XEXP (x, 0)) == CONST)
	  canonicalize_address (XEXP (x, 0));

	x = XEXP (x, 1);
        break;

      case CONST:
	x = XEXP (x, 0);
        break;

      default:
        return;
      }
}
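
For instance, these rules rewrite (plus (ashift reg 2) (const_int 4)) in
place into (plus (mult reg 4) (const_int 4)); MULT rather than ASHIFT is
the form GCC treats as canonical inside memory addresses.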
Example #26
static bool
parse_add_or_inc (rtx insn, bool before_mem)
{
  rtx pat = single_set (insn);
  if (!pat)
    return false;

  /* Result must be single reg.  */
  if (!REG_P (SET_DEST (pat)))
    return false;

  if ((GET_CODE (SET_SRC (pat)) != PLUS)
      && (GET_CODE (SET_SRC (pat)) != MINUS))
    return false;

  if (!REG_P (XEXP (SET_SRC (pat), 0)))
    return false;

  inc_insn.insn = insn;
  inc_insn.pat = pat;
  inc_insn.reg_res = SET_DEST (pat);
  inc_insn.reg0 = XEXP (SET_SRC (pat), 0);
  if (rtx_equal_p (inc_insn.reg_res, inc_insn.reg0))
    inc_insn.form = before_mem ? FORM_PRE_INC : FORM_POST_INC;
  else
    inc_insn.form = before_mem ? FORM_PRE_ADD : FORM_POST_ADD;

  if (CONST_INT_P (XEXP (SET_SRC (pat), 1)))
    {
      /* Process a = b + c where c is a const.  */
      inc_insn.reg1_is_const = true;
      if (GET_CODE (SET_SRC (pat)) == PLUS)
	{
	  inc_insn.reg1 = XEXP (SET_SRC (pat), 1);
	  inc_insn.reg1_val = INTVAL (inc_insn.reg1);
	}
      else
	{
	  inc_insn.reg1_val = -INTVAL (XEXP (SET_SRC (pat), 1));
	  inc_insn.reg1 = GEN_INT (inc_insn.reg1_val);
	}
      return true;
    }
  else if ((HAVE_PRE_MODIFY_REG || HAVE_POST_MODIFY_REG)
	   && (REG_P (XEXP (SET_SRC (pat), 1)))
	   && GET_CODE (SET_SRC (pat)) == PLUS)
    {
      /* Process a = b + c where c is a reg.  */
      inc_insn.reg1 = XEXP (SET_SRC (pat), 1);
      inc_insn.reg1_is_const = false;

      if (inc_insn.form == FORM_PRE_INC
	  || inc_insn.form == FORM_POST_INC)
	return true;
      else if (rtx_equal_p (inc_insn.reg_res, inc_insn.reg1))
	{
	  /* Reverse the two operands and turn *_ADD into *_INC since
	     a = c + a.  */
	  reverse_inc ();
	  inc_insn.form = before_mem ? FORM_PRE_INC : FORM_POST_INC;
	  return true;
	}
      else
	return true;
    }

  return false;
}
Example #27
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
	       inv (prob));
      break;

    case COND_EXPR:
      {
	rtx label1 = gen_label_rtx ();
	if (!if_true_label || !if_false_label)
	  {
	    drop_through_label = gen_label_rtx ();
	    if (!if_true_label)
	      if_true_label = drop_through_label;
	    if (!if_false_label)
	      if_false_label = drop_through_label;
	  }

        do_pending_stack_adjust ();
	do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
	do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
	break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && have_insn_for (COMPARE, TYPE_MODE (type)))
          {
	    do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		     prob);
            break;
          }
        goto normal;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
	 See if the former is preferred for jump tests and restore it
	 if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
	{
	  tree exp0 = TREE_OPERAND (exp, 0);
	  rtx set_label, clr_label;
	  int setclr_prob = prob;

	  /* Strip narrowing integral type conversions.  */
	  while (CONVERT_EXPR_P (exp0)
		 && TREE_OPERAND (exp0, 0) != error_mark_node
		 && TYPE_PRECISION (TREE_TYPE (exp0))
		    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
	    exp0 = TREE_OPERAND (exp0, 0);

	  /* "exp0 ^ 1" inverts the sense of the single bit test.  */
	  if (TREE_CODE (exp0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (exp0, 1)))
	    {
	      exp0 = TREE_OPERAND (exp0, 0);
	      clr_label = if_true_label;
	      set_label = if_false_label;
	      setclr_prob = inv (prob);
	    }
	  else
	    {
	      clr_label = if_false_label;
	      set_label = if_true_label;
	    }

	  if (TREE_CODE (exp0) == RSHIFT_EXPR)
	    {
	      tree arg = TREE_OPERAND (exp0, 0);
	      tree shift = TREE_OPERAND (exp0, 1);
	      tree argtype = TREE_TYPE (arg);
	      if (TREE_CODE (shift) == INTEGER_CST
		  && compare_tree_int (shift, 0) >= 0
		  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
		  && prefer_and_bit_test (TYPE_MODE (argtype),
					  TREE_INT_CST_LOW (shift)))
		{
		  unsigned HOST_WIDE_INT mask
		    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
		  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
				   build_int_cstu (argtype, mask)),
			   clr_label, set_label, setclr_prob);
		  break;
		}
	    }
	}

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
	  do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
	  || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */
      /* FALLTHRU */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
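      /* In other words (illustrative): with BRANCH_COST >= 4, a boolean
	 "a AND b" is cheaper as a single test of the bitwise result than
	 as two conditional branches; the same route is forced when the
	 RHS has side effects, since a short-circuit branch could skip
	 evaluating it.  */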
      if (BRANCH_COST (optimize_insn_for_speed_p (),
		       false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
	{
	  /* Compare promoted variables in their promoted mode.  */
	  if (SUBREG_PROMOTED_VAR_P (temp)
	      && REG_P (XEXP (temp, 0)))
	    temp = XEXP (temp, 0);
	  else
	    temp = copy_to_reg (temp);
	}
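      /* TEMP now holds the expression's value; branch on "temp != 0",
	 i.e. jump to if_true_label when nonzero and to if_false_label
	 otherwise (comment added for clarity).  */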
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
			       NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
			       GET_MODE (temp), NULL_RTX,
			       if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
Example #28
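/* Summary (not in the original source): walk *ADDRESS_OF_X looking for
   a memory reference whose address uses inc_insn.reg_res in a form that
   can absorb the increment.  Returns -1 when exactly one suitable match
   is found (recorded in mem_insn), 1 when the register is used in a way
   that blocks the transformation or matches more than once, and 0 when
   it does not occur at all.  */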
static int
find_address (rtx *address_of_x)
{
  rtx x = *address_of_x;
  enum rtx_code code = GET_CODE (x);
  const char *const fmt = GET_RTX_FORMAT (code);
  int i;
  int value = 0;
  int tem;

  if (code == MEM && rtx_equal_p (XEXP (x, 0), inc_insn.reg_res))
    {
      /* Match with *reg0.  */
      mem_insn.mem_loc = address_of_x;
      mem_insn.reg0 = inc_insn.reg_res;
      mem_insn.reg1_is_const = true;
      mem_insn.reg1_val = 0;
      mem_insn.reg1 = GEN_INT (0);
      return -1;
    }
  if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
      && rtx_equal_p (XEXP (XEXP (x, 0), 0), inc_insn.reg_res))
    {
      rtx b = XEXP (XEXP (x, 0), 1);
      mem_insn.mem_loc = address_of_x;
      mem_insn.reg0 = inc_insn.reg_res;
      mem_insn.reg1 = b;
      mem_insn.reg1_is_const = inc_insn.reg1_is_const;
      if (CONST_INT_P (b))
	{
	  /* Match with *(reg0 + reg1) where reg1 is a const. */
	  HOST_WIDE_INT val = INTVAL (b);
	  if (inc_insn.reg1_is_const
	      && (inc_insn.reg1_val == val || inc_insn.reg1_val == -val))
	    {
	      mem_insn.reg1_val = val;
	      return -1;
	    }
	}
      else if (!inc_insn.reg1_is_const
	       && rtx_equal_p (inc_insn.reg1, b))
	/* Match with *(reg0 + reg1). */
	return -1;
    }

  if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
    {
      /* If REG occurs inside a MEM used in a bit-field reference,
	 that is unacceptable.  */
      if (find_address (&XEXP (x, 0)))
	return 1;
    }

  if (x == inc_insn.reg_res)
    return 1;

  /* Time for some deep diving: recurse into every sub-expression of X.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  tem = find_address (&XEXP (x, i));
	  /* If this is the first use, let it go so the rest of the
	     insn can be checked.  */
	  if (value == 0)
	    value = tem;
	  else if (tem != 0)
	    /* More than one match was found.  */
	    return 1;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      tem = find_address (&XVECEXP (x, i, j));
	      /* If this is the first use, let it go so the rest of
		 the insn can be checked.  */
	      if (value == 0)
		value = tem;
	      else if (tem != 0)
		/* More than one match was found.  */
		return 1;
	    }
	}
    }
  return value;
}
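/* Typical call site (assumed, not shown in this excerpt): something
   along the lines of "if (find_address (&PATTERN (insn)) == -1)", i.e.
   the candidate insn is accepted only when the incremented register is
   used in exactly one combinable address.  */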
Example #29
#include "output.h"
#include "df.h"

/* Determine if the stack pointer is constant over the life of the function.
   Only useful before prologues have been emitted.  */

static void
notice_stack_pointer_modification_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
				     void *data ATTRIBUTE_UNUSED)
{
  if (x == stack_pointer_rtx
      /* The stack pointer is only modified indirectly as the result
	 of a push until later.  See the comments in rtl.texi
	 regarding Embedded Side-Effects on Addresses.  */
      || (MEM_P (x)
	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == RTX_AUTOINC
	  && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
    crtl->sp_is_unchanging = 0;
}
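/* In GCC this callback is driven by note_stores over each insn's
   pattern, e.g. (illustrative)

     note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
		  NULL);

   clearing crtl->sp_is_unchanging as soon as any store or auto-inc
   address touches the stack pointer.  */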

/* Some targets can emit simpler epilogues if they know that sp was
   not ever modified during the function.  After reload, of course,
   we've already emitted the epilogue so there's no sense searching.  */

namespace {

const pass_data pass_data_stack_ptr_mod =
{
  RTL_PASS, /* type */
  "*stack_ptr_mod", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
Example #30
static bool
replace_oldest_value_addr (rtx *loc, enum reg_class cl,
			   enum machine_mode mode, addr_space_t as,
			   rtx insn, struct value_data *vd)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  int i, j;
  bool changed = false;

  switch (code)
    {
    case PLUS:
      if (DEBUG_INSN_P (insn))
	break;

      {
	rtx orig_op0 = XEXP (x, 0);
	rtx orig_op1 = XEXP (x, 1);
	RTX_CODE code0 = GET_CODE (orig_op0);
	RTX_CODE code1 = GET_CODE (orig_op1);
	rtx op0 = orig_op0;
	rtx op1 = orig_op1;
	rtx *locI = NULL;
	rtx *locB = NULL;
	enum rtx_code index_code = SCRATCH;

	if (GET_CODE (op0) == SUBREG)
	  {
	    op0 = SUBREG_REG (op0);
	    code0 = GET_CODE (op0);
	  }

	if (GET_CODE (op1) == SUBREG)
	  {
	    op1 = SUBREG_REG (op1);
	    code1 = GET_CODE (op1);
	  }

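	/* Decide which operand is the index and which the base
	   (comment added): a MULT or sign/zero extension marks a scaled
	   index, a MEM on the opposite side likewise forces this side
	   to be the index, and a constant can only be the displacement,
	   leaving the other operand as the base.  */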
	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
	    || code0 == ZERO_EXTEND || code1 == MEM)
	  {
	    locI = &XEXP (x, 0);
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (*locI);
	  }
	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
		 || code1 == ZERO_EXTEND || code0 == MEM)
	  {
	    locI = &XEXP (x, 1);
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (*locI);
	  }
	else if (code0 == CONST_INT || code0 == CONST
		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
	  {
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (XEXP (x, 0));
	  }
	else if (code1 == CONST_INT || code1 == CONST
		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
	  {
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (XEXP (x, 1));
	  }
	else if (code0 == REG && code1 == REG)
	  {
	    int index_op;
	    unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);

	    if (REGNO_OK_FOR_INDEX_P (regno1)
		&& regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
	      index_op = 1;
	    else if (REGNO_OK_FOR_INDEX_P (regno0)
		     && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
	      index_op = 0;
	    else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
		     || REGNO_OK_FOR_INDEX_P (regno1))
	      index_op = 1;
	    else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
	      index_op = 0;
	    else
	      index_op = 1;

	    locI = &XEXP (x, index_op);
	    locB = &XEXP (x, !index_op);
	    index_code = GET_CODE (*locI);
	  }
	else if (code0 == REG)
	  {
	    locI = &XEXP (x, 0);
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (*locI);
	  }
	else if (code1 == REG)
	  {
	    locI = &XEXP (x, 1);
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (*locI);
	  }

	if (locI)
	  changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
						mode, as, insn, vd);
	if (locB)
	  changed |= replace_oldest_value_addr (locB,
						base_reg_class (mode, as, PLUS,
								index_code),
						mode, as, insn, vd);
	return changed;
      }

    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
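      /* Auto-modified addresses both use and update their register;
	 substituting a copy here would change which register gets
	 incremented, so leave them alone (comment added).  */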
      return false;

    case MEM:
      return replace_oldest_value_mem (x, insn, vd);

    case REG:
      return replace_oldest_value_reg (loc, cl, insn, vd);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
					      insn, vd);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
						mode, as, insn, vd);
    }

  return changed;
}