int
ix86_comparison_int_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  switch (GET_CODE (op))
    {
    case NE:
    case EQ:
    case GE:
    case GT:
    case LE:
    case LT:
      break;
    default:
      return false;
    }
  return (mode == VOIDmode || GET_MODE (op) == mode);
}
Example #2
/* Function to generate PC relative jump table.
   Refer to nds32.md for more details.

   The following is a sample for the case where the diff value
   can be represented in '.short' size.

     addi    $r1, $r1, -(case_lower_bound)
     slti    $ta, $r1, (case_number)
     beqz    $ta, .L_skip_label

     la      $ta, .L35             ! get jump table address
     lh      $r1, [$ta + $r1 << 1] ! load symbol diff from jump table entry
     add     $ta, $r1, $ta
     jr5     $ta

     ! jump table entry
   L35:
     .short  .L25-.L35
     .short  .L26-.L35
     .short  .L27-.L35
     .short  .L28-.L35
     .short  .L29-.L35
     .short  .L30-.L35
     .short  .L31-.L35
     .short  .L32-.L35
     .short  .L33-.L35
     .short  .L34-.L35 */
const char *
nds32_output_casesi_pc_relative (rtx *operands)
{
  enum machine_mode mode;
  rtx diff_vec;

  diff_vec = PATTERN (NEXT_INSN (as_a <rtx_insn *> (operands[1])));

  gcc_assert (GET_CODE (diff_vec) == ADDR_DIFF_VEC);

  /* Step C: "t <-- operands[1]".  */
  output_asm_insn ("la\t$ta, %l1", operands);

  /* Get the mode of each element in the difference vector.  */
  mode = GET_MODE (diff_vec);

  /* Step D: "z <-- (mem (plus (operands[0] << m) t))",
     where m is 0, 1, or 2 to load address-diff value from table.  */
  switch (mode)
    {
    case QImode:
      output_asm_insn ("lb\t%2, [$ta + %0 << 0]", operands);
      break;
    case HImode:
      output_asm_insn ("lh\t%2, [$ta + %0 << 1]", operands);
      break;
    case SImode:
      output_asm_insn ("lw\t%2, [$ta + %0 << 2]", operands);
      break;
    default:
      gcc_unreachable ();
    }

  /* Step E: "t <-- z + t".
     Add table label_ref with address-diff value to
     obtain target case address.  */
  output_asm_insn ("add\t$ta, %2, $ta", operands);

  /* Step F: jump to target with register t.  */
  if (TARGET_16_BIT)
    return "jr5\t$ta";
  else
    return "jr\t$ta";
}
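The comment at the top of nds32_output_casesi_pc_relative describes dispatch through a table of label differences: entry i holds (case_label_i - table_label), which is loaded with the mode-dependent scaled addressing and then added back to the table address. Below is a minimal standalone sketch of that offset arithmetic in plain C (hypothetical addresses, not generated code), just to make the add-back step concrete.

#include <stdio.h>
#include <stdint.h>

/* Hypothetical addresses for the jump table label and four case labels.  */
enum { TABLE_BASE = 0x1000, CASE0 = 0x0f00, CASE1 = 0x0f20,
       CASE2 = 0x0f44, CASE3 = 0x0f80 };

int main (void)
{
  /* Each entry holds (case label - table label), as emitted with .short.  */
  int16_t table[4] = { CASE0 - TABLE_BASE, CASE1 - TABLE_BASE,
		       CASE2 - TABLE_BASE, CASE3 - TABLE_BASE };
  int index = 2;

  /* lh  $r1, [$ta + $r1 << 1]  -- load the 16-bit difference...  */
  int32_t diff = table[index];

  /* add $ta, %2, $ta           -- ...and add it back to the table base.  */
  unsigned target = TABLE_BASE + diff;

  printf ("dispatch to 0x%x\n", target);   /* prints "dispatch to 0xf44" */
  return 0;
}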
Example #3
int
default_unspec_may_trap_p (const_rtx x, unsigned flags)
{
  int i;

  if (GET_CODE (x) == UNSPEC_VOLATILE
      /* Any floating arithmetic may trap.  */
      || (SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && flag_trapping_math))
    return 1;

  for (i = 0; i < XVECLEN (x, 0); ++i)
    {
      if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
	return 1;
    }

  return 0;
}
static rtx
find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
{
  unsigned int regno = REGNO (reg);
  enum machine_mode mode = GET_MODE (reg);
  unsigned int i;

  /* If we are accessing REG in some mode other than what we set it in,
     make sure that the replacement is valid.  In particular, consider
	(set (reg:DI r11) (...))
	(set (reg:SI r9) (reg:SI r11))
	(set (reg:SI r10) (...))
	(set (...) (reg:DI r9))
     Replacing r9 with r11 is invalid.  */
  if (mode != vd->e[regno].mode)
    {
      if (hard_regno_nregs[regno][mode]
	  > hard_regno_nregs[regno][vd->e[regno].mode])
	return NULL_RTX;
    }

  for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
    {
      enum machine_mode oldmode = vd->e[i].mode;
      rtx new_rtx;

      if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
	continue;

      new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
      if (new_rtx)
	{
	  ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
	  REG_ATTRS (new_rtx) = REG_ATTRS (reg);
	  REG_POINTER (new_rtx) = REG_POINTER (reg);
	  return new_rtx;
	}
    }

  return NULL_RTX;
}
int
commutative_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  switch (GET_CODE (op))
    {
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case SMIN:
    case SMAX:
    case UMIN:
    case UMAX:
      break;
    default:
      return false;
    }
  return (mode == VOIDmode || GET_MODE (op) == mode);
}
Example #6
static bool
transform_ifelse (ext_cand *cand, rtx def_insn)
{
  rtx set_insn = PATTERN (def_insn);
  rtx srcreg, dstreg, srcreg2;
  rtx map_srcreg, map_dstreg, map_srcreg2;
  rtx ifexpr;
  rtx cond;
  rtx new_set;

  gcc_assert (GET_CODE (set_insn) == SET);

  cond = XEXP (SET_SRC (set_insn), 0);
  dstreg = SET_DEST (set_insn);
  srcreg = XEXP (SET_SRC (set_insn), 1);
  srcreg2 = XEXP (SET_SRC (set_insn), 2);
  /* If the conditional move already has the right or wider mode,
     there is nothing to do.  */
  if (GET_MODE_SIZE (GET_MODE (dstreg)) >= GET_MODE_SIZE (cand->mode))
    return true;

  map_srcreg = gen_rtx_REG (cand->mode, REGNO (srcreg));
  map_srcreg2 = gen_rtx_REG (cand->mode, REGNO (srcreg2));
  map_dstreg = gen_rtx_REG (cand->mode, REGNO (dstreg));
  ifexpr = gen_rtx_IF_THEN_ELSE (cand->mode, cond, map_srcreg, map_srcreg2);
  new_set = gen_rtx_SET (VOIDmode, map_dstreg, ifexpr);

  if (validate_change (def_insn, &PATTERN (def_insn), new_set, true))
    {
      if (dump_file)
        {
          fprintf (dump_file,
		   "Mode of conditional move instruction extended:\n");
          print_rtl_single (dump_file, def_insn);
        }
      return true;
    }

  return false;
}
Example #7
static rtx
entry_register (struct web_entry *entry, struct ref *ref, char *used)
{
  struct web_entry *root;
  rtx reg, newreg;

  /* Find the corresponding web and see if it has been visited.  */
  root = unionfind_root (entry);
  if (root->reg)
    return root->reg;

  /* We are seeing this web for the first time, do the assignment.  */
  reg = DF_REF_REAL_REG (ref);

  /* In case the original register is already assigned, generate a new one.  */
  if (!used[REGNO (reg)])
    newreg = reg, used[REGNO (reg)] = 1;
  else if (REG_USERVAR_P (reg) && 0/*&& !flag_messy_debugging*/)
    {
      newreg = reg;
      if (dump_file)
	fprintf (dump_file,
		 "New web forced to keep reg=%i (user variable)\n",
		 REGNO (reg));
    }
  else
    {
      newreg = gen_reg_rtx (GET_MODE (reg));
      REG_USERVAR_P (newreg) = REG_USERVAR_P (reg);
      REG_POINTER (newreg) = REG_POINTER (reg);
      REG_LOOP_TEST_P (newreg) = REG_LOOP_TEST_P (reg);
      REG_ATTRS (newreg) = REG_ATTRS (reg);
      if (dump_file)
	fprintf (dump_file, "Web oldreg=%i newreg=%i\n", REGNO (reg),
		 REGNO (newreg));
    }

  root->reg = newreg;
  return newreg;
}
static inline int
ix86_comparison_operator_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
#line 1006 "../.././gcc/config/i386/predicates.md"
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      enum rtx_code second_code, bypass_code;
      ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
      return (bypass_code == UNKNOWN && second_code == UNKNOWN);
    }
  switch (code)
    {
    case EQ: case NE:
      return 1;
    case LT: case GE:
      if (inmode == CCmode || inmode == CCGCmode
	  || inmode == CCGOCmode || inmode == CCNOmode)
	return 1;
      return 0;
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCCmode)
	return 1;
      return 0;
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
	return 1;
      return 0;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
	return 1;
      return 0;
    default:
      return 0;
    }
}
Example #9
static bool
insn_prefetch_values_to_profile (rtx insn, histogram_values *values)
{
  rtx mem, address;
  int write;
  histogram_value hist;

  /* It only makes sense to look for memory references in ordinary insns.  */
  if (GET_CODE (insn) != INSN)
    return false;

  if (!find_mem_reference (insn, &mem, &write))
    return false;

  address = XEXP (mem, 0);
  if (side_effects_p (address))
    return false;

  /* APPLE LOCAL begin should be in FSF, and has been submitted.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER)
    return false;
  /* APPLE LOCAL end should be in FSF, and has been submitted.  */
      
  if (CONSTANT_P (address))
    return false;

  hist = ggc_alloc (sizeof (*hist));
  hist->value = address;
  hist->mode = GET_MODE (address);
  hist->seq = NULL_RTX;
  hist->insn = insn;
  hist->type = HIST_TYPE_CONST_DELTA;
  VEC_safe_push (histogram_value, *values, hist);

  return true;
}
Example #10
/* In SET, assign the bit for the register number of REG the value VALUE.
   If REG is a hard register, do so for all its constituent registers.
   Return the number of registers that have become included (as a positive
   number) or excluded (as a negative number).  */
static int
assign_reg_reg_set (regset set, rtx reg, int value)
{
  unsigned regno = REGNO (reg);
  int nregs, i, old;

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (!reload_completed);
      nregs = 1;
    }
  else
    nregs = hard_regno_nregs[regno][GET_MODE (reg)];
  for (old = 0, i = nregs; --i >= 0; regno++)
    {
      if ((value != 0) == REGNO_REG_SET_P (set, regno))
	continue;
      if (value)
	old++, SET_REGNO_REG_SET (set, regno);
      else
	old--, CLEAR_REGNO_REG_SET (set, regno);
    }
  return old;
}
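The header comment above describes the return value as a signed count of bits that actually flipped. Below is a minimal standalone sketch of the same convention, using a plain unsigned mask in place of a regset (hypothetical helper, not GCC code).

#include <stdio.h>

/* Set or clear NREGS consecutive bits starting at REGNO in *SET and
   return how many bits actually changed (positive when setting,
   negative when clearing), mirroring assign_reg_reg_set's contract.  */
static int
assign_bits (unsigned *set, unsigned regno, int nregs, int value)
{
  int old = 0;
  for (int i = nregs; --i >= 0; regno++)
    {
      unsigned bit = 1u << regno;
      if ((value != 0) == ((*set & bit) != 0))
	continue;			/* already in the desired state */
      if (value)
	old++, *set |= bit;
      else
	old--, *set &= ~bit;
    }
  return old;
}

int main (void)
{
  unsigned live = 0;
  printf ("%d\n", assign_bits (&live, 4, 2, 1));   /* 2: bits 4 and 5 newly set */
  printf ("%d\n", assign_bits (&live, 5, 2, 0));   /* -1: only bit 5 was set    */
  return 0;
}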
Example #11
rtx
gen_lowpart_general (machine_mode mode, rtx x)
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  /* Handle SUBREGs and hard REGs that were rejected by
     simplify_gen_subreg.  */
  else if (REG_P (x) || GET_CODE (x) == SUBREG)
    {
      result = gen_lowpart_common (mode, copy_to_reg (x));
      gcc_assert (result != 0);
      return result;
    }
  else
    {
      int offset = 0;

      /* The only additional case we can do is MEM.  */
      gcc_assert (MEM_P (x));

      /* The following exposes the use of "x" to CSE.  */
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (x))
	  && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x))
	  && !reload_completed)
	return gen_lowpart_general (mode, force_reg (GET_MODE (x), x));

      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
}
rtx
gen_lowpart_general (enum machine_mode mode, rtx x)
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (REG_P (x))
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      gcc_assert (result != 0);
      return result;
    }
  else
    {
      int offset = 0;

      /* The only additional case we can do is MEM.  */
      gcc_assert (MEM_P (x));

      /* The following exposes the use of "x" to CSE.  */
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (x))
	  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
				    GET_MODE_BITSIZE (GET_MODE (x)))
	  && ! no_new_pseudos)
	return gen_lowpart_general (mode, force_reg (GET_MODE (x), x));

      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
}
Example #13
static inline rtx
emit_partition_copy (rtx dest, rtx src, int unsignedsrcp, tree sizeexp)
{
  rtx seq;

  start_sequence ();

  if (GET_MODE (src) != VOIDmode && GET_MODE (src) != GET_MODE (dest))
    src = convert_to_mode (GET_MODE (dest), src, unsignedsrcp);
  if (GET_MODE (src) == BLKmode)
    {
      gcc_assert (GET_MODE (dest) == BLKmode);
      emit_block_move (dest, src, expr_size (sizeexp), BLOCK_OP_NORMAL);
    }
  else
    emit_move_insn (dest, src);

  seq = get_insns ();
  end_sequence ();

  return seq;
}
static bool
copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
{
  bool anything_changed = false;
  rtx insn;

  for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
    {
      int n_ops, i, alt, predicated;
      bool is_asm, any_replacements;
      rtx set;
      bool replaced[MAX_RECOG_OPERANDS];
      bool changed = false;
      struct kill_set_value_data ksvd;

      if (!NONDEBUG_INSN_P (insn))
	{
	  if (DEBUG_INSN_P (insn))
	    {
	      rtx loc = INSN_VAR_LOCATION_LOC (insn);
	      if (!VAR_LOC_UNKNOWN_P (loc))
		replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
					   ALL_REGS, GET_MODE (loc),
					   ADDR_SPACE_GENERIC, insn, vd);
	    }

	  if (insn == BB_END (bb))
	    break;
	  else
	    continue;
	}

      set = single_set (insn);
      extract_insn (insn);
      if (! constrain_operands (1))
	fatal_insn_not_found (insn);
      preprocess_constraints ();
      alt = which_alternative;
      n_ops = recog_data.n_operands;
      is_asm = asm_noperands (PATTERN (insn)) >= 0;

      /* Simplify the code below by rewriting things to reflect
	 matching constraints.  Also promote OP_OUT to OP_INOUT
	 in predicated instructions.  */

      predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
      for (i = 0; i < n_ops; ++i)
	{
	  int matches = recog_op_alt[i][alt].matches;
	  if (matches >= 0)
	    recog_op_alt[i][alt].cl = recog_op_alt[matches][alt].cl;
	  if (matches >= 0 || recog_op_alt[i][alt].matched >= 0
	      || (predicated && recog_data.operand_type[i] == OP_OUT))
	    recog_data.operand_type[i] = OP_INOUT;
	}

      /* Apply changes to earlier DEBUG_INSNs if possible.  */
      if (vd->n_debug_insn_changes)
	note_uses (&PATTERN (insn), cprop_find_used_regs, vd);

      /* For each earlyclobber operand, zap the value data.  */
      for (i = 0; i < n_ops; i++)
	if (recog_op_alt[i][alt].earlyclobber)
	  kill_value (recog_data.operand[i], vd);

      /* Within asms, a clobber cannot overlap inputs or outputs.
	 I wouldn't think this were true for regular insns, but
	 scan_rtx treats them like that...  */
      note_stores (PATTERN (insn), kill_clobbered_value, vd);

      /* Kill all auto-incremented values.  */
      /* ??? REG_INC is useless, since stack pushes aren't done that way.  */
      for_each_rtx (&PATTERN (insn), kill_autoinc_value, vd);

      /* Kill all early-clobbered operands.  */
      for (i = 0; i < n_ops; i++)
	if (recog_op_alt[i][alt].earlyclobber)
	  kill_value (recog_data.operand[i], vd);

      /* Special-case plain move instructions, since we may well
	 be able to do the move from a different register class.  */
      if (set && REG_P (SET_SRC (set)))
	{
	  rtx src = SET_SRC (set);
	  unsigned int regno = REGNO (src);
	  enum machine_mode mode = GET_MODE (src);
	  unsigned int i;
	  rtx new_rtx;

	  /* If we are accessing SRC in some mode other than what we
	     set it in, make sure that the replacement is valid.  */
	  if (mode != vd->e[regno].mode)
	    {
	      if (hard_regno_nregs[regno][mode]
		  > hard_regno_nregs[regno][vd->e[regno].mode])
		goto no_move_special_case;

	      /* Likewise, if we are narrowing on a big-endian target, the
		 transformation is also invalid.  */
	      if (hard_regno_nregs[regno][mode]
		  < hard_regno_nregs[regno][vd->e[regno].mode]
		  && (GET_MODE_SIZE (vd->e[regno].mode) > UNITS_PER_WORD
		      ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
		goto no_move_special_case;
	    }

	  /* If the destination is also a register, try to find a source
	     register in the same class.  */
	  if (REG_P (SET_DEST (set)))
	    {
	      new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno), src, vd);
	      if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
		{
		  if (dump_file)
		    fprintf (dump_file,
			     "insn %u: replaced reg %u with %u\n",
			     INSN_UID (insn), regno, REGNO (new_rtx));
		  changed = true;
		  goto did_replacement;
		}
	      /* We need to re-extract as validate_change clobbers
		 recog_data.  */
	      extract_insn (insn);
	      if (! constrain_operands (1))
		fatal_insn_not_found (insn);
	      preprocess_constraints ();
	    }

	  /* Otherwise, try all valid registers and see if the replacement is valid.  */
	  for (i = vd->e[regno].oldest_regno; i != regno;
	       i = vd->e[i].next_regno)
	    {
	      new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
				       mode, i, regno);
	      if (new_rtx != NULL_RTX)
		{
		  if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
		    {
		      ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
		      REG_ATTRS (new_rtx) = REG_ATTRS (src);
		      REG_POINTER (new_rtx) = REG_POINTER (src);
		      if (dump_file)
			fprintf (dump_file,
				 "insn %u: replaced reg %u with %u\n",
				 INSN_UID (insn), regno, REGNO (new_rtx));
		      changed = true;
		      goto did_replacement;
		    }
		  /* We need to re-extract as validate_change clobbers
		     recog_data.  */
		  extract_insn (insn);
		  if (! constrain_operands (1))
		    fatal_insn_not_found (insn);
		  preprocess_constraints ();
		}
	    }
	}
      no_move_special_case:

      any_replacements = false;

      /* For each input operand, replace a hard register with the
	 eldest live copy that's in an appropriate register class.  */
      for (i = 0; i < n_ops; i++)
	{
	  replaced[i] = false;

	  /* Don't scan match_operand here, since we've no reg class
	     information to pass down.  Any operands that we could
	     substitute in will be represented elsewhere.  */
	  if (recog_data.constraints[i][0] == '\0')
	    continue;

	  /* Don't replace in asms intentionally referencing hard regs.  */
	  if (is_asm && REG_P (recog_data.operand[i])
	      && (REGNO (recog_data.operand[i])
		  == ORIGINAL_REGNO (recog_data.operand[i])))
	    continue;

	  if (recog_data.operand_type[i] == OP_IN)
	    {
	      if (recog_op_alt[i][alt].is_address)
		replaced[i]
		  = replace_oldest_value_addr (recog_data.operand_loc[i],
					       recog_op_alt[i][alt].cl,
					       VOIDmode, ADDR_SPACE_GENERIC,
					       insn, vd);
	      else if (REG_P (recog_data.operand[i]))
		replaced[i]
		  = replace_oldest_value_reg (recog_data.operand_loc[i],
					      recog_op_alt[i][alt].cl,
					      insn, vd);
	      else if (MEM_P (recog_data.operand[i]))
		replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
							insn, vd);
	    }
	  else if (MEM_P (recog_data.operand[i]))
	    replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
						    insn, vd);

	  /* If we performed any replacement, update match_dups.  */
	  if (replaced[i])
	    {
	      int j;
	      rtx new_rtx;

	      new_rtx = *recog_data.operand_loc[i];
	      recog_data.operand[i] = new_rtx;
	      for (j = 0; j < recog_data.n_dups; j++)
		if (recog_data.dup_num[j] == i)
		  validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);

	      any_replacements = true;
	    }
	}

      if (any_replacements)
	{
	  if (! apply_change_group ())
	    {
	      for (i = 0; i < n_ops; i++)
		if (replaced[i])
		  {
		    rtx old = *recog_data.operand_loc[i];
		    recog_data.operand[i] = old;
		  }

	      if (dump_file)
		fprintf (dump_file,
			 "insn %u: reg replacements not verified\n",
			 INSN_UID (insn));
	    }
	  else
	    changed = true;
	}

    did_replacement:
      if (changed)
	{
	  anything_changed = true;

	  /* If something changed, perhaps further changes to earlier
	     DEBUG_INSNs can be applied.  */
	  if (vd->n_debug_insn_changes)
	    note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
	}

      ksvd.vd = vd;
      ksvd.ignore_set_reg = NULL_RTX;

      /* Clobber call-clobbered registers.  */
      if (CALL_P (insn))
	{
	  unsigned int set_regno = INVALID_REGNUM;
	  unsigned int set_nregs = 0;
	  unsigned int regno;
	  rtx exp;
	  hard_reg_set_iterator hrsi;

	  for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
	    {
	      rtx x = XEXP (exp, 0);
	      if (GET_CODE (x) == SET)
		{
		  rtx dest = SET_DEST (x);
		  kill_value (dest, vd);
		  set_value_regno (REGNO (dest), GET_MODE (dest), vd);
		  copy_value (dest, SET_SRC (x), vd);
		  ksvd.ignore_set_reg = dest;
		  set_regno = REGNO (dest);
		  set_nregs
		    = hard_regno_nregs[set_regno][GET_MODE (dest)];
		  break;
		}
	    }

	  EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
	    if (regno < set_regno || regno >= set_regno + set_nregs)
	      kill_value_regno (regno, 1, vd);

	  /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
	     of the SET isn't in regs_invalidated_by_call hard reg set,
	     but instead among CLOBBERs on the CALL_INSN, we could wrongly
	     assume the value in it is still live.  */
	  if (ksvd.ignore_set_reg)
	    note_stores (PATTERN (insn), kill_clobbered_value, vd);
	}

      /* Notice stores.  */
      note_stores (PATTERN (insn), kill_set_value, &ksvd);

      /* Notice copies.  */
      if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
	copy_value (SET_DEST (set), SET_SRC (set), vd);

      if (insn == BB_END (bb))
	break;
    }

  return anything_changed;
}
Example #15
static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
			  tree exp, builtin_simd_arg *args)
{
  rtx pat;
  tree arg[SIMD_MAX_BUILTIN_ARGS];
  rtx op[SIMD_MAX_BUILTIN_ARGS];
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
  int argc = 0;

  if (have_retval
      && (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode)))
    target = gen_reg_rtx (tmode);

  for (;;)
    {
      builtin_simd_arg thisarg = args[argc];

      if (thisarg == SIMD_ARG_STOP)
	break;
      else
	{
	  arg[argc] = CALL_EXPR_ARG (exp, argc);
	  op[argc] = expand_normal (arg[argc]);
	  mode[argc] = insn_data[icode].operand[argc + have_retval].mode;

	  switch (thisarg)
	    {
	    case SIMD_ARG_COPY_TO_REG:
	      if (POINTER_TYPE_P (TREE_TYPE (arg[argc])))
		op[argc] = convert_memory_address (Pmode, op[argc]);
	      /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		op[argc] = copy_to_mode_reg (mode[argc], op[argc]);
	      break;

	    case SIMD_ARG_LANE_INDEX:
	      /* Must be a previous operand into which this is an index.  */
	      gcc_assert (argc > 0);
	      if (CONST_INT_P (op[argc]))
		{
		  enum machine_mode vmode = mode[argc - 1];
		  aarch64_simd_lane_bounds (op[argc],
					    0, GET_MODE_NUNITS (vmode));
		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
		  op[argc] = GEN_INT (ENDIAN_LANE_N (vmode, INTVAL (op[argc])));
		}
	      /* Fall through - if the lane index isn't a constant then
		 the next case will error.  */
	    case SIMD_ARG_CONSTANT:
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
	      {
		error_at (EXPR_LOCATION (exp), "incompatible type for argument %d, "
		       "expected %<const int%>", argc + 1);
		return const0_rtx;
	      }
	      break;

	    case SIMD_ARG_STOP:
	      gcc_unreachable ();
	    }

	  argc++;
	}
    }

  if (have_retval)
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (target, op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (target, op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }
  else
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
static void
copy_value (rtx dest, rtx src, struct value_data *vd)
{
  unsigned int dr = REGNO (dest);
  unsigned int sr = REGNO (src);
  unsigned int dn, sn;
  unsigned int i;

  /* ??? At present, it's possible to see noop sets.  It'd be nice if
     this were cleaned up beforehand...  */
  if (sr == dr)
    return;

  /* Do not propagate copies to the stack pointer, as that can leave
     memory accesses with no scheduling dependency on the stack update.  */
  if (dr == STACK_POINTER_REGNUM)
    return;

  /* Likewise with the frame pointer, if we're using one.  */
  if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
    return;

  /* Do not propagate copies to fixed or global registers: patterns
     may rely on seeing a particular fixed register, and users may
     expect the chosen global register in asm.  */
  if (fixed_regs[dr] || global_regs[dr])
    return;

  /* If SRC and DEST overlap, don't record anything.  */
  dn = hard_regno_nregs[dr][GET_MODE (dest)];
  sn = hard_regno_nregs[sr][GET_MODE (dest)];
  if ((dr > sr && dr < sr + sn)
      || (sr > dr && sr < dr + dn))
    return;

  /* If SRC had no assigned mode (i.e. we didn't know it was live)
     assign it now and assume the value came from an input argument
     or somesuch.  */
  if (vd->e[sr].mode == VOIDmode)
    set_value_regno (sr, vd->e[dr].mode, vd);

  /* If we are narrowing the input to a smaller number of hard regs,
     and it is in big endian, we are really extracting a high part.
     Since we generally associate a low part of a value with the value itself,
     we must not do the same for the high part.
     Note we can still get low parts for the same mode combination through
     a two-step copy involving differently sized hard regs.
     Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
     (set (reg:DI r0) (reg:DI fr0))
     (set (reg:SI fr2) (reg:SI r0))
     loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
     (set (reg:SI fr2) (reg:SI fr0))
     loads the high part of (reg:DI fr0) into fr2.

     We can't properly represent the latter case in our tables, so don't
     record anything then.  */
  else if (sn < (unsigned int) hard_regno_nregs[sr][vd->e[sr].mode]
	   && (GET_MODE_SIZE (vd->e[sr].mode) > UNITS_PER_WORD
	       ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return;

  /* If SRC had been assigned a mode narrower than the copy, we can't
     link DEST into the chain, because not all of the pieces of the
     copy came from oldest_regno.  */
  else if (sn > (unsigned int) hard_regno_nregs[sr][vd->e[sr].mode])
    return;

  /* Link DR at the end of the value chain used by SR.  */

  vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;

  for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
    continue;
  vd->e[i].next_regno = dr;

#ifdef ENABLE_CHECKING
  validate_value_data (vd);
#endif
}
void
crx_print_operand (FILE * file, rtx x, int code)
{
  switch (code)
    {
    case 'p' :
      if (GET_CODE (x) == REG) {
	if (GET_MODE (x) == DImode || GET_MODE (x) == DFmode)
	  {
	    int regno = REGNO (x);
	    if (regno + 1 >= SP_REGNUM) abort ();
	    fprintf (file, "{%s, %s}", reg_names[regno], reg_names[regno + 1]);
	    return;
	  }
	else
	  {
	    if (REGNO (x) >= SP_REGNUM) abort ();
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	    return;
	  }
      }

    case 'd' :
	{
	  const char *crx_cmp_str;
	  switch (GET_CODE (x))
	    { /* MD: compare (reg, reg or imm) but CRX: cmp (reg or imm, reg)
	       * -> swap all non symmetric ops */
	    case EQ  : crx_cmp_str = "eq"; break;
	    case NE  : crx_cmp_str = "ne"; break;
	    case GT  : crx_cmp_str = "lt"; break;
	    case GTU : crx_cmp_str = "lo"; break;
	    case LT  : crx_cmp_str = "gt"; break;
	    case LTU : crx_cmp_str = "hi"; break;
	    case GE  : crx_cmp_str = "le"; break;
	    case GEU : crx_cmp_str = "ls"; break;
	    case LE  : crx_cmp_str = "ge"; break;
	    case LEU : crx_cmp_str = "hs"; break;
	    default : abort ();
	    }
	  fprintf (file, "%s", crx_cmp_str);
	  return;
	}

    case 'H':
      /* Print high part of a double precision value. */
      switch (GET_CODE (x))
	{
	case CONST_DOUBLE:
	  if (GET_MODE (x) == SFmode) abort ();
	  if (GET_MODE (x) == DFmode)
	    {
	      /* High part of a DF const. */
	      REAL_VALUE_TYPE r;
	      long l[2];

	      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	      REAL_VALUE_TO_TARGET_DOUBLE (r, l);

	      fprintf (file, "$0x%lx", l[1]);
	      return;
	    }

	  /* -- Fallthrough to handle DI consts -- */

	case CONST_INT:
	    {
	      rtx high, low;
	      split_double (x, &low, &high);
	      putc ('$', file);
	      output_addr_const (file, high);
	      return;
	    }

	case REG:
	  if (REGNO (x) + 1 >= FIRST_PSEUDO_REGISTER) abort ();
	  fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	  return;

	case MEM:
	  /* Adjust memory address to high part.  */
	    {
	      rtx adj_mem = x;
	      adj_mem = adjust_address (adj_mem, GET_MODE (adj_mem), 4);

	      output_memory_reference_mode = GET_MODE (adj_mem);
	      output_address (XEXP (adj_mem, 0));
	      return;
	    }

	default:
	  abort ();
	}

    case 'L':
      /* Print low part of a double precision value. */
      switch (GET_CODE (x))
	{
	case CONST_DOUBLE:
	  if (GET_MODE (x) == SFmode) abort ();
	  if (GET_MODE (x) == DFmode)
	    {
	      /* Low part of a DF const. */
	      REAL_VALUE_TYPE r;
	      long l[2];

	      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	      REAL_VALUE_TO_TARGET_DOUBLE (r, l);

	      fprintf (file, "$0x%lx", l[0]);
	      return;
	    }

	  /* -- Fallthrough to handle DI consts -- */

	case CONST_INT:
	    {
	      rtx high, low;
	      split_double (x, &low, &high);
	      putc ('$', file);
	      output_addr_const (file, low);
	      return;
	    }

	case REG:
	  fprintf (file, "%s", reg_names[REGNO (x)]);
	  return;

	case MEM:
	  output_memory_reference_mode = GET_MODE (x);
	  output_address (XEXP (x, 0));
	  return;

	default:
	  abort ();
	}

    case 0 : /* default */
      switch (GET_CODE (x))
	{
	case REG:
	  fprintf (file, "%s", reg_names[REGNO (x)]);
	  return;

	case MEM:
	  output_memory_reference_mode = GET_MODE (x);
	  output_address (XEXP (x, 0));
	  return;

	case CONST_DOUBLE:
	    {
	      REAL_VALUE_TYPE r;
	      long l;

	      /* Always use H and L for double precision - see above */
	      gcc_assert (GET_MODE (x) == SFmode);

	      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	      REAL_VALUE_TO_TARGET_SINGLE (r, l);

	      fprintf (file, "$0x%lx", l);
	      return;
	    }

	default:
	  putc ('$', file);
	  output_addr_const (file, x);
	  return;
	}

    default:
      output_operand_lossage ("invalid %%xn code");
    }

  abort ();
}
Example #18
File: rtl.c  Project: keparo/gcc
hashval_t
iterative_hash_rtx (const_rtx x, hashval_t hash)
{
  enum rtx_code code;
  enum machine_mode mode;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return hash;
  code = GET_CODE (x);
  hash = iterative_hash_object (code, hash);
  mode = GET_MODE (x);
  hash = iterative_hash_object (mode, hash);
  switch (code)
    {
    case REG:
      i = REGNO (x);
      return iterative_hash_object (i, hash);
    case CONST_INT:
      return iterative_hash_object (INTVAL (x), hash);
    case SYMBOL_REF:
      if (XSTR (x, 0))
	return iterative_hash (XSTR (x, 0), strlen (XSTR (x, 0)) + 1,
			       hash);
      return hash;
    case LABEL_REF:
    case DEBUG_EXPR:
    case VALUE:
    case SCRATCH:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case DEBUG_IMPLICIT_PTR:
    case DEBUG_PARAMETER_REF:
      return hash;
    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    switch (fmt[i])
      {
      case 'w':
	hash = iterative_hash_object (XWINT (x, i), hash);
	break;
      case 'n':
      case 'i':
	hash = iterative_hash_object (XINT (x, i), hash);
	break;
      case 'V':
      case 'E':
	j = XVECLEN (x, i);
	hash = iterative_hash_object (j, hash);
	for (j = 0; j < XVECLEN (x, i); j++)
	  hash = iterative_hash_rtx (XVECEXP (x, i, j), hash);
	break;
      case 'e':
	hash = iterative_hash_rtx (XEXP (x, i), hash);
	break;
      case 'S':
      case 's':
	if (XSTR (x, i))
	  hash = iterative_hash (XSTR (x, i), strlen (XSTR (x, i)) + 1,
				 hash);
	break;
      default:
	break;
      }
  return hash;
}
Example #19
void
find_comparison_dom_walker::before_dom_children (basic_block bb)
{
  struct comparison *last_cmp;
  rtx_insn *insn, *next, *last_clobber;
  bool last_cmp_valid;
  bool need_purge = false;
  bitmap killed;

  killed = BITMAP_ALLOC (NULL);

  /* The last comparison that was made.  Will be reset to NULL
     once the flags are clobbered.  */
  last_cmp = NULL;

  /* True iff the last comparison has not been clobbered, nor
     have its inputs.  Used to eliminate duplicate compares.  */
  last_cmp_valid = false;

  /* The last insn that clobbered the flags, if that insn is of
     a form that may be valid for eliminating a following compare.
     To be reset to NULL once the flags are set otherwise.  */
  last_clobber = NULL;

  /* Propagate the last live comparison throughout the extended basic block. */
  if (single_pred_p (bb))
    {
      last_cmp = (struct comparison *) single_pred (bb)->aux;
      if (last_cmp)
	last_cmp_valid = last_cmp->inputs_valid;
    }

  for (insn = BB_HEAD (bb); insn; insn = next)
    {
      rtx src;

      next = (insn == BB_END (bb) ? NULL : NEXT_INSN (insn));
      if (!NONDEBUG_INSN_P (insn))
	continue;

      /* Compute the set of registers modified by this instruction.  */
      bitmap_clear (killed);
      df_simulate_find_defs (insn, killed);

      src = conforming_compare (insn);
      if (src)
	{
	  rtx eh_note = NULL;

	  if (cfun->can_throw_non_call_exceptions)
	    eh_note = find_reg_note (insn, REG_EH_REGION, NULL);

	  if (last_cmp_valid && can_eliminate_compare (src, eh_note, last_cmp))
	    {
	      if (eh_note)
		need_purge = true;
	      delete_insn (insn);
	      continue;
	    }

	  last_cmp = XCNEW (struct comparison);
	  last_cmp->insn = insn;
	  last_cmp->prev_clobber = last_clobber;
	  last_cmp->in_a = XEXP (src, 0);
	  last_cmp->in_b = XEXP (src, 1);
	  last_cmp->eh_note = eh_note;
	  last_cmp->orig_mode = GET_MODE (src);
	  all_compares.safe_push (last_cmp);

	  /* It's unusual, but be prepared for comparison patterns that
	     also clobber an input, or perhaps a scratch.  */
	  last_clobber = NULL;
	  last_cmp_valid = true;
	}

      /* Notice if this instruction kills the flags register.  */
      else if (bitmap_bit_p (killed, targetm.flags_regnum))
	{
	  /* See if this insn could be the "clobber" that eliminates
	     a future comparison.   */
	  last_clobber = (arithmetic_flags_clobber_p (insn) ? insn : NULL);

	  /* In either case, the previous compare is no longer valid.  */
	  last_cmp = NULL;
	  last_cmp_valid = false;
	}

      /* Notice if this instruction uses the flags register.  */
      else if (last_cmp)
	find_flags_uses_in_insn (last_cmp, insn);

      /* Notice if any of the inputs to the comparison have changed.  */
      if (last_cmp_valid
	  && (bitmap_bit_p (killed, REGNO (last_cmp->in_a))
	      || (REG_P (last_cmp->in_b)
		  && bitmap_bit_p (killed, REGNO (last_cmp->in_b)))))
	last_cmp_valid = false;
    }
Example #20
rtx
fr30_move_double (rtx * operands)
{
  rtx src  = operands[1];
  rtx dest = operands[0];
  enum rtx_code src_code = GET_CODE (src);
  enum rtx_code dest_code = GET_CODE (dest);
  enum machine_mode mode = GET_MODE (dest);
  rtx val;

  start_sequence ();

  if (dest_code == REG)
    {
      if (src_code == REG)
	{
	  int reverse = (REGNO (dest) == REGNO (src) + 1);
	  
	  /* We normally copy the low-numbered register first.  However, if
	     the first register of operand 0 is the same as the second register
	     of operand 1, we must copy in the opposite order.  */
	  emit_insn (gen_rtx_SET (VOIDmode,
				  operand_subword (dest, reverse, TRUE, mode),
				  operand_subword (src,  reverse, TRUE, mode)));
	  
	  emit_insn (gen_rtx_SET (VOIDmode,
			      operand_subword (dest, !reverse, TRUE, mode),
			      operand_subword (src,  !reverse, TRUE, mode)));
	}
      else if (src_code == MEM)
	{
	  rtx addr = XEXP (src, 0);
	  int dregno = REGNO (dest);
	  rtx dest0;
	  rtx dest1;
	  rtx new_mem;
	  
	  /* If the high-address word is used in the address, we
	     must load it last.  Otherwise, load it first.  */
	  int reverse = (refers_to_regno_p (dregno, dregno + 1, addr, 0) != 0);

	  gcc_assert (GET_CODE (addr) == REG);
	  
	  dest0 = operand_subword (dest, reverse, TRUE, mode);
	  dest1 = operand_subword (dest, !reverse, TRUE, mode);

	  if (reverse)
	    {
	      emit_insn (gen_rtx_SET (VOIDmode, dest1,
				      adjust_address (src, SImode, 0)));
	      emit_insn (gen_rtx_SET (SImode, dest0,
				      gen_rtx_REG (SImode, REGNO (addr))));
	      emit_insn (gen_rtx_SET (SImode, dest0,
				      plus_constant (dest0, UNITS_PER_WORD)));

	      new_mem = gen_rtx_MEM (SImode, dest0);
	      MEM_COPY_ATTRIBUTES (new_mem, src);
	      
	      emit_insn (gen_rtx_SET (VOIDmode, dest0, new_mem));
	    }
	  else
	    {
	      emit_insn (gen_rtx_SET (VOIDmode, dest0,
				      adjust_address (src, SImode, 0)));
	      emit_insn (gen_rtx_SET (SImode, dest1,
				      gen_rtx_REG (SImode, REGNO (addr))));
	      emit_insn (gen_rtx_SET (SImode, dest1,
				      plus_constant (dest1, UNITS_PER_WORD)));

	      new_mem = gen_rtx_MEM (SImode, dest1);
	      MEM_COPY_ATTRIBUTES (new_mem, src);
	      
	      emit_insn (gen_rtx_SET (VOIDmode, dest1, new_mem));
	    }
	}
      else if (src_code == CONST_INT || src_code == CONST_DOUBLE)
	{
	  rtx words[2];
	  split_double (src, &words[0], &words[1]);
	  emit_insn (gen_rtx_SET (VOIDmode,
				  operand_subword (dest, 0, TRUE, mode),
				  words[0]));
      
	  emit_insn (gen_rtx_SET (VOIDmode,
				  operand_subword (dest, 1, TRUE, mode),
				  words[1]));
	}
    }
  else if (src_code == REG && dest_code == MEM)
    {
      rtx addr = XEXP (dest, 0);
      rtx src0;
      rtx src1;

      gcc_assert (GET_CODE (addr) == REG);
      
      src0 = operand_subword (src, 0, TRUE, mode);
      src1 = operand_subword (src, 1, TRUE, mode);
      
      emit_insn (gen_rtx_SET (VOIDmode, adjust_address (dest, SImode, 0),
			      src0));

      if (REGNO (addr) == STACK_POINTER_REGNUM
	  || REGNO (addr) == FRAME_POINTER_REGNUM)
	emit_insn (gen_rtx_SET (VOIDmode,
				adjust_address (dest, SImode, UNITS_PER_WORD),
				src1));
      else
	{
	  rtx new_mem;
	  
	  /* We need a scratch register to hold the value of 'address + 4'.
	     We ought to allow gcc to find one for us, but for now, just
	     push one of the source registers.  */
	  emit_insn (gen_movsi_push (src0));
	  emit_insn (gen_movsi_internal (src0, addr));
	  emit_insn (gen_addsi_small_int (src0, src0, GEN_INT (UNITS_PER_WORD)));
	  
	  new_mem = gen_rtx_MEM (SImode, src0);
	  MEM_COPY_ATTRIBUTES (new_mem, dest);
	  
	  emit_insn (gen_rtx_SET (VOIDmode, new_mem, src1));
	  emit_insn (gen_movsi_pop (src0));
	}
    }
  else
    /* This should have been prevented by the constraints on movdi_insn.  */
    gcc_unreachable ();
  
  val = get_insns ();
  end_sequence ();

  return val;
}
Example #21
File: rtl.c  Project: keparo/gcc
int
rtx_equal_p (const_rtx x, const_rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == y)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  /* Some RTL can be compared nonrecursively.  */
  switch (code)
    {
    case REG:
      return (REGNO (x) == REGNO (y));

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case DEBUG_EXPR:
    case VALUE:
    case SCRATCH:
    case CONST_DOUBLE:
    case CONST_INT:
    case CONST_FIXED:
      return 0;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
	     == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
	     == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    {
#ifndef GENERATOR_FILE
	      if (((code == ASM_OPERANDS && i == 6)
		   || (code == ASM_INPUT && i == 1))
		  && locator_eq (XINT (x, i), XINT (y, i)))
		break;
#endif
	      return 0;
	    }
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_p (XVECEXP (x, i, j),  XVECEXP (y, i, j)) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_p (XEXP (x, i), XEXP (y, i)) == 0)
	    return 0;
	  break;

	case 'S':
	case 's':
	  if ((XSTR (x, i) || XSTR (y, i))
	      && (! XSTR (x, i) || ! XSTR (y, i)
		  || strcmp (XSTR (x, i), XSTR (y, i))))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
Example #22
void
fr30_print_operand (FILE *file, rtx x, int code)
{
  rtx x0;
  
  switch (code)
    {
    case '#':
      /* Output a :D if this instruction is delayed.  */
      if (dbr_sequence_length () != 0)
	fputs (":D", file);
      return;
      
    case 'p':
      /* Compute the register name of the second register in a hi/lo
	 register pair.  */
      if (GET_CODE (x) != REG)
	output_operand_lossage ("fr30_print_operand: unrecognized %%p code");
      else
	fprintf (file, "r%d", REGNO (x) + 1);
      return;
      
    case 'b':
      /* Convert GCC's comparison operators into FR30 comparison codes.  */
      switch (GET_CODE (x))
	{
	case EQ:  fprintf (file, "eq"); break;
	case NE:  fprintf (file, "ne"); break;
	case LT:  fprintf (file, "lt"); break;
	case LE:  fprintf (file, "le"); break;
	case GT:  fprintf (file, "gt"); break;
	case GE:  fprintf (file, "ge"); break;
	case LTU: fprintf (file, "c"); break;
	case LEU: fprintf (file, "ls"); break;
	case GTU: fprintf (file, "hi"); break;
	case GEU: fprintf (file, "nc");  break;
	default:
	  output_operand_lossage ("fr30_print_operand: unrecognized %%b code");
	  break;
	}
      return;
      
    case 'B':
      /* Convert GCC's comparison operators into the complementary FR30
	 comparison codes.  */
      switch (GET_CODE (x))
	{
	case EQ:  fprintf (file, "ne"); break;
	case NE:  fprintf (file, "eq"); break;
	case LT:  fprintf (file, "ge"); break;
	case LE:  fprintf (file, "gt"); break;
	case GT:  fprintf (file, "le"); break;
	case GE:  fprintf (file, "lt"); break;
	case LTU: fprintf (file, "nc"); break;
	case LEU: fprintf (file, "hi"); break;
	case GTU: fprintf (file, "ls"); break;
	case GEU: fprintf (file, "c"); break;
	default:
	  output_operand_lossage ("fr30_print_operand: unrecognized %%B code");
	  break;
	}
      return;

    case 'A':
      /* Print a signed byte value as an unsigned value.  */
      if (GET_CODE (x) != CONST_INT)
	output_operand_lossage ("fr30_print_operand: invalid operand to %%A code");
      else
	{
	  HOST_WIDE_INT val;
	  
	  val = INTVAL (x);

	  val &= 0xff;

	  fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
	}
      return;
      
    case 'x':
      if (GET_CODE (x) != CONST_INT
	  || INTVAL (x) < 16
	  || INTVAL (x) > 32)
	output_operand_lossage ("fr30_print_operand: invalid %%x code");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) - 16);
      return;

    case 'F':
      if (GET_CODE (x) != CONST_DOUBLE)
	output_operand_lossage ("fr30_print_operand: invalid %%F code");
      else
	{
	  char str[30];

	  real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x),
			   sizeof (str), 0, 1);
	  fputs (str, file);
	}
      return;
      
    case 0:
      /* Handled below.  */
      break;
      
    default:
      fprintf (stderr, "unknown code = %x\n", code);
      output_operand_lossage ("fr30_print_operand: unknown code");
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fputs (reg_names [REGNO (x)], file);
      break;

    case MEM:
      x0 = XEXP (x,0);
      
      switch (GET_CODE (x0))
	{
	case REG:
	  gcc_assert ((unsigned) REGNO (x0) < ARRAY_SIZE (reg_names));
	  fprintf (file, "@%s", reg_names [REGNO (x0)]);
	  break;

	case PLUS:
	  if (GET_CODE (XEXP (x0, 0)) != REG
	      || REGNO (XEXP (x0, 0)) < FRAME_POINTER_REGNUM
	      || REGNO (XEXP (x0, 0)) > STACK_POINTER_REGNUM
	      || GET_CODE (XEXP (x0, 1)) != CONST_INT)
	    {
	      fprintf (stderr, "bad INDEXed address:");
	      debug_rtx (x);
	      output_operand_lossage ("fr30_print_operand: unhandled MEM");
	    }
	  else if (REGNO (XEXP (x0, 0)) == FRAME_POINTER_REGNUM)
	    {
	      HOST_WIDE_INT val = INTVAL (XEXP (x0, 1));
	      if (val < -(1 << 9) || val > ((1 << 9) - 4))
		{
		  fprintf (stderr, "frame INDEX out of range:");
		  debug_rtx (x);
		  output_operand_lossage ("fr30_print_operand: unhandled MEM");
		}
	      fprintf (file, "@(r14, #" HOST_WIDE_INT_PRINT_DEC ")", val);
	    }
	  else
	    {
	      HOST_WIDE_INT val = INTVAL (XEXP (x0, 1));
	      if (val < 0 || val > ((1 << 6) - 4))
		{
		  fprintf (stderr, "stack INDEX out of range:");
		  debug_rtx (x);
		  output_operand_lossage ("fr30_print_operand: unhandled MEM");
		}
	      fprintf (file, "@(r15, #" HOST_WIDE_INT_PRINT_DEC ")", val);
	    }
	  break;
	  
	case SYMBOL_REF:
	  output_address (x0);
	  break;
	  
	default:
	  fprintf (stderr, "bad MEM code = %x\n", GET_CODE (x0));
	  debug_rtx (x);
	  output_operand_lossage ("fr30_print_operand: unhandled MEM");
	  break;
	}
      break;
      
    case CONST_DOUBLE :
      /* We handle SFmode constants here as output_addr_const doesn't.  */
      if (GET_MODE (x) == SFmode)
	{
	  REAL_VALUE_TYPE d;
	  long l;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  REAL_VALUE_TO_TARGET_SINGLE (d, l);
	  fprintf (file, "0x%08lx", l);
	  break;
	}

      /* Fall through.  Let output_addr_const deal with it.  */
    default:
      output_addr_const (file, x);
      break;
    }

  return;
}
Example #23
File: ree.c  Project: aixoss/gcc
static bool
combine_set_extension (ext_cand *cand, rtx curr_insn, rtx *orig_set)
{
  rtx orig_src = SET_SRC (*orig_set);
  enum machine_mode orig_mode = GET_MODE (SET_DEST (*orig_set));
  rtx new_reg = gen_rtx_REG (cand->mode, REGNO (SET_DEST (*orig_set)));
  rtx new_set;

  /* Merge constants by directly moving the constant into the register under
     some conditions.  Recall that RTL constants are sign-extended.  */
  if (GET_CODE (orig_src) == CONST_INT
      && HOST_BITS_PER_WIDE_INT >= GET_MODE_BITSIZE (cand->mode))
    {
      if (INTVAL (orig_src) >= 0 || cand->code == SIGN_EXTEND)
	new_set = gen_rtx_SET (VOIDmode, new_reg, orig_src);
      else
	{
	  /* Zero-extend the negative constant by masking out the bits outside
	     the source mode.  */
	  rtx new_const_int
	    = GEN_INT (INTVAL (orig_src) & GET_MODE_MASK (orig_mode));
	  new_set = gen_rtx_SET (VOIDmode, new_reg, new_const_int);
	}
    }
  else if (GET_MODE (orig_src) == VOIDmode)
    {
      /* This is mostly due to a call insn that should not be optimized.  */
      return false;
    }
  else if (GET_CODE (orig_src) == cand->code)
    {
      /* Here is a sequence of two extensions.  Try to merge them.  */
      rtx temp_extension
	= gen_rtx_fmt_e (cand->code, cand->mode, XEXP (orig_src, 0));
      rtx simplified_temp_extension = simplify_rtx (temp_extension);
      if (simplified_temp_extension)
        temp_extension = simplified_temp_extension;
      new_set = gen_rtx_SET (VOIDmode, new_reg, temp_extension);
    }
  else if (GET_CODE (orig_src) == IF_THEN_ELSE)
    {
      /* Only IF_THEN_ELSE of phi-type copies are combined.  Otherwise,
         in general, IF_THEN_ELSE should not be combined.  */
      return false;
    }
  else
    {
      /* This is the normal case.  */
      rtx temp_extension
	= gen_rtx_fmt_e (cand->code, cand->mode, orig_src);
      rtx simplified_temp_extension = simplify_rtx (temp_extension);
      if (simplified_temp_extension)
        temp_extension = simplified_temp_extension;
      new_set = gen_rtx_SET (VOIDmode, new_reg, temp_extension);
    }

  /* This change is a part of a group of changes.  Hence,
     validate_change will not try to commit the change.  */
  if (validate_change (curr_insn, orig_set, new_set, true)
      && update_reg_equal_equiv_notes (curr_insn, cand->mode, orig_mode,
				       cand->code))
    {
      if (dump_file)
        {
          fprintf (dump_file,
		   "Tentatively merged extension with definition:\n");
          print_rtl_single (dump_file, curr_insn);
        }
      return true;
    }

  return false;
}
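The constant-merging branch above relies on CONST_INTs being stored sign-extended, so a negative constant can be zero-extended for the narrower source mode simply by ANDing with that mode's mask. Below is a minimal standalone sketch of that arithmetic (plain C with an assumed 16-bit source mode, not GCC code).

#include <stdio.h>
#include <stdint.h>

int main (void)
{
  /* A 16-bit constant as a CONST_INT would hold it: sign-extended.  */
  int64_t orig_src = -2;		/* ...fffffffffffe */
  int64_t mode_mask = 0xffff;		/* GET_MODE_MASK of the 16-bit mode */

  /* Zero-extend by masking out the bits outside the source mode,
     as in the new_const_int computation above.  */
  int64_t zero_extended = orig_src & mode_mask;

  printf ("sign-extended: %lld\n", (long long) orig_src);	/* -2 */
  printf ("zero-extended: %lld\n", (long long) zero_extended);	/* 65534 */
  return 0;
}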
Example #24
static bool
propagate_rtx_1 (rtx *px, rtx old_rtx, rtx new_rtx, int flags)
{
  rtx x = *px, tem = NULL_RTX, op0, op1, op2;
  enum rtx_code code = GET_CODE (x);
  machine_mode mode = GET_MODE (x);
  machine_mode op_mode;
  bool can_appear = (flags & PR_CAN_APPEAR) != 0;
  bool valid_ops = true;

  if (!(flags & PR_HANDLE_MEM) && MEM_P (x) && !MEM_READONLY_P (x))
    {
      /* If unsafe, change MEMs to CLOBBERs or SCRATCHes (to preserve whether
	 they have side effects or not).  */
      *px = (side_effects_p (x)
	     ? gen_rtx_CLOBBER (GET_MODE (x), const0_rtx)
	     : gen_rtx_SCRATCH (GET_MODE (x)));
      return false;
    }

  /* If X is OLD_RTX, return NEW_RTX.  But not if replacing only within an
     address, and we are *not* inside one.  */
  if (x == old_rtx)
    {
      *px = new_rtx;
      return can_appear;
    }

  /* If this is an expression, try recursive substitution.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      op0 = XEXP (x, 0);
      op_mode = GET_MODE (op0);
      valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
      if (op0 == XEXP (x, 0))
	return true;
      tem = simplify_gen_unary (code, mode, op0, op_mode);
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
      valid_ops &= propagate_rtx_1 (&op1, old_rtx, new_rtx, flags);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return true;
      tem = simplify_gen_binary (code, mode, op0, op1);
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      op_mode = GET_MODE (op0) != VOIDmode ? GET_MODE (op0) : GET_MODE (op1);
      valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
      valid_ops &= propagate_rtx_1 (&op1, old_rtx, new_rtx, flags);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return true;
      tem = simplify_gen_relational (code, mode, op_mode, op0, op1);
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      op2 = XEXP (x, 2);
      op_mode = GET_MODE (op0);
      valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
      valid_ops &= propagate_rtx_1 (&op1, old_rtx, new_rtx, flags);
      valid_ops &= propagate_rtx_1 (&op2, old_rtx, new_rtx, flags);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1) && op2 == XEXP (x, 2))
	return true;
      if (op_mode == VOIDmode)
	op_mode = GET_MODE (op0);
      tem = simplify_gen_ternary (code, mode, op_mode, op0, op1, op2);
      break;

    case RTX_EXTRA:
      /* The only case we try to handle is a SUBREG.  */
      if (code == SUBREG)
	{
          op0 = XEXP (x, 0);
	  valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
          if (op0 == XEXP (x, 0))
	    return true;
	  tem = simplify_gen_subreg (mode, op0, GET_MODE (SUBREG_REG (x)),
				     SUBREG_BYTE (x));
	}
      break;

    case RTX_OBJ:
      if (code == MEM && x != new_rtx)
	{
	  rtx new_op0;
	  op0 = XEXP (x, 0);

	  /* There are some addresses that we cannot work on.  */
	  if (!can_simplify_addr (op0))
	    return true;

	  op0 = new_op0 = targetm.delegitimize_address (op0);
	  valid_ops &= propagate_rtx_1 (&new_op0, old_rtx, new_rtx,
					flags | PR_CAN_APPEAR);

	  /* Discard transformations that we do not want to carry out.  */
	  if (!valid_ops
	      || new_op0 == op0
	      || !(GET_MODE (new_op0) == GET_MODE (op0)
		   || GET_MODE (new_op0) == VOIDmode))
	    return true;

	  canonicalize_address (new_op0);

	  /* Copy propagations are always ok.  Otherwise check the costs.  */
	  if (!(REG_P (old_rtx) && REG_P (new_rtx))
	      && !should_replace_address (op0, new_op0, GET_MODE (x),
					  MEM_ADDR_SPACE (x),
	      			 	  flags & PR_OPTIMIZE_FOR_SPEED))
	    return true;

	  tem = replace_equiv_address_nv (x, new_op0);
	}

      else if (code == LO_SUM)
	{
          op0 = XEXP (x, 0);
          op1 = XEXP (x, 1);

	  /* The only simplification we do attempts to remove references to op0
	     or make it constant -- in both cases, op0's invalidity will not
	     make the result invalid.  */
	  propagate_rtx_1 (&op0, old_rtx, new_rtx, flags | PR_CAN_APPEAR);
	  valid_ops &= propagate_rtx_1 (&op1, old_rtx, new_rtx, flags);
          if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	    return true;

	  /* (lo_sum (high x) x) -> x  */
	  if (GET_CODE (op0) == HIGH && rtx_equal_p (XEXP (op0, 0), op1))
	    tem = op1;
	  else
	    tem = gen_rtx_LO_SUM (mode, op0, op1);

	  /* OP1 is likely not a legitimate address, otherwise there would have
	     been no LO_SUM.  We want it to disappear if it is invalid, return
	     false in that case.  */
	  return memory_address_p (mode, tem);
	}

      else if (code == REG)
	{
	  if (rtx_equal_p (x, old_rtx))
	    {
              *px = new_rtx;
              return can_appear;
	    }
	}
      break;

    default:
      break;
    }

  /* No change, no trouble.  */
  if (tem == NULL_RTX)
    return true;

  *px = tem;

  /* Allow replacements that simplify operations on a vector or complex
     value to a component.  The most prominent case is
     (subreg ([vec_]concat ...)).   */
  if (REG_P (tem) && !HARD_REGISTER_P (tem)
      && (VECTOR_MODE_P (GET_MODE (new_rtx))
	  || COMPLEX_MODE_P (GET_MODE (new_rtx)))
      && GET_MODE (tem) == GET_MODE_INNER (GET_MODE (new_rtx)))
    return true;

  /* The replacement we made so far is valid, if all of the recursive
     replacements were valid, or we could simplify everything to
     a constant.  */
  return valid_ops || can_appear || CONSTANT_P (tem);
}
Example #25
static basic_block
create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
{
  edge eg;
  edge_iterator ei;
  basic_block pre_exit;

  /* The only non-call predecessor at this stage is a block with a
     fallthrough edge; there can be at most one, but there could be
     none at all, e.g. when exit is called.  */
  pre_exit = 0;
  FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (eg->flags & EDGE_FALLTHRU)
      {
	basic_block src_bb = eg->src;
	rtx_insn *last_insn;
	rtx ret_reg;

	gcc_assert (!pre_exit);
	/* If this function returns a value at the end, we have to
	   insert the final mode switch before the return value copy
	   to its hard register.  */
	if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) == 1
	    && NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
	    && GET_CODE (PATTERN (last_insn)) == USE
	    && GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
	  {
	    int ret_start = REGNO (ret_reg);
	    int nregs = hard_regno_nregs[ret_start][GET_MODE (ret_reg)];
	    int ret_end = ret_start + nregs;
	    bool short_block = false;
	    bool multi_reg_return = false;
	    bool forced_late_switch = false;
	    rtx_insn *before_return_copy;

	    do
	      {
		rtx_insn *return_copy = PREV_INSN (last_insn);
		rtx return_copy_pat, copy_reg;
		int copy_start, copy_num;
		int j;

		if (NONDEBUG_INSN_P (return_copy))
		  {
		    /* When using SJLJ exceptions, the call to the
		       unregister function is inserted between the
		       clobber of the return value and the copy.
		       We do not want to split the block before this
		       or any other call; if we have not found the
		       copy yet, the copy must have been deleted.  */
		    if (CALL_P (return_copy))
		      {
			short_block = true;
			break;
		      }
		    return_copy_pat = PATTERN (return_copy);
		    switch (GET_CODE (return_copy_pat))
		      {
		      case USE:
			/* Skip USEs of multiple return registers.
			   __builtin_apply pattern is also handled here.  */
			if (GET_CODE (XEXP (return_copy_pat, 0)) == REG
			    && (targetm.calls.function_value_regno_p
				(REGNO (XEXP (return_copy_pat, 0)))))
			  {
			    multi_reg_return = true;
			    last_insn = return_copy;
			    continue;
			  }
			break;

		      case ASM_OPERANDS:
			/* Skip barrier insns.  */
			if (!MEM_VOLATILE_P (return_copy_pat))
			  break;

			/* Fall through.  */

		      case ASM_INPUT:
		      case UNSPEC_VOLATILE:
			last_insn = return_copy;
			continue;

		      default:
			break;
		      }

		    /* If the return register is not (in its entirety)
		       likely spilled, the return copy might be
		       partially or completely optimized away.  */
		    return_copy_pat = single_set (return_copy);
		    if (!return_copy_pat)
		      {
			return_copy_pat = PATTERN (return_copy);
			if (GET_CODE (return_copy_pat) != CLOBBER)
			  break;
			else if (!optimize)
			  {
			    /* This might be (clobber (reg [<result>]))
			       when not optimizing.  Then check if
			       the previous insn is the clobber for
			       the return register.  */
			    copy_reg = SET_DEST (return_copy_pat);
			    if (GET_CODE (copy_reg) == REG
				&& !HARD_REGISTER_NUM_P (REGNO (copy_reg)))
			      {
				if (INSN_P (PREV_INSN (return_copy)))
				  {
				    return_copy = PREV_INSN (return_copy);
				    return_copy_pat = PATTERN (return_copy);
				    if (GET_CODE (return_copy_pat) != CLOBBER)
				      break;
				  }
			      }
			  }
		      }
		    copy_reg = SET_DEST (return_copy_pat);
		    if (GET_CODE (copy_reg) == REG)
		      copy_start = REGNO (copy_reg);
		    else if (GET_CODE (copy_reg) == SUBREG
			     && GET_CODE (SUBREG_REG (copy_reg)) == REG)
		      copy_start = REGNO (SUBREG_REG (copy_reg));
		    else
		      {
			/* When control reaches the end of a non-void
			   function, there are no return copy insns at all.
			   This avoids an ICE (internal compiler error) on
			   such an invalid function.  */
			if (ret_start + nregs == ret_end)
			  short_block = true;
			break;
		      }
		    if (!targetm.calls.function_value_regno_p (copy_start))
		      copy_num = 0;
		    else
		      copy_num
			= hard_regno_nregs[copy_start][GET_MODE (copy_reg)];

		    /* If the return register is not likely spilled - as is
		       the case for floating point on SH4 - then it might
		       be set by an arithmetic operation that needs a
		       different mode than the exit block.  */
		    for (j = n_entities - 1; j >= 0; j--)
		      {
			int e = entity_map[j];
			int mode =
			  targetm.mode_switching.needed (e, return_copy);

			if (mode != num_modes[e]
			    && mode != targetm.mode_switching.exit (e))
			  break;
		      }
		    if (j >= 0)
		      {
			/* __builtin_return emits a sequence of loads to all
			   return registers.  One of them might require
			   another mode than MODE_EXIT, even if it is
			   unrelated to the return value, so we want to put
			   the final mode switch after it.  */
			if (multi_reg_return
			    && targetm.calls.function_value_regno_p
			        (copy_start))
			  forced_late_switch = true;

			/* For the SH4, floating point loads depend on fpscr,
			   thus we might need to put the final mode switch
			   after the return value copy.  That is still OK,
			   because a floating point return value does not
			   conflict with address reloads.  */
			if (copy_start >= ret_start
			    && copy_start + copy_num <= ret_end
			    && OBJECT_P (SET_SRC (return_copy_pat)))
			  forced_late_switch = true;
			break;
		      }
		    if (copy_num == 0)
		      {
			last_insn = return_copy;
			continue;
		      }

		    if (copy_start >= ret_start
			&& copy_start + copy_num <= ret_end)
		      nregs -= copy_num;
		    else if (!multi_reg_return
			     || !targetm.calls.function_value_regno_p
				 (copy_start))
		      break;
		    last_insn = return_copy;
		  }
		/* ??? Exception handling can lead to the return value
		   copy being already separated from the return value use,
		   as in unwind-dw2.c.
		   Similarly, conditionally returning without a value,
		   and conditionally using builtin_return can lead to an
		   isolated use.  */
		if (return_copy == BB_HEAD (src_bb))
		  {
		    short_block = true;
		    break;
		  }
		last_insn = return_copy;
	      }
	    while (nregs);

	    /* If we didn't see a full return value copy, verify that there
	       is a plausible reason for this.  If some, but not all of the
	       return register is likely spilled, we can expect that there
	       is a copy for the likely spilled part.  */
	    gcc_assert (!nregs
			|| forced_late_switch
			|| short_block
			|| !(targetm.class_likely_spilled_p
			     (REGNO_REG_CLASS (ret_start)))
			|| (nregs
			    != hard_regno_nregs[ret_start][GET_MODE (ret_reg)])
			/* For multi-hard-register floating point
			   values, sometimes the likely-spilled part
			   is ordinarily copied first, then the other
			   part is set with an arithmetic operation.
			   This doesn't actually cause reload
			   failures, so let it pass.  */
			|| (GET_MODE_CLASS (GET_MODE (ret_reg)) != MODE_INT
			    && nregs != 1));

	    if (!NOTE_INSN_BASIC_BLOCK_P (last_insn))
	      {
		before_return_copy
		  = emit_note_before (NOTE_INSN_DELETED, last_insn);
		/* Instructions preceding LAST_INSN in the same block might
		   require a different mode than MODE_EXIT, so if we might
		   have such instructions, keep them in a separate block
		   from pre_exit.  */
		src_bb = split_block (src_bb,
				      PREV_INSN (before_return_copy))->dest;
	      }
	    else
	      before_return_copy = last_insn;
	    pre_exit = split_block (src_bb, before_return_copy)->src;
	  }
	else
	  {
	    pre_exit = split_edge (eg);
	  }
      }

  return pre_exit;
}
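
For reference, a hedged sketch (not part of mode-switching.c; the helper name is made up) of the shape create_pre_exit starts from: an exit predecessor whose last insn is a bare (use (reg ...)) of the return value.

/* Illustrative helper: does BB end in a "(use (reg ...))" of a value held
   in a hard register?  This mirrors the test above, before the backwards
   walk that looks for the return-value copy.  */
static bool
ends_in_return_value_use_p (basic_block bb)
{
  rtx_insn *last = BB_END (bb);

  return (NONJUMP_INSN_P (last)
	  && GET_CODE (PATTERN (last)) == USE
	  && REG_P (XEXP (PATTERN (last), 0))
	  && HARD_REGISTER_P (XEXP (PATTERN (last), 0)));
}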
Exemple #26
0
static void
gen_insn (rtx insn, int lineno)
{
  struct pattern_stats stats;
  int i;

  /* See if the pattern for this insn ends with a group of CLOBBERs of (hard)
     registers or MATCH_SCRATCHes.  If so, store away the information for
     later.  */

  if (XVEC (insn, 1))
    {
      int has_hard_reg = 0;

      for (i = XVECLEN (insn, 1) - 1; i > 0; i--)
	{
	  if (GET_CODE (XVECEXP (insn, 1, i)) != CLOBBER)
	    break;

	  if (REG_P (XEXP (XVECEXP (insn, 1, i), 0)))
	    has_hard_reg = 1;
	  else if (GET_CODE (XEXP (XVECEXP (insn, 1, i), 0)) != MATCH_SCRATCH)
	    break;
	}

      if (i != XVECLEN (insn, 1) - 1)
	{
	  struct clobber_pat *p;
	  struct clobber_ent *link = XNEW (struct clobber_ent);
	  int j;

	  link->code_number = insn_code_number;

	  /* See if any previous CLOBBER_LIST entry is the same as this
	     one.  */

	  for (p = clobber_list; p; p = p->next)
	    {
	      if (p->first_clobber != i + 1
		  || XVECLEN (p->pattern, 1) != XVECLEN (insn, 1))
		continue;

	      for (j = i + 1; j < XVECLEN (insn, 1); j++)
		{
		  rtx old_rtx = XEXP (XVECEXP (p->pattern, 1, j), 0);
		  rtx new_rtx = XEXP (XVECEXP (insn, 1, j), 0);

		  /* OLD_RTX and NEW_RTX are the same if both are to be a
		     SCRATCH of the same mode,
		     or if both are registers of the same mode and number.  */
		  if (! (GET_MODE (old_rtx) == GET_MODE (new_rtx)
			 && ((GET_CODE (old_rtx) == MATCH_SCRATCH
			      && GET_CODE (new_rtx) == MATCH_SCRATCH)
			     || (REG_P (old_rtx) && REG_P (new_rtx)
				 && REGNO (old_rtx) == REGNO (new_rtx)))))
		    break;
		}

	      if (j == XVECLEN (insn, 1))
		break;
	    }

	  if (p == 0)
	    {
	      p = XNEW (struct clobber_pat);

	      p->insns = 0;
	      p->pattern = insn;
	      p->first_clobber = i + 1;
	      p->next = clobber_list;
	      p->has_hard_reg = has_hard_reg;
	      clobber_list = p;
	    }

	  link->next = p->insns;
	  p->insns = link;
	}
Exemple #27
0
static void
gen_exp (rtx x, enum rtx_code subroutine_type, char *used)
{
  RTX_CODE code;
  int i;
  int len;
  const char *fmt;

  if (x == 0)
    {
      printf ("NULL_RTX");
      return;
    }

  code = GET_CODE (x);

  switch (code)
    {
    case MATCH_OPERAND:
    case MATCH_DUP:
      if (used)
	{
	  if (used[XINT (x, 0)])
	    {
	      printf ("copy_rtx (operand%d)", XINT (x, 0));
	      return;
	    }
	  used[XINT (x, 0)] = 1;
	}
      printf ("operand%d", XINT (x, 0));
      return;

    case MATCH_OP_DUP:
      printf ("gen_rtx_fmt_");
      for (i = 0; i < XVECLEN (x, 1); i++)
	printf ("e");
      printf (" (GET_CODE (operand%d), ", XINT (x, 0));
      if (GET_MODE (x) == VOIDmode)
	printf ("GET_MODE (operand%d)", XINT (x, 0));
      else
	printf ("%smode", GET_MODE_NAME (GET_MODE (x)));
      for (i = 0; i < XVECLEN (x, 1); i++)
	{
	  printf (",\n\t\t");
	  gen_exp (XVECEXP (x, 1, i), subroutine_type, used);
	}
      printf (")");
      return;

    case MATCH_OPERATOR:
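      /* Emit a gen_rtx_fmt_e...e call whose rtx code is taken from operand
	 XINT (x, 0) at expand time, whose mode comes from the MATCH_OPERATOR
	 itself, and whose operands are generated from the expressions in
	 XVEC (x, 2).  */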
      printf ("gen_rtx_fmt_");
      for (i = 0; i < XVECLEN (x, 2); i++)
	printf ("e");
      printf (" (GET_CODE (operand%d)", XINT (x, 0));
      printf (", %smode", GET_MODE_NAME (GET_MODE (x)));
      for (i = 0; i < XVECLEN (x, 2); i++)
	{
	  printf (",\n\t\t");
	  gen_exp (XVECEXP (x, 2, i), subroutine_type, used);
	}
      printf (")");
      return;

    case MATCH_PARALLEL:
    case MATCH_PAR_DUP:
      printf ("operand%d", XINT (x, 0));
      return;

    case MATCH_SCRATCH:
      gen_rtx_scratch (x, subroutine_type);
      return;

    case PC:
      printf ("pc_rtx");
      return;
    case RETURN:
      printf ("ret_rtx");
      return;
    case SIMPLE_RETURN:
      printf ("simple_return_rtx");
      return;
    case CLOBBER:
      if (REG_P (XEXP (x, 0)))
	{
	  printf ("gen_hard_reg_clobber (%smode, %i)", GET_MODE_NAME (GET_MODE (XEXP (x, 0))),
			  			     REGNO (XEXP (x, 0)));
	  return;
	}
      break;

    case CC0:
      printf ("cc0_rtx");
      return;

    case CONST_INT:
      if (INTVAL (x) == 0)
	printf ("const0_rtx");
      else if (INTVAL (x) == 1)
	printf ("const1_rtx");
      else if (INTVAL (x) == -1)
	printf ("constm1_rtx");
      else if (-MAX_SAVED_CONST_INT <= INTVAL (x)
	  && INTVAL (x) <= MAX_SAVED_CONST_INT)
	printf ("const_int_rtx[MAX_SAVED_CONST_INT + (%d)]",
		(int) INTVAL (x));
      else if (INTVAL (x) == STORE_FLAG_VALUE)
	printf ("const_true_rtx");
      else
	{
	  printf ("GEN_INT (");
	  printf (HOST_WIDE_INT_PRINT_DEC_C, INTVAL (x));
	  printf (")");
	}
      return;

    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_WIDE_INT:
      /* These shouldn't be written in MD files.  Instead, the appropriate
	 routines in varasm.c should be called.  */
      gcc_unreachable ();

    default:
      break;
    }

  printf ("gen_rtx_");
  print_code (code);
  printf (" (%smode", GET_MODE_NAME (GET_MODE (x)));

  fmt = GET_RTX_FORMAT (code);
  len = GET_RTX_LENGTH (code);
  for (i = 0; i < len; i++)
    {
      if (fmt[i] == '0')
	break;
      printf (",\n\t");
      switch (fmt[i])
	{
	case 'e': case 'u':
	  gen_exp (XEXP (x, i), subroutine_type, used);
	  break;

	case 'i':
	  printf ("%u", XINT (x, i));
	  break;

	case 's':
	  printf ("\"%s\"", XSTR (x, i));
	  break;

	case 'E':
	  {
	    int j;
	    printf ("gen_rtvec (%d", XVECLEN (x, i));
	    for (j = 0; j < XVECLEN (x, i); j++)
	      {
		printf (",\n\t\t");
		gen_exp (XVECEXP (x, i, j), subroutine_type, used);
	      }
	    printf (")");
	    break;
	  }

	default:
	  gcc_unreachable ();
	}
    }
  printf (")");
}
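
To make the CONST_INT case above concrete, here is roughly the text the generated emitter receives for a few constants (an approximate sketch, not additional generator code).

/* Approximate gen_exp output for CONST_INT inputs, per the case above:

     (const_int 0)      ->  const0_rtx
     (const_int 1)      ->  const1_rtx
     (const_int -1)     ->  constm1_rtx
     (const_int N), |N| <= MAX_SAVED_CONST_INT
                        ->  const_int_rtx[MAX_SAVED_CONST_INT + (N)]
     any other N        ->  GEN_INT (N)

   STORE_FLAG_VALUE, when it is not one of the shared constants above,
   prints as const_true_rtx.  */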
Exemple #28
0
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
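      /* FALLTHRU */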
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
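      /* FALLTHRU */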
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
	       inv (prob));
      break;

    case COND_EXPR:
      {
	rtx label1 = gen_label_rtx ();
	if (!if_true_label || !if_false_label)
	  {
	    drop_through_label = gen_label_rtx ();
	    if (!if_true_label)
	      if_true_label = drop_through_label;
	    if (!if_false_label)
	      if_false_label = drop_through_label;
	  }

        do_pending_stack_adjust ();
	do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
	do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
	break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && have_insn_for (COMPARE, TYPE_MODE (type)))
          {
	    do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		     prob);
            break;
          }
        goto normal;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
	 See if the former is preferred for jump tests and restore it
	 if so.  */
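      /* For example, with C == 3 the folded form tests ((x >> 3) & 1);
	 the code below restores the (x & 8) form when prefer_and_bit_test
	 says the masked form is the cheaper jump test on this target.  */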
      if (integer_onep (TREE_OPERAND (exp, 1)))
	{
	  tree exp0 = TREE_OPERAND (exp, 0);
	  rtx set_label, clr_label;
	  int setclr_prob = prob;

	  /* Strip narrowing integral type conversions.  */
	  while (CONVERT_EXPR_P (exp0)
		 && TREE_OPERAND (exp0, 0) != error_mark_node
		 && TYPE_PRECISION (TREE_TYPE (exp0))
		    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
	    exp0 = TREE_OPERAND (exp0, 0);

	  /* "exp0 ^ 1" inverts the sense of the single bit test.  */
	  if (TREE_CODE (exp0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (exp0, 1)))
	    {
	      exp0 = TREE_OPERAND (exp0, 0);
	      clr_label = if_true_label;
	      set_label = if_false_label;
	      setclr_prob = inv (prob);
	    }
	  else
	    {
	      clr_label = if_false_label;
	      set_label = if_true_label;
	    }

	  if (TREE_CODE (exp0) == RSHIFT_EXPR)
	    {
	      tree arg = TREE_OPERAND (exp0, 0);
	      tree shift = TREE_OPERAND (exp0, 1);
	      tree argtype = TREE_TYPE (arg);
	      if (TREE_CODE (shift) == INTEGER_CST
		  && compare_tree_int (shift, 0) >= 0
		  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
		  && prefer_and_bit_test (TYPE_MODE (argtype),
					  TREE_INT_CST_LOW (shift)))
		{
		  unsigned HOST_WIDE_INT mask
		    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
		  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
				   build_int_cstu (argtype, mask)),
			   clr_label, set_label, setclr_prob);
		  break;
		}
	    }
	}

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
	  do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
	  || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
		       false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
	{
	  /* Compare promoted variables in their promoted mode.  */
	  if (SUBREG_PROMOTED_VAR_P (temp)
	      && REG_P (XEXP (temp, 0)))
	    temp = XEXP (temp, 0);
	  else
	    temp = copy_to_reg (temp);
	}
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
			       NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
			       GET_MODE (temp), NULL_RTX,
			       if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
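
A minimal caller-side sketch (hypothetical helper name) of the common use of do_jump: branch around the then-part when the condition tree is false, with -1 meaning no branch probability is known.

static void
expand_if_then_sketch (tree cond)
{
  rtx else_label = gen_label_rtx ();

  /* Jump to ELSE_LABEL when COND is false; fall through when it is true.  */
  do_jump (cond, else_label, NULL_RTX, -1);

  /* ... expand the then-part here ... */

  emit_label (else_label);
}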
Exemple #29
0
static void
gen_int_relational (enum rtx_code code,	/* comparison to perform */
		    rtx result,		/* where to store the comparison result */
		    rtx cmp0,		/* first operand */
		    rtx cmp1,		/* second operand */
		    rtx destination)	/* branch target, or NULL for a compare */
{
  machine_mode mode;
  int branch_p;

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Is this a branch or a compare?  */
  branch_p = (destination != 0);

  /* The instruction set does not support LE, LT, LEU or LTU directly, so
     swap the operands and use GE, GT, GEU or GTU instead.  */
  switch (code)
    {
    case LE:
    case LT:
    case LEU:
    case LTU:
      {
	rtx temp;

	code = swap_condition (code);
	temp = cmp0;
	cmp0 = cmp1;
	cmp1 = temp;
	break;
      }
    default:
      break;
    }

  if (branch_p)
    {
      rtx insn, cond, label;

      /* Operands must be in registers.  */
      if (!register_operand (cmp0, mode))
	cmp0 = force_reg (mode, cmp0);
      if (!register_operand (cmp1, mode))
	cmp1 = force_reg (mode, cmp1);

      /* Generate conditional branch instruction.  */
      cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
      label = gen_rtx_LABEL_REF (VOIDmode, destination);
      insn = gen_rtx_SET (pc_rtx, gen_rtx_IF_THEN_ELSE (VOIDmode,
							cond, label, pc_rtx));
      emit_jump_insn (insn);
    }
  else
    {
      /* We can't have const_ints in cmp0, other than 0.  */
      if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
	cmp0 = force_reg (mode, cmp0);

      /* If the comparison is against a constant integer outside the
         range the instruction accepts, move it into a register.  */
      if (GET_CODE (cmp1) == CONST_INT)
	{
	  switch (code)
	    {
	    case EQ:
	    case NE:
	    case LE:
	    case LT:
	    case GE:
	    case GT:
	      if (!satisfies_constraint_K (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    case LEU:
	    case LTU:
	    case GEU:
	    case GTU:
	      if (!satisfies_constraint_L (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}

      /* Generate compare instruction.  */
      emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
    }
}
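
A hedged sketch of how such a helper is typically driven from a port's expanders (the wrapper names are illustrative, not taken from any particular backend): with a destination label it emits a conditional branch, with a NULL destination it stores the comparison result.

/* Illustrative cbranchsi4-style expander: operands[0] is the comparison
   rtx, operands[1] and operands[2] the values, operands[3] the label.  */
void
example_expand_conditional_branch (rtx operands[])
{
  gen_int_relational (GET_CODE (operands[0]), NULL_RTX,
		      operands[1], operands[2], operands[3]);
}

/* Illustrative cstoresi4-style expander: no branch target, so the
   comparison result lands in operands[0].  */
void
example_expand_scc (rtx operands[])
{
  gen_int_relational (GET_CODE (operands[1]), operands[0],
		      operands[2], operands[3], NULL_RTX);
}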
Exemple #30
0
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
			 enum machine_mode mode, rtx size, rtx if_false_label,
			 rtx if_true_label, int prob)
{
  rtx tem;
  rtx dummy_label = NULL_RTX;
  rtx last;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
	  || code == ORDERED || code == UNORDERED
	  || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
	  || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
	  || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
	{
          tem = if_true_label;
          if_true_label = if_false_label;
          if_false_label = tem;
	  code = rcode;
	  prob = inv (prob);
	}
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
						 op0, op1)))
    {
      if (CONSTANT_P (tem))
	{
	  rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
		      ? if_false_label : if_true_label;
	  if (label)
	    emit_jump (label);
	  return;
	}

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
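      /* The target has no direct conditional jump for this comparison in
	 this integer mode, so lower it to the word-by-word helpers: each
	 condition maps onto either a by-parts "greater" test or a by-parts
	 equality test, swapping the operands and/or inverting the labels
	 and probability as needed.  */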
      switch (code)
	{
	case LTU:
	  do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
					if_false_label, if_true_label, prob);
	  break;

	case LEU:
	  do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case GTU:
	  do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
					if_false_label, if_true_label, prob);
	  break;

	case GEU:
	  do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case LT:
	  do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
					if_false_label, if_true_label, prob);
	  break;

	case LE:
	  do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case GT:
	  do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
					if_false_label, if_true_label, prob);
	  break;

	case GE:
	  do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case EQ:
	  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
					 if_true_label, prob);
	  break;

	case NE:
	  do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
					 if_false_label, inv (prob));
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  && ! can_compare_p (code, mode, ccp_jump)
	  && can_compare_p (swap_condition (code), mode, ccp_jump))
	{
	  rtx tmp;
	  code = swap_condition (code);
	  tmp = op0;
	  op0 = op1;
	  op1 = tmp;
	}

      else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	       && ! can_compare_p (code, mode, ccp_jump)

	       /* Never split ORDERED and UNORDERED.  These must be implemented.  */
	       && (code != ORDERED && code != UNORDERED)

               /* Split a floating-point comparison if we can jump on other
	          conditions...  */
	       && (have_insn_for (COMPARE, mode)

	           /* ... or if there is no libcall for it.  */
	           || code_to_optab[code] == NULL))
        {
	  enum rtx_code first_code;
	  bool and_them = split_comparison (code, mode, &first_code, &code);

	  /* If there are no NaNs, the first comparison should always fall
	     through.  */
	  if (!HONOR_NANS (mode))
	    gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));

	  else
	    {
	      if (and_them)
		{
		  rtx dest_label;
		  /* If we only jump if true, just bypass the second jump.  */
		  if (! if_false_label)
		    {
		      if (! dummy_label)
		        dummy_label = gen_label_rtx ();
		      dest_label = dummy_label;
		    }
		  else
		    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
					   size, dest_label, NULL_RTX, prob);
		}
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
					 size, NULL_RTX, if_true_label, prob);
	    }
	}

      last = get_last_insn ();
      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			       if_true_label);
      if (prob != -1 && profile_status != PROFILE_ABSENT)
	{
	  for (last = NEXT_INSN (last);
	       last && NEXT_INSN (last);
	       last = NEXT_INSN (last))
	    if (JUMP_P (last))
	      break;
	  if (!last
	      || !JUMP_P (last)
	      || NEXT_INSN (last)
	      || !any_condjump_p (last))
	    {
	      if (dump_file)
		fprintf (dump_file, "Failed to add probability note\n");
	    }
	  else
	    {
	      gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
	      add_reg_note (last, REG_BR_PROB, GEN_INT (prob));
	    }
	}
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}
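
Finally, a minimal usage sketch for do_compare_rtx_and_jump itself (hypothetical helper, assuming SImode operands): branch to LABEL when the two values compare equal, with no size rtx and no probability estimate.

static void
emit_jump_if_equal_sketch (rtx a, rtx b, rtx label)
{
  /* EQ, so signedness is irrelevant; NULL_RTX size; fall through when the
     values differ; -1 means no branch-probability estimate.  */
  do_compare_rtx_and_jump (a, b, EQ, /*unsignedp=*/0, SImode, NULL_RTX,
			   NULL_RTX, label, -1);
}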