/* Process MEMs in SET_DEST destinations. We must not process this together with REG SET_DESTs, but must do it separately, lest when we see [(set (reg:SI foo) (bar)) (set (mem:SI (reg:SI foo)) (baz))] struct_equiv_block_eq could get confused and assume that (reg:SI foo) is not live before this instruction. */ static bool set_dest_addr_equiv_p (rtx x, rtx y, struct equiv_info *info) { enum rtx_code code = GET_CODE (x); int length; const char *format; int i; if (code != GET_CODE (y)) return false; if (code == MEM) return rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info); /* Process subexpressions. */ length = GET_RTX_LENGTH (code); format = GET_RTX_FORMAT (code); for (i = 0; i < length; ++i) { switch (format[i]) { case 'V': case 'E': if (XVECLEN (x, i) != XVECLEN (y, i)) return false; if (XVEC (x, i) != 0) { int j; for (j = 0; j < XVECLEN (x, i); ++j) { if (! set_dest_addr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j), info)) return false; } } break; case 'e': if (! set_dest_addr_equiv_p (XEXP (x, i), XEXP (y, i), info)) return false; break; default: break; } } return true; }
/* Change pseudos in *LOC to their coalescing group representatives. */ static bool substitute (rtx *loc) { int i, regno; const char *fmt; enum rtx_code code; bool res; if (*loc == NULL_RTX) return false; code = GET_CODE (*loc); if (code == REG) { regno = REGNO (*loc); if (regno < FIRST_PSEUDO_REGISTER || first_coalesced_pseudo[regno] == regno) return false; *loc = regno_reg_rtx[first_coalesced_pseudo[regno]]; return true; } res = false; fmt = GET_RTX_FORMAT (code); for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) { if (fmt[i] == 'e') { if (substitute (&XEXP (*loc, i))) res = true; } else if (fmt[i] == 'E') { int j; for (j = XVECLEN (*loc, i) - 1; j >= 0; j--) if (substitute (&XVECEXP (*loc, i, j))) res = true; } } return res; }
int references_value_p (const_rtx x, int only_useless) { const enum rtx_code code = GET_CODE (x); const char *fmt = GET_RTX_FORMAT (code); int i, j; if (GET_CODE (x) == VALUE && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0)) return 1; for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) { if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless)) return 1; else if (fmt[i] == 'E') for (j = 0; j < XVECLEN (x, i); j++) if (references_value_p (XVECEXP (x, i, j), only_useless)) return 1; } return 0; }
static int uses_addressof (rtx x) { RTX_CODE code; int i, j; const char *fmt; if (x == NULL_RTX) return 0; code = GET_CODE (x); if (code == ADDRESSOF || x == current_function_internal_arg_pointer) return 1; if (code == MEM) return 0; /* Scan all subexpressions. */ fmt = GET_RTX_FORMAT (code); for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) { if (*fmt == 'e') { if (uses_addressof (XEXP (x, i))) return 1; } else if (*fmt == 'E') { for (j = 0; j < XVECLEN (x, i); j++) if (uses_addressof (XVECEXP (x, i, j))) return 1; } } return 0; }
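/* The four helpers above all share the same generic traversal idiom: look up the operand letters with GET_RTX_FORMAT / GET_RTX_LENGTH and recurse into 'e' (rtx) and 'E' (rtvec) operands.  A minimal sketch of just that idiom, assuming the usual rtl.h macros are in scope; count_regs is a hypothetical example, not a function from these sources.  */
static int
count_regs (rtx x)
{
  const char *fmt;
  int i, j, n = 0;

  if (x == NULL_RTX)
    return 0;
  if (REG_P (x))
    return 1;

  /* Walk every rtx ('e') and rtx-vector ('E') operand.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        n += count_regs (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          n += count_regs (XVECEXP (x, i, j));
    }
  return n;
}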
static void walk_insn_part (rtx part, int recog_p, int non_pc_set_src) { int i, j; RTX_CODE code; const char *format_ptr; if (part == 0) return; code = GET_CODE (part); switch (code) { case CLOBBER: clobbers_seen_this_insn++; break; case MATCH_OPERAND: if (XINT (part, 0) > max_recog_operands) max_recog_operands = XINT (part, 0); return; case MATCH_OP_DUP: case MATCH_PAR_DUP: ++dup_operands_seen_this_insn; /* FALLTHRU */ case MATCH_SCRATCH: case MATCH_PARALLEL: case MATCH_OPERATOR: if (XINT (part, 0) > max_recog_operands) max_recog_operands = XINT (part, 0); /* Now scan the rtl's in the vector inside the MATCH_OPERATOR or MATCH_PARALLEL. */ break; case LABEL_REF: if (GET_CODE (LABEL_REF_LABEL (part)) == MATCH_OPERAND || GET_CODE (LABEL_REF_LABEL (part)) == MATCH_DUP) break; return; case MATCH_DUP: ++dup_operands_seen_this_insn; if (XINT (part, 0) > max_recog_operands) max_recog_operands = XINT (part, 0); return; case CC0: if (recog_p) have_cc0_flag = 1; return; case LO_SUM: if (recog_p) have_lo_sum_flag = 1; return; case ROTATE: if (recog_p) have_rotate_flag = 1; return; case ROTATERT: if (recog_p) have_rotatert_flag = 1; return; case SET: walk_insn_part (SET_DEST (part), 0, recog_p); walk_insn_part (SET_SRC (part), recog_p, GET_CODE (SET_DEST (part)) != PC); return; case IF_THEN_ELSE: /* Only consider this machine as having a conditional move if the two arms of the IF_THEN_ELSE are both MATCH_OPERAND. Otherwise, we have some specific IF_THEN_ELSE construct (like the doz instruction on the RS/6000) that can't be used in the general context we want it for. */ if (recog_p && non_pc_set_src && GET_CODE (XEXP (part, 1)) == MATCH_OPERAND && GET_CODE (XEXP (part, 2)) == MATCH_OPERAND) have_cmove_flag = 1; break; case COND_EXEC: if (recog_p) have_cond_exec_flag = 1; break; case REG: case CONST_INT: case SYMBOL_REF: case PC: return; default: break; } format_ptr = GET_RTX_FORMAT (GET_CODE (part)); for (i = 0; i < GET_RTX_LENGTH (GET_CODE (part)); i++) switch (*format_ptr++) { case 'e': case 'u': walk_insn_part (XEXP (part, i), recog_p, non_pc_set_src); break; case 'E': if (XVEC (part, i) != NULL) for (j = 0; j < XVECLEN (part, i); j++) walk_insn_part (XVECEXP (part, i, j), recog_p, non_pc_set_src); break; } }
/* Check if *XP is equivalent to Y. Until an unreconcilable difference is found, use in-group changes with validate_change on *XP to make register assignments agree. It is the (not necessarily direct) caller's responsibility to verify / confirm / cancel these changes, as appropriate. RVALUE indicates if the processed piece of rtl is used as a destination, in which case we can't have different registers being an input. Returns nonzero if the two pieces of rtl have been identified as equivalent, zero otherwise. RVALUE == 0: destination RVALUE == 1: source RVALUE == -1: source, ignore SET_DEST of SET / clobber. */ bool rtx_equiv_p (rtx *xp, rtx y, int rvalue, struct equiv_info *info) { rtx x = *xp; enum rtx_code code; int length; const char *format; int i; if (!y || !x) return x == y; code = GET_CODE (y); if (code != REG && x == y) return true; if (GET_CODE (x) != code || GET_MODE (x) != GET_MODE (y)) return false; /* ??? could extend to allow CONST_INT inputs. */ switch (code) { case REG: { unsigned x_regno = REGNO (x); unsigned y_regno = REGNO (y); int x_common_live, y_common_live; if (reload_completed && (x_regno >= FIRST_PSEUDO_REGISTER || y_regno >= FIRST_PSEUDO_REGISTER)) { /* We should only see this in REG_NOTEs. */ gcc_assert (!info->live_update); /* Returning false will cause us to remove the notes. */ return false; } #ifdef STACK_REGS /* After reg-stack, can only accept literal matches of stack regs. */ if (info->mode & CLEANUP_POST_REGSTACK && (IN_RANGE (x_regno, FIRST_STACK_REG, LAST_STACK_REG) || IN_RANGE (y_regno, FIRST_STACK_REG, LAST_STACK_REG))) return x_regno == y_regno; #endif /* If the register is a locally live one in one block, the corresponding one must be locally live in the other, too, and match of identical regnos doesn't apply. */ if (REGNO_REG_SET_P (info->x_local_live, x_regno)) { if (!REGNO_REG_SET_P (info->y_local_live, y_regno)) return false; } else if (REGNO_REG_SET_P (info->y_local_live, y_regno)) return false; else if (x_regno == y_regno) { if (!rvalue && info->cur.input_valid && (reg_overlap_mentioned_p (x, info->x_input) || reg_overlap_mentioned_p (x, info->y_input))) return false; /* Update liveness information. */ if (info->live_update && assign_reg_reg_set (info->common_live, x, rvalue)) info->cur.version++; return true; } x_common_live = REGNO_REG_SET_P (info->common_live, x_regno); y_common_live = REGNO_REG_SET_P (info->common_live, y_regno); if (x_common_live != y_common_live) return false; else if (x_common_live) { if (! rvalue || info->input_cost < 0 || no_new_pseudos) return false; /* If info->live_update is not set, we are processing notes. We then allow a match with x_input / y_input found in a previous pass. */ if (info->live_update && !info->cur.input_valid) { info->cur.input_valid = true; info->x_input = x; info->y_input = y; info->cur.input_count += optimize_size ? 2 : 1; if (info->input_reg && GET_MODE (info->input_reg) != GET_MODE (info->x_input)) info->input_reg = NULL_RTX; if (!info->input_reg) info->input_reg = gen_reg_rtx (GET_MODE (info->x_input)); } else if ((info->live_update ? ! info->cur.input_valid : ! info->x_input) || ! rtx_equal_p (x, info->x_input) || ! rtx_equal_p (y, info->y_input)) return false; validate_change (info->cur.x_start, xp, info->input_reg, 1); } else { int x_nregs = (x_regno >= FIRST_PSEUDO_REGISTER ? 1 : hard_regno_nregs[x_regno][GET_MODE (x)]); int y_nregs = (y_regno >= FIRST_PSEUDO_REGISTER ? 
1 : hard_regno_nregs[y_regno][GET_MODE (y)]); int size = GET_MODE_SIZE (GET_MODE (x)); enum machine_mode x_mode = GET_MODE (x); unsigned x_regno_i, y_regno_i; int x_nregs_i, y_nregs_i, size_i; int local_count = info->cur.local_count; /* This might be a register local to each block. See if we have it already registered. */ for (i = local_count - 1; i >= 0; i--) { x_regno_i = REGNO (info->x_local[i]); x_nregs_i = (x_regno_i >= FIRST_PSEUDO_REGISTER ? 1 : hard_regno_nregs[x_regno_i][GET_MODE (x)]); y_regno_i = REGNO (info->y_local[i]); y_nregs_i = (y_regno_i >= FIRST_PSEUDO_REGISTER ? 1 : hard_regno_nregs[y_regno_i][GET_MODE (y)]); size_i = GET_MODE_SIZE (GET_MODE (info->x_local[i])); /* If we have a new pair of registers that is wider than an old pair and encloses it with matching offsets, remove the old pair. If we find a matching, wider, old pair, use the old one. If the width is the same, use the old one if the modes match, but the new if they don't. We don't want to get too fancy with subreg_regno_offset here, so we just test two straightforward cases each. */ if (info->live_update && (x_mode != GET_MODE (info->x_local[i]) ? size >= size_i : size > size_i)) { /* If the new pair is fully enclosing a matching existing pair, remove the old one. N.B. because we are removing one entry here, the check below if we have space for a new entry will succeed. */ if ((x_regno <= x_regno_i && x_regno + x_nregs >= x_regno_i + x_nregs_i && x_nregs == y_nregs && x_nregs_i == y_nregs_i && x_regno - x_regno_i == y_regno - y_regno_i) || (x_regno == x_regno_i && y_regno == y_regno_i && x_nregs >= x_nregs_i && y_nregs >= y_nregs_i)) { info->cur.local_count = --local_count; info->x_local[i] = info->x_local[local_count]; info->y_local[i] = info->y_local[local_count]; continue; } } else { /* If the new pair is fully enclosed within a matching existing pair, succeed. */ if (x_regno >= x_regno_i && x_regno + x_nregs <= x_regno_i + x_nregs_i && x_nregs == y_nregs && x_nregs_i == y_nregs_i && x_regno - x_regno_i == y_regno - y_regno_i) break; if (x_regno == x_regno_i && y_regno == y_regno_i && x_nregs <= x_nregs_i && y_nregs <= y_nregs_i) break; } /* Any other overlap causes a match failure. */ if (x_regno + x_nregs > x_regno_i && x_regno_i + x_nregs_i > x_regno) return false; if (y_regno + y_nregs > y_regno_i && y_regno_i + y_nregs_i > y_regno) return false; } if (i < 0) { /* Not found. Create a new entry if possible. */ if (!info->live_update || info->cur.local_count >= STRUCT_EQUIV_MAX_LOCAL) return false; info->x_local[info->cur.local_count] = x; info->y_local[info->cur.local_count] = y; info->cur.local_count++; info->cur.version++; } note_local_live (info, x, y, rvalue); } return true; } case SET: gcc_assert (rvalue < 0); /* Ignore the destination's role as a destination. Still, we have to consider input registers embedded in the addresses of a MEM. N.B., we process the rvalue aspect of STRICT_LOW_PART / ZERO_EXTEND / SIGN_EXTEND along with their lvalue aspect. */ if (!set_dest_addr_equiv_p (SET_DEST (x), SET_DEST (y), info)) return false; /* Process source. */ return rtx_equiv_p (&SET_SRC (x), SET_SRC (y), 1, info); case PRE_MODIFY: /* Process destination. */ if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info)) return false; /* Process source. */ return rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 1, info); case POST_MODIFY: { rtx x_dest0, x_dest1; /* Process destination. 
*/ x_dest0 = XEXP (x, 0); gcc_assert (REG_P (x_dest0)); if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info)) return false; x_dest1 = XEXP (x, 0); /* validate_change might have changed the destination. Put it back so that we can do a proper match for its role as an input. */ XEXP (x, 0) = x_dest0; if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info)) return false; gcc_assert (x_dest1 == XEXP (x, 0)); /* Process source. */ return rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 1, info); } case CLOBBER: gcc_assert (rvalue < 0); return true; /* Some special forms are also rvalues when they appear in lvalue positions. However, we must not try to match a register after we have already altered it with validate_change, so we consider the rvalue aspect while we process the lvalue. */ case STRICT_LOW_PART: case ZERO_EXTEND: case SIGN_EXTEND: { rtx x_inner, y_inner; enum rtx_code code; int change; if (rvalue) break; x_inner = XEXP (x, 0); y_inner = XEXP (y, 0); if (GET_MODE (x_inner) != GET_MODE (y_inner)) return false; code = GET_CODE (x_inner); if (code != GET_CODE (y_inner)) return false; /* The address of a MEM is an input that will be processed during rvalue == -1 processing. */ if (code == SUBREG) { if (SUBREG_BYTE (x_inner) != SUBREG_BYTE (y_inner)) return false; x = x_inner; x_inner = SUBREG_REG (x_inner); y_inner = SUBREG_REG (y_inner); if (GET_MODE (x_inner) != GET_MODE (y_inner)) return false; code = GET_CODE (x_inner); if (code != GET_CODE (y_inner)) return false; } if (code == MEM) return true; gcc_assert (code == REG); if (! rtx_equiv_p (&XEXP (x, 0), y_inner, rvalue, info)) return false; if (REGNO (x_inner) == REGNO (y_inner)) { change = assign_reg_reg_set (info->common_live, x_inner, 1); info->cur.version++; } else change = note_local_live (info, x_inner, y_inner, 1); gcc_assert (change); return true; } /* The AUTO_INC / POST_MODIFY / PRE_MODIFY sets are modelled to take place during input processing; however, that is benign, since they are paired with reads. */ case MEM: return !rvalue || rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), rvalue, info); case POST_INC: case POST_DEC: case PRE_INC: case PRE_DEC: return (rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info) && rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info)); case PARALLEL: /* If this is a top-level PATTERN PARALLEL, we expect the caller to have handled the SET_DESTs. A complex or vector PARALLEL can be identified by having a mode. */ gcc_assert (rvalue < 0 || GET_MODE (x) != VOIDmode); break; case LABEL_REF: /* Check special tablejump match case. */ if (XEXP (y, 0) == info->y_label) return (XEXP (x, 0) == info->x_label); /* We can't assume nonlocal labels have their following insns yet. */ if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y)) return XEXP (x, 0) == XEXP (y, 0); /* Two label-refs are equivalent if they point at labels in the same position in the instruction stream. */ return (next_real_insn (XEXP (x, 0)) == next_real_insn (XEXP (y, 0))); case SYMBOL_REF: return XSTR (x, 0) == XSTR (y, 0); /* Some rtl is guaranteed to be shared, or unique; if we didn't match EQ equality above, they aren't the same. */ case CONST_INT: case CODE_LABEL: return false; default: break; } /* For commutative operations, the RTXs match if the operands match in any order. 
*/ if (targetm.commutative_p (x, UNKNOWN)) return ((rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), rvalue, info) && rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), rvalue, info)) || (rtx_equiv_p (&XEXP (x, 0), XEXP (y, 1), rvalue, info) && rtx_equiv_p (&XEXP (x, 1), XEXP (y, 0), rvalue, info))); /* Process subexpressions - this is similar to rtx_equal_p. */ length = GET_RTX_LENGTH (code); format = GET_RTX_FORMAT (code); for (i = 0; i < length; ++i) { switch (format[i]) { case 'w': if (XWINT (x, i) != XWINT (y, i)) return false; break; case 'n': case 'i': if (XINT (x, i) != XINT (y, i)) return false; break; case 'V': case 'E': if (XVECLEN (x, i) != XVECLEN (y, i)) return false; if (XVEC (x, i) != 0) { int j; for (j = 0; j < XVECLEN (x, i); ++j) { if (! rtx_equiv_p (&XVECEXP (x, i, j), XVECEXP (y, i, j), rvalue, info)) return false; } } break; case 'e': if (! rtx_equiv_p (&XEXP (x, i), XEXP (y, i), rvalue, info)) return false; break; case 'S': case 's': if ((XSTR (x, i) || XSTR (y, i)) && (! XSTR (x, i) || ! XSTR (y, i) || strcmp (XSTR (x, i), XSTR (y, i)))) return false; break; case 'u': /* These are just backpointers, so they don't matter. */ break; case '0': case 't': break; /* It is believed that rtx's at this level will never contain anything but integers and other rtx's, except for within LABEL_REFs and SYMBOL_REFs. */ default: gcc_unreachable (); } } return true; }
static void gen_exp (rtx x, enum rtx_code subroutine_type, char *used) { RTX_CODE code; int i; int len; const char *fmt; if (x == 0) { printf ("NULL_RTX"); return; } code = GET_CODE (x); switch (code) { case MATCH_OPERAND: case MATCH_DUP: if (used) { if (used[XINT (x, 0)]) { printf ("copy_rtx (operand%d)", XINT (x, 0)); return; } used[XINT (x, 0)] = 1; } printf ("operand%d", XINT (x, 0)); return; case MATCH_OP_DUP: printf ("gen_rtx_fmt_"); for (i = 0; i < XVECLEN (x, 1); i++) printf ("e"); printf (" (GET_CODE (operand%d), ", XINT (x, 0)); if (GET_MODE (x) == VOIDmode) printf ("GET_MODE (operand%d)", XINT (x, 0)); else printf ("%smode", GET_MODE_NAME (GET_MODE (x))); for (i = 0; i < XVECLEN (x, 1); i++) { printf (",\n\t\t"); gen_exp (XVECEXP (x, 1, i), subroutine_type, used); } printf (")"); return; case MATCH_OPERATOR: printf ("gen_rtx_fmt_"); for (i = 0; i < XVECLEN (x, 2); i++) printf ("e"); printf (" (GET_CODE (operand%d)", XINT (x, 0)); printf (", %smode", GET_MODE_NAME (GET_MODE (x))); for (i = 0; i < XVECLEN (x, 2); i++) { printf (",\n\t\t"); gen_exp (XVECEXP (x, 2, i), subroutine_type, used); } printf (")"); return; case MATCH_PARALLEL: case MATCH_PAR_DUP: printf ("operand%d", XINT (x, 0)); return; case MATCH_SCRATCH: gen_rtx_scratch (x, subroutine_type); return; case PC: printf ("pc_rtx"); return; case RETURN: printf ("ret_rtx"); return; case SIMPLE_RETURN: printf ("simple_return_rtx"); return; case CLOBBER: if (REG_P (XEXP (x, 0))) { printf ("gen_hard_reg_clobber (%smode, %i)", GET_MODE_NAME (GET_MODE (XEXP (x, 0))), REGNO (XEXP (x, 0))); return; } break; case CC0: printf ("cc0_rtx"); return; case CONST_INT: if (INTVAL (x) == 0) printf ("const0_rtx"); else if (INTVAL (x) == 1) printf ("const1_rtx"); else if (INTVAL (x) == -1) printf ("constm1_rtx"); else if (-MAX_SAVED_CONST_INT <= INTVAL (x) && INTVAL (x) <= MAX_SAVED_CONST_INT) printf ("const_int_rtx[MAX_SAVED_CONST_INT + (%d)]", (int) INTVAL (x)); else if (INTVAL (x) == STORE_FLAG_VALUE) printf ("const_true_rtx"); else { printf ("GEN_INT ("); printf (HOST_WIDE_INT_PRINT_DEC_C, INTVAL (x)); printf (")"); } return; case CONST_DOUBLE: case CONST_FIXED: case CONST_WIDE_INT: /* These shouldn't be written in MD files. Instead, the appropriate routines in varasm.c should be called. */ gcc_unreachable (); default: break; } printf ("gen_rtx_"); print_code (code); printf (" (%smode", GET_MODE_NAME (GET_MODE (x))); fmt = GET_RTX_FORMAT (code); len = GET_RTX_LENGTH (code); for (i = 0; i < len; i++) { if (fmt[i] == '0') break; printf (",\n\t"); switch (fmt[i]) { case 'e': case 'u': gen_exp (XEXP (x, i), subroutine_type, used); break; case 'i': printf ("%u", XINT (x, i)); break; case 's': printf ("\"%s\"", XSTR (x, i)); break; case 'E': { int j; printf ("gen_rtvec (%d", XVECLEN (x, i)); for (j = 0; j < XVECLEN (x, i); j++) { printf (",\n\t\t"); gen_exp (XVECEXP (x, i, j), subroutine_type, used); } printf (")"); break; } default: gcc_unreachable (); } } printf (")"); }
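/* For orientation, a sketch of what gen_exp prints (an assumed example, not captured from a real genemit run): for the md expression (plus:SI (match_operand:SI 1 "" "") (const_int -1)), the MATCH_OPERAND case prints "operand1", the CONST_INT case picks the shared "constm1_rtx", and the default case wraps them, giving roughly:

     gen_rtx_PLUS (SImode,
	operand1,
	constm1_rtx)  */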
/* Recursive hash function for RTL X. */ static hashval_t rtx_hash (rtx x) { int i, j; enum rtx_code code; const char *fmt; hashval_t val = 0; if (x == 0) return val; code = GET_CODE (x); val += (int) code + 4095; /* Some RTL can be compared nonrecursively. */ switch (code) { case REG: return val + REGNO (x); case LABEL_REF: return iterative_hash_object (XEXP (x, 0), val); case SYMBOL_REF: return iterative_hash_object (XSTR (x, 0), val); case SCRATCH: case CONST_DOUBLE: case CONST_INT: case CONST_VECTOR: return val; default: break; } /* Hash the elements. */ fmt = GET_RTX_FORMAT (code); for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) { switch (fmt[i]) { case 'w': val += XWINT (x, i); break; case 'n': case 'i': val += XINT (x, i); break; case 'V': case 'E': val += XVECLEN (x, i); for (j = 0; j < XVECLEN (x, i); j++) val += rtx_hash (XVECEXP (x, i, j)); break; case 'e': val += rtx_hash (XEXP (x, i)); break; case 'S': case 's': val += htab_hash_string (XSTR (x, i)); break; case 'u': case '0': case 't': break; /* It is believed that rtx's at this level will never contain anything but integers and other rtx's, except for within LABEL_REFs and SYMBOL_REFs. */ default: abort (); } } return val; }
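/* A small sketch of the intended property: structurally identical rtl hashes identically, so rtx_hash can key a table whose equality test is a structural comparison such as rtx_equal_p.  Hypothetical illustration, assuming gen_rtx_REG and gcc_assert are available in this context.  */
static void
rtx_hash_example (void)
{
  rtx a = gen_rtx_REG (SImode, 2);  /* (reg:SI 2) */
  rtx b = gen_rtx_REG (SImode, 2);  /* a distinct but identical rtx */

  /* For REG the hash is the code-based seed plus REGNO, so the two
     separately allocated registers hash the same.  */
  gcc_assert (rtx_hash (a) == rtx_hash (b));
}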
int rtx_equal_p (const_rtx x, const_rtx y) { int i; int j; enum rtx_code code; const char *fmt; if (x == y) return 1; if (x == 0 || y == 0) return 0; code = GET_CODE (x); /* Rtx's of different codes cannot be equal. */ if (code != GET_CODE (y)) return 0; /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. (REG:SI x) and (REG:HI x) are NOT equivalent. */ if (GET_MODE (x) != GET_MODE (y)) return 0; /* MEMs referring to different address space are not equivalent. */ if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y)) return 0; /* Some RTL can be compared nonrecursively. */ switch (code) { case REG: return (REGNO (x) == REGNO (y)); case LABEL_REF: return XEXP (x, 0) == XEXP (y, 0); case SYMBOL_REF: return XSTR (x, 0) == XSTR (y, 0); case DEBUG_EXPR: case VALUE: case SCRATCH: case CONST_DOUBLE: case CONST_INT: case CONST_FIXED: return 0; case DEBUG_IMPLICIT_PTR: return DEBUG_IMPLICIT_PTR_DECL (x) == DEBUG_IMPLICIT_PTR_DECL (y); case DEBUG_PARAMETER_REF: return DEBUG_PARAMETER_REF_DECL (x) == DEBUG_PARAMETER_REF_DECL (y); case ENTRY_VALUE: return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y)); default: break; } /* Compare the elements. If any pair of corresponding elements fail to match, return 0 for the whole thing. */ fmt = GET_RTX_FORMAT (code); for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) { switch (fmt[i]) { case 'w': if (XWINT (x, i) != XWINT (y, i)) return 0; break; case 'n': case 'i': if (XINT (x, i) != XINT (y, i)) { #ifndef GENERATOR_FILE if (((code == ASM_OPERANDS && i == 6) || (code == ASM_INPUT && i == 1)) && locator_eq (XINT (x, i), XINT (y, i))) break; #endif return 0; } break; case 'V': case 'E': /* Two vectors must have the same length. */ if (XVECLEN (x, i) != XVECLEN (y, i)) return 0; /* And the corresponding elements must match. */ for (j = 0; j < XVECLEN (x, i); j++) if (rtx_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0) return 0; break; case 'e': if (rtx_equal_p (XEXP (x, i), XEXP (y, i)) == 0) return 0; break; case 'S': case 's': if ((XSTR (x, i) || XSTR (y, i)) && (! XSTR (x, i) || ! XSTR (y, i) || strcmp (XSTR (x, i), XSTR (y, i)))) return 0; break; case 'u': /* These are just backpointers, so they don't matter. */ break; case '0': case 't': break; /* It is believed that rtx's at this level will never contain anything but integers and other rtx's, except for within LABEL_REFs and SYMBOL_REFs. */ default: gcc_unreachable (); } } return 1; }
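/* A minimal illustration of the mode check documented above ("(REG:SI x) and (REG:HI x) are NOT equivalent").  A sketch only, assuming gen_rtx_REG and gcc_assert are available in this context.  */
static void
rtx_equal_p_example (void)
{
  rtx r_si = gen_rtx_REG (SImode, 1);  /* (reg:SI 1) */
  rtx r_hi = gen_rtx_REG (HImode, 1);  /* (reg:HI 1) */

  gcc_assert (rtx_equal_p (r_si, r_si));   /* same regno, same mode */
  gcc_assert (! rtx_equal_p (r_si, r_hi)); /* same regno, modes differ */
}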
int rtx_equal_p (rtx x, rtx y) { int i; int j; enum rtx_code code; const char *fmt; if (x == y) return 1; if (x == 0 || y == 0) return 0; code = GET_CODE (x); /* Rtx's of different codes cannot be equal. */ if (code != GET_CODE (y)) return 0; /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. (REG:SI x) and (REG:HI x) are NOT equivalent. */ if (GET_MODE (x) != GET_MODE (y)) return 0; /* Some RTL can be compared nonrecursively. */ switch (code) { case REG: return (REGNO (x) == REGNO (y)); case LABEL_REF: return XEXP (x, 0) == XEXP (y, 0); case SYMBOL_REF: return XSTR (x, 0) == XSTR (y, 0); case SCRATCH: case CONST_DOUBLE: case CONST_INT: return 0; default: break; } /* Compare the elements. If any pair of corresponding elements fail to match, return 0 for the whole thing. */ fmt = GET_RTX_FORMAT (code); for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) { switch (fmt[i]) { case 'w': if (XWINT (x, i) != XWINT (y, i)) return 0; break; case 'n': case 'i': if (XINT (x, i) != XINT (y, i)) return 0; break; case 'V': case 'E': /* Two vectors must have the same length. */ if (XVECLEN (x, i) != XVECLEN (y, i)) return 0; /* And the corresponding elements must match. */ for (j = 0; j < XVECLEN (x, i); j++) if (rtx_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0) return 0; break; case 'e': if (rtx_equal_p (XEXP (x, i), XEXP (y, i)) == 0) return 0; break; case 'S': case 's': if ((XSTR (x, i) || XSTR (y, i)) && (! XSTR (x, i) || ! XSTR (y, i) || strcmp (XSTR (x, i), XSTR (y, i)))) return 0; break; case 'u': /* These are just backpointers, so they don't matter. */ break; case '0': case 't': break; /* It is believed that rtx's at this level will never contain anything but integers and other rtx's, except for within LABEL_REFs and SYMBOL_REFs. */ default: gcc_unreachable (); } } return 1; }
/* Scan rtx X for references to elimination source or target registers in contexts that would prevent the elimination from happening. Update the table of eliminables to reflect the changed state. MEM_MODE is the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */ static void mark_not_eliminable (rtx x, machine_mode mem_mode) { enum rtx_code code = GET_CODE (x); struct lra_elim_table *ep; int i, j; const char *fmt; switch (code) { case PRE_INC: case POST_INC: case PRE_DEC: case POST_DEC: case POST_MODIFY: case PRE_MODIFY: if (XEXP (x, 0) == stack_pointer_rtx && ((code != PRE_MODIFY && code != POST_MODIFY) || (GET_CODE (XEXP (x, 1)) == PLUS && XEXP (x, 0) == XEXP (XEXP (x, 1), 0) && CONST_INT_P (XEXP (XEXP (x, 1), 1))))) { int size = GET_MODE_SIZE (mem_mode); #ifdef PUSH_ROUNDING /* If more bytes than MEM_MODE are pushed, account for them. */ size = PUSH_ROUNDING (size); #endif if (code == PRE_DEC || code == POST_DEC) curr_sp_change -= size; else if (code == PRE_INC || code == POST_INC) curr_sp_change += size; else if (code == PRE_MODIFY || code == POST_MODIFY) curr_sp_change += INTVAL (XEXP (XEXP (x, 1), 1)); } else if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER) { /* If we modify the source of an elimination rule, disable it. Do the same if it is the destination and not the hard frame register. */ for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) if (ep->from_rtx == XEXP (x, 0) || (ep->to_rtx == XEXP (x, 0) && ep->to_rtx != hard_frame_pointer_rtx)) setup_can_eliminate (ep, false); } return; case USE: if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER) /* If using a hard register that is the source of an eliminate we still think can be performed, note it cannot be performed since we don't know how this hard register is used. */ for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) if (ep->from_rtx == XEXP (x, 0) && ep->to_rtx != hard_frame_pointer_rtx) setup_can_eliminate (ep, false); return; case CLOBBER: if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER) /* If clobbering a hard register that is the replacement register for an elimination we still think can be performed, note that it cannot be performed. Otherwise, we need not be concerned about it. */ for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) if (ep->to_rtx == XEXP (x, 0) && ep->to_rtx != hard_frame_pointer_rtx) setup_can_eliminate (ep, false); return; case SET: if (SET_DEST (x) == stack_pointer_rtx && GET_CODE (SET_SRC (x)) == PLUS && XEXP (SET_SRC (x), 0) == SET_DEST (x) && CONST_INT_P (XEXP (SET_SRC (x), 1))) { curr_sp_change += INTVAL (XEXP (SET_SRC (x), 1)); return; } if (! REG_P (SET_DEST (x)) || REGNO (SET_DEST (x)) >= FIRST_PSEUDO_REGISTER) mark_not_eliminable (SET_DEST (x), mem_mode); else { /* See if this is setting the replacement hard register for an elimination. If DEST is the hard frame pointer, we do nothing because we assume that all assignments to the frame pointer are for non-local gotos and are being done at a time when they are valid and do not disturb anything else. Some machines want to eliminate a fake argument pointer (or even a fake frame pointer) with either the real frame pointer or the stack pointer. Assignments to the hard frame pointer must not prevent this elimination. 
*/ for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) if (ep->to_rtx == SET_DEST (x) && SET_DEST (x) != hard_frame_pointer_rtx) setup_can_eliminate (ep, false); } mark_not_eliminable (SET_SRC (x), mem_mode); return; case MEM: /* Our only special processing is to pass the mode of the MEM to our recursive call. */ mark_not_eliminable (XEXP (x, 0), GET_MODE (x)); return; default: break; } fmt = GET_RTX_FORMAT (code); for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) { if (*fmt == 'e') mark_not_eliminable (XEXP (x, i), mem_mode); else if (*fmt == 'E') for (j = 0; j < XVECLEN (x, i); j++) mark_not_eliminable (XVECEXP (x, i, j), mem_mode); } }
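/* For illustration (a sketch, not taken from these sources): a stack adjustment whose SET_SRC has the form (plus (reg sp) (const_int -32)) goes through the SET case above, which reads the amount with INTVAL and adds -32 to curr_sp_change, while a (pre_dec (reg sp)) seen inside an SImode MEM subtracts GET_MODE_SIZE (SImode), possibly rounded by PUSH_ROUNDING, instead.  */
static void
sp_change_example (void)
{
  /* Hypothetical source operand of such a stack adjustment.  */
  rtx src = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (-32));

  /* The SET case recognizes SET_DEST == stack_pointer_rtx with such a
     source and accumulates INTVAL (XEXP (src, 1)), i.e. -32.  */
  gcc_assert (INTVAL (XEXP (src, 1)) == -32);
}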
/* Scan X and replace any eliminable registers (such as fp) with a replacement (such as sp) if SUBST_P, plus an offset. The offset is a change in the offset between the eliminable register and its substitution if UPDATE_P, or the full offset if FULL_P, or otherwise zero. If FULL_P, we also use the SP offsets for elimination to SP. If UPDATE_P, use UPDATE_SP_OFFSET for updating offsets of registers eliminable to SP. If UPDATE_SP_OFFSET is non-zero, don't use the difference of the offset and the previous offset. MEM_MODE is the mode of an enclosing MEM. We need this to know how much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a MEM, we are allowed to replace a sum of a hard register and the constant zero with the hard register, which we cannot do outside a MEM. In addition, we need to record the fact that a hard register is referenced outside a MEM. If we make full substitution to SP for non-null INSN, add the insn sp offset. */ rtx lra_eliminate_regs_1 (rtx_insn *insn, rtx x, machine_mode mem_mode, bool subst_p, bool update_p, HOST_WIDE_INT update_sp_offset, bool full_p) { enum rtx_code code = GET_CODE (x); struct lra_elim_table *ep; rtx new_rtx; int i, j; const char *fmt; int copied = 0; lra_assert (!update_p || !full_p); lra_assert (update_sp_offset == 0 || (!subst_p && update_p && !full_p)); if (! current_function_decl) return x; switch (code) { CASE_CONST_ANY: case CONST: case SYMBOL_REF: case CODE_LABEL: case PC: case CC0: case ASM_INPUT: case ADDR_VEC: case ADDR_DIFF_VEC: case RETURN: return x; case REG: /* First handle the case where we encounter a bare hard register that is eliminable. Replace it with a PLUS. */ if ((ep = get_elimination (x)) != NULL) { rtx to = subst_p ? ep->to_rtx : ep->from_rtx; if (update_sp_offset != 0) { if (ep->to_rtx == stack_pointer_rtx) return plus_constant (Pmode, to, update_sp_offset); return to; } else if (update_p) return plus_constant (Pmode, to, ep->offset - ep->previous_offset); else if (full_p) return plus_constant (Pmode, to, ep->offset - (insn != NULL_RTX && ep->to_rtx == stack_pointer_rtx ? lra_get_insn_recog_data (insn)->sp_offset : 0)); else return to; } return x; case PLUS: /* If this is the sum of an eliminable register and a constant, rework the sum. */ if (REG_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1))) { if ((ep = get_elimination (XEXP (x, 0))) != NULL) { HOST_WIDE_INT offset; rtx to = subst_p ? ep->to_rtx : ep->from_rtx; if (! update_p && ! full_p) return gen_rtx_PLUS (Pmode, to, XEXP (x, 1)); if (update_sp_offset != 0) offset = ep->to_rtx == stack_pointer_rtx ? update_sp_offset : 0; else offset = (update_p ? ep->offset - ep->previous_offset : ep->offset); if (full_p && insn != NULL_RTX && ep->to_rtx == stack_pointer_rtx) offset -= lra_get_insn_recog_data (insn)->sp_offset; if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == -offset) return to; else return gen_rtx_PLUS (Pmode, to, plus_constant (Pmode, XEXP (x, 1), offset)); } /* If the hard register is not eliminable, we are done since the other operand is a constant. */ return x; } /* If this is part of an address, we want to bring any constant to the outermost PLUS. We will do this by doing hard register replacement in our operands and seeing if a constant shows up in one of them. Note that there is no risk of modifying the structure of the insn, since we only get called for its operands, thus we are either modifying the address inside a MEM, or something like an address operand of a load-address insn. 
*/ { rtx new0 = lra_eliminate_regs_1 (insn, XEXP (x, 0), mem_mode, subst_p, update_p, update_sp_offset, full_p); rtx new1 = lra_eliminate_regs_1 (insn, XEXP (x, 1), mem_mode, subst_p, update_p, update_sp_offset, full_p); new0 = move_plus_up (new0); new1 = move_plus_up (new1); if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)) return form_sum (new0, new1); } return x; case MULT: /* If this is the product of an eliminable hard register and a constant, apply the distributive law and move the constant out so that we have (plus (mult ..) ..). This is needed in order to keep load-address insns valid. This case is pathological. We ignore the possibility of overflow here. */ if (REG_P (XEXP (x, 0)) && CONST_INT_P (XEXP (x, 1)) && (ep = get_elimination (XEXP (x, 0))) != NULL) { rtx to = subst_p ? ep->to_rtx : ep->from_rtx; if (update_sp_offset != 0) { if (ep->to_rtx == stack_pointer_rtx) return plus_constant (Pmode, gen_rtx_MULT (Pmode, to, XEXP (x, 1)), update_sp_offset * INTVAL (XEXP (x, 1))); return gen_rtx_MULT (Pmode, to, XEXP (x, 1)); } else if (update_p) return plus_constant (Pmode, gen_rtx_MULT (Pmode, to, XEXP (x, 1)), (ep->offset - ep->previous_offset) * INTVAL (XEXP (x, 1))); else if (full_p) { HOST_WIDE_INT offset = ep->offset; if (insn != NULL_RTX && ep->to_rtx == stack_pointer_rtx) offset -= lra_get_insn_recog_data (insn)->sp_offset; return plus_constant (Pmode, gen_rtx_MULT (Pmode, to, XEXP (x, 1)), offset * INTVAL (XEXP (x, 1))); } else return gen_rtx_MULT (Pmode, to, XEXP (x, 1)); } /* fall through */ case CALL: case COMPARE: /* See comments before PLUS about handling MINUS. */ case MINUS: case DIV: case UDIV: case MOD: case UMOD: case AND: case IOR: case XOR: case ROTATERT: case ROTATE: case ASHIFTRT: case LSHIFTRT: case ASHIFT: case NE: case EQ: case GE: case GT: case GEU: case GTU: case LE: case LT: case LEU: case LTU: { rtx new0 = lra_eliminate_regs_1 (insn, XEXP (x, 0), mem_mode, subst_p, update_p, update_sp_offset, full_p); rtx new1 = XEXP (x, 1) ? lra_eliminate_regs_1 (insn, XEXP (x, 1), mem_mode, subst_p, update_p, update_sp_offset, full_p) : 0; if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)) return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1); } return x; case EXPR_LIST: /* If we have something in XEXP (x, 0), the usual case, eliminate it. */ if (XEXP (x, 0)) { new_rtx = lra_eliminate_regs_1 (insn, XEXP (x, 0), mem_mode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != XEXP (x, 0)) { /* If this is a REG_DEAD note, it is not valid anymore. Using the eliminated version could result in creating a REG_DEAD note for the stack or frame pointer. */ if (REG_NOTE_KIND (x) == REG_DEAD) return (XEXP (x, 1) ? lra_eliminate_regs_1 (insn, XEXP (x, 1), mem_mode, subst_p, update_p, update_sp_offset, full_p) : NULL_RTX); x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1)); } } /* fall through */ case INSN_LIST: case INT_LIST: /* Now do eliminations in the rest of the chain. If this was an EXPR_LIST, this might result in allocating more memory than is strictly needed, but it simplifies the code. */ if (XEXP (x, 1)) { new_rtx = lra_eliminate_regs_1 (insn, XEXP (x, 1), mem_mode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != XEXP (x, 1)) return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx); } return x; case PRE_INC: case POST_INC: case PRE_DEC: case POST_DEC: /* We do not support elimination of a register that is modified. elimination_effects has already made sure that this does not happen. 
*/ return x; case PRE_MODIFY: case POST_MODIFY: /* We do not support elimination of a hard register that is modified. LRA has already made sure that this does not happen. The only remaining case we need to consider here is that the increment value may be an eliminable register. */ if (GET_CODE (XEXP (x, 1)) == PLUS && XEXP (XEXP (x, 1), 0) == XEXP (x, 0)) { rtx new_rtx = lra_eliminate_regs_1 (insn, XEXP (XEXP (x, 1), 1), mem_mode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != XEXP (XEXP (x, 1), 1)) return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0), gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), new_rtx)); } return x; case STRICT_LOW_PART: case NEG: case NOT: case SIGN_EXTEND: case ZERO_EXTEND: case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: case FLOAT: case FIX: case UNSIGNED_FIX: case UNSIGNED_FLOAT: case ABS: case SQRT: case FFS: case CLZ: case CTZ: case POPCOUNT: case PARITY: case BSWAP: new_rtx = lra_eliminate_regs_1 (insn, XEXP (x, 0), mem_mode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != XEXP (x, 0)) return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx); return x; case SUBREG: new_rtx = lra_eliminate_regs_1 (insn, SUBREG_REG (x), mem_mode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != SUBREG_REG (x)) { int x_size = GET_MODE_SIZE (GET_MODE (x)); int new_size = GET_MODE_SIZE (GET_MODE (new_rtx)); if (MEM_P (new_rtx) && x_size <= new_size) { SUBREG_REG (x) = new_rtx; alter_subreg (&x, false); return x; } else if (! subst_p) { /* LRA can transform subregs itself. So don't call simplify_gen_subreg until LRA transformations are finished. Function simplify_gen_subreg can do non-trivial transformations (like truncation) which might cause LRA to fail. */ SUBREG_REG (x) = new_rtx; return x; } else return simplify_gen_subreg (GET_MODE (x), new_rtx, GET_MODE (new_rtx), SUBREG_BYTE (x)); } return x; case MEM: /* Our only special processing is to pass the mode of the MEM to our recursive call and copy the flags. While we are here, handle this case more efficiently. */ return replace_equiv_address_nv (x, lra_eliminate_regs_1 (insn, XEXP (x, 0), GET_MODE (x), subst_p, update_p, update_sp_offset, full_p)); case USE: /* Handle insn_list USE that a call to a pure function may generate. */ new_rtx = lra_eliminate_regs_1 (insn, XEXP (x, 0), VOIDmode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != XEXP (x, 0)) return gen_rtx_USE (GET_MODE (x), new_rtx); return x; case CLOBBER: case SET: gcc_unreachable (); default: break; } /* Process each of our operands recursively. If any have changed, make a copy of the rtx. */ fmt = GET_RTX_FORMAT (code); for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) { if (*fmt == 'e') { new_rtx = lra_eliminate_regs_1 (insn, XEXP (x, i), mem_mode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != XEXP (x, i) && ! copied) { x = shallow_copy_rtx (x); copied = 1; } XEXP (x, i) = new_rtx; } else if (*fmt == 'E') { int copied_vec = 0; for (j = 0; j < XVECLEN (x, i); j++) { new_rtx = lra_eliminate_regs_1 (insn, XVECEXP (x, i, j), mem_mode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != XVECEXP (x, i, j) && ! copied_vec) { rtvec new_v = gen_rtvec_v (XVECLEN (x, i), XVEC (x, i)->elem); if (! copied) { x = shallow_copy_rtx (x); copied = 1; } XVEC (x, i) = new_v; copied_vec = 1; } XVECEXP (x, i, j) = new_rtx; } } } return x; }
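/* The PLUS case of lra_eliminate_regs_1 boils down to this arithmetic: replacing an eliminable register FROM with TO at displacement OFFSET turns (plus FROM (const_int C)) into (plus TO (const_int C + OFFSET)), collapsing to plain TO when the constants cancel.  A simplified, hypothetical sketch of just that step (the real code additionally folds in insn-specific SP offsets).  */
static rtx
eliminate_plus_sketch (rtx to, rtx cst, HOST_WIDE_INT offset)
{
  if (CONST_INT_P (cst) && INTVAL (cst) == -offset)
    return to;  /* (plus TO (const_int 0)) collapses to TO.  */

  /* plus_constant folds the constant term, as the code above does.  */
  return gen_rtx_PLUS (Pmode, to, plus_constant (Pmode, cst, offset));
}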
static void print_rtx (const_rtx in_rtx) { int i = 0; int j; const char *format_ptr; int is_insn; if (sawclose) { if (flag_simple) fputc (' ', outfile); else fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, ""); sawclose = 0; } if (in_rtx == 0) { fputs ("(nil)", outfile); sawclose = 1; return; } else if (GET_CODE (in_rtx) > NUM_RTX_CODE) { fprintf (outfile, "(??? bad code %d\n%s%*s)", GET_CODE (in_rtx), print_rtx_head, indent * 2, ""); sawclose = 1; return; } is_insn = INSN_P (in_rtx); /* Print name of expression code. */ if (flag_simple && CONST_INT_P (in_rtx)) fputc ('(', outfile); else fprintf (outfile, "(%s", GET_RTX_NAME (GET_CODE (in_rtx))); if (! flag_simple) { if (RTX_FLAG (in_rtx, in_struct)) fputs ("/s", outfile); if (RTX_FLAG (in_rtx, volatil)) fputs ("/v", outfile); if (RTX_FLAG (in_rtx, unchanging)) fputs ("/u", outfile); if (RTX_FLAG (in_rtx, frame_related)) fputs ("/f", outfile); if (RTX_FLAG (in_rtx, jump)) fputs ("/j", outfile); if (RTX_FLAG (in_rtx, call)) fputs ("/c", outfile); if (RTX_FLAG (in_rtx, return_val)) fputs ("/i", outfile); /* Print REG_NOTE names for EXPR_LIST and INSN_LIST. */ if ((GET_CODE (in_rtx) == EXPR_LIST || GET_CODE (in_rtx) == INSN_LIST || GET_CODE (in_rtx) == INT_LIST) && (int)GET_MODE (in_rtx) < REG_NOTE_MAX) fprintf (outfile, ":%s", GET_REG_NOTE_NAME (GET_MODE (in_rtx))); /* For other rtl, print the mode if it's not VOID. */ else if (GET_MODE (in_rtx) != VOIDmode) fprintf (outfile, ":%s", GET_MODE_NAME (GET_MODE (in_rtx))); #ifndef GENERATOR_FILE if (GET_CODE (in_rtx) == VAR_LOCATION) { if (TREE_CODE (PAT_VAR_LOCATION_DECL (in_rtx)) == STRING_CST) fputs (" <debug string placeholder>", outfile); else print_mem_expr (outfile, PAT_VAR_LOCATION_DECL (in_rtx)); fputc (' ', outfile); print_rtx (PAT_VAR_LOCATION_LOC (in_rtx)); if (PAT_VAR_LOCATION_STATUS (in_rtx) == VAR_INIT_STATUS_UNINITIALIZED) fprintf (outfile, " [uninit]"); sawclose = 1; i = GET_RTX_LENGTH (VAR_LOCATION); } #endif } #ifndef GENERATOR_FILE if (CONST_DOUBLE_AS_FLOAT_P (in_rtx)) i = 5; #endif /* Get the format string and skip the first elements if we have handled them already. */ format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx)) + i; for (; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++) switch (*format_ptr++) { const char *str; case 'T': str = XTMPL (in_rtx, i); goto string; case 'S': case 's': str = XSTR (in_rtx, i); string: if (str == 0) fputs (" \"\"", outfile); else fprintf (outfile, " (\"%s\")", str); sawclose = 1; break; /* 0 indicates a field for internal use that should not be printed. An exception is the third field of a NOTE, where it indicates that the field has several different valid contents. 
*/ case '0': if (i == 1 && REG_P (in_rtx)) { if (REGNO (in_rtx) != ORIGINAL_REGNO (in_rtx)) fprintf (outfile, " [%d]", ORIGINAL_REGNO (in_rtx)); } #ifndef GENERATOR_FILE else if (i == 1 && GET_CODE (in_rtx) == SYMBOL_REF) { int flags = SYMBOL_REF_FLAGS (in_rtx); if (flags) fprintf (outfile, " [flags %#x]", flags); } else if (i == 2 && GET_CODE (in_rtx) == SYMBOL_REF) { tree decl = SYMBOL_REF_DECL (in_rtx); if (decl) print_node_brief (outfile, "", decl, dump_flags); } #endif else if (i == 4 && NOTE_P (in_rtx)) { switch (NOTE_KIND (in_rtx)) { case NOTE_INSN_EH_REGION_BEG: case NOTE_INSN_EH_REGION_END: if (flag_dump_unnumbered) fprintf (outfile, " #"); else fprintf (outfile, " %d", NOTE_EH_HANDLER (in_rtx)); sawclose = 1; break; case NOTE_INSN_BLOCK_BEG: case NOTE_INSN_BLOCK_END: #ifndef GENERATOR_FILE dump_addr (outfile, " ", NOTE_BLOCK (in_rtx)); #endif sawclose = 1; break; case NOTE_INSN_BASIC_BLOCK: { #ifndef GENERATOR_FILE basic_block bb = NOTE_BASIC_BLOCK (in_rtx); if (bb != 0) fprintf (outfile, " [bb %d]", bb->index); #endif break; } case NOTE_INSN_DELETED_LABEL: case NOTE_INSN_DELETED_DEBUG_LABEL: { const char *label = NOTE_DELETED_LABEL_NAME (in_rtx); if (label) fprintf (outfile, " (\"%s\")", label); else fprintf (outfile, " \"\""); } break; case NOTE_INSN_SWITCH_TEXT_SECTIONS: { #ifndef GENERATOR_FILE basic_block bb = NOTE_BASIC_BLOCK (in_rtx); if (bb != 0) fprintf (outfile, " [bb %d]", bb->index); #endif break; } case NOTE_INSN_VAR_LOCATION: case NOTE_INSN_CALL_ARG_LOCATION: #ifndef GENERATOR_FILE fputc (' ', outfile); print_rtx (NOTE_VAR_LOCATION (in_rtx)); #endif break; case NOTE_INSN_CFI: #ifndef GENERATOR_FILE fputc ('\n', outfile); output_cfi_directive (outfile, NOTE_CFI (in_rtx)); fputc ('\t', outfile); #endif break; default: break; } } else if (i == 8 && JUMP_P (in_rtx) && JUMP_LABEL (in_rtx) != NULL) { /* Output the JUMP_LABEL reference. */ fprintf (outfile, "\n%s%*s -> ", print_rtx_head, indent * 2, ""); if (GET_CODE (JUMP_LABEL (in_rtx)) == RETURN) fprintf (outfile, "return"); else if (GET_CODE (JUMP_LABEL (in_rtx)) == SIMPLE_RETURN) fprintf (outfile, "simple_return"); else fprintf (outfile, "%d", INSN_UID (JUMP_LABEL (in_rtx))); } else if (i == 0 && GET_CODE (in_rtx) == VALUE) { #ifndef GENERATOR_FILE cselib_val *val = CSELIB_VAL_PTR (in_rtx); fprintf (outfile, " %u:%u", val->uid, val->hash); dump_addr (outfile, " @", in_rtx); dump_addr (outfile, "/", (void*)val); #endif } else if (i == 0 && GET_CODE (in_rtx) == DEBUG_EXPR) { #ifndef GENERATOR_FILE fprintf (outfile, " D#%i", DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (in_rtx))); #endif } else if (i == 0 && GET_CODE (in_rtx) == ENTRY_VALUE) { indent += 2; if (!sawclose) fprintf (outfile, " "); print_rtx (ENTRY_VALUE_EXP (in_rtx)); indent -= 2; } break; case 'e': do_e: indent += 2; if (i == 7 && INSN_P (in_rtx)) /* Put REG_NOTES on their own line. */ fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, ""); if (!sawclose) fprintf (outfile, " "); print_rtx (XEXP (in_rtx, i)); indent -= 2; break; case 'E': case 'V': indent += 2; if (sawclose) { fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, ""); sawclose = 0; } fputs (" [", outfile); if (NULL != XVEC (in_rtx, i)) { indent += 2; if (XVECLEN (in_rtx, i)) sawclose = 1; for (j = 0; j < XVECLEN (in_rtx, i); j++) print_rtx (XVECEXP (in_rtx, i, j)); indent -= 2; } if (sawclose) fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, ""); fputs ("]", outfile); sawclose = 1; indent -= 2; break; case 'w': if (! 
flag_simple) fprintf (outfile, " "); fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, XWINT (in_rtx, i)); if (! flag_simple) fprintf (outfile, " [" HOST_WIDE_INT_PRINT_HEX "]", (unsigned HOST_WIDE_INT) XWINT (in_rtx, i)); break; case 'i': if (i == 5 && INSN_P (in_rtx)) { #ifndef GENERATOR_FILE /* Pretty-print insn locations. Ignore scoping as it is mostly redundant with line number information and do not print anything when there is no location information available. */ if (INSN_LOCATION (in_rtx) && insn_file (in_rtx)) fprintf(outfile, " %s:%i", insn_file (in_rtx), insn_line (in_rtx)); #endif } else if (i == 6 && GET_CODE (in_rtx) == ASM_OPERANDS) { #ifndef GENERATOR_FILE fprintf (outfile, " %s:%i", LOCATION_FILE (ASM_OPERANDS_SOURCE_LOCATION (in_rtx)), LOCATION_LINE (ASM_OPERANDS_SOURCE_LOCATION (in_rtx))); #endif } else if (i == 1 && GET_CODE (in_rtx) == ASM_INPUT) { #ifndef GENERATOR_FILE fprintf (outfile, " %s:%i", LOCATION_FILE (ASM_INPUT_SOURCE_LOCATION (in_rtx)), LOCATION_LINE (ASM_INPUT_SOURCE_LOCATION (in_rtx))); #endif } else if (i == 6 && NOTE_P (in_rtx)) { /* This field is only used for NOTE_INSN_DELETED_LABEL, and other times often contains garbage from INSN->NOTE death. */ if (NOTE_KIND (in_rtx) == NOTE_INSN_DELETED_LABEL || NOTE_KIND (in_rtx) == NOTE_INSN_DELETED_DEBUG_LABEL) fprintf (outfile, " %d", XINT (in_rtx, i)); } #if !defined(GENERATOR_FILE) && NUM_UNSPECV_VALUES > 0 else if (i == 1 && GET_CODE (in_rtx) == UNSPEC_VOLATILE && XINT (in_rtx, 1) >= 0 && XINT (in_rtx, 1) < NUM_UNSPECV_VALUES) fprintf (outfile, " %s", unspecv_strings[XINT (in_rtx, 1)]); #endif #if !defined(GENERATOR_FILE) && NUM_UNSPEC_VALUES > 0 else if (i == 1 && (GET_CODE (in_rtx) == UNSPEC || GET_CODE (in_rtx) == UNSPEC_VOLATILE) && XINT (in_rtx, 1) >= 0 && XINT (in_rtx, 1) < NUM_UNSPEC_VALUES) fprintf (outfile, " %s", unspec_strings[XINT (in_rtx, 1)]); #endif else { int value = XINT (in_rtx, i); const char *name; #ifndef GENERATOR_FILE if (REG_P (in_rtx) && (unsigned) value < FIRST_PSEUDO_REGISTER) fprintf (outfile, " %d %s", value, reg_names[value]); else if (REG_P (in_rtx) && (unsigned) value <= LAST_VIRTUAL_REGISTER) { if (value == VIRTUAL_INCOMING_ARGS_REGNUM) fprintf (outfile, " %d virtual-incoming-args", value); else if (value == VIRTUAL_STACK_VARS_REGNUM) fprintf (outfile, " %d virtual-stack-vars", value); else if (value == VIRTUAL_STACK_DYNAMIC_REGNUM) fprintf (outfile, " %d virtual-stack-dynamic", value); else if (value == VIRTUAL_OUTGOING_ARGS_REGNUM) fprintf (outfile, " %d virtual-outgoing-args", value); else if (value == VIRTUAL_CFA_REGNUM) fprintf (outfile, " %d virtual-cfa", value); else if (value == VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM) fprintf (outfile, " %d virtual-preferred-stack-boundary", value); else fprintf (outfile, " %d virtual-reg-%d", value, value-FIRST_VIRTUAL_REGISTER); } else #endif if (flag_dump_unnumbered && (is_insn || NOTE_P (in_rtx))) fputc ('#', outfile); else fprintf (outfile, " %d", value); #ifndef GENERATOR_FILE if (REG_P (in_rtx) && REG_ATTRS (in_rtx)) { fputs (" [", outfile); if (ORIGINAL_REGNO (in_rtx) != REGNO (in_rtx)) fprintf (outfile, "orig:%i", ORIGINAL_REGNO (in_rtx)); if (REG_EXPR (in_rtx)) print_mem_expr (outfile, REG_EXPR (in_rtx)); if (REG_OFFSET (in_rtx)) fprintf (outfile, "+" HOST_WIDE_INT_PRINT_DEC, REG_OFFSET (in_rtx)); fputs (" ]", outfile); } #endif if (is_insn && &INSN_CODE (in_rtx) == &XINT (in_rtx, i) && XINT (in_rtx, i) >= 0 && (name = get_insn_name (XINT (in_rtx, i))) != NULL) fprintf (outfile, " {%s}", name); sawclose = 0; } break; /* Print 
NOTE_INSN names rather than integer codes. */ case 'n': fprintf (outfile, " %s", GET_NOTE_INSN_NAME (XINT (in_rtx, i))); sawclose = 0; break; case 'u': if (XEXP (in_rtx, i) != NULL) { rtx sub = XEXP (in_rtx, i); enum rtx_code subc = GET_CODE (sub); if (GET_CODE (in_rtx) == LABEL_REF) { if (subc == NOTE && NOTE_KIND (sub) == NOTE_INSN_DELETED_LABEL) { if (flag_dump_unnumbered) fprintf (outfile, " [# deleted]"); else fprintf (outfile, " [%d deleted]", INSN_UID (sub)); sawclose = 0; break; } if (subc != CODE_LABEL) goto do_e; } if (flag_dump_unnumbered || (flag_dump_unnumbered_links && (i == 1 || i == 2) && (INSN_P (in_rtx) || NOTE_P (in_rtx) || LABEL_P (in_rtx) || BARRIER_P (in_rtx)))) fputs (" #", outfile); else fprintf (outfile, " %d", INSN_UID (sub)); } else fputs (" 0", outfile); sawclose = 0; break; case 't': #ifndef GENERATOR_FILE if (i == 0 && GET_CODE (in_rtx) == DEBUG_IMPLICIT_PTR) print_mem_expr (outfile, DEBUG_IMPLICIT_PTR_DECL (in_rtx)); else if (i == 0 && GET_CODE (in_rtx) == DEBUG_PARAMETER_REF) print_mem_expr (outfile, DEBUG_PARAMETER_REF_DECL (in_rtx)); else dump_addr (outfile, " ", XTREE (in_rtx, i)); #endif break; case '*': fputs (" Unknown", outfile); sawclose = 0; break; case 'B': #ifndef GENERATOR_FILE if (XBBDEF (in_rtx, i)) fprintf (outfile, " %i", XBBDEF (in_rtx, i)->index); #endif break; default: gcc_unreachable (); } switch (GET_CODE (in_rtx)) { #ifndef GENERATOR_FILE case MEM: if (__builtin_expect (final_insns_dump_p, false)) fprintf (outfile, " ["); else fprintf (outfile, " [" HOST_WIDE_INT_PRINT_DEC, (HOST_WIDE_INT) MEM_ALIAS_SET (in_rtx)); if (MEM_EXPR (in_rtx)) print_mem_expr (outfile, MEM_EXPR (in_rtx)); if (MEM_OFFSET_KNOWN_P (in_rtx)) fprintf (outfile, "+" HOST_WIDE_INT_PRINT_DEC, MEM_OFFSET (in_rtx)); if (MEM_SIZE_KNOWN_P (in_rtx)) fprintf (outfile, " S" HOST_WIDE_INT_PRINT_DEC, MEM_SIZE (in_rtx)); if (MEM_ALIGN (in_rtx) != 1) fprintf (outfile, " A%u", MEM_ALIGN (in_rtx)); if (!ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (in_rtx))) fprintf (outfile, " AS%u", MEM_ADDR_SPACE (in_rtx)); fputc (']', outfile); break; case CONST_DOUBLE: if (FLOAT_MODE_P (GET_MODE (in_rtx))) { char s[60]; real_to_decimal (s, CONST_DOUBLE_REAL_VALUE (in_rtx), sizeof (s), 0, 1); fprintf (outfile, " %s", s); real_to_hexadecimal (s, CONST_DOUBLE_REAL_VALUE (in_rtx), sizeof (s), 0, 1); fprintf (outfile, " [%s]", s); } break; #endif case CODE_LABEL: fprintf (outfile, " [%d uses]", LABEL_NUSES (in_rtx)); switch (LABEL_KIND (in_rtx)) { case LABEL_NORMAL: break; case LABEL_STATIC_ENTRY: fputs (" [entry]", outfile); break; case LABEL_GLOBAL_ENTRY: fputs (" [global entry]", outfile); break; case LABEL_WEAK_ENTRY: fputs (" [weak entry]", outfile); break; default: gcc_unreachable (); } break; default: break; } fputc (')', outfile); sawclose = 1; }
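/* For orientation, a sketch of typical print_rtx output (assumed examples, not captured from a real dump): a (const_int 5) is printed by the 'w' case above as "(const_int 5 [0x5])", and a hard register comes out of the 'i' case with its number and reg_names entry, e.g. "(reg:SI 0 ax)" on a target whose reg_names[0] is "ax".  */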
static rtx alter_predicate_for_insn (rtx pattern, int alt, int max_op, int lineno) { const char *fmt; enum rtx_code code; int i, j, len; code = GET_CODE (pattern); switch (code) { case MATCH_OPERAND: { const char *c = XSTR (pattern, 2); if (n_alternatives (c) != 1) { message_with_line (lineno, "too many alternatives for operand %d", XINT (pattern, 0)); errors = 1; return NULL; } /* Replicate C as needed to fill out ALT alternatives. */ if (c && *c && alt > 1) { size_t c_len = strlen (c); size_t len = alt * (c_len + 1); char *new_c = xmalloc (len); memcpy (new_c, c, c_len); for (i = 1; i < alt; ++i) { new_c[i * (c_len + 1) - 1] = ','; memcpy (&new_c[i * (c_len + 1)], c, c_len); } new_c[len - 1] = '\0'; XSTR (pattern, 2) = new_c; } } /* Fall through. */ case MATCH_OPERATOR: case MATCH_SCRATCH: case MATCH_PARALLEL: case MATCH_INSN: XINT (pattern, 0) += max_op; break; default: break; } fmt = GET_RTX_FORMAT (code); len = GET_RTX_LENGTH (code); for (i = 0; i < len; i++) { rtx r; switch (fmt[i]) { case 'e': case 'u': r = alter_predicate_for_insn (XEXP (pattern, i), alt, max_op, lineno); if (r == NULL) return r; break; case 'E': for (j = XVECLEN (pattern, i) - 1; j >= 0; --j) { r = alter_predicate_for_insn (XVECEXP (pattern, i, j), alt, max_op, lineno); if (r == NULL) return r; } break; case 'i': case 'w': case '0': case 's': break; default: abort (); } } return pattern; }
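/* The replication loop above widens a single-alternative constraint string to ALT comma-separated copies, e.g. "r" with ALT == 3 becomes "r,r,r".  A standalone sketch of just that string transformation (hypothetical helper, not part of these sources; assumes libiberty's xmalloc and <string.h>).  */
static char *
replicate_constraint (const char *c, int alt)
{
  size_t c_len = strlen (c);
  size_t len = alt * (c_len + 1);
  char *new_c = (char *) xmalloc (len);
  int i;

  memcpy (new_c, c, c_len);
  for (i = 1; i < alt; ++i)
    {
      /* Each extra copy is preceded by a ',' separator.  */
      new_c[i * (c_len + 1) - 1] = ',';
      memcpy (&new_c[i * (c_len + 1)], c, c_len);
    }
  new_c[len - 1] = '\0';
  return new_c;
}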
static bool find_mem (rtx *address_of_x) { rtx x = *address_of_x; enum rtx_code code = GET_CODE (x); const char *const fmt = GET_RTX_FORMAT (code); int i; if (code == MEM && REG_P (XEXP (x, 0))) { /* Match with *reg0. */ mem_insn.mem_loc = address_of_x; mem_insn.reg0 = XEXP (x, 0); mem_insn.reg1_is_const = true; mem_insn.reg1_val = 0; mem_insn.reg1 = GEN_INT (0); if (find_inc (true)) return true; } if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS && REG_P (XEXP (XEXP (x, 0), 0))) { rtx reg1 = XEXP (XEXP (x, 0), 1); mem_insn.mem_loc = address_of_x; mem_insn.reg0 = XEXP (XEXP (x, 0), 0); mem_insn.reg1 = reg1; if (CONST_INT_P (reg1)) { mem_insn.reg1_is_const = true; /* Match with *(reg0 + c) where c is a const. */ mem_insn.reg1_val = INTVAL (reg1); if (find_inc (true)) return true; } else if (REG_P (reg1)) { /* Match with *(reg0 + reg1). */ mem_insn.reg1_is_const = false; if (find_inc (true)) return true; } } if (code == SIGN_EXTRACT || code == ZERO_EXTRACT) { /* If REG occurs inside a MEM used in a bit-field reference, that is unacceptable. */ return false; } /* Time for some deep diving. */ for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) { if (fmt[i] == 'e') { if (find_mem (&XEXP (x, i))) return true; } else if (fmt[i] == 'E') { int j; for (j = XVECLEN (x, i) - 1; j >= 0; j--) if (find_mem (&XVECEXP (x, i, j))) return true; } } return false; }
static int find_address (rtx *address_of_x) { rtx x = *address_of_x; enum rtx_code code = GET_CODE (x); const char *const fmt = GET_RTX_FORMAT (code); int i; int value = 0; int tem; if (code == MEM && rtx_equal_p (XEXP (x, 0), inc_insn.reg_res)) { /* Match with *reg0. */ mem_insn.mem_loc = address_of_x; mem_insn.reg0 = inc_insn.reg_res; mem_insn.reg1_is_const = true; mem_insn.reg1_val = 0; mem_insn.reg1 = GEN_INT (0); return -1; } if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS && rtx_equal_p (XEXP (XEXP (x, 0), 0), inc_insn.reg_res)) { rtx b = XEXP (XEXP (x, 0), 1); mem_insn.mem_loc = address_of_x; mem_insn.reg0 = inc_insn.reg_res; mem_insn.reg1 = b; mem_insn.reg1_is_const = inc_insn.reg1_is_const; if (CONST_INT_P (b)) { /* Match with *(reg0 + reg1) where reg1 is a const. */ HOST_WIDE_INT val = INTVAL (b); if (inc_insn.reg1_is_const && (inc_insn.reg1_val == val || inc_insn.reg1_val == -val)) { mem_insn.reg1_val = val; return -1; } } else if (!inc_insn.reg1_is_const && rtx_equal_p (inc_insn.reg1, b)) /* Match with *(reg0 + reg1). */ return -1; } if (code == SIGN_EXTRACT || code == ZERO_EXTRACT) { /* If REG occurs inside a MEM used in a bit-field reference, that is unacceptable. */ if (find_address (&XEXP (x, 0))) return 1; } if (x == inc_insn.reg_res) return 1; /* Time for some deep diving. */ for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) { if (fmt[i] == 'e') { tem = find_address (&XEXP (x, i)); /* If this is the first use, let it go so the rest of the insn can be checked. */ if (value == 0) value = tem; else if (tem != 0) /* More than one match was found. */ return 1; } else if (fmt[i] == 'E') { int j; for (j = XVECLEN (x, i) - 1; j >= 0; j--) { tem = find_address (&XVECEXP (x, i, j)); /* If this is the first use, let it go so the rest of the insn can be checked. */ if (value == 0) value = tem; else if (tem != 0) /* More than one match was found. */ return 1; } } } return value; }
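/* The address shapes find_mem and find_address look for, built explicitly for reference (a sketch; the SImode access and Pmode address arithmetic are assumptions): *(reg0) is (mem:SI reg0), *(reg0 + c) is the form returned below, and *(reg0 + reg1) uses a second register in place of the const_int.  */
static rtx
make_reg_plus_const_mem (rtx reg0, HOST_WIDE_INT c)
{
  /* (mem:SI (plus reg0 (const_int c))) -- the *(reg0 + c) form.  */
  return gen_rtx_MEM (SImode, gen_rtx_PLUS (Pmode, reg0, GEN_INT (c)));
}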
static void print_rtx (rtx in_rtx) { int i = 0; int j; const char *format_ptr; int is_insn; if (sawclose) { if (flag_simple) fputc (' ', outfile); else fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, ""); sawclose = 0; } if (in_rtx == 0) { fputs ("(nil)", outfile); sawclose = 1; return; } else if (GET_CODE (in_rtx) > NUM_RTX_CODE) { fprintf (outfile, "(??? bad code %d\n)", GET_CODE (in_rtx)); sawclose = 1; return; } is_insn = INSN_P (in_rtx); /* When printing in VCG format we write INSNs, NOTE, LABEL, and BARRIER in separate nodes and therefore have to handle them special here. */ if (dump_for_graph && (is_insn || NOTE_P (in_rtx) || LABEL_P (in_rtx) || BARRIER_P (in_rtx))) { i = 3; indent = 0; } else { /* Print name of expression code. */ if (flag_simple && GET_CODE (in_rtx) == CONST_INT) fputc ('(', outfile); else fprintf (outfile, "(%s", GET_RTX_NAME (GET_CODE (in_rtx))); if (! flag_simple) { if (RTX_FLAG (in_rtx, in_struct)) fputs ("/s", outfile); if (RTX_FLAG (in_rtx, volatil)) fputs ("/v", outfile); if (RTX_FLAG (in_rtx, unchanging)) fputs ("/u", outfile); if (RTX_FLAG (in_rtx, frame_related)) fputs ("/f", outfile); if (RTX_FLAG (in_rtx, jump)) fputs ("/j", outfile); if (RTX_FLAG (in_rtx, call)) fputs ("/c", outfile); if (RTX_FLAG (in_rtx, return_val)) fputs ("/i", outfile); /* Print REG_NOTE names for EXPR_LIST and INSN_LIST. */ if (GET_CODE (in_rtx) == EXPR_LIST || GET_CODE (in_rtx) == INSN_LIST) fprintf (outfile, ":%s", GET_REG_NOTE_NAME (GET_MODE (in_rtx))); /* For other rtl, print the mode if it's not VOID. */ else if (GET_MODE (in_rtx) != VOIDmode) fprintf (outfile, ":%s", GET_MODE_NAME (GET_MODE (in_rtx))); } } #ifndef GENERATOR_FILE if (GET_CODE (in_rtx) == CONST_DOUBLE && FLOAT_MODE_P (GET_MODE (in_rtx))) i = 5; #endif /* Get the format string and skip the first elements if we have handled them already. */ format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx)) + i; for (; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++) switch (*format_ptr++) { const char *str; case 'T': str = XTMPL (in_rtx, i); goto string; case 'S': case 's': str = XSTR (in_rtx, i); string: if (str == 0) fputs (dump_for_graph ? " \\\"\\\"" : " \"\"", outfile); else { if (dump_for_graph) fprintf (outfile, " (\\\"%s\\\")", str); else fprintf (outfile, " (\"%s\")", str); } sawclose = 1; break; /* 0 indicates a field for internal use that should not be printed. An exception is the third field of a NOTE, where it indicates that the field has several different valid contents. 
*/ case '0': if (i == 1 && REG_P (in_rtx)) { if (REGNO (in_rtx) != ORIGINAL_REGNO (in_rtx)) fprintf (outfile, " [%d]", ORIGINAL_REGNO (in_rtx)); } #ifndef GENERATOR_FILE else if (i == 1 && GET_CODE (in_rtx) == SYMBOL_REF) { int flags = SYMBOL_REF_FLAGS (in_rtx); if (flags) fprintf (outfile, " [flags 0x%x]", flags); } else if (i == 2 && GET_CODE (in_rtx) == SYMBOL_REF) { tree decl = SYMBOL_REF_DECL (in_rtx); if (decl) print_node_brief (outfile, "", decl, 0); } #endif else if (i == 4 && NOTE_P (in_rtx)) { switch (NOTE_LINE_NUMBER (in_rtx)) { case NOTE_INSN_EH_REGION_BEG: case NOTE_INSN_EH_REGION_END: if (flag_dump_unnumbered) fprintf (outfile, " #"); else fprintf (outfile, " %d", NOTE_EH_HANDLER (in_rtx)); sawclose = 1; break; case NOTE_INSN_BLOCK_BEG: case NOTE_INSN_BLOCK_END: #ifndef GENERATOR_FILE dump_addr (outfile, " ", NOTE_BLOCK (in_rtx)); #endif sawclose = 1; break; case NOTE_INSN_BASIC_BLOCK: { #ifndef GENERATOR_FILE basic_block bb = NOTE_BASIC_BLOCK (in_rtx); if (bb != 0) fprintf (outfile, " [bb %d]", bb->index); #endif break; } case NOTE_INSN_EXPECTED_VALUE: indent += 2; if (!sawclose) fprintf (outfile, " "); print_rtx (NOTE_EXPECTED_VALUE (in_rtx)); indent -= 2; break; case NOTE_INSN_DELETED_LABEL: { const char *label = NOTE_DELETED_LABEL_NAME (in_rtx); if (label) fprintf (outfile, " (\"%s\")", label); else fprintf (outfile, " \"\""); } break; case NOTE_INSN_SWITCH_TEXT_SECTIONS: { #ifndef GENERATOR_FILE basic_block bb = NOTE_BASIC_BLOCK (in_rtx); if (bb != 0) fprintf (outfile, " [bb %d]", bb->index); #endif break; } case NOTE_INSN_VAR_LOCATION: #ifndef GENERATOR_FILE fprintf (outfile, " ("); print_mem_expr (outfile, NOTE_VAR_LOCATION_DECL (in_rtx)); fprintf (outfile, " "); print_rtx (NOTE_VAR_LOCATION_LOC (in_rtx)); fprintf (outfile, ")"); #endif break; default: { const char * const str = X0STR (in_rtx, i); if (NOTE_LINE_NUMBER (in_rtx) < 0) ; else if (str == 0) fputs (dump_for_graph ? " \\\"\\\"" : " \"\"", outfile); else { if (dump_for_graph) fprintf (outfile, " (\\\"%s\\\")", str); else fprintf (outfile, " (\"%s\")", str); } break; } } } break; case 'e': do_e: indent += 2; if (!sawclose) fprintf (outfile, " "); print_rtx (XEXP (in_rtx, i)); indent -= 2; break; case 'E': case 'V': indent += 2; if (sawclose) { fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, ""); sawclose = 0; } fputs (" [", outfile); if (NULL != XVEC (in_rtx, i)) { indent += 2; if (XVECLEN (in_rtx, i)) sawclose = 1; for (j = 0; j < XVECLEN (in_rtx, i); j++) print_rtx (XVECEXP (in_rtx, i, j)); indent -= 2; } if (sawclose) fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, ""); fputs ("]", outfile); sawclose = 1; indent -= 2; break; case 'w': if (! flag_simple) fprintf (outfile, " "); fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, XWINT (in_rtx, i)); if (! flag_simple) fprintf (outfile, " [" HOST_WIDE_INT_PRINT_HEX "]", XWINT (in_rtx, i)); break; case 'i': if (i == 4 && INSN_P (in_rtx)) { #ifndef GENERATOR_FILE /* Pretty-print insn locators. Ignore scoping as it is mostly redundant with line number information and do not print anything when there is no location information available. */ if (INSN_LOCATOR (in_rtx) && insn_file (in_rtx)) fprintf(outfile, " %s:%i", insn_file (in_rtx), insn_line (in_rtx)); #endif } else if (i == 6 && NOTE_P (in_rtx)) { /* This field is only used for NOTE_INSN_DELETED_LABEL, and other times often contains garbage from INSN->NOTE death. 
*/ if (NOTE_LINE_NUMBER (in_rtx) == NOTE_INSN_DELETED_LABEL) fprintf (outfile, " %d", XINT (in_rtx, i)); } else { int value = XINT (in_rtx, i); const char *name; #ifndef GENERATOR_FILE if (REG_P (in_rtx) && value < FIRST_PSEUDO_REGISTER) fprintf (outfile, " %d %s", REGNO (in_rtx), reg_names[REGNO (in_rtx)]); else if (REG_P (in_rtx) && value <= LAST_VIRTUAL_REGISTER) { if (value == VIRTUAL_INCOMING_ARGS_REGNUM) fprintf (outfile, " %d virtual-incoming-args", value); else if (value == VIRTUAL_STACK_VARS_REGNUM) fprintf (outfile, " %d virtual-stack-vars", value); else if (value == VIRTUAL_STACK_DYNAMIC_REGNUM) fprintf (outfile, " %d virtual-stack-dynamic", value); else if (value == VIRTUAL_OUTGOING_ARGS_REGNUM) fprintf (outfile, " %d virtual-outgoing-args", value); else if (value == VIRTUAL_CFA_REGNUM) fprintf (outfile, " %d virtual-cfa", value); else fprintf (outfile, " %d virtual-reg-%d", value, value-FIRST_VIRTUAL_REGISTER); } else #endif if (flag_dump_unnumbered && (is_insn || NOTE_P (in_rtx))) fputc ('#', outfile); else fprintf (outfile, " %d", value); #ifndef GENERATOR_FILE if (REG_P (in_rtx) && REG_ATTRS (in_rtx)) { fputs (" [", outfile); if (ORIGINAL_REGNO (in_rtx) != REGNO (in_rtx)) fprintf (outfile, "orig:%i", ORIGINAL_REGNO (in_rtx)); if (REG_EXPR (in_rtx)) print_mem_expr (outfile, REG_EXPR (in_rtx)); if (REG_OFFSET (in_rtx)) fprintf (outfile, "+" HOST_WIDE_INT_PRINT_DEC, REG_OFFSET (in_rtx)); fputs (" ]", outfile); } #endif if (is_insn && &INSN_CODE (in_rtx) == &XINT (in_rtx, i) && XINT (in_rtx, i) >= 0 && (name = get_insn_name (XINT (in_rtx, i))) != NULL) fprintf (outfile, " {%s}", name); sawclose = 0; } break; /* Print NOTE_INSN names rather than integer codes. */ case 'n': if (XINT (in_rtx, i) >= (int) NOTE_INSN_BIAS && XINT (in_rtx, i) < (int) NOTE_INSN_MAX) fprintf (outfile, " %s", GET_NOTE_INSN_NAME (XINT (in_rtx, i))); else fprintf (outfile, " %d", XINT (in_rtx, i)); sawclose = 0; break; case 'u': if (XEXP (in_rtx, i) != NULL) { rtx sub = XEXP (in_rtx, i); enum rtx_code subc = GET_CODE (sub); if (GET_CODE (in_rtx) == LABEL_REF) { if (subc == NOTE && NOTE_LINE_NUMBER (sub) == NOTE_INSN_DELETED_LABEL) { if (flag_dump_unnumbered) fprintf (outfile, " [# deleted]"); else fprintf (outfile, " [%d deleted]", INSN_UID (sub)); sawclose = 0; break; } if (subc != CODE_LABEL) goto do_e; } if (flag_dump_unnumbered) fputs (" #", outfile); else fprintf (outfile, " %d", INSN_UID (sub)); } else fputs (" 0", outfile); sawclose = 0; break; case 'b': #ifndef GENERATOR_FILE if (XBITMAP (in_rtx, i) == NULL) fputs (" {null}", outfile); else bitmap_print (outfile, XBITMAP (in_rtx, i), " {", "}"); #endif sawclose = 0; break; case 't': #ifndef GENERATOR_FILE dump_addr (outfile, " ", XTREE (in_rtx, i)); #endif break; case '*': fputs (" Unknown", outfile); sawclose = 0; break; case 'B': #ifndef GENERATOR_FILE if (XBBDEF (in_rtx, i)) fprintf (outfile, " %i", XBBDEF (in_rtx, i)->index); #endif break; default: gcc_unreachable (); } switch (GET_CODE (in_rtx)) { #ifndef GENERATOR_FILE case MEM: fprintf (outfile, " [" HOST_WIDE_INT_PRINT_DEC, MEM_ALIAS_SET (in_rtx)); if (MEM_EXPR (in_rtx)) print_mem_expr (outfile, MEM_EXPR (in_rtx)); if (MEM_OFFSET (in_rtx)) fprintf (outfile, "+" HOST_WIDE_INT_PRINT_DEC, INTVAL (MEM_OFFSET (in_rtx))); if (MEM_SIZE (in_rtx)) fprintf (outfile, " S" HOST_WIDE_INT_PRINT_DEC, INTVAL (MEM_SIZE (in_rtx))); if (MEM_ALIGN (in_rtx) != 1) fprintf (outfile, " A%u", MEM_ALIGN (in_rtx)); fputc (']', outfile); break; case CONST_DOUBLE: if (FLOAT_MODE_P (GET_MODE (in_rtx))) { char s[60]; 
real_to_decimal (s, CONST_DOUBLE_REAL_VALUE (in_rtx), sizeof (s), 0, 1); fprintf (outfile, " %s", s); real_to_hexadecimal (s, CONST_DOUBLE_REAL_VALUE (in_rtx), sizeof (s), 0, 1); fprintf (outfile, " [%s]", s); } break; #endif case CODE_LABEL: fprintf (outfile, " [%d uses]", LABEL_NUSES (in_rtx)); switch (LABEL_KIND (in_rtx)) { case LABEL_NORMAL: break; case LABEL_STATIC_ENTRY: fputs (" [entry]", outfile); break; case LABEL_GLOBAL_ENTRY: fputs (" [global entry]", outfile); break; case LABEL_WEAK_ENTRY: fputs (" [weak entry]", outfile); break; default: gcc_unreachable (); } break; default: break; } if (dump_for_graph && (is_insn || NOTE_P (in_rtx) || LABEL_P (in_rtx) || BARRIER_P (in_rtx))) sawclose = 0; else { fputc (')', outfile); sawclose = 1; } }
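print_rtx is normally reached through the print-rtl.c entry points rather than called directly. The following is a hedged sketch of the usual debugging path; it assumes debug_rtx and the standard gen_rtx_* constructors, and the exact register name in the output is target-dependent.

/* Hypothetical snippet: build (plus:SI (reg:SI 1) (const_int 4)) and
   dump it; debug_rtx ends up in print_rtx with outfile set to stderr.  */
static void
dump_plus_example (void)
{
  rtx sum = gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 1), GEN_INT (4));
  debug_rtx (sum);
  /* The output has the shape
       (plus:SI (reg:SI 1 <name>) (const_int 4 [0x4]))
     where <name> comes from reg_names[] for hard registers and the
     bracketed value is the 'w' field repeated in hex.  */
}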
rtx copy_rtx (rtx orig) { rtx copy; int i, j; RTX_CODE code; const char *format_ptr; code = GET_CODE (orig); switch (code) { case REG: case DEBUG_EXPR: case VALUE: case CONST_INT: case CONST_DOUBLE: case CONST_FIXED: case CONST_VECTOR: case SYMBOL_REF: case CODE_LABEL: case PC: case CC0: case RETURN: case SIMPLE_RETURN: case SCRATCH: /* SCRATCH must be shared because they represent distinct values. */ return orig; case CLOBBER: if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER) return orig; break; case CONST: if (shared_const_p (orig)) return orig; break; /* A MEM with a constant address is not sharable. The problem is that the constant address may need to be reloaded. If the mem is shared, then reloading one copy of this mem will cause all copies to appear to have been reloaded. */ default: break; } /* Copy the various flags, fields, and other information. We assume that all fields need copying, and then clear the fields that should not be copied. That is the sensible default behavior, and forces us to explicitly document why we are *not* copying a flag. */ copy = shallow_copy_rtx (orig); /* We do not copy the USED flag, which is used as a mark bit during walks over the RTL. */ RTX_FLAG (copy, used) = 0; format_ptr = GET_RTX_FORMAT (GET_CODE (copy)); for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++) switch (*format_ptr++) { case 'e': if (XEXP (orig, i) != NULL) XEXP (copy, i) = copy_rtx (XEXP (orig, i)); break; case 'E': case 'V': if (XVEC (orig, i) != NULL) { XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i)); for (j = 0; j < XVECLEN (copy, i); j++) XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j)); } break; case 't': case 'w': case 'i': case 's': case 'S': case 'T': case 'u': case 'B': case '0': /* These are left unchanged. */ break; default: gcc_unreachable (); } return copy; }
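The first switch is a whitelist of codes that are always shared; everything else is copied field by field under control of the format string. A small hedged illustration follows (the helper is hypothetical and assumes we are inside a function being compiled so that gen_reg_rtx is usable).

/* Hypothetical illustration of copy_rtx's sharing rules.  */
static void
copy_rtx_sharing_example (void)
{
  rtx reg = gen_reg_rtx (SImode);
  rtx mem = gen_rtx_MEM (SImode, gen_rtx_PLUS (SImode, reg, GEN_INT (8)));
  rtx c = copy_rtx (mem);

  gcc_assert (copy_rtx (reg) == reg);         /* REGs are shared.  */
  gcc_assert (c != mem);                      /* MEMs are not...  */
  gcc_assert (XEXP (XEXP (c, 0), 0) == reg);  /* ...but the REG inside is.  */
}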
XSTR (pattern, 2) = new_c; } } /* Fall through. */ case MATCH_OPERATOR: case MATCH_SCRATCH: case MATCH_PARALLEL: XINT (pattern, 0) += max_op; break; default: break; } fmt = GET_RTX_FORMAT (code); len = GET_RTX_LENGTH (code); for (i = 0; i < len; i++) { rtx r; switch (fmt[i]) { case 'e': case 'u': r = alter_predicate_for_insn (XEXP (pattern, i), alt, max_op, lineno); if (r == NULL) return r; break; case 'E':
hashval_t iterative_hash_rtx (const_rtx x, hashval_t hash) { enum rtx_code code; enum machine_mode mode; int i, j; const char *fmt; if (x == NULL_RTX) return hash; code = GET_CODE (x); hash = iterative_hash_object (code, hash); mode = GET_MODE (x); hash = iterative_hash_object (mode, hash); switch (code) { case REG: i = REGNO (x); return iterative_hash_object (i, hash); case CONST_INT: return iterative_hash_object (INTVAL (x), hash); case SYMBOL_REF: if (XSTR (x, 0)) return iterative_hash (XSTR (x, 0), strlen (XSTR (x, 0)) + 1, hash); return hash; case LABEL_REF: case DEBUG_EXPR: case VALUE: case SCRATCH: case CONST_DOUBLE: case CONST_FIXED: case DEBUG_IMPLICIT_PTR: case DEBUG_PARAMETER_REF: return hash; default: break; } fmt = GET_RTX_FORMAT (code); for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) switch (fmt[i]) { case 'w': hash = iterative_hash_object (XWINT (x, i), hash); break; case 'n': case 'i': hash = iterative_hash_object (XINT (x, i), hash); break; case 'V': case 'E': j = XVECLEN (x, i); hash = iterative_hash_object (j, hash); for (j = 0; j < XVECLEN (x, i); j++) hash = iterative_hash_rtx (XVECEXP (x, i, j), hash); break; case 'e': hash = iterative_hash_rtx (XEXP (x, i), hash); break; case 'S': case 's': if (XSTR (x, i)) hash = iterative_hash (XSTR (x, i), strlen (XSTR (x, i)) + 1, hash); break; default: break; } return hash; }
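A hashed pattern is typically used as a cheap prefilter before a structural comparison. A minimal hedged sketch of the calling convention follows; the helpers are hypothetical and assume rtl.h and hashtab.h.

/* Hypothetical callers of iterative_hash_rtx.  */
static hashval_t
hash_insn_pattern (rtx insn)
{
  /* Seed of 0; any previously accumulated hash could be passed instead.  */
  return iterative_hash_rtx (PATTERN (insn), 0);
}

static bool
maybe_same_pattern_p (rtx i1, rtx i2)
{
  /* Equal hashes do not prove equality, so follow up with rtx_equal_p.  */
  return hash_insn_pattern (i1) == hash_insn_pattern (i2)
         && rtx_equal_p (PATTERN (i1), PATTERN (i2));
}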
rtx copy_rtx (rtx orig) { rtx copy; int i, j; RTX_CODE code; const char *format_ptr; code = GET_CODE (orig); switch (code) { case REG: case DEBUG_EXPR: case VALUE: CASE_CONST_ANY: case SYMBOL_REF: case CODE_LABEL: case PC: case CC0: case RETURN: case SIMPLE_RETURN: case SCRATCH: /* SCRATCH must be shared because they represent distinct values. */ return orig; case CLOBBER: /* Share clobbers of hard registers (like cc0), but do not share pseudo reg clobbers or clobbers of hard registers that originated as pseudos. This is needed to allow safe register renaming. */ if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0))) return orig; break; case CLOBBER_HIGH: gcc_assert (REG_P (XEXP (orig, 0))); return orig; case CONST: if (shared_const_p (orig)) return orig; break; /* A MEM with a constant address is not sharable. The problem is that the constant address may need to be reloaded. If the mem is shared, then reloading one copy of this mem will cause all copies to appear to have been reloaded. */ default: break; } /* Copy the various flags, fields, and other information. We assume that all fields need copying, and then clear the fields that should not be copied. That is the sensible default behavior, and forces us to explicitly document why we are *not* copying a flag. */ copy = shallow_copy_rtx (orig); format_ptr = GET_RTX_FORMAT (GET_CODE (copy)); for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++) switch (*format_ptr++) { case 'e': if (XEXP (orig, i) != NULL) XEXP (copy, i) = copy_rtx (XEXP (orig, i)); break; case 'E': case 'V': if (XVEC (orig, i) != NULL) { XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i)); for (j = 0; j < XVECLEN (copy, i); j++) XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j)); } break; case 't': case 'w': case 'i': case 'p': case 's': case 'S': case 'T': case 'u': case 'B': case '0': /* These are left unchanged. */ break; default: gcc_unreachable (); } return copy; }
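Relative to the earlier copy_rtx above, the CLOBBER case is narrower: a hard-register clobber is shared only if ORIGINAL_REGNO still equals REGNO, i.e. the clobber was not written against a pseudo that later landed in that hard register. A short hand trace of that case follows; the register numbers are purely illustrative.

/* (clobber (reg:CC 17)) written against hard reg 17 from the start:
     ORIGINAL_REGNO == REGNO, so the original CLOBBER rtx is returned.
   (clobber (reg:SI 3)) where reg 3 used to be pseudo 90:
     ORIGINAL_REGNO (90) != REGNO (3), so a fresh CLOBBER is built and
     register renaming can rewrite the copy safely.  */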
static bool replace_oldest_value_addr (rtx *loc, enum reg_class cl, enum machine_mode mode, addr_space_t as, rtx insn, struct value_data *vd) { rtx x = *loc; RTX_CODE code = GET_CODE (x); const char *fmt; int i, j; bool changed = false; switch (code) { case PLUS: if (DEBUG_INSN_P (insn)) break; { rtx orig_op0 = XEXP (x, 0); rtx orig_op1 = XEXP (x, 1); RTX_CODE code0 = GET_CODE (orig_op0); RTX_CODE code1 = GET_CODE (orig_op1); rtx op0 = orig_op0; rtx op1 = orig_op1; rtx *locI = NULL; rtx *locB = NULL; enum rtx_code index_code = SCRATCH; if (GET_CODE (op0) == SUBREG) { op0 = SUBREG_REG (op0); code0 = GET_CODE (op0); } if (GET_CODE (op1) == SUBREG) { op1 = SUBREG_REG (op1); code1 = GET_CODE (op1); } if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE || code0 == ZERO_EXTEND || code1 == MEM) { locI = &XEXP (x, 0); locB = &XEXP (x, 1); index_code = GET_CODE (*locI); } else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE || code1 == ZERO_EXTEND || code0 == MEM) { locI = &XEXP (x, 1); locB = &XEXP (x, 0); index_code = GET_CODE (*locI); } else if (code0 == CONST_INT || code0 == CONST || code0 == SYMBOL_REF || code0 == LABEL_REF) { locB = &XEXP (x, 1); index_code = GET_CODE (XEXP (x, 0)); } else if (code1 == CONST_INT || code1 == CONST || code1 == SYMBOL_REF || code1 == LABEL_REF) { locB = &XEXP (x, 0); index_code = GET_CODE (XEXP (x, 1)); } else if (code0 == REG && code1 == REG) { int index_op; unsigned regno0 = REGNO (op0), regno1 = REGNO (op1); if (REGNO_OK_FOR_INDEX_P (regno1) && regno_ok_for_base_p (regno0, mode, as, PLUS, REG)) index_op = 1; else if (REGNO_OK_FOR_INDEX_P (regno0) && regno_ok_for_base_p (regno1, mode, as, PLUS, REG)) index_op = 0; else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG) || REGNO_OK_FOR_INDEX_P (regno1)) index_op = 1; else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG)) index_op = 0; else index_op = 1; locI = &XEXP (x, index_op); locB = &XEXP (x, !index_op); index_code = GET_CODE (*locI); } else if (code0 == REG) { locI = &XEXP (x, 0); locB = &XEXP (x, 1); index_code = GET_CODE (*locI); } else if (code1 == REG) { locI = &XEXP (x, 1); locB = &XEXP (x, 0); index_code = GET_CODE (*locI); } if (locI) changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS, mode, as, insn, vd); if (locB) changed |= replace_oldest_value_addr (locB, base_reg_class (mode, as, PLUS, index_code), mode, as, insn, vd); return changed; } case POST_INC: case POST_DEC: case POST_MODIFY: case PRE_INC: case PRE_DEC: case PRE_MODIFY: return false; case MEM: return replace_oldest_value_mem (x, insn, vd); case REG: return replace_oldest_value_reg (loc, cl, insn, vd); default: break; } fmt = GET_RTX_FORMAT (code); for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) { if (fmt[i] == 'e') changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as, insn, vd); else if (fmt[i] == 'E') for (j = XVECLEN (x, i) - 1; j >= 0; j--) changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl, mode, as, insn, vd); } return changed; }
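In the PLUS case nothing is rewritten directly; the code only decides which operand plays the index role and which the base role, so that each is validated against the right register class before renaming. A hand trace of one branch of the code above (derived from the code, not from target documentation):

/* For (plus (mult (reg I) (const_int 4)) (reg B)):
     code0 == MULT, so locI = &XEXP (x, 0), locB = &XEXP (x, 1) and
     index_code = MULT.  The MULT side is then processed against
     INDEX_REG_CLASS and (reg B) against
     base_reg_class (mode, as, PLUS, MULT).  */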
static bool vax_rtx_costs (rtx x, int code, int outer_code, int *total) { enum machine_mode mode = GET_MODE (x); int i = 0; /* may be modified in switch */ const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */ switch (code) { /* On a VAX, constants from 0..63 are cheap because they can use the 1 byte literal constant format. Compare to -1 should be made cheap so that decrement-and-branch insns can be formed more easily (if the value -1 is copied to a register some decrement-and-branch patterns will not match). */ case CONST_INT: if (INTVAL (x) == 0) return true; if (outer_code == AND) { *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2; return true; } if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077 || (outer_code == COMPARE && INTVAL (x) == -1) || ((outer_code == PLUS || outer_code == MINUS) && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077)) { *total = 1; return true; } /* FALLTHRU */ case CONST: case LABEL_REF: case SYMBOL_REF: *total = 3; return true; case CONST_DOUBLE: if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) *total = vax_float_literal (x) ? 5 : 8; else *total = ((CONST_DOUBLE_HIGH (x) == 0 && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64) || (outer_code == PLUS && CONST_DOUBLE_HIGH (x) == -1 && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64)) ? 2 : 5; return true; case POST_INC: *total = 2; return true; /* Implies register operand. */ case PRE_DEC: *total = 3; return true; /* Implies register operand. */ case MULT: switch (mode) { case DFmode: *total = 16; /* 4 on VAX 9000 */ break; case SFmode: *total = 9; /* 4 on VAX 9000, 12 on VAX 2 */ break; case DImode: *total = 16; /* 6 on VAX 9000, 28 on VAX 2 */ break; case SImode: case HImode: case QImode: *total = 10; /* 3-4 on VAX 9000, 20-28 on VAX 2 */ break; default: *total = MAX_COST; /* Mode is not supported. */ return true; } break; case UDIV: if (mode != SImode) { *total = MAX_COST; /* Mode is not supported. */ return true; } *total = 17; break; case DIV: if (mode == DImode) *total = 30; /* Highly variable. */ else if (mode == DFmode) /* divide takes 28 cycles if the result is not zero, 13 otherwise */ *total = 24; else *total = 11; /* 25 on VAX 2 */ break; case MOD: *total = 23; break; case UMOD: if (mode != SImode) { *total = MAX_COST; /* Mode is not supported. */ return true; } *total = 29; break; case FLOAT: *total = (6 /* 4 on VAX 9000 */ + (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode)); break; case FIX: *total = 7; /* 17 on VAX 2 */ break; case ASHIFT: case LSHIFTRT: case ASHIFTRT: if (mode == DImode) *total = 12; else *total = 10; /* 6 on VAX 9000 */ break; case ROTATE: case ROTATERT: *total = 6; /* 5 on VAX 2, 4 on VAX 9000 */ if (GET_CODE (XEXP (x, 1)) == CONST_INT) fmt = "e"; /* all constant rotate counts are short */ break; case PLUS: case MINUS: *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */ /* Small integer operands can use subl2 and addl2. */ if ((GET_CODE (XEXP (x, 1)) == CONST_INT) && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127) fmt = "e"; break; case IOR: case XOR: *total = 3; break; case AND: /* AND is special because the first operand is complemented. 
*/ *total = 3; if (GET_CODE (XEXP (x, 0)) == CONST_INT) { if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63) *total = 4; fmt = "e"; i = 1; } break; case NEG: if (mode == DFmode) *total = 9; else if (mode == SFmode) *total = 6; else if (mode == DImode) *total = 4; else *total = 2; break; case NOT: *total = 2; break; case ZERO_EXTRACT: case SIGN_EXTRACT: *total = 15; break; case MEM: if (mode == DImode || mode == DFmode) *total = 5; /* 7 on VAX 2 */ else *total = 3; /* 4 on VAX 2 */ x = XEXP (x, 0); if (GET_CODE (x) != REG && GET_CODE (x) != POST_INC) *total += vax_address_cost_1 (x); return true; case FLOAT_EXTEND: case FLOAT_TRUNCATE: case TRUNCATE: *total = 3; /* FIXME: Costs need to be checked */ break; default: return false; } /* Now look inside the expression. Operands which are not registers or short constants add to the cost. FMT and I may have been adjusted in the switch above for instructions which require special handling. */ while (*fmt++ == 'e') { rtx op = XEXP (x, i); i += 1; code = GET_CODE (op); /* A NOT is likely to be found as the first operand of an AND (in which case the relevant cost is of the operand inside the not) and not likely to be found anywhere else. */ if (code == NOT) op = XEXP (op, 0), code = GET_CODE (op); switch (code) { case CONST_INT: if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63 && GET_MODE (x) != QImode) *total += 1; /* 2 on VAX 2 */ break; case CONST: case LABEL_REF: case SYMBOL_REF: *total += 1; /* 2 on VAX 2 */ break; case CONST_DOUBLE: if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT) { /* Registers are faster than floating point constants -- even those constants which can be encoded in a single byte. */ if (vax_float_literal (op)) *total += 1; else *total += (GET_MODE (x) == DFmode) ? 3 : 2; } else { if (CONST_DOUBLE_HIGH (op) != 0 || (unsigned)CONST_DOUBLE_LOW (op) > 63) *total += 2; } break; case MEM: *total += 1; /* 2 on VAX 2 */ if (GET_CODE (XEXP (op, 0)) != REG) *total += vax_address_cost_1 (XEXP (op, 0)); break; case REG: case SUBREG: break; default: *total += 1; break; } } return true; }
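A worked example of the operand loop, traced from the code above rather than taken from VAX documentation:

/* (plus:SI (reg:SI n) (const_int 4)):
     PLUS sets *total = 8; 4 is inside the -63..63 literal window, so
     FMT is cut to "e" and only the REG operand is scanned; it adds
     nothing, leaving a cost of 8.
   (plus:SI (reg:SI n) (const_int 100)):
     *total starts at 8 and FMT stays "ee"; the REG adds nothing and
     the out-of-range CONST_INT adds 1, giving a cost of 9.  */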
void add_rtx (const_rtx x, hash &hstate) { enum rtx_code code; machine_mode mode; int i, j; const char *fmt; if (x == NULL_RTX) return; code = GET_CODE (x); hstate.add_object (code); mode = GET_MODE (x); hstate.add_object (mode); switch (code) { case REG: hstate.add_int (REGNO (x)); return; case CONST_INT: hstate.add_object (INTVAL (x)); return; case CONST_WIDE_INT: for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++) hstate.add_object (CONST_WIDE_INT_ELT (x, i)); return; case CONST_POLY_INT: for (i = 0; i < NUM_POLY_INT_COEFFS; ++i) hstate.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]); break; case SYMBOL_REF: if (XSTR (x, 0)) hstate.add (XSTR (x, 0), strlen (XSTR (x, 0)) + 1); return; case LABEL_REF: case DEBUG_EXPR: case VALUE: case SCRATCH: case CONST_DOUBLE: case CONST_FIXED: case DEBUG_IMPLICIT_PTR: case DEBUG_PARAMETER_REF: return; default: break; } fmt = GET_RTX_FORMAT (code); for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) switch (fmt[i]) { case 'w': hstate.add_hwi (XWINT (x, i)); break; case 'n': case 'i': hstate.add_int (XINT (x, i)); break; case 'p': hstate.add_poly_int (SUBREG_BYTE (x)); break; case 'V': case 'E': j = XVECLEN (x, i); hstate.add_int (j); for (j = 0; j < XVECLEN (x, i); j++) inchash::add_rtx (XVECEXP (x, i, j), hstate); break; case 'e': inchash::add_rtx (XEXP (x, i), hstate); break; case 'S': case 's': if (XSTR (x, i)) hstate.add (XSTR (x, i), strlen (XSTR (x, i)) + 1); break; default: break; } }
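inchash::add_rtx is the incremental counterpart of iterative_hash_rtx shown earlier; the accumulated state lives in an inchash::hash object and is finalized with end (). A minimal hedged sketch of the calling convention follows (hypothetical helper, assuming rtl.h and inchash.h).

/* Hypothetical caller: hash a pattern with the inchash API.  */
static hashval_t
hash_pattern (const_rtx pat)
{
  inchash::hash hstate;
  inchash::add_rtx (pat, hstate);
  return hstate.end ();
}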
void merge_memattrs (rtx x, rtx y) { int i; int j; enum rtx_code code; const char *fmt; if (x == y) return; if (x == 0 || y == 0) return; code = GET_CODE (x); if (code != GET_CODE (y)) return; if (GET_MODE (x) != GET_MODE (y)) return; if (code == MEM && MEM_ATTRS (x) != MEM_ATTRS (y)) { if (! MEM_ATTRS (x)) MEM_ATTRS (y) = 0; else if (! MEM_ATTRS (y)) MEM_ATTRS (x) = 0; else { rtx mem_size; if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y)) { set_mem_alias_set (x, 0); set_mem_alias_set (y, 0); } if (! mem_expr_equal_p (MEM_EXPR (x), MEM_EXPR (y))) { set_mem_expr (x, 0); set_mem_expr (y, 0); set_mem_offset (x, 0); set_mem_offset (y, 0); } else if (MEM_OFFSET (x) != MEM_OFFSET (y)) { set_mem_offset (x, 0); set_mem_offset (y, 0); } if (!MEM_SIZE (x)) mem_size = NULL_RTX; else if (!MEM_SIZE (y)) mem_size = NULL_RTX; else mem_size = GEN_INT (MAX (INTVAL (MEM_SIZE (x)), INTVAL (MEM_SIZE (y)))); set_mem_size (x, mem_size); set_mem_size (y, mem_size); set_mem_align (x, MIN (MEM_ALIGN (x), MEM_ALIGN (y))); set_mem_align (y, MEM_ALIGN (x)); } } fmt = GET_RTX_FORMAT (code); for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) { switch (fmt[i]) { case 'E': /* Two vectors must have the same length. */ if (XVECLEN (x, i) != XVECLEN (y, i)) return; for (j = 0; j < XVECLEN (x, i); j++) merge_memattrs (XVECEXP (x, i, j), XVECEXP (y, i, j)); break; case 'e': merge_memattrs (XEXP (x, i), XEXP (y, i)); } } return; }
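When cross-jumping finds two structurally equal MEMs whose recorded attributes disagree, merge_memattrs keeps only what both sides can guarantee. Below is a hedged illustration of one effect; the helper is hypothetical and assumes the usual emit-rtl/alias accessors (new_alias_set, set_mem_alias_set) and that both MEMs carry attributes.

/* Hypothetical illustration: conflicting alias sets collapse to
   alias set 0, which may conflict with anything.  */
static void
merge_memattrs_example (void)
{
  rtx a = gen_rtx_MEM (SImode, stack_pointer_rtx);
  rtx b = gen_rtx_MEM (SImode, stack_pointer_rtx);

  /* Give the two MEMs different alias sets.  */
  set_mem_alias_set (a, new_alias_set ());
  set_mem_alias_set (b, new_alias_set ());

  merge_memattrs (a, b);
  /* The disagreement is resolved to alias set 0 on both sides.  */
  gcc_assert (MEM_ALIAS_SET (a) == 0 && MEM_ALIAS_SET (b) == 0);
}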