static void
remove_constraints (rtx part)
{
  int i, j;
  const char *format_ptr;

  if (part == 0)
    return;

  if (GET_CODE (part) == MATCH_OPERAND)
    XSTR (part, 2) = "";
  else if (GET_CODE (part) == MATCH_SCRATCH)
    XSTR (part, 1) = "";

  format_ptr = GET_RTX_FORMAT (GET_CODE (part));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (part)); i++)
    switch (*format_ptr++)
      {
      case 'e':
      case 'u':
        remove_constraints (XEXP (part, i));
        break;
      case 'E':
        if (XVEC (part, i) != NULL)
          for (j = 0; j < XVECLEN (part, i); j++)
            remove_constraints (XVECEXP (part, i, j));
        break;
      }
}
static void
collect_insn_data (rtx pattern, int *palt, int *pmax)
{
  const char *fmt;
  enum rtx_code code;
  int i, j, len;

  code = GET_CODE (pattern);
  switch (code)
    {
    case MATCH_OPERAND:
      i = n_alternatives (XSTR (pattern, 2));
      *palt = (i > *palt ? i : *palt);
      /* Fall through.  */

    case MATCH_OPERATOR:
    case MATCH_SCRATCH:
    case MATCH_PARALLEL:
    case MATCH_INSN:
      i = XINT (pattern, 0);
      if (i > *pmax)
        *pmax = i;
      break;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  len = GET_RTX_LENGTH (code);
  for (i = 0; i < len; i++)
    {
      switch (fmt[i])
        {
        case 'e': case 'u':
          collect_insn_data (XEXP (pattern, i), palt, pmax);
          break;

        case 'V':
          if (XVEC (pattern, i) == NULL)
            break;
          /* Fall through.  */
        case 'E':
          for (j = XVECLEN (pattern, i) - 1; j >= 0; --j)
            collect_insn_data (XVECEXP (pattern, i, j), palt, pmax);
          break;

        case 'i': case 'w': case '0': case 's': case 'S': case 'T':
          break;

        default:
          abort ();
        }
    }
}
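/* Sketch only (hypothetical helper, not the definition actually used above):
   the MATCH_OPERAND case counts constraint alternatives, which in md syntax
   are comma-separated, so counting commas plus one gives the count,
   e.g. "r,m,i" -> 3 and "" -> 1.  */
static int
example_n_alternatives (const char *s)
{
  int n = 1;

  if (s)
    while (*s)
      n += (*s++ == ',');

  return n;
}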
rtx
gen_rtx_fmt_E_stat (RTX_CODE code, enum machine_mode mode,
                    rtvec arg0 MEM_STAT_DECL)
{
  rtx rt;

  rt = rtx_alloc_stat (code PASS_MEM_STAT);
  PUT_MODE (rt, mode);
  XVEC (rt, 0) = arg0;

  return rt;
}

rtx
gen_rtx_fmt_E (RTX_CODE code, enum machine_mode mode, rtvec arg0)
{
  rtx rt;

  rt = rtx_alloc (code);
  PUT_MODE (rt, mode);
  XVEC (rt, 0) = arg0;

  return rt;
}

rtx
gen_rtx_fmt_sE (RTX_CODE code, enum machine_mode mode, const char *arg0,
                rtvec arg1)
{
  rtx rt;

  rt = rtx_alloc (code);
  PUT_MODE (rt, mode);
  XSTR (rt, 0) = arg0;
  XVEC (rt, 1) = arg1;

  return rt;
}

rtx
gen_rtx_fmt_iE (RTX_CODE code, enum machine_mode mode, int arg0, rtvec arg1)
{
  rtx rt;

  rt = rtx_alloc (code);
  PUT_MODE (rt, mode);
  XINT (rt, 0) = arg0;
  XVEC (rt, 1) = arg1;

  return rt;
}

rtx
gen_rtx_fmt_ssiEEsi (RTX_CODE code, enum machine_mode mode, const char *arg0,
                     const char *arg1, int arg2, rtvec arg3, rtvec arg4,
                     const char *arg5, int arg6)
{
  rtx rt;

  rt = rtx_alloc (code);
  PUT_MODE (rt, mode);
  XSTR (rt, 0) = arg0;
  XSTR (rt, 1) = arg1;
  XINT (rt, 2) = arg2;
  XVEC (rt, 3) = arg3;
  XVEC (rt, 4) = arg4;
  XSTR (rt, 5) = arg5;
  XINT (rt, 6) = arg6;

  return rt;
}

rtx
gen_rtx_fmt_isE (RTX_CODE code, enum machine_mode mode, int arg0,
                 const char *arg1, rtvec arg2)
{
  rtx rt;

  rt = rtx_alloc (code);
  PUT_MODE (rt, mode);
  XINT (rt, 0) = arg0;
  XSTR (rt, 1) = arg1;
  XVEC (rt, 2) = arg2;

  return rt;
}
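/* Usage sketch (assumption, not part of the generated file): the trailing
   letters in each gen_rtx_fmt_* name spell out the operand slots ('s' string,
   'i' int, 'e' rtx, 'E' rtvec).  PARALLEL has format "E", so it is built with
   gen_rtx_fmt_E; SET0 and SET1 are hypothetical, previously built SET rtxes.  */
static rtx
example_build_parallel (rtx set0, rtx set1)
{
  /* Equivalent to gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1)).  */
  return gen_rtx_fmt_E (PARALLEL, VOIDmode, gen_rtvec (2, set0, set1));
}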
static void
gen_insn (rtx insn)
{
  int i;

  /* Walk the insn pattern to gather the #define's status.  */
  clobbers_seen_this_insn = 0;
  dup_operands_seen_this_insn = 0;
  if (XVEC (insn, 1) != 0)
    for (i = 0; i < XVECLEN (insn, 1); i++)
      walk_insn_part (XVECEXP (insn, 1, i), 1, 0);

  if (clobbers_seen_this_insn > max_clobbers_per_insn)
    max_clobbers_per_insn = clobbers_seen_this_insn;
  if (dup_operands_seen_this_insn > max_dup_operands)
    max_dup_operands = dup_operands_seen_this_insn;
}
/* Process MEMs in SET_DEST destinations.  We must not process this together
   with REG SET_DESTs, but must do it separately, lest when we see
     [(set (reg:SI foo) (bar)) (set (mem:SI (reg:SI foo)) (baz))]
   struct_equiv_block_eq could be misled into assuming that (reg:SI foo) is
   not live before this instruction.  */
static bool
set_dest_addr_equiv_p (rtx x, rtx y, struct equiv_info *info)
{
  enum rtx_code code = GET_CODE (x);
  int length;
  const char *format;
  int i;

  if (code != GET_CODE (y))
    return false;
  if (code == MEM)
    return rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info);

  /* Process subexpressions.  */
  length = GET_RTX_LENGTH (code);
  format = GET_RTX_FORMAT (code);

  for (i = 0; i < length; ++i)
    {
      switch (format[i])
        {
        case 'V':
        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return false;
          if (XVEC (x, i) != 0)
            {
              int j;
              for (j = 0; j < XVECLEN (x, i); ++j)
                {
                  if (! set_dest_addr_equiv_p (XVECEXP (x, i, j),
                                               XVECEXP (y, i, j), info))
                    return false;
                }
            }
          break;
        case 'e':
          if (! set_dest_addr_equiv_p (XEXP (x, i), XEXP (y, i), info))
            return false;
          break;
        default:
          break;
        }
    }
  return true;
}
rtx
gen_rtx_fmt_eEee0 (RTX_CODE code, enum machine_mode mode, rtx arg0,
                   rtvec arg1, rtx arg2, rtx arg3)
{
  rtx rt;

  rt = rtx_alloc (code);
  PUT_MODE (rt, mode);
  XEXP (rt, 0) = arg0;
  XVEC (rt, 1) = arg1;
  XEXP (rt, 2) = arg2;
  XEXP (rt, 3) = arg3;
  X0EXP (rt, 4) = NULL_RTX;

  return rt;
}
static void
gen_expand (rtx insn)
{
  int i;

  /* Walk the insn pattern to gather the #define's status.  */
  /* Note that we don't bother recording the number of MATCH_DUPs
     that occur in a gen_expand, because only reload cares about that.  */
  if (XVEC (insn, 1) != 0)
    for (i = 0; i < XVECLEN (insn, 1); i++)
      {
        /* Compute the maximum SETs and CLOBBERS
           in any one of the sub-insns;
           don't sum across all of them.  */
        clobbers_seen_this_insn = 0;

        walk_insn_part (XVECEXP (insn, 1, i), 0, 0);

        if (clobbers_seen_this_insn > max_clobbers_per_insn)
          max_clobbers_per_insn = clobbers_seen_this_insn;
      }
}
static void walk_insn_part (rtx part, int recog_p, int non_pc_set_src) { int i, j; RTX_CODE code; const char *format_ptr; if (part == 0) return; code = GET_CODE (part); switch (code) { case CLOBBER: clobbers_seen_this_insn++; break; case MATCH_OPERAND: if (XINT (part, 0) > max_recog_operands) max_recog_operands = XINT (part, 0); return; case MATCH_OP_DUP: case MATCH_PAR_DUP: ++dup_operands_seen_this_insn; /* FALLTHRU */ case MATCH_SCRATCH: case MATCH_PARALLEL: case MATCH_OPERATOR: if (XINT (part, 0) > max_recog_operands) max_recog_operands = XINT (part, 0); /* Now scan the rtl's in the vector inside the MATCH_OPERATOR or MATCH_PARALLEL. */ break; case LABEL_REF: if (GET_CODE (LABEL_REF_LABEL (part)) == MATCH_OPERAND || GET_CODE (LABEL_REF_LABEL (part)) == MATCH_DUP) break; return; case MATCH_DUP: ++dup_operands_seen_this_insn; if (XINT (part, 0) > max_recog_operands) max_recog_operands = XINT (part, 0); return; case CC0: if (recog_p) have_cc0_flag = 1; return; case LO_SUM: if (recog_p) have_lo_sum_flag = 1; return; case ROTATE: if (recog_p) have_rotate_flag = 1; return; case ROTATERT: if (recog_p) have_rotatert_flag = 1; return; case SET: walk_insn_part (SET_DEST (part), 0, recog_p); walk_insn_part (SET_SRC (part), recog_p, GET_CODE (SET_DEST (part)) != PC); return; case IF_THEN_ELSE: /* Only consider this machine as having a conditional move if the two arms of the IF_THEN_ELSE are both MATCH_OPERAND. Otherwise, we have some specific IF_THEN_ELSE construct (like the doz instruction on the RS/6000) that can't be used in the general context we want it for. */ if (recog_p && non_pc_set_src && GET_CODE (XEXP (part, 1)) == MATCH_OPERAND && GET_CODE (XEXP (part, 2)) == MATCH_OPERAND) have_cmove_flag = 1; break; case COND_EXEC: if (recog_p) have_cond_exec_flag = 1; break; case REG: case CONST_INT: case SYMBOL_REF: case PC: return; default: break; } format_ptr = GET_RTX_FORMAT (GET_CODE (part)); for (i = 0; i < GET_RTX_LENGTH (GET_CODE (part)); i++) switch (*format_ptr++) { case 'e': case 'u': walk_insn_part (XEXP (part, i), recog_p, non_pc_set_src); break; case 'E': if (XVEC (part, i) != NULL) for (j = 0; j < XVECLEN (part, i); j++) walk_insn_part (XVECEXP (part, i, j), recog_p, non_pc_set_src); break; } }
rtx copy_rtx (rtx orig) { rtx copy; int i, j; RTX_CODE code; const char *format_ptr; code = GET_CODE (orig); switch (code) { case REG: case DEBUG_EXPR: case VALUE: CASE_CONST_ANY: case SYMBOL_REF: case CODE_LABEL: case PC: case CC0: case RETURN: case SIMPLE_RETURN: case SCRATCH: /* SCRATCH must be shared because they represent distinct values. */ return orig; case CLOBBER: /* Share clobbers of hard registers (like cc0), but do not share pseudo reg clobbers or clobbers of hard registers that originated as pseudos. This is needed to allow safe register renaming. */ if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0))) return orig; break; case CLOBBER_HIGH: gcc_assert (REG_P (XEXP (orig, 0))); return orig; case CONST: if (shared_const_p (orig)) return orig; break; /* A MEM with a constant address is not sharable. The problem is that the constant address may need to be reloaded. If the mem is shared, then reloading one copy of this mem will cause all copies to appear to have been reloaded. */ default: break; } /* Copy the various flags, fields, and other information. We assume that all fields need copying, and then clear the fields that should not be copied. That is the sensible default behavior, and forces us to explicitly document why we are *not* copying a flag. */ copy = shallow_copy_rtx (orig); format_ptr = GET_RTX_FORMAT (GET_CODE (copy)); for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++) switch (*format_ptr++) { case 'e': if (XEXP (orig, i) != NULL) XEXP (copy, i) = copy_rtx (XEXP (orig, i)); break; case 'E': case 'V': if (XVEC (orig, i) != NULL) { XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i)); for (j = 0; j < XVECLEN (copy, i); j++) XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j)); } break; case 't': case 'w': case 'i': case 'p': case 's': case 'S': case 'T': case 'u': case 'B': case '0': /* These are left unchanged. */ break; default: gcc_unreachable (); } return copy; }
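/* Illustration only (hypothetical helper, assuming the usual rtl.h
   environment): copy_rtx shares leaf rtxes such as REG and CONST_INT but
   allocates a fresh node for the enclosing compound expression.  */
static void
example_copy_sharing (void)
{
  rtx reg  = gen_rtx_REG (SImode, 0);            /* register number 0 is arbitrary */
  rtx sum  = gen_rtx_PLUS (SImode, reg, GEN_INT (4));
  rtx copy = copy_rtx (sum);

  gcc_assert (copy != sum);            /* the PLUS node was duplicated */
  gcc_assert (XEXP (copy, 0) == reg);  /* the REG operand is still shared */
  gcc_assert (copy_rtx (reg) == reg);  /* copying a leaf returns it as-is */
}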
static void print_rtx (const_rtx in_rtx) { int i = 0; int j; const char *format_ptr; int is_insn; if (sawclose) { if (flag_simple) fputc (' ', outfile); else fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, ""); sawclose = 0; } if (in_rtx == 0) { fputs ("(nil)", outfile); sawclose = 1; return; } else if (GET_CODE (in_rtx) > NUM_RTX_CODE) { fprintf (outfile, "(??? bad code %d\n%s%*s)", GET_CODE (in_rtx), print_rtx_head, indent * 2, ""); sawclose = 1; return; } is_insn = INSN_P (in_rtx); /* Print name of expression code. */ if (flag_simple && CONST_INT_P (in_rtx)) fputc ('(', outfile); else fprintf (outfile, "(%s", GET_RTX_NAME (GET_CODE (in_rtx))); if (! flag_simple) { if (RTX_FLAG (in_rtx, in_struct)) fputs ("/s", outfile); if (RTX_FLAG (in_rtx, volatil)) fputs ("/v", outfile); if (RTX_FLAG (in_rtx, unchanging)) fputs ("/u", outfile); if (RTX_FLAG (in_rtx, frame_related)) fputs ("/f", outfile); if (RTX_FLAG (in_rtx, jump)) fputs ("/j", outfile); if (RTX_FLAG (in_rtx, call)) fputs ("/c", outfile); if (RTX_FLAG (in_rtx, return_val)) fputs ("/i", outfile); /* Print REG_NOTE names for EXPR_LIST and INSN_LIST. */ if ((GET_CODE (in_rtx) == EXPR_LIST || GET_CODE (in_rtx) == INSN_LIST || GET_CODE (in_rtx) == INT_LIST) && (int)GET_MODE (in_rtx) < REG_NOTE_MAX) fprintf (outfile, ":%s", GET_REG_NOTE_NAME (GET_MODE (in_rtx))); /* For other rtl, print the mode if it's not VOID. */ else if (GET_MODE (in_rtx) != VOIDmode) fprintf (outfile, ":%s", GET_MODE_NAME (GET_MODE (in_rtx))); #ifndef GENERATOR_FILE if (GET_CODE (in_rtx) == VAR_LOCATION) { if (TREE_CODE (PAT_VAR_LOCATION_DECL (in_rtx)) == STRING_CST) fputs (" <debug string placeholder>", outfile); else print_mem_expr (outfile, PAT_VAR_LOCATION_DECL (in_rtx)); fputc (' ', outfile); print_rtx (PAT_VAR_LOCATION_LOC (in_rtx)); if (PAT_VAR_LOCATION_STATUS (in_rtx) == VAR_INIT_STATUS_UNINITIALIZED) fprintf (outfile, " [uninit]"); sawclose = 1; i = GET_RTX_LENGTH (VAR_LOCATION); } #endif } #ifndef GENERATOR_FILE if (CONST_DOUBLE_AS_FLOAT_P (in_rtx)) i = 5; #endif /* Get the format string and skip the first elements if we have handled them already. */ format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx)) + i; for (; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++) switch (*format_ptr++) { const char *str; case 'T': str = XTMPL (in_rtx, i); goto string; case 'S': case 's': str = XSTR (in_rtx, i); string: if (str == 0) fputs (" \"\"", outfile); else fprintf (outfile, " (\"%s\")", str); sawclose = 1; break; /* 0 indicates a field for internal use that should not be printed. An exception is the third field of a NOTE, where it indicates that the field has several different valid contents. 
*/ case '0': if (i == 1 && REG_P (in_rtx)) { if (REGNO (in_rtx) != ORIGINAL_REGNO (in_rtx)) fprintf (outfile, " [%d]", ORIGINAL_REGNO (in_rtx)); } #ifndef GENERATOR_FILE else if (i == 1 && GET_CODE (in_rtx) == SYMBOL_REF) { int flags = SYMBOL_REF_FLAGS (in_rtx); if (flags) fprintf (outfile, " [flags %#x]", flags); } else if (i == 2 && GET_CODE (in_rtx) == SYMBOL_REF) { tree decl = SYMBOL_REF_DECL (in_rtx); if (decl) print_node_brief (outfile, "", decl, dump_flags); } #endif else if (i == 4 && NOTE_P (in_rtx)) { switch (NOTE_KIND (in_rtx)) { case NOTE_INSN_EH_REGION_BEG: case NOTE_INSN_EH_REGION_END: if (flag_dump_unnumbered) fprintf (outfile, " #"); else fprintf (outfile, " %d", NOTE_EH_HANDLER (in_rtx)); sawclose = 1; break; case NOTE_INSN_BLOCK_BEG: case NOTE_INSN_BLOCK_END: #ifndef GENERATOR_FILE dump_addr (outfile, " ", NOTE_BLOCK (in_rtx)); #endif sawclose = 1; break; case NOTE_INSN_BASIC_BLOCK: { #ifndef GENERATOR_FILE basic_block bb = NOTE_BASIC_BLOCK (in_rtx); if (bb != 0) fprintf (outfile, " [bb %d]", bb->index); #endif break; } case NOTE_INSN_DELETED_LABEL: case NOTE_INSN_DELETED_DEBUG_LABEL: { const char *label = NOTE_DELETED_LABEL_NAME (in_rtx); if (label) fprintf (outfile, " (\"%s\")", label); else fprintf (outfile, " \"\""); } break; case NOTE_INSN_SWITCH_TEXT_SECTIONS: { #ifndef GENERATOR_FILE basic_block bb = NOTE_BASIC_BLOCK (in_rtx); if (bb != 0) fprintf (outfile, " [bb %d]", bb->index); #endif break; } case NOTE_INSN_VAR_LOCATION: case NOTE_INSN_CALL_ARG_LOCATION: #ifndef GENERATOR_FILE fputc (' ', outfile); print_rtx (NOTE_VAR_LOCATION (in_rtx)); #endif break; case NOTE_INSN_CFI: #ifndef GENERATOR_FILE fputc ('\n', outfile); output_cfi_directive (outfile, NOTE_CFI (in_rtx)); fputc ('\t', outfile); #endif break; default: break; } } else if (i == 8 && JUMP_P (in_rtx) && JUMP_LABEL (in_rtx) != NULL) { /* Output the JUMP_LABEL reference. */ fprintf (outfile, "\n%s%*s -> ", print_rtx_head, indent * 2, ""); if (GET_CODE (JUMP_LABEL (in_rtx)) == RETURN) fprintf (outfile, "return"); else if (GET_CODE (JUMP_LABEL (in_rtx)) == SIMPLE_RETURN) fprintf (outfile, "simple_return"); else fprintf (outfile, "%d", INSN_UID (JUMP_LABEL (in_rtx))); } else if (i == 0 && GET_CODE (in_rtx) == VALUE) { #ifndef GENERATOR_FILE cselib_val *val = CSELIB_VAL_PTR (in_rtx); fprintf (outfile, " %u:%u", val->uid, val->hash); dump_addr (outfile, " @", in_rtx); dump_addr (outfile, "/", (void*)val); #endif } else if (i == 0 && GET_CODE (in_rtx) == DEBUG_EXPR) { #ifndef GENERATOR_FILE fprintf (outfile, " D#%i", DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (in_rtx))); #endif } else if (i == 0 && GET_CODE (in_rtx) == ENTRY_VALUE) { indent += 2; if (!sawclose) fprintf (outfile, " "); print_rtx (ENTRY_VALUE_EXP (in_rtx)); indent -= 2; } break; case 'e': do_e: indent += 2; if (i == 7 && INSN_P (in_rtx)) /* Put REG_NOTES on their own line. */ fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, ""); if (!sawclose) fprintf (outfile, " "); print_rtx (XEXP (in_rtx, i)); indent -= 2; break; case 'E': case 'V': indent += 2; if (sawclose) { fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, ""); sawclose = 0; } fputs (" [", outfile); if (NULL != XVEC (in_rtx, i)) { indent += 2; if (XVECLEN (in_rtx, i)) sawclose = 1; for (j = 0; j < XVECLEN (in_rtx, i); j++) print_rtx (XVECEXP (in_rtx, i, j)); indent -= 2; } if (sawclose) fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, ""); fputs ("]", outfile); sawclose = 1; indent -= 2; break; case 'w': if (! 
flag_simple) fprintf (outfile, " "); fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, XWINT (in_rtx, i)); if (! flag_simple) fprintf (outfile, " [" HOST_WIDE_INT_PRINT_HEX "]", (unsigned HOST_WIDE_INT) XWINT (in_rtx, i)); break; case 'i': if (i == 5 && INSN_P (in_rtx)) { #ifndef GENERATOR_FILE /* Pretty-print insn locations. Ignore scoping as it is mostly redundant with line number information and do not print anything when there is no location information available. */ if (INSN_LOCATION (in_rtx) && insn_file (in_rtx)) fprintf(outfile, " %s:%i", insn_file (in_rtx), insn_line (in_rtx)); #endif } else if (i == 6 && GET_CODE (in_rtx) == ASM_OPERANDS) { #ifndef GENERATOR_FILE fprintf (outfile, " %s:%i", LOCATION_FILE (ASM_OPERANDS_SOURCE_LOCATION (in_rtx)), LOCATION_LINE (ASM_OPERANDS_SOURCE_LOCATION (in_rtx))); #endif } else if (i == 1 && GET_CODE (in_rtx) == ASM_INPUT) { #ifndef GENERATOR_FILE fprintf (outfile, " %s:%i", LOCATION_FILE (ASM_INPUT_SOURCE_LOCATION (in_rtx)), LOCATION_LINE (ASM_INPUT_SOURCE_LOCATION (in_rtx))); #endif } else if (i == 6 && NOTE_P (in_rtx)) { /* This field is only used for NOTE_INSN_DELETED_LABEL, and other times often contains garbage from INSN->NOTE death. */ if (NOTE_KIND (in_rtx) == NOTE_INSN_DELETED_LABEL || NOTE_KIND (in_rtx) == NOTE_INSN_DELETED_DEBUG_LABEL) fprintf (outfile, " %d", XINT (in_rtx, i)); } #if !defined(GENERATOR_FILE) && NUM_UNSPECV_VALUES > 0 else if (i == 1 && GET_CODE (in_rtx) == UNSPEC_VOLATILE && XINT (in_rtx, 1) >= 0 && XINT (in_rtx, 1) < NUM_UNSPECV_VALUES) fprintf (outfile, " %s", unspecv_strings[XINT (in_rtx, 1)]); #endif #if !defined(GENERATOR_FILE) && NUM_UNSPEC_VALUES > 0 else if (i == 1 && (GET_CODE (in_rtx) == UNSPEC || GET_CODE (in_rtx) == UNSPEC_VOLATILE) && XINT (in_rtx, 1) >= 0 && XINT (in_rtx, 1) < NUM_UNSPEC_VALUES) fprintf (outfile, " %s", unspec_strings[XINT (in_rtx, 1)]); #endif else { int value = XINT (in_rtx, i); const char *name; #ifndef GENERATOR_FILE if (REG_P (in_rtx) && (unsigned) value < FIRST_PSEUDO_REGISTER) fprintf (outfile, " %d %s", value, reg_names[value]); else if (REG_P (in_rtx) && (unsigned) value <= LAST_VIRTUAL_REGISTER) { if (value == VIRTUAL_INCOMING_ARGS_REGNUM) fprintf (outfile, " %d virtual-incoming-args", value); else if (value == VIRTUAL_STACK_VARS_REGNUM) fprintf (outfile, " %d virtual-stack-vars", value); else if (value == VIRTUAL_STACK_DYNAMIC_REGNUM) fprintf (outfile, " %d virtual-stack-dynamic", value); else if (value == VIRTUAL_OUTGOING_ARGS_REGNUM) fprintf (outfile, " %d virtual-outgoing-args", value); else if (value == VIRTUAL_CFA_REGNUM) fprintf (outfile, " %d virtual-cfa", value); else if (value == VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM) fprintf (outfile, " %d virtual-preferred-stack-boundary", value); else fprintf (outfile, " %d virtual-reg-%d", value, value-FIRST_VIRTUAL_REGISTER); } else #endif if (flag_dump_unnumbered && (is_insn || NOTE_P (in_rtx))) fputc ('#', outfile); else fprintf (outfile, " %d", value); #ifndef GENERATOR_FILE if (REG_P (in_rtx) && REG_ATTRS (in_rtx)) { fputs (" [", outfile); if (ORIGINAL_REGNO (in_rtx) != REGNO (in_rtx)) fprintf (outfile, "orig:%i", ORIGINAL_REGNO (in_rtx)); if (REG_EXPR (in_rtx)) print_mem_expr (outfile, REG_EXPR (in_rtx)); if (REG_OFFSET (in_rtx)) fprintf (outfile, "+" HOST_WIDE_INT_PRINT_DEC, REG_OFFSET (in_rtx)); fputs (" ]", outfile); } #endif if (is_insn && &INSN_CODE (in_rtx) == &XINT (in_rtx, i) && XINT (in_rtx, i) >= 0 && (name = get_insn_name (XINT (in_rtx, i))) != NULL) fprintf (outfile, " {%s}", name); sawclose = 0; } break; /* Print 
NOTE_INSN names rather than integer codes. */ case 'n': fprintf (outfile, " %s", GET_NOTE_INSN_NAME (XINT (in_rtx, i))); sawclose = 0; break; case 'u': if (XEXP (in_rtx, i) != NULL) { rtx sub = XEXP (in_rtx, i); enum rtx_code subc = GET_CODE (sub); if (GET_CODE (in_rtx) == LABEL_REF) { if (subc == NOTE && NOTE_KIND (sub) == NOTE_INSN_DELETED_LABEL) { if (flag_dump_unnumbered) fprintf (outfile, " [# deleted]"); else fprintf (outfile, " [%d deleted]", INSN_UID (sub)); sawclose = 0; break; } if (subc != CODE_LABEL) goto do_e; } if (flag_dump_unnumbered || (flag_dump_unnumbered_links && (i == 1 || i == 2) && (INSN_P (in_rtx) || NOTE_P (in_rtx) || LABEL_P (in_rtx) || BARRIER_P (in_rtx)))) fputs (" #", outfile); else fprintf (outfile, " %d", INSN_UID (sub)); } else fputs (" 0", outfile); sawclose = 0; break; case 't': #ifndef GENERATOR_FILE if (i == 0 && GET_CODE (in_rtx) == DEBUG_IMPLICIT_PTR) print_mem_expr (outfile, DEBUG_IMPLICIT_PTR_DECL (in_rtx)); else if (i == 0 && GET_CODE (in_rtx) == DEBUG_PARAMETER_REF) print_mem_expr (outfile, DEBUG_PARAMETER_REF_DECL (in_rtx)); else dump_addr (outfile, " ", XTREE (in_rtx, i)); #endif break; case '*': fputs (" Unknown", outfile); sawclose = 0; break; case 'B': #ifndef GENERATOR_FILE if (XBBDEF (in_rtx, i)) fprintf (outfile, " %i", XBBDEF (in_rtx, i)->index); #endif break; default: gcc_unreachable (); } switch (GET_CODE (in_rtx)) { #ifndef GENERATOR_FILE case MEM: if (__builtin_expect (final_insns_dump_p, false)) fprintf (outfile, " ["); else fprintf (outfile, " [" HOST_WIDE_INT_PRINT_DEC, (HOST_WIDE_INT) MEM_ALIAS_SET (in_rtx)); if (MEM_EXPR (in_rtx)) print_mem_expr (outfile, MEM_EXPR (in_rtx)); if (MEM_OFFSET_KNOWN_P (in_rtx)) fprintf (outfile, "+" HOST_WIDE_INT_PRINT_DEC, MEM_OFFSET (in_rtx)); if (MEM_SIZE_KNOWN_P (in_rtx)) fprintf (outfile, " S" HOST_WIDE_INT_PRINT_DEC, MEM_SIZE (in_rtx)); if (MEM_ALIGN (in_rtx) != 1) fprintf (outfile, " A%u", MEM_ALIGN (in_rtx)); if (!ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (in_rtx))) fprintf (outfile, " AS%u", MEM_ADDR_SPACE (in_rtx)); fputc (']', outfile); break; case CONST_DOUBLE: if (FLOAT_MODE_P (GET_MODE (in_rtx))) { char s[60]; real_to_decimal (s, CONST_DOUBLE_REAL_VALUE (in_rtx), sizeof (s), 0, 1); fprintf (outfile, " %s", s); real_to_hexadecimal (s, CONST_DOUBLE_REAL_VALUE (in_rtx), sizeof (s), 0, 1); fprintf (outfile, " [%s]", s); } break; #endif case CODE_LABEL: fprintf (outfile, " [%d uses]", LABEL_NUSES (in_rtx)); switch (LABEL_KIND (in_rtx)) { case LABEL_NORMAL: break; case LABEL_STATIC_ENTRY: fputs (" [entry]", outfile); break; case LABEL_GLOBAL_ENTRY: fputs (" [global entry]", outfile); break; case LABEL_WEAK_ENTRY: fputs (" [weak entry]", outfile); break; default: gcc_unreachable (); } break; default: break; } fputc (')', outfile); sawclose = 1; }
rtx copy_rtx (rtx orig) { rtx copy; int i, j; RTX_CODE code; const char *format_ptr; code = GET_CODE (orig); switch (code) { case REG: case DEBUG_EXPR: case VALUE: case CONST_INT: case CONST_DOUBLE: case CONST_FIXED: case CONST_VECTOR: case SYMBOL_REF: case CODE_LABEL: case PC: case CC0: case RETURN: case SIMPLE_RETURN: case SCRATCH: /* SCRATCH must be shared because they represent distinct values. */ return orig; case CLOBBER: if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER) return orig; break; case CONST: if (shared_const_p (orig)) return orig; break; /* A MEM with a constant address is not sharable. The problem is that the constant address may need to be reloaded. If the mem is shared, then reloading one copy of this mem will cause all copies to appear to have been reloaded. */ default: break; } /* Copy the various flags, fields, and other information. We assume that all fields need copying, and then clear the fields that should not be copied. That is the sensible default behavior, and forces us to explicitly document why we are *not* copying a flag. */ copy = shallow_copy_rtx (orig); /* We do not copy the USED flag, which is used as a mark bit during walks over the RTL. */ RTX_FLAG (copy, used) = 0; format_ptr = GET_RTX_FORMAT (GET_CODE (copy)); for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++) switch (*format_ptr++) { case 'e': if (XEXP (orig, i) != NULL) XEXP (copy, i) = copy_rtx (XEXP (orig, i)); break; case 'E': case 'V': if (XVEC (orig, i) != NULL) { XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i)); for (j = 0; j < XVECLEN (copy, i); j++) XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j)); } break; case 't': case 'w': case 'i': case 's': case 'S': case 'T': case 'u': case 'B': case '0': /* These are left unchanged. */ break; default: gcc_unreachable (); } return copy; }
/* test bbox computations */

#define XSCALE     65536
#define XX(x)      ((FT_Pos)(x*XSCALE))
#define XVEC(x,y)  { XX(x), XX(y) }
#define XVAL(x)    ((x)/(1.0*XSCALE))

/* dummy outline #1 */
static FT_Vector  dummy_vec_1[4] =
{
#if 1
  XVEC( 408.9111, 535.3164 ),
  XVEC( 455.8887, 634.396  ),
  XVEC( -37.8765, 786.2207 ),
  XVEC( 164.6074, 535.3164 )
#else
  { (FT_Int32)0x0198E93DL, (FT_Int32)0x021750FFL },  /* 408.9111, 535.3164 */
  { (FT_Int32)0x01C7E312L, (FT_Int32)0x027A6560L },  /* 455.8887, 634.3960 */
  { (FT_Int32)0xFFDA1F9EL, (FT_Int32)0x0312387FL },  /* -37.8765, 786.2207 */
  { (FT_Int32)0x00A49B7EL, (FT_Int32)0x021750FFL }   /* 164.6074, 535.3164 */
#endif
};

static char  dummy_tag_1[4] =
{
  FT_CURVE_TAG_ON,
  FT_CURVE_TAG_CUBIC,
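/* Usage sketch (assumption, not part of the test file as shown): the point
   and tag arrays above would be wrapped in an FT_Outline and handed to
   FT_Outline_Get_BBox.  This assumes dummy_tag_1 is completed for all four
   points and that the four points form a single closed contour (contour-end
   index 3); the helper name is hypothetical.  */
#include <stdio.h>
#include <ft2build.h>
#include FT_FREETYPE_H
#include FT_BBOX_H

static void
example_dump_bbox_1 (void)
{
  short       ends[1] = { 3 };   /* one contour, ending at point index 3 */
  FT_Outline  outline;
  FT_BBox     bbox;

  outline.n_contours = 1;
  outline.n_points   = 4;
  outline.points     = dummy_vec_1;
  outline.tags       = dummy_tag_1;
  outline.contours   = ends;
  outline.flags      = 0;

  if (FT_Outline_Get_BBox (&outline, &bbox) == 0)
    printf ("bbox: [%g %g %g %g]\n",
            XVAL (bbox.xMin), XVAL (bbox.yMin),
            XVAL (bbox.xMax), XVAL (bbox.yMax));
}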
static void print_rtx (rtx in_rtx) { int i = 0; int j; const char *format_ptr; int is_insn; if (sawclose) { if (flag_simple) fputc (' ', outfile); else fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, ""); sawclose = 0; } if (in_rtx == 0) { fputs ("(nil)", outfile); sawclose = 1; return; } else if (GET_CODE (in_rtx) > NUM_RTX_CODE) { fprintf (outfile, "(??? bad code %d\n)", GET_CODE (in_rtx)); sawclose = 1; return; } is_insn = INSN_P (in_rtx); /* When printing in VCG format we write INSNs, NOTE, LABEL, and BARRIER in separate nodes and therefore have to handle them special here. */ if (dump_for_graph && (is_insn || NOTE_P (in_rtx) || LABEL_P (in_rtx) || BARRIER_P (in_rtx))) { i = 3; indent = 0; } else { /* Print name of expression code. */ if (flag_simple && GET_CODE (in_rtx) == CONST_INT) fputc ('(', outfile); else fprintf (outfile, "(%s", GET_RTX_NAME (GET_CODE (in_rtx))); if (! flag_simple) { if (RTX_FLAG (in_rtx, in_struct)) fputs ("/s", outfile); if (RTX_FLAG (in_rtx, volatil)) fputs ("/v", outfile); if (RTX_FLAG (in_rtx, unchanging)) fputs ("/u", outfile); if (RTX_FLAG (in_rtx, frame_related)) fputs ("/f", outfile); if (RTX_FLAG (in_rtx, jump)) fputs ("/j", outfile); if (RTX_FLAG (in_rtx, call)) fputs ("/c", outfile); if (RTX_FLAG (in_rtx, return_val)) fputs ("/i", outfile); /* Print REG_NOTE names for EXPR_LIST and INSN_LIST. */ if (GET_CODE (in_rtx) == EXPR_LIST || GET_CODE (in_rtx) == INSN_LIST) fprintf (outfile, ":%s", GET_REG_NOTE_NAME (GET_MODE (in_rtx))); /* For other rtl, print the mode if it's not VOID. */ else if (GET_MODE (in_rtx) != VOIDmode) fprintf (outfile, ":%s", GET_MODE_NAME (GET_MODE (in_rtx))); } } #ifndef GENERATOR_FILE if (GET_CODE (in_rtx) == CONST_DOUBLE && FLOAT_MODE_P (GET_MODE (in_rtx))) i = 5; #endif /* Get the format string and skip the first elements if we have handled them already. */ format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx)) + i; for (; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++) switch (*format_ptr++) { const char *str; case 'T': str = XTMPL (in_rtx, i); goto string; case 'S': case 's': str = XSTR (in_rtx, i); string: if (str == 0) fputs (dump_for_graph ? " \\\"\\\"" : " \"\"", outfile); else { if (dump_for_graph) fprintf (outfile, " (\\\"%s\\\")", str); else fprintf (outfile, " (\"%s\")", str); } sawclose = 1; break; /* 0 indicates a field for internal use that should not be printed. An exception is the third field of a NOTE, where it indicates that the field has several different valid contents. 
*/ case '0': if (i == 1 && REG_P (in_rtx)) { if (REGNO (in_rtx) != ORIGINAL_REGNO (in_rtx)) fprintf (outfile, " [%d]", ORIGINAL_REGNO (in_rtx)); } #ifndef GENERATOR_FILE else if (i == 1 && GET_CODE (in_rtx) == SYMBOL_REF) { int flags = SYMBOL_REF_FLAGS (in_rtx); if (flags) fprintf (outfile, " [flags 0x%x]", flags); } else if (i == 2 && GET_CODE (in_rtx) == SYMBOL_REF) { tree decl = SYMBOL_REF_DECL (in_rtx); if (decl) print_node_brief (outfile, "", decl, 0); } #endif else if (i == 4 && NOTE_P (in_rtx)) { switch (NOTE_LINE_NUMBER (in_rtx)) { case NOTE_INSN_EH_REGION_BEG: case NOTE_INSN_EH_REGION_END: if (flag_dump_unnumbered) fprintf (outfile, " #"); else fprintf (outfile, " %d", NOTE_EH_HANDLER (in_rtx)); sawclose = 1; break; case NOTE_INSN_BLOCK_BEG: case NOTE_INSN_BLOCK_END: #ifndef GENERATOR_FILE dump_addr (outfile, " ", NOTE_BLOCK (in_rtx)); #endif sawclose = 1; break; case NOTE_INSN_BASIC_BLOCK: { #ifndef GENERATOR_FILE basic_block bb = NOTE_BASIC_BLOCK (in_rtx); if (bb != 0) fprintf (outfile, " [bb %d]", bb->index); #endif break; } case NOTE_INSN_EXPECTED_VALUE: indent += 2; if (!sawclose) fprintf (outfile, " "); print_rtx (NOTE_EXPECTED_VALUE (in_rtx)); indent -= 2; break; case NOTE_INSN_DELETED_LABEL: { const char *label = NOTE_DELETED_LABEL_NAME (in_rtx); if (label) fprintf (outfile, " (\"%s\")", label); else fprintf (outfile, " \"\""); } break; case NOTE_INSN_SWITCH_TEXT_SECTIONS: { #ifndef GENERATOR_FILE basic_block bb = NOTE_BASIC_BLOCK (in_rtx); if (bb != 0) fprintf (outfile, " [bb %d]", bb->index); #endif break; } case NOTE_INSN_VAR_LOCATION: #ifndef GENERATOR_FILE fprintf (outfile, " ("); print_mem_expr (outfile, NOTE_VAR_LOCATION_DECL (in_rtx)); fprintf (outfile, " "); print_rtx (NOTE_VAR_LOCATION_LOC (in_rtx)); fprintf (outfile, ")"); #endif break; default: { const char * const str = X0STR (in_rtx, i); if (NOTE_LINE_NUMBER (in_rtx) < 0) ; else if (str == 0) fputs (dump_for_graph ? " \\\"\\\"" : " \"\"", outfile); else { if (dump_for_graph) fprintf (outfile, " (\\\"%s\\\")", str); else fprintf (outfile, " (\"%s\")", str); } break; } } } break; case 'e': do_e: indent += 2; if (!sawclose) fprintf (outfile, " "); print_rtx (XEXP (in_rtx, i)); indent -= 2; break; case 'E': case 'V': indent += 2; if (sawclose) { fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, ""); sawclose = 0; } fputs (" [", outfile); if (NULL != XVEC (in_rtx, i)) { indent += 2; if (XVECLEN (in_rtx, i)) sawclose = 1; for (j = 0; j < XVECLEN (in_rtx, i); j++) print_rtx (XVECEXP (in_rtx, i, j)); indent -= 2; } if (sawclose) fprintf (outfile, "\n%s%*s", print_rtx_head, indent * 2, ""); fputs ("]", outfile); sawclose = 1; indent -= 2; break; case 'w': if (! flag_simple) fprintf (outfile, " "); fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, XWINT (in_rtx, i)); if (! flag_simple) fprintf (outfile, " [" HOST_WIDE_INT_PRINT_HEX "]", XWINT (in_rtx, i)); break; case 'i': if (i == 4 && INSN_P (in_rtx)) { #ifndef GENERATOR_FILE /* Pretty-print insn locators. Ignore scoping as it is mostly redundant with line number information and do not print anything when there is no location information available. */ if (INSN_LOCATOR (in_rtx) && insn_file (in_rtx)) fprintf(outfile, " %s:%i", insn_file (in_rtx), insn_line (in_rtx)); #endif } else if (i == 6 && NOTE_P (in_rtx)) { /* This field is only used for NOTE_INSN_DELETED_LABEL, and other times often contains garbage from INSN->NOTE death. 
*/ if (NOTE_LINE_NUMBER (in_rtx) == NOTE_INSN_DELETED_LABEL) fprintf (outfile, " %d", XINT (in_rtx, i)); } else { int value = XINT (in_rtx, i); const char *name; #ifndef GENERATOR_FILE if (REG_P (in_rtx) && value < FIRST_PSEUDO_REGISTER) fprintf (outfile, " %d %s", REGNO (in_rtx), reg_names[REGNO (in_rtx)]); else if (REG_P (in_rtx) && value <= LAST_VIRTUAL_REGISTER) { if (value == VIRTUAL_INCOMING_ARGS_REGNUM) fprintf (outfile, " %d virtual-incoming-args", value); else if (value == VIRTUAL_STACK_VARS_REGNUM) fprintf (outfile, " %d virtual-stack-vars", value); else if (value == VIRTUAL_STACK_DYNAMIC_REGNUM) fprintf (outfile, " %d virtual-stack-dynamic", value); else if (value == VIRTUAL_OUTGOING_ARGS_REGNUM) fprintf (outfile, " %d virtual-outgoing-args", value); else if (value == VIRTUAL_CFA_REGNUM) fprintf (outfile, " %d virtual-cfa", value); else fprintf (outfile, " %d virtual-reg-%d", value, value-FIRST_VIRTUAL_REGISTER); } else #endif if (flag_dump_unnumbered && (is_insn || NOTE_P (in_rtx))) fputc ('#', outfile); else fprintf (outfile, " %d", value); #ifndef GENERATOR_FILE if (REG_P (in_rtx) && REG_ATTRS (in_rtx)) { fputs (" [", outfile); if (ORIGINAL_REGNO (in_rtx) != REGNO (in_rtx)) fprintf (outfile, "orig:%i", ORIGINAL_REGNO (in_rtx)); if (REG_EXPR (in_rtx)) print_mem_expr (outfile, REG_EXPR (in_rtx)); if (REG_OFFSET (in_rtx)) fprintf (outfile, "+" HOST_WIDE_INT_PRINT_DEC, REG_OFFSET (in_rtx)); fputs (" ]", outfile); } #endif if (is_insn && &INSN_CODE (in_rtx) == &XINT (in_rtx, i) && XINT (in_rtx, i) >= 0 && (name = get_insn_name (XINT (in_rtx, i))) != NULL) fprintf (outfile, " {%s}", name); sawclose = 0; } break; /* Print NOTE_INSN names rather than integer codes. */ case 'n': if (XINT (in_rtx, i) >= (int) NOTE_INSN_BIAS && XINT (in_rtx, i) < (int) NOTE_INSN_MAX) fprintf (outfile, " %s", GET_NOTE_INSN_NAME (XINT (in_rtx, i))); else fprintf (outfile, " %d", XINT (in_rtx, i)); sawclose = 0; break; case 'u': if (XEXP (in_rtx, i) != NULL) { rtx sub = XEXP (in_rtx, i); enum rtx_code subc = GET_CODE (sub); if (GET_CODE (in_rtx) == LABEL_REF) { if (subc == NOTE && NOTE_LINE_NUMBER (sub) == NOTE_INSN_DELETED_LABEL) { if (flag_dump_unnumbered) fprintf (outfile, " [# deleted]"); else fprintf (outfile, " [%d deleted]", INSN_UID (sub)); sawclose = 0; break; } if (subc != CODE_LABEL) goto do_e; } if (flag_dump_unnumbered) fputs (" #", outfile); else fprintf (outfile, " %d", INSN_UID (sub)); } else fputs (" 0", outfile); sawclose = 0; break; case 'b': #ifndef GENERATOR_FILE if (XBITMAP (in_rtx, i) == NULL) fputs (" {null}", outfile); else bitmap_print (outfile, XBITMAP (in_rtx, i), " {", "}"); #endif sawclose = 0; break; case 't': #ifndef GENERATOR_FILE dump_addr (outfile, " ", XTREE (in_rtx, i)); #endif break; case '*': fputs (" Unknown", outfile); sawclose = 0; break; case 'B': #ifndef GENERATOR_FILE if (XBBDEF (in_rtx, i)) fprintf (outfile, " %i", XBBDEF (in_rtx, i)->index); #endif break; default: gcc_unreachable (); } switch (GET_CODE (in_rtx)) { #ifndef GENERATOR_FILE case MEM: fprintf (outfile, " [" HOST_WIDE_INT_PRINT_DEC, MEM_ALIAS_SET (in_rtx)); if (MEM_EXPR (in_rtx)) print_mem_expr (outfile, MEM_EXPR (in_rtx)); if (MEM_OFFSET (in_rtx)) fprintf (outfile, "+" HOST_WIDE_INT_PRINT_DEC, INTVAL (MEM_OFFSET (in_rtx))); if (MEM_SIZE (in_rtx)) fprintf (outfile, " S" HOST_WIDE_INT_PRINT_DEC, INTVAL (MEM_SIZE (in_rtx))); if (MEM_ALIGN (in_rtx) != 1) fprintf (outfile, " A%u", MEM_ALIGN (in_rtx)); fputc (']', outfile); break; case CONST_DOUBLE: if (FLOAT_MODE_P (GET_MODE (in_rtx))) { char s[60]; 
real_to_decimal (s, CONST_DOUBLE_REAL_VALUE (in_rtx), sizeof (s), 0, 1); fprintf (outfile, " %s", s); real_to_hexadecimal (s, CONST_DOUBLE_REAL_VALUE (in_rtx), sizeof (s), 0, 1); fprintf (outfile, " [%s]", s); } break; #endif case CODE_LABEL: fprintf (outfile, " [%d uses]", LABEL_NUSES (in_rtx)); switch (LABEL_KIND (in_rtx)) { case LABEL_NORMAL: break; case LABEL_STATIC_ENTRY: fputs (" [entry]", outfile); break; case LABEL_GLOBAL_ENTRY: fputs (" [global entry]", outfile); break; case LABEL_WEAK_ENTRY: fputs (" [weak entry]", outfile); break; default: gcc_unreachable (); } break; default: break; } if (dump_for_graph && (is_insn || NOTE_P (in_rtx) || LABEL_P (in_rtx) || BARRIER_P (in_rtx))) sawclose = 0; else { fputc (')', outfile); sawclose = 1; } }
/* Scan X and replace any eliminable registers (such as fp) with a replacement (such as sp) if SUBST_P, plus an offset. The offset is a change in the offset between the eliminable register and its substitution if UPDATE_P, or the full offset if FULL_P, or otherwise zero. If FULL_P, we also use the SP offsets for elimination to SP. If UPDATE_P, use UPDATE_SP_OFFSET for updating offsets of register elimnable to SP. If UPDATE_SP_OFFSET is non-zero, don't use difference of the offset and the previous offset. MEM_MODE is the mode of an enclosing MEM. We need this to know how much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a MEM, we are allowed to replace a sum of a hard register and the constant zero with the hard register, which we cannot do outside a MEM. In addition, we need to record the fact that a hard register is referenced outside a MEM. If we make full substitution to SP for non-null INSN, add the insn sp offset. */ rtx lra_eliminate_regs_1 (rtx_insn *insn, rtx x, machine_mode mem_mode, bool subst_p, bool update_p, HOST_WIDE_INT update_sp_offset, bool full_p) { enum rtx_code code = GET_CODE (x); struct lra_elim_table *ep; rtx new_rtx; int i, j; const char *fmt; int copied = 0; lra_assert (!update_p || !full_p); lra_assert (update_sp_offset == 0 || (!subst_p && update_p && !full_p)); if (! current_function_decl) return x; switch (code) { CASE_CONST_ANY: case CONST: case SYMBOL_REF: case CODE_LABEL: case PC: case CC0: case ASM_INPUT: case ADDR_VEC: case ADDR_DIFF_VEC: case RETURN: return x; case REG: /* First handle the case where we encounter a bare hard register that is eliminable. Replace it with a PLUS. */ if ((ep = get_elimination (x)) != NULL) { rtx to = subst_p ? ep->to_rtx : ep->from_rtx; if (update_sp_offset != 0) { if (ep->to_rtx == stack_pointer_rtx) return plus_constant (Pmode, to, update_sp_offset); return to; } else if (update_p) return plus_constant (Pmode, to, ep->offset - ep->previous_offset); else if (full_p) return plus_constant (Pmode, to, ep->offset - (insn != NULL_RTX && ep->to_rtx == stack_pointer_rtx ? lra_get_insn_recog_data (insn)->sp_offset : 0)); else return to; } return x; case PLUS: /* If this is the sum of an eliminable register and a constant, rework the sum. */ if (REG_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1))) { if ((ep = get_elimination (XEXP (x, 0))) != NULL) { HOST_WIDE_INT offset; rtx to = subst_p ? ep->to_rtx : ep->from_rtx; if (! update_p && ! full_p) return gen_rtx_PLUS (Pmode, to, XEXP (x, 1)); if (update_sp_offset != 0) offset = ep->to_rtx == stack_pointer_rtx ? update_sp_offset : 0; else offset = (update_p ? ep->offset - ep->previous_offset : ep->offset); if (full_p && insn != NULL_RTX && ep->to_rtx == stack_pointer_rtx) offset -= lra_get_insn_recog_data (insn)->sp_offset; if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == -offset) return to; else return gen_rtx_PLUS (Pmode, to, plus_constant (Pmode, XEXP (x, 1), offset)); } /* If the hard register is not eliminable, we are done since the other operand is a constant. */ return x; } /* If this is part of an address, we want to bring any constant to the outermost PLUS. We will do this by doing hard register replacement in our operands and seeing if a constant shows up in one of them. Note that there is no risk of modifying the structure of the insn, since we only get called for its operands, thus we are either modifying the address inside a MEM, or something like an address operand of a load-address insn. 
*/ { rtx new0 = lra_eliminate_regs_1 (insn, XEXP (x, 0), mem_mode, subst_p, update_p, update_sp_offset, full_p); rtx new1 = lra_eliminate_regs_1 (insn, XEXP (x, 1), mem_mode, subst_p, update_p, update_sp_offset, full_p); new0 = move_plus_up (new0); new1 = move_plus_up (new1); if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)) return form_sum (new0, new1); } return x; case MULT: /* If this is the product of an eliminable hard register and a constant, apply the distribute law and move the constant out so that we have (plus (mult ..) ..). This is needed in order to keep load-address insns valid. This case is pathological. We ignore the possibility of overflow here. */ if (REG_P (XEXP (x, 0)) && CONST_INT_P (XEXP (x, 1)) && (ep = get_elimination (XEXP (x, 0))) != NULL) { rtx to = subst_p ? ep->to_rtx : ep->from_rtx; if (update_sp_offset != 0) { if (ep->to_rtx == stack_pointer_rtx) return plus_constant (Pmode, gen_rtx_MULT (Pmode, to, XEXP (x, 1)), update_sp_offset * INTVAL (XEXP (x, 1))); return gen_rtx_MULT (Pmode, to, XEXP (x, 1)); } else if (update_p) return plus_constant (Pmode, gen_rtx_MULT (Pmode, to, XEXP (x, 1)), (ep->offset - ep->previous_offset) * INTVAL (XEXP (x, 1))); else if (full_p) { HOST_WIDE_INT offset = ep->offset; if (insn != NULL_RTX && ep->to_rtx == stack_pointer_rtx) offset -= lra_get_insn_recog_data (insn)->sp_offset; return plus_constant (Pmode, gen_rtx_MULT (Pmode, to, XEXP (x, 1)), offset * INTVAL (XEXP (x, 1))); } else return gen_rtx_MULT (Pmode, to, XEXP (x, 1)); } /* fall through */ case CALL: case COMPARE: /* See comments before PLUS about handling MINUS. */ case MINUS: case DIV: case UDIV: case MOD: case UMOD: case AND: case IOR: case XOR: case ROTATERT: case ROTATE: case ASHIFTRT: case LSHIFTRT: case ASHIFT: case NE: case EQ: case GE: case GT: case GEU: case GTU: case LE: case LT: case LEU: case LTU: { rtx new0 = lra_eliminate_regs_1 (insn, XEXP (x, 0), mem_mode, subst_p, update_p, update_sp_offset, full_p); rtx new1 = XEXP (x, 1) ? lra_eliminate_regs_1 (insn, XEXP (x, 1), mem_mode, subst_p, update_p, update_sp_offset, full_p) : 0; if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)) return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1); } return x; case EXPR_LIST: /* If we have something in XEXP (x, 0), the usual case, eliminate it. */ if (XEXP (x, 0)) { new_rtx = lra_eliminate_regs_1 (insn, XEXP (x, 0), mem_mode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != XEXP (x, 0)) { /* If this is a REG_DEAD note, it is not valid anymore. Using the eliminated version could result in creating a REG_DEAD note for the stack or frame pointer. */ if (REG_NOTE_KIND (x) == REG_DEAD) return (XEXP (x, 1) ? lra_eliminate_regs_1 (insn, XEXP (x, 1), mem_mode, subst_p, update_p, update_sp_offset, full_p) : NULL_RTX); x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1)); } } /* fall through */ case INSN_LIST: case INT_LIST: /* Now do eliminations in the rest of the chain. If this was an EXPR_LIST, this might result in allocating more memory than is strictly needed, but it simplifies the code. */ if (XEXP (x, 1)) { new_rtx = lra_eliminate_regs_1 (insn, XEXP (x, 1), mem_mode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != XEXP (x, 1)) return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx); } return x; case PRE_INC: case POST_INC: case PRE_DEC: case POST_DEC: /* We do not support elimination of a register that is modified. elimination_effects has already make sure that this does not happen. 
*/ return x; case PRE_MODIFY: case POST_MODIFY: /* We do not support elimination of a hard register that is modified. LRA has already make sure that this does not happen. The only remaining case we need to consider here is that the increment value may be an eliminable register. */ if (GET_CODE (XEXP (x, 1)) == PLUS && XEXP (XEXP (x, 1), 0) == XEXP (x, 0)) { rtx new_rtx = lra_eliminate_regs_1 (insn, XEXP (XEXP (x, 1), 1), mem_mode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != XEXP (XEXP (x, 1), 1)) return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0), gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), new_rtx)); } return x; case STRICT_LOW_PART: case NEG: case NOT: case SIGN_EXTEND: case ZERO_EXTEND: case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: case FLOAT: case FIX: case UNSIGNED_FIX: case UNSIGNED_FLOAT: case ABS: case SQRT: case FFS: case CLZ: case CTZ: case POPCOUNT: case PARITY: case BSWAP: new_rtx = lra_eliminate_regs_1 (insn, XEXP (x, 0), mem_mode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != XEXP (x, 0)) return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx); return x; case SUBREG: new_rtx = lra_eliminate_regs_1 (insn, SUBREG_REG (x), mem_mode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != SUBREG_REG (x)) { int x_size = GET_MODE_SIZE (GET_MODE (x)); int new_size = GET_MODE_SIZE (GET_MODE (new_rtx)); if (MEM_P (new_rtx) && x_size <= new_size) { SUBREG_REG (x) = new_rtx; alter_subreg (&x, false); return x; } else if (! subst_p) { /* LRA can transform subregs itself. So don't call simplify_gen_subreg until LRA transformations are finished. Function simplify_gen_subreg can do non-trivial transformations (like truncation) which might make LRA work to fail. */ SUBREG_REG (x) = new_rtx; return x; } else return simplify_gen_subreg (GET_MODE (x), new_rtx, GET_MODE (new_rtx), SUBREG_BYTE (x)); } return x; case MEM: /* Our only special processing is to pass the mode of the MEM to our recursive call and copy the flags. While we are here, handle this case more efficiently. */ return replace_equiv_address_nv (x, lra_eliminate_regs_1 (insn, XEXP (x, 0), GET_MODE (x), subst_p, update_p, update_sp_offset, full_p)); case USE: /* Handle insn_list USE that a call to a pure function may generate. */ new_rtx = lra_eliminate_regs_1 (insn, XEXP (x, 0), VOIDmode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != XEXP (x, 0)) return gen_rtx_USE (GET_MODE (x), new_rtx); return x; case CLOBBER: case SET: gcc_unreachable (); default: break; } /* Process each of our operands recursively. If any have changed, make a copy of the rtx. */ fmt = GET_RTX_FORMAT (code); for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) { if (*fmt == 'e') { new_rtx = lra_eliminate_regs_1 (insn, XEXP (x, i), mem_mode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != XEXP (x, i) && ! copied) { x = shallow_copy_rtx (x); copied = 1; } XEXP (x, i) = new_rtx; } else if (*fmt == 'E') { int copied_vec = 0; for (j = 0; j < XVECLEN (x, i); j++) { new_rtx = lra_eliminate_regs_1 (insn, XVECEXP (x, i, j), mem_mode, subst_p, update_p, update_sp_offset, full_p); if (new_rtx != XVECEXP (x, i, j) && ! copied_vec) { rtvec new_v = gen_rtvec_v (XVECLEN (x, i), XVEC (x, i)->elem); if (! copied) { x = shallow_copy_rtx (x); copied = 1; } XVEC (x, i) = new_v; copied_vec = 1; } XVECEXP (x, i, j) = new_rtx; } } } return x; }
/* Check if *XP is equivalent to Y. Until an an unreconcilable difference is found, use in-group changes with validate_change on *XP to make register assignments agree. It is the (not necessarily direct) callers responsibility to verify / confirm / cancel these changes, as appropriate. RVALUE indicates if the processed piece of rtl is used as a destination, in which case we can't have different registers being an input. Returns nonzero if the two blocks have been identified as equivalent, zero otherwise. RVALUE == 0: destination RVALUE == 1: source RVALUE == -1: source, ignore SET_DEST of SET / clobber. */ bool rtx_equiv_p (rtx *xp, rtx y, int rvalue, struct equiv_info *info) { rtx x = *xp; enum rtx_code code; int length; const char *format; int i; if (!y || !x) return x == y; code = GET_CODE (y); if (code != REG && x == y) return true; if (GET_CODE (x) != code || GET_MODE (x) != GET_MODE (y)) return false; /* ??? could extend to allow CONST_INT inputs. */ switch (code) { case REG: { unsigned x_regno = REGNO (x); unsigned y_regno = REGNO (y); int x_common_live, y_common_live; if (reload_completed && (x_regno >= FIRST_PSEUDO_REGISTER || y_regno >= FIRST_PSEUDO_REGISTER)) { /* We should only see this in REG_NOTEs. */ gcc_assert (!info->live_update); /* Returning false will cause us to remove the notes. */ return false; } #ifdef STACK_REGS /* After reg-stack, can only accept literal matches of stack regs. */ if (info->mode & CLEANUP_POST_REGSTACK && (IN_RANGE (x_regno, FIRST_STACK_REG, LAST_STACK_REG) || IN_RANGE (y_regno, FIRST_STACK_REG, LAST_STACK_REG))) return x_regno == y_regno; #endif /* If the register is a locally live one in one block, the corresponding one must be locally live in the other, too, and match of identical regnos doesn't apply. */ if (REGNO_REG_SET_P (info->x_local_live, x_regno)) { if (!REGNO_REG_SET_P (info->y_local_live, y_regno)) return false; } else if (REGNO_REG_SET_P (info->y_local_live, y_regno)) return false; else if (x_regno == y_regno) { if (!rvalue && info->cur.input_valid && (reg_overlap_mentioned_p (x, info->x_input) || reg_overlap_mentioned_p (x, info->y_input))) return false; /* Update liveness information. */ if (info->live_update && assign_reg_reg_set (info->common_live, x, rvalue)) info->cur.version++; return true; } x_common_live = REGNO_REG_SET_P (info->common_live, x_regno); y_common_live = REGNO_REG_SET_P (info->common_live, y_regno); if (x_common_live != y_common_live) return false; else if (x_common_live) { if (! rvalue || info->input_cost < 0 || no_new_pseudos) return false; /* If info->live_update is not set, we are processing notes. We then allow a match with x_input / y_input found in a previous pass. */ if (info->live_update && !info->cur.input_valid) { info->cur.input_valid = true; info->x_input = x; info->y_input = y; info->cur.input_count += optimize_size ? 2 : 1; if (info->input_reg && GET_MODE (info->input_reg) != GET_MODE (info->x_input)) info->input_reg = NULL_RTX; if (!info->input_reg) info->input_reg = gen_reg_rtx (GET_MODE (info->x_input)); } else if ((info->live_update ? ! info->cur.input_valid : ! info->x_input) || ! rtx_equal_p (x, info->x_input) || ! rtx_equal_p (y, info->y_input)) return false; validate_change (info->cur.x_start, xp, info->input_reg, 1); } else { int x_nregs = (x_regno >= FIRST_PSEUDO_REGISTER ? 1 : hard_regno_nregs[x_regno][GET_MODE (x)]); int y_nregs = (y_regno >= FIRST_PSEUDO_REGISTER ? 
1 : hard_regno_nregs[y_regno][GET_MODE (y)]); int size = GET_MODE_SIZE (GET_MODE (x)); enum machine_mode x_mode = GET_MODE (x); unsigned x_regno_i, y_regno_i; int x_nregs_i, y_nregs_i, size_i; int local_count = info->cur.local_count; /* This might be a register local to each block. See if we have it already registered. */ for (i = local_count - 1; i >= 0; i--) { x_regno_i = REGNO (info->x_local[i]); x_nregs_i = (x_regno_i >= FIRST_PSEUDO_REGISTER ? 1 : hard_regno_nregs[x_regno_i][GET_MODE (x)]); y_regno_i = REGNO (info->y_local[i]); y_nregs_i = (y_regno_i >= FIRST_PSEUDO_REGISTER ? 1 : hard_regno_nregs[y_regno_i][GET_MODE (y)]); size_i = GET_MODE_SIZE (GET_MODE (info->x_local[i])); /* If we have a new pair of registers that is wider than an old pair and enclosing it with matching offsets, remove the old pair. If we find a matching, wider, old pair, use the old one. If the width is the same, use the old one if the modes match, but the new if they don't. We don't want to get too fancy with subreg_regno_offset here, so we just test two straightforward cases each. */ if (info->live_update && (x_mode != GET_MODE (info->x_local[i]) ? size >= size_i : size > size_i)) { /* If the new pair is fully enclosing a matching existing pair, remove the old one. N.B. because we are removing one entry here, the check below if we have space for a new entry will succeed. */ if ((x_regno <= x_regno_i && x_regno + x_nregs >= x_regno_i + x_nregs_i && x_nregs == y_nregs && x_nregs_i == y_nregs_i && x_regno - x_regno_i == y_regno - y_regno_i) || (x_regno == x_regno_i && y_regno == y_regno_i && x_nregs >= x_nregs_i && y_nregs >= y_nregs_i)) { info->cur.local_count = --local_count; info->x_local[i] = info->x_local[local_count]; info->y_local[i] = info->y_local[local_count]; continue; } } else { /* If the new pair is fully enclosed within a matching existing pair, succeed. */ if (x_regno >= x_regno_i && x_regno + x_nregs <= x_regno_i + x_nregs_i && x_nregs == y_nregs && x_nregs_i == y_nregs_i && x_regno - x_regno_i == y_regno - y_regno_i) break; if (x_regno == x_regno_i && y_regno == y_regno_i && x_nregs <= x_nregs_i && y_nregs <= y_nregs_i) break; } /* Any other overlap causes a match failure. */ if (x_regno + x_nregs > x_regno_i && x_regno_i + x_nregs_i > x_regno) return false; if (y_regno + y_nregs > y_regno_i && y_regno_i + y_nregs_i > y_regno) return false; } if (i < 0) { /* Not found. Create a new entry if possible. */ if (!info->live_update || info->cur.local_count >= STRUCT_EQUIV_MAX_LOCAL) return false; info->x_local[info->cur.local_count] = x; info->y_local[info->cur.local_count] = y; info->cur.local_count++; info->cur.version++; } note_local_live (info, x, y, rvalue); } return true; } case SET: gcc_assert (rvalue < 0); /* Ignore the destinations role as a destination. Still, we have to consider input registers embedded in the addresses of a MEM. N.B., we process the rvalue aspect of STRICT_LOW_PART / ZERO_EXTEND / SIGN_EXTEND along with their lvalue aspect. */ if(!set_dest_addr_equiv_p (SET_DEST (x), SET_DEST (y), info)) return false; /* Process source. */ return rtx_equiv_p (&SET_SRC (x), SET_SRC (y), 1, info); case PRE_MODIFY: /* Process destination. */ if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info)) return false; /* Process source. */ return rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 1, info); case POST_MODIFY: { rtx x_dest0, x_dest1; /* Process destination. 
*/ x_dest0 = XEXP (x, 0); gcc_assert (REG_P (x_dest0)); if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info)) return false; x_dest1 = XEXP (x, 0); /* validate_change might have changed the destination. Put it back so that we can do a proper match for its role a an input. */ XEXP (x, 0) = x_dest0; if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info)) return false; gcc_assert (x_dest1 == XEXP (x, 0)); /* Process source. */ return rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 1, info); } case CLOBBER: gcc_assert (rvalue < 0); return true; /* Some special forms are also rvalues when they appear in lvalue positions. However, we must ont try to match a register after we have already altered it with validate_change, consider the rvalue aspect while we process the lvalue. */ case STRICT_LOW_PART: case ZERO_EXTEND: case SIGN_EXTEND: { rtx x_inner, y_inner; enum rtx_code code; int change; if (rvalue) break; x_inner = XEXP (x, 0); y_inner = XEXP (y, 0); if (GET_MODE (x_inner) != GET_MODE (y_inner)) return false; code = GET_CODE (x_inner); if (code != GET_CODE (y_inner)) return false; /* The address of a MEM is an input that will be processed during rvalue == -1 processing. */ if (code == SUBREG) { if (SUBREG_BYTE (x_inner) != SUBREG_BYTE (y_inner)) return false; x = x_inner; x_inner = SUBREG_REG (x_inner); y_inner = SUBREG_REG (y_inner); if (GET_MODE (x_inner) != GET_MODE (y_inner)) return false; code = GET_CODE (x_inner); if (code != GET_CODE (y_inner)) return false; } if (code == MEM) return true; gcc_assert (code == REG); if (! rtx_equiv_p (&XEXP (x, 0), y_inner, rvalue, info)) return false; if (REGNO (x_inner) == REGNO (y_inner)) { change = assign_reg_reg_set (info->common_live, x_inner, 1); info->cur.version++; } else change = note_local_live (info, x_inner, y_inner, 1); gcc_assert (change); return true; } /* The AUTO_INC / POST_MODIFY / PRE_MODIFY sets are modelled to take place during input processing, however, that is benign, since they are paired with reads. */ case MEM: return !rvalue || rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), rvalue, info); case POST_INC: case POST_DEC: case PRE_INC: case PRE_DEC: return (rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info) && rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info)); case PARALLEL: /* If this is a top-level PATTERN PARALLEL, we expect the caller to have handled the SET_DESTs. A complex or vector PARALLEL can be identified by having a mode. */ gcc_assert (rvalue < 0 || GET_MODE (x) != VOIDmode); break; case LABEL_REF: /* Check special tablejump match case. */ if (XEXP (y, 0) == info->y_label) return (XEXP (x, 0) == info->x_label); /* We can't assume nonlocal labels have their following insns yet. */ if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y)) return XEXP (x, 0) == XEXP (y, 0); /* Two label-refs are equivalent if they point at labels in the same position in the instruction stream. */ return (next_real_insn (XEXP (x, 0)) == next_real_insn (XEXP (y, 0))); case SYMBOL_REF: return XSTR (x, 0) == XSTR (y, 0); /* Some rtl is guaranteed to be shared, or unique; If we didn't match EQ equality above, they aren't the same. */ case CONST_INT: case CODE_LABEL: return false; default: break; } /* For commutative operations, the RTX match if the operands match in any order. 
*/ if (targetm.commutative_p (x, UNKNOWN)) return ((rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), rvalue, info) && rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), rvalue, info)) || (rtx_equiv_p (&XEXP (x, 0), XEXP (y, 1), rvalue, info) && rtx_equiv_p (&XEXP (x, 1), XEXP (y, 0), rvalue, info))); /* Process subexpressions - this is similar to rtx_equal_p. */ length = GET_RTX_LENGTH (code); format = GET_RTX_FORMAT (code); for (i = 0; i < length; ++i) { switch (format[i]) { case 'w': if (XWINT (x, i) != XWINT (y, i)) return false; break; case 'n': case 'i': if (XINT (x, i) != XINT (y, i)) return false; break; case 'V': case 'E': if (XVECLEN (x, i) != XVECLEN (y, i)) return false; if (XVEC (x, i) != 0) { int j; for (j = 0; j < XVECLEN (x, i); ++j) { if (! rtx_equiv_p (&XVECEXP (x, i, j), XVECEXP (y, i, j), rvalue, info)) return false; } } break; case 'e': if (! rtx_equiv_p (&XEXP (x, i), XEXP (y, i), rvalue, info)) return false; break; case 'S': case 's': if ((XSTR (x, i) || XSTR (y, i)) && (! XSTR (x, i) || ! XSTR (y, i) || strcmp (XSTR (x, i), XSTR (y, i)))) return false; break; case 'u': /* These are just backpointers, so they don't matter. */ break; case '0': case 't': break; /* It is believed that rtx's at this level will never contain anything but integers and other rtx's, except for within LABEL_REFs and SYMBOL_REFs. */ default: gcc_unreachable (); } } return true; }
static int is_predicable (struct queue_elem *elem) { rtvec vec = XVEC (elem->data, 4); const char *value; int i; if (! vec) return predicable_default; for (i = GET_NUM_ELEM (vec) - 1; i >= 0; --i) { rtx sub = RTVEC_ELT (vec, i); switch (GET_CODE (sub)) { case SET_ATTR: if (strcmp (XSTR (sub, 0), "predicable") == 0) { value = XSTR (sub, 1); goto found; } break; case SET_ATTR_ALTERNATIVE: if (strcmp (XSTR (sub, 0), "predicable") == 0) { message_with_line (elem->lineno, "multiple alternatives for `predicable'"); errors = 1; return 0; } break; case SET: if (GET_CODE (SET_DEST (sub)) != ATTR || strcmp (XSTR (SET_DEST (sub), 0), "predicable") != 0) break; sub = SET_SRC (sub); if (GET_CODE (sub) == CONST_STRING) { value = XSTR (sub, 0); goto found; } /* ??? It would be possible to handle this if we really tried. It's not easy though, and I'm not going to bother until it really proves necessary. */ message_with_line (elem->lineno, "non-constant value for `predicable'"); errors = 1; return 0; default: gcc_unreachable (); } } return predicable_default; found: /* Verify that predicability does not vary on the alternative. */ /* ??? It should be possible to handle this by simply eliminating the non-predicable alternatives from the insn. FRV would like to do this. Delay this until we've got the basics solid. */ if (strchr (value, ',') != NULL) { message_with_line (elem->lineno, "multiple alternatives for `predicable'"); errors = 1; return 0; } /* Find out which value we're looking at. */ if (strcmp (value, predicable_true) == 0) return 1; if (strcmp (value, predicable_false) == 0) return 0; message_with_line (elem->lineno, "unknown value `%s' for `predicable' attribute", value); errors = 1; return 0; }
static void
process_rtx (rtx desc, int lineno)
{
  switch (GET_CODE (desc))
    {
    case DEFINE_INSN:
      queue_pattern (desc, &define_insn_tail, read_rtx_filename, lineno);
      break;

    case DEFINE_COND_EXEC:
      queue_pattern (desc, &define_cond_exec_tail, read_rtx_filename, lineno);
      break;

    case DEFINE_ATTR:
      queue_pattern (desc, &define_attr_tail, read_rtx_filename, lineno);
      break;

    case INCLUDE:
      process_include (desc, lineno);
      break;

    case DEFINE_INSN_AND_SPLIT:
      {
        const char *split_cond;
        rtx split;
        rtvec attr;
        int i;

        /* Create a split with values from the insn_and_split.  */
        split = rtx_alloc (DEFINE_SPLIT);

        i = XVECLEN (desc, 1);
        XVEC (split, 0) = rtvec_alloc (i);
        while (--i >= 0)
          {
            XVECEXP (split, 0, i) = copy_rtx (XVECEXP (desc, 1, i));
            remove_constraints (XVECEXP (split, 0, i));
          }

        /* If the split condition starts with "&&", append it to the
           insn condition to create the new split condition.  */
        split_cond = XSTR (desc, 4);
        if (split_cond[0] == '&' && split_cond[1] == '&')
          split_cond = concat (XSTR (desc, 2), split_cond, NULL);
        XSTR (split, 1) = split_cond;
        XVEC (split, 2) = XVEC (desc, 5);
        XSTR (split, 3) = XSTR (desc, 6);

        /* Fix up the DEFINE_INSN.  */
        attr = XVEC (desc, 7);
        PUT_CODE (desc, DEFINE_INSN);
        XVEC (desc, 4) = attr;

        /* Queue them.  */
        queue_pattern (desc, &define_insn_tail, read_rtx_filename, lineno);
        queue_pattern (split, &other_tail, read_rtx_filename, lineno);
        break;
      }

    default:
      queue_pattern (desc, &other_tail, read_rtx_filename, lineno);
      break;
    }
}
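/* Sketch of the "&&" convention handled above (hypothetical strings and
   helper name): a define_insn_and_split whose split condition starts with
   "&&" only states what the split adds, and libiberty's concat glues it
   verbatim onto the insn condition for the queued define_split.  */
static const char *
example_join_split_cond (void)
{
  const char *insn_cond  = "TARGET_FOO";           /* stands in for XSTR (desc, 2) */
  const char *split_cond = "&& reload_completed";  /* stands in for XSTR (desc, 4) */

  if (split_cond[0] == '&' && split_cond[1] == '&')
    split_cond = concat (insn_cond, split_cond, NULL);

  /* Result: "TARGET_FOO&& reload_completed", still a valid C condition.  */
  return split_cond;
}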
static void gen_insn (rtx insn, int lineno) { struct pattern_stats stats; int i; /* See if the pattern for this insn ends with a group of CLOBBERs of (hard) registers or MATCH_SCRATCHes. If so, store away the information for later. */ if (XVEC (insn, 1)) { int has_hard_reg = 0; for (i = XVECLEN (insn, 1) - 1; i > 0; i--) { if (GET_CODE (XVECEXP (insn, 1, i)) != CLOBBER) break; if (REG_P (XEXP (XVECEXP (insn, 1, i), 0))) has_hard_reg = 1; else if (GET_CODE (XEXP (XVECEXP (insn, 1, i), 0)) != MATCH_SCRATCH) break; } if (i != XVECLEN (insn, 1) - 1) { struct clobber_pat *p; struct clobber_ent *link = XNEW (struct clobber_ent); int j; link->code_number = insn_code_number; /* See if any previous CLOBBER_LIST entry is the same as this one. */ for (p = clobber_list; p; p = p->next) { if (p->first_clobber != i + 1 || XVECLEN (p->pattern, 1) != XVECLEN (insn, 1)) continue; for (j = i + 1; j < XVECLEN (insn, 1); j++) { rtx old_rtx = XEXP (XVECEXP (p->pattern, 1, j), 0); rtx new_rtx = XEXP (XVECEXP (insn, 1, j), 0); /* OLD and NEW_INSN are the same if both are to be a SCRATCH of the same mode, or if both are registers of the same mode and number. */ if (! (GET_MODE (old_rtx) == GET_MODE (new_rtx) && ((GET_CODE (old_rtx) == MATCH_SCRATCH && GET_CODE (new_rtx) == MATCH_SCRATCH) || (REG_P (old_rtx) && REG_P (new_rtx) && REGNO (old_rtx) == REGNO (new_rtx))))) break; } if (j == XVECLEN (insn, 1)) break; } if (p == 0) { p = XNEW (struct clobber_pat); p->insns = 0; p->pattern = insn; p->first_clobber = i + 1; p->next = clobber_list; p->has_hard_reg = has_hard_reg; clobber_list = p; } link->next = p->insns; p->insns = link; }
static void process_rtx (rtx desc, int lineno) { switch (GET_CODE (desc)) { case DEFINE_INSN: queue_pattern (desc, &define_insn_tail, read_rtx_filename, lineno); break; case DEFINE_COND_EXEC: queue_pattern (desc, &define_cond_exec_tail, read_rtx_filename, lineno); break; case DEFINE_ATTR: queue_pattern (desc, &define_attr_tail, read_rtx_filename, lineno); break; case DEFINE_PREDICATE: case DEFINE_SPECIAL_PREDICATE: case DEFINE_CONSTRAINT: case DEFINE_REGISTER_CONSTRAINT: case DEFINE_MEMORY_CONSTRAINT: case DEFINE_ADDRESS_CONSTRAINT: queue_pattern (desc, &define_pred_tail, read_rtx_filename, lineno); break; case INCLUDE: process_include (desc, lineno); break; case DEFINE_INSN_AND_SPLIT: { const char *split_cond; rtx split; rtvec attr; int i; struct queue_elem *insn_elem; struct queue_elem *split_elem; /* Create a split with values from the insn_and_split. */ split = rtx_alloc (DEFINE_SPLIT); i = XVECLEN (desc, 1); XVEC (split, 0) = rtvec_alloc (i); while (--i >= 0) { XVECEXP (split, 0, i) = copy_rtx (XVECEXP (desc, 1, i)); remove_constraints (XVECEXP (split, 0, i)); } /* If the split condition starts with "&&", append it to the insn condition to create the new split condition. */ split_cond = XSTR (desc, 4); if (split_cond[0] == '&' && split_cond[1] == '&') { copy_rtx_ptr_loc (split_cond + 2, split_cond); split_cond = join_c_conditions (XSTR (desc, 2), split_cond + 2); } XSTR (split, 1) = split_cond; XVEC (split, 2) = XVEC (desc, 5); XSTR (split, 3) = XSTR (desc, 6); /* Fix up the DEFINE_INSN. */ attr = XVEC (desc, 7); PUT_CODE (desc, DEFINE_INSN); XVEC (desc, 4) = attr; /* Queue them. */ insn_elem = queue_pattern (desc, &define_insn_tail, read_rtx_filename, lineno); split_elem = queue_pattern (split, &other_tail, read_rtx_filename, lineno); insn_elem->split = split_elem; break; } default: queue_pattern (desc, &other_tail, read_rtx_filename, lineno); break; } }