/* If has_instr_jmp_targets is true, this routine trashes the note field * of each instr_t to store the offset in order to properly encode * the relative pc for an instr_t jump target */ byte * instrlist_encode_to_copy(dcontext_t *dcontext, instrlist_t *ilist, byte *copy_pc, byte *final_pc, byte *max_pc, bool has_instr_jmp_targets) { instr_t *inst; int len = 0; #ifdef ARM /* XXX i#1734: reset encode state to avoid any stale encode state * or dangling pointer. */ if (instr_get_isa_mode(instrlist_first(ilist)) == DR_ISA_ARM_THUMB) encode_reset_it_block(dcontext); #endif /* Do an extra pass over the instrlist so we can determine if an instr opnd * was erroneously used with has_instr_jmp_targets = false. */ DOCHECK(2, { if (!has_instr_jmp_targets) { for (inst = instrlist_first(ilist); inst; inst = instr_get_next(inst)) { if (TEST(INSTR_OPERANDS_VALID, (inst)->flags)) { int i; for (i = 0; i < instr_num_srcs(inst); ++i) { CLIENT_ASSERT(!opnd_is_instr(instr_get_src(inst, i)), "has_instr_jmp_targets was unset " "but an instr opnd was found"); } } } } });
/* Returns whether (instr, pc) matches the "cti short rewrite" pattern: a
 * mangled cbz/cbnz (Thumb) whose raw bytes are a cbz/cbnz with disp==1
 * followed by a branch at CTI_SHORT_REWRITE_B_OFFS.
 * If pc is NULL it is taken from instr's raw bits (which must be allocated
 * and exactly CTI_SHORT_REWRITE_LENGTH long).
 * If instr is non-NULL, the thread's ISA mode is temporarily switched to
 * instr's mode for the raw decode and restored before returning.
 */
bool
instr_is_cti_short_rewrite(instr_t *instr, byte *pc)
{
    /* We assume all app's cbz/cbnz have been mangled.
     * See comments in x86/'s version of this routine.
     */
    dcontext_t *dcontext;
    dr_isa_mode_t old_mode;
    if (pc == NULL) {
        if (instr == NULL || !instr_has_allocated_bits(instr) ||
            instr->length != CTI_SHORT_REWRITE_LENGTH)
            return false;
        pc = instr_get_raw_bits(instr);
    }
    if (instr != NULL && instr_opcode_valid(instr)) {
        int opc = instr_get_opcode(instr);
        if (opc != OP_cbz && opc != OP_cbnz)
            return false;
    }
    if ((*(pc+1) != CBNZ_BYTE_A && *(pc+1) != CBZ_BYTE_A) ||
        /* Further verify by checking for a disp of 1 */
        (*pc & 0xf8) != 0x08)
        return false;
    /* XXX: this would be easier if decode_raw_is_jmp took in isa_mode */
    dcontext = get_thread_private_dcontext();
    if (instr != NULL)
        dr_set_isa_mode(dcontext, instr_get_isa_mode(instr), &old_mode);
    if (!decode_raw_is_jmp(dcontext, pc + CTI_SHORT_REWRITE_B_OFFS)) {
        /* i#fix: restore the ISA mode on this early exit too; the original
         * code returned without undoing the dr_set_isa_mode() above, leaving
         * the thread's dcontext in instr's ISA mode.
         */
        if (instr != NULL)
            dr_set_isa_mode(dcontext, old_mode, NULL);
        return false;
    }
    if (instr != NULL)
        dr_set_isa_mode(dcontext, old_mode, NULL);
    return true;
}
// We stored the instr count in *bb_field in bb_analysis(). int offline_instru_t::instrument_instr(void *drcontext, void *tag, void **bb_field, instrlist_t *ilist, instr_t *where, reg_id_t reg_ptr, reg_id_t reg_tmp, int adjust, instr_t *app) { app_pc pc, modbase; uint modidx; offline_entry_t entry; // We write just once per bb. if ((ptr_uint_t)*bb_field > MAX_INSTR_COUNT) return adjust; pc = dr_fragment_app_pc(tag); if (drmodtrack_lookup(drcontext, pc, &modidx, &modbase) != DRCOVLIB_SUCCESS) { // FIXME i#2062: add non-module support. The plan for instrs is to have // one entry w/ the start abs pc, and subsequent entries that pack the instr // length for 10 instrs, 4 bits each, into a pc.modoffs field. We will // also need to store the type (read/write/prefetch*) and size for the // memrefs. modidx = 0; modbase = pc; } entry.pc.type = OFFLINE_TYPE_PC; // We put the ARM vs Thumb mode into the modoffs to ensure proper decoding. entry.pc.modoffs = dr_app_pc_as_jump_target(instr_get_isa_mode(where), pc) - modbase; entry.pc.modidx = modidx; entry.pc.instr_count = (ptr_uint_t)*bb_field; insert_save_pc(drcontext, ilist, where, reg_ptr, reg_tmp, adjust, entry.combined_value); *(ptr_uint_t*)bb_field = MAX_INSTR_COUNT + 1; return (adjust + sizeof(offline_entry_t)); }
/* Returns the encoded length in bytes of instr, 0 for labels, or -1 when the
 * length can only be determined by actually encoding (variable-width Thumb).
 */
int
instr_length_arch(dcontext_t *dcontext, instr_t *instr)
{
    switch (instr_get_opcode(instr)) {
    case OP_LABEL:
        /* Labels take no space in the encoding. */
        return 0;
    case OP_b:
        /* Avoid encoding OP_b to avoid reachability checks for added
         * fall-through jumps, whose targets are later changed to the stub
         * prior to emit.  Another option is to remove the assert on bad
         * encoding, so that the instr_encode_check_reachability() call in
         * private_instr_encode() can gracefully fail: which we now do, but
         * this is a nice optimization.
         */
        return 4;
    default:
        break;
    }
    if (instr_get_isa_mode(instr) == DR_ISA_ARM_THUMB) {
        /* Thumb is variable-width: we have to encode to find the size. */
        return -1;
    }
    return ARM_INSTR_SIZE;
}