// Compute the destination of the control-transfer instruction at addr().
// If orig_addr is non-NULL, the instruction was just copied from orig_addr
// to addr(), so a pc-relative target decoded at the new location appears
// shifted by (addr() - orig_addr); that shift is compensated here.
address Relocation::pd_call_destination(address orig_addr) {
  address here = addr();
  // Correction applied only to pc-relative encodings.
  intptr_t delta = (orig_addr != NULL) ? -(here - orig_addr) : 0;

  if (NativeFarCall::is_far_call_at(here)) {
    NativeFarCall* fcall = nativeFarCall_at(here);
    intptr_t fix = fcall->is_pcrelative() ? delta : 0;
    return fcall->destination() + fix;
  }
  if (NativeJump::is_jump_at(here)) {
    NativeJump* jmp = nativeJump_at(here);
    intptr_t fix = jmp->is_pcrelative() ? delta : 0;
    return jmp->jump_destination() + fix;
  }
  if (NativeConditionalFarBranch::is_conditional_far_branch_at(here)) {
    // Not adjusted: the conditional far branch destination is taken as-is.
    return NativeConditionalFarBranch_at(here)->branch_destination();
  }

  // Otherwise this is a call routed through a trampoline stub.
  // There are two instructions at the beginning of a stub, therefore we
  // load the constant at trampoline + 8.
  address stub = nativeCall_at(here)->get_trampoline();
  if (stub == NULL) {
    return (address) -1;
  }
  return (address) nativeMovConstReg_at(stub + 8)->data();
}
// Patch the call-type instruction at addr() so it transfers control to x.
void Relocation::pd_set_call_destination(address x) {
  address here = addr();

  if (NativeCall::is_call_at(here)) {
    nativeCall_at(here)->set_destination(x);
  } else if (NativeFarCall::is_call_at(here)) {
    nativeFarCall_at(here)->set_destination(x);
  } else {
    // Special case: Patchable branch local to the code cache.
    // This will break badly if the code cache grows larger than a few Mb.
    nativeGeneralJump_at(here)->set_jump_destination(x);
  }
}
// Patch the control-transfer instruction at addr() so it targets x.
// Dispatches on the instruction kind found at the relocation site.
void Relocation::pd_set_call_destination(address x) {
  address here = addr();

  if (NativeFarCall::is_far_call_at(here)) {
    nativeFarCall_at(here)->set_destination(x);
    return;
  }
  if (NativeJump::is_jump_at(here)) {
    nativeJump_at(here)->set_jump_destination(x);
    return;
  }
  if (NativeConditionalFarBranch::is_conditional_far_branch_at(here)) {
    NativeConditionalFarBranch_at(here)->set_branch_destination(x);
    return;
  }
  // Ordinary call: patch in an MT-safe manner.
  // NOTE(review): the `false` second argument's meaning is not visible here —
  // confirm against NativeCall::set_destination_mt_safe before changing it.
  nativeCall_at(here)->set_destination_mt_safe(x, false);
}
// Patch the destination of the call instruction at addr() to x.
// Handles, in order: pc-relative far calls (when ShortenBranches is off),
// TOC/constant-pool based far calls, pc-relative near calls, and finally
// an absolute constant materialized by a mov-const sequence.
void Relocation::pd_set_call_destination(address x) {
  address inst_addr = addr();

  if (NativeFarCall::is_far_call_at(inst_addr)) {
    if (!ShortenBranches) {
      if (MacroAssembler::is_call_far_pcrelative(inst_addr)) {
        // Target is encoded pc-relatively just past the leading nop.
        address a1 = MacroAssembler::get_target_addr_pcrel(inst_addr+MacroAssembler::nop_size());
#ifdef ASSERT
        // Debug cross-check: the decoded pc-relative target must agree
        // with what NativeFarCall reads back at the same location.
        address a3 = nativeFarCall_at(inst_addr)->destination();
        if (a1 != a3) {
          unsigned int range = 128;
          Assembler::dump_code_range(tty, inst_addr, range, "pc-relative call w/o ShortenBranches?");
          assert(false, "pc-relative call w/o ShortenBranches?");
        }
#endif
        // Offset 0: pc-relative form does not use a constant-pool slot.
        nativeFarCall_at(inst_addr)->set_destination(x, 0);
        return;
      }
      // Non-pc-relative far call with ShortenBranches off: only the
      // "unresolved" sentinel (-1) is expected here.
      assert(x == (address)-1, "consistency check");
      return;
    }
    // ShortenBranches: destination lives in the constant pool; fetch the
    // slot offset from the relocation when it carries one.
    int toc_offset = -1;
    if (type() == relocInfo::runtime_call_w_cp_type) {
      toc_offset = ((runtime_call_w_cp_Relocation *)this)->get_constant_pool_offset();
    }
    if (toc_offset>=0) {
      NativeFarCall* call = nativeFarCall_at(inst_addr);
      call->set_destination(x, toc_offset);
      return;
    }
  }

  if (NativeCall::is_call_at(inst_addr)) {
    NativeCall* call = nativeCall_at(inst_addr);
    if (call->is_pcrelative()) {
      // Near pc-relative call: patch atomically w.r.t. concurrent execution.
      call->set_destination_mt_safe(x);
      return;
    }
  }
  // constant is absolute, must use x
  nativeMovConstReg_at(inst_addr)->set_data(((intptr_t)x));
}
// Return the destination of the call instruction at addr().
// orig_addr, when non-NULL, is the instruction's location in the unmoved
// code; constant-pool based calls must be decoded there because the load
// from the constant pool is pc-relative.
address Relocation::pd_call_destination(address orig_addr) {
  address inst_addr = addr();

  if (NativeFarCall::is_far_call_at(inst_addr)) {
    if (!ShortenBranches) {
      if (MacroAssembler::is_call_far_pcrelative(inst_addr)) {
        // Decode the pc-relative target just past the leading nop, at the
        // original location (valid even when orig_addr == inst_addr has
        // not moved — see debug cross-check below).
        address a1 = MacroAssembler::get_target_addr_pcrel(orig_addr+MacroAssembler::nop_size());
#ifdef ASSERT
        // Debug cross-check: both decodings at both locations must agree.
        address a2 = MacroAssembler::get_target_addr_pcrel(inst_addr+MacroAssembler::nop_size());
        address a3 = nativeFarCall_at(orig_addr)->destination();
        address a4 = nativeFarCall_at(inst_addr)->destination();
        if ((a1 != a3) || (a2 != a4)) {
          unsigned int range = 128;
          Assembler::dump_code_range(tty, inst_addr, range, "pc-relative call w/o ShortenBranches?");
          Assembler::dump_code_range(tty, orig_addr, range, "pc-relative call w/o ShortenBranches?");
          assert(false, "pc-relative call w/o ShortenBranches?");
        }
#endif
        return a1;
      }
      // Not a recognized pc-relative far call: report "unresolved".
      return (address)(-1);
    }
    NativeFarCall* call;
    if (orig_addr == NULL) {
      call = nativeFarCall_at(inst_addr);
    } else {
      // must access location (in CP) where destination is stored in unmoved code, because load from CP is pc-relative
      call = nativeFarCall_at(orig_addr);
    }
    return call->destination();
  }

  if (NativeCall::is_call_at(inst_addr)) {
    NativeCall* call = nativeCall_at(inst_addr);
    if (call->is_pcrelative()) {
      // Moving the instruction shifts a pc-relative target by the same
      // distance; subtract that shift to recover the true destination.
      intptr_t off = inst_addr - orig_addr;
      return (address) (call->destination()-off);
    }
  }
  // Absolute destination materialized by a mov-const sequence.
  return (address) nativeMovConstReg_at(inst_addr)->data();
}
// Compute the destination of the call/jump instruction at addr().
// When orig_addr is non-NULL the instruction was just copied from there,
// so the target decoded at the new location appears shifted by
// (addr() - orig_addr); the shift is undone before returning.
address Relocation::pd_call_destination(address orig_addr) {
  address here = addr();
  intptr_t delta = 0;
  if (orig_addr != NULL) {
    delta = -(here - orig_addr);
  }

  if (NativeCall::is_call_at(here)) {
    return nativeCall_at(here)->destination() + delta;
  }
  if (NativeFarCall::is_call_at(here)) {
    return nativeFarCall_at(here)->destination() + delta;
  }

  // Special case: Patchable branch local to the code cache.
  // This will break badly if the code cache grows larger than a few Mb.
  return nativeGeneralJump_at(here)->jump_destination() + delta;
}