// Patch the jump instruction at 'inst' so it targets the given CodeBlob,
// and register the site as a runtime-call relocation (32-bit call operand).
void CodeInstaller::pd_relocate_CodeBlob(CodeBlob* cb, NativeInstruction* inst) {
  if (cb->is_nmethod()) {
    // For nmethods, enter through the verified entry point.
    nmethod* nm = (nmethod*) cb;
    nativeJump_at((address)inst)->set_jump_destination(nm->verified_entry_point());
  } else {
    // Other blobs (stubs, adapters) are entered at code_begin().
    nativeJump_at((address)inst)->set_jump_destination(cb->code_begin());
  }
  _instructions->relocate((address)inst, runtime_call_Relocation::spec(), Assembler::call32_operand);
}
// Return the destination of the call/jump/branch at addr().
// If this instruction was just copied here from orig_addr, pc-relative
// targets must be adjusted by the distance the instruction moved.
address Relocation::pd_call_destination(address orig_addr) {
  intptr_t adj = 0;
  address inst_loc = addr();

  if (orig_addr != NULL) {
    // We just moved this call instruction from orig_addr to addr().
    // This means its target will appear to have grown by addr() - orig_addr.
    adj = -(inst_loc - orig_addr);
  }
  if (NativeFarCall::is_far_call_at(inst_loc)) {
    // Far calls may be absolute; only adjust when pc-relative.
    NativeFarCall* call = nativeFarCall_at(inst_loc);
    return call->destination() + (intptr_t)(call->is_pcrelative() ? adj : 0);
  } else if (NativeJump::is_jump_at(inst_loc)) {
    NativeJump* jump = nativeJump_at(inst_loc);
    return jump->jump_destination() + (intptr_t)(jump->is_pcrelative() ? adj : 0);
  } else if (NativeConditionalFarBranch::is_conditional_far_branch_at(inst_loc)) {
    // Conditional far branches need no adjustment here.
    NativeConditionalFarBranch* branch = NativeConditionalFarBranch_at(inst_loc);
    return branch->branch_destination();
  } else {
    // Plain call through a trampoline stub: read the destination out of the
    // stub's load-constant instruction.
    // There are two instructions at the beginning of a stub, therefore we
    // load at orig_addr + 8.
    orig_addr = nativeCall_at(inst_loc)->get_trampoline();
    if (orig_addr == NULL) {
      // No trampoline: signal "unresolved" with -1.
      return (address) -1;
    } else {
      return (address) nativeMovConstReg_at(orig_addr + 8)->data();
    }
  }
}
// Re-point this static call at the interpreter: write the callee Method*
// into the stub's holder-load, aim the stub's jump at 'entry', and finally
// (MT-safely) redirect the call instruction itself at the stub.
void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // The stub must be either clean (0 / -1) or already set to this callee;
  // anything else indicates a concurrent unsynchronized modification.
  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call.
  set_destination_mt_safe(stub);
}
// Redirect the control-transfer instruction at addr() to destination x.
// Handles direct calls, jumps, conditional jumps, and 64-bit mov-literal
// targets.
void Relocation::pd_set_call_destination(address x) {
  NativeInstruction* ni = nativeInstruction_at(addr());
  if (ni->is_call()) {
    nativeCall_at(addr())->set_destination(x);
  } else if (ni->is_jump()) {
    NativeJump* nj = nativeJump_at(addr());

    // Unresolved jumps are recognized by a destination of -1
    // However 64bit can't actually produce such an address
    // and encodes a jump to self but jump_destination will
    // return a -1 as the signal. We must not relocate this
    // jmp or the ic code will not see it as unresolved.
    if (nj->jump_destination() == (address) -1) {
      x = addr(); // jump to self
    }
    nj->set_jump_destination(x);
  } else if (ni->is_cond_jump()) {
    // %%%% kludge this, for now, until we get a jump_destination method
    // Adjust the rel32 displacement in place by the delta of the targets.
    address old_dest = nativeGeneralJump_at(addr())->jump_destination();
    address disp = Assembler::locate_operand(addr(), Assembler::call32_operand);
    *(jint*)disp += (x - old_dest);
  } else if (ni->is_mov_literal64()) {
    // Absolute 64-bit immediate: just rewrite the loaded constant.
    ((NativeMovConstReg*)ni)->set_data((intptr_t)x);
  } else {
    ShouldNotReachHere();
  }
}
// Re-point this direct static call at the interpreter: install the callee
// Method* in the stub's holder-load, aim the stub's jump at 'entry', then
// (MT-safely) redirect the call instruction itself at the stub.
void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub(/*is_aot*/ false);
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + NativeCall::get_IC_pos_in_java_to_interp_stub());
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // A generated lambda form might be deleted from the Lambdaform
  // cache in MethodTypeForm. If a jit compiled lambdaform method
  // becomes not entrant and the cache access returns null, the new
  // resolve will lead to a new generated LambdaForm.
  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee() ||
         callee->is_compiled_lambda_form(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call.
  set_destination_mt_safe(stub);
}
// Return the oop cached in the IC stub beginning at code_begin.
// The stub layout is: set-constant (the cached oop) followed by a jump.
oop InlineCacheBuffer::ic_buffer_cached_oop(address code_begin) {
  // creation also verifies the object
  NativeMovConstReg* move = nativeMovConstReg_at(code_begin);
  // Verifies the jump; the wrapper is constructed only for its
  // constructor-side verification, so no local binding is needed
  // (the original's unused local could trigger compiler warnings).
  nativeJump_at(move->next_instruction_address());
  return (oop)move->data();
}
// JVMCI: patch a foreign-call site to target 'foreign_call_destination',
// handling whichever instruction form the compiler emitted (direct call,
// 64-bit mov-literal, jump, or conditional jump), and record the matching
// runtime-call relocation.
void CodeInstaller::pd_relocate_ForeignCall(NativeInstruction* inst, jlong foreign_call_destination, TRAPS) {
  address pc = (address) inst;
  if (inst->is_call()) {
    // NOTE: for call without a mov, the offset must fit a 32-bit immediate
    // see also CompilerToVM.getMaxCallTargetOffset()
    NativeCall* call = nativeCall_at(pc);
    call->set_destination((address) foreign_call_destination);
    _instructions->relocate(call->instruction_address(), runtime_call_Relocation::spec(), Assembler::call32_operand);
  } else if (inst->is_mov_literal64()) {
    // Absolute 64-bit target loaded by a mov; rewrite the immediate.
    NativeMovConstReg* mov = nativeMovConstReg_at(pc);
    mov->set_data((intptr_t) foreign_call_destination);
    _instructions->relocate(mov->instruction_address(), runtime_call_Relocation::spec(), Assembler::imm_operand);
  } else if (inst->is_jump()) {
    NativeJump* jump = nativeJump_at(pc);
    jump->set_jump_destination((address) foreign_call_destination);
    _instructions->relocate(jump->instruction_address(), runtime_call_Relocation::spec(), Assembler::call32_operand);
  } else if (inst->is_cond_jump()) {
    // Adjust the conditional jump's rel32 operand in place by the delta.
    address old_dest = nativeGeneralJump_at(pc)->jump_destination();
    address disp = Assembler::locate_operand(pc, Assembler::call32_operand);
    *(jint*) disp += ((address) foreign_call_destination) - old_dest;
    _instructions->relocate(pc, runtime_call_Relocation::spec(), Assembler::call32_operand);
  } else {
    JVMCI_ERROR("unsupported relocation for foreign call");
  }
  TRACE_jvmci_3("relocating (foreign call) at " PTR_FORMAT, p2i(inst));
}
// Reset this call's java-to-interpreter stub to the clean state:
// no cached Method* (0) and an unresolved jump destination (-1).
// Caller must hold CompiledIC_lock or be at a safepoint.
void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub
  address stub_addr = static_stub->addr();
  assert(stub_addr != NULL, "stub not found");
  // Constructing the wrappers also verifies the instruction patterns.
  NativeMovConstReg* holder_load = nativeMovConstReg_at(stub_addr);
  NativeJump* stub_jump = nativeJump_at(holder_load->next_instruction_address());
  holder_load->set_data(0);
  stub_jump->set_jump_destination((address)-1);
}
// Code for unit testing implementation of NativeJump class void NativeJump::test() { #ifdef ASSERT ResourceMark rm; CodeBuffer cb("test", 100, 100); MacroAssembler* a = new MacroAssembler(&cb); NativeJump* nj; uint idx; int offsets[] = { 0x0, 0xffffffff, 0x7fffffff, 0x80000000, 4096, 4097, 0x20, 0x4000, }; VM_Version::allow_all(); AddressLiteral al(0x7fffbbbb, relocInfo::external_word_type); a->sethi(al, I3); a->jmpl(I3, al.low10(), G0, RelocationHolder::none); a->delayed()->nop(); a->sethi(al, I3); a->jmpl(I3, al.low10(), L3, RelocationHolder::none); a->delayed()->nop(); nj = nativeJump_at( cb.insts_begin() ); nj->print(); nj = nativeJump_at( nj->next_instruction_address() ); for (idx = 0; idx < ARRAY_SIZE(offsets); idx++) { nj->set_jump_destination( nj->instruction_address() + offsets[idx] ); assert(nj->jump_destination() == (nj->instruction_address() + offsets[idx]), "check unit test"); nj->print(); } VM_Version::revert(); #endif // ASSERT }
// Return the destination of the call at addr(). Only direct calls are
// supported; the jump/conditional-jump cases are excised for now.
address Relocation::pd_call_destination() {
  NativeInstruction* ni = nativeInstruction_at(addr());
  if (ni->is_call())
    return nativeCall_at(addr())->destination();
#if 0
  else if (ni->is_jump())
    return nativeJump_at(addr())->jump_destination();
  else if (ni->is_cond_jump())
    return nativeGeneralJump_at(addr())->jump_destination();
  else
#endif /* excise for now */
  {
    ShouldNotReachHere();
    return NULL;
  }
}
// JVMCI (sethi-based variant): patch a foreign-call site — a direct call or
// a sethi-built jump — to 'foreign_call_destination' and record the
// runtime-call relocation.
void CodeInstaller::pd_relocate_ForeignCall(NativeInstruction* inst, jlong foreign_call_destination, TRAPS) {
  address pc = (address) inst;
  if (inst->is_call()) {
    NativeCall* call = nativeCall_at(pc);
    call->set_destination((address) foreign_call_destination);
    _instructions->relocate(call->instruction_address(), runtime_call_Relocation::spec());
  } else if (inst->is_sethi()) {
    // sethi starts the jump pattern; treat the site as a NativeJump.
    NativeJump* jump = nativeJump_at(pc);
    jump->set_jump_destination((address) foreign_call_destination);
    _instructions->relocate(jump->instruction_address(), runtime_call_Relocation::spec());
  } else {
    JVMCI_ERROR("unknown call or jump instruction at " PTR_FORMAT, p2i(pc));
  }
  TRACE_jvmci_3("relocating (foreign call) at " PTR_FORMAT, p2i(inst));
}
// Graal (sethi-based variant): patch a foreign-call site — a direct call or
// a sethi-built jump — to 'foreign_call_destination' and record the
// runtime-call relocation.
inline void CodeInstaller::pd_relocate_ForeignCall(NativeInstruction* inst, jlong foreign_call_destination) {
  address pc = (address) inst;
  if (inst->is_call()) {
    NativeCall* call = nativeCall_at(pc);
    call->set_destination((address) foreign_call_destination);
    _instructions->relocate(call->instruction_address(), runtime_call_Relocation::spec());
  } else if (inst->is_sethi()) {
    // sethi starts the jump pattern; treat the site as a NativeJump.
    NativeJump* jump = nativeJump_at(pc);
    jump->set_jump_destination((address) foreign_call_destination);
    _instructions->relocate(jump->instruction_address(), runtime_call_Relocation::spec());
  } else {
    fatal(err_msg("unknown call or jump instruction at %p", pc));
  }
  TRACE_graal_3("relocating (foreign call) at %p", inst);
}
// Verify the call instruction, its alignment (on MP), the stub's
// instruction patterns, and the overall inline-cache state.
void CompiledStaticCall::verify() {
  // Verify call
  NativeCall::verify();
  if (os::is_MP()) {
    verify_alignment();
  }

  // Verify stub
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  // The locals below are bound only for their constructor-side
  // verification of the stub's instruction patterns.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub); // creation also verifies the object
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // Verify state
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
// Verify the call instruction, its alignment (on MP), the stub's
// instruction patterns, and the overall inline-cache state.
void CompiledDirectStaticCall::verify() {
  // Verify call.
  _call->verify();
  if (os::is_MP()) {
    _call->verify_alignment();
  }

  // Verify stub.
  address stub = find_stub(/*is_aot*/ false);
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  // (The locals are bound only for that constructor-side verification.)
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + NativeCall::get_IC_pos_in_java_to_interp_stub());
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
// Point the call at addr() to destination x. Only direct calls are
// supported; jump/conditional-jump handling is excised for now.
// 'off' is currently unused on this platform.
void Relocation::pd_set_call_destination(address x, intptr_t off) {
  NativeInstruction* ni = nativeInstruction_at(addr());
  if (ni->is_call())
    nativeCall_at(addr())->set_destination(x);
#if 0 /* excise for now */
  else if (ni->is_jump())
    nativeJump_at(addr())->set_jump_destination(x);
  else if (ni->is_cond_jump()) {
    // %%%% kludge this, for now, until we get a jump_destination method
    address old_dest = nativeGeneralJump_at(addr())->jump_destination();
    address disp = Assembler::locate_operand(addr(), Assembler::call32_operand);
    *(jint*)disp += (x - old_dest);
  }
#endif /* excise for now */
  else {
    ShouldNotReachHere();
  }
}
// Redirect the control-transfer instruction at addr() to destination x,
// dispatching over the recognized instruction forms (far call, jump,
// conditional far branch, or plain call).
void Relocation::pd_set_call_destination(address x) {
  address inst_loc = addr();

  if (NativeFarCall::is_far_call_at(inst_loc)) {
    NativeFarCall* call = nativeFarCall_at(inst_loc);
    call->set_destination(x);
  } else if (NativeJump::is_jump_at(inst_loc)) {
    NativeJump* jump = nativeJump_at(inst_loc);
    jump->set_jump_destination(x);
  } else if (NativeConditionalFarBranch::is_conditional_far_branch_at(inst_loc)) {
    NativeConditionalFarBranch* branch = NativeConditionalFarBranch_at(inst_loc);
    branch->set_branch_destination(x);
  } else {
    // Plain call; patch it MT-safely without asserting a trampoline.
    NativeCall* call = nativeCall_at(inst_loc);
    call->set_destination_mt_safe(x, false);
  }
}
// Return the destination of the instruction at addr(). If the instruction
// was just copied here from orig_addr, pc-relative targets are adjusted by
// the distance moved; an absolute mov-literal target needs no adjustment.
address Relocation::pd_call_destination(address orig_addr) {
  intptr_t adj = 0;
  if (orig_addr != NULL) {
    // We just moved this call instruction from orig_addr to addr().
    // This means its target will appear to have grown by addr() - orig_addr.
    adj = -( addr() - orig_addr );
  }

  NativeInstruction* ni = nativeInstruction_at(addr());
  if (ni->is_call()) {
    return nativeCall_at(addr())->destination() + adj;
  } else if (ni->is_jump()) {
    return nativeJump_at(addr())->jump_destination() + adj;
  } else if (ni->is_cond_jump()) {
    return nativeGeneralJump_at(addr())->jump_destination() + adj;
  } else if (ni->is_mov_literal64()) {
    // Absolute 64-bit immediate; no pc-relative adjustment.
    return (address) ((NativeMovConstReg*)ni)->data();
  } else {
    ShouldNotReachHere();
    return NULL;
  }
}
// Return the code entry point the IC stub at code_begin jumps to.
// The stub layout is: set-constant followed by a jump.
address InlineCacheBuffer::ic_buffer_entry_point(address code_begin) {
  // Constructing the wrappers also verifies both instruction patterns.
  NativeMovConstReg* set_value = nativeMovConstReg_at(code_begin);
  return nativeJump_at(set_value->next_instruction_address())->jump_destination();
}
// Return the code entry point the IC stub at code_begin jumps to.
// On this platform the jump sits one fixed-size instruction into the stub.
address InlineCacheBuffer::ic_buffer_entry_point(address code_begin) {
  NativeJump* stub_jump = nativeJump_at(code_begin + NativeInstruction::instruction_size);
  return stub_jump->jump_destination();
}
// Verify that this far call has the expected jumpl_to code pattern by
// checking its layout constants and delegating to NativeJump::verify().
void NativeFarCall::verify() {
  // make sure code pattern is actually a jumpl_to instruction
  assert((int)instruction_size == (int)NativeJump::instruction_size, "same as jump_to");
  assert((int)jmpl_offset == (int)NativeMovConstReg::add_offset, "sethi size ok");
  nativeJump_at(addr_at(0))->verify();
}
// --- is_entrant ------------------------------------------------------------ // See if the 1st op is not a 'jmp resolve_and_patch'. bool CodeBlob::is_entrant() const { return !NativeJump::is_jump_at(CodeBlob::code_begins()) || nativeJump_at(CodeBlob::code_begins())->jump_destination() != StubRoutines::resolve_and_patch_call_entry(); }
// Return the value cached in the IC stub beginning at code_begin.
// The stub layout is: set-constant (the cached value) followed by a jump.
void* InlineCacheBuffer::ic_buffer_cached_value(address code_begin) {
  // creation also verifies the object
  NativeMovConstReg* move = nativeMovConstReg_at(code_begin);
  // Verify the trailing jump via the wrapper's constructor; no local
  // binding needed (the original's unused 'jump' and the redundant
  // temporary 'o' served no purpose).
  nativeJump_at(move->next_instruction_address());
  return (void*)move->data();
}