// Emit the out-of-line stub through which a compiled static call reaches the
// interpreter. The stub body is later patched (see set_to_interpreted) with
// the real Method* and entry point. Uses a delay slot (`delayed()->nop()`)
// and G-registers, so this is the SPARC port.
//
// cbuf  - code buffer to append the stub to (in its stubs section).
// mark  - instruction address of the call this stub belongs to; if NULL,
//         the buffer's current insts_mark is used.
// Returns the stub's start address, or NULL if the buffer could not expand.
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
  // Stub is fixed up when the corresponding call is converted from calling
  // compiled code to calling interpreted code.
  // set (empty), G5
  // jmp -1
  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }
  MacroAssembler _masm(&cbuf);

  // Reserve space in the stubs section; fails only if the buffer can't grow.
  address base = __ start_a_stub(to_interp_stub_size());
  if (base == NULL) {
    return NULL;  // CodeBuffer::expand failed.
  }

  // Static stub relocation stores the instruction address of the call,
  // linking the stub back to its call site for later fixup.
  __ relocate(static_stub_Relocation::spec(mark));

  // Emit a placeholder metadata load into the inline-cache register; the
  // NULL Method* is replaced at fixup time.
  __ set_metadata(NULL, as_Register(Matcher::inline_cache_reg_encode()));

  // Mark the jump instruction so the patching code can find it, then emit a
  // jump to -1 (unresolved) with a nop in the delay slot.
  __ set_inst_mark();
  AddressLiteral addrlit(-1);
  __ JUMP(addrlit, G3, 0);
  __ delayed()->nop();

  assert(__ pc() - base <= to_interp_stub_size(), "wrong stub size");

  // Update current stubs pointer and restore code_end.
  __ end_a_stub();
  return base;
}
// Emit the out-of-line stub through which a compiled static call reaches the
// interpreter. The stub is patched later (Method* into rbx, jump retargeted)
// when the call is resolved. Register names (rbx) and `Assembler::imm_operand`
// indicate this is the x86 port.
//
// cbuf  - code buffer to append the stub to (in its stubs section).
// mark  - instruction address of the call this stub belongs to; if NULL,
//         the buffer's current insts_mark is used.
// Returns the stub's start address, or NULL if the buffer could not expand.
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // movq rbx, 0
  // jmp -5 # to self
  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }
  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  // Reserve space in the stubs section; fails only if the buffer can't grow.
  address base = __ start_a_stub(to_interp_stub_size());
  if (base == NULL) {
    return NULL;  // CodeBuffer::expand failed.
  }

  // Static stub relocation stores the instruction address of the call,
  // linking the stub back to its call site for later fixup.
  __ relocate(static_stub_Relocation::spec(mark), Assembler::imm_operand);
  // Static stub relocation also tags the Method* in the code-stream.
  __ mov_metadata(rbx, (Metadata*) NULL);  // Method is zapped till fixup time.
  // This is recognized as unresolved by relocs/nativeinst/ic code:
  // a jump whose destination is its own address (jmp to self).
  __ jump(RuntimeAddress(__ pc()));

  assert(__ pc() - base <= to_interp_stub_size(), "wrong stub size");

  // Update current stubs pointer and restore insts_end.
  __ end_a_stub();
  return base;
}
// Emit the out-of-line stub through which a compiled static call reaches the
// interpreter. The stub is patched later (Method* into rmethod, branch target
// into rscratch1) when the call is resolved. Register names (rmethod,
// rscratch1) and `br` indicate this is the AArch64 port.
//
// cbuf - code buffer to append the stub to (in its stubs section).
// Returns nothing; on CodeBuffer::expand failure the stub is simply not
// emitted (callers detect this via the buffer state).
void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // mov rmethod, 0
  // jmp -4 # to self

  address mark = cbuf.insts_mark();  // Get mark within main instrs section.

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  // NOTE(review): reserves twice the stub size — presumably headroom for
  // alignment/relocation expansion; confirm against to_interp_stub_size().
  address base = __ start_a_stub(to_interp_stub_size()*2);
  // Check for failure BEFORE touching the assembler again: if expansion
  // failed there is no stub space, so do not query __ offset() first.
  // (The original read the offset before this check.)
  if (base == NULL) return;  // CodeBuffer::expand failed
  int offset = __ offset();

  // static stub relocation stores the instruction address of the call,
  // linking the stub back to its call site for later fixup
  __ relocate(static_stub_Relocation::spec(mark));
  // static stub relocation also tags the Method* in the code-stream.
  __ mov_metadata(rmethod, (Metadata*)NULL);  // zapped till fixup time

  // Unresolved placeholder branch: load 0 and branch through rscratch1;
  // patched with the real interpreter entry when the call is resolved.
  __ movptr(rscratch1, 0);
  __ br(rscratch1);

  assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");

  // Update current stubs pointer and restore insts_end.
  __ end_a_stub();
}
// Emit the out-of-line stub through which a compiled static call reaches the
// interpreter; only meaningful under COMPILER2 (C2), otherwise unreachable.
// Uses a delay slot (`delayed()->nop()`) and G-registers, so this is a SPARC
// port. The stub is patched later when the call is resolved.
//
// cbuf - code buffer to append the stub to (in its stubs section).
// Returns nothing; on CodeBuffer::expand failure the stub is simply not
// emitted (callers detect this via the buffer state).
void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
#ifdef COMPILER2
  // Stub is fixed up when the corresponding call is converted from calling
  // compiled code to calling interpreted code.
  // set (empty), G5
  // jmp -1

  address mark = cbuf.insts_mark();  // Get mark within main instrs section.
  MacroAssembler _masm(&cbuf);

  // NOTE(review): reserves twice the stub size — presumably headroom for
  // alignment/relocation expansion; confirm against to_interp_stub_size().
  address base = __ start_a_stub(to_interp_stub_size()*2);
  if (base == NULL) return;  // CodeBuffer::expand failed.

  // Static stub relocation stores the instruction address of the call,
  // linking the stub back to its call site for later fixup.
  __ relocate(static_stub_Relocation::spec(mark));

  // Emit a placeholder metadata load into the inline-cache register; the
  // NULL Method* is replaced at fixup time.
  __ set_metadata(NULL, as_Register(Matcher::inline_cache_reg_encode()));

  // Mark the jump instruction so the patching code can find it, then emit a
  // jump to -1 (unresolved) with a nop in the delay slot.
  __ set_inst_mark();
  AddressLiteral addrlit(-1);
  __ JUMP(addrlit, G3, 0);
  __ delayed()->nop();

  // Update current stubs pointer and restore code_end.
  __ end_a_stub();
#else
  ShouldNotReachHere();
#endif
}
// Patch this call's to-interpreter stub so the call site dispatches to the
// interpreter entry of `callee`. Writes the Method* into the stub's
// metadata-load instruction, rewrites the stub's jump to `entry`, flushes
// the instruction cache over the stub, and finally retargets the call
// itself at the stub.
//
// callee - the resolved target method; its address is stored in the stub.
// entry  - interpreter entry point the stub's jump is patched to.
// Requires that a to-interp stub was emitted for this call (guaranteed).
void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object: nativeMovConstReg_at checks that the
  // stub really starts with the expected metadata-load instruction.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
#ifndef PRODUCT
  NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());

  // The stub must either still be unresolved (data == 0) or already hold
  // exactly this callee/entry — anything else means another thread raced us.
  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(method_holder->data() == 0 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");
#endif

  // Update stub: store the Method*, then patch the jump to the interpreter
  // entry, then flush the icache so other CPUs see the new instructions.
  method_holder->set_data((intptr_t)callee());
  NativeGeneralJump::insert_unconditional(method_holder->next_instruction_address(), entry);
  ICache::invalidate_range(stub, to_interp_stub_size());

  // Update jump to call: only after the stub is fully patched is the call
  // site itself retargeted at the stub (MT-safe publication order).
  set_destination_mt_safe(stub);
}