// Offset of given address to current method's TOC.
inline int MacroAssembler::offset_to_method_toc(address addr) {
  intptr_t offset = (intptr_t)addr - (intptr_t)method_toc();
  assert(is_simm((long)offset, 31) && offset >= 0, "must be in range");
  return (int)offset;
}
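// Note (sketch, not from the original source): load_const_from_method_toc
// materializes such an offset as a high-adjusted/low 16-bit pair, roughly:
//   addis Rtmp, Rtoc, offset@ha   // high 16 bits, adjusted for the sign of @l
//   ld    Rdst, offset@l(Rtmp)    // low 16 bits as the D-form displacement
// which is why the assert above requires a non-negative offset that fits
// a 31-bit signed immediate.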
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark/* = NULL*/) {
#ifdef COMPILER2
  if (mark == NULL) {
    // Get the mark within main instrs section which is set to the address of the call.
    mark = cbuf.insts_mark();
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  // Start the stub.
  address stub = __ start_a_stub(CompiledStaticCall::to_interp_stub_size());
  if (stub == NULL) {
    return NULL; // CodeCache is full
  }

  // For java_to_interp stubs we use R11_scratch1 as scratch register
  // and in call trampoline stubs we use R12_scratch2. This way we
  // can distinguish them (see is_NativeCallTrampolineStub_at()).
  Register reg_scratch = R11_scratch1;

  // Create a static stub relocation which relates this stub
  // with the call instruction at insts_call_instruction_offset in the
  // instructions code-section.
  __ relocate(static_stub_Relocation::spec(mark));
  const int stub_start_offset = __ offset();

  // Now, create the stub's code:
  // - load the TOC
  // - load the inline cache metadata from the constant pool
  // - load the call target from the constant pool
  // - call
  __ calculate_address_from_global_toc(reg_scratch, __ method_toc());
  AddressLiteral ic = __ allocate_metadata_address((Metadata *)NULL);
  bool success = __ load_const_from_method_toc(as_Register(Matcher::inline_cache_reg_encode()),
                                               ic, reg_scratch, /*fixed_size*/ true);
  if (!success) {
    return NULL; // CodeCache is full
  }

  if (ReoptimizeCallSequences) {
    __ b64_patchable((address)-1, relocInfo::none);
  } else {
    AddressLiteral a((address)-1);
    success = __ load_const_from_method_toc(reg_scratch, a, reg_scratch, /*fixed_size*/ true);
    if (!success) {
      return NULL; // CodeCache is full
    }
    __ mtctr(reg_scratch);
    __ bctr();
  }

  // FIXME: Assert that the stub can be identified and patched.

  // Java_to_interp_stub_size should be good.
  assert((__ offset() - stub_start_offset) <= CompiledStaticCall::to_interp_stub_size(),
         "should be good size");
  assert(!is_NativeCallTrampolineStub_at(__ addr_at(stub_start_offset)),
         "must not confuse java_to_interp with trampoline stubs");

  // End the stub.
  __ end_a_stub();
  return stub;
#else
  ShouldNotReachHere();
  return NULL;
#endif
}
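// For reference, a sketch of the stub emitted above (instruction names are
// illustrative; the second half depends on ReoptimizeCallSequences):
//
//   <compute global TOC address into R11_scratch1>
//   ld    IC_reg, <ic_offset>(R11_scratch1)          // inline cache metadata
//
//   // if ReoptimizeCallSequences:
//   b64_patchable <interpreter entry>                // patchable far branch
//
//   // else:
//   ld    R11_scratch1, <entry_offset>(R11_scratch1) // call target
//   mtctr R11_scratch1
//   bctr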