// SPARC
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // set (empty), G5
  // jmp -1

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size());
  if (base == NULL) {
    return NULL;  // CodeBuffer::expand failed.
  }

  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark));

  __ set_metadata(NULL, as_Register(Matcher::inline_cache_reg_encode()));

  __ set_inst_mark();
  AddressLiteral addrlit(-1);
  __ JUMP(addrlit, G3, 0);

  __ delayed()->nop();

  assert(__ pc() - base <= to_interp_stub_size(), "wrong stub size");

  // Update current stubs pointer and restore code_end.
  __ end_a_stub();
  return base;
}

// x86_64
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // movq rbx, 0
  // jmp -5 # to self

  address mark = cbuf.insts_mark();  // Get mark within main instrs section.

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size());
  if (base == NULL) {
    return NULL;  // CodeBuffer::expand failed.
  }
  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark), Assembler::imm_operand);
  // Static stub relocation also tags the Method* in the code-stream.
  __ mov_metadata(rbx, (Metadata*) NULL);  // Method is zapped till fixup time.
  // This is recognized as unresolved by relocs/nativeinst/ic code.
  __ jump(RuntimeAddress(__ pc()));

  // Update current stubs pointer and restore insts_end.
  __ end_a_stub();
  return base;
}
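
// For context, a minimal sketch of how a port's static-call encoding
// consumes this emitter (pattern adapted from the x86_64.ad static-call
// encoding; the exact surrounding lines are an assumption, not verbatim
// HotSpot source). The call instruction is emitted first, then the
// companion stub; a NULL result means the code cache could not be grown:
//
//   __ call(RuntimeAddress(entry), relocInfo::static_call_type);
//   address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
//   if (stub == NULL) {
//     ciEnv::current()->record_failure("CodeCache is full");
//     return;
//   }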

// SPARC (void-returning variant, C2-only)
void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
#ifdef COMPILER2
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // set (empty), G5
  // jmp -1

  address mark = cbuf.insts_mark();  // Get mark within main instrs section.

  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size()*2);
  if (base == NULL) return;  // CodeBuffer::expand failed.

  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark));

  __ set_metadata(NULL, as_Register(Matcher::inline_cache_reg_encode()));

  __ set_inst_mark();
  AddressLiteral addrlit(-1);
  __ JUMP(addrlit, G3, 0);

  __ delayed()->nop();

  // Update current stubs pointer and restore code_end.
  __ end_a_stub();
#else
  ShouldNotReachHere();
#endif
}

// AArch64
void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // mov rmethod, 0
  // jmp -4 # to self

  address mark = cbuf.insts_mark();  // Get mark within main instrs section.

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size()*2);

  int offset = __ offset();
  if (base == NULL) return;  // CodeBuffer::expand failed.
  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark));
  // Static stub relocation also tags the Method* in the code-stream.
  __ mov_metadata(rmethod, (Metadata*)NULL);
  __ movptr(rscratch1, 0);
  __ br(rscratch1);

  assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
  __ end_a_stub();
}
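
// Fixup note (an inference from the comments above, not new mechanism):
// each stub starts life unresolved -- a NULL Method* constant followed by
// a branch to self (x86) or to address 0 (AArch64), which the relocation
// and native-instruction code recognize as "not yet bound". At resolution
// time, CompiledStaticCall::set_to_interpreted() patches the constant to
// the real Method* and redirects the branch to the interpreter entry point.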

// s390
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark/* = NULL*/) {
#ifdef COMPILER2
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.

  if (mark == NULL) {
    // Get the mark within main instrs section which is set to the address of the call.
    mark = cbuf.insts_mark();
  }
  assert(mark != NULL, "mark must not be NULL");

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address stub = __ start_a_stub(Compile::MAX_stubs_size);
  if (stub == NULL) {
    return NULL;  // CodeBuffer::expand failed.
  }

  __ relocate(static_stub_Relocation::spec(mark));

  AddressLiteral meta = __ allocate_metadata_address(NULL);
  bool success = __ load_const_from_toc(as_Register(Matcher::inline_cache_reg_encode()), meta);

  __ set_inst_mark();
  AddressLiteral a((address)-1);
  success = success && __ load_const_from_toc(Z_R1, a);
  if (!success) {
    return NULL;  // CodeCache is full.
  }

  __ z_br(Z_R1);

  __ end_a_stub();  // Update current stubs pointer and restore insts_end.
  return stub;
#else
  ShouldNotReachHere();
  return NULL;  // Keep the non-COMPILER2 build's missing-return warning quiet.
#endif
}

// PPC64
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark/* = NULL*/) {
#ifdef COMPILER2
  if (mark == NULL) {
    // Get the mark within main instrs section which is set to the address of the call.
    mark = cbuf.insts_mark();
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  // Start the stub.
  address stub = __ start_a_stub(CompiledStaticCall::to_interp_stub_size());
  if (stub == NULL) {
    return NULL;  // CodeCache is full.
  }

  // For java_to_interp stubs we use R11_scratch1 as scratch register
  // and in call trampoline stubs we use R12_scratch2. This way we
  // can distinguish them (see is_NativeCallTrampolineStub_at()).
  Register reg_scratch = R11_scratch1;

  // Create a static stub relocation which relates this stub
  // with the call instruction at insts_call_instruction_offset in the
  // instructions code-section.
  __ relocate(static_stub_Relocation::spec(mark));
  const int stub_start_offset = __ offset();

  // Now, create the stub's code:
  // - load the TOC
  // - load the inline cache oop from the constant pool
  // - load the call target from the constant pool
  // - call
  __ calculate_address_from_global_toc(reg_scratch, __ method_toc());
  AddressLiteral ic = __ allocate_metadata_address((Metadata *)NULL);
  bool success = __ load_const_from_method_toc(as_Register(Matcher::inline_cache_reg_encode()),
                                               ic, reg_scratch, /*fixed_size*/ true);
  if (!success) {
    return NULL;  // CodeCache is full.
  }

  if (ReoptimizeCallSequences) {
    __ b64_patchable((address)-1, relocInfo::none);
  } else {
    AddressLiteral a((address)-1);
    success = __ load_const_from_method_toc(reg_scratch, a, reg_scratch, /*fixed_size*/ true);
    if (!success) {
      return NULL;  // CodeCache is full.
    }
    __ mtctr(reg_scratch);
    __ bctr();
  }

  // FIXME: Assert that the stub can be identified and patched.

  // Java_to_interp_stub_size should be good.
  assert((__ offset() - stub_start_offset) <= CompiledStaticCall::to_interp_stub_size(),
         "should be good size");
  assert(!is_NativeCallTrampolineStub_at(__ addr_at(stub_start_offset)),
         "must not confuse java_to_interp with trampoline stubs");

  // End the stub.
  __ end_a_stub();
  return stub;
#else
  ShouldNotReachHere();
  return NULL;
#endif
}
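
// Design note: unlike the x86/AArch64/SPARC variants, the s390 and PPC64
// variants load both the Method* and the branch target from the constant
// pool / TOC rather than materializing them inline, so fixup later patches
// pool entries instead of instruction immediates. Allocating those pool
// entries can itself fail, which is why only these two ports have the
// extra "CodeCache is full" bail-outs in the middle of stub emission.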