// Patch (or verify) the data constant of the NativeMovConstReg at this
// relocation's address. Format 0 patches the full-width immediate; format 1
// patches a narrow (compressed) oop or klass constant.
// TODO: PPC port — a previous revision kept an unused local
// 'copy_back_to_oop_pool' here; removed as dead code.
void Relocation::pd_set_data_value(address x, intptr_t o, bool verify_only) {
  // The following comment is from the declaration of DataRelocation:
  //
  // "The "o" (displacement) argument is relevant only to split relocations
  //  on RISC machines.  In some CPUs (SPARC), the set-hi and set-lo ins'ns
  //  can encode more than 32 bits between them.  This allows compilers to
  //  share set-hi instructions between addresses that differ by a small
  //  offset (e.g., different static variables in the same class).
  //  On such machines, the "x" argument to set_value on all set-lo
  //  instructions must be the same as the "x" argument for the
  //  corresponding set-hi instructions.  The "o" arguments for the
  //  set-hi instructions are ignored, and must not affect the high-half
  //  immediate constant.  The "o" arguments for the set-lo instructions are
  //  added into the low-half immediate constant, and must not overflow it."
  //
  // Currently we don't support splitting of relocations, so o must be
  // zero:
  assert(o == 0, "tried to split relocations");

  if (!verify_only) {
    if (format() != 1) {
      // Uncompressed: rewrite the full-width load-constant sequence.
      nativeMovConstReg_at(addr())->set_data_plain(((intptr_t)x), code());
    } else {
      // Format 1: narrow constant — only oops and klasses have an encoding.
      assert(type() == relocInfo::oop_type || type() == relocInfo::metadata_type,
             "how to encode else?");
      narrowOop no = (type() == relocInfo::oop_type) ?
        oopDesc::encode_heap_oop((oop)x) : Klass::encode_klass((Klass*)x);
      nativeMovConstReg_at(addr())->set_narrow_oop(no, code());
    }
  } else {
    // Verification pass: the instruction must already hold 'x'.
    guarantee((address) (nativeMovConstReg_at(addr())->data()) == x, "data must match");
  }
}
// Finalize this patching stub site: record the emit info and register, bind
// the continuation label, measure the byte span to copy, and pre-set the
// instruction at pc_start() according to the stub kind.
void install(MacroAssembler* masm, LIR_PatchCode patch_code, Register obj, CodeEmitInfo* info) {
  _info = info;
  _obj = obj;
  masm->bind(_patch_site_continuation);
  // Bytes between the patch site and the continuation; copied by the
  // patching code at runtime.
  _bytes_to_copy = masm->pc() - pc_start();
  if (_id == PatchingStub::access_field_id) {
    // embed a fixed offset to handle long patches which need to be offset by a word.
    // the patching code will just add the field offset field to this offset so
    // that we can reference either the high or low word of a double word field.
    int field_offset = 0;
    switch (patch_code) {
    case lir_patch_low:
      field_offset = lo_word_offset_in_bytes;
      break;
    case lir_patch_high:
      field_offset = hi_word_offset_in_bytes;
      break;
    case lir_patch_normal:
      field_offset = 0;
      break;
    default:
      ShouldNotReachHere();
    }
    // Creation of the wrapper also verifies the instruction shape.
    NativeMovRegMem* n_move = nativeMovRegMem_at(pc_start());
    n_move->set_offset(field_offset);
  } else if (_id == load_klass_id || _id == load_mirror_id || _id == load_appendix_id) {
    assert(_obj != noreg, "must have register object for load_klass/load_mirror");
#ifdef ASSERT
    // verify that we're pointing at a NativeMovConstReg
    nativeMovConstReg_at(pc_start());
#endif
  } else {
    ShouldNotReachHere();
  }
  assert(_bytes_to_copy <= (masm->pc() - pc_start()), "not enough bytes");
}
// Re-point the instruction at 'inst' (direct call, 64-bit mov-literal, jump,
// or conditional jump) at the given foreign call destination, and record a
// matching runtime-call relocation for it.
void CodeInstaller::pd_relocate_ForeignCall(NativeInstruction* inst, jlong foreign_call_destination, TRAPS) {
  address pc = (address) inst;
  if (inst->is_call()) {
    // NOTE: for call without a mov, the offset must fit a 32-bit immediate
    // see also CompilerToVM.getMaxCallTargetOffset()
    NativeCall* call = nativeCall_at(pc);
    call->set_destination((address) foreign_call_destination);
    _instructions->relocate(call->instruction_address(), runtime_call_Relocation::spec(), Assembler::call32_operand);
  } else if (inst->is_mov_literal64()) {
    // Absolute 64-bit target loaded into a register.
    NativeMovConstReg* mov = nativeMovConstReg_at(pc);
    mov->set_data((intptr_t) foreign_call_destination);
    _instructions->relocate(mov->instruction_address(), runtime_call_Relocation::spec(), Assembler::imm_operand);
  } else if (inst->is_jump()) {
    NativeJump* jump = nativeJump_at(pc);
    jump->set_jump_destination((address) foreign_call_destination);
    _instructions->relocate(jump->instruction_address(), runtime_call_Relocation::spec(), Assembler::call32_operand);
  } else if (inst->is_cond_jump()) {
    // Conditional jump: adjust the 32-bit displacement in place by the
    // delta between old and new destinations.
    address old_dest = nativeGeneralJump_at(pc)->jump_destination();
    address disp = Assembler::locate_operand(pc, Assembler::call32_operand);
    *(jint*) disp += ((address) foreign_call_destination) - old_dest;
    _instructions->relocate(pc, runtime_call_Relocation::spec(), Assembler::call32_operand);
  } else {
    JVMCI_ERROR("unsupported relocation for foreign call");
  }

  TRACE_jvmci_3("relocating (foreign call) at " PTR_FORMAT, p2i(inst));
}
// Return the destination of the call-type instruction at addr().
// If orig_addr is non-NULL, the instruction was just copied from orig_addr
// to addr(), so pc-relative targets must be adjusted by the distance moved.
address Relocation::pd_call_destination(address orig_addr) {
  intptr_t adj = 0;
  address inst_loc = addr();

  if (orig_addr != NULL) {
    // We just moved this call instruction from orig_addr to addr().
    // This means its target will appear to have grown by addr() - orig_addr.
    adj = -(inst_loc - orig_addr);
  }
  if (NativeFarCall::is_far_call_at(inst_loc)) {
    NativeFarCall* call = nativeFarCall_at(inst_loc);
    // Only pc-relative far calls need the displacement adjustment.
    return call->destination() + (intptr_t)(call->is_pcrelative() ? adj : 0);
  } else if (NativeJump::is_jump_at(inst_loc)) {
    NativeJump* jump = nativeJump_at(inst_loc);
    return jump->jump_destination() + (intptr_t)(jump->is_pcrelative() ? adj : 0);
  } else if (NativeConditionalFarBranch::is_conditional_far_branch_at(inst_loc)) {
    NativeConditionalFarBranch* branch = NativeConditionalFarBranch_at(inst_loc);
    return branch->branch_destination();
  } else {
    // Otherwise the call goes through a trampoline stub: read the real
    // destination out of the stub's load-constant.
    // There are two instructions at the beginning of a stub, therefore we
    // load at orig_addr + 8.
    orig_addr = nativeCall_at(inst_loc)->get_trampoline();
    if (orig_addr == NULL) {
      return (address) -1;  // no trampoline found — sentinel for "unset"
    } else {
      return (address) nativeMovConstReg_at(orig_addr + 8)->data();
    }
  }
}
// Point this static call at the interpreter for 'callee': write the Method*
// into the stub's load-constant, retarget the stub's jump to 'entry', and
// finally (MT-safely) redirect the call instruction to the stub.
void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub(/*is_aot*/ false);
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + NativeCall::get_IC_pos_in_java_to_interp_stub());
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // A generated lambda form might be deleted from the Lambdaform
  // cache in MethodTypeForm. If a jit compiled lambdaform method
  // becomes not entrant and the cache access returns null, the new
  // resolve will lead to a new generated LambdaForm.
  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee() || callee->is_compiled_lambda_form(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call. Must happen last so racing readers never see a
  // half-initialized stub.
  set_destination_mt_safe(stub);
}
// Read the cached oop out of an IC-buffer stub. The stub starts with a
// set-constant holding the oop; constructing the wrappers also verifies
// both instructions.
oop InlineCacheBuffer::ic_buffer_cached_oop(address code_begin) {
  NativeMovConstReg* oop_load = nativeMovConstReg_at(code_begin);
  // Verify the jump that follows the load (result intentionally unused).
  NativeJump* stub_jump = nativeJump_at(oop_load->next_instruction_address());
  return (oop)oop_load->data();
}
// Point this static call at the interpreter for 'callee': store the Method*
// in the stub's load-constant, retarget the stub's jump to 'entry', then
// MT-safely redirect the call instruction to the stub.
void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // Sanity: the stub must be clean (0 / -1 sentinels) or already hold
  // exactly the values we are about to write.
  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call. Must happen last so racing readers never see a
  // half-initialized stub.
  set_destination_mt_safe(stub);
}
// Release the CompiledICHolder* associated with this call site is there is one. void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) { // This call site might have become stale so inspect it carefully. NativeCall* call = nativeCall_at(call_site->addr()); if (is_icholder_entry(call->destination())) { NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value()); InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data()); } }
// Patch the embedded data of the instruction at addr() to x + o.
// Only set-constant (movl) instructions are supported here.
void Relocation::pd_set_data_value(address x, intptr_t o) {
  NativeInstruction* insn = nativeInstruction_at(addr());
  if (insn->is_movl()) {
    nativeMovConstReg_at(addr())->set_data(((intptr_t)x) + o);
  } else {
    ShouldNotReachHere();
  }
}
// Patch (or, in verify mode, check) the constant referenced by this
// relocation to x + o. The relocation format selects the encoding:
// uncompressed immediate, compressed (narrow oop/klass), or pc-relative.
void Relocation::pd_set_data_value(address x, intptr_t o, bool verify_only) {
  // we don't support splitting of relocations, so o must be zero:
  assert(o == 0, "tried to split relocations");
  if (!verify_only) {
    switch (format()) {
      case relocInfo::uncompressed_format:
        // Full-width constant: rewrite the immediate directly.
        nativeMovConstReg_at(addr())->set_data_plain(((intptr_t)x) + o, code());
        break;
      case relocInfo::compressed_format:
        // Narrow (32-bit encoded) oop or klass constant.
        if (type() == relocInfo::metadata_type)
          nativeMovConstReg_at(addr())->set_narrow_klass(((intptr_t)x) + o);
        else if (type() == relocInfo::oop_type)
          nativeMovConstReg_at(addr())->set_narrow_oop(((intptr_t)x) + o);
        else
          guarantee(false, "bad relocInfo type for relocInfo::narrow_oop_format");
        break;
      case relocInfo::pcrel_addr_format:
        // patch target location
        nativeMovConstReg_at(addr())->set_pcrel_addr(((intptr_t)x) + o, code());
        break;
      case relocInfo::pcrel_data_format:
        // patch data at target location
        nativeMovConstReg_at(addr())->set_pcrel_data(((intptr_t)x) + o, code());
        break;
      default:
        assert(false, "not a valid relocInfo format");
        break;
    }
  } else {
    // TODO: Reading of narrow oops out of code stream is not implemented
    // (see nativeMovConstReg::data()). Implement this if you want to verify.
    // assert(x == (address) nativeMovConstReg_at(addr())->data(), "Instructions must match");
    switch (format()) {
      case relocInfo::uncompressed_format:
        break;
      case relocInfo::compressed_format:
        break;
      case relocInfo::pcrel_addr_format:
        break;
      case relocInfo::pcrel_data_format:
        break;
      default:
        assert(false, "not a valid relocInfo format");
        break;
    }
  }
}
// Extract the address embedded in the instruction at addr().
// Only set-constant (movl) instructions are supported for now.
address Relocation::pd_get_address_from_code() {
  NativeInstruction* insn = nativeInstruction_at(addr());
  if (insn->is_movl()) {
    return (address)(nativeMovConstReg_at(addr())->data());
  }
  ShouldNotReachHere();
  return NULL;
}
// Reset a static-call stub to the clean state by clearing its cached
// Method* load-constant.
void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  // Only safe under the IC lock or at a safepoint.
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  address stub_addr = static_stub->addr();
  assert(stub_addr != NULL, "stub not found");
  // Constructing the wrapper also verifies the instruction.
  NativeMovConstReg* holder_load = nativeMovConstReg_at(stub_addr);
  holder_load->set_data(0);
}
// Reset a static-call stub to the clean state: clear the cached Method*
// and poison the stub's jump target.
void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  // Only safe under the IC lock or at a safepoint.
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  address stub_addr = static_stub->addr();
  assert(stub_addr != NULL, "stub not found");
  // Constructing the wrappers also verifies both instructions.
  NativeMovConstReg* holder_load = nativeMovConstReg_at(stub_addr);
  NativeJump* interp_jump = nativeJump_at(holder_load->next_instruction_address());
  holder_load->set_data(0);
  interp_jump->set_jump_destination((address)-1);
}
// Initialize this CompiledIC from a relocation iterator already positioned
// on the IC call: a true virtual call carries a cached value in a
// set-constant; an optimized virtual call has none.
void CompiledIC::initialize_from_iter(RelocIterator* iter) {
  assert(iter->addr() == _ic_call->instruction_address(), "must find ic_call");

  if (iter->type() == relocInfo::virtual_call_type) {
    virtual_call_Relocation* reloc = iter->virtual_call_reloc();
    _is_optimized = false;
    _value = nativeMovConstReg_at(reloc->cached_value());
  } else {
    assert(iter->type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}
// Code for unit testing implementation of NativeMovConstReg class void NativeMovConstReg::test() { #ifdef ASSERT ResourceMark rm; CodeBuffer cb("test", 100, 100); MacroAssembler* a = new MacroAssembler(&cb); NativeMovConstReg* nm; uint idx; int offsets[] = { 0x0, 0x7fffffff, 0x80000000, 0xffffffff, 0x20, 4096, 4097, }; VM_Version::allow_all(); AddressLiteral al1(0xaaaabbbb, relocInfo::external_word_type); a->sethi(al1, I3); a->add(I3, al1.low10(), I3); AddressLiteral al2(0xccccdddd, relocInfo::external_word_type); a->sethi(al2, O2); a->add(O2, al2.low10(), O2); nm = nativeMovConstReg_at( cb.insts_begin() ); nm->print(); nm = nativeMovConstReg_at( nm->next_instruction_address() ); for (idx = 0; idx < ARRAY_SIZE(offsets); idx++) { nm->set_data( offsets[idx] ); assert(nm->data() == offsets[idx], "check unit test"); } nm->print(); VM_Version::revert(); #endif }
// Sanity-check this static call site: the call instruction itself, its
// alignment on MP systems, the presence of a to-interpreter stub, and the
// inline-cache state.
void CompiledStaticCall::verify() {
  // Verify call
  NativeCall::verify();
  if (os::is_MP()) {
    verify_alignment();
  }

  // Verify stub
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  // The locals below look unused, but constructing the wrappers is what
  // performs the verification (creation also verifies the object).
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // Verify state
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
// Sanity-check this direct static call site: the call instruction, its
// alignment on MP systems, the to-interpreter stub, and the IC state.
void CompiledDirectStaticCall::verify() {
  // Verify call.
  _call->verify();
  if (os::is_MP()) {
    _call->verify_alignment();
  }

  // Verify stub.
  address stub = find_stub(/*is_aot*/ false);
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object — the locals themselves are unused.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + NativeCall::get_IC_pos_in_java_to_interp_stub());
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
void CodeInstaller::pd_patch_MetaspaceConstant(int pc_offset, Handle constant, TRAPS) { address pc = _instructions->start() + pc_offset; if (HotSpotMetaspaceConstantImpl::compressed(constant)) { #ifdef _LP64 NativeMovConstReg32* move = nativeMovConstReg32_at(pc); narrowKlass narrowOop = record_narrow_metadata_reference(constant, CHECK); move->set_data((intptr_t)narrowOop); TRACE_jvmci_3("relocating (narrow metaspace constant) at %p/%p", pc, narrowOop); #else JVMCI_ERROR("compressed Klass* on 32bit"); #endif } else { NativeMovConstReg* move = nativeMovConstReg_at(pc); Metadata* reference = record_metadata_reference(constant, CHECK); move->set_data((intptr_t)reference); TRACE_jvmci_3("relocating (metaspace constant) at %p/%p", pc, reference); } }
// Set the target of the call at addr() to 'x'. Handles far calls (both
// pc-relative and TOC-based), ordinary pc-relative calls, and absolute
// targets held in a load-constant.
void Relocation::pd_set_call_destination(address x) {
  address inst_addr = addr();

  if (NativeFarCall::is_far_call_at(inst_addr)) {
    if (!ShortenBranches) {
      if (MacroAssembler::is_call_far_pcrelative(inst_addr)) {
        address a1 = MacroAssembler::get_target_addr_pcrel(inst_addr+MacroAssembler::nop_size());
#ifdef ASSERT
        // Cross-check the two ways of reading the destination; dump the
        // surrounding code if they disagree.
        address a3 = nativeFarCall_at(inst_addr)->destination();
        if (a1 != a3) {
          unsigned int range = 128;
          Assembler::dump_code_range(tty, inst_addr, range, "pc-relative call w/o ShortenBranches?");
          assert(false, "pc-relative call w/o ShortenBranches?");
        }
#endif
        nativeFarCall_at(inst_addr)->set_destination(x, 0);
        return;
      }
      // Non-pc-relative far call without ShortenBranches: must be the
      // "unset" sentinel.
      assert(x == (address)-1, "consistency check");
      return;
    }
    // ShortenBranches: far calls load their target from the constant pool.
    int toc_offset = -1;
    if (type() == relocInfo::runtime_call_w_cp_type) {
      toc_offset = ((runtime_call_w_cp_Relocation *)this)->get_constant_pool_offset();
    }
    if (toc_offset>=0) {
      NativeFarCall* call = nativeFarCall_at(inst_addr);
      call->set_destination(x, toc_offset);
      return;
    }
  }

  if (NativeCall::is_call_at(inst_addr)) {
    NativeCall* call = nativeCall_at(inst_addr);
    if (call->is_pcrelative()) {
      call->set_destination_mt_safe(x);
      return;
    }
  }

  // constant is absolute, must use x
  nativeMovConstReg_at(inst_addr)->set_data(((intptr_t)x));
}
// Return the destination of the call at addr(). If orig_addr is non-NULL,
// the code was just copied from orig_addr, so pc-relative targets (and
// constant-pool loads, which are pc-relative) must be read from or adjusted
// for the original location.
address Relocation::pd_call_destination(address orig_addr) {
  address inst_addr = addr();

  if (NativeFarCall::is_far_call_at(inst_addr)) {
    if (!ShortenBranches) {
      if (MacroAssembler::is_call_far_pcrelative(inst_addr)) {
        address a1 = MacroAssembler::get_target_addr_pcrel(orig_addr+MacroAssembler::nop_size());
#ifdef ASSERT
        // Cross-check both read paths at both locations; dump code on
        // disagreement.
        address a2 = MacroAssembler::get_target_addr_pcrel(inst_addr+MacroAssembler::nop_size());
        address a3 = nativeFarCall_at(orig_addr)->destination();
        address a4 = nativeFarCall_at(inst_addr)->destination();
        if ((a1 != a3) || (a2 != a4)) {
          unsigned int range = 128;
          Assembler::dump_code_range(tty, inst_addr, range, "pc-relative call w/o ShortenBranches?");
          Assembler::dump_code_range(tty, orig_addr, range, "pc-relative call w/o ShortenBranches?");
          assert(false, "pc-relative call w/o ShortenBranches?");
        }
#endif
        return a1;
      }
      return (address)(-1);  // "unset" sentinel
    }
    NativeFarCall* call;
    if (orig_addr == NULL) {
      call = nativeFarCall_at(inst_addr);
    } else {
      // must access location (in CP) where destination is stored in unmoved code, because load from CP is pc-relative
      call = nativeFarCall_at(orig_addr);
    }
    return call->destination();
  }

  if (NativeCall::is_call_at(inst_addr)) {
    NativeCall* call = nativeCall_at(inst_addr);
    if (call->is_pcrelative()) {
      // Adjust for the distance the instruction was moved.
      intptr_t off = inst_addr - orig_addr;
      return (address) (call->destination()-off);
    }
  }
  // Absolute target held in a load-constant; unaffected by the move.
  return (address) nativeMovConstReg_at(inst_addr)->data();
}
// Patch an oop constant at pc_offset. Compressed oops (64-bit only) just
// record a relocation (format 1); uncompressed oops are written into the
// sethi/add set-constant pair, each half getting its own relocation.
void CodeInstaller::pd_patch_OopConstant(int pc_offset, Handle constant, TRAPS) {
  address pc = _instructions->start() + pc_offset;
  Handle obj = HotSpotObjectConstantImpl::object(constant);
  jobject value = JNIHandles::make_local(obj());
  if (HotSpotObjectConstantImpl::compressed(constant)) {
#ifdef _LP64
    int oop_index = _oop_recorder->find_index(value);
    RelocationHolder rspec = oop_Relocation::spec(oop_index);
    _instructions->relocate(pc, rspec, 1);
#else
    JVMCI_ERROR("compressed oop on 32bit");
#endif
  } else {
    NativeMovConstReg* move = nativeMovConstReg_at(pc);
    move->set_data((intptr_t) value);

    // We need two relocations: one on the sethi and one on the add.
    int oop_index = _oop_recorder->find_index(value);
    RelocationHolder rspec = oop_Relocation::spec(oop_index);
    _instructions->relocate(pc + NativeMovConstReg::sethi_offset, rspec);
    _instructions->relocate(pc + NativeMovConstReg::add_offset, rspec);
  }
}
// Construct a CompiledIC for the given call in 'nm' by locating the
// relocation record covering the call instruction and initializing the
// virtual/optimized state from it.
CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
  : _ic_call(call)
{
  address ic_call = call->instruction_address();

  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass nmethod");
  assert(nm->contains(ic_call), "must be in nmethod");

  // search for the ic_call at the given address
  RelocIterator iter(nm, ic_call, ic_call+1);
  bool found = iter.next();
  assert(found == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");

  if (iter.type() == relocInfo::virtual_call_type) {
    // A true virtual call carries its cached value in a set-constant.
    virtual_call_Relocation* reloc = iter.virtual_call_reloc();
    _is_optimized = false;
    _value = nativeMovConstReg_at(reloc->cached_value());
  } else {
    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}
// Return the entry point an IC-buffer stub jumps to. The stub is a
// set-constant followed by a jump; constructing the wrappers also verifies
// both instructions.
address InlineCacheBuffer::ic_buffer_entry_point(address code_begin) {
  NativeMovConstReg* value_load = nativeMovConstReg_at(code_begin);
  NativeJump* stub_jump = nativeJump_at(value_load->next_instruction_address());
  return stub_jump->jump_destination();
}
// Patch a data reference at pc_offset. Primitive values are either inlined
// into a set-constant or appended to the constants section with the load's
// displacement fixed up; object constants always go through the constants
// section and get an oop relocation.
inline void CodeInstaller::pd_site_DataPatch(int pc_offset, oop site) {
  oop constant = CompilationResult_DataPatch::constant(site);
  int alignment = CompilationResult_DataPatch::alignment(site);
  bool inlined = CompilationResult_DataPatch::inlined(site) == JNI_TRUE;

  oop kind = Constant::kind(constant);
  char typeChar = Kind::typeChar(kind);

  address pc = _instructions->start() + pc_offset;

  switch (typeChar) {
    case 'z':
    case 'b':
    case 's':
    case 'c':
    case 'i':
      fatal("int-sized values not expected in DataPatch");
      break;
    case 'f':
    case 'j':
    case 'd': {
      if (inlined) {
        // Value fits in the instruction: patch the set-constant directly.
        NativeMovConstReg* move = nativeMovConstReg_at(pc);
        uint64_t value = Constant::primitive(constant);
        move->set_data(value);
      } else {
        // Append the value to the constants section (aligned if requested)
        // and point the load at it.
        int size = _constants->size();
        if (alignment > 0) {
          guarantee(alignment <= _constants->alignment(), "Alignment inside constants section is restricted by alignment of section begin");
          size = align_size_up(size, alignment);
        }
        // we don't care if this is a long/double/etc., the primitive field contains the right bits
        address dest = _constants->start() + size;
        _constants->set_end(dest);
        uint64_t value = Constant::primitive(constant);
        _constants->emit_int64(value);

        // Displacement from the load back to the constant; negative because
        // the constants section precedes the instructions here.
        NativeMovRegMem* load = nativeMovRegMem_at(pc);
        int disp = _constants_size + pc_offset - size - BytesPerInstWord;
        load->set_offset(-disp);
      }
      break;
    }
    case 'a': {
      // Object constant: emit a handle into the constants section, fix up
      // the load, and record an oop relocation on the emitted slot.
      int size = _constants->size();
      if (alignment > 0) {
        guarantee(alignment <= _constants->alignment(), "Alignment inside constants section is restricted by alignment of section begin");
        size = align_size_up(size, alignment);
      }
      address dest = _constants->start() + size;
      _constants->set_end(dest);
      Handle obj = Constant::object(constant);
      jobject value = JNIHandles::make_local(obj());
      _constants->emit_address((address) value);

      NativeMovRegMem* load = nativeMovRegMem_at(pc);
      int disp = _constants_size + pc_offset - size - BytesPerInstWord;
      load->set_offset(-disp);

      int oop_index = _oop_recorder->find_index(value);
      _constants->relocate(dest, oop_Relocation::spec(oop_index));
      break;
    }
    default:
      fatal(err_msg("unexpected Kind (%d) in DataPatch", typeChar));
      break;
  }
}
// Read back the address embedded in the set-constant at this relocation's
// location; constructing the wrapper also verifies the instruction.
address Relocation::pd_get_address_from_code() {
  NativeMovConstReg* const_load = nativeMovConstReg_at(addr());
  return (address)(const_load->data());
}
// Read the cached value out of an IC-buffer stub. The stub begins with a
// set-constant; constructing the wrapper also verifies the instruction.
void* InlineCacheBuffer::ic_buffer_cached_value(address code_begin) {
  NativeMovConstReg* value_load = nativeMovConstReg_at(code_begin);
  return (void*)value_load->data();
}
// Read the cached value out of an IC-buffer stub: a set-constant followed
// by a jump. Constructing each wrapper also verifies its instruction.
void* InlineCacheBuffer::ic_buffer_cached_value(address code_begin) {
  NativeMovConstReg* value_load = nativeMovConstReg_at(code_begin);
  // Verify the trailing jump (result intentionally unused).
  NativeJump* stub_jump = nativeJump_at(value_load->next_instruction_address());
  return (void*)value_load->data();
}