Example no. 1
VtableStub* VtableStubs::lookup(bool is_vtable_stub, int vtable_index) {
  MutexLocker ml(VtableStubs_lock);
  unsigned hash = VtableStubs::hash(is_vtable_stub, vtable_index);
  VtableStub* s = _table[hash];
  while( s && !s->matches(is_vtable_stub, vtable_index)) s = s->next();
  return s;
}
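The lookup walks a chained hash table keyed by the stub kind and the vtable index. A minimal sketch of a hash with the shape this lookup expects (hypothetical; the actual VtableStubs::hash() in HotSpot differs, and N is the bucket count of _table as used in the next example):

// Sketch only, not the HotSpot implementation: any stable function that
// mixes the stub kind with the index and maps into [0, N) would satisfy
// the lookup above.
static unsigned hash(bool is_vtable_stub, int vtable_index) {
  unsigned h = (unsigned)vtable_index * 2u + (is_vtable_stub ? 1u : 0u);
  return h % N;  // N == number of buckets in _table
}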
Example no. 2
void VtableStubs::vtable_stub_do(void f(VtableStub*)) {
  for (int i = 0; i < N; i++) {
    for (VtableStub* s = _table[i]; s != NULL; s = s->next()) {
      f(s);
    }
  }
}
Example no. 3
bool VtableStubs::is_entry_point(address pc) {
  MutexLocker ml(VtableStubs_lock);
  VtableStub* stub = (VtableStub*)(pc - VtableStub::entry_offset());
  uint hash = VtableStubs::hash(stub->is_vtable_stub(), stub->index());
  VtableStub* s;
  for (s = _table[hash]; s != NULL && s != stub; s = s->next()) {}
  return s == stub;
}
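The pointer arithmetic above relies on the VtableStub header being laid out immediately before its generated code, so subtracting entry_offset() from a code address recovers the header. A conceptual sketch of the assumed layout:

// Assumed layout (sketch):
//
//   [ VtableStub header ][ generated stub code ... ]
//   ^ stub               ^ pc == (address)stub + VtableStub::entry_offset()
//
// If pc is not a real entry point, the recovered "stub" is garbage, which is
// why the result is validated by searching the hash chain for that pointer.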
Example no. 4
VtableStub* VtableStubs::stub_containing(address pc) {
  // Note: No locking needed since any change to the data structure
  //       happens with an atomic store into it (we don't care about
  //       consistency with the _number_of_vtable_stubs counter).
  for (int i = 0; i < N; i++) {
    for (VtableStub* s = _table[i]; s != NULL; s = s->next()) {
      if (s->contains(pc)) return s;
    }
  }
  return NULL;
}
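For this lock-free traversal to be safe, writers must publish a fully linked stub with a single store of the bucket head. A hedged sketch of an enter() consistent with the comment above (not the verbatim HotSpot code; set_next is assumed to exist as the counterpart of next()):

// Sketch only: the new stub's next pointer is set before the bucket head is
// overwritten, so a concurrent reader sees either the old chain or the new,
// fully linked one. The lock serializes writers; readers take no lock.
void VtableStubs::enter(bool is_vtable_stub, int vtable_index, VtableStub* s) {
  MutexLocker ml(VtableStubs_lock);
  unsigned hash = VtableStubs::hash(is_vtable_stub, vtable_index);
  s->set_next(_table[hash]);  // link first ...
  _table[hash] = s;           // ... then publish with one store
}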
Example no. 5
bool CompiledIC::is_icholder_entry(address entry) {
  CodeBlob* cb = CodeCache::find_blob_unsafe(entry);
  if (cb != NULL && cb->is_adapter_blob()) {
    return true;
  }
  // itable stubs also use CompiledICHolder
  if (cb != NULL && cb->is_vtable_blob()) {
    VtableStub* s = VtableStubs::entry_point(entry);
    return (s != NULL) && s->is_itable_stub();
  }

  return false;
}
Example no. 6
VtableStub* VtableStubs::create_itable_stub(int vtable_index, int receiver_location) {
  const int i486_code_length = VtableStub::pd_code_size_limit(false);
  VtableStub* s = new(i486_code_length) VtableStub(false, vtable_index, receiver_location);
  ResourceMark rm;
  MacroAssembler* masm = new MacroAssembler(new CodeBuffer(s->entry_point(), i486_code_length));
  
  // Entry arguments:
  //  eax: Interface
  //  ecx: Receiver
  
  // get receiver (need to skip return address on top of stack)
  COMPILER1_ONLY(assert(receiver_location == 0, "receiver is always in ecx - no location info needed");)
 
#ifdef COMPILER2
    if( receiver_location < SharedInfo::stack0 ) {
      // ... (excerpt truncated in the original; see Example no. 23 for the
      // complete COMPILER2 receiver-location handling in the vtable variant)
Example no. 7
address VtableStubs::create_stub(bool is_vtable_stub, int vtable_index, methodOop method) {
  assert(vtable_index >= 0, "must be non-negative");

  VtableStub* s = ShareVtableStubs ? lookup(is_vtable_stub, vtable_index) : NULL;
  if (s == NULL) {
    if (is_vtable_stub) {
      s = create_vtable_stub(vtable_index);
    } else {
      s = create_itable_stub(vtable_index);
    }
    enter(is_vtable_stub, vtable_index, s);
    if (PrintAdapterHandlers) {
      tty->print_cr("Decoding VtableStub %s[%d]@%d",
                    is_vtable_stub? "vtbl": "itbl", vtable_index, VtableStub::receiver_location());
      Disassembler::decode(s->code_begin(), s->code_end());
    }
  }
  return s->entry_point();
}
Example no. 8
VtableStub* VtableStubs::create_vtable_stub(int vtable_index) {
  const int code_length = VtableStub::pd_code_size_limit(true);
  VtableStub* s = new(code_length) VtableStub(true, vtable_index);
  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

  assert(VtableStub::receiver_location() == R0->as_VMReg(), "receiver expected in R0");

  const Register tmp = Rtemp; // Rtemp OK, should be free at call sites

  address npe_addr = __ pc();
  __ load_klass(tmp, R0);

  {
    int entry_offset = in_bytes(Klass::vtable_start_offset()) + vtable_index * vtableEntry::size_in_bytes();
    int method_offset = vtableEntry::method_offset_in_bytes() + entry_offset;

    assert((method_offset & (wordSize - 1)) == 0, "offset should be aligned");
    int offset_mask = AARCH64_ONLY(0xfff << LogBytesPerWord) NOT_AARCH64(0xfff);
    if (method_offset & ~offset_mask) {
      __ add(tmp, tmp, method_offset & ~offset_mask);
    }
    __ ldr(Rmethod, Address(tmp, method_offset & offset_mask));
  }

  address ame_addr = __ pc();
#ifdef AARCH64
  __ ldr(tmp, Address(Rmethod, Method::from_compiled_offset()));
  __ br(tmp);
#else
  __ ldr(PC, Address(Rmethod, Method::from_compiled_offset()));
#endif // AARCH64

  masm->flush();

  if (PrintMiscellaneous && (WizardMode || Verbose)) {
    tty->print_cr("vtable #%d at " PTR_FORMAT "[%d] left over: %d",
                  vtable_index, p2i(s->entry_point()),
                  (int)(s->code_end() - s->entry_point()),
                  (int)(s->code_end() - __ pc()));
  }
  guarantee(__ pc() <= s->code_end(), "overflowed buffer");
  // FIXME ARM: need correct 'slop' - below is x86 code
  // shut the door on sizing bugs
  //int slop = 8;  // 32-bit offset is this much larger than a 13-bit one
  //assert(vtable_index > 10 || __ pc() + slop <= s->code_end(), "room for 32-bit offset");

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}
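The add/ldr split in the stub above exists because the load instruction encodes only a limited displacement, so a large vtable offset is split into an add of the high bits and a load of the low bits. A worked example with hypothetical numbers (AArch64 masking, wordSize == 8):

// offset_mask   = 0xfff << 3        = 0x7ff8
// method_offset = 0x9010            (a large, 8-byte-aligned vtable slot)
// high part     = 0x9010 & ~0x7ff8  = 0x8000  -> emitted as the add
// low part      = 0x9010 &  0x7ff8  = 0x1010  -> fits the ldr displacement
//
// so the stub emits:  add(tmp, tmp, 0x8000);  ldr(Rmethod, Address(tmp, 0x1010));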
Example no. 9
address VtableStubs::find_stub(bool is_vtable_stub, int vtable_index) {
  assert(vtable_index >= 0, "must be non-negative");

  VtableStub* s = ShareVtableStubs ? lookup(is_vtable_stub, vtable_index) : NULL;
  if (s == NULL) {
    if (is_vtable_stub) {
      s = create_vtable_stub(vtable_index);
    } else {
      s = create_itable_stub(vtable_index);
    }

    // Creation of vtable or itable can fail if there is not enough free space in the code cache.
    if (s == NULL) {
      return NULL;
    }

    enter(is_vtable_stub, vtable_index, s);
    if (PrintAdapterHandlers) {
      tty->print_cr("Decoding VtableStub %s[%d]@%d",
                    is_vtable_stub? "vtbl": "itbl", vtable_index, VtableStub::receiver_location());
      Disassembler::decode(s->code_begin(), s->code_end());
    }
    // Notify JVMTI about this stub. The event will be recorded by the enclosing
    // JvmtiDynamicCodeEventCollector and posted when this thread has released
    // all locks.
    if (JvmtiExport::should_post_dynamic_code_generated()) {
      JvmtiExport::post_dynamic_code_generated_while_holding_locks(is_vtable_stub? "vtable stub": "itable stub",
                                                                   s->code_begin(), s->code_end());
    }
  }
  return s->entry_point();
}
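A hedged sketch of how a call-site resolver might consume find_stub; the caller shown is illustrative, not the actual HotSpot call site:

// Hypothetical caller: resolve a megamorphic virtual call to a stub entry.
address entry = VtableStubs::find_stub(/*is_vtable_stub=*/ true, vtable_index);
if (entry == NULL) {
  // Code cache is full; the caller must bail out or retry after a flush.
}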
Example no. 10
VtableStub* VtableStubs::create_vtable_stub(int vtable_index) {
  const int aarch64_code_length = VtableStub::pd_code_size_limit(true);
  VtableStub* s = new(aarch64_code_length) VtableStub(true, vtable_index);
  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), aarch64_code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

#ifndef PRODUCT
  if (CountCompiledCalls) {
    __ lea(r19, ExternalAddress((address) SharedRuntime::nof_megamorphic_calls_addr()));
    __ incrementw(Address(r19));
  }
#endif

  // get receiver (need to skip return address on top of stack)
  assert(VtableStub::receiver_location() == j_rarg0->as_VMReg(), "receiver expected in j_rarg0");

  // get receiver klass
  address npe_addr = __ pc();
  __ load_klass(r19, j_rarg0);

#ifndef PRODUCT
  if (DebugVtables) {
    Label L;
    // check offset vs vtable length
    __ ldrw(rscratch1, Address(r19, Klass::vtable_length_offset()));
    __ cmpw(rscratch1, vtable_index * vtableEntry::size());
    __ br(Assembler::GT, L);
    __ enter();
    __ mov(r2, vtable_index);
    __ call_VM(noreg,
               CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), j_rarg0, r2);
    __ leave();
    __ bind(L);
  }
#endif // PRODUCT

  __ lookup_virtual_method(r19, vtable_index, rmethod);

  if (DebugVtables) {
    Label L;
    __ cbz(rmethod, L);
    __ ldr(rscratch1, Address(rmethod, Method::from_compiled_offset()));
    __ cbnz(rscratch1, L);
    __ stop("Vtable entry is NULL");
    __ bind(L);
  }
  // r19: receiver klass
  // rmethod: Method*
  // j_rarg0: receiver
  address ame_addr = __ pc();
  __ ldr(rscratch1, Address(rmethod, Method::from_compiled_offset()));
  __ br(rscratch1);

  __ flush();

  if (PrintMiscellaneous && (WizardMode || Verbose)) {
    tty->print_cr("vtable #%d at " PTR_FORMAT "[%d] left over: %d",
                  vtable_index, p2i(s->entry_point()),
                  (int)(s->code_end() - s->entry_point()),
                  (int)(s->code_end() - __ pc()));
  }
  guarantee(__ pc() <= s->code_end(), "overflowed buffer");

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}
Example no. 11
// Used by compiler only; may use only caller saved, non-argument
// registers.
VtableStub* VtableStubs::create_vtable_stub(int vtable_index) {
  // PPC port: use fixed size.
  const int code_length = VtableStub::pd_code_size_limit(true);
  VtableStub* s = new (code_length) VtableStub(true, vtable_index);

  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

#ifndef PRODUCT
  if (CountCompiledCalls) {
    int offs = __ load_const_optimized(R11_scratch1, SharedRuntime::nof_megamorphic_calls_addr(), R12_scratch2, true);
    __ lwz(R12_scratch2, offs, R11_scratch1);
    __ addi(R12_scratch2, R12_scratch2, 1);
    __ stw(R12_scratch2, offs, R11_scratch1);
  }
#endif

  assert(VtableStub::receiver_location() == R3_ARG1->as_VMReg(), "receiver expected in R3_ARG1");

  // Get receiver klass.
  const Register rcvr_klass = R11_scratch1;

  // We might implicit NULL fault here.
  address npe_addr = __ pc(); // npe = null pointer exception
  __ load_klass_with_trap_null_check(rcvr_klass, R3);

  // Set method (in case of interpreted method), and destination address.
  int entry_offset = InstanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size();

#ifndef PRODUCT
  if (DebugVtables) {
    Label L;
    // Check offset vs vtable length.
    const Register vtable_len = R12_scratch2;
    __ lwz(vtable_len, InstanceKlass::vtable_length_offset()*wordSize, rcvr_klass);
    __ cmpwi(CCR0, vtable_len, vtable_index*vtableEntry::size());
    __ bge(CCR0, L);
    __ li(R12_scratch2, vtable_index);
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), R3_ARG1, R12_scratch2, false);
    __ bind(L);
  }
#endif

  int v_off = entry_offset*wordSize + vtableEntry::method_offset_in_bytes();

  __ ld(R19_method, v_off, rcvr_klass);

#ifndef PRODUCT
  if (DebugVtables) {
    Label L;
    __ cmpdi(CCR0, R19_method, 0);
    __ bne(CCR0, L);
    __ stop("Vtable entry is ZERO", 102);
    __ bind(L);
  }
#endif

  // If the vtable entry is null, the method is abstract.
  address ame_addr = __ pc(); // ame = abstract method error

  __ load_with_trap_null_check(R12_scratch2, in_bytes(Method::from_compiled_offset()), R19_method);
  __ mtctr(R12_scratch2);
  __ bctr();

  masm->flush();

  guarantee(__ pc() <= s->code_end(), "overflowed buffer");

  s->set_exception_points(npe_addr, ame_addr);

  return s;
}
Example no. 12
VtableStub* VtableStubs::create_itable_stub(int itable_index) {
  // PPC port: use fixed size.
  const int code_length = VtableStub::pd_code_size_limit(false);
  VtableStub* s = new (code_length) VtableStub(false, itable_index);

  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);
  address start_pc;

#ifndef PRODUCT
  if (CountCompiledCalls) {
    int offs = __ load_const_optimized(R11_scratch1, SharedRuntime::nof_megamorphic_calls_addr(), R12_scratch2, true);
    __ lwz(R12_scratch2, offs, R11_scratch1);
    __ addi(R12_scratch2, R12_scratch2, 1);
    __ stw(R12_scratch2, offs, R11_scratch1);
  }
#endif

  assert(VtableStub::receiver_location() == R3_ARG1->as_VMReg(), "receiver expected in R3_ARG1");

  // Entry arguments:
  //  R19_method: Interface
  //  R3_ARG1:    Receiver

  Label L_no_such_interface;
  const Register rcvr_klass = R11_scratch1,
                 interface  = R12_scratch2,
                 tmp1       = R21_tmp1,
                 tmp2       = R22_tmp2;

  address npe_addr = __ pc(); // npe = null pointer exception
  __ load_klass_with_trap_null_check(rcvr_klass, R3_ARG1);

  // Receiver subtype check against REFC.
  __ ld(interface, CompiledICHolder::holder_klass_offset(), R19_method);
  __ lookup_interface_method(rcvr_klass, interface, noreg,
                             R0, tmp1, tmp2,
                             L_no_such_interface, /*return_method=*/ false);

  // Get Method* and entrypoint for compiler
  __ ld(interface, CompiledICHolder::holder_metadata_offset(), R19_method);
  __ lookup_interface_method(rcvr_klass, interface, itable_index,
                             R19_method, tmp1, tmp2,
                             L_no_such_interface, /*return_method=*/ true);

#ifndef PRODUCT
  if (DebugVtables) {
    Label ok;
    __ cmpd(CCR0, R19_method, 0);
    __ bne(CCR0, ok);
    __ stop("method is null", 103);
    __ bind(ok);
  }
#endif

  // If the vtable entry is null, the method is abstract.
  address ame_addr = __ pc(); // ame = abstract method error

  // Must do an explicit check if implicit checks are disabled.
  assert(!MacroAssembler::needs_explicit_null_check(in_bytes(Method::from_compiled_offset())), "sanity");
  if (!ImplicitNullChecks || !os::zero_page_read_protected()) {
    if (TrapBasedNullChecks) {
      __ trap_null_check(R19_method);
    } else {
      __ cmpdi(CCR0, R19_method, 0);
      __ beq(CCR0, L_no_such_interface);
    }
  }
  __ ld(R12_scratch2, in_bytes(Method::from_compiled_offset()), R19_method);
  __ mtctr(R12_scratch2);
  __ bctr();

  // Handle IncompatibleClassChangeError in itable stubs.
  // More detailed error message.
  // We force resolving of the call site by jumping to the "handle
  // wrong method" stub, and so let the interpreter runtime do all the
  // dirty work.
  __ bind(L_no_such_interface);
  __ load_const_optimized(R11_scratch1, SharedRuntime::get_handle_wrong_method_stub(), R12_scratch2);
  __ mtctr(R11_scratch1);
  __ bctr();

  masm->flush();

  guarantee(__ pc() <= s->code_end(), "overflowed buffer");

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}
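The two lookup_interface_method calls above consume the two fields of the CompiledICHolder arriving in R19_method: first the REFC (the interface named at the call site) for the receiver subtype check, then the resolved interface for the actual method lookup. A conceptual sketch of the holder; the real class in oops/compiledICHolder.hpp carries more state:

// Sketch only; field order and names are illustrative.
class CompiledICHolder {
  Metadata* _holder_metadata;  // resolved interface, used for the method lookup
  Klass*    _holder_klass;     // REFC: interface referenced at the call site
};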
Example no. 13
VtableStub* VtableStubs::create_itable_stub(int itable_index) {
  const int code_length = VtableStub::pd_code_size_limit(false);
  VtableStub* s = new(code_length) VtableStub(false, itable_index);
  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

  assert(VtableStub::receiver_location() == R0->as_VMReg(), "receiver expected in R0");

  // R0-R3 / R0-R7 registers hold the arguments and cannot be spoiled
  const Register Rclass  = AARCH64_ONLY(R9)  NOT_AARCH64(R4);
  const Register Rlength = AARCH64_ONLY(R10) NOT_AARCH64(R5);
  const Register Rscan   = AARCH64_ONLY(R11) NOT_AARCH64(R6);
  const Register tmp     = Rtemp;

  assert_different_registers(Ricklass, Rclass, Rlength, Rscan, tmp);

  // Calculate the start of itable (itable goes after vtable)
  const int scale = exact_log2(vtableEntry::size_in_bytes());
  address npe_addr = __ pc();
  __ load_klass(Rclass, R0);
  __ ldr_s32(Rlength, Address(Rclass, Klass::vtable_length_offset()));

  __ add(Rscan, Rclass, in_bytes(Klass::vtable_start_offset()));
  __ add(Rscan, Rscan, AsmOperand(Rlength, lsl, scale));

  // Search through the itable for an interface equal to incoming Ricklass
  // itable looks like [intface][offset][intface][offset][intface][offset]
  const int entry_size = itableOffsetEntry::size() * HeapWordSize;
  assert(itableOffsetEntry::interface_offset_in_bytes() == 0, "not added for convenience");

  Label loop;
  __ bind(loop);
  __ ldr(tmp, Address(Rscan, entry_size, post_indexed));
#ifdef AARCH64
  Label found;
  __ cmp(tmp, Ricklass);
  __ b(found, eq);
  __ cbnz(tmp, loop);
#else
  __ cmp(tmp, Ricklass);  // set ZF and CF if interface is found
  __ cmn(tmp, 0, ne);     // check if tmp == 0 and clear CF if it is
  __ b(loop, ne);
#endif // AARCH64

  assert(StubRoutines::throw_IncompatibleClassChangeError_entry() != NULL, "Check initialization order");
#ifdef AARCH64
  __ jump(StubRoutines::throw_IncompatibleClassChangeError_entry(), relocInfo::runtime_call_type, tmp);
  __ bind(found);
#else
  // CF == 0 means we reached the end of itable without finding icklass
  __ jump(StubRoutines::throw_IncompatibleClassChangeError_entry(), relocInfo::runtime_call_type, noreg, cc);
#endif // !AARCH64

  // Interface found at previous position of Rscan, now load the method oop
  __ ldr_s32(tmp, Address(Rscan, itableOffsetEntry::offset_offset_in_bytes() - entry_size));
  {
    const int method_offset = itableMethodEntry::size() * HeapWordSize * itable_index +
      itableMethodEntry::method_offset_in_bytes();
    __ add_slow(Rmethod, Rclass, method_offset);
  }
  __ ldr(Rmethod, Address(Rmethod, tmp));

  address ame_addr = __ pc();

#ifdef AARCH64
  __ ldr(tmp, Address(Rmethod, Method::from_compiled_offset()));
  __ br(tmp);
#else
  __ ldr(PC, Address(Rmethod, Method::from_compiled_offset()));
#endif // AARCH64

  masm->flush();

  if (PrintMiscellaneous && (WizardMode || Verbose)) {
    tty->print_cr("itable #%d at " PTR_FORMAT "[%d] left over: %d",
                  itable_index, p2i(s->entry_point()),
                  (int)(s->code_end() - s->entry_point()),
                  (int)(s->code_end() - __ pc()));
  }
  guarantee(__ pc() <= s->code_end(), "overflowed buffer");
  // FIXME ARM: need correct 'slop' - below is x86 code
  // shut the door on sizing bugs
  //int slop = 8;  // 32-bit offset is this much larger than a 13-bit one
  //assert(itable_index > 10 || __ pc() + slop <= s->code_end(), "room for 32-bit offset");

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}
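The scan loop above walks the itableOffsetEntry array that follows the vtable inside the Klass; a conceptual sketch of the layout it assumes (entry names taken from the code above, not the verbatim headers):

// Layout assumed by the itable scan:
//
//   Klass
//     ... fields up to vtable_start_offset ...
//     vtableEntry[vtable_length]                 // the vtable proper
//     itableOffsetEntry[] { interface, offset }  // scanned by the loop;
//                                                // terminated by a NULL interface
//     itableMethodEntry[] { Method* }            // per-interface method tables
//
// Once the matching interface is found, its 'offset' plus
// itable_index * itableMethodEntry::size() * HeapWordSize locates the
// Method* slot, which is exactly what the add_slow/ldr pair computes.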
Example no. 14
// Used by compiler only; may use only caller saved, non-argument registers.
VtableStub* VtableStubs::create_vtable_stub(int vtable_index) {

  const int   code_length = VtableStub::pd_code_size_limit(true);
  VtableStub *s = new(code_length) VtableStub(true, vtable_index);
  if (s == NULL) { // Indicates OOM in the code cache.
    return NULL;
  }

  ResourceMark    rm;
  CodeBuffer      cb(s->entry_point(), code_length);
  MacroAssembler *masm = new MacroAssembler(&cb);
  address start_pc;
  int     padding_bytes = 0;

#if (!defined(PRODUCT) && defined(COMPILER2))
  if (CountCompiledCalls) {
    // Count unused bytes
    //                  worst case             actual size
    padding_bytes += __ load_const_size() - __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::nof_megamorphic_calls_addr(), true);

    // Use generic emitter for direct memory increment.
    // Abuse Z_method as scratch register for generic emitter.
    // It is loaded further down anyway before it is first used.
    __ add2mem_32(Address(Z_R1_scratch), 1, Z_method);
  }
#endif

  assert(VtableStub::receiver_location() == Z_R2->as_VMReg(), "receiver expected in Z_ARG1");

  // Get receiver klass.
  // Must do an explicit check if implicit checks are disabled.
  address npe_addr = __ pc(); // npe == NULL ptr exception
  __ null_check(Z_ARG1, Z_R1_scratch, oopDesc::klass_offset_in_bytes());
  const Register rcvr_klass = Z_R1_scratch;
  __ load_klass(rcvr_klass, Z_ARG1);

  // Set method (in case of interpreted method), and destination address.
  int entry_offset = in_bytes(InstanceKlass::vtable_start_offset()) +
                     vtable_index * vtableEntry::size_in_bytes();

#ifndef PRODUCT
  if (DebugVtables) {
    Label L;
    // Check offset vs vtable length.
    const Register vtable_idx = Z_R0_scratch;

    // Count unused bytes.
    //                  worst case             actual size
    padding_bytes += __ load_const_size() - __ load_const_optimized_rtn_len(vtable_idx, vtable_index*vtableEntry::size_in_bytes(), true);

    assert(Immediate::is_uimm12(in_bytes(InstanceKlass::vtable_length_offset())), "disp too large");
    __ z_cl(vtable_idx, in_bytes(InstanceKlass::vtable_length_offset()), rcvr_klass);
    __ z_brl(L);
    __ z_lghi(Z_ARG3, vtable_index);  // Debug code, don't optimize.
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), Z_ARG1, Z_ARG3, false);
    // Count unused bytes (assume worst case here).
    padding_bytes += 12;
    __ bind(L);
  }
#endif

  int v_off = entry_offset + vtableEntry::method_offset_in_bytes();

  // Duplicate safety code from enc_class Java_Dynamic_Call_dynTOC.
  if (Displacement::is_validDisp(v_off)) {
    __ z_lg(Z_method/*method oop*/, v_off, rcvr_klass/*class oop*/);
    // Account for the load_const in the else path.
    padding_bytes += __ load_const_size();
  } else {
    // Worst case, offset does not fit in displacement field.
    __ load_const(Z_method, v_off); // Z_method temporarily holds the offset value.
    __ z_lg(Z_method/*method oop*/, 0, Z_method/*method offset*/, rcvr_klass/*class oop*/);
  }

#ifndef PRODUCT
  if (DebugVtables) {
    Label L;
    __ z_ltgr(Z_method, Z_method);
    __ z_brne(L);
    __ stop("Vtable entry is ZERO",102);
    __ bind(L);
  }
#endif

  address ame_addr = __ pc(); // ame = abstract method error

  // Must do an explicit check if implicit checks are disabled.
  __ null_check(Z_method, Z_R1_scratch, in_bytes(Method::from_compiled_offset()));
  __ z_lg(Z_R1_scratch, in_bytes(Method::from_compiled_offset()), Z_method);
  __ z_br(Z_R1_scratch);

  masm->flush();

  s->set_exception_points(npe_addr, ame_addr);

  return s;
}
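The is_validDisp branch above picks between a single load with an immediate displacement and a two-instruction sequence when the vtable offset exceeds the displacement field (on z/Architecture the long displacement is a signed 20-bit field, roughly +/-512 KiB). A worked sketch with hypothetical offsets:

// v_off small (e.g. 0x1f0):   __ z_lg(Z_method, 0x1f0, rcvr_klass);        // one instruction
// v_off large (e.g. 0x90000): __ load_const(Z_method, 0x90000);            // offset to register
//                             __ z_lg(Z_method, 0, Z_method, rcvr_klass);  // base + index form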
Example no. 15
VtableStub* VtableStubs::create_itable_stub(int vtable_index) {
  const int   code_length = VtableStub::pd_code_size_limit(false);
  VtableStub *s = new(code_length) VtableStub(false, vtable_index);
  if (s == NULL) { // Indicates OOM in the code cache.
    return NULL;
  }

  ResourceMark    rm;
  CodeBuffer      cb(s->entry_point(), code_length);
  MacroAssembler *masm = new MacroAssembler(&cb);
  address start_pc;
  int     padding_bytes = 0;

#if (!defined(PRODUCT) && defined(COMPILER2))
  if (CountCompiledCalls) {
    // Count unused bytes
    //                  worst case             actual size
    padding_bytes += __ load_const_size() - __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::nof_megamorphic_calls_addr(), true);

    // Use generic emitter for direct memory increment.
    // Use Z_tmp_1 as scratch register for generic emitter.
    __ add2mem_32(Address(Z_R1_scratch), 1, Z_tmp_1);
  }
#endif

  assert(VtableStub::receiver_location() == Z_R2->as_VMReg(), "receiver expected in Z_ARG1");

  // Entry arguments:
  //  Z_method: Interface
  //  Z_ARG1:   Receiver
  const Register rcvr_klass = Z_tmp_1;    // Used to compute itable_entry_addr.
                                          // Use extra reg to avoid re-load.
  const Register vtable_len = Z_tmp_2;    // Used to compute itable_entry_addr.
  const Register itable_entry_addr = Z_R1_scratch;
  const Register itable_interface  = Z_R0_scratch;

  // Get receiver klass.
  // Must do an explicit check if implicit checks are disabled.
  address npe_addr = __ pc(); // npe == NULL ptr exception
  __ null_check(Z_ARG1, Z_R1_scratch, oopDesc::klass_offset_in_bytes());
  __ load_klass(rcvr_klass, Z_ARG1);

  // Load start of itable entries into itable_entry.
  __ z_llgf(vtable_len, Address(rcvr_klass, InstanceKlass::vtable_length_offset()));
  __ z_sllg(vtable_len, vtable_len, exact_log2(vtableEntry::size_in_bytes()));

  // Loop over all itable entries until desired interfaceOop(Rinterface) found.
  const int vtable_base_offset = in_bytes(InstanceKlass::vtable_start_offset());
  // Count unused bytes.
  start_pc = __ pc();
  __ add2reg_with_index(itable_entry_addr, vtable_base_offset + itableOffsetEntry::interface_offset_in_bytes(), rcvr_klass, vtable_len);
  padding_bytes += 20 - (__ pc() - start_pc);

  const int itable_offset_search_inc = itableOffsetEntry::size() * wordSize;
  Label search;
  __ bind(search);

  // Handle IncompatibleClassChangeError in itable stubs.
  // If the entry is NULL then we've reached the end of the table
  // without finding the expected interface, so throw an exception.
  NearLabel   throw_icce;
  __ load_and_test_long(itable_interface, Address(itable_entry_addr));
  __ z_bre(throw_icce); // Throw the exception out-of-line.
  // Count unused bytes.
  start_pc = __ pc();
  __ add2reg(itable_entry_addr, itable_offset_search_inc);
  padding_bytes += 20 - (__ pc() - start_pc);
  __ z_cgr(itable_interface, Z_method);
  __ z_brne(search);

  // Entry found. Itable_entry_addr points to the subsequent entry (itable_offset_search_inc too far).
  // Get offset of vtable for interface.

  const Register vtable_offset = Z_R1_scratch;
  const Register itable_method = rcvr_klass;   // Calculated before.

  const int vtable_offset_offset = (itableOffsetEntry::offset_offset_in_bytes() -
                                    itableOffsetEntry::interface_offset_in_bytes()) -
                                   itable_offset_search_inc;
  __ z_llgf(vtable_offset, vtable_offset_offset, itable_entry_addr);

  // Compute itableMethodEntry and get method and entry point for compiler.
  const int method_offset = (itableMethodEntry::size() * wordSize * vtable_index) +
                            itableMethodEntry::method_offset_in_bytes();

  __ z_lg(Z_method, method_offset, vtable_offset, itable_method);

#ifndef PRODUCT
  if (DebugVtables) {
    Label ok1;
    __ z_ltgr(Z_method, Z_method);
    __ z_brne(ok1);
    __ stop("method is null",103);
    __ bind(ok1);
  }
#endif

  address ame_addr = __ pc();
  // Must do an explicit check if implicit checks are disabled.
  if (!ImplicitNullChecks) {
    __ compare64_and_branch(Z_method, (intptr_t) 0, Assembler::bcondEqual, throw_icce);
  }
  __ z_lg(Z_R1_scratch, in_bytes(Method::from_compiled_offset()), Z_method);
  __ z_br(Z_R1_scratch);

  // Handle IncompatibleClassChangeError in itable stubs.
  __ bind(throw_icce);
  // Count unused bytes
  //                  worst case          actual size
  // We force resolving of the call site by jumping to
  // the "handle wrong method" stub, and so let the
  // interpreter runtime do all the dirty work.
  padding_bytes += __ load_const_size() - __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::get_handle_wrong_method_stub(), true);
  __ z_br(Z_R1_scratch);

  masm->flush();

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}
Example no. 16
// These stubs are used by the compiler only.
// Argument registers, which must be preserved:
//   rcx - receiver (always first argument)
//   rdx - second argument (if any)
// Other registers that might be usable:
//   rax - inline cache register (is interface for itable stub)
//   rbx - method (used when calling out to interpreter)
// Available now, but may become callee-save at some point:
//   rsi, rdi
// Note that rax and rdx are also used for return values.
//
VtableStub* VtableStubs::create_vtable_stub(int vtable_index) {
  const int i486_code_length = VtableStub::pd_code_size_limit(true);
  VtableStub* s = new(i486_code_length) VtableStub(true, vtable_index);
  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), i486_code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

#ifndef PRODUCT

  if (CountCompiledCalls) {
    __ incrementl(ExternalAddress((address) SharedRuntime::nof_megamorphic_calls_addr()));
  }
#endif /* PRODUCT */

  // get receiver (need to skip return address on top of stack)
  assert(VtableStub::receiver_location() == rcx->as_VMReg(), "receiver expected in rcx");

  // get receiver klass
  address npe_addr = __ pc();
  __ movptr(rax, Address(rcx, oopDesc::klass_offset_in_bytes()));

#ifndef PRODUCT
  if (DebugVtables) {
    Label L;
    // check offset vs vtable length
    __ cmpl(Address(rax, InstanceKlass::vtable_length_offset()*wordSize), vtable_index*vtableEntry::size());
    __ jcc(Assembler::greater, L);
    __ movl(rbx, vtable_index);
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), rcx, rbx);
    __ bind(L);
  }
#endif // PRODUCT

  const Register method = rbx;

  // load Method* and target address
  __ lookup_virtual_method(rax, vtable_index, method);

  if (DebugVtables) {
    Label L;
    __ cmpptr(method, (int32_t)NULL_WORD);
    __ jcc(Assembler::equal, L);
    __ cmpptr(Address(method, Method::from_compiled_offset()), (int32_t)NULL_WORD);
    __ jcc(Assembler::notZero, L);
    __ stop("Vtable entry is NULL");
    __ bind(L);
  }

  // rax,: receiver klass
  // method (rbx): Method*
  // rcx: receiver
  address ame_addr = __ pc();
  __ jmp( Address(method, Method::from_compiled_offset()));

  masm->flush();

  if (PrintMiscellaneous && (WizardMode || Verbose)) {
    tty->print_cr("vtable #%d at "PTR_FORMAT"[%d] left over: %d",
                  vtable_index, s->entry_point(),
                  (int)(s->code_end() - s->entry_point()),
                  (int)(s->code_end() - __ pc()));
  }
  guarantee(__ pc() <= s->code_end(), "overflowed buffer");
  // shut the door on sizing bugs
  int slop = 3;  // 32-bit offset is this much larger than an 8-bit one
  assert(vtable_index > 10 || __ pc() + slop <= s->code_end(), "room for 32-bit offset");

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}
Example no. 17
VtableStub* VtableStubs::create_itable_stub(int itable_index) {
  // Note well: pd_code_size_limit is the absolute minimum we can get away with.  If you
  //            add code here, bump the code stub size returned by pd_code_size_limit!
  const int i486_code_length = VtableStub::pd_code_size_limit(false);
  VtableStub* s = new(i486_code_length) VtableStub(false, itable_index);
  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), i486_code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

  // Entry arguments:
  //  rax,: Interface
  //  rcx: Receiver

#ifndef PRODUCT
  if (CountCompiledCalls) {
    __ incrementl(ExternalAddress((address) SharedRuntime::nof_megamorphic_calls_addr()));
  }
#endif /* PRODUCT */
  // get receiver (need to skip return address on top of stack)

  assert(VtableStub::receiver_location() == rcx->as_VMReg(), "receiver expected in rcx");

  // get receiver klass (also an implicit null-check)
  address npe_addr = __ pc();
  __ movptr(rsi, Address(rcx, oopDesc::klass_offset_in_bytes()));

  // Most registers are in use; we'll use rax, rbx, rsi, rdi
  // (If we need to make rsi, rdi callee-save, do a push/pop here.)
  const Register method = rbx;
  Label throw_icce;

  // Get Method* and entrypoint for compiler
  __ lookup_interface_method(// inputs: rec. class, interface, itable index
                             rsi, rax, itable_index,
                             // outputs: method, scan temp. reg
                             method, rdi,
                             throw_icce);

  // method (rbx): Method*
  // rcx: receiver

#ifdef ASSERT
  if (DebugVtables) {
      Label L1;
      __ cmpptr(method, (int32_t)NULL_WORD);
      __ jcc(Assembler::equal, L1);
      __ cmpptr(Address(method, Method::from_compiled_offset()), (int32_t)NULL_WORD);
      __ jcc(Assembler::notZero, L1);
      __ stop("Method* is null");
      __ bind(L1);
    }
#endif // ASSERT

  address ame_addr = __ pc();
  __ jmp(Address(method, Method::from_compiled_offset()));

  __ bind(throw_icce);
  __ jump(RuntimeAddress(StubRoutines::throw_IncompatibleClassChangeError_entry()));
  masm->flush();

  if (PrintMiscellaneous && (WizardMode || Verbose)) {
    tty->print_cr("itable #%d at "PTR_FORMAT"[%d] left over: %d",
                  itable_index, s->entry_point(),
                  (int)(s->code_end() - s->entry_point()),
                  (int)(s->code_end() - __ pc()));
  }
  guarantee(__ pc() <= s->code_end(), "overflowed buffer");
  // shut the door on sizing bugs
  int slop = 3;  // 32-bit offset is this much larger than an 8-bit one
  assert(itable_index > 10 || __ pc() + slop <= s->code_end(), "room for 32-bit offset");

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}
Example no. 18
VtableStub* VtableStubs::create_itable_stub(int itable_index) {
  // Note well: pd_code_size_limit is the absolute minimum we can get
  // away with.  If you add code here, bump the code stub size
  // returned by pd_code_size_limit!
  const int code_length = VtableStub::pd_code_size_limit(false);
  VtableStub* s = new(code_length) VtableStub(false, itable_index);
  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

#ifndef PRODUCT
  if (CountCompiledCalls) {
    __ lea(r10, ExternalAddress((address) SharedRuntime::nof_megamorphic_calls_addr()));
    __ incrementw(Address(r10));
  }
#endif

  // Entry arguments:
  //  rscratch2: Interface
  //  j_rarg0: Receiver

  // Free registers (non-args) are r0 (interface), rmethod

  // get receiver (need to skip return address on top of stack)

  assert(VtableStub::receiver_location() == j_rarg0->as_VMReg(), "receiver expected in j_rarg0");
  // get receiver klass (also an implicit null-check)
  address npe_addr = __ pc();

  // Most registers are in use; we'll use r0, rmethod, r10, r11
  __ load_klass(r10, j_rarg0);

  Label throw_icce;

  // Get Method* and entrypoint for compiler
  __ lookup_interface_method(// inputs: rec. class, interface, itable index
                             r10, rscratch2, itable_index,
                             // outputs: method, scan temp. reg
                             rmethod, r11,
                             throw_icce);

  // method (rmethod): Method*
  // j_rarg0: receiver

#ifdef ASSERT
  if (DebugVtables) {
    Label L2;
    __ cbz(rmethod, L2);
    __ ldr(rscratch1, Address(rmethod, Method::from_compiled_offset()));
    __ cbnz(rscratch1, L2);
    __ stop("compiler entrypoint is null");
    __ bind(L2);
  }
#endif // ASSERT

  // rmethod: Method*
  // j_rarg0: receiver
  address ame_addr = __ pc();
  __ ldr(rscratch1, Address(rmethod, Method::from_compiled_offset()));
  __ br(rscratch1);

  __ bind(throw_icce);
  __ far_jump(RuntimeAddress(StubRoutines::throw_IncompatibleClassChangeError_entry()));

  __ flush();

  if (PrintMiscellaneous && (WizardMode || Verbose)) {
    tty->print_cr("itable #%d at " PTR_FORMAT "[%d] left over: %d",
                  itable_index, p2i(s->entry_point()),
                  (int)(s->code_end() - s->entry_point()),
                  (int)(s->code_end() - __ pc()));
  }
  guarantee(__ pc() <= s->code_end(), "overflowed buffer");

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}
Example no. 19
static void find(intptr_t x, bool print_pc) {
  address addr = (address)x;

  CodeBlob* b = CodeCache::find_blob_unsafe(addr);
  if (b != NULL) {
    if (b->is_buffer_blob()) {
      // the interpreter is generated into a buffer blob
      InterpreterCodelet* i = Interpreter::codelet_containing(addr);
      if (i != NULL) {
        i->print();
        return;
      }
      if (Interpreter::contains(addr)) {
        tty->print_cr(INTPTR_FORMAT " is pointing into interpreter code (not bytecode specific)", addr);
        return;
      }
      if (AdapterHandlerLibrary::contains(b)) {
        AdapterHandlerLibrary::print_handler(b);
      }
      // the stubroutines are generated into a buffer blob
      StubCodeDesc* d = StubCodeDesc::desc_for(addr);
      if (d != NULL) {
        d->print();
        if (print_pc) tty->cr();
        return;
      }
      if (StubRoutines::contains(addr)) {
        tty->print_cr(INTPTR_FORMAT " is pointing to an (unnamed) stub routine", addr);
        return;
      }
      // the InlineCacheBuffer is using stubs generated into a buffer blob
      if (InlineCacheBuffer::contains(addr)) {
        tty->print_cr(INTPTR_FORMAT " is pointing into InlineCacheBuffer", addr);
        return;
      }
      VtableStub* v = VtableStubs::stub_containing(addr);
      if (v != NULL) {
        v->print();
        return;
      }
    }
    if (print_pc && b->is_nmethod()) {
      ResourceMark rm;
      tty->print("%#p: Compiled ", addr);
      ((nmethod*)b)->method()->print_value_on(tty);
      tty->print("  = (CodeBlob*)" INTPTR_FORMAT, b);
      tty->cr();
      return;
    }
    if ( b->is_nmethod()) {
      if (b->is_zombie()) {
        tty->print_cr(INTPTR_FORMAT " is zombie nmethod", b);
      } else if (b->is_not_entrant()) {
        tty->print_cr(INTPTR_FORMAT " is non-entrant nmethod", b);
      }
    }
    b->print();
    return;
  }

  if (Universe::heap()->is_in(addr)) {
    HeapWord* p = Universe::heap()->block_start(addr);
    bool print = false;
    // If we couldn't find it, it may just mean that the heap wasn't parseable.
    // See if we were just given an oop directly
    if (p != NULL && Universe::heap()->block_is_obj(p)) {
      print = true;
    } else if (p == NULL && ((oopDesc*)addr)->is_oop()) {
      p = (HeapWord*) addr;
      print = true;
    }
    if (print) {
      oop(p)->print();
      if (p != (HeapWord*)x && oop(p)->is_constMethod() &&
          constMethodOop(p)->contains(addr)) {
        Thread *thread = Thread::current();
        HandleMark hm(thread);
        methodHandle mh (thread, constMethodOop(p)->method());
        if (!mh->is_native()) {
          tty->print_cr("bci_from(%p) = %d; print_codes():",
                        addr, mh->bci_from(address(x)));
          mh->print_codes();
        }
      }
      return;
    }
  } else if (Universe::heap()->is_in_reserved(addr)) {
    tty->print_cr(INTPTR_FORMAT " is an unallocated location in the heap", addr);
    return;
  }

  if (JNIHandles::is_global_handle((jobject) addr)) {
    tty->print_cr(INTPTR_FORMAT " is a global jni handle", addr);
    return;
  }
  if (JNIHandles::is_weak_global_handle((jobject) addr)) {
    tty->print_cr(INTPTR_FORMAT " is a weak global jni handle", addr);
    return;
  }
  if (JNIHandleBlock::any_contains((jobject) addr)) {
    tty->print_cr(INTPTR_FORMAT " is a local jni handle", addr);
    return;
  }

  for(JavaThread *thread = Threads::first(); thread; thread = thread->next()) {
    // Check for privilege stack
    if (thread->privileged_stack_top() != NULL && thread->privileged_stack_top()->contains(addr)) {
      tty->print_cr(INTPTR_FORMAT " is pointing into the privilege stack for thread: " INTPTR_FORMAT, addr, thread);
      return;
    }
    // If the addr is a java thread print information about that.
    if (addr == (address)thread) {
       thread->print();
       return;
    }
  }

  // Try an OS specific find
  if (os::find(addr)) {
    return;
  }

  if (print_pc) {
    tty->print_cr(INTPTR_FORMAT ": probably in C++ code; check debugger", addr);
    Disassembler::decode(same_page(addr-40,addr),same_page(addr+40,addr));
    return;
  }

  tty->print_cr(INTPTR_FORMAT " is pointing to unknown location", addr);
}
Example no. 20
VtableStub* VtableStubs::create_itable_stub(int itable_index) {
  // Note well: pd_code_size_limit is the absolute minimum we can get
  // away with.  If you add code here, bump the code stub size
  // returned by pd_code_size_limit!
  const int amd64_code_length = VtableStub::pd_code_size_limit(false);
  VtableStub* s = new(amd64_code_length) VtableStub(false, itable_index);
  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), amd64_code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

#ifndef PRODUCT
  if (CountCompiledCalls) {
    __ incrementl(ExternalAddress((address) SharedRuntime::nof_megamorphic_calls_addr()));
  }
#endif

  // Entry arguments:
  //  rax: Interface
  //  j_rarg0: Receiver

  // Free registers (non-args) are rax (interface), rbx

  // get receiver (need to skip return address on top of stack)

  assert(VtableStub::receiver_location() == j_rarg0->as_VMReg(), "receiver expected in j_rarg0");
  // get receiver klass (also an implicit null-check)
  address npe_addr = __ pc();

  // Most registers are in use; we'll use rax, rbx, r10, r11
  // (various calling sequences use r[cd]x, r[sd]i, r[89]; stay away from them)
  __ load_klass(r10, j_rarg0);

  // If we take a trap while this arg is on the stack we will not
  // be able to walk the stack properly. This is not an issue except
  // when there are mistakes in this assembly code that could generate
  // a spurious fault. Ask me how I know...

  const Register method = rbx;
  Label throw_icce;

  // Get methodOop and entrypoint for compiler
  __ lookup_interface_method(// inputs: rec. class, interface, itable index
                             r10, rax, itable_index,
                             // outputs: method, scan temp. reg
                             method, r11,
                             throw_icce);

  // method (rbx): methodOop
  // j_rarg0: receiver

#ifdef ASSERT
  if (DebugVtables) {
    Label L2;
    __ cmpptr(method, (int32_t)NULL_WORD);
    __ jcc(Assembler::equal, L2);
    __ cmpptr(Address(method, methodOopDesc::from_compiled_offset()), (int32_t)NULL_WORD);
    __ jcc(Assembler::notZero, L2);
    __ stop("compiler entrypoint is null");
    __ bind(L2);
  }
#endif // ASSERT

  // rbx: methodOop
  // j_rarg0: receiver
  address ame_addr = __ pc();
  __ jmp(Address(method, methodOopDesc::from_compiled_offset()));

  __ bind(throw_icce);
  __ jump(RuntimeAddress(StubRoutines::throw_IncompatibleClassChangeError_entry()));

  __ flush();

  if (PrintMiscellaneous && (WizardMode || Verbose)) {
    tty->print_cr("itable #%d at "PTR_FORMAT"[%d] left over: %d",
                  itable_index, s->entry_point(),
                  (int)(s->code_end() - s->entry_point()),
                  (int)(s->code_end() - __ pc()));
  }
  guarantee(__ pc() <= s->code_end(), "overflowed buffer");
  // shut the door on sizing bugs
  int slop = 3;  // 32-bit offset is this much larger than an 8-bit one
  assert(itable_index > 10 || __ pc() + slop <= s->code_end(), "room for 32-bit offset");

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}
Example no. 21
// Used by compiler only; may use only caller saved, non-argument registers
// NOTE:  %%%% if any change is made to this stub make sure that the function
//             pd_code_size_limit is changed to ensure the correct size for VtableStub
VtableStub* VtableStubs::create_vtable_stub(int vtable_index) {
  const int sparc_code_length = VtableStub::pd_code_size_limit(true);
  VtableStub* s = new(sparc_code_length) VtableStub(true, vtable_index);
  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), sparc_code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

#ifndef PRODUCT
  if (CountCompiledCalls) {
    __ inc_counter(SharedRuntime::nof_megamorphic_calls_addr(), G5, G3_scratch);
  }
#endif /* PRODUCT */

  assert(VtableStub::receiver_location() == O0->as_VMReg(), "receiver expected in O0");

  // get receiver klass
  address npe_addr = __ pc();
  __ load_klass(O0, G3_scratch);

  // set Method* (in case of interpreted method), and destination address
#ifndef PRODUCT
  if (DebugVtables) {
    Label L;
    // check offset vs vtable length
    __ ld(G3_scratch, in_bytes(Klass::vtable_length_offset()), G5);
    __ cmp_and_br_short(G5, vtable_index*vtableEntry::size(), Assembler::greaterUnsigned, Assembler::pt, L);
    __ set(vtable_index, O2);
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), O0, O2);
    __ bind(L);
  }
#endif

  __ lookup_virtual_method(G3_scratch, vtable_index, G5_method);

#ifndef PRODUCT
  if (DebugVtables) {
    Label L;
    __ br_notnull_short(G5_method, Assembler::pt, L);
    __ stop("Vtable entry is ZERO");
    __ bind(L);
  }
#endif

  address ame_addr = __ pc();  // if the vtable entry is null, the method is abstract
                               // NOTE: for vtable dispatches, the vtable entry will never be null.

  __ ld_ptr(G5_method, in_bytes(Method::from_compiled_offset()), G3_scratch);

  // jump to target (either compiled code or c2iadapter)
  __ JMP(G3_scratch, 0);
  // load Method* (in case we call c2iadapter)
  __ delayed()->nop();

  masm->flush();

  if (PrintMiscellaneous && (WizardMode || Verbose)) {
    tty->print_cr("vtable #%d at " PTR_FORMAT "[%d] left over: %d",
                  vtable_index, p2i(s->entry_point()),
                  (int)(s->code_end() - s->entry_point()),
                  (int)(s->code_end() - __ pc()));
  }
  guarantee(__ pc() <= s->code_end(), "overflowed buffer");
  // shut the door on sizing bugs
  int slop = 2*BytesPerInstWord;  // 32-bit offset is this much larger than a 13-bit one
  assert(vtable_index > 10 || __ pc() + slop <= s->code_end(), "room for sethi;add");

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}
Example no. 22
// NOTE:  %%%% if any change is made to this stub make sure that the function
//             pd_code_size_limit is changed to ensure the correct size for VtableStub
VtableStub* VtableStubs::create_itable_stub(int itable_index) {
  const int sparc_code_length = VtableStub::pd_code_size_limit(false);
  VtableStub* s = new(sparc_code_length) VtableStub(false, itable_index);
  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), sparc_code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

  Register G3_Klass = G3_scratch;
  Register G5_interface = G5;  // Passed in as an argument
  Label search;

  // Entry arguments:
  //  G5_interface: Interface
  //  O0:           Receiver
  assert(VtableStub::receiver_location() == O0->as_VMReg(), "receiver expected in O0");

  // get receiver klass (also an implicit null-check)
  address npe_addr = __ pc();
  __ load_klass(O0, G3_Klass);

  // Push a new window to get some temp registers.  This chops the head of all
  // my 64-bit %o registers in the LION build, but this is OK because no longs
  // are passed in the %o registers.  Instead, longs are passed in G1 and G4
  // and so those registers are not available here.
  __ save(SP,-frame::register_save_words*wordSize,SP);

#ifndef PRODUCT
  if (CountCompiledCalls) {
    __ inc_counter(SharedRuntime::nof_megamorphic_calls_addr(), L0, L1);
  }
#endif /* PRODUCT */

  Label throw_icce;

  Register L5_method = L5;
  __ lookup_interface_method(// inputs: rec. class, interface, itable index
                             G3_Klass, G5_interface, itable_index,
                             // outputs: method, scan temp. reg
                             L5_method, L2, L3,
                             throw_icce);

#ifndef PRODUCT
  if (DebugVtables) {
    Label L01;
    __ br_notnull_short(L5_method, Assembler::pt, L01);
    __ stop("Method* is null");
    __ bind(L01);
  }
#endif

  // If the following load is through a NULL pointer, we'll take an OS
  // exception that should translate into an AbstractMethodError.  We need the
  // window count to be correct at that time.
  __ restore(L5_method, 0, G5_method);
  // Restore registers *before* the AME point.

  address ame_addr = __ pc();   // if the vtable entry is null, the method is abstract
  __ ld_ptr(G5_method, in_bytes(Method::from_compiled_offset()), G3_scratch);

  // G5_method:  Method*
  // O0:         Receiver
  // G3_scratch: entry point
  __ JMP(G3_scratch, 0);
  __ delayed()->nop();

  __ bind(throw_icce);
  AddressLiteral icce(StubRoutines::throw_IncompatibleClassChangeError_entry());
  __ jump_to(icce, G3_scratch);
  __ delayed()->restore();

  masm->flush();

  if (PrintMiscellaneous && (WizardMode || Verbose)) {
    tty->print_cr("itable #%d at " PTR_FORMAT "[%d] left over: %d",
                  itable_index, p2i(s->entry_point()),
                  (int)(s->code_end() - s->entry_point()),
                  (int)(s->code_end() - __ pc()));
  }
  guarantee(__ pc() <= s->code_end(), "overflowed buffer");
  // shut the door on sizing bugs
  int slop = 2*BytesPerInstWord;  // 32-bit offset is this much larger than a 13-bit one
  assert(itable_index > 10 || __ pc() + slop <= s->code_end(), "room for sethi;add");

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}
Example no. 23
// used by compiler only; may use only caller saved registers eax, ebx, ecx.
// edx holds first int arg, esi, edi, ebp are callee-save & must be preserved.
// Leave receiver in ecx; required behavior when +OptoArgsInRegisters
// is modified to put first oop in ecx.
//
// NOTE: If this code is used by the C1, the receiver_location is always 0.
VtableStub* VtableStubs::create_vtable_stub(int vtable_index, int receiver_location) {
  const int i486_code_length = VtableStub::pd_code_size_limit(true);
  VtableStub* s = new(i486_code_length) VtableStub(true, vtable_index, receiver_location);
  ResourceMark rm;
  MacroAssembler* masm = new MacroAssembler(new CodeBuffer(s->entry_point(), i486_code_length));

#ifndef PRODUCT
#ifdef COMPILER2
  if (CountCompiledCalls) __ incl(Address((int)OptoRuntime::nof_megamorphic_calls_addr(), relocInfo::none));
#endif
#endif

  // get receiver (need to skip return address on top of stack)
#ifdef COMPILER1
  assert(receiver_location == 0, "receiver is always in ecx - no location info needed");
#else
  if( receiver_location < SharedInfo::stack0 ) {
    assert(receiver_location == ECX_num, "receiver expected in ecx");
  } else {
    __ movl(ecx, Address(esp, SharedInfo::reg2stack(OptoReg::Name(receiver_location)) * wordSize+wordSize/*skip return address*/));
  }
#endif
  // get receiver klass
  address npe_addr = __ pc();
  __ movl(eax, Address(ecx, oopDesc::klass_offset_in_bytes()));
  // compute entry offset (in words)
  int entry_offset = instanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size();
#ifndef PRODUCT
  if (DebugVtables) { 
    Label L;
    // check offset vs vtable length
    __ cmpl(Address(eax, instanceKlass::vtable_length_offset()*wordSize), vtable_index*vtableEntry::size());
    __ jcc(Assembler::greater, L);
    __ movl(ebx, vtable_index);
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), ecx, ebx);
    __ bind(L);
  }
#endif // PRODUCT
  // load methodOop and target address
#ifdef COMPILER1
  __ movl(ebx, Address(eax, entry_offset*wordSize + vtableEntry::method_offset_in_bytes()));
  address ame_addr = __ pc();
  __ movl(edx, Address(ebx, methodOopDesc::from_compiled_code_entry_point_offset()));
  if (DebugVtables) {
    Label L;
    __ testl(edx, edx);
    __ jcc(Assembler::notZero, L);
    __ stop("Vtable entry is NULL");
    __ bind(L);
  }
  // eax: receiver klass
  // ebx: methodOop
  // ecx: receiver
  // edx: entry point
  __ jmp(edx);
#else
  __ movl(eax, Address(eax, entry_offset*wordSize + vtableEntry::method_offset_in_bytes()));
  address ame_addr = __ pc();
  __ movl(ebx, Address(eax, methodOopDesc::from_compiled_code_entry_point_offset()));  

  if (DebugVtables) {
    Label L;
    __ testl(ebx, ebx);
    __ jcc(Assembler::notZero, L);
    __ stop("Vtable entry is NULL");
    __ bind(L);
  }

  // jump to target (either compiled code or c2iadapter)
  // eax: methodOop (in case we call c2iadapter)
  __ jmp(ebx);
#endif // COMPILER1

  masm->flush();
  s->set_exception_points(npe_addr, ame_addr);
  return s;
}