Example #1
// MT-safe patching of a jmp instruction (and the following word).
// First patches the second word, and then atomically replaces
// the first word with the first new instruction word.
// Other processors might briefly see the old first word
// followed by the new second word.  This is OK if the old
// second word is harmless, and the new second word may be
// harmlessly executed in the delay slot of the jump.
void NativeGeneralJump::replace_mt_safe(address instr_addr, address code_buffer) {
   assert(Patching_lock->is_locked() ||
         SafepointSynchronize::is_at_safepoint(), "concurrent code patching");
   assert (instr_addr != NULL, "illegal address for code patching");
   NativeGeneralJump* h_jump = nativeGeneralJump_at(instr_addr); // checking that it is a jump
   assert(NativeGeneralJump::instruction_size == 8, "wrong instruction size; must be 8");
   int i0 = ((int*)code_buffer)[0];
   int i1 = ((int*)code_buffer)[1];
   int* contention_addr = (int*) h_jump->addr_at(1*BytesPerInstWord);
   assert(inv_op(*contention_addr) == Assembler::arith_op ||
          *contention_addr == nop_instruction(),
          "must not interfere with original call");
   // The set_long_at calls do the ICache invalidation, so we just need to do them in reverse order
   h_jump->set_long_at(1*BytesPerInstWord, i1);
   h_jump->set_long_at(0*BytesPerInstWord, i0);
   // NOTE:  It is possible that another thread T will execute
   // only the second patched word.
   // In other words, since the original instruction is this
   //    jmp patching_stub; nop                    (NativeGeneralJump)
   // and the new sequence from the buffer is this:
   //    sethi %hi(K), %r; add %r, %lo(K), %r      (NativeMovConstReg)
   // what T will execute is this:
   //    jmp patching_stub; add %r, %lo(K), %r
   // thereby putting garbage into %r before calling the patching stub.
   // This is OK, because the patching stub ignores the value of %r.

   // Make sure the first-patched instruction, which may co-exist
   // briefly with the old jump, will do something harmless.
   assert(inv_op(*contention_addr) == Assembler::arith_op ||
          *contention_addr == nop_instruction(),
          "must not interfere with original call");
}
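A minimal, standalone sketch (not HotSpot code) of the same ordering idea: write the non-commit word first and make the first word the single atomic commit point, so a concurrent reader can only ever pair the old first word with the new second word, never the reverse. The type and function names below are hypothetical.

#include <atomic>
#include <cstdint>

// Hypothetical two-word patch site; word0 is the commit word.
struct TwoWordSite {
  std::atomic<uint32_t> word0;
  std::atomic<uint32_t> word1;
};

// Mirror of the ordering above: store the second word first, then publish by
// atomically storing the first word. A reader that still sees the old word0
// may pair it with the new word1 -- the window the SPARC code tolerates
// because that combination is harmless.
inline void patch_two_words(TwoWordSite& site, uint32_t w0, uint32_t w1) {
  site.word1.store(w1, std::memory_order_release);
  site.word0.store(w0, std::memory_order_release);   // commit point
}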
Example #2
void Relocation::pd_set_call_destination(address x) {
  NativeInstruction* ni = nativeInstruction_at(addr());
  if (ni->is_call()) {
    nativeCall_at(addr())->set_destination(x);
  } else if (ni->is_jump()) {
    NativeJump* nj = nativeJump_at(addr());

    // Unresolved jumps are recognized by a destination of -1.
    // However, 64-bit can't actually produce such an address, so it
    // encodes a jump to self, and jump_destination() will return -1
    // as the signal. We must not relocate this jmp or the IC code
    // will not see it as unresolved.

    if (nj->jump_destination() == (address) -1) {
      x = addr(); // jump to self
    }
    nj->set_jump_destination(x);
  } else if (ni->is_cond_jump()) {
    // %%%% kludge this, for now, until we get a jump_destination method
    address old_dest = nativeGeneralJump_at(addr())->jump_destination();
    address disp = Assembler::locate_operand(addr(), Assembler::call32_operand);
    *(jint*)disp += (x - old_dest);
  } else if (ni->is_mov_literal64()) {
    ((NativeMovConstReg*)ni)->set_data((intptr_t)x);
  } else {
    ShouldNotReachHere();
  }
}
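The cond-jump branch above adjusts the stored rel32 operand in place by the delta between the new and old destinations rather than recomputing it from scratch. A standalone sketch of that arithmetic (the helper name is illustrative, not HotSpot's):

#include <cstdint>
#include <cstring>

// Shift an existing rel32 displacement by (new_dest - old_dest); the result
// targets new_dest without needing to know where the displacement is
// measured from.
inline void retarget_rel32(uint8_t* disp_field, uint8_t* old_dest, uint8_t* new_dest) {
  int32_t disp;
  std::memcpy(&disp, disp_field, sizeof(disp));        // current operand
  disp += static_cast<int32_t>(new_dest - old_dest);   // apply the delta
  std::memcpy(disp_field, &disp, sizeof(disp));
}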
Example #3
void CodeInstaller::pd_relocate_ForeignCall(NativeInstruction* inst, jlong foreign_call_destination, TRAPS) {
  address pc = (address) inst;
  if (inst->is_call()) {
    // NOTE: for a call without a mov, the offset must fit a 32-bit immediate
    //       see also CompilerToVM.getMaxCallTargetOffset() and the
    //       reachability sketch after this function
    NativeCall* call = nativeCall_at(pc);
    call->set_destination((address) foreign_call_destination);
    _instructions->relocate(call->instruction_address(), runtime_call_Relocation::spec(), Assembler::call32_operand);
  } else if (inst->is_mov_literal64()) {
    NativeMovConstReg* mov = nativeMovConstReg_at(pc);
    mov->set_data((intptr_t) foreign_call_destination);
    _instructions->relocate(mov->instruction_address(), runtime_call_Relocation::spec(), Assembler::imm_operand);
  } else if (inst->is_jump()) {
    NativeJump* jump = nativeJump_at(pc);
    jump->set_jump_destination((address) foreign_call_destination);
    _instructions->relocate(jump->instruction_address(), runtime_call_Relocation::spec(), Assembler::call32_operand);
  } else if (inst->is_cond_jump()) {
    address old_dest = nativeGeneralJump_at(pc)->jump_destination();
    address disp = Assembler::locate_operand(pc, Assembler::call32_operand);
    *(jint*) disp += ((address) foreign_call_destination) - old_dest;
    _instructions->relocate(pc, runtime_call_Relocation::spec(), Assembler::call32_operand);
  } else {
    JVMCI_ERROR("unsupported relocation for foreign call");
  }

  TRACE_jvmci_3("relocating (foreign call)  at " PTR_FORMAT, p2i(inst));
}
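The NOTE in the call case above says the displacement must fit a 32-bit immediate. A hedged sketch of that reachability check for a 5-byte x86 call (the helper name and its caller-side use are assumptions, not HotSpot API):

#include <cstdint>

// A direct CALL rel32 (E8 <rel32>) can only reach targets whose displacement,
// measured from the end of the 5-byte instruction, fits in a signed 32 bits.
inline bool fits_call_rel32(const uint8_t* call_site, const uint8_t* target) {
  intptr_t disp = target - (call_site + 5);
  return disp == static_cast<int32_t>(disp);
}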
// MT-safe patching of a long jump instruction.
// First patches the first word of the instruction with two jmps that jump to
// themselves (a spinlock). Then patches the last byte, and finally atomically
// replaces the jmps with the first 4 bytes of the new instruction.
void NativeGeneralJump::replace_mt_safe(address instr_addr, address code_buffer) {
   assert (instr_addr != NULL, "illegal address for code patching");
#ifdef ASSERT
   NativeGeneralJump* n_jump =  nativeGeneralJump_at (instr_addr); // checking that it is a jump
#endif // ASSERT

   // Temporary code
   unsigned char patch[4];
   assert(sizeof(patch)==sizeof(jint), "sanity check");
   patch[0] = 0xEB;       // jmp rel8
   patch[1] = 0xFE;       // jmp to self
   patch[2] = 0xEB;
   patch[3] = 0xFE;
   
   // First patch dummy jmp in place
   *(jint*)instr_addr = *(jint *)patch;

   // Patch byte 4 (the final byte of the 5-byte jmp)
   address byte_4_adr = instr_addr+4;
   *byte_4_adr = code_buffer[4];

   // Patch bytes 0-3
   *(jint*)instr_addr = *(jint *)code_buffer;  

#ifdef ASSERT
   // verify patching
   for (int i = 0; i < NativeGeneralJump::instruction_size; i++) { // bytewise comparing
     address ptr = (address)((intptr_t)code_buffer + i);
     int a_byte = (*ptr) & 0xFF;
     assert(*((address)((intptr_t)instr_addr + i)) == a_byte, "mt safe patching failed");
   }
#endif

   ICache::invalidate_range(instr_addr, NativeGeneralJump::instruction_size);
}
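A hypothetical caller-side sketch of what the code_buffer handed to replace_mt_safe() might contain for a plain 5-byte jmp: opcode E9 followed by a rel32 measured from the end of the instruction (the helper below is illustrative, not HotSpot code):

#include <cstdint>
#include <cstring>

// Build the 5-byte "jmp rel32" image that will be patched over instr_addr.
inline void make_jmp_rel32(uint8_t* code_buffer, uint8_t* instr_addr, uint8_t* dest) {
  code_buffer[0] = 0xE9;                                  // jmp rel32 opcode
  int32_t rel = static_cast<int32_t>(dest - (instr_addr + 5));
  std::memcpy(code_buffer + 1, &rel, sizeof(rel));        // bytes 1-4
}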
void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
#ifndef PRODUCT
  NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());

  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(method_holder->data() == 0 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");
#endif
  // Update stub.
  method_holder->set_data((intptr_t)callee());
  NativeGeneralJump::insert_unconditional(method_holder->next_instruction_address(), entry);
  ICache::invalidate_range(stub, to_interp_stub_size());
  // Update jump to call.
  set_destination_mt_safe(stub);
}
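For orientation, a sketch of the stub layout this routine assumes: a NativeMovConstReg that loads the Method*, immediately followed by a NativeGeneralJump to the interpreter entry. The concrete offsets below are assumptions for an x86_64-style encoding, not HotSpot constants:

// Assumed layout of the to-interpreter stub walked above:
//
//   +0   48 B8+r <imm64>   movabs reg, Method*        <- NativeMovConstReg
//   +10  E9 <rel32>        jmp    interpreter entry   <- NativeGeneralJump
//
// set_data() rewrites the imm64, insert_unconditional() rewrites the jmp,
// and only then is the call site itself retargeted at the stub.
struct ToInterpStubSketch {
  static const int mov_offset  = 0;   // start of the movabs
  static const int jump_offset = 10;  // start of the jmp rel32
  static const int stub_size   = 15;  // total bytes in this sketch
};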
address Relocation::pd_call_destination() {
  NativeInstruction* ni = nativeInstruction_at(addr());
  if (ni->is_call())
    return nativeCall_at(addr())->destination();
#if 0
  else if (ni->is_jump())
    return nativeJump_at(addr())->jump_destination();
  else if (ni->is_cond_jump())
    return nativeGeneralJump_at(addr())->jump_destination();
  else
#endif  /* excise for now */
    { ShouldNotReachHere(); return NULL; }
}
Example #7
void Relocation::pd_set_call_destination(address x) {
  if (NativeCall::is_call_at(addr())) {
    NativeCall* call = nativeCall_at(addr());
    call->set_destination(x);
    return;
  }
  if (NativeFarCall::is_call_at(addr())) {
    NativeFarCall* call = nativeFarCall_at(addr());
    call->set_destination(x);
    return;
  }
  // Special case:  Patchable branch local to the code cache.
  // This will break badly if the code cache grows larger than a few Mb.
  NativeGeneralJump* br = nativeGeneralJump_at(addr());
  br->set_jump_destination(x);
}
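The special case relies on the branch's limited displacement reaching anywhere in the code cache. A hypothetical guard one could pair with it (the displacement width is a parameter here, not a HotSpot constant):

#include <cstdint>

// True if target is within the signed range of a displacement field that is
// disp_bits wide, measured from branch_pc (ignores any word-scaling the real
// instruction encoding may apply).
inline bool branch_can_reach(const uint8_t* branch_pc, const uint8_t* target, int disp_bits) {
  intptr_t disp  = target - branch_pc;
  intptr_t limit = intptr_t(1) << (disp_bits - 1);
  return disp >= -limit && disp < limit;
}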
void Relocation::pd_set_call_destination(address x, intptr_t off) {
  NativeInstruction* ni = nativeInstruction_at(addr());
  if (ni->is_call())
    nativeCall_at(addr())->set_destination(x);
#if 0 /* excise for now */
  else if (ni->is_jump())
    nativeJump_at(addr())->set_jump_destination(x);
  else if (ni->is_cond_jump()) {
    // %%%% kludge this, for now, until we get a jump_destination method
    address old_dest = nativeGeneralJump_at(addr())->jump_destination();
    address disp = Assembler::locate_operand(addr(), Assembler::call32_operand);
    *(jint*)disp += (x - old_dest);
  }
#endif  /* excise for now */
  else
    { ShouldNotReachHere(); }
}
Example #9
address Relocation::pd_call_destination(address orig_addr) {
  intptr_t adj = 0;
  if (orig_addr != NULL) {
    // We just moved this call instruction from orig_addr to addr().
    // This means its target will appear to have grown by addr() - orig_addr.
    adj = -( addr() - orig_addr );
  }
  if (NativeCall::is_call_at(addr())) {
    NativeCall* call = nativeCall_at(addr());
    return call->destination() + adj;
  }
  if (NativeFarCall::is_call_at(addr())) {
    NativeFarCall* call = nativeFarCall_at(addr());
    return call->destination() + adj;
  }
  // Special case:  Patchable branch local to the code cache.
  // This will break badly if the code cache grows larger than a few Mb.
  NativeGeneralJump* br = nativeGeneralJump_at(addr());
  return br->jump_destination() + adj;
}
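A worked example of the adjustment arithmetic with made-up addresses: a pc-relative call at 0x1000 targeting 0x5000 that is copied, bits unchanged, to 0x1400 appears to target 0x5400 when decoded at the new location; adding adj = -(addr() - orig_addr) recovers 0x5000.

#include <cassert>
#include <cstdint>

inline void adjustment_example() {
  intptr_t orig_addr = 0x1000, addr = 0x1400, target = 0x5000;
  intptr_t raw_dest  = target + (addr - orig_addr);   // what the moved bits decode to
  intptr_t adj       = -(addr - orig_addr);
  assert(raw_dest + adj == target);                   // original target recovered
}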
Example #10
address Relocation::pd_call_destination(address orig_addr) {
  intptr_t adj = 0;
  if (orig_addr != NULL) {
    // We just moved this call instruction from orig_addr to addr().
    // This means its target will appear to have grown by addr() - orig_addr.
    adj = -( addr() - orig_addr );
  }
  NativeInstruction* ni = nativeInstruction_at(addr());
  if (ni->is_call()) {
    return nativeCall_at(addr())->destination() + adj;
  } else if (ni->is_jump()) {
    return nativeJump_at(addr())->jump_destination() + adj;
  } else if (ni->is_cond_jump()) {
    return nativeGeneralJump_at(addr())->jump_destination() + adj;
  } else if (ni->is_mov_literal64()) {
    return (address) ((NativeMovConstReg*)ni)->data();
  } else {
    ShouldNotReachHere();
    return NULL;
  }
}