Example 1
// If a deoptimization happens, this function returns the point of the next bytecode to continue execution
address AbstractInterpreter::deopt_continue_after_entry(methodOop method, address bcp, int callee_parameters, bool is_top_frame) {
  assert(method->contains(bcp), "just checkin'");
  Bytecodes::Code code   = Bytecodes::java_code_at(bcp);
  assert(!Interpreter::bytecode_should_reexecute(code), "should not reexecute");
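  // Bytecodes that must be re-executed after a deoptimization take a separate entry path;
  // this function only handles bytecodes whose execution has already completed.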
  int             bci    = method->bci_from(bcp);
  int             length = -1; // initial value for debugging
  // compute continuation length
  length = Bytecodes::length_at(bcp);
  // compute result type
  BasicType type = T_ILLEGAL;

  switch (code) {
    case Bytecodes::_invokevirtual  :
    case Bytecodes::_invokespecial  :
    case Bytecodes::_invokestatic   :
    case Bytecodes::_invokeinterface: {
      Thread *thread = Thread::current();
      ResourceMark rm(thread);
      methodHandle mh(thread, method);
      type = Bytecode_invoke_at(mh, bci)->result_type(thread);
      // since the cache entry might not be initialized:
      // (NOT needed for the old calling convention)
      if (!is_top_frame) {
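        // The rewriter replaces the invoke's original constant pool index with a
        // constant pool cache index stored in native byte order, hence get_native_u2.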
        int index = Bytes::get_native_u2(bcp+1);
        method->constants()->cache()->entry_at(index)->set_parameter_size(callee_parameters);
      }
      break;
    }

    case Bytecodes::_invokedynamic: {
      Thread *thread = Thread::current();
      ResourceMark rm(thread);
      methodHandle mh(thread, method);
      type = Bytecode_invoke_at(mh, bci)->result_type(thread);
      // since the cache entry might not be initialized:
      // (NOT needed for the old calling convention)
      if (!is_top_frame) {
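        // invokedynamic carries a 4-byte operand; after rewriting it refers to a
        // secondary constant pool cache entry, read here in native byte order.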
        int index = Bytes::get_native_u4(bcp+1);
        method->constants()->cache()->secondary_entry_at(index)->set_parameter_size(callee_parameters);
      }
      break;
    }

    case Bytecodes::_ldc   :
    case Bytecodes::_ldc_w : // fall through
    case Bytecodes::_ldc2_w:
      {
        Thread *thread = Thread::current();
        ResourceMark rm(thread);
        methodHandle mh(thread, method);
        type = Bytecode_loadconstant_at(mh, bci)->result_type();
        break;
      }

    default:
      type = Bytecodes::result_type(code);
      break;
  }

  // return entry point for computed continuation state & bytecode length
  return
    is_top_frame
    ? Interpreter::deopt_entry (as_TosState(type), length)
    : Interpreter::return_entry(as_TosState(type), length);
}
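
The index reads above use Bytes::get_native_u2 / get_native_u4 rather than the big-endian ("Java order") readers used for operands still in class-file form. A minimal standalone sketch of that distinction, for illustration only (plain C++, not the HotSpot Bytes class; the reader names are made up):

#include <cstdint>
#include <cstdio>
#include <cstring>

// Big-endian read, as operands appear in the class file.
static uint16_t read_java_u2(const uint8_t* p) {
  return static_cast<uint16_t>((p[0] << 8) | p[1]);
}

// Host-byte-order read, as the rewriter stores patched constant pool cache indices.
static uint16_t read_native_u2(const uint8_t* p) {
  uint16_t v;
  std::memcpy(&v, p, sizeof(v));
  return v;
}

int main() {
  const uint8_t operand[2] = { 0x01, 0x02 };
  std::printf("Java order  : %u\n", (unsigned) read_java_u2(operand));   // always 258
  std::printf("native order: %u\n", (unsigned) read_native_u2(operand)); // 258 on big-endian hosts, 513 on little-endian
  return 0;
}
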
Example 2
bool MethodComparator::args_same(Bytecodes::Code c_old, Bytecodes::Code c_new) {
  // BytecodeStream returns the correct standard Java bytecodes for various "fast"
  // bytecode versions, so we don't have to bother about them here.
  switch (c_old) {
  case Bytecodes::_new            : // fall through
  case Bytecodes::_anewarray      : // fall through
  case Bytecodes::_multianewarray : // fall through
  case Bytecodes::_checkcast      : // fall through
  case Bytecodes::_instanceof     : {
    u2 cpi_old = _s_old->get_index_u2();
    u2 cpi_new = _s_new->get_index_u2();
    if ((_old_cp->klass_at_noresolve(cpi_old) != _new_cp->klass_at_noresolve(cpi_new)))
        return false;
    if (c_old == Bytecodes::_multianewarray &&
        *(jbyte*)(_s_old->bcp() + 3) != *(jbyte*)(_s_new->bcp() + 3))
      return false;
    break;
  }

  case Bytecodes::_getstatic       : // fall through
  case Bytecodes::_putstatic       : // fall through
  case Bytecodes::_getfield        : // fall through
  case Bytecodes::_putfield        : // fall through
  case Bytecodes::_invokevirtual   : // fall through
  case Bytecodes::_invokespecial   : // fall through
  case Bytecodes::_invokestatic    : // fall through
  case Bytecodes::_invokedynamic   : // fall through
  case Bytecodes::_invokeinterface : {
    int cpci_old = _s_old->has_index_u4() ? _s_old->get_index_u4() : _s_old->get_index_u2_cpcache();
    int cpci_new = _s_new->has_index_u4() ? _s_new->get_index_u4() : _s_new->get_index_u2_cpcache();
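    // invokedynamic operands are 4 bytes wide (has_index_u4); the other bytecodes here
    // carry a 2-byte constant pool cache index.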
    // Check whether the names of classes, field/method names and signatures at these indexes
    // are the same. Indices that really point into the constant pool cache (rather than the
    // constant pool itself) are accepted by the constant pool query routines below.
    if ((_old_cp->klass_ref_at_noresolve(cpci_old) != _new_cp->klass_ref_at_noresolve(cpci_new)) ||
        (_old_cp->name_ref_at(cpci_old) != _new_cp->name_ref_at(cpci_new)) ||
        (_old_cp->signature_ref_at(cpci_old) != _new_cp->signature_ref_at(cpci_new)))
      return false;
    break;
  }

  case Bytecodes::_ldc   : // fall through
  case Bytecodes::_ldc_w : {
    Bytecode_loadconstant* ldc_old = Bytecode_loadconstant_at(_s_old->method(), _s_old->bci());
    Bytecode_loadconstant* ldc_new = Bytecode_loadconstant_at(_s_new->method(), _s_new->bci());
    int cpi_old = ldc_old->pool_index();
    int cpi_new = ldc_new->pool_index();
    constantTag tag_old = _old_cp->tag_at(cpi_old);
    constantTag tag_new = _new_cp->tag_at(cpi_new);
    if (tag_old.is_int() || tag_old.is_float()) {
      if (tag_old.value() != tag_new.value())
        return false;
      if (tag_old.is_int()) {
        if (_old_cp->int_at(cpi_old) != _new_cp->int_at(cpi_new))
          return false;
      } else {
        // Use jint_cast to compare the bits rather than numerical values.
        // This makes a difference for NaN constants.
        if (jint_cast(_old_cp->float_at(cpi_old)) != jint_cast(_new_cp->float_at(cpi_new)))
          return false;
      }
    } else if (tag_old.is_string() || tag_old.is_unresolved_string()) {
      if (! (tag_new.is_unresolved_string() || tag_new.is_string()))
        return false;
      if (strcmp(_old_cp->string_at_noresolve(cpi_old),
                 _new_cp->string_at_noresolve(cpi_new)) != 0)
        return false;
    } else if (tag_old.is_klass() || tag_old.is_unresolved_klass()) {
      // tag_old should be klass - 4881222
      if (! (tag_new.is_unresolved_klass() || tag_new.is_klass()))
        return false;
      if (_old_cp->klass_at_noresolve(cpi_old) !=
          _new_cp->klass_at_noresolve(cpi_new))
        return false;
    } else if (tag_old.is_method_type() && tag_new.is_method_type()) {
      int mti_old = _old_cp->method_type_index_at(cpi_old);
      int mti_new = _new_cp->method_type_index_at(cpi_new);
      if ((_old_cp->symbol_at(mti_old) != _new_cp->symbol_at(mti_new)))
        return false;
    } else if (tag_old.is_method_handle() && tag_new.is_method_handle()) {
      if (_old_cp->method_handle_ref_kind_at(cpi_old) !=
          _new_cp->method_handle_ref_kind_at(cpi_new))
        return false;
      int mhi_old = _old_cp->method_handle_index_at(cpi_old);
      int mhi_new = _new_cp->method_handle_index_at(cpi_new);
      if ((_old_cp->uncached_klass_ref_at_noresolve(mhi_old) != _new_cp->uncached_klass_ref_at_noresolve(mhi_new)) ||
          (_old_cp->uncached_name_ref_at(mhi_old) != _new_cp->uncached_name_ref_at(mhi_new)) ||
          (_old_cp->uncached_signature_ref_at(mhi_old) != _new_cp->uncached_signature_ref_at(mhi_new)))
        return false;
    } else {
      return false;  // unknown tag
    }
    break;
  }

  case Bytecodes::_ldc2_w : {
    u2 cpi_old = _s_old->get_index_u2();
    u2 cpi_new = _s_new->get_index_u2();
    constantTag tag_old = _old_cp->tag_at(cpi_old);
    constantTag tag_new = _new_cp->tag_at(cpi_new);
    if (tag_old.value() != tag_new.value())
      return false;
    if (tag_old.is_long()) {
      if (_old_cp->long_at(cpi_old) != _new_cp->long_at(cpi_new))
        return false;
    } else {
      // Use jlong_cast to compare the bits rather than numerical values.
      // This makes a difference for NaN constants.
      if (jlong_cast(_old_cp->double_at(cpi_old)) != jlong_cast(_new_cp->double_at(cpi_new)))
        return false;
    }
    break;
  }

  case Bytecodes::_bipush :
    if (_s_old->bcp()[1] != _s_new->bcp()[1])
      return false;
    break;

  case Bytecodes::_sipush    :
    if (_s_old->get_index_u2() != _s_new->get_index_u2())
      return false;
    break;

  case Bytecodes::_aload  : // fall through
  case Bytecodes::_astore : // fall through
  case Bytecodes::_dload  : // fall through
  case Bytecodes::_dstore : // fall through
  case Bytecodes::_fload  : // fall through
  case Bytecodes::_fstore : // fall through
  case Bytecodes::_iload  : // fall through
  case Bytecodes::_istore : // fall through
  case Bytecodes::_lload  : // fall through
  case Bytecodes::_lstore : // fall through
  case Bytecodes::_ret    :
    if (_s_old->is_wide() != _s_new->is_wide())
      return false;
    if (_s_old->get_index() != _s_new->get_index())
      return false;
    break;

  case Bytecodes::_goto      : // fall through
  case Bytecodes::_if_acmpeq : // fall through
  case Bytecodes::_if_acmpne : // fall through
  case Bytecodes::_if_icmpeq : // fall through
  case Bytecodes::_if_icmpne : // fall through
  case Bytecodes::_if_icmplt : // fall through
  case Bytecodes::_if_icmpge : // fall through
  case Bytecodes::_if_icmpgt : // fall through
  case Bytecodes::_if_icmple : // fall through
  case Bytecodes::_ifeq      : // fall through
  case Bytecodes::_ifne      : // fall through
  case Bytecodes::_iflt      : // fall through
  case Bytecodes::_ifge      : // fall through
  case Bytecodes::_ifgt      : // fall through
  case Bytecodes::_ifle      : // fall through
  case Bytecodes::_ifnonnull : // fall through
  case Bytecodes::_ifnull    : // fall through
  case Bytecodes::_jsr       : {
    int old_ofs = _s_old->bytecode()->get_offset_s2(c_old);
    int new_ofs = _s_new->bytecode()->get_offset_s2(c_new);
    if (_switchable_test) {
      int old_dest = _s_old->bci() + old_ofs;
      int new_dest = _s_new->bci() + new_ofs;
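      // Backward branches (negative offset) must land on locations already verified to
      // correspond; forward branch targets are collected in _fwd_jmps and checked later.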
      if (old_ofs < 0 && new_ofs < 0) {
        if (! _bci_map->old_and_new_locations_same(old_dest, new_dest))
          return false;
      } else if (old_ofs > 0 && new_ofs > 0) {
        _fwd_jmps->append(old_dest);
        _fwd_jmps->append(new_dest);
      } else {
        return false;
      }
    } else {
      if (old_ofs != new_ofs)
        return false;
    }
    break;
  }

  case Bytecodes::_iinc :
    if (_s_old->is_wide() != _s_new->is_wide())
      return false;
    if (! _s_old->is_wide()) {
      // We could use get_index_u1 and get_constant_u1, but it's simpler to grab both bytes at once:
      if (Bytes::get_Java_u2(_s_old->bcp() + 1) != Bytes::get_Java_u2(_s_new->bcp() + 1))
        return false;
    } else {
      // We could use get_index_u2 and get_constant_u2, but it's simpler to grab all four bytes at once:
      if (Bytes::get_Java_u4(_s_old->bcp() + 1) != Bytes::get_Java_u4(_s_new->bcp() + 1))
        return false;
    }
    break;

  case Bytecodes::_goto_w : // fall through
  case Bytecodes::_jsr_w  : {
    int old_ofs = _s_old->bytecode()->get_offset_s4(c_old);
    int new_ofs = _s_new->bytecode()->get_offset_s4(c_new);
    if (_switchable_test) {
      int old_dest = _s_old->bci() + old_ofs;
      int new_dest = _s_new->bci() + new_ofs;
      if (old_ofs < 0 && new_ofs < 0) {
        if (! _bci_map->old_and_new_locations_same(old_dest, new_dest))
          return false;
      } else if (old_ofs > 0 && new_ofs > 0) {
        _fwd_jmps->append(old_dest);
        _fwd_jmps->append(new_dest);
      } else {
        return false;
      }
    } else {
      if (old_ofs != new_ofs)
        return false;
    }
    break;
  }

  case Bytecodes::_lookupswitch : // fall through
  case Bytecodes::_tableswitch  : {
    if (_switchable_test) {
      address aligned_bcp_old = (address) round_to((intptr_t)_s_old->bcp() + 1, jintSize);
      address aligned_bcp_new = (address) round_to((intptr_t)_s_new->bcp() + 1, jintSize);
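      // Switch operands start at the next 4-byte boundary after the opcode: the default
      // offset, then npairs (lookupswitch) or lo/hi bounds (tableswitch), then the table.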
      int default_old = (int) Bytes::get_Java_u4(aligned_bcp_old);
      int default_new = (int) Bytes::get_Java_u4(aligned_bcp_new);
      _fwd_jmps->append(_s_old->bci() + default_old);
      _fwd_jmps->append(_s_new->bci() + default_new);
      if (c_old == Bytecodes::_lookupswitch) {
        int npairs_old = (int) Bytes::get_Java_u4(aligned_bcp_old + jintSize);
        int npairs_new = (int) Bytes::get_Java_u4(aligned_bcp_new + jintSize);
        if (npairs_old != npairs_new)
          return false;
        for (int i = 0; i < npairs_old; i++) {
          int match_old = (int) Bytes::get_Java_u4(aligned_bcp_old + (2+2*i)*jintSize);
          int match_new = (int) Bytes::get_Java_u4(aligned_bcp_new + (2+2*i)*jintSize);
          if (match_old != match_new)
            return false;
          int ofs_old = (int) Bytes::get_Java_u4(aligned_bcp_old + (2+2*i+1)*jintSize);
          int ofs_new = (int) Bytes::get_Java_u4(aligned_bcp_new + (2+2*i+1)*jintSize);
          _fwd_jmps->append(_s_old->bci() + ofs_old);
          _fwd_jmps->append(_s_new->bci() + ofs_new);
        }
      } else if (c_old == Bytecodes::_tableswitch) {
        int lo_old = (int) Bytes::get_Java_u4(aligned_bcp_old + jintSize);
        int lo_new = (int) Bytes::get_Java_u4(aligned_bcp_new + jintSize);
        if (lo_old != lo_new)
          return false;
        int hi_old = (int) Bytes::get_Java_u4(aligned_bcp_old + 2*jintSize);
        int hi_new = (int) Bytes::get_Java_u4(aligned_bcp_new + 2*jintSize);
        if (hi_old != hi_new)
          return false;
        for (int i = 0; i < hi_old - lo_old + 1; i++) {
          int ofs_old = (int) Bytes::get_Java_u4(aligned_bcp_old + (3+i)*jintSize);
          int ofs_new = (int) Bytes::get_Java_u4(aligned_bcp_new + (3+i)*jintSize);
          _fwd_jmps->append(_s_old->bci() + ofs_old);
          _fwd_jmps->append(_s_new->bci() + ofs_new);
        }
      }
    } else { // !_switchable_test, can use fast rough compare
      int len_old = _s_old->instruction_size();
      int len_new = _s_new->instruction_size();
      if (len_old != len_new)
        return false;
      if (memcmp(_s_old->bcp(), _s_new->bcp(), len_old) != 0)
        return false;
    }
    break;
  }
  }

  return true;
}
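
The jint_cast / jlong_cast comparisons above deliberately compare bit patterns: NaN constants never compare equal numerically, even when their encodings are identical. A standalone illustration (plain C++, not the HotSpot cast helpers):

#include <cstdint>
#include <cstdio>
#include <cstring>
#include <limits>

// Reinterpret a float's bits as a 32-bit integer (the role jint_cast plays above).
static uint32_t float_bits(float f) {
  uint32_t bits;
  std::memcpy(&bits, &f, sizeof(bits));
  return bits;
}

int main() {
  float a = std::numeric_limits<float>::quiet_NaN();
  float b = a;  // identical bit pattern
  std::printf("a == b            : %d\n", a == b ? 1 : 0);                          // 0: NaN never equals NaN numerically
  std::printf("bits(a) == bits(b): %d\n", float_bits(a) == float_bits(b) ? 1 : 0);  // 1: the encodings match
  return 0;
}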