Example #1
// Rewrite a classfile-order CP index into a native-order CPC index.
int Rewriter::rewrite_member_reference(address bcp, int offset) {
  address p = bcp + offset;
  int  cp_index    = Bytes::get_Java_u2(p);
  int  cache_index = cp_entry_to_cp_cache(cp_index);
  Bytes::put_native_u2(p, cache_index);
  return cp_index;
}
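
Example #1 turns on the distinction between class-file byte order and host byte order: Bytes::get_Java_u2 reads the big-endian u2 operand as the class file stores it, while Bytes::put_native_u2 writes the cache index in host order so the interpreter can load it directly. A minimal standalone sketch of that distinction follows; the helper names are stand-ins, not HotSpot's Bytes:: API:

#include <cassert>
#include <cstdint>
#include <cstring>

// Stand-ins for the two accessors used above; illustrative only.
static uint16_t get_java_u2(const uint8_t* p) {    // big-endian, class-file order
  return (uint16_t)((p[0] << 8) | p[1]);
}
static void put_native_u2(uint8_t* p, uint16_t v) { // host order, for direct loads
  std::memcpy(p, &v, sizeof v);
}

int main() {
  uint8_t operand[2] = {0x00, 0x2A};   // CP index 42, as a class file stores it
  assert(get_java_u2(operand) == 42);
  put_native_u2(operand, 3);           // overwrite with a cache index, host order
  uint16_t back;
  std::memcpy(&back, operand, sizeof back);
  assert(back == 3);
  return 0;
}
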
Example #2
// Creates a constant pool cache given a CPC map
void Rewriter::make_constant_pool_cache(TRAPS) {
  const int length = _cp_cache_map.length();
  constantPoolCacheOop cache =
      oopFactory::new_constantPoolCache(length, CHECK);
  No_Safepoint_Verifier nsv;
  cache->initialize(_cp_cache_map);

  // Don't bother with the next pass if there is no JVM_CONSTANT_InvokeDynamic.
  if (_have_invoke_dynamic) {
    for (int i = 0; i < length; i++) {
      int pool_index = cp_cache_entry_pool_index(i);
      if (pool_index >= 0 &&
          _pool->tag_at(pool_index).is_invoke_dynamic()) {
        int bsm_index = _pool->invoke_dynamic_bootstrap_method_ref_index_at(pool_index);
        assert(_pool->tag_at(bsm_index).is_method_handle(), "must be a MH constant");
        // There is a CP cache entry holding the BSM for these calls.
        int bsm_cache_index = cp_entry_to_cp_cache(bsm_index);
        cache->entry_at(i)->initialize_bootstrap_method_index_in_cache(bsm_cache_index);
      }
    }
  }

  _pool->set_cache(cache);
  cache->set_constant_pool(_pool());
}
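
In the invokedynamic pass above, each indy constant references a bootstrap-method MethodHandle elsewhere in the pool; the rewriter looks up the cache slot already allocated for that MethodHandle constant and records it in the indy's own cache entry. A toy model of that wiring, using plain vectors rather than HotSpot types (all names below are assumptions for illustration):

#include <cassert>
#include <vector>

int main() {
  // Toy pool: slot 2 is an invokedynamic whose BSM MethodHandle sits in slot 5.
  std::vector<int> bsm_ref_of = {-1, -1, 5, -1, -1, -1};

  std::vector<int> cp_to_cache(6, -1), cache_to_cp;
  auto add_cache = [&](int cp) {
    cache_to_cp.push_back(cp);
    return cp_to_cache[cp] = (int)cache_to_cp.size() - 1;
  };
  int bsm_cache  = add_cache(5);   // the BSM constant got a cache slot earlier
  int indy_cache = add_cache(2);   // so did the indy entry itself

  // The loop in Example #2: indy cache entry -> pool index -> BSM -> BSM cache slot.
  int pool_index = cache_to_cp[indy_cache];
  int found      = cp_to_cache[bsm_ref_of[pool_index]];
  assert(found == bsm_cache);      // the indy cache entry would store this index
  return 0;
}
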
Example #3
 int add_cp_cache_entry(int cp_index) {
   assert(_pool->tag_at(cp_index).value() != JVM_CONSTANT_InvokeDynamic, "use indy version");
   assert(_first_iteration_cp_cache_limit == -1, "do not add cache entries after first iteration");
   int cache_index = add_map_entry(cp_index, &_cp_map, &_cp_cache_map);
   assert(cp_entry_to_cp_cache(cp_index) == cache_index, "");
   assert(cp_cache_entry_pool_index(cache_index) == cp_index, "");
   return cache_index;
 }
Example #4
 int add_cp_cache_entry(int cp_index) {
   assert((cp_index & _secondary_entry_tag) == 0, "bad tag");
   assert(_cp_map[cp_index] == -1, "not twice on same cp_index");
   int cache_index = _cp_cache_map.append(cp_index);
   _cp_map.at_put(cp_index, cache_index);
   assert(cp_entry_to_cp_cache(cp_index) == cache_index, "");
   return cache_index;
 }
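
Examples #3 and #4 are two generations of the same helper (the newer one delegates to add_map_entry); both maintain a pair of mutually inverse maps, which the trailing asserts check. A self-contained model of that invariant, using plain vectors in place of HotSpot's intArray/intStack:

#include <cassert>
#include <vector>

// cp_map:       constant-pool index -> cache index (-1 if none yet)
// cp_cache_map: cache index         -> constant-pool index
static int add_cp_cache_entry(std::vector<int>& cp_map,
                              std::vector<int>& cp_cache_map,
                              int cp_index) {
  assert(cp_map[cp_index] == -1 && "not twice on same cp_index");
  int cache_index = (int)cp_cache_map.size();   // append, as intStack::append does
  cp_cache_map.push_back(cp_index);
  cp_map[cp_index] = cache_index;               // at_put
  return cache_index;
}

int main() {
  std::vector<int> cp_map(16, -1), cp_cache_map;
  int ci = add_cp_cache_entry(cp_map, cp_cache_map, 7);
  assert(cp_map[7] == ci);         // cp_entry_to_cp_cache(7)       == ci
  assert(cp_cache_map[ci] == 7);   // cp_cache_entry_pool_index(ci) == 7
  return 0;
}
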
Example #5
// Rewrite a classfile-order CP index into a native-order CPC index.
void Rewriter::rewrite_member_reference(address bcp, int offset, bool reverse) {
  address p = bcp + offset;
  if (!reverse) {
    int  cp_index    = Bytes::get_Java_u2(p);
    int  cache_index = cp_entry_to_cp_cache(cp_index);
    Bytes::put_native_u2(p, cache_index);
  } else {
    int cache_index = Bytes::get_native_u2(p);
    int pool_index = cp_cache_entry_pool_index(cache_index);
    Bytes::put_Java_u2(p, pool_index);
  }
}
Example #6
// Rewrite a classfile-order CP index into a native-order CPC index.
void Rewriter::rewrite_member_reference(address bcp, int offset, bool reverse) {
  address p = bcp + offset;
  if (!reverse) {
    int  cp_index    = Bytes::get_Java_u2(p);
    int  cache_index = cp_entry_to_cp_cache(cp_index);
    Bytes::put_native_u2(p, cache_index);
    if (!_method_handle_invokers.is_empty())
      maybe_rewrite_invokehandle(p - 1, cp_index, reverse);
  } else {
    int cache_index = Bytes::get_native_u2(p);
    int pool_index = cp_cache_entry_pool_index(cache_index);
    Bytes::put_Java_u2(p, pool_index);
    if (!_method_handle_invokers.is_empty())
      maybe_rewrite_invokehandle(p - 1, pool_index, reverse);
  }
}
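
The reverse branch in Examples #5 and #6 exists so a rewrite can be undone, for instance to restore the original bytecodes if rewriting has to be rolled back: rewriting forward and then in reverse must reproduce the original big-endian operand. A round-trip sketch with stand-in byte accessors (again not HotSpot's Bytes:: API):

#include <cassert>
#include <cstdint>
#include <cstring>

static uint16_t get_java_u2(const uint8_t* p)         { return (uint16_t)((p[0] << 8) | p[1]); }
static void     put_java_u2(uint8_t* p, uint16_t v)   { p[0] = (uint8_t)(v >> 8); p[1] = (uint8_t)v; }
static uint16_t get_native_u2(const uint8_t* p)       { uint16_t v; std::memcpy(&v, p, 2); return v; }
static void     put_native_u2(uint8_t* p, uint16_t v) { std::memcpy(p, &v, 2); }

int main() {
  uint8_t operand[2];
  put_java_u2(operand, 42);                         // original CP index in the bytecode
  int cp_to_cache[64] = {0}; cp_to_cache[42] = 3;   // toy index maps
  int cache_to_cp[8]  = {0}; cache_to_cp[3]  = 42;

  // Forward: CP index (big-endian) -> cache index (native order).
  put_native_u2(operand, (uint16_t)cp_to_cache[get_java_u2(operand)]);
  // Reverse: cache index (native order) -> CP index (big-endian).
  put_java_u2(operand, (uint16_t)cache_to_cp[get_native_u2(operand)]);
  assert(get_java_u2(operand) == 42);               // round trip restores the original
  return 0;
}
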
Example #7
// Rewrite some ldc bytecodes to _fast_aldc
void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide,
                                 bool reverse) {
  if (!reverse) {
    assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "not ldc bytecode");
    address p = bcp + offset;
    int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
    constantTag tag = _pool->tag_at(cp_index).value();
    if (tag.is_method_handle() || tag.is_method_type()) {
      int cache_index = cp_entry_to_cp_cache(cp_index);
      if (is_wide) {
        (*bcp) = Bytecodes::_fast_aldc_w;
        assert(cache_index == (u2)cache_index, "index overflow");
        Bytes::put_native_u2(p, cache_index);
      } else {
        (*bcp) = Bytecodes::_fast_aldc;
        assert(cache_index == (u1)cache_index, "index overflow");
        (*p) = (u1)cache_index;
      }
    }
  } else {
    Bytecodes::Code rewritten_bc =
              (is_wide ? Bytecodes::_fast_aldc_w : Bytecodes::_fast_aldc);
    if ((*bcp) == rewritten_bc) {
      address p = bcp + offset;
      int cache_index = is_wide ? Bytes::get_native_u2(p) : (u1)(*p);
      int pool_index = cp_cache_entry_pool_index(cache_index);
      if (is_wide) {
        (*bcp) = Bytecodes::_ldc_w;
        assert(pool_index == (u2)pool_index, "index overflow");
        Bytes::put_Java_u2(p, pool_index);
      } else {
        (*bcp) = Bytecodes::_ldc;
        assert(pool_index == (u1)pool_index, "index overflow");
        (*p) = (u1)pool_index;
      }
    }
  }
}
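
Example #7's overflow asserts matter because plain ldc has a one-byte operand while ldc_w has two: the rewritten cache index must still fit the operand width it replaces. A minimal illustration of the fits-in-width check those asserts perform (the u1/u2 typedefs are assumed to match HotSpot's):

#include <cassert>
#include <cstdint>

typedef uint8_t  u1;   // one-byte operand: plain ldc / _fast_aldc
typedef uint16_t u2;   // two-byte operand: ldc_w / _fast_aldc_w

int main() {
  int narrow_ok = 0xFF,   narrow_bad = 0x100;
  int wide_ok   = 0xFFFF, wide_bad   = 0x10000;
  assert(narrow_ok  == (u1)narrow_ok);    // fits the narrow form
  assert(narrow_bad != (u1)narrow_bad);   // would overflow: needs the wide form
  assert(wide_ok    == (u2)wide_ok);      // fits ldc_w
  assert(wide_bad   != (u2)wide_bad);     // would overflow even the wide form
  return 0;
}
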