void CompiledMethod::shrink(jint code_size, jint relocation_size) {
  // Shrinks this compiled method in place once final code and relocation
  // sizes are known: the relocation segment is copied down so it sits
  // immediately after the code, and the now-unused tail of the object is
  // handed back to the heap as a dummy filler object (via
  // Universe::shrink_object).
  //
  // The compiled method object will generally not be the last object in
  // the heap, since the compiler allocates other objects and GC might
  // have occurred. However, if the GC always does sliding compaction
  // and the compiler *guarantees* not to hold on to any allocated
  // object other than the compiled method, we could simply move the
  // top of the object heap down!

  // Copy the relocation segment down to its final position right after
  // the code. NOTE: ordering matters — this copy must happen before the
  // object is shrunk below.
  void* src = field_base(end_offset() - relocation_size);
  void* dst = field_base(base_offset() + code_size);
  GUARANTEE(src >= dst, "should be copying down");
  jvm_memmove(dst, src, relocation_size); // possibly overlapping regions

  // Shrink compiled method object to its final allocation size, then
  // record the new combined (code + relocation) payload size in the
  // object header.
  size_t new_size = CompiledMethodDesc::allocation_size(code_size + relocation_size);
  Universe::shrink_object(this, new_size);
  ((CompiledMethodDesc*) obj())->set_size(code_size + relocation_size);
  GUARANTEE(object_size() == new_size, "invalid shrunk size");
}
bool CompiledMethod::expand_compiled_code_space(int delta, int relocation_size) { if (ObjectHeap::expand_current_compiled_method(delta)) { if (Verbose) { TTY_TRACE_CR(("Expanding compiled method from %d to %d bytes", size(), size() + delta)); } void* src = field_base(end_offset() - relocation_size); void* dst = DERIVED(void*, src, delta); GUARANTEE(src < dst, "should be copying up"); jvm_memmove(dst, src, relocation_size); // possibly overlapping regions // It's probably OK only to clear dst[-1], but let's just make sure. jvm_memset(src, 0, delta); ((CompiledMethodDesc*) obj())->set_size(size() + delta); if (VerifyGC > 2) { ObjectHeap::verify(); } return true; } else { return false;
// Raw pointer to the first byte of this object's data area.
jubyte* base_address() {
  return (jubyte*) field_base( base_offset() );
}
// Read juint from stream juint read_Java_u4(int& index) { jint offset = offset_from_byte_index(index); index += sizeof(juint); return Bytes::get_Java_u4((address)field_base(offset)); }
// Read jushort from stream jushort read_native_u2(int& index) { jint offset = offset_from_byte_index(index); index += sizeof(jushort); return Bytes::get_native_u2((address)field_base(offset)); }
// Address of the jdouble field at the given byte offset in this object.
inline jdouble* oopDesc::double_field_addr(int offset) const {
  return (jdouble*) field_base(offset);
}
// Address of the jfloat field at the given byte offset in this object.
inline jfloat* oopDesc::float_field_addr(int offset) const {
  return (jfloat*) field_base(offset);
}
// Address of the jlong field at the given byte offset in this object.
inline jlong* oopDesc::long_field_addr(int offset) const {
  return (jlong*) field_base(offset);
}
// Address of the jshort field at the given byte offset in this object.
inline jshort* oopDesc::short_field_addr(int offset) const {
  return (jshort*) field_base(offset);
}
// Address of the jint field at the given byte offset in this object.
inline jint* oopDesc::int_field_addr(int offset) const {
  return (jint*) field_base(offset);
}
// Address of the jboolean field at the given byte offset in this object.
inline jboolean* oopDesc::bool_field_addr(int offset) const {
  return (jboolean*) field_base(offset);
}
// Address of the jchar field at the given byte offset in this object.
inline jchar* oopDesc::char_field_addr(int offset) const {
  return (jchar*) field_base(offset);
}
// Address of the jbyte field at the given byte offset in this object.
inline jbyte* oopDesc::byte_field_addr(int offset) const {
  return (jbyte*) field_base(offset);
}
// Address of the object-reference field at the given byte offset in
// this object.
inline oop* oopDesc::obj_field_addr(int offset) const {
  return (oop*) field_base(offset);
}
// Address of the raw address-valued field at the given byte offset in
// this object.
inline address* oopDesc::address_field_addr(int offset) const {
  return (address*) field_base(offset);
}
// Typed address of the field at the given byte offset in this object;
// generic counterpart of the non-template *_field_addr accessors.
template <class T>
inline T* oopDesc::obj_field_addr(int offset) const {
  return (T*) field_base(offset);
}