// Serial full-GC marking: push the three embedded oop fields of a
// constMethodOop onto the MarkSweep marking stack.
void constMethodKlass::oop_follow_contents(oop obj) {
  assert (obj->is_constMethod(), "object must be constMethod");
  constMethodOop cm_obj = constMethodOop(obj);
  MarkSweep::mark_and_push(cm_obj->adr_method());
  MarkSweep::mark_and_push(cm_obj->adr_stackmap_data());
  MarkSweep::mark_and_push(cm_obj->adr_exception_table());
  // Performance tweak: the klass pointer is deliberately not traced,
  // because Universe::constMethodKlassObj never moves.
}
// Parallel-compaction marking: push the three embedded oop fields of a
// constMethodOop onto the given compaction manager's marking stack.
void constMethodKlass::oop_follow_contents(ParCompactionManager* cm, oop obj) {
  assert (obj->is_constMethod(), "object must be constMethod");
  constMethodOop cm_obj = constMethodOop(obj);
  PSParallelCompact::mark_and_push(cm, cm_obj->adr_method());
  PSParallelCompact::mark_and_push(cm, cm_obj->adr_stackmap_data());
  PSParallelCompact::mark_and_push(cm, cm_obj->adr_exception_table());
  // Performance tweak: the klass pointer is deliberately not traced,
  // because Universe::constMethodKlassObj never moves.
}
// NOTE(review): this appears to be the per-object callback of a closure used
// while classifying heap objects for a read-only region (pre-marking objects
// so that GC marking can skip them) — confirm against the enclosing class,
// which is outside this view.
//
// Marks constMethod objects (plus their stackmap and exception-table arrays),
// and for klass objects marks the sub-arrays that are safe to treat as
// read-only (secondary supers, interfaces, fields, annotations, ...).
void do_object(oop obj) {
  // Mark all constMethod objects.
  if (obj->is_constMethod()) {
    mark_object(obj);
    mark_object(constMethodOop(obj)->stackmap_data());
    // Exception tables are needed by ci code during compilation.
    mark_object(constMethodOop(obj)->exception_table());
  }
  // Mark objects referenced by klass objects which are read-only.
  else if (obj->is_klass()) {
    Klass* k = Klass::cast((klassOop)obj);
    mark_object(k->secondary_supers());
    // The METHODS() OBJARRAYS CANNOT BE MADE READ-ONLY, even though
    // it is never modified. Otherwise, they will be pre-marked; the
    // GC marking phase will skip them; and by skipping them will fail
    // to mark the methods objects referenced by the array.
    if (obj->blueprint()->oop_is_instanceKlass()) {
      instanceKlass* ik = instanceKlass::cast((klassOop)obj);
      mark_object(ik->method_ordering());
      mark_object(ik->local_interfaces());
      mark_object(ik->transitive_interfaces());
      mark_object(ik->fields());
      mark_object(ik->class_annotations());
      // Annotation arrays are arrays-of-arrays; mark them recursively,
      // skipping klass pointers.
      mark_object_recursive_skipping_klasses(ik->fields_annotations());
      mark_object_recursive_skipping_klasses(ik->methods_annotations());
      mark_object_recursive_skipping_klasses(ik->methods_parameter_annotations());
      mark_object_recursive_skipping_klasses(ik->methods_default_annotations());
      typeArrayOop inner_classes = ik->inner_classes();
      // inner_classes may be absent (NULL) for classes without inner classes.
      if (inner_classes != NULL) {
        mark_object(inner_classes);
      }
    }
  }
}
// Verifies the invariants of a constMethodOop: perm-space residence of the
// object and its referents, and the internal layout of the variable-sized
// tail (compressed line number table, checked-exceptions table, local
// variable table).
void constMethodKlass::oop_verify_on(oop obj, outputStream* st) {
  Klass::oop_verify_on(obj, st);
  guarantee(obj->is_constMethod(), "object must be constMethod");
  constMethodOop m = constMethodOop(obj);
  guarantee(m->is_perm(), "should be in permspace");

  // Verification can occur during oop construction before the method or
  // other fields have been initialized.
  if (!obj->partially_loaded()) {
    guarantee(m->method()->is_perm(), "should be in permspace");
    guarantee(m->method()->is_method(), "should be method");
    // stackmap_data is optional and may legitimately be NULL.
    typeArrayOop stackmap_data = m->stackmap_data();
    guarantee(stackmap_data == NULL || stackmap_data->is_perm(), "should be in permspace");
    guarantee(m->exception_table()->is_perm(), "should be in permspace");
    guarantee(m->exception_table()->is_typeArray(), "should be type array");

    // The compressed tables live between the end of the bytecodes and the
    // end of the object; check each table stays within those bounds.
    address m_end = (address)((oop*) m + m->size());
    address compressed_table_start = m->code_end();
    guarantee(compressed_table_start <= m_end, "invalid method layout");
    address compressed_table_end = compressed_table_start;
    // Verify line number table
    if (m->has_linenumber_table()) {
      CompressedLineNumberReadStream stream(m->compressed_linenumber_table());
      while (stream.read_pair()) {
        guarantee(stream.bci() >= 0 && stream.bci() <= m->code_size(), "invalid bci in line number table");
      }
      // stream.position() is the byte length of the compressed table.
      compressed_table_end += stream.position();
    }
    guarantee(compressed_table_end <= m_end, "invalid method layout");
    // Verify checked exceptions and local variable tables
    if (m->has_checked_exceptions()) {
      u2* addr = m->checked_exceptions_length_addr();
      guarantee(*addr > 0 && (address) addr >= compressed_table_end && (address) addr < m_end, "invalid method layout");
    }
    if (m->has_localvariable_table()) {
      u2* addr = m->localvariable_table_length_addr();
      guarantee(*addr > 0 && (address) addr >= compressed_table_end && (address) addr < m_end, "invalid method layout");
    }
    // Check compressed_table_end relative to uncompressed_table_start
    // (the uncompressed tables are laid out from the end of the object
    // backwards; the local variable table, if present, starts lowest).
    u2* uncompressed_table_start;
    if (m->has_localvariable_table()) {
      uncompressed_table_start = (u2*) m->localvariable_table_start();
    } else {
      if (m->has_checked_exceptions()) {
        uncompressed_table_start = (u2*) m->checked_exceptions_start();
      } else {
        uncompressed_table_start = (u2*) m_end;
      }
    }
    // The only allowed slack between the two regions is alignment padding
    // of less than one object-alignment unit.
    int gap = (intptr_t) uncompressed_table_start - (intptr_t) compressed_table_end;
    int max_gap = align_object_size(1)*BytesPerWord;
    guarantee(gap >= 0 && gap < max_gap, "invalid method layout");
  }
}
// Applies the closure to the three embedded oop fields of a constMethodOop.
// Returns the object size in words.
int constMethodKlass::oop_oop_iterate(oop obj, OopClosure* blk) {
  assert (obj->is_constMethod(), "object must be constMethod");
  constMethodOop cm_obj = constMethodOop(obj);
  blk->do_oop(cm_obj->adr_method());
  blk->do_oop(cm_obj->adr_stackmap_data());
  blk->do_oop(cm_obj->adr_exception_table());
  // Use the non-virtual object_size() rather than the virtual
  // size()/oop_size().
  return cm_obj->object_size();
}
// Serial full-GC pointer adjustment: update the three embedded oop fields
// of a constMethodOop to their new locations. Returns size in words.
int constMethodKlass::oop_adjust_pointers(oop obj) {
  assert(obj->is_constMethod(), "should be constMethod");
  constMethodOop cm_obj = constMethodOop(obj);
  MarkSweep::adjust_pointer(cm_obj->adr_method());
  MarkSweep::adjust_pointer(cm_obj->adr_stackmap_data());
  MarkSweep::adjust_pointer(cm_obj->adr_exception_table());
  // Use the non-virtual object_size() rather than the virtual
  // size()/oop_size().
  // Performance tweak: the klass pointer is deliberately not adjusted,
  // because Universe::constMethodKlassObj never moves.
  return cm_obj->object_size();
}
// Debug printing: writes the constMethodOop's method, exception table
// address, and (if present) stackmap data to the given stream.
void constMethodKlass::oop_print_on(oop obj, outputStream* st) {
  ResourceMark rm;
  assert(obj->is_constMethod(), "must be constMethod");
  Klass::oop_print_on(obj, st);
  constMethodOop m = constMethodOop(obj);
  st->print(" - method: " INTPTR_FORMAT " ", (address)m->method());
  m->method()->print_value_on(st);
  st->cr();
  st->print(" - exceptions: " INTPTR_FORMAT "\n", (address)m->exception_table());
  // stackmap data is optional; only printed when present.
  if (m->has_stackmap_table()) {
    st->print(" - stackmap data: ");
    m->stackmap_data()->print_value_on(st);
    st->cr();
  }
}
// Bounded variant of parallel-compaction pointer update: adjusts only the
// embedded oops that fall within [beg_addr, end_addr). Returns size in words.
int constMethodKlass::oop_update_pointers(ParCompactionManager* cm, oop obj,
                                          HeapWord* beg_addr, HeapWord* end_addr) {
  assert(obj->is_constMethod(), "should be constMethod");
  constMethodOop cm_obj = constMethodOop(obj);
  // Clamp the object's embedded-oop block to the requested address window.
  oop* const first = MAX2((oop*)beg_addr, cm_obj->oop_block_beg());
  oop* const limit = MIN2((oop*)end_addr, cm_obj->oop_block_end());
  oop* p = first;
  while (p < limit) {
    PSParallelCompact::adjust_pointer(p);
    ++p;
  }
  return cm_obj->object_size();
}
// Parallel-compaction pointer update: adjusts every embedded oop of a
// constMethodOop. Returns the object size in words.
//
// Fix: removed a stale "#if 0" block that duplicated the loop below as
// three individual adjust_pointer calls — dead code left behind when the
// field-by-field version was replaced by the oop-block loop.
int constMethodKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  assert(obj->is_constMethod(), "should be constMethod");
  constMethodOop cm_oop = constMethodOop(obj);
  // The embedded oop fields form one contiguous block; iterate it directly.
  oop* const beg_oop = cm_oop->oop_block_beg();
  oop* const end_oop = cm_oop->oop_block_end();
  for (oop* cur_oop = beg_oop; cur_oop < end_oop; ++cur_oop) {
    PSParallelCompact::adjust_pointer(cur_oop);
  }
  return cm_oop->object_size();
}
// Applies the closure to each embedded oop field of a constMethodOop that
// lies inside the given memory region. Returns the object size in words.
int constMethodKlass::oop_oop_iterate_m(oop obj, OopClosure* blk, MemRegion mr) {
  assert (obj->is_constMethod(), "object must be constMethod");
  constMethodOop cm_obj = constMethodOop(obj);
  // The three embedded oop fields, visited in declaration order.
  oop* const slots[] = {
    cm_obj->adr_method(),
    cm_obj->adr_stackmap_data(),
    cm_obj->adr_exception_table()
  };
  for (int i = 0; i < 3; i++) {
    if (mr.contains(slots[i])) {
      blk->do_oop(slots[i]);
    }
  }
  // Use the non-virtual object_size() rather than the virtual
  // size()/oop_size().
  // Performance tweak: the klass pointer is deliberately not visited,
  // because Universe::constMethodKlassObj never moves.
  return cm_obj->object_size();
}
// Delegates the concurrent-safety query to the constMethodOop itself.
bool constMethodKlass::oop_is_conc_safe(oop obj) const {
  assert(obj->is_constMethod(), "must be constMethod oop");
  constMethodOop cm_obj = constMethodOop(obj);
  return cm_obj->is_conc_safe();
}
// Delegates the heap-parsability query to the constMethodOop itself.
bool constMethodKlass::oop_is_parsable(oop obj) const {
  assert(obj->is_constMethod(), "must be constMethod oop");
  constMethodOop cm_obj = constMethodOop(obj);
  return cm_obj->object_is_parsable();
}
// Returns the size in words of a constMethodOop, as recorded in the object.
int constMethodKlass::oop_size(oop obj) const {
  assert(obj->is_constMethod(), "must be constMethod oop");
  constMethodOop cm_obj = constMethodOop(obj);
  return cm_obj->object_size();
}
// The exception_table is the last field set when loading an object. void constMethodKlass::oop_set_partially_loaded(oop obj) { assert(obj->is_constMethod(), "object must be klass"); constMethodOop m = constMethodOop(obj); // Temporarily set exception_table to point to self m->set_exception_table((typeArrayOop)obj); }
// Returns true while the object is only partially loaded, i.e. while its
// exception_table still self-references (see oop_set_partially_loaded).
bool constMethodKlass::oop_partially_loaded(oop obj) const {
  assert(obj->is_constMethod(), "object must be klass");
  constMethodOop cm_obj = constMethodOop(obj);
  // check whether exception_table points to self (flag for partially loaded)
  typeArrayOop flag = cm_obj->exception_table();
  return flag == (typeArrayOop)obj;
}
// Debugging aid: identifies what the address x points at, probing in order:
// code cache (interpreter codelets, adapters, stubs, inline-cache buffer,
// vtable stubs, nmethods), the Java heap, JNI handles, Java thread stacks,
// an OS-specific lookup, and finally (with print_pc) a disassembly window.
// Each probe prints its diagnosis and returns on the first match.
static void find(intptr_t x, bool print_pc) {
  address addr = (address)x;

  // 1. Code cache: find_blob_unsafe tolerates addresses not on a blob
  //    boundary (this is a debugging routine).
  CodeBlob* b = CodeCache::find_blob_unsafe(addr);
  if (b != NULL) {
    if (b->is_buffer_blob()) {
      // the interpreter is generated into a buffer blob
      InterpreterCodelet* i = Interpreter::codelet_containing(addr);
      if (i != NULL) {
        i->print();
        return;
      }
      if (Interpreter::contains(addr)) {
        tty->print_cr(INTPTR_FORMAT " is pointing into interpreter code (not bytecode specific)", addr);
        return;
      }
      //
      // NOTE(review): no return here — presumably intentional so the stub
      // probes below still run after printing the adapter; confirm.
      if (AdapterHandlerLibrary::contains(b)) {
        AdapterHandlerLibrary::print_handler(b);
      }
      // the stubroutines are generated into a buffer blob
      StubCodeDesc* d = StubCodeDesc::desc_for(addr);
      if (d != NULL) {
        d->print();
        if (print_pc) tty->cr();
        return;
      }
      if (StubRoutines::contains(addr)) {
        tty->print_cr(INTPTR_FORMAT " is pointing to an (unnamed) stub routine", addr);
        return;
      }
      // the InlineCacheBuffer is using stubs generated into a buffer blob
      if (InlineCacheBuffer::contains(addr)) {
        tty->print_cr(INTPTR_FORMAT " is pointing into InlineCacheBuffer", addr);
        return;
      }
      VtableStub* v = VtableStubs::stub_containing(addr);
      if (v != NULL) {
        v->print();
        return;
      }
    }
    // Compiled-code summary (pc-style output) for nmethods.
    if (print_pc && b->is_nmethod()) {
      ResourceMark rm;
      tty->print("%#p: Compiled ", addr);
      ((nmethod*)b)->method()->print_value_on(tty);
      tty->print(" = (CodeBlob*)" INTPTR_FORMAT, b);
      tty->cr();
      return;
    }
    // Note the nmethod's state before the generic blob print.
    if ( b->is_nmethod()) {
      if (b->is_zombie()) {
        tty->print_cr(INTPTR_FORMAT " is zombie nmethod", b);
      } else if (b->is_not_entrant()) {
        tty->print_cr(INTPTR_FORMAT " is non-entrant nmethod", b);
      }
    }
    b->print();
    return;
  }

  // 2. Java heap: try to find the object containing addr.
  if (Universe::heap()->is_in(addr)) {
    HeapWord* p = Universe::heap()->block_start(addr);
    bool print = false;
    // If we couldn't find it it just may mean that heap wasn't parseable
    // See if we were just given an oop directly
    if (p != NULL && Universe::heap()->block_is_obj(p)) {
      print = true;
    } else if (p == NULL && ((oopDesc*)addr)->is_oop()) {
      p = (HeapWord*) addr;
      print = true;
    }
    if (print) {
      oop(p)->print();
      // If addr points into the middle of a constMethod, also decode the
      // bytecode index and print the method's bytecodes.
      if (p != (HeapWord*)x && oop(p)->is_constMethod() &&
          constMethodOop(p)->contains(addr)) {
        Thread *thread = Thread::current();
        HandleMark hm(thread);
        methodHandle mh (thread, constMethodOop(p)->method());
        if (!mh->is_native()) {
          tty->print_cr("bci_from(%p) = %d; print_codes():", addr, mh->bci_from(address(x)));
          mh->print_codes();
        }
      }
      return;
    }
  } else if (Universe::heap()->is_in_reserved(addr)) {
    tty->print_cr(INTPTR_FORMAT " is an unallocated location in the heap", addr);
    return;
  }

  // 3. JNI handles (global, weak global, thread-local).
  if (JNIHandles::is_global_handle((jobject) addr)) {
    tty->print_cr(INTPTR_FORMAT " is a global jni handle", addr);
    return;
  }
  if (JNIHandles::is_weak_global_handle((jobject) addr)) {
    tty->print_cr(INTPTR_FORMAT " is a weak global jni handle", addr);
    return;
  }
  if (JNIHandleBlock::any_contains((jobject) addr)) {
    tty->print_cr(INTPTR_FORMAT " is a local jni handle", addr);
    return;
  }

  // 4. Java threads: privilege stacks and the thread objects themselves.
  for(JavaThread *thread = Threads::first(); thread; thread = thread->next()) {
    // Check for privilege stack
    if (thread->privileged_stack_top() != NULL && thread->privileged_stack_top()->contains(addr)) {
      tty->print_cr(INTPTR_FORMAT " is pointing into the privilege stack for thread: " INTPTR_FORMAT, addr, thread);
      return;
    }
    // If the addr is a java thread print information about that.
    if (addr == (address)thread) {
      thread->print();
      return;
    }
  }

  // Try an OS specific find
  if (os::find(addr)) {
    return;
  }

  // 5. Last resort: assume native code and disassemble a window around addr
  //    (clamped to the same page to avoid faulting on unmapped memory).
  if (print_pc) {
    tty->print_cr(INTPTR_FORMAT ": probably in C++ code; check debugger", addr);
    Disassembler::decode(same_page(addr-40,addr),same_page(addr+40,addr));
    return;
  }

  tty->print_cr(INTPTR_FORMAT " is pointing to unknown location", addr);
}
// Short version of printing constMethodOop - just print the name of the // method it belongs to. void constMethodKlass::oop_print_value_on(oop obj, outputStream* st) { assert(obj->is_constMethod(), "must be constMethod"); constMethodOop m = constMethodOop(obj); st->print(" const part of method " ); m->method()->print_value_on(st); }