// this is hidden by using the StackFrameStream.) This is used when // doing follow_oops and oops_do. // // 3) The RegisterMap keeps track of the values of callee-saved registers // from frame to frame (hence, the name). For some stack traversal the // values of the callee-saved registers does not matter, e.g., if you // only need the static properies such as frame type, pc, and such. // Updating of the RegisterMap can be turned off by instantiating the // register map as: RegisterMap map(thread, false); class RegisterMap : public StackObj { public: typedef julong LocationValidType; enum { reg_count = CORE_ONLY(RegisterImpl::number_of_registers) NOT_CORE(REG_COUNT), location_valid_type_size = sizeof(LocationValidType)*8, location_valid_size = (reg_count+location_valid_type_size-1)/location_valid_type_size }; private: intptr_t* _location[reg_count]; // Location of registers (intptr_t* looks better than address in the debugger) LocationValidType _location_valid[location_valid_size]; bool _include_argument_oops; // Should include argument_oop marked locations for compiler JavaThread* _thread; // Reference to current thread bool _update_map; // Tells if the register map need to be updated // when traversing the stack intptr_t* _not_at_call_id; // Location of a frame where the pc is not at a call (NULL if no frame exist) public: debug_only(intptr_t* _update_for_id;) // Assert that RegisterMap is not updated twice for same frame RegisterMap(JavaThread *thread, bool update_map = true); RegisterMap(const RegisterMap* map);
// Install the redefinition of a class --
// The original instanceKlass object (k_h) always represents the latest
// version of the respective class. However, during class redefinition we swap
// or replace much of its content with that of the instanceKlass object created
// from the bytes of the redefine (k_h_new). Specifically, k_h points to the new
// constantpool and methods objects, which we take from k_h_new. k_h_new, in turn,
// assumes the role of the previous class version, with the old constantpool and
// methods (taken from k_h) attached to it. k_h links to k_h_new to create a
// linked list of class versions.
//
// NOTE(review): runs inside a VM operation (class is VM_RedefineClasses), so
// presumably the world is stopped while these swaps happen — the pairwise
// old/new exchanges below are not individually atomic and rely on that.
// j_clazz  - JNI handle to the java.lang.Class being redefined (must be non-null)
// k_h_new  - scratch class built from the new class file bytes
// TRAPS    - standard HotSpot exception-passing macro (expands to the current thread)
void VM_RedefineClasses::redefine_single_class(jclass j_clazz,
                                               instanceKlassHandle k_h_new,
                                               TRAPS) {
  // Resolve the JNI handle to the klassOop of the class being redefined.
  oop mirror = JNIHandles::resolve_non_null(j_clazz);
  klassOop k_oop = java_lang_Class::as_klassOop(mirror);
  instanceKlassHandle k_h = instanceKlassHandle(THREAD, k_oop);

  // Remove all breakpoints in methods of this class
  JvmtiBreakpoints& jvmti_breakpoints = JvmtiCurrentBreakpoints::get_jvmti_breakpoints();
  jvmti_breakpoints.clearall_in_class_at_safepoint(k_oop);

  // Deoptimize all compiled code that depends on this class
  // (no-op in the Core build, which has no compiler).
  NOT_CORE(Universe::flush_evol_dependents_on(k_h));

  // Stash old/new state in fields of the VM_RedefineClasses operation so the
  // helper passes below (and adjust_cpool_cache_and_vtable) can reach them.
  _old_methods = k_h->methods();
  _new_methods = k_h_new->methods();
  _evolving_koop = k_oop;
  _old_constants = k_h->constants();

  // flush the cached jmethodID fields for _old_methods
  flush_method_jmethod_id_cache();

  // Patch the indexes into the constantpool from the array of fields of the evolving
  // class. This is required, because the layout of the new constantpool can be different,
  // so old indexes corresponding to field names and signatures can become invalid.
  patch_indexes_for_fields(k_h, k_h_new);

  // Make new constantpool object (and methodOops via it) point to the original class object
  k_h_new->constants()->set_pool_holder(k_h());

  // Replace methods and constantpool
  k_h->set_methods(_new_methods);
  k_h_new->set_methods(_old_methods);

  // To prevent potential GCing of the old methods,
  // and to be able to undo operation easily.
  constantPoolOop old_constants = k_h->constants();
  k_h->set_constants(k_h_new->constants());
  k_h_new->set_constants(old_constants); // See the previous comment.

  check_methods_and_mark_as_old();
  transfer_old_native_function_registrations();

  // Replace inner_classes
  typeArrayOop old_inner_classes = k_h->inner_classes();
  k_h->set_inner_classes(k_h_new->inner_classes());
  k_h_new->set_inner_classes(old_inner_classes);

  // Initialize the vtable and interface table after
  // methods have been rewritten
  {
    ResourceMark rm(THREAD);
    k_h->vtable()->initialize_vtable(THREAD); // No exception can happen here
    k_h->itable()->initialize_itable();
  }

  // Copy the "source file name" attribute from new class version
  k_h->set_source_file_name(k_h_new->source_file_name());

  // Copy the "source debug extension" attribute from new class version
  k_h->set_source_debug_extension(k_h_new->source_debug_extension());

  // Use of javac -g could be different in the old and the new
  // class versions, so reconcile the has_localvariable_table flag.
  if (k_h_new->access_flags().has_localvariable_table() !=
      k_h->access_flags().has_localvariable_table()) {
    AccessFlags flags = k_h->access_flags();
    if (k_h_new->access_flags().has_localvariable_table()) {
      flags.set_has_localvariable_table();
    } else {
      flags.clear_has_localvariable_table();
    }
    k_h->set_access_flags(flags);
  }

  // Replace class annotation fields values
  typeArrayOop old_class_annotations = k_h->class_annotations();
  k_h->set_class_annotations(k_h_new->class_annotations());
  k_h_new->set_class_annotations(old_class_annotations);

  // Replace fields annotation fields values
  objArrayOop old_fields_annotations = k_h->fields_annotations();
  k_h->set_fields_annotations(k_h_new->fields_annotations());
  k_h_new->set_fields_annotations(old_fields_annotations);

  // Replace methods annotation fields values
  objArrayOop old_methods_annotations = k_h->methods_annotations();
  k_h->set_methods_annotations(k_h_new->methods_annotations());
  k_h_new->set_methods_annotations(old_methods_annotations);

  // Replace methods parameter annotation fields values
  objArrayOop old_methods_parameter_annotations = k_h->methods_parameter_annotations();
  k_h->set_methods_parameter_annotations(k_h_new->methods_parameter_annotations());
  k_h_new->set_methods_parameter_annotations(old_methods_parameter_annotations);

  // Replace methods default annotation fields values
  objArrayOop old_methods_default_annotations = k_h->methods_default_annotations();
  k_h->set_methods_default_annotations(k_h_new->methods_default_annotations());
  k_h_new->set_methods_default_annotations(old_methods_default_annotations);

  // Replace major version number of class file
  u2 old_major_version = k_h->major_version();
  k_h->set_major_version(k_h_new->major_version());
  k_h_new->set_major_version(old_major_version);

  // Replace CP indexes for class and name+type of enclosing method
  u2 old_class_idx = k_h->enclosing_method_class_index();
  u2 old_method_idx = k_h->enclosing_method_method_index();
  k_h->set_enclosing_method_indices(
    k_h_new->enclosing_method_class_index(),
    k_h_new->enclosing_method_method_index());
  k_h_new->set_enclosing_method_indices(old_class_idx, old_method_idx);

  // Maintain a linked list of versions of this class.
  // List is in ascending age order. Current version (k_h) is the head.
  if (k_h->has_previous_version()) {
    k_h_new->set_previous_version(k_h->previous_version());
  }
  k_h->set_previous_version(k_h_new);

  // Adjust constantpool caches and vtables for all classes
  // that reference methods of the evolved class.
  SystemDictionary::classes_do(adjust_cpool_cache_and_vtable);

  k_h->set_rewritten_by_redefine(true);
}
// This method contains no policy. You should probably
// be calling invoke() instead.
//
// Performs one full (mark-sweep-compact) collection of the ParallelScavenge
// heap: young gen, old gen, and perm gen. Must be called at a safepoint.
// notify_ref_lock      - out-param, OR-ed with true if discovered references were
//                        enqueued and the reference pending-list lock needs notifying
// clear_all_softrefs   - if true, all soft references are cleared during marking
void PSMarkSweep::invoke_no_policy(bool& notify_ref_lock, bool clear_all_softrefs) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
  assert(ref_processor() != NULL, "Sanity");

  // Bail out entirely if a JNI critical section is holding the GC locker.
  if (GC_locker::is_active()) return;

  ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
  assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity");
  PSYoungGen* young_gen = heap->young_gen();
  PSOldGen* old_gen = heap->old_gen();
  PSPermGen* perm_gen = heap->perm_gen();

  // Increment the invocation count
  heap->increment_total_collections();

  // We need to track unique mark sweep invocations as well.
  _total_invocations++;

  if (PrintHeapAtGC) {
    gclog_or_tty->print_cr(" {Heap before GC invocations=%d:", heap->total_collections());
    Universe::print();
  }

  // Fill in TLABs
  heap->ensure_parseability();

  if (VerifyBeforeGC && heap->total_collections() >= VerifyGCStartAt) {
    HandleMark hm;  // Discard invalid handles created during verification
    tty->print(" VerifyBeforeGC:");
    Universe::verify(true);
  }

  {
    HandleMark hm;
    TraceTime t1("Full GC", PrintGC, true, gclog_or_tty);
    TraceCollectorStats tcs(counters());
    if (TraceGen1Time) accumulated_time()->start();

    // Let the size policy know we're starting
    AdaptiveSizePolicy* size_policy = heap->size_policy();
    size_policy->major_collection_begin();

    // When collecting the permanent generation methodOops may be moving,
    // so we either have to flush all bcp data or convert it into bci.
    NOT_CORE(CodeCache::gc_prologue());
    Threads::gc_prologue();

    // Capture heap size before collection for printing.
    size_t prev_used = heap->used();
    // Capture perm gen size before collection for sizing.
    size_t perm_gen_prev_used = perm_gen->used_in_bytes();

    bool marked_for_unloading = false;

    allocate_stacks();

    NOT_PRODUCT(ref_processor()->verify_no_references_recorded());
    COMPILER2_ONLY(DerivedPointerTable::clear());

    // Start discovering references; the marking phase records them.
    ref_processor()->enable_discovery();

    // The four classic mark-compact phases: mark live objects, compute new
    // addresses, adjust pointers, then move objects (compact).
    mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);
    mark_sweep_phase2();

    // Don't add any more derived pointers during phase3
    COMPILER2_ONLY(assert(DerivedPointerTable::is_active(), "Sanity"));
    COMPILER2_ONLY(DerivedPointerTable::set_active(false));

    mark_sweep_phase3();
    mark_sweep_phase4();

    restore_marks();
    deallocate_stacks();

    // "free at last gc" is calculated from these.
    Universe::set_heap_capacity_at_last_gc(Universe::heap()->capacity());
    Universe::set_heap_used_at_last_gc(Universe::heap()->used());

    bool all_empty = young_gen->eden_space()->is_empty() &&
                     young_gen->from_space()->is_empty() &&
                     young_gen->to_space()->is_empty();

    // Reset the card table over old+perm: clear it wholesale if the young gen
    // is completely empty (nothing to track), otherwise dirty it all so the
    // next young collection rescans conservatively.
    BarrierSet* bs = heap->barrier_set();
    if (bs->is_a(BarrierSet::ModRef)) {
      ModRefBarrierSet* modBS = (ModRefBarrierSet*)bs;
      MemRegion old_mr = heap->old_gen()->reserved();
      MemRegion perm_mr = heap->perm_gen()->reserved();
      assert(old_mr.end() <= perm_mr.start(), "Generations out of order");
      if (all_empty) {
        modBS->clear(MemRegion(old_mr.start(), perm_mr.end()));
      } else {
        modBS->invalidate(MemRegion(old_mr.start(), perm_mr.end()));
      }
    }

    Threads::gc_epilogue();
    NOT_CORE(CodeCache::gc_epilogue());

    COMPILER2_ONLY(DerivedPointerTable::update_pointers());

    notify_ref_lock |= ref_processor()->enqueue_discovered_references();

    // Update time of last GC
    reset_millis_since_last_gc();

    // Let the size policy know we're done
    size_policy->major_collection_end(old_gen->used_in_bytes());

    if (UseAdaptiveSizePolicy) {
      if (PrintAdaptiveSizePolicy) {
        tty->print_cr("AdaptiveSizeStart: collection: %d ",
                      heap->total_collections());
      }

      // Calculate optimal free space amounts
      size_policy->compute_generation_free_space(young_gen->used_in_bytes(),
                                                 old_gen->used_in_bytes(),
                                                 perm_gen->used_in_bytes(),
                                                 true /* full gc */);

      // Resize old and young generations
      old_gen->resize(size_policy->calculated_old_free_size_in_bytes());
      young_gen->resize(size_policy->calculated_eden_size_in_bytes(),
                        size_policy->calculated_survivor_size_in_bytes());

      if (PrintAdaptiveSizePolicy) {
        tty->print_cr("AdaptiveSizeStop: collection: %d ",
                      heap->total_collections());
      }
    }

    // We collected the perm gen, so we'll resize it here.
    perm_gen->compute_new_size(perm_gen_prev_used);

    if (TraceGen1Time) accumulated_time()->stop();

    if (PrintGC) {
      heap->print_heap_change(prev_used);
    }

    heap->update_counters();
  }

  if (VerifyAfterGC && heap->total_collections() >= VerifyGCStartAt) {
    HandleMark hm;  // Discard invalid handles created during verification
    tty->print(" VerifyAfterGC:");
    Universe::verify(false);
  }

  NOT_PRODUCT(ref_processor()->verify_no_references_recorded());

  if (PrintHeapAtGC) {
    gclog_or_tty->print_cr(" Heap after GC invocations=%d:", heap->total_collections());
    Universe::print();
    gclog_or_tty->print("} ");
  }
}