bool DefNewGeneration::expand(size_t bytes) {
  MutexLocker x(ExpandHeap_lock);
  HeapWord* prev_high = (HeapWord*) _virtual_space.high();
  bool success = _virtual_space.expand_by(bytes);
  if (success && ZapUnusedHeapArea) {
    // Mangle newly committed space immediately because it
    // can be done here more simply than after the new
    // spaces have been computed.
    HeapWord* new_high = (HeapWord*) _virtual_space.high();
    MemRegion mangle_region(prev_high, new_high);
    SpaceMangler::mangle_region(mangle_region);
  }

  // Do not attempt an expand to the reserve size.  The
  // request should properly observe the maximum size of
  // the generation so an expand-to-reserve should be
  // unnecessary.  Also, a second expand-to-reserve call
  // can potentially cause an undue expansion.
  // For example, the first expand may fail for unknown reasons,
  // but the second succeeds and expands the heap to its maximum
  // value.
  if (GC_locker::is_active()) {
    if (PrintGC && Verbose) {
      gclog_or_tty->print_cr("Garbage collection disabled, "
        "expanded heap instead");
    }
  }

  return success;
}
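// --- Illustrative sketch (not HotSpot source) -------------------------------
// The ZapUnusedHeapArea path above fills freshly committed memory with a
// recognizable sentinel so that reads of uninitialized heap words stand out
// in a debugger or crash dump.  The sketch below shows that idea in
// isolation; the fill value, word type, and helper names here are
// assumptions for the example, not the VM's actual SpaceMangler code.
#include <cstddef>
#include <cstdint>

namespace sketch {
  typedef uintptr_t HeapWordValue;                  // assumed word type
  const HeapWordValue kBadHeapWord = 0xBAADBABEu;   // assumed sentinel value

  // Fill every word in [start, start + word_count) with the sentinel so a
  // later use-before-initialization is easy to spot.
  void mangle_region(HeapWordValue* start, size_t word_count) {
    for (size_t i = 0; i < word_count; ++i) {
      start[i] = kBadHeapWord;
    }
  }
}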
bool CardGeneration::grow_by(size_t bytes) {
  assert_correct_size_change_locking();
  bool result = _virtual_space.expand_by(bytes);
  if (result) {
    size_t new_word_size =
      heap_word_size(_virtual_space.committed_size());
    MemRegion mr(space()->bottom(), new_word_size);
    // Expand card table
    GenCollectedHeap::heap()->barrier_set()->resize_covered_region(mr);
    // Expand shared block offset array
    _bts->resize(new_word_size);

    // Fix for bug #4668531
    if (ZapUnusedHeapArea) {
      MemRegion mangle_region(space()->end(),
                              (HeapWord*)_virtual_space.high());
      SpaceMangler::mangle_region(mangle_region);
    }

    // Expand space -- also expands space's BOT
    // (which uses (part of) shared array above)
    space()->set_end((HeapWord*)_virtual_space.high());

    // update the space and generation capacity counters
    update_counters();

    size_t new_mem_size = _virtual_space.committed_size();
    size_t old_mem_size = new_mem_size - bytes;
    log_trace(gc, heap)("Expanding %s from " SIZE_FORMAT "K by " SIZE_FORMAT "K to " SIZE_FORMAT "K",
                        name(), old_mem_size/K, bytes/K, new_mem_size/K);
  }
  return result;
}
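// --- Illustrative sketch (not HotSpot source) -------------------------------
// grow_by() recomputes the covered region from the committed size of the
// virtual space: committed bytes are converted to a word count and the
// region [bottom, bottom + new_word_size) is handed to the card table and
// block offset table before the space's new end is published.  The helper
// below mirrors that byte-to-word conversion under assumed names; it is an
// approximation of what heap_word_size() does, not the VM's own definition.
#include <cassert>
#include <cstddef>

namespace sketch {
  const size_t kHeapWordSize = sizeof(void*);   // assumed heap word size

  // Round a byte size up to a whole number of heap words.
  size_t bytes_to_heap_words(size_t byte_size) {
    return (byte_size + kHeapWordSize - 1) / kHeapWordSize;
  }

  void example() {
    // A single byte still occupies one whole heap word.
    assert(bytes_to_heap_words(1) == 1);
    // An exact multiple converts without rounding.
    assert(bytes_to_heap_words(4 * kHeapWordSize) == 4);
  }
}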
Generation::Generation(ReservedSpace rs, size_t initial_size, int level) :
  _level(level),
  _ref_processor(NULL) {
  if (!_virtual_space.initialize(rs, initial_size)) {
    vm_exit_during_initialization("Could not reserve enough space for "
                                  "object heap");
  }
  // Mangle all of the initial generation.
  if (ZapUnusedHeapArea) {
    MemRegion mangle_region((HeapWord*)_virtual_space.low(),
                            (HeapWord*)_virtual_space.high());
    SpaceMangler::mangle_region(mangle_region);
  }
  _reserved = MemRegion((HeapWord*)_virtual_space.low_boundary(),
                        (HeapWord*)_virtual_space.high_boundary());
}
bool OneContigSpaceCardGeneration::grow_by(size_t bytes) {
  assert_locked_or_safepoint(ExpandHeap_lock);
  bool result = _virtual_space.expand_by(bytes);
  if (result) {
    size_t new_word_size =
      heap_word_size(_virtual_space.committed_size());
    MemRegion mr(_the_space->bottom(), new_word_size);
    // Expand card table
    Universe::heap()->barrier_set()->resize_covered_region(mr);
    // Expand shared block offset array
    _bts->resize(new_word_size);

    // Fix for bug #4668531
    if (ZapUnusedHeapArea) {
      MemRegion mangle_region(_the_space->end(),
                              (HeapWord*)_virtual_space.high());
      SpaceMangler::mangle_region(mangle_region);
    }

    // Expand space -- also expands space's BOT
    // (which uses (part of) shared array above)
    _the_space->set_end((HeapWord*)_virtual_space.high());

    // update the space and generation capacity counters
    update_counters();

    if (Verbose && PrintGC) {
      size_t new_mem_size = _virtual_space.committed_size();
      size_t old_mem_size = new_mem_size - bytes;
      gclog_or_tty->print_cr("Expanding %s from " SIZE_FORMAT "K by "
                             SIZE_FORMAT "K to " SIZE_FORMAT "K",
                             name(), old_mem_size/K, bytes/K, new_mem_size/K);
    }
  }
  return result;
}
// NOTE! We need to be careful about resizing. During a GC, multiple
// allocators may be active during heap expansion. If we allow the
// heap resizing to become visible before we have correctly resized
// all heap related data structures, we may cause program failures.
void PSOldGen::post_resize() {
  // First construct a memregion representing the new size
  MemRegion new_memregion((HeapWord*)_virtual_space.low(),
                          (HeapWord*)_virtual_space.high());
  size_t new_word_size = new_memregion.word_size();

  // Block offset table resize FIX ME!!!!!
  //   _bts->resize(new_word_size);
  Universe::heap()->barrier_set()->resize_covered_region(new_memregion);

  // Did we expand?
  if (object_space()->end() < (HeapWord*) _virtual_space.high()) {
    // We need to mangle the newly expanded area. The memregion spans
    // end -> new_end, we assume that top -> end is already mangled.
    // This cannot be safely tested for, as allocation may be taking
    // place.
    MemRegion mangle_region(object_space()->end(),
                            (HeapWord*) _virtual_space.high());
    object_space()->mangle_region(mangle_region);
  }

  // ALWAYS do this last!!
  object_space()->set_end((HeapWord*) _virtual_space.high());

  assert(new_word_size == heap_word_size(object_space()->capacity_in_bytes()),
         "Sanity");
}
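// --- Illustrative sketch (not HotSpot source) -------------------------------
// post_resize() stresses that object_space()->set_end() must happen last:
// allocators may be running, and they must never observe an end() that runs
// ahead of the card table coverage and mangling that back it.  The snippet
// below shows that publish-last idea using std::atomic purely for
// illustration; the VM itself relies on its own locking and safepoint
// discipline rather than this exact mechanism, and all names here are
// assumptions for the example.
#include <atomic>
#include <cstdint>

namespace sketch {
  struct Space {
    std::atomic<uintptr_t*> end;   // read concurrently by allocators

    void resize(uintptr_t* new_end) {
      // 1. Resize auxiliary structures (card table, block offset table,
      //    mangling) so they already cover [old_end, new_end).  (Omitted.)
      // 2. Only then publish the new limit; the release store ensures the
      //    auxiliary updates become visible no later than the larger end.
      end.store(new_end, std::memory_order_release);
    }
  };
}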
bool PSYoungGen::resize_generation(size_t eden_size, size_t survivor_size) {
  const size_t alignment = virtual_space()->alignment();
  size_t orig_size = virtual_space()->committed_size();
  bool size_changed = false;

  // There used to be a guarantee here:
  //   guarantee((eden_size + 2*survivor_size) <= _max_gen_size, "incorrect input arguments");
  // The code below enforces this requirement.  In addition, the desired eden
  // size and desired survivor sizes are goals and may exceed the total
  // generation size.

  assert(min_gen_size() <= orig_size && orig_size <= max_size(), "just checking");

  // Adjust new generation size
  const size_t eden_plus_survivors =
    align_size_up(eden_size + 2 * survivor_size, alignment);
  size_t desired_size = MAX2(MIN2(eden_plus_survivors, max_size()),
                             min_gen_size());
  assert(desired_size <= max_size(), "just checking");

  if (desired_size > orig_size) {
    // Grow the generation
    size_t change = desired_size - orig_size;
    assert(change % alignment == 0, "just checking");
    HeapWord* prev_high = (HeapWord*) virtual_space()->high();
    if (!virtual_space()->expand_by(change)) {
      return false; // Error if we fail to resize!
    }
    if (ZapUnusedHeapArea) {
      // Mangle newly committed space immediately because it
      // can be done here more simply than after the new
      // spaces have been computed.
      HeapWord* new_high = (HeapWord*) virtual_space()->high();
      MemRegion mangle_region(prev_high, new_high);
      SpaceMangler::mangle_region(mangle_region);
    }
    size_changed = true;
  } else if (desired_size < orig_size) {
    size_t desired_change = orig_size - desired_size;
    assert(desired_change % alignment == 0, "just checking");

    desired_change = limit_gen_shrink(desired_change);

    if (desired_change > 0) {
      virtual_space()->shrink_by(desired_change);
      reset_survivors_after_shrink();

      size_changed = true;
    }
  } else {
    if (Verbose && PrintGC) {
      if (orig_size == gen_size_limit()) {
        gclog_or_tty->print_cr("PSYoung generation size at maximum: "
                               SIZE_FORMAT "K", orig_size/K);
      } else if (orig_size == min_gen_size()) {
        gclog_or_tty->print_cr("PSYoung generation size at minimum: "
                               SIZE_FORMAT "K", orig_size/K);
      }
    }
  }

  if (size_changed) {
    post_resize();

    if (Verbose && PrintGC) {
      size_t current_size  = virtual_space()->committed_size();
      gclog_or_tty->print_cr("PSYoung generation size changed: "
                             SIZE_FORMAT "K->" SIZE_FORMAT "K",
                             orig_size/K, current_size/K);
    }
  }

  guarantee(eden_plus_survivors <= virtual_space()->committed_size() ||
            virtual_space()->committed_size() == max_size(), "Sanity");

  return true;
}
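// --- Illustrative sketch (not HotSpot source) -------------------------------
// The desired size above is eden plus two survivors, rounded up to the
// generation alignment and then clamped into [min_gen_size, max_size].  The
// helper below reproduces that arithmetic with assumed names so the rounding
// and clamping are easy to check by hand; align_size_up in the VM behaves
// like align_up here for power-of-two alignments.
#include <algorithm>
#include <cstddef>

namespace sketch {
  // Round 'size' up to the next multiple of 'alignment' (a power of two).
  size_t align_up(size_t size, size_t alignment) {
    return (size + alignment - 1) & ~(alignment - 1);
  }

  size_t desired_gen_size(size_t eden, size_t survivor, size_t alignment,
                          size_t min_size, size_t max_size) {
    size_t eden_plus_survivors = align_up(eden + 2 * survivor, alignment);
    // Clamp into [min_size, max_size]; the minimum wins if the two conflict,
    // matching MAX2(MIN2(...)) in the code above.
    return std::max(std::min(eden_plus_survivors, max_size), min_size);
  }
}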
// Similar to PSYoungGen::resize_generation() but
//  allows the sum of eden_size and 2 * survivor_size to exceed _max_gen_size
//  expands at the low end of the virtual space
//  moves the boundary between the generations in order to expand
//  some additional diagnostics
// If no additional changes are required, this can be deleted
// and the changes factored back into PSYoungGen::resize_generation().
bool ASPSYoungGen::resize_generation(size_t eden_size, size_t survivor_size) {
  const size_t alignment = virtual_space()->alignment();
  size_t orig_size = virtual_space()->committed_size();
  bool size_changed = false;

  // There used to be a guarantee here that
  //   (eden_size + 2*survivor_size) <= _max_gen_size
  // This requirement is enforced by the calculation of desired_size
  // below.  It may not hold on entry since eden_size is not bounded by
  // the generation size.

  assert(max_size() == reserved().byte_size(), "max gen size problem?");
  assert(min_gen_size() <= orig_size && orig_size <= max_size(), "just checking");

  // Adjust new generation size
  const size_t eden_plus_survivors =
    align_size_up(eden_size + 2 * survivor_size, alignment);
  size_t desired_size = MAX2(MIN2(eden_plus_survivors, gen_size_limit()),
                             min_gen_size());
  assert(desired_size <= gen_size_limit(), "just checking");

  if (desired_size > orig_size) {
    // Grow the generation
    size_t change = desired_size - orig_size;
    HeapWord* prev_low = (HeapWord*) virtual_space()->low();
    if (!virtual_space()->expand_by(change)) {
      return false;
    }
    if (ZapUnusedHeapArea) {
      // Mangle newly committed space immediately because it
      // can be done here more simply than after the new
      // spaces have been computed.
      HeapWord* new_low = (HeapWord*) virtual_space()->low();
      assert(new_low < prev_low, "Did not grow");

      MemRegion mangle_region(new_low, prev_low);
      SpaceMangler::mangle_region(mangle_region);
    }
    size_changed = true;
  } else if (desired_size < orig_size) {
    size_t desired_change = orig_size - desired_size;

    // How much is available for shrinking.
    size_t available_bytes = limit_gen_shrink(desired_change);
    size_t change = MIN2(desired_change, available_bytes);
    virtual_space()->shrink_by(change);
    size_changed = true;
  } else {
    if (Verbose && PrintGC) {
      if (orig_size == gen_size_limit()) {
        gclog_or_tty->print_cr("ASPSYoung generation size at maximum: "
                               SIZE_FORMAT "K", orig_size/K);
      } else if (orig_size == min_gen_size()) {
        gclog_or_tty->print_cr("ASPSYoung generation size at minimum: "
                               SIZE_FORMAT "K", orig_size/K);
      }
    }
  }

  if (size_changed) {
    reset_after_change();
    if (Verbose && PrintGC) {
      size_t current_size  = virtual_space()->committed_size();
      gclog_or_tty->print_cr("ASPSYoung generation size changed: "
                             SIZE_FORMAT "K->" SIZE_FORMAT "K",
                             orig_size/K, current_size/K);
    }
  }

  guarantee(eden_plus_survivors <= virtual_space()->committed_size() ||
            virtual_space()->committed_size() == max_size(),
            "Sanity");

  return true;
}
void ContiguousSpace::mangle_unused_area() {
  // to-space is used for storing marks during mark-sweep
  mangle_region(MemRegion(top(), end()));
}