// Construct the young generation: register the committed region with the
// barrier set, allocate eden and the two survivor spaces, size them, and
// create the performance counters that export those sizes.
DefNewGeneration::DefNewGeneration(ReservedSpace rs,
                                   size_t initial_size,
                                   int level,
                                   const char* policy)
  : Generation(rs, initial_size, level),
    _objs_with_preserved_marks(NULL),
    _preserved_marks_of_objs(NULL),
    _promo_failure_scan_stack(NULL),
    _promo_failure_drain_in_progress(false),
    _should_allocate_from_space(false)
{
  // Tell the barrier set which region of the heap it must cover.
  MemRegion committed_mr((HeapWord*)_virtual_space.low(),
                         (HeapWord*)_virtual_space.high());
  Universe::heap()->barrier_set()->resize_covered_region(committed_mr);

  // Eden is soft-ended (concurrently resizable) when the policy asks for it.
  if (GenCollectedHeap::heap()->collector_policy()->has_soft_ended_eden()) {
    _eden_space = new ConcEdenSpace(this);
  } else {
    _eden_space = new EdenSpace(this);
  }
  _from_space = new ContiguousSpace();
  _to_space   = new ContiguousSpace();

  if (_eden_space == NULL || _from_space == NULL || _to_space == NULL) {
    vm_exit_during_initialization("Could not allocate a new gen space");
  }

  // Compute the maximum eden and survivor space sizes. These sizes
  // are computed assuming the entire reserved space is committed.
  // These values are exported as performance counters.
  uintx space_alignment = GenCollectedHeap::heap()->collector_policy()->min_alignment();
  uintx reserved_bytes  = _virtual_space.reserved_size();
  _max_survivor_size = compute_survivor_size(reserved_bytes, space_alignment);
  _max_eden_size     = reserved_bytes - (2*_max_survivor_size);

  // Allocate the performance counters.
  // Generation counters -- generation 0, 3 subspaces.
  _gen_counters  = new GenerationCounters("new", 0, 3, &_virtual_space);
  _gc_counters   = new CollectorCounters(policy, 0);

  _eden_counters = new CSpaceCounters("eden", 0, _max_eden_size,     _eden_space, _gen_counters);
  _from_counters = new CSpaceCounters("s0",   1, _max_survivor_size, _from_space, _gen_counters);
  _to_counters   = new CSpaceCounters("s1",   2, _max_survivor_size, _to_space,   _gen_counters);

  compute_space_boundaries(0, SpaceDecorator::Clear, SpaceDecorator::Mangle);
  update_counters();

  _next_gen = NULL;
  _tenuring_threshold = MaxTenuringThreshold;
  _pretenure_size_threshold_words = PretenureSizeThreshold >> LogHeapWordSize;
}
// Construct the young generation: register the committed region with the
// barrier set, allocate eden and the two survivor spaces, size them, set
// up performance counters, and create the STW GC timer.
DefNewGeneration::DefNewGeneration(ReservedSpace rs,
                                   size_t initial_size,
                                   const char* policy)
  : Generation(rs, initial_size),
    _promo_failure_drain_in_progress(false),
    _should_allocate_from_space(false)
{
  GenCollectedHeap* heap = GenCollectedHeap::heap();

  // Tell the barrier set which region of the heap it must cover.
  MemRegion committed_mr((HeapWord*)_virtual_space.low(),
                         (HeapWord*)_virtual_space.high());
  heap->barrier_set()->resize_covered_region(committed_mr);

  _eden_space = new ContiguousSpace();
  _from_space = new ContiguousSpace();
  _to_space   = new ContiguousSpace();
  if (_eden_space == NULL || _from_space == NULL || _to_space == NULL) {
    vm_exit_during_initialization("Could not allocate a new gen space");
  }

  // Compute the maximum eden and survivor space sizes. These sizes
  // are computed assuming the entire reserved space is committed.
  // These values are exported as performance counters.
  uintx space_alignment = heap->collector_policy()->space_alignment();
  uintx reserved_bytes  = _virtual_space.reserved_size();
  _max_survivor_size = compute_survivor_size(reserved_bytes, space_alignment);
  _max_eden_size     = reserved_bytes - (2*_max_survivor_size);

  // Allocate the performance counters.
  GenCollectorPolicy* gen_policy = heap->gen_policy();

  // Generation counters -- generation 0, 3 subspaces.
  _gen_counters = new GenerationCounters("new", 0, 3,
                                         gen_policy->min_young_size(),
                                         gen_policy->max_young_size(),
                                         &_virtual_space);
  _gc_counters  = new CollectorCounters(policy, 0);

  _eden_counters = new CSpaceCounters("eden", 0, _max_eden_size,     _eden_space, _gen_counters);
  _from_counters = new CSpaceCounters("s0",   1, _max_survivor_size, _from_space, _gen_counters);
  _to_counters   = new CSpaceCounters("s1",   2, _max_survivor_size, _to_space,   _gen_counters);

  compute_space_boundaries(0, SpaceDecorator::Clear, SpaceDecorator::Mangle);
  update_counters();

  _old_gen = NULL;
  _tenuring_threshold = MaxTenuringThreshold;
  _pretenure_size_threshold_words = PretenureSizeThreshold >> LogHeapWordSize;

  _gc_timer = new (ResourceObj::C_HEAP, mtGC) STWGCTimer();
}
// Lay out eden and the two survivor spaces within the committed part of the
// virtual space: [eden | from | to], survivors equal-sized and car-aligned.
// A non-zero minimum_eden_size means eden contains live data; its size is
// honored (shrinking the survivors if needed) and eden is NOT cleared on
// re-initialization. Posts JVMPI arena-new events when that event is enabled.
void DefNewGeneration::compute_space_boundaries(uintx minimum_eden_size) {
  // All space sizes must be multiples of car size in order for the CarTable to work.
  // Note that the CarTable is used with and without train gc (for fast lookup).
  uintx alignment = CarSpace::car_size();

  // Compute sizes
  uintx size = _virtual_space.committed_size();
  uintx survivor_size = compute_survivor_size(size, alignment);
  uintx eden_size = size - (2*survivor_size);
  assert(eden_size > 0 && survivor_size <= eden_size, "just checking");

  if (eden_size < minimum_eden_size) {
    // May happen due to 64Kb rounding, if so adjust eden size back up
    minimum_eden_size = align_size_up(minimum_eden_size, alignment);
    uintx maximum_survivor_size = (size - minimum_eden_size) / 2;
    uintx unaligned_survivor_size =
      align_size_down(maximum_survivor_size, alignment);
    // Never let a survivor space go below one alignment unit.
    survivor_size = MAX2(unaligned_survivor_size, alignment);
    eden_size = size - (2*survivor_size);
    assert(eden_size > 0 && survivor_size <= eden_size, "just checking");
    assert(eden_size >= minimum_eden_size, "just checking");
  }

  // The three spaces are laid out back-to-back, eden first.
  char *eden_start = _virtual_space.low();
  char *from_start = eden_start + eden_size;
  char *to_start   = from_start + survivor_size;
  char *to_end     = to_start   + survivor_size;
  assert(to_end == _virtual_space.high(), "just checking");
  assert(Space::is_aligned((HeapWord*)eden_start), "checking alignment");
  assert(Space::is_aligned((HeapWord*)from_start), "checking alignment");
  assert(Space::is_aligned((HeapWord*)to_start),   "checking alignment");

  MemRegion edenMR((HeapWord*)eden_start, (HeapWord*)from_start);
  MemRegion fromMR((HeapWord*)from_start, (HeapWord*)to_start);
  MemRegion toMR  ((HeapWord*)to_start,   (HeapWord*)to_end);

  // Only clear eden when nothing in it is live (minimum_eden_size == 0);
  // the survivor spaces are always (re)initialized as cleared.
  eden()->initialize(edenMR, (minimum_eden_size == 0));
  from()->initialize(fromMR, true);
  to()->initialize(toMR,     true);

  // Announce the new arenas to JVMPI consumers, if anyone is listening.
  if (jvmpi::is_event_enabled(JVMPI_EVENT_ARENA_NEW)) {
    CollectedHeap* ch = Universe::heap();
    jvmpi::post_arena_new_event(ch->addr_to_arena_id(eden_start), "Eden");
    jvmpi::post_arena_new_event(ch->addr_to_arena_id(from_start), "Semi");
    jvmpi::post_arena_new_event(ch->addr_to_arena_id(to_start), "Semi");
  }
}
// Construct the young generation: register the committed region with the
// barrier set, allocate eden and the two survivor spaces, size them, and
// create the performance counters that export those sizes.
DefNewGeneration::DefNewGeneration(ReservedSpace rs,
                                   size_t initial_size,
                                   int level,
                                   const char* policy)
  : Generation(rs, initial_size, level)
{
  // Tell the barrier set which region of the heap it must cover.
  MemRegion committed_mr((HeapWord*)_virtual_space.low(),
                         (HeapWord*)_virtual_space.high());
  Universe::heap()->barrier_set()->resize_covered_region(committed_mr);

  _eden_space = new EdenSpace(this);
  _from_space = new ContiguousSpace();
  _to_space   = new ContiguousSpace();
  if (_eden_space == NULL || _from_space == NULL || _to_space == NULL) {
    vm_exit_during_initialization("Could not allocate a new gen space");
  }

  // Compute the maximum eden and survivor space sizes. These sizes
  // are computed assuming the entire reserved space is committed.
  // These values are exported as performance counters.
  uintx car_alignment  = CarSpace::car_size();
  uintx reserved_bytes = _virtual_space.reserved_size();
  uintx max_survivor_size = compute_survivor_size(reserved_bytes, car_alignment);
  uintx max_eden_size     = reserved_bytes - (2*max_survivor_size);

  // Allocate the performance counters.
  // Generation counters -- generation 0, 3 subspaces.
  _gen_counters = new GenerationCounters(PERF_GC, "new", 0, 3, &_virtual_space);
  _gc_counters  = new CollectorCounters(PERF_GC, policy, 0);

  const char* ns = _gen_counters->name_space();
  _eden_counters = new CSpaceCounters(ns, "eden", 0, max_eden_size,     _eden_space);
  _from_counters = new CSpaceCounters(ns, "s0",   1, max_survivor_size, _from_space);
  _to_counters   = new CSpaceCounters(ns, "s1",   2, max_survivor_size, _to_space);

  compute_space_boundaries(0);
  update_counters();

  _next_gen = NULL;
  _tenuring_threshold = MaxTenuringThreshold;
  _pretenure_size_threshold_words = PretenureSizeThreshold >> LogHeapWordSize;
}
// Maximum usable capacity of the young generation: the whole reservation
// minus one survivor space (one survivor is always kept free for copying).
size_t DefNewGeneration::max_capacity() const {
  const size_t align          = GenCollectedHeap::heap()->collector_policy()->space_alignment();
  const size_t total_reserved = reserved().byte_size();
  const size_t one_survivor   = compute_survivor_size(total_reserved, align);
  return total_reserved - one_survivor;
}
// Lay out eden and the two survivor spaces within the committed part of the
// virtual space: [eden | from | to], survivors equal-sized and aligned.
// minimum_eden_size > 0 means part of eden holds live data and must be
// preserved; clear_space/mangle_space control whether the spaces are
// cleared and (with ZapUnusedHeapArea) debug-mangled on initialization.
void DefNewGeneration::compute_space_boundaries(uintx minimum_eden_size,
                                                bool clear_space,
                                                bool mangle_space) {
  uintx alignment =
    GenCollectedHeap::heap()->collector_policy()->space_alignment();

  // If the spaces are being cleared (only done at heap initialization
  // currently), the survivor spaces need not be empty.
  // Otherwise, no care is taken for used areas in the survivor spaces
  // so check.
  assert(clear_space || (to()->is_empty() && from()->is_empty()),
    "Initialization of the survivor spaces assumes these are empty");

  // Compute sizes
  uintx size = _virtual_space.committed_size();
  uintx survivor_size = compute_survivor_size(size, alignment);
  uintx eden_size = size - (2*survivor_size);
  assert(eden_size > 0 && survivor_size <= eden_size, "just checking");

  if (eden_size < minimum_eden_size) {
    // May happen due to 64Kb rounding, if so adjust eden size back up
    minimum_eden_size = align_size_up(minimum_eden_size, alignment);
    uintx maximum_survivor_size = (size - minimum_eden_size) / 2;
    uintx unaligned_survivor_size =
      align_size_down(maximum_survivor_size, alignment);
    // Never let a survivor space go below one alignment unit.
    survivor_size = MAX2(unaligned_survivor_size, alignment);
    eden_size = size - (2*survivor_size);
    assert(eden_size > 0 && survivor_size <= eden_size, "just checking");
    assert(eden_size >= minimum_eden_size, "just checking");
  }

  // The three spaces are laid out back-to-back, eden first.
  char *eden_start = _virtual_space.low();
  char *from_start = eden_start + eden_size;
  char *to_start   = from_start + survivor_size;
  char *to_end     = to_start   + survivor_size;
  assert(to_end == _virtual_space.high(), "just checking");
  assert(Space::is_aligned((HeapWord*)eden_start), "checking alignment");
  assert(Space::is_aligned((HeapWord*)from_start), "checking alignment");
  assert(Space::is_aligned((HeapWord*)to_start),   "checking alignment");

  MemRegion edenMR((HeapWord*)eden_start, (HeapWord*)from_start);
  MemRegion fromMR((HeapWord*)from_start, (HeapWord*)to_start);
  MemRegion toMR  ((HeapWord*)to_start,   (HeapWord*)to_end);

  // A minimum eden size implies that there is a part of eden that
  // is being used and that affects the initialization of any
  // newly formed eden.
  bool live_in_eden = minimum_eden_size > 0;

  // If not clearing the spaces, do some checking to verify that
  // the space are already mangled.
  if (!clear_space) {
    // Must check mangling before the spaces are reshaped.  Otherwise,
    // the bottom or end of one space may have moved into another
    // a failure of the check may not correctly indicate which space
    // is not properly mangled.
    if (ZapUnusedHeapArea) {
      HeapWord* limit = (HeapWord*) _virtual_space.high();
      eden()->check_mangled_unused_area(limit);
      from()->check_mangled_unused_area(limit);
      to()->check_mangled_unused_area(limit);
    }
  }

  // Reset the spaces for their new regions.  Eden is only cleared when no
  // live data must be preserved in it.
  eden()->initialize(edenMR,
                     clear_space && !live_in_eden,
                     SpaceDecorator::Mangle);
  // If clear_space and live_in_eden, we will not have cleared any
  // portion of eden above its top. This can cause newly
  // expanded space not to be mangled if using ZapUnusedHeapArea.
  // We explicitly do such mangling here.
  if (ZapUnusedHeapArea && clear_space && live_in_eden && mangle_space) {
    eden()->mangle_unused_area();
  }
  from()->initialize(fromMR, clear_space, mangle_space);
  to()->initialize(toMR, clear_space, mangle_space);

  // Set next compaction spaces.
  eden()->set_next_compaction_space(from());
  // The to-space is normally empty before a compaction so need
  // not be considered.  The exception is during promotion
  // failure handling when to-space can contain live objects.
  from()->set_next_compaction_space(NULL);
}
// Maximum usable capacity of the young generation: the whole reservation
// minus one survivor space (one survivor is always kept free for copying).
size_t DefNewGeneration::max_capacity() const {
  const size_t car_align      = CarSpace::car_size();
  const size_t total_reserved = reserved().byte_size();
  const size_t one_survivor   = compute_survivor_size(total_reserved, car_align);
  return total_reserved - one_survivor;
}