// Reserve and commit the virtual memory backing the two mark bitmaps
// (_beg_bits and _end_bits) that together cover covered_region.  On success
// the map storage is split evenly between the two bitmaps and true is
// returned; on failure all acquired memory is released and false is returned.
bool ParMarkBitMap::initialize(MemRegion covered_region) {
  const idx_t bits = bits_required(covered_region);
  // The bits will be divided evenly between two bitmaps; each of them should be
  // an integral number of words.
  assert(bits % (BitsPerWord * 2) == 0, "region size unaligned");
  const size_t words = bits / BitsPerWord;
  const size_t raw_bytes = words * sizeof(idx_t);
  // Ask the OS for a suitable page size for an allocation of raw_bytes.
  // NOTE(review): the trailing 10 is presumably a minimum page count used to
  // decide whether large pages are worthwhile — confirm against the
  // os::page_size_for_region declaration.
  const size_t page_sz = os::page_size_for_region(raw_bytes, raw_bytes, 10);
  const size_t granularity = os::vm_allocation_granularity();
  // Round the request up so it is an exact multiple of both the chosen page
  // size and the allocation granularity.
  const size_t bytes = align_size_up(raw_bytes, MAX2(page_sz, granularity));
  // Only request special alignment when a non-default (large) page size was
  // selected; rs_align > 0 doubles as the "use large pages" flag below.
  const size_t rs_align = page_sz == (size_t) os::vm_page_size() ? 0 :
    MAX2(page_sz, granularity);
  ReservedSpace rs(bytes, rs_align, rs_align > 0);
  os::trace_page_sizes("par bitmap", raw_bytes, raw_bytes, page_sz, rs.base(), rs.size());
  // Tag the reservation as GC memory for native memory tracking.
  MemTracker::record_virtual_memory_type((address)rs.base(), mtGC);
  _virtual_space = new PSVirtualSpace(rs, page_sz);
  if (_virtual_space != NULL && _virtual_space->expand_by(bytes)) {
    _region_start = covered_region.start();
    _region_size = covered_region.word_size();
    idx_t* map = (idx_t*)_virtual_space->reserved_low_addr();
    // First half of the map words backs _beg_bits, second half _end_bits;
    // each bitmap holds bits/2 mark bits.
    _beg_bits.set_map(map);
    _beg_bits.set_size(bits / 2);
    _end_bits.set_map(map + words / 2);
    _end_bits.set_size(bits / 2);
    return true;
  }
  // Failure path: undo any partial initialization.
  // NOTE(review): if the PSVirtualSpace allocation itself yielded NULL, rs is
  // not released here — verify whether that can occur with this allocator.
  _region_start = 0;
  _region_size = 0;
  if (_virtual_space != NULL) {
    delete _virtual_space;
    _virtual_space = NULL;
    // Release memory reserved in the space.
    rs.release();
  }
  return false;
}
// Number of map words needed to hold the mark bits for covered_region.
inline ParMarkBitMap::idx_t
ParMarkBitMap::words_required(MemRegion covered_region) {
  // Each map word stores BitsPerWord mark bits.
  const idx_t total_bits = bits_required(covered_region);
  return total_bits / BitsPerWord;
}
// Number of mark bits needed to cover covered_region.
inline ParMarkBitMap::idx_t
ParMarkBitMap::bits_required(MemRegion covered_region) {
  // Delegate to the overload that takes a size in heap words.
  const size_t heap_words = covered_region.word_size();
  return bits_required(heap_words);
}