// Return the record at the iterator's current position.
// In debug builds, additionally verify that the current record is
// address-ordered with respect to both of its neighbors.
virtual MemPointer* current() const {
  MemPointer* rec = _itr.current();
#ifdef ASSERT
  if (rec != NULL) {
    MemPointer* before = _itr.peek_prev();
    MemPointer* after  = _itr.peek_next();
    // Neighbors, when present, must bracket this record by address.
    assert(before == NULL || before->addr() < rec->addr(), "Sorting order");
    assert(after  == NULL || after->addr()  > rec->addr(), "Sorting order");
  }
#endif
  return rec;
}
// Construct the iterator and advance past any leading records that
// duplicate the first one, so that the iterator starts on a unique record.
// NOTE(review): 'head' is intentionally not advanced inside the loop — each
// successor is compared against the original first record while skipping.
VMRecordIterator(MemPointerArray* arr) : _itr(arr) {
  MemPointerRecord* head = (MemPointerRecord*)_itr.current();
  for (MemPointerRecord* peek = (MemPointerRecord*)_itr.peek_next();
       peek != NULL;
       peek = (MemPointerRecord*)_itr.peek_next()) {
    assert(head != NULL, "Sanity check");
    // Input must already be ordered by sequence number.
    assert(((SeqMemPointerRecord*)peek)->seq() > ((SeqMemPointerRecord*)head)->seq(),
           "pre-sort order");
    if (!is_duplicated_record(head, peek)) {
      break;
    }
    _itr.next();
  }
}
void BaselineReporter::report_virtual_memory_map(const MemBaseline& baseline) { _outputer.start_virtual_memory_map(); MemBaseline* pBL = const_cast<MemBaseline*>(&baseline); MemPointerArrayIteratorImpl itr = MemPointerArrayIteratorImpl(pBL->_vm_map); VMMemRegionEx* rgn = (VMMemRegionEx*)itr.current(); while (rgn != NULL) { if (rgn->is_reserved_region()) { _outputer.reserved_memory_region(FLAGS_TO_MEMORY_TYPE(rgn->flags()), rgn->base(), rgn->base() + rgn->size(), amount_in_current_scale(rgn->size()), rgn->pc()); } else { _outputer.committed_memory_region(rgn->base(), rgn->base() + rgn->size(), amount_in_current_scale(rgn->size()), rgn->pc()); } rgn = (VMMemRegionEx*)itr.next(); } _outputer.done_virtual_memory_map(); }
// Plain forwarding accessor: return the record at the iterator's
// current position (no debug checks in this variant).
virtual MemPointer* current() const {
  return _itr.current();
}