// Returns the next record. Skips a stale arena size record whose
// sequence number is older than that of its associated arena record.
virtual MemPointer* next() {
   MemPointerRecord* next_rec = (MemPointerRecord*)_itr.next();
   // An arena memory record is a special case: we have to compare its
   // sequence number against that of its associated arena record.
   if (next_rec != NULL && next_rec->is_arena_memory_record()) {
     MemPointerRecord* prev_rec = (MemPointerRecord*)_itr.peek_prev();
     // If there is an associated arena record, it has to be the previous
     // record because of the sorting order (by address) - NMT generates a
     // pseudo address for an arena's size record by offsetting the arena's
     // address, which guarantees the ordering of an arena record and its
     // size record.
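     // Illustrative example (the exact offset is an implementation detail;
     // sizeof(void*) is assumed here): for an arena record at address
     // 0x1000, its size record would be keyed at 0x1000 + sizeof(void*),
     // so address-sorted iteration always yields the arena record
     // immediately before its size record.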
     if (prev_rec != NULL && prev_rec->is_arena_record() &&
       next_rec->is_memory_record_of_arena(prev_rec)) {
       if (prev_rec->seq() > next_rec->seq()) {
         // Skip this arena memory record.
         // Two scenarios:
         //   - if the arena record is an allocation record, this early
         //     size record must be left over by a previous arena, and
         //     the last size record should have size = 0.
         //   - if the arena record is a deallocation record, this size
         //     record should be its cleanup record, which should also
         //     have size = 0. In other words, an arena always resets
         //     its size before it goes away (see Arena's destructor).
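         // Hypothetical timeline for the first scenario: a previous arena
         // at this address emits a final size record (seq = 8, size = 0)
         // before it dies, then a new arena is allocated at the same
         // address (seq = 10). Since prev_rec->seq() = 10 is greater than
         // next_rec->seq() = 8, the stale size record is skipped here.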
         assert(next_rec->size() == 0, "size not reset");
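         // Return the record that follows the skipped size record.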
         return _itr.next();
       } else {
         assert(prev_rec->is_allocation_record(),
           "Arena size record ahead of allocation record");
       }
     }
   }
   return next_rec;
 }
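  // Returns the current record. In debug (ASSERT) builds, also verifies
  // that the underlying array is sorted by address around the current
  // record.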
  virtual MemPointer* current() const {
#ifdef ASSERT
    MemPointer* cur_rec = _itr.current();
    if (cur_rec != NULL) {
      MemPointer* prev_rec = _itr.peek_prev();
      MemPointer* next_rec = _itr.peek_next();
      assert(prev_rec == NULL || prev_rec->addr() < cur_rec->addr(), "Sorting order");
      assert(next_rec == NULL || next_rec->addr() > cur_rec->addr(), "Sorting order");
    }
#endif
    return _itr.current();
  }
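  // A minimal usage sketch (hypothetical caller 'itr', not part of the
  // original source): walk the iterator and tally outstanding allocation
  // sizes, relying on next() to have filtered out stale arena size records.
  //
  //   size_t total = 0;
  //   for (MemPointerRecord* rec = (MemPointerRecord*)itr.current();
  //        rec != NULL;
  //        rec = (MemPointerRecord*)itr.next()) {
  //     if (rec->is_allocation_record()) {
  //       total += rec->size();
  //     }
  //   }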