void ObjPtrQueue::verify_oops_in_buffer() { if (_buf == NULL) return; for (size_t i = _index; i < _sz; i += oopSize) { oop obj = (oop)_buf[byte_index_to_index((int)i)]; assert(obj != NULL && obj->is_oop(true /* ignore mark word */), "Not an oop"); } }
// Apply 'cl' to each non-NULL entry in buf[index, sz).  A NULL closure
// means there is nothing to do.  NULL entries are skipped: destructors
// can leave NULL slots in the buffer.
void ObjPtrQueue::apply_closure_to_buffer(ObjectClosure* cl, void** buf,
                                          size_t index, size_t sz) {
  if (cl == NULL) return;
  size_t byte_pos = index;
  while (byte_pos < sz) {
    oop entry = (oop)buf[byte_index_to_index((int)byte_pos)];
    if (entry != NULL) {
      cl->do_object(entry);
    }
    byte_pos += oopSize;
  }
}
// Push 'ptr' onto this (known-active) queue.  The buffer fills from the
// top (_sz) down toward 0, so _index == 0 means the current buffer is
// full (or absent); in that case keep asking for a fresh buffer via
// handle_zero_index() until there is room.
void PtrQueue::enqueue_known_active(void* ptr) {
  assert(0 <= _index && _index <= _sz, "Invariant.");
  assert(_index == 0 || _buf != NULL, "invariant");

  while (_index == 0) {
    handle_zero_index();
  }

  assert(_index > 0, "postcondition");
  // Decrement first, then store into the newly freed slot.
  _index -= oopSize;
  _buf[byte_index_to_index((int)_index)] = ptr;
  assert(0 <= _index && _index <= _sz, "Invariant.");
}
void PtrQueue::flush() { if (!_perm && _buf != NULL) { if (_index == _sz) { // No work to do. qset()->deallocate_buffer(_buf); } else { // We must NULL out the unused entries, then enqueue. for (size_t i = 0; i < _index; i += oopSize) { _buf[byte_index_to_index((int)i)] = NULL; } qset()->enqueue_complete_buffer(_buf); } _buf = NULL; _index = 0; } }
// Apply 'cl' to each non-NULL card pointer in buf[index, sz), passing
// 'worker_i' through to the closure.  When 'consume' is true, each slot
// is NULLed before the callback so that a reconsidered buffer skips the
// entry.  Returns false (stopping early) as soon as the closure does;
// returns true otherwise, including when 'cl' is NULL.
bool DirtyCardQueue::apply_closure_to_buffer(CardTableEntryClosure* cl,
                                             void** buf,
                                             size_t index, size_t sz,
                                             bool consume,
                                             int worker_i) {
  if (cl == NULL) return true;
  size_t byte_pos = index;
  while (byte_pos < sz) {
    int slot = byte_index_to_index((int)byte_pos);
    jbyte* card_ptr = (jbyte*)buf[slot];
    if (card_ptr != NULL) {
      if (consume) {
        // Clear before the callback so this entry isn't processed twice.
        buf[slot] = NULL;
      }
      if (!cl->do_card_ptr(card_ptr, worker_i)) {
        return false;
      }
    }
    byte_pos += oopSize;
  }
  return true;
}
// are compacted toward the top of the buffer. void SATBMarkQueue::filter() { G1CollectedHeap* g1h = G1CollectedHeap::heap(); void** buf = _buf; if (buf == NULL) { // nothing to do return; } // Used for sanity checking at the end of the loop. DEBUG_ONLY(size_t entries = 0; size_t retained = 0;) assert(_index <= _sz, "invariant"); void** limit = &buf[byte_index_to_index(_index)]; void** src = &buf[byte_index_to_index(_sz)]; void** dst = src; while (limit < src) { DEBUG_ONLY(entries += 1;) --src; void* entry = *src; // NULL the entry so that unused parts of the buffer contain NULLs // at the end. If we are going to retain it we will copy it to its // final place. If we have retained all entries we have visited so // far, we'll just end up copying it to the same place. *src = NULL; if (requires_marking(entry, g1h) && !g1h->isMarkedNext((oop)entry)) { --dst;