// Final teardown hook for a single large (out-of-block) allocation: run any
// pending weak-reference finalizers, then clear both liveness bits so the
// cell reads as dead, which lets the subsequent sweep() reclaim it.
// NOTE(review): order matters — weak finalizers must run before the sweep
// destroys the cell they may reference.
void LargeAllocation::lastChanceToFinalize()
{
    // Give weak references their last chance to finalize against a still-live cell.
    m_weakSet.lastChanceToFinalize();
    // Drop the mark bit and the newly-allocated bit: with both cleared the
    // cell is considered dead by the sweep below.
    clearMarked();
    clearNewlyAllocated();
    sweep();
}
// Final teardown hook for a whole marked block: run pending weak-reference
// finalizers, then erase all liveness information (newly-allocated bits and
// mark bits, as for a full collection) so every cell reads as dead and the
// sweep() reclaims the entire block.
// NOTE(review): order matters — weak finalizers must run before the sweep
// destroys the cells they may reference.
void MarkedBlock::lastChanceToFinalize()
{
    m_weakSet.lastChanceToFinalize();
    // Clear the newly-allocated bitmap first, then wipe the mark bits as a
    // FullCollection would; with both gone, no cell in the block is live.
    clearNewlyAllocated();
    clearMarksWithCollectionType<FullCollection>();
    sweep();
}
void MarkedBlock::canonicalizeCellLivenessData(const FreeList& freeList) { HEAP_LOG_BLOCK_STATE_TRANSITION(this); FreeCell* head = freeList.head; if (m_state == Marked) { // If the block is in the Marked state then we know that: // 1) It was not used for allocation during the previous allocation cycle. // 2) It may have dead objects, and we only know them to be dead by the // fact that their mark bits are unset. // Hence if the block is Marked we need to leave it Marked. ASSERT(!head); return; } ASSERT(m_state == FreeListed); // Roll back to a coherent state for Heap introspection. Cells newly // allocated from our free list are not currently marked, so we need another // way to tell what's live vs dead. ASSERT(!m_newlyAllocated); m_newlyAllocated = adoptPtr(new WTF::Bitmap<atomsPerBlock>()); SetNewlyAllocatedFunctor functor(this); forEachCell(functor); FreeCell* next; for (FreeCell* current = head; current; current = next) { next = current->next; reinterpret_cast<JSCell*>(current)->zap(); clearNewlyAllocated(current); } m_state = Marked; }