// Sweep phase for large-object blocks: run finalizers for every unmarked
// (i.e. dead) block, hand the emptied blocks to the GC's large-empty-block
// list, and clear the mark/queued bits on the surviving blocks so they start
// the next collection cycle unmarked.
//
// m_startedFinalize brackets the whole sweep — presumably read elsewhere to
// detect reentrancy or to alter allocation behavior during finalization;
// confirm against the rest of GCLargeAlloc.
void GCLargeAlloc::Finalize()
{
    m_startedFinalize = true;
    // prev always points at the link slot (head pointer or a block's next
    // field) through which the current block was reached, so dead blocks can
    // be unlinked in place without a separate "previous node" pointer.
    LargeBlock **prev = &m_blocks;
    while (*prev) {
        LargeBlock *b = *prev;
        if ((b->flags[0] & kMark) == 0) {
            // Unmarked: the object is garbage — finalize it and retire the block.
            GCAssert((b->flags[0] & kQueued) == 0);
            GC* gc = b->gc;
            // GC::Finalize calls GC::MarkOrClearWeakRefs before calling GCAlloc::Finalize,
            // ergo there should be no unmarked objects with weak refs.
            GCAssertMsg((b->flags[0] & kHasWeakRef) == 0, "No unmarked object should have a weak ref at this point");

            // Large blocks may be allocated by finalizers for large blocks, creating contention
            // for the block list.  Yet the block list must be live, since eg GetUsageInfo may be
            // called by the finalizers (or their callees).
            //
            // Unlink the block from the list early to avoid contention.
            *prev = Next(b);
            b->next = NULL;

            // The stored object begins immediately after the LargeBlock header.
            void *item = b+1;
            if (b->flags[0] & kFinalizable) {
                GCFinalizedObject *obj = (GCFinalizedObject *) item;
                obj = (GCFinalizedObject *) GetUserPointer(obj);
                // Explicit destructor call — storage is reclaimed separately
                // via AddToLargeEmptyBlockList below.
                obj->~GCFinalizedObject();
#if defined(_DEBUG)
                // Debug-only: a dying RC object must have a zero reference count.
                if(b->rcobject) {
                    gc->RCObjectZeroCheck((RCObject*)obj);
                }
#endif
            }

            // GC::GetWeakRef will not allow a weak reference to be created to an object that
            // is ready for destruction.
            // NOTE(review): re-asserted here presumably because the finalizer
            // above ran arbitrary user code — confirm against GC::GetWeakRef.
            GCAssertMsg((b->flags[0] & kHasWeakRef) == 0, "No unmarked object should have a weak ref at this point");

#ifdef MMGC_HOOKS
            if(m_gc->heap->HooksEnabled()) {
#ifdef MMGC_MEMORY_PROFILER
                // Keep the profiler's outstanding ask-size accounting in sync
                // with the object being released.
                if(GCHeap::GetGCHeap()->GetProfiler())
                    m_totalAskSize -= GCHeap::GetGCHeap()->GetProfiler()->GetAskSize(GetUserPointer(item));
#endif
                // b->size minus debug padding is the user-visible allocation size.
                m_gc->heap->FinalizeHook(GetUserPointer(item), b->size - DebugSize());
            }
#endif

            // The block is not empty until now, so now add it.
            gc->AddToLargeEmptyBlockList(b);
            // prev was already advanced by the unlink (*prev = Next(b)), so
            // skip the mark-clearing path and examine the next block.
            continue;
        }

        // Marked (live): clear marks so the block starts the next cycle clean,
        // then advance prev to this block's next-link slot.
        b->flags[0] &= ~(kMark|kQueued);
        prev = (LargeBlock**)(&b->next);
    }
    m_startedFinalize = false;
}