// Slow path taken when the inline bump/free-list allocation fails.
// Accounts the bytes consumed from the exhausted free list, gives the GC a
// chance to run, and falls back to allocating a fresh block.
void* MarkedAllocator::allocateSlowCase(size_t bytes)
{
    ASSERT(m_heap->vm()->currentThreadIsHoldingAPILock());
    doTestCollectionsIfNeeded();

    ASSERT(!m_markedSpace->isIterating());
    ASSERT(!m_freeList.head);
    // The free list is exhausted; report what it originally held as allocated.
    m_heap->didAllocate(m_freeList.bytes);

    // First retry: another block in this allocator may have free cells.
    void* allocation = tryAllocate(bytes);
    if (LIKELY(allocation))
        return allocation;

    // Let a collection reclaim memory, then retry once more.
    if (m_heap->collectIfNecessaryOrDefer()) {
        allocation = tryAllocate(bytes);
        if (allocation)
            return allocation;
    }

    ASSERT(!m_heap->shouldCollect());

    // Last resort: grow the heap with a brand-new block; this cannot fail here.
    MarkedBlock* freshBlock = allocateBlock(bytes);
    ASSERT(freshBlock);
    addBlock(freshBlock);

    allocation = tryAllocate(bytes);
    ASSERT(allocation);
    return allocation;
}
// Slow path taken when the inline allocation fast path fails.
//
// Accounts the consumed free list, optionally triggers a (possibly deferred)
// collection, retries allocation from existing blocks, and finally tries to
// grow the heap with a new block.
//
// deferralContext: if non-null, collections triggered here may be deferred.
// crashOnFailure:  when true, failure to obtain a block is fatal
//                  (RELEASE_ASSERT_NOT_REACHED); when false, returns nullptr.
void* MarkedAllocator::allocateSlowCaseImpl(GCDeferralContext* deferralContext, bool crashOnFailure)
{
    SuperSamplerScope superSamplerScope(false);
    ASSERT(m_heap->vm()->currentThreadIsHoldingAPILock());
    doTestCollectionsIfNeeded(deferralContext);

    ASSERT(!m_markedSpace->isIterating());
    // The free list is exhausted; report its original size as allocated bytes.
    m_heap->didAllocate(m_freeList.originalSize);

    didConsumeFreeList();

    // Fix: local was misspelled "healpingHeap"; renamed to match upstream.
    AllocatingScope helpingHeap(*m_heap);

    m_heap->collectIfNecessaryOrDefer(deferralContext);

    // Retry from blocks we already own before growing the heap.
    void* result = tryAllocateWithoutCollecting();
    if (LIKELY(result != 0))
        return result;

    // Grow the heap with a new block; honor the caller's failure policy.
    MarkedBlock::Handle* block = tryAllocateBlock();
    if (!block) {
        if (crashOnFailure)
            RELEASE_ASSERT_NOT_REACHED();
        else
            return nullptr;
    }
    addBlock(block);
    result = allocateIn(block);
    ASSERT(result);
    return result;
}