char *
LargeHeapBucket::SnailAlloc(Recycler * recycler, size_t sizeCat, size_t size, ObjectInfoBits attributes, bool nothrow)
{
    char * memBlock;
    Assert((attributes & InternalObjectInfoBitMask) == attributes);

    // No free memory, try to collect with allocated bytes and time heuristic, and concurrently
#if ENABLE_CONCURRENT_GC
    BOOL collected = recycler->disableCollectOnAllocationHeuristics ? recycler->FinishConcurrent<FinishConcurrentOnAllocation>() :
        recycler->CollectNow<CollectOnAllocation>();
#else
    BOOL collected = recycler->disableCollectOnAllocationHeuristics ? FALSE : recycler->CollectNow<CollectOnAllocation>();
#endif
    if (!collected)
    {
        // No collection was triggered; try to satisfy the request by adding a new heap block
        memBlock = TryAllocFromNewHeapBlock(recycler, sizeCat, size, attributes, nothrow);
        if (memBlock != nullptr)
        {
            return memBlock;
        }

        // Can't even allocate a new block, we need to force a collection and
        // allocate some free memory, add a new heap block again, or throw out of memory
        AllocationVerboseTrace(recycler->GetRecyclerFlagsTable(), _u("LargeHeapBucket::AddLargeHeapBlock failed, forcing in-thread collection\n"));
        recycler->CollectNow<CollectNowForceInThread>();
    }

    // A collection happened (or was forced in-thread); retry the free lists first
    memBlock = TryAlloc(recycler, sizeCat, attributes);
    if (memBlock != nullptr)
    {
        return memBlock;
    }

    // Free lists are still empty; try to add a new heap block
    memBlock = TryAllocFromNewHeapBlock(recycler, sizeCat, size, attributes, nothrow);
    if (memBlock != nullptr)
    {
        return memBlock;
    }

    if (nothrow == false)
    {
        // Can't add a heap block, we are out of memory
        // Since nothrow is false, we can throw right here
        recycler->OutOfMemory();
    }

    return nullptr;
}
template <class TBlockType>
void
SmallHeapBlockAllocator<TBlockType>::TrackNativeAllocatedObjects()
{
    Assert(this->freeObjectList != nullptr && endAddress != nullptr);
    Assert(this->heapBlock != nullptr);

#if defined(PROFILE_RECYCLER_ALLOC) || defined(RECYCLER_MEMORY_VERIFY) || defined(MEMSPECT_TRACKING) || defined(ETW_MEMORY_TRACKING)
    if (pfnTrackNativeAllocatedObjectCallBack == nullptr)
    {
        return;
    }

    if (lastNonNativeBumpAllocatedBlock == nullptr)
    {
#ifdef RECYCLER_PAGE_HEAP
        Assert((char *)this->freeObjectList == this->heapBlock->GetAddress() || ((SmallHeapBlock*)this->heapBlock)->InPageHeapMode());
#else
        Assert((char *)this->freeObjectList == this->heapBlock->GetAddress());
#endif
        return;
    }

    Recycler * recycler = this->heapBlock->heapBucket->heapInfo->recycler;
    size_t sizeCat = this->heapBlock->heapBucket->sizeCat;
    char * curr = lastNonNativeBumpAllocatedBlock + sizeCat;
    Assert(curr <= (char *)this->freeObjectList);

#if DBG_DUMP
    AllocationVerboseTrace(recycler->GetRecyclerFlagsTable(), _u("TrackNativeAllocatedObjects: recycler = 0x%p, sizeCat = %u, lastRuntimeAllocatedBlock = 0x%p, freeObjectList = 0x%p, nativeAllocatedObjectCount = %u\n"),
        recycler, sizeCat, this->lastNonNativeBumpAllocatedBlock, this->freeObjectList, ((char *)this->freeObjectList - curr) / sizeCat);
#endif
    // Report each object that was bump-allocated since the last notification to the tracking callback
    while (curr < (char *)this->freeObjectList)
    {
        pfnTrackNativeAllocatedObjectCallBack(recycler, curr, sizeCat);
        curr += sizeCat;
    }
#elif defined(RECYCLER_PERF_COUNTERS)
    if (lastNonNativeBumpAllocatedBlock == nullptr)
    {
        return;
    }

    size_t sizeCat = this->heapBlock->heapBucket->sizeCat;
    char * curr = lastNonNativeBumpAllocatedBlock + sizeCat;
    Assert(curr <= (char *)this->freeObjectList);
    size_t byteCount = ((char *)this->freeObjectList - curr);

#if DBG_DUMP
    AllocationVerboseTrace(_u("TrackNativeAllocatedObjects: recycler = 0x%p, sizeCat = %u, lastRuntimeAllocatedBlock = 0x%p, freeObjectList = 0x%p, nativeAllocatedObjectCount = %u\n"),
        recycler, sizeCat, this->lastNonNativeBumpAllocatedBlock, this->freeObjectList, ((char *)this->freeObjectList - curr) / sizeCat);
#endif

    // Account for the bump-allocated range in the live/free object perf counters
    RECYCLER_PERF_COUNTER_ADD(LiveObject, byteCount / sizeCat);
    RECYCLER_PERF_COUNTER_ADD(LiveObjectSize, byteCount);
    RECYCLER_PERF_COUNTER_SUB(FreeObjectSize, byteCount);

    RECYCLER_PERF_COUNTER_ADD(SmallHeapBlockLiveObject, byteCount / sizeCat);
    RECYCLER_PERF_COUNTER_ADD(SmallHeapBlockLiveObjectSize, byteCount);
    RECYCLER_PERF_COUNTER_SUB(SmallHeapBlockFreeObjectSize, byteCount);
#else
#error Not implemented
#endif
}