void* MemoryManager::mallocSmallSizeSlow(uint32_t bytes, unsigned index) { size_t nbytes = smallIndex2Size(index); static constexpr unsigned nContigTab[] = { #define SMALL_SIZE(index, lg_grp, lg_delta, ndelta, lg_delta_lookup, ncontig) \ ncontig, SMALL_SIZES #undef SMALL_SIZE }; unsigned nContig = nContigTab[index]; size_t contigMin = nContig * nbytes; unsigned contigInd = smallSize2Index(contigMin); for (unsigned i = contigInd; i < kNumSmallSizes; ++i) { FTRACE(4, "MemoryManager::mallocSmallSizeSlow({}-->{}, {}): contigMin={}, " "contigInd={}, try i={}\n", bytes, nbytes, index, contigMin, contigInd, i); void* p = m_freelists[i].maybePop(); if (p != nullptr) { FTRACE(4, "MemoryManager::mallocSmallSizeSlow({}-->{}, {}): " "contigMin={}, contigInd={}, use i={}, size={}, p={}\n", bytes, nbytes, index, contigMin, contigInd, i, smallIndex2Size(i), p); // Split tail into preallocations and store them back into freelists. uint32_t availBytes = smallIndex2Size(i); uint32_t tailBytes = availBytes - nbytes; if (tailBytes > 0) { void* tail = (void*)(uintptr_t(p) + nbytes); splitTail(tail, tailBytes, nContig - 1, nbytes, index); } return p; } } // No available free list items; carve new space from the current slab. return slabAlloc(bytes, index); }
/*
 * Allocate a block directly from the current slab, stamping a SmallNode
 * header with the padded size.  The caller receives the address just past
 * the header (header + 1).
 */
NEVER_INLINE void* MemoryManager::smartMallocSlab(size_t padbytes) {
  auto header = static_cast<SmallNode*>(slabAlloc(padbytes));
  header->padbytes = padbytes;
  FTRACE(1, "smartMallocSlab: {} -> {}\n", padbytes,
         static_cast<void*>(header + 1));
  return header + 1;
}