// Allocate a fresh back-reference slot and return its (master, offset) index.
// largeObj: tag the returned index as referring to a large object.
// Returns an invalid/default BackRefIdx() if no free block can be obtained.
// NOTE(review): synchronization here is per-block only (blockToUse->blockMutex);
// findFreeBlock() is presumably safe to call without the global lock in this
// variant — confirm against BackRefMaster's declaration.
BackRefIdx BackRefIdx::newBackRef(bool largeObj)
{
    BackRefBlock *blockToUse;
    void **toUse;
    BackRefIdx res;
    bool lastBlockFirstUsed = false;

    do {
        MALLOC_ASSERT(backRefMaster, ASSERT_TEXT);
        // Pick a block that still has capacity; give up if none available.
        blockToUse = backRefMaster->findFreeBlock();
        if (!blockToUse)
            return BackRefIdx();
        toUse = NULL;
        { // the block is locked to find a reference
            MallocMutex::scoped_lock lock(blockToUse->blockMutex);
            if (blockToUse->freeList) {
                // Reuse a previously released slot: pop the head of the free list.
                toUse = (void**)blockToUse->freeList;
                blockToUse->freeList = blockToUse->freeList->next;
                // The next free-list entry, if any, must still lie inside this block.
                MALLOC_ASSERT(!blockToUse->freeList ||
                              ((uintptr_t)blockToUse->freeList>=(uintptr_t)blockToUse
                               && (uintptr_t)blockToUse->freeList < (uintptr_t)blockToUse + slabSize),
                              ASSERT_TEXT);
            } else if (blockToUse->allocatedCount < BR_MAX_CNT) {
                // No recycled slots: carve a new one from the bump pointer,
                // which moves downward by one pointer-sized slot per allocation.
                toUse = (void**)blockToUse->bumpPtr;
                blockToUse->bumpPtr = (FreeObject*)((uintptr_t)blockToUse->bumpPtr - sizeof(void*));
                if (blockToUse->allocatedCount == BR_MAX_CNT-1) {
                    // Last slot just taken: bump pointer must have reached the
                    // header region; mark the block exhausted.
                    MALLOC_ASSERT((uintptr_t)blockToUse->bumpPtr < (uintptr_t)blockToUse+sizeof(BackRefBlock),
                                  ASSERT_TEXT);
                    blockToUse->bumpPtr = NULL;
                }
            }
            if (toUse) {
                // First use of the last remaining block (no other block on the
                // use list) — remember to grow the table after dropping the lock.
                if (!blockToUse->allocatedCount && !backRefMaster->listForUse)
                    lastBlockFirstUsed = true;
                blockToUse->allocatedCount++;
            }
        } // end of lock scope
    } while (!toUse);  // retry: another thread may have drained the block between find and lock
    // The first thread that uses the last block requests new space in advance;
    // possible failures are ignored.
    if (lastBlockFirstUsed)
        backRefMaster->requestNewSpace();
    res.master = blockToUse->myNum;
    // Slot index within the block, counted in pointer-sized units past the header.
    uintptr_t offset = ((uintptr_t)toUse - ((uintptr_t)blockToUse + sizeof(BackRefBlock)))/sizeof(void*);
    // Is offset too big?
    // (presumably BackRefIdx::offset is a 15-bit field — TODO confirm against its declaration)
    MALLOC_ASSERT(!(offset >> 15), ASSERT_TEXT);
    res.offset = offset;
    if (largeObj) res.largeObj = largeObj;
    return res;
}
void ExtMemoryPool::freeLargeObject(void *object) { LargeObjectHdr *header = (LargeObjectHdr*)object - 1; // overwrite backRefIdx to simplify double free detection header->backRefIdx = BackRefIdx(); if (!loc.put(this, header->memoryBlock)) { removeBackRef(header->memoryBlock->backRefIdx); backend.putLargeBlock(header->memoryBlock); STAT_increment(getThreadId(), ThreadCommonCounters, freeLargeObj); } }
// Allocate a fresh back-reference slot and return its (master, offset) index.
// largeObj: tag the returned index as referring to a large object.
// Returns an invalid/default BackRefIdx() if no free block can be obtained.
// NOTE(review): this variant takes the global backRefMutex to locate a block,
// then a per-block mutex to claim a slot inside it.
BackRefIdx BackRefIdx::newBackRef(bool largeObj)
{
    BackRefBlock *blockToUse;
    void **toUse;
    BackRefIdx res;

    do {
        { // global lock taken to find a block
            MallocMutex::scoped_lock lock(backRefMutex);

            MALLOC_ASSERT(backRefMaster, ASSERT_TEXT);
            // Pick a block that still has capacity; give up if none available.
            if (! (blockToUse = backRefMaster->findFreeBlock()))
                return BackRefIdx();
        }
        toUse = NULL;
        { // the block is locked to find a reference
            MallocMutex::scoped_lock lock(blockToUse->blockMutex);
            if (blockToUse->freeList) {
                // Reuse a previously released slot: pop the head of the free list.
                toUse = (void**)blockToUse->freeList;
                blockToUse->freeList = blockToUse->freeList->next;
                // The next free-list entry, if any, must still lie inside this block.
                MALLOC_ASSERT(!blockToUse->freeList ||
                              ((uintptr_t)blockToUse->freeList>=(uintptr_t)blockToUse
                               && (uintptr_t)blockToUse->freeList < (uintptr_t)blockToUse + blockSize),
                              ASSERT_TEXT);
            } else if (blockToUse->allocatedCount < BR_MAX_CNT) {
                // No recycled slots: carve a new one from the bump pointer,
                // which moves downward by one pointer-sized slot per allocation.
                toUse = (void**)blockToUse->bumpPtr;
                blockToUse->bumpPtr = (FreeObject*)((uintptr_t)blockToUse->bumpPtr - sizeof(void*));
                if (blockToUse->allocatedCount == BR_MAX_CNT-1) {
                    // Last slot just taken: bump pointer must have reached the
                    // header region; mark the block exhausted.
                    MALLOC_ASSERT((uintptr_t)blockToUse->bumpPtr < (uintptr_t)blockToUse+sizeof(BackRefBlock),
                                  ASSERT_TEXT);
                    blockToUse->bumpPtr = NULL;
                }
            }
            if (toUse)
                blockToUse->allocatedCount++;
        } // end of lock scope
    } while (!toUse);  // retry: another thread may have drained the block between find and lock
    res.master = blockToUse->myNum;
    // Slot index within the block, counted in pointer-sized units past the header.
    uintptr_t offset = ((uintptr_t)toUse - ((uintptr_t)blockToUse + sizeof(BackRefBlock)))/sizeof(void*);
    // Is offset too big?
    // (presumably BackRefIdx::offset is a 15-bit field — TODO confirm against its declaration)
    MALLOC_ASSERT(!(offset >> 15), ASSERT_TEXT);
    res.offset = offset;
    if (largeObj) res.largeObj = largeObj;
    return res;
}