void FixedAlloc::Destroy() { // Free all of the blocks while (m_firstBlock) { #ifdef MMGC_MEMORY_PROFILER if(m_firstBlock->numAlloc > 0 && m_heap->GetStatus() != kMemAbort) { union { char* mem_c; uint32_t* mem; }; mem_c = m_firstBlock->items; unsigned int itemNum = 0; while(itemNum++ < m_itemsPerBlock) { if(IsInUse(m_firstBlock, mem)) { GCLog("Leaked %d byte item. Addr: 0x%p\n", GetItemSize(), GetUserPointer(mem)); PrintAllocStackTrace(GetUserPointer(mem)); } mem_c += m_itemSize; } } #ifdef MMGC_MEMORY_INFO //check for writes on deleted memory VerifyFreeBlockIntegrity(m_firstBlock->firstFree, m_firstBlock->size); #endif #endif FreeChunk(m_firstBlock); } m_firstBlock = NULL; }
FixedAlloc::~FixedAlloc()
{
	// Free all of the blocks. With MEMORY_INFO compiled in, first report any
	// leaked items and verify that freed (poisoned) items were not written to.
	while (m_firstBlock) {
#ifdef MEMORY_INFO
		if(m_firstBlock->numAlloc > 0) {
			// go through every memory location, if the fourth 4 bytes cast as
			// an integer isn't 0xedededed then its allocated space and the integer is
			// an index into the stack trace table, the first 4 bytes will contain
			// the freelist pointer for free'd items (which is why the trace index is
			// stored in the second 4)
			// first 4 bytes - free list pointer
			// 2nd 4 bytes - alloc stack trace
			// 3rd 4 bytes - free stack trace
			// 4th 4 bytes - 0xedededed if freed correctly
			unsigned int *mem = (unsigned int*) m_firstBlock->items;
			unsigned int itemNum = 0;
			while(itemNum++ < m_itemsPerBlock) {
				unsigned int fourthInt = *(mem+3);
				if(fourthInt != 0xedededed) {
					GCDebugMsg(false, "Leaked %d byte item. Addr: 0x%x\n", GetItemSize(), mem+2);
					PrintStackTraceByIndex(*(mem+1));
				}
				mem += (m_itemSize / sizeof(int));
			}
			// Leaks were detected above; halt debug builds here.
			GCAssert(false);
		}

		// go through every item on the free list and make sure it wasn't written to
		// after being poisoned.
		void *item = m_firstBlock->firstFree;
		while(item) {
#ifdef MMGC_64BIT
			for(int i=3, n=(m_firstBlock->size>>2)-3; i<n; i++)
#else
			for(int i=3, n=(m_firstBlock->size>>2)-1; i<n; i++)
#endif
			{
				unsigned int data = ((int*)item)[i];
				if(data != 0xedededed) {
					// FIX: the format string contains a %x conversion but no
					// matching argument was passed (undefined behavior in a
					// printf-style call); supply the corrupted item's address.
					GCDebugMsg(false, "Object 0x%x was written to after it was deleted, allocation trace:", item);
					PrintStackTrace((int*)item+2);
					GCDebugMsg(false, "Deletion trace:");
					PrintStackTrace((int*)item+3);
					GCDebugMsg(true, "Deleted item write violation!");
				}
			}
			// next free item
			item = *((void**)item);
		}
#endif
		FreeChunk(m_firstBlock);
	}
}
void VDChunkedBuffer::UnlockRead(uint32 size) {
    // Nothing was consumed; nothing to do.
    if (!size)
        return;

    // Advance the read tail within the front chunk.
    ChunkInfo& frontChunk = mActiveChunks.front();
    mChunkTail += size;

    // NOTE(review): the tail is reset to 0 rather than wrapped, so this
    // assumes a single UnlockRead never consumes past the end of the front
    // chunk — confirm with callers.
    if (mChunkTail >= frontChunk.mChunkSize) {
        mChunkTail = 0;
        FreeChunk();
    }
}
/** * Frees allocation associated with passed in pointer. * * @param Pointer Pointer to free. */ void FBestFitAllocator::Free( void* Pointer ) { SCOPE_SECONDS_COUNTER(TimeSpentInAllocator); // Look up pointer in TMap. FMemoryChunk* MatchingChunk = PointerToChunkMap.FindRef( (PTRINT) Pointer ); check( MatchingChunk ); // Remove the entry PointerToChunkMap.Remove((PTRINT) Pointer); // Update usage stats in a thread safe way. appInterlockedAdd( &AllocatedMemorySize, -MatchingChunk->Size ); appInterlockedAdd( &AvailableMemorySize, +MatchingChunk->Size ); // Free the chunk. FreeChunk(MatchingChunk); }
GCAlloc::~GCAlloc()
{
	CoalesceQuickList();

	// Every item should have been released before the allocator dies.
	GCAssertMsg(GetNumAlloc() == 0, "You have leaks");

	// Unlink and release each remaining block in turn.
	while (m_firstBlock) {
#ifdef MMGC_MEMORY_INFO
		// check that no freed (poisoned) item in this block was written to
		VerifyFreeBlockIntegrity(m_firstBlock->firstFree, m_firstBlock->size);
#endif //MMGC_MEMORY_INFO
		GCBlock* block = m_firstBlock;
		UnlinkChunk(block);
		FreeChunk(block);
	}
}
bool GCAlloc::Sweep(GCBlock *b)
{
	GCAssert(b->needsSweeping);
	RemoveFromSweepList(b);

	SweepGuts(b);

	// A block that still holds items goes back on the free list; an empty
	// one is unlinked and released entirely. Returns true iff released.
	if (b->numItems != 0) {
		AddToFreeList(b);
		return false;
	}

	UnlinkChunk(b);
	FreeChunk(b);
	return true;
}
void FixedAlloc::Destroy() { // Free all of the blocks while (m_firstBlock) { #ifdef MMGC_MEMORY_PROFILER if(m_firstBlock->numAlloc > 0 && m_heap->GetStatus() != kMemAbort) { union { char* mem_c; uint32_t* mem; }; mem_c = m_firstBlock->items; unsigned int itemNum = 0; while(itemNum++ < m_itemsPerBlock) { if(IsInUse(m_firstBlock, mem)) { // supress output in release build UNLESS the profiler is on #ifndef GCDEBUG if(m_heap->GetProfiler() != NULL) #endif { GCLog("Leaked %d byte item. Addr: 0x%p\n", GetItemSize(), GetUserPointer(mem)); PrintAllocStackTrace(GetUserPointer(mem)); } } mem_c += m_itemSize; } } #ifdef MMGC_MEMORY_INFO //check for writes on deleted memory VerifyFreeBlockIntegrity(m_firstBlock->firstFree, m_firstBlock->size); #endif #endif // Note, don't cache any state across this call; FreeChunk may temporarily // release locks held if the true type of this allocator is FixedAllocSafe. FreeChunk(m_firstBlock); } m_firstBlock = NULL; }
void SampleCache::Precache( const std::string & file_name ) { Mix_Chunk * precache = LoadSound( file_name ); FreeChunk( precache ); // this does not remove the sample from cache }
/** * Defragment the memory. Memory is moved around using the specified policy. * The function tries to perform non-overlapping memory transfers as much as possible. */ void FBestFitAllocator::DefragmentMemory( FDefragmentationPolicy &Policy ) { #if !FINAL_RELEASE || FINAL_RELEASE_DEBUGCONSOLE DOUBLE StartTime = appSeconds(); INT NumHolesBefore = 0; INT NumHolesAfter = 0; INT LargestHoleBefore = GetLargestAvailableAllocation(&NumHolesBefore); INT LargestHoleAfter = 0; #endif INT TotalRelocationSize = 0; INT NumRelocations = 0; // Find the first free chunk. FMemoryChunk* AvailableChunk = FirstChunk; while ( AvailableChunk && !AvailableChunk->bIsAvailable ) { AvailableChunk = AvailableChunk->NextChunk; } // Process the next used chunk. FMemoryChunk* Chunk = AvailableChunk ? AvailableChunk->NextChunk : NULL; // Relocate all subsequent used chunks to the beginning of the free chunk. while ( Chunk ) { FMemoryChunk* BestChunk = AvailableChunk; #if MINIMIZE_NUMBER_OF_OVERLAPPING_RELOCATIONS // Would this be an overlapped memory-move? if ( Chunk->Size > AvailableChunk->Size ) { // Try to move it out of the way (to the last possible free chunk)! FMemoryChunk* AnotherAvailableChunk = FirstFreeChunk; while ( AnotherAvailableChunk ) { if ( AnotherAvailableChunk->Size >= Chunk->Size && AnotherAvailableChunk->Base > BestChunk->Base ) { BestChunk = AnotherAvailableChunk; } AnotherAvailableChunk = AnotherAvailableChunk->NextFreeChunk; } } #endif UBOOL bCouldRelocate = Policy.Relocate(BestChunk->Base, Chunk->Base, Chunk->Size); if ( bCouldRelocate ) { NumRelocations++; TotalRelocationSize += Chunk->Size; // Update our book-keeping. PointerToChunkMap.Remove((PTRINT) Chunk->Base); PointerToChunkMap.Set((PTRINT) BestChunk->Base, BestChunk); BestChunk->UnlinkFree(); // Mark as being in use. if ( BestChunk->Size > Chunk->Size ) { Split(BestChunk, Chunk->Size); // Split to create a new free chunk } else if ( BestChunk->Size < Chunk->Size ) { // Overlapping relocation. 
We're just "sliding" memory down one step. check( Chunk->PreviousChunk == BestChunk ); INT HoleSize = BestChunk->Size; BestChunk->Size = Chunk->Size; Chunk->Base = BestChunk->Base + BestChunk->Size; Chunk->Size = HoleSize; } // Free this chunk and Coalesce. FreeChunk(Chunk); } else { // Got a non-relocatable used chunk. Try relocate as many others into the free chunk as we can and move on. FMemoryChunk* AnotherUsedChunk = Chunk->NextChunk; while ( AnotherUsedChunk && AnotherUsedChunk->bIsAvailable ) { AnotherUsedChunk = AnotherUsedChunk->NextChunk; } while ( AnotherUsedChunk && AvailableChunk && AvailableChunk->bIsAvailable ) { // Find the next used chunk now, before we free the current one. FMemoryChunk* NextUsedChunk = AnotherUsedChunk->NextChunk; while ( NextUsedChunk && NextUsedChunk->bIsAvailable ) { NextUsedChunk = NextUsedChunk->NextChunk; } if ( AnotherUsedChunk->Size <= AvailableChunk->Size ) { if ( Policy.Relocate(AvailableChunk->Base, AnotherUsedChunk->Base, AnotherUsedChunk->Size) ) { NumRelocations++; TotalRelocationSize += AnotherUsedChunk->Size; // Update our book-keeping. PointerToChunkMap.Remove((PTRINT) AnotherUsedChunk->Base); PointerToChunkMap.Set((PTRINT) AvailableChunk->Base, AvailableChunk); AvailableChunk->UnlinkFree(); // Mark as being in use. if ( AvailableChunk->Size > AnotherUsedChunk->Size ) { Split(AvailableChunk, AnotherUsedChunk->Size); // Split to create a new free chunk AvailableChunk = AvailableChunk->NextChunk; } else { check( AvailableChunk->Size == AnotherUsedChunk->Size ); } FreeChunk(AnotherUsedChunk); } } AnotherUsedChunk = NextUsedChunk; } // AvailableChunk is now filled up as much as possible. Skip it. AvailableChunk = AvailableChunk ? AvailableChunk->NextChunk : NULL; } // If we used up our current free chunk, find the next one. while ( AvailableChunk && !AvailableChunk->bIsAvailable ) { AvailableChunk = AvailableChunk->NextChunk; } // Process the next used chunk. Chunk = AvailableChunk ? 
AvailableChunk->NextChunk : NULL; } #if !FINAL_RELEASE DOUBLE Duration = appSeconds() - StartTime; LargestHoleAfter = GetLargestAvailableAllocation( &NumHolesAfter ); debugf( TEXT("DEFRAG: %.1f ms, Available: %.3f MB, NumRelocations: %d, Relocated: %.3f MB, NumHolesBefore: %d, NumHolesAfter: %d, LargestHoleBefore: %.3f MB, LargestHoleAfter: %.3f MB"), Duration*1000.0, AvailableMemorySize/1024.0f/1024.0f, NumRelocations, FLOAT(TotalRelocationSize)/1024.0f/1024.0f, NumHolesBefore, NumHolesAfter, FLOAT(LargestHoleBefore)/1024.f/1024.0f, FLOAT(LargestHoleAfter)/1024.0f/1024.0f ); #endif }
/**
 * Tries to reallocate texture memory in-place (without relocating),
 * by adjusting the base address of the allocation but keeping the end address the same.
 *
 * @param OldBaseAddress Pointer to the original allocation
 * @param NewBaseAddress New desired baseaddress for the allocation (adjusting the size so the end stays the same)
 * @returns TRUE if it succeeded
 **/
UBOOL FBestFitAllocator::Reallocate( void* OldBaseAddress, void* NewBaseAddress )
{
	SCOPE_SECONDS_COUNTER(TimeSpentInAllocator);

	// The old base address must be tracked and must be the chunk's own base.
	FMemoryChunk* MatchingChunk = PointerToChunkMap.FindRef( PTRINT(OldBaseAddress) );
	check( MatchingChunk && PTRINT(OldBaseAddress) == PTRINT(MatchingChunk->Base) );

	// How far the base address moves (end address stays fixed either way).
	INT SizeDelta = Abs<INT>(PTRINT(NewBaseAddress) - PTRINT(OldBaseAddress));

	if ( PTRINT(NewBaseAddress) < PTRINT(OldBaseAddress) )
	{
		// Growing: the base moves down, so we need free memory immediately
		// before this chunk that is large enough to absorb the delta.
		FMemoryChunk* PrevChunk = MatchingChunk->PreviousChunk;
		if ( PrevChunk && PrevChunk->bIsAvailable && PrevChunk->Size >= SizeDelta )
		{
			PointerToChunkMap.Remove( PTRINT(OldBaseAddress) );

			// Shrink the previous free chunk and grow this one downward.
			PrevChunk->Size -= SizeDelta;
			MatchingChunk->Base -= SizeDelta;
			MatchingChunk->Size += SizeDelta;

			check(PTRINT(NewBaseAddress) == PTRINT(MatchingChunk->Base));
			PointerToChunkMap.Set( PTRINT(NewBaseAddress), MatchingChunk );

			// The previous chunk may have been consumed entirely.
			if ( PrevChunk->Size == 0 )
			{
				delete PrevChunk;
			}

			// Update usage stats in a thread safe way.
			appInterlockedAdd( &AllocatedMemorySize, +SizeDelta );
			appInterlockedAdd( &AvailableMemorySize, -SizeDelta );
			return TRUE;
		}
	}
	else
	{
		// Shrinking: the base moves up and the freed range goes to the
		// chunk (or newly split chunk) in front of us.
		check( SizeDelta <= MatchingChunk->Size );
		FMemoryChunk* PrevChunk = MatchingChunk->PreviousChunk;
		if ( PrevChunk )
		{
			// Shrink the current chunk from the front.
			MatchingChunk->Base += SizeDelta;
			MatchingChunk->Size -= SizeDelta;

			// Grow the previous chunk by the same amount.
			INT OriginalPrevSize = PrevChunk->Size;
			PrevChunk->Size += SizeDelta;

			// If the previous chunk was "in use", split it and insert a 2nd free chunk.
			if ( !PrevChunk->bIsAvailable )
			{
				Split( PrevChunk, OriginalPrevSize );
			}
		}
		else
		{
			// This was the first chunk; split off the leading range.
			Split( MatchingChunk, SizeDelta );

			// We're going to use the new chunk. Mark it as "used memory".
			MatchingChunk = MatchingChunk->NextChunk;
			MatchingChunk->UnlinkFree();

			// Make the original chunk "free memory".
			FreeChunk( MatchingChunk->PreviousChunk );
		}

		check(PTRINT(NewBaseAddress) == PTRINT(MatchingChunk->Base));
		PointerToChunkMap.Remove( PTRINT(OldBaseAddress) );
		PointerToChunkMap.Set( PTRINT(NewBaseAddress), MatchingChunk );

		// Update usage stats in a thread safe way.
		appInterlockedAdd( &AllocatedMemorySize, -SizeDelta );
		appInterlockedAdd( &AvailableMemorySize, +SizeDelta );
		return TRUE;
	}
	return FALSE;
}
ChunkAlloc::~ChunkAlloc()
{
    // Release every remaining block. NOTE(review): termination relies on
    // FreeChunk advancing firstBlock as a side effect — confirm in FreeChunk.
    while (firstBlock)
    {
        FreeChunk(firstBlock);
    }
}