/*!	Resizes the allocation at \a *_buffer to \a newSize bytes.
	On success, \a *_buffer is updated to point to the (possibly moved)
	buffer and B_OK is returned. A \a newSize of 0 frees the buffer and
	sets \a *_buffer to NULL. Buffers not belonging to any pool are
	forwarded to the system allocator.
	\return B_BAD_VALUE if \a _buffer is NULL, B_NO_MEMORY on allocation
		failure (the old buffer is left untouched in that case).
*/
status_t
rtm_realloc(void** _buffer, size_t newSize)
{
	if (_buffer == NULL)
		return B_BAD_VALUE;

	TRACE("rtm_realloc(%p, %lu)\n", *_buffer, newSize);

	void* oldBuffer = *_buffer;

	// find pool
	rtm_pool* pool = pool_for(oldBuffer);
	if (pool == NULL) {
		// Not from one of our pools - use the system allocator. Treat
		// newSize == 0 as an explicit free: C leaves realloc(ptr, 0)
		// implementation-defined, and an implementation that frees the
		// buffer and returns NULL would otherwise be misreported as
		// B_NO_MEMORY while *_buffer kept pointing at freed memory.
		// This also matches the pool path below, which returns B_OK.
		if (newSize == 0) {
			free(oldBuffer);
			*_buffer = NULL;
			return B_OK;
		}

		void* buffer = realloc(oldBuffer, newSize);
		if (buffer != NULL) {
			*_buffer = buffer;
			return B_OK;
		}
		return B_NO_MEMORY;
	}

	MutexLocker locker(&pool->lock);

	if (newSize == 0) {
		// a zero size request frees the buffer
		TRACE("realloc(%p, %lu) -> NULL\n", oldBuffer, newSize);
		pool->Free(oldBuffer);
		*_buffer = NULL;
		return B_OK;
	}

	size_t copySize = newSize;
	if (oldBuffer != NULL) {
		FreeChunk* oldChunk = FreeChunk::SetToAllocated(oldBuffer);

		// Check if the old buffer still fits, and if it makes sense to keep
		// it (i.e. we would not waste more than two thirds of the chunk)
		if (oldChunk->Size() >= newSize && newSize > oldChunk->Size() / 3) {
			TRACE("realloc(%p, %lu) old buffer is large enough\n",
				oldBuffer, newSize);
			return B_OK;
		}

		if (copySize > oldChunk->Size())
			copySize = oldChunk->Size();
	}

	// rtm_alloc() acquires pool->lock itself; release it first, since
	// re-locking the same (non-recursive) mutex here would deadlock.
	locker.Unlock();

	void* newBuffer = rtm_alloc(pool, newSize);
	if (newBuffer == NULL)
		return B_NO_MEMORY;

	if (oldBuffer != NULL) {
		memcpy(newBuffer, oldBuffer, copySize);

		// re-acquire the lock to return the old chunk to the pool
		locker.Lock();
		pool->Free(oldBuffer);
	}

	TRACE("realloc(%p, %lu) -> %p\n", oldBuffer, newSize, newBuffer);
	*_buffer = newBuffer;
	return B_OK;
}
/*!	Allocates \a size bytes from \a pool.
	A NULL \a pool forwards the request to the system malloc(). Returns
	NULL for an uninitialized pool (heap_base == NULL), a zero size, or
	when no sufficiently large free chunk is available.
	NOTE(review): acquires pool->lock - callers must not already hold it.
*/
void*
rtm_alloc(rtm_pool* pool, size_t size)
{
	if (pool == NULL)
		return malloc(size);

	if (pool->heap_base == NULL || size == 0)
		return NULL;

	MutexLocker _(&pool->lock);

	// align the size requirement to a kAlignment bytes boundary
	size = (size - 1 + kAlignment) & ~(size_t)(kAlignment - 1);

	// quick reject before walking the list
	if (size > pool->available) {
		TRACE("malloc(): Out of memory!\n");
		return NULL;
	}

	// First-fit walk over the free list. Enqueue() keeps the list sorted
	// by ascending chunk size, so the first chunk that fits is also the
	// smallest one that fits (best fit).
	FreeChunk* chunk = pool->free_anchor.Next();
	FreeChunk* last = &pool->free_anchor;
	while (chunk && chunk->Size() < size) {
		last = chunk;
		chunk = chunk->Next();
	}

	if (chunk == NULL) {
		// could not find a free chunk as large as needed
		TRACE("malloc(): Out of memory!\n");
		return NULL;
	}

	if (chunk->Size() > size + sizeof(FreeChunk) + kAlignment) {
		// if this chunk is bigger than the requested size,
		// we split it to form two chunks (with a minimal
		// size of kAlignment allocatable bytes).
		FreeChunk* freeChunk = chunk->Split(size);
		last->SetNext(freeChunk);

		// re-enqueue the free chunk at the correct position
		// (the remainder's size no longer matches its list slot)
		freeChunk->Remove(pool, last);
		freeChunk->Enqueue(pool);
	} else {
		// remove the chunk from the free list
		last->SetNext(chunk->Next());
	}

	// NOTE(review): in the no-split branch the chunk handed out may be up
	// to sizeof(FreeChunk) + kAlignment bytes larger than `size`, yet only
	// `size` (plus the sizeof(uint32) header) is subtracted here - verify
	// that Free() balances the bookkeeping the same way.
	pool->available -= size + sizeof(uint32);

	TRACE("malloc(%lu) -> %p\n", size, chunk->AllocatedAddress());
	return chunk->AllocatedAddress();
}
/*!	Returns the physical (chunk) size backing \a buffer, or 0 for a NULL
	buffer. The chunk header is located directly from the buffer address.
*/
status_t
rtm_phys_size_for(void* buffer)
{
	if (buffer != NULL) {
		FreeChunk* allocated = FreeChunk::SetToAllocated(buffer);
		return allocated->Size();
	}

	return 0;
}
/*!	Returns the size of the allocation at \a buffer, or 0 for NULL.
*/
status_t
rtm_size_for(void* buffer)
{
	if (buffer == NULL)
		return 0;

	// TODO: we currently always return the actual chunk size, not the
	// allocated one
	return FreeChunk::SetToAllocated(buffer)->Size();
}
/*!	Inserts this chunk into the pool's free list, which is kept sorted by
	ascending chunk size. The list is singly linked off the pool's
	free_anchor sentinel.
*/
void
FreeChunk::Enqueue(rtm_pool* pool)
{
	// Walk the sorted list until we hit the first chunk that is at least
	// as large as this one; `previous` trails one node behind.
	FreeChunk* previous = &pool->free_anchor;
	for (FreeChunk* current = previous->fNext;
			current != NULL && current->Size() < fSize;
			current = current->fNext) {
		previous = current;
	}

	// splice this chunk in between `previous` and its successor
	fNext = previous->fNext;
	previous->fNext = this;
}