/*
 * Walks the free list and fills @a info with the number of free blocks,
 * the size of the largest free block, and the total free size.
 */
void _Heap_Get_free_information( Heap_Control *the_heap, Heap_Information *info )
{
  Heap_Block *current;
  Heap_Block *const sentinel = _Heap_Free_list_tail( the_heap );

  info->number = 0;
  info->largest = 0;
  info->total = 0;

  for (
    current = _Heap_Free_list_first( the_heap );
    current != sentinel;
    current = current->next
  ) {
    uint32_t const block_size = _Heap_Block_size( current );

    /* As we always coalesce free blocks, prev block must have been used. */
    _HAssert( _Heap_Is_prev_used( current ) );

    ++info->number;
    info->total += block_size;
    if ( block_size > info->largest ) {
      info->largest = block_size;
    }
  }
}
/*
 * Makes the heap look exhausted except for the requested allocation sizes.
 *
 * First satisfies each size in @a block_sizes and remembers those blocks,
 * then allocates every block still on the free list, and finally frees the
 * remembered blocks again.  Returns the singly-linked (via @c next) list of
 * blocks removed from the free list; undo with the matching greedy-free
 * routine.
 */
Heap_Block *_Heap_Greedy_allocate(
  Heap_Control *heap,
  const uintptr_t *block_sizes,
  size_t block_count
)
{
  Heap_Block *const tail = _Heap_Free_list_tail( heap );
  Heap_Block *wanted = NULL;    /* blocks for the requested sizes */
  Heap_Block *consumed = NULL;  /* blocks taken off the free list */
  Heap_Block *block;
  size_t index;

  _Heap_Protection_free_all_delayed_blocks( heap );

  /* Satisfy the requested allocations first and remember them. */
  for ( index = 0; index < block_count; ++index ) {
    void *alloc = _Heap_Allocate( heap, block_sizes[ index ] );

    if ( alloc != NULL ) {
      Heap_Block *requested = _Heap_Block_of_alloc_area(
        (uintptr_t) alloc,
        heap->page_size
      );

      requested->next = wanted;
      wanted = requested;
    }
  }

  /* Consume everything that is still free. */
  while ( ( block = _Heap_Free_list_first( heap ) ) != tail ) {
    _Heap_Block_allocate(
      heap,
      block,
      _Heap_Alloc_area_of_block( block ),
      _Heap_Block_size( block ) - HEAP_BLOCK_HEADER_SIZE
    );

    block->next = consumed;
    consumed = block;
  }

  /* Give the requested allocations back so exactly they remain available. */
  while ( wanted != NULL ) {
    block = wanted;
    wanted = wanted->next;
    _Heap_Free( heap, (void *) _Heap_Alloc_area_of_block( block ) );
  }

  return consumed;
}
/* Returns true if and only if @a block is a member of the heap's free list. */
static bool _Heap_Walk_is_in_free_list( Heap_Control *heap, Heap_Block *block )
{
  const Heap_Block *const tail = _Heap_Free_list_tail( heap );
  const Heap_Block *iter;

  for ( iter = _Heap_Free_list_first( heap ); iter != tail; iter = iter->next ) {
    if ( iter == block ) {
      return true;
    }
  }

  return false;
}
/*
 * Returns @a block to the heap, then moves it to the end of the free list so
 * that earlier memory areas are preferred by subsequent allocations.
 */
static void _Heap_Free_block( Heap_Control *heap, Heap_Block *block )
{
  Heap_Statistics *const stats = &heap->stats;
  Heap_Block *first_free;

  /*
   * Statistics: pre-compensate the counters that the _Heap_Free() call below
   * will decrement/increment, so the net statistics stay unchanged.
   */
  ++stats->used_blocks;
  --stats->frees;

  /*
   * The _Heap_Free() will place the block to the head of free list. We want
   * the new block at the end of the free list. So that initial and earlier
   * areas are consumed first.
   */
  _Heap_Free( heap, (void *) _Heap_Alloc_area_of_block( block ) );
  _Heap_Protection_free_all_delayed_blocks( heap );

  /* Move the just-freed head of the free list to the tail end. */
  first_free = _Heap_Free_list_first( heap );
  _Heap_Free_list_remove( first_free );
  _Heap_Free_list_insert_before( _Heap_Free_list_tail( heap ), first_free );
}
/*
 * Allocates @a alloc_size bytes from @a heap, optionally aligned to
 * @a alignment and not crossing an address that is a multiple of
 * @a boundary.  Returns the allocation begin address or NULL on failure.
 */
void *_Heap_Allocate_aligned_with_boundary(
  Heap_Control *heap,
  uintptr_t alloc_size,
  uintptr_t alignment,
  uintptr_t boundary
)
{
  Heap_Statistics *const stats = &heap->stats;
  /* Minimum block size able to satisfy the request (see overflow check). */
  uintptr_t const block_size_floor =
    alloc_size + HEAP_BLOCK_HEADER_SIZE - HEAP_ALLOC_BONUS;
  uintptr_t const page_size = heap->page_size;
  Heap_Block *block = NULL;
  uintptr_t alloc_begin = 0;
  uint32_t search_count = 0;
  bool search_again = false;

  if ( block_size_floor < alloc_size ) {
    /* Integer overflow occurred */
    return NULL;
  }

  if ( boundary != 0 ) {
    /* A boundary smaller than the allocation can never be satisfied. */
    if ( boundary < alloc_size ) {
      return NULL;
    }

    /* Boundary handling requires an alignment; default to the page size. */
    if ( alignment == 0 ) {
      alignment = page_size;
    }
  }

  /*
   * Search the free list; repeat the whole search if releasing delayed
   * (protection) blocks made new memory available.
   */
  do {
    Heap_Block *const free_list_tail = _Heap_Free_list_tail( heap );

    block = _Heap_Free_list_first( heap );
    while ( block != free_list_tail ) {
      _HAssert( _Heap_Is_prev_used( block ) );

      _Heap_Protection_block_check( heap, block );

      /*
       * The HEAP_PREV_BLOCK_USED flag is always set in the block size_and_flag
       * field.  Thus the value is about one unit larger than the real block
       * size.  The greater than operator takes this into account.
       */
      if ( block->size_and_flag > block_size_floor ) {
        if ( alignment == 0 ) {
          alloc_begin = _Heap_Alloc_area_of_block( block );
        } else {
          /* Checks alignment/boundary; returns 0 if the block cannot fit. */
          alloc_begin = _Heap_Check_block(
            heap,
            block,
            alloc_size,
            alignment,
            boundary
          );
        }
      }

      /* Statistics */
      ++search_count;

      if ( alloc_begin != 0 ) {
        break;
      }

      block = block->next;
    }

    search_again = _Heap_Protection_free_delayed_blocks( heap, alloc_begin );
  } while ( search_again );

  if ( alloc_begin != 0 ) {
    /* Statistics */
    ++stats->allocs;
    stats->searches += search_count;

    block = _Heap_Block_allocate( heap, block, alloc_begin, alloc_size );

    _Heap_Check_allocation(
      heap,
      block,
      alloc_begin,
      alloc_size,
      alignment,
      boundary
    );
  }

  /* Statistics */
  if ( stats->max_search < search_count ) {
    stats->max_search = search_count;
  }

  return (void *) alloc_begin;
}
/*
 * Verifies the integrity of the heap's free list: each block must lie inside
 * the heap, have a page-aligned allocation area, be marked free, and have a
 * consistent back link.  Reports the first violation via @a printer and
 * returns false; returns true if the whole list is consistent.
 */
static bool _Heap_Walk_check_free_list(
  int source,
  Heap_Walk_printer printer,
  Heap_Control *heap
)
{
  uintptr_t const page_size = heap->page_size;
  const Heap_Block *const tail = _Heap_Free_list_tail( heap );
  const Heap_Block *previous = tail;
  const Heap_Block *walker;

  for (
    walker = _Heap_Free_list_first( heap );
    walker != tail;
    previous = walker, walker = walker->next
  ) {
    if ( !_Heap_Is_block_in_heap( heap, walker ) ) {
      (*printer)( source, true, "free block 0x%08x: not in heap\n", walker );

      return false;
    }

    if ( !_Heap_Is_aligned( _Heap_Alloc_area_of_block( walker ), page_size ) ) {
      (*printer)(
        source,
        true,
        "free block 0x%08x: alloc area not page aligned\n",
        walker
      );

      return false;
    }

    if ( _Heap_Is_used( walker ) ) {
      (*printer)( source, true, "free block 0x%08x: is used\n", walker );

      return false;
    }

    if ( walker->prev != previous ) {
      (*printer)(
        source,
        true,
        "free block 0x%08x: invalid previous block 0x%08x\n",
        walker,
        walker->prev
      );

      return false;
    }
  }

  return true;
}
/*
 * Checks a single free block: prints its links, then verifies that the next
 * block records the correct previous size, that two free blocks are not
 * adjacent, and that the block is actually on the free list.  Reports the
 * first violation via @a printer and returns false.
 */
static bool _Heap_Walk_check_free_block(
  int source,
  Heap_Walk_printer printer,
  Heap_Control *heap,
  Heap_Block *block
)
{
  Heap_Block *const tail = _Heap_Free_list_tail( heap );
  Heap_Block *const head = _Heap_Free_list_head( heap );
  Heap_Block *const first_free = _Heap_Free_list_first( heap );
  Heap_Block *const last_free = _Heap_Free_list_last( heap );
  bool const prev_used = _Heap_Is_prev_used( block );
  uintptr_t const block_size = _Heap_Block_size( block );
  Heap_Block *const next_block = _Heap_Block_at( block, block_size );
  const char *prev_label = "";
  const char *next_label = "";

  if ( block->prev == first_free ) {
    prev_label = " (= first free)";
  } else if ( block->prev == head ) {
    prev_label = " (= head)";
  }

  if ( block->next == last_free ) {
    next_label = " (= last free)";
  } else if ( block->next == tail ) {
    next_label = " (= tail)";
  }

  (*printer)(
    source,
    false,
    "block 0x%08x: size %u, prev 0x%08x%s, next 0x%08x%s\n",
    block,
    block_size,
    block->prev,
    prev_label,
    block->next,
    next_label
  );

  if ( block_size != next_block->prev_size ) {
    (*printer)(
      source,
      true,
      "block 0x%08x: size %u != size %u (in next block 0x%08x)\n",
      block,
      block_size,
      next_block->prev_size,
      next_block
    );

    return false;
  }

  if ( !prev_used ) {
    (*printer)(
      source,
      true,
      "block 0x%08x: two consecutive blocks are free\n",
      block
    );

    return false;
  }

  if ( !_Heap_Walk_is_in_free_list( heap, block ) ) {
    (*printer)(
      source,
      true,
      "block 0x%08x: free block not in free list\n",
      block
    );

    return false;
  }

  return true;
}
/*
 * Initializes @a heap to manage the area starting at @a heap_area_begin_ptr
 * of @a heap_area_size bytes with the given allocation @a page_size.
 * Returns the maximum usable allocation area size, or 0 if the area is
 * invalid, too small, or an integer overflow occurs.
 */
uintptr_t _Heap_Initialize(
  Heap_Control *heap,
  void *heap_area_begin_ptr,
  uintptr_t heap_area_size,
  uintptr_t page_size
)
{
  Heap_Statistics *const stats = &heap->stats;
  uintptr_t const heap_area_begin = (uintptr_t) heap_area_begin_ptr;
  uintptr_t const heap_area_end = heap_area_begin + heap_area_size;
  uintptr_t alloc_area_begin = heap_area_begin + HEAP_BLOCK_HEADER_SIZE;
  uintptr_t alloc_area_size = 0;
  uintptr_t first_block_begin = 0;
  uintptr_t first_block_size = 0;
  uintptr_t min_block_size = 0;
  uintptr_t overhead = 0;
  Heap_Block *first_block = NULL;
  Heap_Block *last_block = NULL;

  /* Default or round up the page size; it must be CPU aligned. */
  if ( page_size == 0 ) {
    page_size = CPU_ALIGNMENT;
  } else {
    page_size = _Heap_Align_up( page_size, CPU_ALIGNMENT );

    if ( page_size < CPU_ALIGNMENT ) {
      /* Integer overflow */
      return 0;
    }
  }

  min_block_size = _Heap_Align_up( sizeof( Heap_Block ), page_size );

  /* Place the first block so that its allocation area is page aligned. */
  alloc_area_begin = _Heap_Align_up( alloc_area_begin, page_size );
  first_block_begin = alloc_area_begin - HEAP_BLOCK_HEADER_SIZE;
  overhead = HEAP_BLOCK_HEADER_SIZE + (first_block_begin - heap_area_begin);
  first_block_size = heap_area_size - overhead;
  first_block_size = _Heap_Align_down ( first_block_size, page_size );
  alloc_area_size = first_block_size - HEAP_BLOCK_HEADER_SIZE;

  /*
   * NOTE(review): first_block_size may wrap (defined for unsigned types)
   * when heap_area_size <= overhead; the check below rejects that case
   * before any wrapped value is used.
   */
  if (
    heap_area_end < heap_area_begin
      || heap_area_size <= overhead
      || first_block_size < min_block_size
  ) {
    /* Invalid area or area too small */
    return 0;
  }

  /* First block: spans the whole usable area and is the only free block. */
  first_block = (Heap_Block *) first_block_begin;
  /* prev_size of the first block is unused; holds the page size here. */
  first_block->prev_size = page_size;
  first_block->size_and_flag = first_block_size | HEAP_PREV_BLOCK_USED;
  first_block->next = _Heap_Free_list_tail( heap );
  first_block->prev = _Heap_Free_list_head( heap );

  /*
   * Last block.
   *
   * The next block of the last block is the first block. Since the first
   * block indicates that the previous block is used, this ensures that the
   * last block appears as used for the _Heap_Is_used() and _Heap_Is_free()
   * functions.
   */
  last_block = _Heap_Block_at( first_block, first_block_size );
  last_block->prev_size = first_block_size;
  /* Negative offset so that "block at last_block + size" wraps to first. */
  last_block->size_and_flag = first_block_begin - (uintptr_t) last_block;

  /* Heap control */
  heap->page_size = page_size;
  heap->min_block_size = min_block_size;
  heap->area_begin = heap_area_begin;
  heap->area_end = heap_area_end;
  heap->first_block = first_block;
  heap->last_block = last_block;
  _Heap_Free_list_head( heap )->next = first_block;
  _Heap_Free_list_tail( heap )->prev = first_block;

  /* Statistics */
  stats->size = first_block_size;
  stats->free_size = first_block_size;
  stats->min_free_size = first_block_size;
  stats->free_blocks = 1;
  stats->max_free_blocks = 1;
  stats->used_blocks = 0;
  stats->max_search = 0;
  stats->allocs = 0;
  stats->searches = 0;
  stats->frees = 0;
  stats->resizes = 0;
  /* NOTE(review): 'instance' is a file-scope counter defined elsewhere. */
  stats->instance = instance++;

  _HAssert( _Heap_Is_aligned( heap->page_size, CPU_ALIGNMENT ) );
  _HAssert( _Heap_Is_aligned( heap->min_block_size, page_size ) );
  _HAssert(
    _Heap_Is_aligned( _Heap_Alloc_area_of_block( first_block ), page_size )
  );
  _HAssert(
    _Heap_Is_aligned( _Heap_Alloc_area_of_block( last_block ), page_size )
  );

  return alloc_area_size;
}