Heap_Resize_status _Heap_Resize_block(
  Heap_Control *heap,
  void *alloc_begin_ptr,
  uintptr_t new_alloc_size,
  uintptr_t *old_size,
  uintptr_t *new_size
)
{
  uintptr_t const page_size = heap->page_size;
  uintptr_t const alloc_begin = (uintptr_t) alloc_begin_ptr;
  Heap_Block *const block = _Heap_Block_of_alloc_area( alloc_begin, page_size );

  *old_size = 0;
  *new_size = 0;

  if ( _Heap_Is_block_in_heap( heap, block ) ) {
    _Heap_Protection_block_check( heap, block );

    return _Heap_Resize_block_checked(
      heap,
      block,
      alloc_begin,
      new_alloc_size,
      old_size,
      new_size
    );
  }

  return HEAP_RESIZE_FATAL_ERROR;
}
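/*
 * Usage sketch, not part of the original file: try to grow or shrink an
 * existing allocation in place.  The status names HEAP_RESIZE_SUCCESSFUL,
 * HEAP_RESIZE_UNSATISFIED, and HEAP_RESIZE_FATAL_ERROR come from the
 * Heap_Resize_status enumeration; the wrapper itself is hypothetical.
 */
static void *example_resize_in_place(
  Heap_Control *heap,
  void *ptr,
  uintptr_t new_alloc_size
)
{
  uintptr_t old_size = 0;
  uintptr_t new_size = 0;
  Heap_Resize_status status = _Heap_Resize_block(
    heap,
    ptr,
    new_alloc_size,
    &old_size,
    &new_size
  );

  if ( status == HEAP_RESIZE_SUCCESSFUL ) {
    /* The block now provides at least new_alloc_size usable bytes. */
    return ptr;
  }

  /*
   * HEAP_RESIZE_UNSATISFIED: the block could not be resized in place, so
   * the caller must allocate, copy, and free.  HEAP_RESIZE_FATAL_ERROR:
   * ptr was not a valid allocation from this heap.
   */
  return NULL;
}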
void *_Heap_Allocate_aligned_with_boundary(
  Heap_Control *heap,
  uintptr_t alloc_size,
  uintptr_t alignment,
  uintptr_t boundary
)
{
  Heap_Statistics *const stats = &heap->stats;
  uintptr_t const block_size_floor =
    alloc_size + HEAP_BLOCK_HEADER_SIZE - HEAP_ALLOC_BONUS;
  uintptr_t const page_size = heap->page_size;
  Heap_Block *block = NULL;
  uintptr_t alloc_begin = 0;
  uint32_t search_count = 0;
  bool search_again = false;

  if ( block_size_floor < alloc_size ) {
    /* Integer overflow occurred */
    return NULL;
  }

  if ( boundary != 0 ) {
    if ( boundary < alloc_size ) {
      return NULL;
    }

    if ( alignment == 0 ) {
      alignment = page_size;
    }
  }

  do {
    Heap_Block *const free_list_tail = _Heap_Free_list_tail( heap );

    block = _Heap_Free_list_first( heap );
    while ( block != free_list_tail ) {
      _HAssert( _Heap_Is_prev_used( block ) );

      _Heap_Protection_block_check( heap, block );

      /*
       * The HEAP_PREV_BLOCK_USED flag is always set in the block
       * size_and_flag field.  Thus the value is about one unit larger than
       * the real block size.  The greater than operator takes this into
       * account.
       */
      if ( block->size_and_flag > block_size_floor ) {
        if ( alignment == 0 ) {
          alloc_begin = _Heap_Alloc_area_of_block( block );
        } else {
          alloc_begin = _Heap_Check_block(
            heap,
            block,
            alloc_size,
            alignment,
            boundary
          );
        }
      }

      /* Statistics */
      ++search_count;

      if ( alloc_begin != 0 ) {
        break;
      }

      block = block->next;
    }

    search_again = _Heap_Protection_free_delayed_blocks( heap, alloc_begin );
  } while ( search_again );

  if ( alloc_begin != 0 ) {
    /* Statistics */
    ++stats->allocs;
    stats->searches += search_count;

    block = _Heap_Block_allocate( heap, block, alloc_begin, alloc_size );

    _Heap_Check_allocation(
      heap,
      block,
      alloc_begin,
      alloc_size,
      alignment,
      boundary
    );
  }

  /* Statistics */
  if ( stats->max_search < search_count ) {
    stats->max_search = search_count;
  }

  return (void *) alloc_begin;
}
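/*
 * Usage sketch, not part of the original file: request 1024 bytes aligned
 * to a 64-byte cache line that must not cross a 4096-byte boundary, as a
 * DMA engine might require.  Note that the boundary (4096) must be at
 * least alloc_size (1024), per the check above.  A NULL result means no
 * free block satisfied the combined size, alignment, and boundary
 * constraints.
 */
static void *example_alloc_dma_area( Heap_Control *heap )
{
  return _Heap_Allocate_aligned_with_boundary( heap, 1024, 64, 4096 );
}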
bool _Heap_Free( Heap_Control *heap, void *alloc_begin_ptr )
{
  Heap_Statistics *const stats = &heap->stats;
  uintptr_t alloc_begin;
  Heap_Block *block;
  Heap_Block *next_block = NULL;
  uintptr_t block_size = 0;
  uintptr_t next_block_size = 0;
  bool next_is_free = false;

  /*
   * If NULL, return true so a free on NULL is considered a valid release.
   * This is a special case that could be handled by the in-heap check;
   * however, that would result in false being returned, which is wrong.
   */
  if ( alloc_begin_ptr == NULL ) {
    return true;
  }

  alloc_begin = (uintptr_t) alloc_begin_ptr;
  block = _Heap_Block_of_alloc_area( alloc_begin, heap->page_size );

  if ( !_Heap_Is_block_in_heap( heap, block ) ) {
    return false;
  }

  _Heap_Protection_block_check( heap, block );

  block_size = _Heap_Block_size( block );
  next_block = _Heap_Block_at( block, block_size );

  if ( !_Heap_Is_block_in_heap( heap, next_block ) ) {
    return false;
  }

  _Heap_Protection_block_check( heap, next_block );

  if ( !_Heap_Is_prev_used( next_block ) ) {
    _Heap_Protection_block_error( heap, block );
    return false;
  }

  if ( !_Heap_Protection_determine_block_free( heap, block ) ) {
    return true;
  }

  next_block_size = _Heap_Block_size( next_block );
  next_is_free = next_block != heap->last_block
    && !_Heap_Is_prev_used( _Heap_Block_at( next_block, next_block_size ) );

  if ( !_Heap_Is_prev_used( block ) ) {
    uintptr_t const prev_size = block->prev_size;
    Heap_Block *const prev_block = _Heap_Block_at( block, -prev_size );

    if ( !_Heap_Is_block_in_heap( heap, prev_block ) ) {
      _HAssert( false );
      return false;
    }

    /*
     * As we always coalesce free blocks, the block that precedes prev_block
     * must have been used.
     */
    if ( !_Heap_Is_prev_used( prev_block ) ) {
      _HAssert( false );
      return false;
    }

    if ( next_is_free ) {       /* coalesce both */
      uintptr_t const size = block_size + prev_size + next_block_size;

      _Heap_Free_list_remove( next_block );
      stats->free_blocks -= 1;
      prev_block->size_and_flag = size | HEAP_PREV_BLOCK_USED;
      next_block = _Heap_Block_at( prev_block, size );
      _HAssert( !_Heap_Is_prev_used( next_block ) );
      next_block->prev_size = size;
    } else {                    /* coalesce prev */
      uintptr_t const size = block_size + prev_size;

      prev_block->size_and_flag = size | HEAP_PREV_BLOCK_USED;
      next_block->size_and_flag &= ~HEAP_PREV_BLOCK_USED;
      next_block->prev_size = size;
    }
  } else if ( next_is_free ) {  /* coalesce next */
    uintptr_t const size = block_size + next_block_size;

    _Heap_Free_list_replace( next_block, block );
    block->size_and_flag = size | HEAP_PREV_BLOCK_USED;
    next_block = _Heap_Block_at( block, size );
    next_block->prev_size = size;
  } else {                      /* no coalesce */
    /*
     * Add 'block' to the head of the free blocks list as it tends to
     * produce less fragmentation than adding to the tail.
     */
    _Heap_Free_list_insert_after( _Heap_Free_list_head( heap ), block );
    block->size_and_flag = block_size | HEAP_PREV_BLOCK_USED;
    next_block->size_and_flag &= ~HEAP_PREV_BLOCK_USED;
    next_block->prev_size = block_size;

    /* Statistics */
    ++stats->free_blocks;
    if ( stats->max_free_blocks < stats->free_blocks ) {
      stats->max_free_blocks = stats->free_blocks;
    }
  }

  /* Statistics */
  --stats->used_blocks;
  ++stats->frees;
  stats->free_size += block_size;

  return true;
}
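/*
 * Usage sketch, not part of the original file: a checked release.  Freeing
 * NULL is valid and returns true per the special case above; a false return
 * indicates the pointer did not belong to this heap or the block structure
 * was corrupt.  The wrapper name is hypothetical.
 */
static void example_checked_free( Heap_Control *heap, void *ptr )
{
  bool ok = _Heap_Free( heap, ptr );

  if ( !ok ) {
    /* Invalid pointer or corrupted heap; treat as a fatal usage error. */
    _HAssert( false );
  }
}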