/*
 * Exhausts the heap: first sets aside one allocation per entry of
 * @a block_sizes (failures are silently skipped), then claims every block
 * remaining on the free list, and finally returns the set-aside areas to
 * the heap.  Returns the claimed blocks chained through their next fields;
 * release them with _Heap_Greedy_free().
 */
Heap_Block *_Heap_Greedy_allocate(
  Heap_Control *heap,
  const uintptr_t *block_sizes,
  size_t block_count
)
{
  Heap_Block *const tail = _Heap_Free_list_tail( heap );
  Heap_Block *kept_aside = NULL;
  Heap_Block *claimed = NULL;
  Heap_Block *block;
  size_t index;

  _Heap_Protection_free_all_delayed_blocks( heap );

  /* Reserve the requested sizes so they survive the greedy sweep below */
  for ( index = 0; index < block_count; ++index ) {
    void *area = _Heap_Allocate( heap, block_sizes[ index ] );

    if ( area != NULL ) {
      Heap_Block *reserved =
        _Heap_Block_of_alloc_area( (uintptr_t) area, heap->page_size );

      reserved->next = kept_aside;
      kept_aside = reserved;
    }
  }

  /* Claim every block still on the free list */
  while ( ( block = _Heap_Free_list_first( heap ) ) != tail ) {
    _Heap_Block_allocate(
      heap,
      block,
      _Heap_Alloc_area_of_block( block ),
      _Heap_Block_size( block ) - HEAP_BLOCK_HEADER_SIZE
    );
    block->next = claimed;
    claimed = block;
  }

  /* Hand the reserved areas back to the heap */
  while ( kept_aside != NULL ) {
    block = kept_aside;
    kept_aside = kept_aside->next;
    _Heap_Free( heap, (void *) _Heap_Alloc_area_of_block( block ) );
  }

  return claimed;
}
/*
 * Checks whether @a block can satisfy an allocation of @a alloc_size bytes
 * honoring @a alignment and @a boundary.  Returns the allocation begin
 * address on success, and 0 if the block is unsuitable.  The allocation is
 * placed as high inside the block as possible so that a valid free block
 * can remain at the front.
 */
static uintptr_t _Heap_Check_block(
  const Heap_Control *heap,
  const Heap_Block *block,
  uintptr_t alloc_size,
  uintptr_t alignment,
  uintptr_t boundary
)
{
  uintptr_t const page_size = heap->page_size;
  uintptr_t const min_block_size = heap->min_block_size;

  uintptr_t const block_begin = (uintptr_t) block;
  uintptr_t const block_size = _Heap_Block_size( block );
  uintptr_t const block_end = block_begin + block_size;

  uintptr_t const alloc_begin_floor = _Heap_Alloc_area_of_block( block );
  /* Highest begin address that still leaves room for a trailing free block */
  uintptr_t const alloc_begin_ceiling = block_end - min_block_size
    + HEAP_BLOCK_HEADER_SIZE + page_size - 1;

  uintptr_t alloc_end = block_end + HEAP_ALLOC_BONUS;
  uintptr_t alloc_begin = alloc_end - alloc_size;

  alloc_begin = _Heap_Align_down( alloc_begin, alignment );

  /* Ensure that we have a valid new block at the end */
  if ( alloc_begin > alloc_begin_ceiling ) {
    alloc_begin = _Heap_Align_down( alloc_begin_ceiling, alignment );
  }

  alloc_end = alloc_begin + alloc_size;

  /* Ensure boundary constraint */
  if ( boundary != 0 ) {
    uintptr_t const boundary_floor = alloc_begin_floor + alloc_size;
    uintptr_t boundary_line = _Heap_Align_down( alloc_end, boundary );

    /* Move the allocation downwards until it no longer crosses a boundary */
    while ( alloc_begin < boundary_line && boundary_line < alloc_end ) {
      if ( boundary_line < boundary_floor ) {
        /* No room left below the boundary line */
        return 0;
      }
      alloc_begin = boundary_line - alloc_size;
      alloc_begin = _Heap_Align_down( alloc_begin, alignment );
      alloc_end = alloc_begin + alloc_size;
      boundary_line = _Heap_Align_down( alloc_end, boundary );
    }
  }

  /* Ensure that we have a valid new block at the beginning */
  if ( alloc_begin >= alloc_begin_floor ) {
    uintptr_t const alloc_block_begin =
      (uintptr_t) _Heap_Block_of_alloc_area( alloc_begin, page_size );
    uintptr_t const free_size = alloc_block_begin - block_begin;

    /* Leading space must be zero or big enough to form a free block */
    if ( free_size >= min_block_size || free_size == 0 ) {
      return alloc_begin;
    }
  }

  return 0;
}
/*
 * Queues @a block on the heap's delayed free list instead of releasing it
 * immediately, records the freeing task, and fills the block's usable area
 * with HEAP_FREE_PATTERN so later use-after-free can be detected.
 */
static void _Heap_Protection_delay_block_free(
  Heap_Control *heap,
  Heap_Block *block
)
{
  uintptr_t *word = (uintptr_t *) _Heap_Alloc_area_of_block( block );
  uintptr_t *const area_end = (uintptr_t *)
    ( (uintptr_t) block + _Heap_Block_size( block ) + HEAP_ALLOC_BONUS );
  uintptr_t const count = heap->Protection.delayed_free_block_count;

  /* A self-reference marks the block as delayed but not yet freed */
  block->Protection_begin.next_delayed_free_block = block;
  block->Protection_begin.task = _Thread_Get_executing();

  /* Append to the delayed free list */
  if ( count == 0 ) {
    heap->Protection.first_delayed_free_block = block;
  } else {
    heap->Protection.last_delayed_free_block
      ->Protection_begin.next_delayed_free_block = block;
  }
  heap->Protection.last_delayed_free_block = block;
  heap->Protection.delayed_free_block_count = count + 1;

  /* Poison the usable area */
  while ( word != area_end ) {
    *word = HEAP_FREE_PATTERN;
    ++word;
  }
}
static void test_heap_do_block_allocate( int variant, void *p2 ) { Heap_Block *const block = _Heap_Block_of_alloc_area( (uintptr_t) p2, test_page_size()); uintptr_t const alloc_box_begin = _Heap_Alloc_area_of_block( block ); uintptr_t const alloc_box_size = _Heap_Block_size( block ); uintptr_t const alloc_box_end = alloc_box_begin + alloc_box_size; uintptr_t alloc_begin = 0; uintptr_t alloc_size = 0; puts( "\tallocate block at the beginning"); alloc_begin = alloc_box_begin; alloc_size = 0; test_block_alloc( variant, 0, alloc_begin, alloc_size ); puts( "\tallocate block full space"); alloc_begin = alloc_box_begin; alloc_size = alloc_box_size + HEAP_ALLOC_BONUS - HEAP_BLOCK_HEADER_SIZE; test_block_alloc( variant, 1, alloc_begin, alloc_size ); puts( "\tallocate block in the middle"); alloc_begin = alloc_box_begin + TEST_DEFAULT_PAGE_SIZE; alloc_size = 0; test_block_alloc( variant, 2, alloc_begin, alloc_size ); puts( "\tallocate block at the end"); alloc_begin = alloc_box_end - TEST_DEFAULT_PAGE_SIZE; alloc_size = TEST_DEFAULT_PAGE_SIZE + HEAP_ALLOC_BONUS - HEAP_BLOCK_HEADER_SIZE; test_block_alloc( variant, 3, alloc_begin, alloc_size ); }
/*
 * Allocates the area starting at @a alloc_begin of @a alloc_size bytes out
 * of @a block.  The block may be free or used.  Returns the block that
 * corresponds to the allocated area.
 */
Heap_Block *_Heap_Block_allocate(
  Heap_Control *heap,
  Heap_Block *block,
  uintptr_t alloc_begin,
  uintptr_t alloc_size
)
{
  Heap_Statistics *const stats = &heap->stats;
  uintptr_t const alloc_area_begin = _Heap_Alloc_area_of_block( block );
  uintptr_t const alloc_area_offset = alloc_begin - alloc_area_begin;
  Heap_Block *free_list_anchor = NULL;

  _HAssert( alloc_area_begin <= alloc_begin );

  if ( _Heap_Is_free( block ) ) {
    /* Remember the predecessor as anchor for re-inserting a remainder */
    free_list_anchor = block->prev;

    _Heap_Free_list_remove( block );

    /* Statistics */
    --stats->free_blocks;
    ++stats->used_blocks;
    stats->free_size -= _Heap_Block_size( block );
  } else {
    /* Used block: a split-off remainder goes to the free list head */
    free_list_anchor = _Heap_Free_list_head( heap );
  }

  if ( alloc_area_offset < heap->page_size ) {
    /* Allocation begins within the first page of the allocation area:
       take the block from the front and fold the offset into the size */
    alloc_size += alloc_area_offset;

    block = _Heap_Block_allocate_from_begin(
      heap,
      block,
      free_list_anchor,
      alloc_size
    );
  } else {
    block = _Heap_Block_allocate_from_end(
      heap,
      block,
      free_list_anchor,
      alloc_begin,
      alloc_size
    );
  }

  /* Statistics */
  if ( stats->min_free_size > stats->free_size ) {
    stats->min_free_size = stats->free_size;
  }

  return block;
}
void _Heap_Greedy_free( Heap_Control *heap, Heap_Block *blocks ) { while ( blocks != NULL ) { Heap_Block *current = blocks; blocks = blocks->next; _Heap_Free( heap, (void *) _Heap_Alloc_area_of_block( current ) ); } }
static void test_heap_extend(void) { bool ret = false; Heap_Control *heap = &TestHeap; uint8_t *area_begin = TestHeapMemory; uint8_t *sub_area_begin; uint8_t *sub_area_end; _Heap_Initialize( heap, area_begin + 768, 256, 0 ); sub_area_begin = (uint8_t *) heap->first_block; sub_area_end = (uint8_t *) _Heap_Alloc_area_of_block( heap->last_block ); puts( "heap extend - link below" ); ret = _Protected_heap_Extend( heap, area_begin + 0, 256 ); test_heap_assert( ret, true ); puts( "heap extend - merge below overlap" ); ret = _Protected_heap_Extend( heap, sub_area_begin - 128, 256 ); test_heap_assert( ret, false ); puts( "heap extend - merge below" ); ret = _Protected_heap_Extend( heap, sub_area_begin - 256, 256 ); test_heap_assert( ret, true ); puts( "heap extend - merge above overlap" ); ret = _Protected_heap_Extend( heap, sub_area_end - 128, 256 ); test_heap_assert( ret, false ); puts( "heap extend - merge above" ); ret = _Protected_heap_Extend( heap, sub_area_end, 256 ); test_heap_assert( ret, true ); puts( "heap extend - link above" ); ret = _Protected_heap_Extend( heap, area_begin + 1536, 256 ); test_heap_assert( ret, true ); puts( "heap extend - area too small" ); ret = _Protected_heap_Extend( heap, area_begin + 2048, 0 ); test_heap_assert( ret, false ); puts( "heap extend - invalid area" ); ret = _Protected_heap_Extend( heap, (void *) -1, 2 ); test_heap_assert( ret, false ); area_begin = (uint8_t *) (((uintptr_t) area_begin) | 1); _Heap_Initialize( heap, area_begin + 768, 256, 0 ); puts( "heap extend - merge below with align up" ); ret = _Protected_heap_Extend( heap, area_begin + 512, 256 ); test_heap_assert( ret, true ); }
/*
 * Sanity-checks an allocation result: block geometry, page alignment, and
 * compliance with the requested alignment and boundary.  Pure assertions;
 * no state is modified.
 */
static void _Heap_Check_allocation(
  const Heap_Control *heap,
  const Heap_Block *block,
  uintptr_t alloc_begin,
  uintptr_t alloc_size,
  uintptr_t alignment,
  uintptr_t boundary
)
{
  uintptr_t const min_block_size = heap->min_block_size;
  uintptr_t const page_size = heap->page_size;

  uintptr_t const block_begin = (uintptr_t) block;
  uintptr_t const block_size = _Heap_Block_size( block );
  uintptr_t const block_end = block_begin + block_size;

  uintptr_t const alloc_end = alloc_begin + alloc_size;

  uintptr_t const alloc_area_begin = _Heap_Alloc_area_of_block( block );
  uintptr_t const alloc_area_offset = alloc_begin - alloc_area_begin;

  /* Block geometry */
  _HAssert( block_size >= min_block_size );
  _HAssert( block_begin < block_end );
  _HAssert(
    _Heap_Is_aligned( block_begin + HEAP_BLOCK_HEADER_SIZE, page_size )
  );
  _HAssert( _Heap_Is_aligned( block_size, page_size ) );

  /* The allocation must lie within the block (plus the bonus area) */
  _HAssert( alloc_end <= block_end + HEAP_ALLOC_BONUS );
  _HAssert( alloc_area_begin == block_begin + HEAP_BLOCK_HEADER_SIZE);
  _HAssert( alloc_area_offset < page_size );

  _HAssert( _Heap_Is_aligned( alloc_area_begin, page_size ) );
  if ( alignment == 0 ) {
    /* Without an alignment request the allocation starts at the area begin */
    _HAssert( alloc_begin == alloc_area_begin );
  } else {
    _HAssert( _Heap_Is_aligned( alloc_begin, alignment ) );
  }

  if ( boundary != 0 ) {
    uintptr_t boundary_line = _Heap_Align_down( alloc_end, boundary );

    /* The allocation must not cross a boundary line */
    _HAssert( alloc_size <= boundary );
    _HAssert( boundary_line <= alloc_begin || alloc_end <= boundary_line );
  }
}
/*
 * Verifies that the usable area of a delayed-free block still carries
 * HEAP_FREE_PATTERN in every word; the first deviating word reports a
 * block error (use after free) and stops the scan.
 */
static void _Heap_Protection_check_free_block(
  Heap_Control *heap,
  Heap_Block *block
)
{
  uintptr_t *word = (uintptr_t *) _Heap_Alloc_area_of_block( block );
  uintptr_t *const area_end = (uintptr_t *)
    ( (uintptr_t) block + _Heap_Block_size( block ) + HEAP_ALLOC_BONUS );

  while ( word != area_end ) {
    if ( *word != HEAP_FREE_PATTERN ) {
      _Heap_Protection_block_error( heap, block );
      break;
    }
    ++word;
  }
}
/*
 * If the previous allocation attempt failed (@a alloc_begin == 0), frees a
 * fraction of the delayed free blocks to make room and returns true so the
 * caller retries the search.  Returns false otherwise.
 */
static bool _Heap_Protection_free_delayed_blocks(
  Heap_Control *heap,
  uintptr_t alloc_begin
)
{
  bool search_again = false;
  /* Rounded up, so at least one block is freed whenever any are pending */
  uintptr_t const blocks_to_free_count =
    (heap->Protection.delayed_free_block_count
       + heap->Protection.delayed_free_fraction - 1)
      / heap->Protection.delayed_free_fraction;

  if ( alloc_begin == 0 && blocks_to_free_count > 0 ) {
    Heap_Block *block_to_free = heap->Protection.first_delayed_free_block;
    uintptr_t count = 0;

    for ( count = 0; count < blocks_to_free_count; ++count ) {
      Heap_Block *next_block_to_free;

      if ( !_Heap_Is_block_in_heap( heap, block_to_free ) ) {
        _Heap_Protection_block_error( heap, block_to_free );
      }

      /* Unlink before freeing; the obolus marks it as no longer delayed */
      next_block_to_free =
        block_to_free->Protection_begin.next_delayed_free_block;
      block_to_free->Protection_begin.next_delayed_free_block =
        HEAP_PROTECTION_OBOLUS;

      _Heap_Free(
        heap,
        (void *) _Heap_Alloc_area_of_block( block_to_free )
      );

      block_to_free = next_block_to_free;
    }

    heap->Protection.delayed_free_block_count -= blocks_to_free_count;
    heap->Protection.first_delayed_free_block = block_to_free;

    search_again = true;
  }

  return search_again;
}
/*
 * Frees @a block and moves the resulting free block from the head to the
 * tail of the free list, so that the initial and earlier heap areas are
 * consumed first by subsequent allocations.
 */
static void _Heap_Free_block( Heap_Control *heap, Heap_Block *block )
{
  Heap_Statistics *const stats = &heap->stats;
  Heap_Block *head_block;

  /* Statistics: cancel out the accounting done by _Heap_Free() below */
  ++stats->used_blocks;
  --stats->frees;

  /*
   * The _Heap_Free() will place the block at the head of the free list,
   * but we want it at the end, so that initial and earlier areas are
   * consumed first.
   */
  _Heap_Free( heap, (void *) _Heap_Alloc_area_of_block( block ) );
  _Heap_Protection_free_all_delayed_blocks( heap );
  head_block = _Heap_Free_list_first( heap );
  _Heap_Free_list_remove( head_block );
  _Heap_Free_list_insert_before( _Heap_Free_list_tail( heap ), head_block );
}
/*
 * Allocates @a alloc_size bytes with optional @a alignment and @a boundary
 * constraints using a first-fit search of the free list.  Returns the
 * allocation begin address, or NULL on failure.  A failed search may be
 * retried after releasing delayed free blocks (heap protection).
 */
void *_Heap_Allocate_aligned_with_boundary(
  Heap_Control *heap,
  uintptr_t alloc_size,
  uintptr_t alignment,
  uintptr_t boundary
)
{
  Heap_Statistics *const stats = &heap->stats;
  uintptr_t const block_size_floor =
    alloc_size + HEAP_BLOCK_HEADER_SIZE - HEAP_ALLOC_BONUS;
  uintptr_t const page_size = heap->page_size;
  Heap_Block *block = NULL;
  uintptr_t alloc_begin = 0;
  uint32_t search_count = 0;
  bool search_again = false;

  if ( block_size_floor < alloc_size ) {
    /* Integer overflow occurred */
    return NULL;
  }

  if ( boundary != 0 ) {
    if ( boundary < alloc_size ) {
      /* The allocation can never fit between two boundary lines */
      return NULL;
    }
    if ( alignment == 0 ) {
      /* A boundary without an alignment implies page alignment */
      alignment = page_size;
    }
  }

  do {
    Heap_Block *const free_list_tail = _Heap_Free_list_tail( heap );

    block = _Heap_Free_list_first( heap );
    while ( block != free_list_tail ) {
      _HAssert( _Heap_Is_prev_used( block ) );

      _Heap_Protection_block_check( heap, block );

      /*
       * The HEAP_PREV_BLOCK_USED flag is always set in the block size_and_flag
       * field.  Thus the value is about one unit larger than the real block
       * size.  The greater than operator takes this into account.
       */
      if ( block->size_and_flag > block_size_floor ) {
        if ( alignment == 0 ) {
          alloc_begin = _Heap_Alloc_area_of_block( block );
        } else {
          alloc_begin = _Heap_Check_block(
            heap,
            block,
            alloc_size,
            alignment,
            boundary
          );
        }
      }

      /* Statistics */
      ++search_count;

      if ( alloc_begin != 0 ) {
        break;
      }

      block = block->next;
    }

    /* On failure, releasing delayed free blocks may allow a retry */
    search_again = _Heap_Protection_free_delayed_blocks( heap, alloc_begin );
  } while ( search_again );

  if ( alloc_begin != 0 ) {
    /* Statistics */
    ++stats->allocs;
    stats->searches += search_count;

    block = _Heap_Block_allocate( heap, block, alloc_begin, alloc_size );

    _Heap_Check_allocation(
      heap,
      block,
      alloc_begin,
      alloc_size,
      alignment,
      boundary
    );
  }

  /* Statistics */
  if ( stats->max_search < search_count ) {
    stats->max_search = search_count;
  }

  return (void *) alloc_begin;
}
/*
 * Verifies the heap control data: page size, minimum block size, alignment
 * of the first block's allocation area, the first and last block flags, and
 * the wrap-around from the last block to the first.  Prints diagnostics via
 * @a printer and returns false on the first inconsistency; otherwise
 * delegates to _Heap_Walk_check_free_list().
 */
static bool _Heap_Walk_check_control(
  int source,
  Heap_Walk_printer printer,
  Heap_Control *heap
)
{
  uintptr_t const page_size = heap->page_size;
  uintptr_t const min_block_size = heap->min_block_size;
  Heap_Block *const first_free_block = _Heap_Free_list_first( heap );
  Heap_Block *const last_free_block = _Heap_Free_list_last( heap );
  Heap_Block *const first_block = heap->first_block;
  Heap_Block *const last_block = heap->last_block;

  /* Informational dump of the control data before any check */
  (*printer)(
    source,
    false,
    "page size %u, min block size %u\n"
      "\tarea begin 0x%08x, area end 0x%08x\n"
      "\tfirst block 0x%08x, last block 0x%08x\n"
      "\tfirst free 0x%08x, last free 0x%08x\n",
    page_size, min_block_size,
    heap->area_begin, heap->area_end,
    first_block, last_block,
    first_free_block, last_free_block
  );

  if ( page_size == 0 ) {
    (*printer)( source, true, "page size is zero\n" );
    return false;
  }

  if ( !_Addresses_Is_aligned( (void *) page_size ) ) {
    (*printer)(
      source,
      true,
      "page size %u not CPU aligned\n",
      page_size
    );
    return false;
  }

  if ( !_Heap_Is_aligned( min_block_size, page_size ) ) {
    (*printer)(
      source,
      true,
      "min block size %u not page aligned\n",
      min_block_size
    );
    return false;
  }

  if (
    !_Heap_Is_aligned( _Heap_Alloc_area_of_block( first_block ), page_size )
  ) {
    (*printer)(
      source,
      true,
      "first block 0x%08x: alloc area not page aligned\n",
      first_block
    );
    return false;
  }

  if ( !_Heap_Is_prev_used( first_block ) ) {
    (*printer)(
      source,
      true,
      "first block: HEAP_PREV_BLOCK_USED is cleared\n"
    );
    return false;
  }

  /* The last block must appear used so it is never merged or allocated */
  if ( _Heap_Is_free( last_block ) ) {
    (*printer)(
      source,
      true,
      "last block: is free\n"
    );
    return false;
  }

  /* The last block's size must wrap around to the first block */
  if (
    _Heap_Block_at( last_block, _Heap_Block_size( last_block ) ) != first_block
  ) {
    (*printer)(
      source,
      true,
      "last block: next block is not the first block\n"
    );
    return false;
  }

  return _Heap_Walk_check_free_list( source, printer, heap );
}
/*
 * Extends the heap with the area starting at @a area_begin_ptr of
 * @a area_size bytes.  Only an area contiguous to the current upper heap
 * end is supported (see case analysis below).  The number of bytes actually
 * added to the heap is returned via @a amount_extended.
 */
Heap_Extend_status _Heap_Extend(
  Heap_Control *heap,
  void *area_begin_ptr,
  uintptr_t area_size,
  uintptr_t *amount_extended
)
{
  Heap_Statistics *const stats = &heap->stats;
  uintptr_t const area_begin = (uintptr_t) area_begin_ptr;
  uintptr_t const heap_area_begin = heap->area_begin;
  uintptr_t const heap_area_end = heap->area_end;
  uintptr_t const new_heap_area_end = heap_area_end + area_size;
  uintptr_t extend_size = 0;
  Heap_Block *const last_block = heap->last_block;

  /*
   * There are five possibilities for the location of starting
   * address:
   *
   *   1. non-contiguous lower address     (NOT SUPPORTED)
   *   2. contiguous lower address         (NOT SUPPORTED)
   *   3. in the heap                      (ERROR)
   *   4. contiguous higher address        (SUPPORTED)
   *   5. non-contiguous higher address    (NOT SUPPORTED)
   *
   * As noted, this code only supports (4).
   */

  if ( area_begin >= heap_area_begin && area_begin < heap_area_end ) {
    return HEAP_EXTEND_ERROR; /* case 3 */
  } else if ( area_begin != heap_area_end ) {
    return HEAP_EXTEND_NOT_IMPLEMENTED; /* cases 1, 2, and 5 */
  }

  /*
   * Currently only case 4 should make it to this point.
   * The basic trick is to make the extend area look like a used
   * block and free it.
   */

  heap->area_end = new_heap_area_end;

  /* Usable extension: from the old last block to the new end, page aligned */
  extend_size = new_heap_area_end
    - (uintptr_t) last_block - HEAP_BLOCK_HEADER_SIZE;
  extend_size = _Heap_Align_down( extend_size, heap->page_size );

  *amount_extended = extend_size;

  if( extend_size >= heap->min_block_size ) {
    Heap_Block *const new_last_block =
      _Heap_Block_at( last_block, extend_size );

    /* The old last block becomes a used block covering the new area */
    _Heap_Block_set_size( last_block, extend_size );

    /* New last block wraps around to the first block (negative offset) */
    new_last_block->size_and_flag =
      ((uintptr_t) heap->first_block - (uintptr_t) new_last_block)
        | HEAP_PREV_BLOCK_USED;

    heap->last_block = new_last_block;

    /* Statistics */
    stats->size += extend_size;
    ++stats->used_blocks;
    --stats->frees; /* Do not count subsequent call as actual free() */

    /* Free the forged used block to merge it into the free list */
    _Heap_Free( heap, (void *) _Heap_Alloc_area_of_block( last_block ));
  }

  return HEAP_EXTEND_SUCCESSFUL;
}
static void test_heap_allocate(void) { void *p1 = NULL; void *p2 = NULL; void *p3 = NULL; uintptr_t alloc_size = 0; uintptr_t alignment = 0; uintptr_t boundary = 0; uintptr_t page_size = 0; uintptr_t first_page_begin = 0; uintptr_t previous_last_block_begin = 0; uintptr_t previous_last_page_begin = 0; uintptr_t last_block_begin = 0; uintptr_t last_alloc_begin = 0; test_heap_init( TEST_DEFAULT_PAGE_SIZE ); last_block_begin = (uintptr_t) TestHeap.last_block; last_alloc_begin = _Heap_Alloc_area_of_block( TestHeap.last_block ); puts( "run tests for _Heap_Allocate_aligned_with_boundary()"); puts( "\tcheck if NULL will be returned if size causes integer overflow" ); alloc_size = (uintptr_t ) -1; alignment = 0; boundary = 0; test_init_and_alloc( alloc_size, alignment, boundary, NULL ); puts( "\ttry to allocate more space than the one which fits in the boundary" ); alloc_size = 2; alignment = 0; boundary = alloc_size - 1; test_init_and_alloc( alloc_size, alignment, boundary, NULL ); puts( "\tcheck if alignment will be set to page size if only a boundary is given" ); alloc_size = 1; boundary = 1; alignment = 0; p1 = test_init_and_alloc_simple( alloc_size, alignment, boundary ); alignment = test_page_size(); test_init_and_alloc( alloc_size, alignment, boundary, p1 ); puts( "\tcreate a block which is bigger then the first free space" ); alignment = 0; boundary = 0; alloc_size = test_page_size(); p1 = test_init_and_alloc_simple( alloc_size, alignment, boundary ); p2 = test_alloc_simple( alloc_size, alignment, boundary ); rtems_test_assert( p2 ); test_free( p1 ); alloc_size = 2 * alloc_size; p3 = test_alloc_simple( alloc_size, alignment, boundary ); rtems_test_assert( p1 != p3 ); puts( "\tset boundary before allocation begin" ); alloc_size = 1; alignment = 0; boundary = last_alloc_begin - test_page_size(); p1 = test_init_and_alloc_simple( alloc_size, alignment, boundary ); rtems_test_assert( (uintptr_t ) p1 >= boundary ); puts( "\tset boundary between allocation begin and end" 
); alloc_size = test_page_size(); alignment = 0; boundary = last_alloc_begin - alloc_size / 2; p1 = test_init_and_alloc_simple( alloc_size, alignment, boundary ); rtems_test_assert( (uintptr_t ) p1 + alloc_size <= boundary ); puts( "\tset boundary after allocation end" ); alloc_size = 1; alignment = 0; boundary = last_alloc_begin; p1 = test_init_and_alloc_simple( alloc_size, alignment, boundary ); rtems_test_assert( (uintptr_t ) p1 + alloc_size < boundary ); puts( "\tset boundary on allocation end" ); alloc_size = TEST_DEFAULT_PAGE_SIZE - HEAP_BLOCK_HEADER_SIZE; alignment = 0; boundary = last_block_begin; p1 = (void *) (last_alloc_begin - TEST_DEFAULT_PAGE_SIZE); test_init_and_alloc( alloc_size, alignment, boundary, p1); puts( "\talign the allocation to different positions in the block header" ); page_size = sizeof(uintptr_t); alloc_size = 1; boundary = 0; test_heap_init( page_size ); /* Force the page size to a small enough value */ TestHeap.page_size = page_size; alignment = first_page_begin - sizeof(uintptr_t); p1 = test_alloc( alloc_size, alignment, boundary, NULL ); first_page_begin = ((uintptr_t) TestHeap.first_block ) + HEAP_BLOCK_HEADER_SIZE; alignment = first_page_begin + sizeof(uintptr_t); p1 = test_alloc( alloc_size, alignment, boundary, NULL ); first_page_begin = ((uintptr_t) TestHeap.first_block ) + HEAP_BLOCK_HEADER_SIZE; alignment = first_page_begin; p1 = test_alloc_simple( alloc_size, alignment, boundary ); puts( "\tallocate last block with different boundarys" ); page_size = TEST_DEFAULT_PAGE_SIZE; test_heap_init( page_size ); previous_last_block_begin = ((uintptr_t) TestHeap.last_block ) - TestHeap.min_block_size; previous_last_page_begin = previous_last_block_begin + HEAP_BLOCK_HEADER_SIZE; alloc_size = TestHeap.page_size - HEAP_BLOCK_HEADER_SIZE; alignment = sizeof(uintptr_t); boundary = 0; p1 = test_alloc( alloc_size, alignment, boundary, (void *) (previous_last_page_begin + sizeof(uintptr_t))); test_heap_init( page_size ); boundary = 
((uintptr_t) TestHeap.last_block ); p1 = test_alloc( alloc_size, alignment, boundary, (void *) previous_last_page_begin ); puts( "\tbreak the boundaries and aligns more than one time" ); page_size = CPU_ALIGNMENT * 20; alloc_size = page_size / 4; alignment = page_size / 5; boundary = page_size / 4; test_heap_init( page_size ); p1 = (void *) (_Heap_Alloc_area_of_block( TestHeap.last_block ) - page_size ); test_alloc( alloc_size, alignment, boundary, p1); puts( "\tdifferent combinations, so that there is no valid block at the end" ); page_size = sizeof(uintptr_t); test_heap_init( 0 ); /* Force the page size to a small enough value */ TestHeap.page_size = page_size; alloc_size = 1; alignment = (uintptr_t) TestHeap.last_block; boundary = 0; p1 = test_alloc( alloc_size, alignment, boundary, NULL ); boundary = (uintptr_t) TestHeap.last_block; p1 = test_alloc( alloc_size, alignment, boundary, NULL ); alloc_size = 0; p1 = test_alloc( alloc_size, alignment, boundary, NULL ); alloc_size = 1; alignment = sizeof(uintptr_t); boundary = 0; p1 = test_alloc_simple( alloc_size, alignment, boundary ); puts( "\ttry to create a block, which is not possible because of the alignment and boundary" ); alloc_size = 2; boundary = _Heap_Alloc_area_of_block( TestHeap.first_block ) + _Heap_Block_size( TestHeap.first_block ) / 2; alignment = boundary - 1; p1 = test_init_and_alloc( alloc_size, alignment, boundary, NULL ); alloc_size = 2; alignment = _Heap_Alloc_area_of_block( TestHeap.first_block ); boundary = alignment + 1; p1 = test_init_and_alloc( alloc_size, alignment, boundary, NULL ); }
/*
 * Walks the free list and verifies each node: it lies within the heap, its
 * allocation area is page aligned, it is marked free, and its prev link
 * matches the previous node visited.  Prints diagnostics via @a printer and
 * returns false on the first inconsistency.
 */
static bool _Heap_Walk_check_free_list(
  int source,
  Heap_Walk_printer printer,
  Heap_Control *heap
)
{
  uintptr_t const page_size = heap->page_size;
  const Heap_Block *const free_list_tail = _Heap_Free_list_tail( heap );
  const Heap_Block *const first_free_block = _Heap_Free_list_first( heap );
  const Heap_Block *prev_block = free_list_tail;
  const Heap_Block *free_block = first_free_block;

  while ( free_block != free_list_tail ) {
    if ( !_Heap_Is_block_in_heap( heap, free_block ) ) {
      (*printer)(
        source,
        true,
        "free block 0x%08x: not in heap\n",
        free_block
      );
      return false;
    }

    if (
      !_Heap_Is_aligned( _Heap_Alloc_area_of_block( free_block ), page_size )
    ) {
      (*printer)(
        source,
        true,
        "free block 0x%08x: alloc area not page aligned\n",
        free_block
      );
      return false;
    }

    if ( _Heap_Is_used( free_block ) ) {
      (*printer)(
        source,
        true,
        "free block 0x%08x: is used\n",
        free_block
      );
      return false;
    }

    /* Doubly-linked list consistency: prev must point at the last node */
    if ( free_block->prev != prev_block ) {
      (*printer)(
        source,
        true,
        "free block 0x%08x: invalid previous block 0x%08x\n",
        free_block,
        free_block->prev
      );
      return false;
    }

    prev_block = free_block;
    free_block = free_block->next;
  }

  return true;
}
/*
 * Initializes the heap control block for the area starting at
 * @a heap_area_begin_ptr of @a heap_area_size bytes with the given
 * @a page_size (0 selects CPU_ALIGNMENT).  Returns the size of the
 * allocatable area, or 0 if the area is invalid or too small.
 */
uintptr_t _Heap_Initialize(
  Heap_Control *heap,
  void *heap_area_begin_ptr,
  uintptr_t heap_area_size,
  uintptr_t page_size
)
{
  Heap_Statistics *const stats = &heap->stats;
  uintptr_t const heap_area_begin = (uintptr_t) heap_area_begin_ptr;
  uintptr_t const heap_area_end = heap_area_begin + heap_area_size;
  uintptr_t alloc_area_begin = heap_area_begin + HEAP_BLOCK_HEADER_SIZE;
  uintptr_t alloc_area_size = 0;
  uintptr_t first_block_begin = 0;
  uintptr_t first_block_size = 0;
  uintptr_t min_block_size = 0;
  uintptr_t overhead = 0;
  Heap_Block *first_block = NULL;
  Heap_Block *last_block = NULL;

  if ( page_size == 0 ) {
    page_size = CPU_ALIGNMENT;
  } else {
    page_size = _Heap_Align_up( page_size, CPU_ALIGNMENT );

    if ( page_size < CPU_ALIGNMENT ) {
      /* Integer overflow */
      return 0;
    }
  }
  min_block_size = _Heap_Align_up( sizeof( Heap_Block ), page_size );

  /* Align the allocation area up; overhead is the header plus any padding */
  alloc_area_begin = _Heap_Align_up( alloc_area_begin, page_size );
  first_block_begin = alloc_area_begin - HEAP_BLOCK_HEADER_SIZE;
  overhead = HEAP_BLOCK_HEADER_SIZE + (first_block_begin - heap_area_begin);
  first_block_size = heap_area_size - overhead;
  first_block_size = _Heap_Align_down ( first_block_size, page_size );
  alloc_area_size = first_block_size - HEAP_BLOCK_HEADER_SIZE;

  /* Unsigned wrap in the computations above is caught by these checks */
  if (
    heap_area_end < heap_area_begin
      || heap_area_size <= overhead
      || first_block_size < min_block_size
  ) {
    /* Invalid area or area too small */
    return 0;
  }

  /* First block */
  first_block = (Heap_Block *) first_block_begin;
  /* NOTE(review): prev_size of the first block is not used for list
     navigation; presumably page_size acts as a marker value here — confirm
     against the heap handler documentation */
  first_block->prev_size = page_size;
  first_block->size_and_flag = first_block_size | HEAP_PREV_BLOCK_USED;
  first_block->next = _Heap_Free_list_tail( heap );
  first_block->prev = _Heap_Free_list_head( heap );

  /*
   * Last block.
   *
   * The next block of the last block is the first block.  Since the first
   * block indicates that the previous block is used, this ensures that the
   * last block appears as used for the _Heap_Is_used() and _Heap_Is_free()
   * functions.
   */
  last_block = _Heap_Block_at( first_block, first_block_size );
  last_block->prev_size = first_block_size;
  /* Negative offset wraps the heap around to the first block */
  last_block->size_and_flag = first_block_begin - (uintptr_t) last_block;

  /* Heap control */
  heap->page_size = page_size;
  heap->min_block_size = min_block_size;
  heap->area_begin = heap_area_begin;
  heap->area_end = heap_area_end;
  heap->first_block = first_block;
  heap->last_block = last_block;
  _Heap_Free_list_head( heap )->next = first_block;
  _Heap_Free_list_tail( heap )->prev = first_block;

  /* Statistics */
  stats->size = first_block_size;
  stats->free_size = first_block_size;
  stats->min_free_size = first_block_size;
  stats->free_blocks = 1;
  stats->max_free_blocks = 1;
  stats->used_blocks = 0;
  stats->max_search = 0;
  stats->allocs = 0;
  stats->searches = 0;
  stats->frees = 0;
  stats->resizes = 0;
  stats->instance = instance++;

  _HAssert( _Heap_Is_aligned( heap->page_size, CPU_ALIGNMENT ) );
  _HAssert( _Heap_Is_aligned( heap->min_block_size, page_size ) );
  _HAssert(
    _Heap_Is_aligned( _Heap_Alloc_area_of_block( first_block ), page_size )
  );
  _HAssert(
    _Heap_Is_aligned( _Heap_Alloc_area_of_block( last_block ), page_size )
  );

  return alloc_area_size;
}