Example no. 1
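
/*
 * Creates a new first block at the page-aligned beginning of the extension
 * area directly below the current first block and frees it, so the space
 * between the extension area and the old first block becomes available.
 */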
static void _Heap_Merge_below(
  Heap_Control *heap,
  uintptr_t extend_area_begin,
  Heap_Block *first_block
)
{
  uintptr_t const page_size = heap->page_size;
  uintptr_t const new_first_block_alloc_begin =
    _Heap_Align_up( extend_area_begin + HEAP_BLOCK_HEADER_SIZE, page_size );
  uintptr_t const new_first_block_begin =
    new_first_block_alloc_begin - HEAP_BLOCK_HEADER_SIZE;
  uintptr_t const first_block_begin = (uintptr_t) first_block;
  uintptr_t const new_first_block_size =
    first_block_begin - new_first_block_begin;
  Heap_Block *const new_first_block = (Heap_Block *) new_first_block_begin;

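  /*
   * Set up the header of the new first block: inherit the previous size from
   * the old first block, span up to the old first block, and flag the
   * previous block as used.
   */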
  new_first_block->prev_size = first_block->prev_size;
  new_first_block->size_and_flag = new_first_block_size | HEAP_PREV_BLOCK_USED;

  _Heap_Free_block( heap, new_first_block );
}
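
The placement arithmetic above can be checked in isolation.  The standalone
sketch below repeats it with made-up addresses and an assumed 8-byte block
header; it only illustrates where the new first block would land and is not
part of the heap implementation.

#include <stdint.h>
#include <stdio.h>

/*
 * Standalone illustration of the placement arithmetic in _Heap_Merge_below().
 * All numbers below, including the 8-byte header size, are made-up
 * assumptions; real values come from the heap configuration.
 */
static uintptr_t align_up( uintptr_t value, uintptr_t alignment )
{
  uintptr_t const remainder = value % alignment;

  return remainder == 0 ? value : value + alignment - remainder;
}

int main( void )
{
  uintptr_t const header_size = 8;             /* assumed HEAP_BLOCK_HEADER_SIZE */
  uintptr_t const page_size = 8;               /* assumed heap page size */
  uintptr_t const extend_area_begin = 0x1004;  /* assumed start of the new area */
  uintptr_t const first_block_begin = 0x2000;  /* assumed old first block address */

  uintptr_t const new_first_block_alloc_begin =
    align_up( extend_area_begin + header_size, page_size );
  uintptr_t const new_first_block_begin =
    new_first_block_alloc_begin - header_size;
  uintptr_t const new_first_block_size =
    first_block_begin - new_first_block_begin;

  printf(
    "new first block at 0x%lx, size %lu, allocation area at 0x%lx\n",
    (unsigned long) new_first_block_begin,
    (unsigned long) new_first_block_size,
    (unsigned long) new_first_block_alloc_begin
  );

  return 0;
}

/*
 * Splits a block into a used part that satisfies alloc_size and, if the
 * remainder is large enough to form a valid block, a free part that is
 * inserted into the free list or merged with the following free block.
 * Otherwise the whole block remains used.
 */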
void _Heap_Block_split(
  Heap_Control *heap,
  Heap_Block *block,
  Heap_Block *free_list_anchor,
  uintptr_t alloc_size
)
{
  Heap_Statistics *const stats = &heap->stats;

  uintptr_t const page_size = heap->page_size;
  uintptr_t const min_block_size = heap->min_block_size;
  uintptr_t const min_alloc_size = min_block_size - HEAP_BLOCK_HEADER_SIZE;

  uintptr_t const block_size = _Heap_Block_size( block );

  uintptr_t const used_size =
    _Heap_Max( alloc_size, min_alloc_size ) + HEAP_BLOCK_HEADER_SIZE;
  uintptr_t const used_block_size = _Heap_Align_up( used_size, page_size );

  uintptr_t const free_size = block_size + HEAP_BLOCK_SIZE_OFFSET - used_size;
  uintptr_t const free_size_limit = min_block_size + HEAP_BLOCK_SIZE_OFFSET;

  Heap_Block *next_block = _Heap_Block_at( block, block_size );

  _HAssert( used_size <= block_size + HEAP_BLOCK_SIZE_OFFSET );
  _HAssert( used_size + free_size == block_size + HEAP_BLOCK_SIZE_OFFSET );

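  /* Split only if the remainder is large enough to form a valid free block */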
  if ( free_size >= free_size_limit ) {
    Heap_Block *const free_block = _Heap_Block_at( block, used_block_size );
    uintptr_t free_block_size = block_size - used_block_size;

    _HAssert( used_block_size + free_block_size == block_size );

    _Heap_Block_set_size( block, used_block_size );

    /* Statistics */
    stats->free_size += free_block_size;

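    /*
     * If the next block is used, insert the new free block into the free
     * list.  Otherwise coalesce it with the next (free) block, which is
     * replaced in the free list.
     */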
    if ( _Heap_Is_used( next_block ) ) {
      _Heap_Free_list_insert_after( free_list_anchor, free_block );

      /* Statistics */
      ++stats->free_blocks;
    } else {
      uintptr_t const next_block_size = _Heap_Block_size( next_block );

      _Heap_Free_list_replace( next_block, free_block );

      free_block_size += next_block_size;

      next_block = _Heap_Block_at( free_block, free_block_size );
    }

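    /*
     * Write the header of the (possibly coalesced) free block and let the
     * following block record its size and note that its previous block is
     * free.
     */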
    free_block->size_and_flag = free_block_size | HEAP_PREV_BLOCK_USED;

    next_block->prev_size = free_block_size;
    next_block->size_and_flag &= ~HEAP_PREV_BLOCK_USED;
  } else {
    next_block->size_and_flag |= HEAP_PREV_BLOCK_USED;
  }
}
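
A concrete walk through the split criterion may help.  The sketch below
recomputes used_block_size, free_size and free_size_limit for one made-up
request; the header size, size offset and all numeric values are assumptions
chosen only to produce one example.

#include <stdint.h>
#include <stdio.h>

/*
 * Standalone restatement of the split criterion in _Heap_Block_split().  The
 * header size, size offset and all request sizes are assumed values.
 */
static uintptr_t align_up( uintptr_t value, uintptr_t alignment )
{
  uintptr_t const remainder = value % alignment;

  return remainder == 0 ? value : value + alignment - remainder;
}

int main( void )
{
  uintptr_t const header_size = 8;    /* assumed HEAP_BLOCK_HEADER_SIZE */
  uintptr_t const size_offset = 4;    /* assumed HEAP_BLOCK_SIZE_OFFSET */
  uintptr_t const page_size = 8;
  uintptr_t const min_block_size = 16;
  uintptr_t const block_size = 64;    /* size of the block being split */
  uintptr_t const alloc_size = 20;    /* caller's request */

  uintptr_t const min_alloc_size = min_block_size - header_size;
  uintptr_t const used_size =
    ( alloc_size > min_alloc_size ? alloc_size : min_alloc_size ) + header_size;
  uintptr_t const used_block_size = align_up( used_size, page_size );
  uintptr_t const free_size = block_size + size_offset - used_size;
  uintptr_t const free_size_limit = min_block_size + size_offset;

  if ( free_size >= free_size_limit ) {
    printf(
      "split: used block size %lu, remaining free block size %lu\n",
      (unsigned long) used_block_size,
      (unsigned long) ( block_size - used_block_size )
    );
  } else {
    printf( "no split: remainder too small, the whole block stays used\n" );
  }

  return 0;
}

/*
 * Initializes the heap control structure for the given memory area and page
 * size.  Returns the size of the allocatable area of the first block, or 0
 * if the area is invalid or too small.
 */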
uintptr_t _Heap_Initialize(
  Heap_Control *heap,
  void *heap_area_begin_ptr,
  uintptr_t heap_area_size,
  uintptr_t page_size
)
{
  Heap_Statistics *const stats = &heap->stats;
  uintptr_t const heap_area_begin = (uintptr_t) heap_area_begin_ptr;
  uintptr_t const heap_area_end = heap_area_begin + heap_area_size;
  uintptr_t alloc_area_begin = heap_area_begin + HEAP_BLOCK_HEADER_SIZE;
  uintptr_t alloc_area_size = 0;
  uintptr_t first_block_begin = 0;
  uintptr_t first_block_size = 0;
  uintptr_t min_block_size = 0;
  uintptr_t overhead = 0;
  Heap_Block *first_block = NULL;
  Heap_Block *last_block = NULL;

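  /*
   * A page size of zero selects the default CPU_ALIGNMENT.  Otherwise the
   * page size is rounded up to a multiple of CPU_ALIGNMENT, which may
   * overflow for huge values.
   */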
  if ( page_size == 0 ) {
    page_size = CPU_ALIGNMENT;
  } else {
    page_size = _Heap_Align_up( page_size, CPU_ALIGNMENT );

    if ( page_size < CPU_ALIGNMENT ) {
      /* Integer overflow */
      return 0;
    }
  }
  min_block_size = _Heap_Align_up( sizeof( Heap_Block ), page_size );

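  /*
   * Place the first block so that its allocation area begins on a page
   * boundary and derive the usable first block and allocation area sizes.
   */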
  alloc_area_begin = _Heap_Align_up( alloc_area_begin, page_size );
  first_block_begin = alloc_area_begin - HEAP_BLOCK_HEADER_SIZE;
  overhead = HEAP_BLOCK_HEADER_SIZE + (first_block_begin - heap_area_begin);
  first_block_size = heap_area_size - overhead;
  first_block_size = _Heap_Align_down( first_block_size, page_size );
  alloc_area_size = first_block_size - HEAP_BLOCK_HEADER_SIZE;

  if (
    heap_area_end < heap_area_begin
      || heap_area_size <= overhead
      || first_block_size < min_block_size
  ) {
    /* Invalid area or area too small */
    return 0;
  }

  /* First block */
  first_block = (Heap_Block *) first_block_begin;
  first_block->prev_size = page_size;
  first_block->size_and_flag = first_block_size | HEAP_PREV_BLOCK_USED;
  first_block->next = _Heap_Free_list_tail( heap );
  first_block->prev = _Heap_Free_list_head( heap );

  /*
   * Last block.
   *
   * The next block of the last block is the first block.  Since the first
   * block indicates that the previous block is used, this ensures that the
   * last block appears as used for the _Heap_Is_used() and _Heap_Is_free()
   * functions.
   */
  last_block = _Heap_Block_at( first_block, first_block_size );
  last_block->prev_size = first_block_size;
  last_block->size_and_flag = first_block_begin - (uintptr_t) last_block;

  /* Heap control */
  heap->page_size = page_size;
  heap->min_block_size = min_block_size;
  heap->area_begin = heap_area_begin;
  heap->area_end = heap_area_end;
  heap->first_block = first_block;
  heap->last_block = last_block;
  _Heap_Free_list_head( heap )->next = first_block;
  _Heap_Free_list_tail( heap )->prev = first_block;

  /* Statistics */
  stats->size = first_block_size;
  stats->free_size = first_block_size;
  stats->min_free_size = first_block_size;
  stats->free_blocks = 1;
  stats->max_free_blocks = 1;
  stats->used_blocks = 0;
  stats->max_search = 0;
  stats->allocs = 0;
  stats->searches = 0;
  stats->frees = 0;
  stats->resizes = 0;
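  /* "instance" refers to a file-scope counter that is not shown in this excerpt */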
  stats->instance = instance++;

  _HAssert( _Heap_Is_aligned( heap->page_size, CPU_ALIGNMENT ) );
  _HAssert( _Heap_Is_aligned( heap->min_block_size, page_size ) );
  _HAssert(
    _Heap_Is_aligned( _Heap_Alloc_area_of_block( first_block ), page_size )
  );
  _HAssert(
    _Heap_Is_aligned( _Heap_Alloc_area_of_block( last_block ), page_size )
  );

  return alloc_area_size;
}
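
A minimal usage sketch follows.  It assumes the score heap declarations are
reachable through <rtems/score/heap.h> (the exact header path may differ
between RTEMS versions); the area size of 4096 bytes and the page size
argument of 0 are arbitrary choices for illustration.

#include <rtems/score/heap.h>
#include <stdio.h>

/* Usage sketch only: area size and page size are arbitrary assumptions. */
static Heap_Control example_heap;
static char example_area[ 4096 ];

void example_heap_setup( void )
{
  uintptr_t const alloc_area_size = _Heap_Initialize(
    &example_heap,
    example_area,
    sizeof( example_area ),
    0  /* 0 selects the default page size (CPU_ALIGNMENT) */
  );

  if ( alloc_area_size == 0 ) {
    printf( "heap area invalid or too small\n" );
  } else {
    printf(
      "heap ready, %lu bytes allocatable in the first block\n",
      (unsigned long) alloc_area_size
    );
  }
}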