Example #1
  static bool _Heap_Protection_determine_block_free(
    Heap_Control *heap,
    Heap_Block *block
  )
  {
    bool do_free = true;
    Heap_Block *const next = block->Protection_begin.next_delayed_free_block;

    /*
     * Sometimes after a free the allocated area is still in use.  An example
     * is the task stack of a thread that deletes itself.  The thread dispatch
     * disable level is a way to detect this use case.
     */
    if ( _Thread_Dispatch_is_enabled() ) {
      if ( next == NULL ) {
        _Heap_Protection_delay_block_free( heap, block );
        do_free = false;
      } else if ( next == HEAP_PROTECTION_OBOLUS ) {
        _Heap_Protection_check_free_block( heap, block );
      } else {
        _Heap_Protection_block_error( heap, block );
      }
    } else if ( next == NULL ) {
      /*
       * This is a hack to prevent heavy workspace fragmentation which would
       * lead to test suite failures.
       */
      _Heap_Protection_free_all_delayed_blocks( heap );
    }

    return do_free;
  }
Example #2
  static void _Heap_Protection_check_free_block(
    Heap_Control *heap,
    Heap_Block *block
  )
  {
    uintptr_t *const pattern_begin = (uintptr_t *)
      _Heap_Alloc_area_of_block( block );
    uintptr_t *const pattern_end = (uintptr_t *)
      ((uintptr_t) block + _Heap_Block_size( block ) + HEAP_ALLOC_BONUS);
    uintptr_t *current = NULL;

    for ( current = pattern_begin; current != pattern_end; ++current ) {
      if ( *current != HEAP_FREE_PATTERN ) {
        _Heap_Protection_block_error( heap, block );
        break;
      }
    }
  }
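Example #2 walks the freed allocation area word by word and reports a block error as soon as one word no longer holds HEAP_FREE_PATTERN, i.e. as soon as there is evidence of a write after free. The sketch below shows the underlying "poison on free, verify before reuse" idea in isolation; my_poison_area(), my_check_area() and MY_FREE_PATTERN are hypothetical names used for illustration and are not part of RTEMS.
  #include <stdbool.h>
  #include <stdint.h>

  #define MY_FREE_PATTERN 0xa5a5a5a5UL

  /* Fill a word-aligned area with the free pattern when it is released. */
  static void my_poison_area( uintptr_t *begin, uintptr_t *end )
  {
    uintptr_t *current;

    for ( current = begin; current != end; ++current ) {
      *current = MY_FREE_PATTERN;
    }
  }

  /* Return true if the area still holds the pattern, false if it was
     written to after it was released. */
  static bool my_check_area( const uintptr_t *begin, const uintptr_t *end )
  {
    const uintptr_t *current;

    for ( current = begin; current != end; ++current ) {
      if ( *current != MY_FREE_PATTERN ) {
        return false;
      }
    }

    return true;
  }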
Example #3
  static bool _Heap_Protection_free_delayed_blocks(
    Heap_Control *heap,
    uintptr_t alloc_begin
  )
  {
    bool search_again = false;
    uintptr_t const blocks_to_free_count =
      (heap->Protection.delayed_free_block_count
         + heap->Protection.delayed_free_fraction - 1)
      / heap->Protection.delayed_free_fraction;

    if ( alloc_begin == 0 && blocks_to_free_count > 0 ) {
      Heap_Block *block_to_free = heap->Protection.first_delayed_free_block;
      uintptr_t count = 0;

      for ( count = 0; count < blocks_to_free_count; ++count ) {
        Heap_Block *next_block_to_free;

        if ( !_Heap_Is_block_in_heap( heap, block_to_free ) ) {
          _Heap_Protection_block_error( heap, block_to_free );
        }

        next_block_to_free =
          block_to_free->Protection_begin.next_delayed_free_block;
        block_to_free->Protection_begin.next_delayed_free_block =
          HEAP_PROTECTION_OBOLUS;

        _Heap_Free(
          heap,
          (void *) _Heap_Alloc_area_of_block( block_to_free )
        );

        block_to_free = next_block_to_free;
      }

      heap->Protection.delayed_free_block_count -= blocks_to_free_count;
      heap->Protection.first_delayed_free_block = block_to_free;

      search_again = true;
    }

    return search_again;
  }
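The blocks_to_free_count expression at the top of Example #3 is an integer ceiling division: out of delayed_free_block_count delayed blocks, roughly one delayed_free_fraction-th of them (rounded up) is released per call. A small stand-alone check of the arithmetic, with made-up numbers:
  #include <assert.h>
  #include <stdint.h>

  /* (count + fraction - 1) / fraction equals ceil(count / fraction) for
     fraction > 0, as long as the addition does not overflow. */
  static uintptr_t ceil_div( uintptr_t count, uintptr_t fraction )
  {
    return ( count + fraction - 1 ) / fraction;
  }

  int main( void )
  {
    assert( ceil_div( 10, 4 ) == 3 ); /* 10 delayed blocks, free 3 of them */
    assert( ceil_div( 8, 4 ) == 2 );  /* exact multiple, free 2 */
    assert( ceil_div( 0, 4 ) == 0 );  /* nothing delayed, free none */
    return 0;
  }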
Example #4
bool _Heap_Free( Heap_Control *heap, void *alloc_begin_ptr )
{
  Heap_Statistics *const stats = &heap->stats;
  uintptr_t alloc_begin;
  Heap_Block *block;
  Heap_Block *next_block = NULL;
  uintptr_t block_size = 0;
  uintptr_t next_block_size = 0;
  bool next_is_free = false;

  /*
   * If the pointer is NULL, return true so that a free on NULL is considered
   * a valid release.  This is a special case that could be handled by the
   * in-heap check, however that would result in false being returned, which
   * is wrong.
   */
  if ( alloc_begin_ptr == NULL ) {
    return true;
  }

  alloc_begin = (uintptr_t) alloc_begin_ptr;
  block = _Heap_Block_of_alloc_area( alloc_begin, heap->page_size );

  if ( !_Heap_Is_block_in_heap( heap, block ) ) {
    return false;
  }

  _Heap_Protection_block_check( heap, block );

  block_size = _Heap_Block_size( block );
  next_block = _Heap_Block_at( block, block_size );

  if ( !_Heap_Is_block_in_heap( heap, next_block ) ) {
    return false;
  }

  _Heap_Protection_block_check( heap, next_block );

  if ( !_Heap_Is_prev_used( next_block ) ) {
    _Heap_Protection_block_error( heap, block );
    return false;
  }

  if ( !_Heap_Protection_determine_block_free( heap, block ) ) {
    return true;
  }

  next_block_size = _Heap_Block_size( next_block );
  next_is_free = next_block != heap->last_block
    && !_Heap_Is_prev_used( _Heap_Block_at( next_block, next_block_size ) );

  if ( !_Heap_Is_prev_used( block ) ) {
    uintptr_t const prev_size = block->prev_size;
    Heap_Block * const prev_block = _Heap_Block_at( block, -prev_size );

    if ( !_Heap_Is_block_in_heap( heap, prev_block ) ) {
      _HAssert( false );
      return false;
    }

    /* As we always coalesce free blocks, the block that precedes prev_block
       must have been used. */
    if ( !_Heap_Is_prev_used( prev_block ) ) {
      _HAssert( false );
      return false;
    }

    if ( next_is_free ) {       /* coalesce both */
      uintptr_t const size = block_size + prev_size + next_block_size;
      _Heap_Free_list_remove( next_block );
      stats->free_blocks -= 1;
      prev_block->size_and_flag = size | HEAP_PREV_BLOCK_USED;
      next_block = _Heap_Block_at( prev_block, size );
      _HAssert( !_Heap_Is_prev_used( next_block ) );
      next_block->prev_size = size;
    } else {                      /* coalesce prev */
      uintptr_t const size = block_size + prev_size;
      prev_block->size_and_flag = size | HEAP_PREV_BLOCK_USED;
      next_block->size_and_flag &= ~HEAP_PREV_BLOCK_USED;
      next_block->prev_size = size;
    }
  } else if ( next_is_free ) {    /* coalesce next */
    uintptr_t const size = block_size + next_block_size;
    _Heap_Free_list_replace( next_block, block );
    block->size_and_flag = size | HEAP_PREV_BLOCK_USED;
    next_block = _Heap_Block_at( block, size );
    next_block->prev_size = size;
  } else {                        /* no coalesce */
    /* Add 'block' to the head of the free blocks list as it tends to
       produce less fragmentation than adding to the tail. */
    _Heap_Free_list_insert_after( _Heap_Free_list_head( heap ), block );
    block->size_and_flag = block_size | HEAP_PREV_BLOCK_USED;
    next_block->size_and_flag &= ~HEAP_PREV_BLOCK_USED;
    next_block->prev_size = block_size;

    /* Statistics */
    ++stats->free_blocks;
    if ( stats->max_free_blocks < stats->free_blocks ) {
      stats->max_free_blocks = stats->free_blocks;
    }
  }

  /* Statistics */
  --stats->used_blocks;
  ++stats->frees;
  stats->free_size += block_size;

  return true;
}
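For context, here is a minimal usage sketch of the public side of Example #4. It assumes the RTEMS score heap declarations (<rtems/score/heapimpl.h> in current trees, <rtems/score/heap.h> in older ones); example_heap, example_area, example() and the sizes are arbitrary names and values chosen for illustration.
#include <rtems/score/heapimpl.h>

static Heap_Control example_heap;
static char example_area[ 4096 ];

void example( void )
{
  void *p;
  uintptr_t maximum_available;

  /* A page size of 0 is assumed here to select the default page size. */
  maximum_available = _Heap_Initialize(
    &example_heap,
    example_area,
    sizeof( example_area ),
    0
  );

  if ( maximum_available == 0 ) {
    return;
  }

  p = _Heap_Allocate( &example_heap, 128 );

  if ( p != NULL ) {
    /* Returns false if the pointer does not belong to the heap. */
    (void) _Heap_Free( &example_heap, p );
  }
}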