Example #1
void* Chunk::operator new(size_t requested_size, size_t length) {
  // requested_size is equal to sizeof(Chunk), but for the arena
  // allocations to come out aligned as expected, the size must be
  // rounded up to the expected arena alignment; if sizeof(Chunk) is
  // not already properly aligned, ARENA_ALIGN corrects it.
  assert(ARENA_ALIGN(requested_size) == aligned_overhead_size(), "Bad alignment");
  size_t bytes = ARENA_ALIGN(requested_size) + length;
  switch (length) {
   case Chunk::size:        return ChunkPool::large_pool()->allocate(bytes);
   case Chunk::medium_size: return ChunkPool::medium_pool()->allocate(bytes);
   case Chunk::init_size:   return ChunkPool::small_pool()->allocate(bytes);
   default: {
      void* p = os::malloc(bytes);
     if (p == NULL)
       vm_exit_out_of_memory(bytes, "Chunk::new");
     return p;
   }
  }
}
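For reference, this placement form of operator new receives the payload length as its extra argument, which routes the request to one of the three fixed-size chunk pools or falls back to os::malloc. A minimal call-site sketch (the Chunk constructor signature is assumed from HotSpot convention, not shown above):

// Hypothetical call site: allocate a Chunk whose payload is init_size bytes.
// The extra operator-new argument selects the matching ChunkPool.
Chunk* chunk = new (Chunk::init_size) Chunk(Chunk::init_size);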
Example #2
File: Arena.cpp  Project: 1833183060/wke
void InitArenaPool(ArenaPool* pool, const char*, unsigned size, unsigned align)
{
     if (align == 0)
         align = ARENA_DEFAULT_ALIGN;
     pool->mask = BITMASK(CeilingLog2(align));
     pool->first.next = NULL;
     pool->first.base = pool->first.avail = pool->first.limit =
         (uword)ARENA_ALIGN(pool, &pool->first + 1);
     pool->current = &pool->first;
     pool->arenasize = size;
}
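The alignment helpers used here are defined elsewhere in the project. A plausible set of definitions, consistent with how pool->mask is computed and consumed in these examples (these exact bodies are assumptions modeled on the NSPR/WebCore arena lineage, not verified headers):

#include <stdint.h>

typedef uintptr_t uword;                                 /* assumed typedef */

#define BITMASK(n) (((uword)1 << (n)) - 1)               /* n low bits set */
#define ARENA_ALIGN(pool, n) \
    (((uword)(n) + (pool)->mask) & ~(uword)(pool)->mask) /* round up */

static unsigned CeilingLog2(unsigned n)   /* smallest k with (1 << k) >= n */
{
    unsigned k = 0;
    while (((unsigned)1 << k) < n)
        ++k;
    return k;
}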
Example #3
void ArenaRelease(ArenaPool *pool, char *mark)
{
    Arena *a;

    for (a = pool->first.next; a; a = a->next) {
        if (UPTRDIFF(mark, a->base) < UPTRDIFF(a->avail, a->base)) {
            a->avail = (uword)ARENA_ALIGN(pool, mark);
            FreeArenaList(pool, a, false);
            return;
        }
    }
}
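ArenaRelease rolls a pool back to a previously captured mark, freeing every arena allocated after it. A hedged usage sketch (capturing the mark directly from pool->current->avail is an assumption; many ports wrap this in an ARENA_MARK-style macro):

/* Sketch: mark, allocate scratch space, then roll back in one call. */
char* mark = (char*)pool->current->avail;  /* remember the current position */
void* tmp  = ArenaAllocate(pool, 64);      /* temporary allocations ...     */
ArenaRelease(pool, mark);                  /* releases everything past mark */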
Example #4
File: arena.cpp  Project: vasi/kdelibs
void InitArenaPool(ArenaPool *pool, const char* /*name*/,
                   unsigned int /*size*/, unsigned int align)
{
     unsigned int size = POOL_SIZE;
     if (align == 0)
         align = ARENA_DEFAULT_ALIGN;
     pool->mask = BITMASK(CeilingLog2(align));
     pool->first.next = NULL;
     pool->first.base = pool->first.avail = pool->first.limit =
         (uword)ARENA_ALIGN(pool, &pool->first + 1);
     pool->current = &pool->first;
     pool->arenasize = size;
     pool->largealloc = LARGE_ALLOCATION_CEIL(pool);
     pool->cumul = freelist_count*size;
}
Example #5
// Reallocate storage in Arena.  
void *Arena::Arealloc(void* old_ptr, size_t old_size, size_t new_size) {
  assert(new_size >= 0, "bad size");
  if (new_size == 0) return NULL;
#ifdef ASSERT
  if (UseMallocOnly) {
    // always allocate a new object  (otherwise we'll free this one twice)
    char* copy = (char*)Amalloc(new_size);
    size_t n = MIN2(old_size, new_size);
    if (n > 0) memcpy(copy, old_ptr, n);
    return copy;
  }
#endif
  char *c_old = (char*)old_ptr; // Handy name
  // Stupid fast special case
  if( new_size <= old_size ) {  // Shrink in-place
    if( c_old+old_size == _hwm) // Attempt to free the excess bytes
      _hwm = c_old+new_size;    // Adjust hwm
    return c_old;
  }

  // make sure that new_size is legal
  size_t corrected_new_size = ARENA_ALIGN(new_size);

  // See if we can resize in-place
  if( (c_old+old_size == _hwm) &&       // Adjusting recent thing
      (c_old+corrected_new_size <= _max) ) {      // Still fits where it sits
    _hwm = c_old+corrected_new_size;      // Adjust hwm
    return c_old;               // Return old pointer
  }

  // Oops, got to relocate guts
  void *new_ptr = Amalloc(new_size);
  memcpy( new_ptr, c_old, old_size );
  Afree(c_old,old_size);        // Mostly done to keep stats accurate
  return new_ptr;
}
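A brief usage sketch (the arena instance and sizes are illustrative): when the block being grown is the most recent allocation and still fits below _max, Arealloc just bumps _hwm; otherwise it allocates fresh space and copies.

// Sketch: grow a 64-byte buffer to 256 bytes inside the arena.
char* buf = (char*)arena->Amalloc(64);
buf = (char*)arena->Arealloc(buf, 64, 256);  // may resize in place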
Example #6
/*
 ** ArenaAllocate() -- allocate space from an arena pool
 ** 
 ** Description: ArenaAllocate() allocates space from an arena
 ** pool. 
 **
 ** First try to satisfy the request from arenas starting at
 ** pool->current.
 **
 ** If there is not enough space in the arena pool->current, try
 ** to claim an arena, on a first fit basis, from the global
 ** freelist (arena_freelist).
 ** 
 ** If no arena in arena_freelist is suitable, then try to
 ** allocate a new arena from the heap.
 **
 ** Returns: pointer to allocated space or NULL
 ** 
 */
void* ArenaAllocate(ArenaPool *pool, unsigned int nb)
{
    Arena *a;   
    char *rp;     /* returned pointer */

    ASSERT((nb & pool->mask) == 0);
    
    nb = (uword)ARENA_ALIGN(pool, nb); /* force alignment */

    /* attempt to allocate from arenas at pool->current */
    {
        a = pool->current;
        do {
            if ( a->avail + nb <= a->limit )  {
                pool->current = a;
                rp = (char *)a->avail;
                a->avail += nb;
                return rp;
            }
        } while( NULL != (a = a->next) );
    }

    /* attempt to allocate from arena_freelist */
    {
        Arena *p = NULL; /* previous pointer, for unlinking from freelist */

        for ( a = arena_freelist; a != NULL ; p = a, a = a->next ) {
            if ( a->base + nb <= a->limit )  {
                if ( p == NULL )
                    arena_freelist = a->next;
                else
                    p->next = a->next;
                a->avail = a->base;
                rp = (char *)a->avail;
                a->avail += nb;
                /* the newly allocated arena is linked after pool->current 
                 *  and becomes pool->current */
                a->next = pool->current->next;
                pool->current->next = a;
                pool->current = a;
                if ( 0 == pool->first.next )
                    pool->first.next = a;
                freelist_count--;
                return(rp);
            }
        }
    }

    /* attempt to allocate from the heap */ 
    {  
        unsigned int sz = max(pool->arenasize, nb);
        sz += sizeof *a + pool->mask;  /* header and alignment slop */
#ifdef DEBUG_ARENA_MALLOC
        i++;
        OWB_PRINTF("Malloc: %d\n", i);
#endif
        a = (Arena*)fastMalloc(sz);
        if (a)  {
            a->limit = (uword)a + sz;
            a->base = a->avail = (uword)ARENA_ALIGN(pool, a + 1);
            rp = (char *)a->avail;
            a->avail += nb;
            /* the newly allocated arena is linked after pool->current 
            *  and becomes pool->current */
            a->next = pool->current->next;
            pool->current->next = a;
            pool->current = a;
            if ( !pool->first.next )
                pool->first.next = a;
            return(rp);
       }
    }

    /* we got to here, and there's no memory to allocate */
    return(0);
} /* --- end ArenaAllocate() --- */
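The file-scope state consumed above is declared elsewhere; a plausible declaration plus a call-site sketch (the declarations are assumptions inferred from usage):

/* Assumed globals backing the freelist tier: */
static Arena* arena_freelist = NULL;  /* arenas parked by FreeArenaList */
static int    freelist_count = 0;     /* number of parked arenas        */

/* Call-site sketch: nb should already satisfy the pool's alignment. */
void* p = ArenaAllocate(&pool, 64);
if (p == NULL) {
    /* all three tiers failed: current arenas, freelist, and the heap */
}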
Example #7
File: arena.cpp  Project: KDE/khtml
/*
 ** ArenaAllocate() -- allocate space from an arena pool
 **
 ** Description: ArenaAllocate() allocates space from an arena
 ** pool.
 **
 ** First try to satisfy the request from arenas starting at
 ** pool->current.
 **
 ** If there is not enough space in the arena pool->current, try
 ** to claim an arena, on a first fit basis, from the global
 ** freelist (arena_freelist).
 **
 ** If no arena in arena_freelist is suitable, then try to
 ** allocate a new arena from the heap.
 **
 ** Returns: pointer to allocated space or NULL
 **
 */
void *ArenaAllocate(ArenaPool *pool, unsigned int nb)
{
    Arena *a;
    char *rp;     /* returned pointer */

#ifdef DEBUG_ARENA_MALLOC
    assert((nb & pool->mask) == 0);
#endif

    nb = (uword)ARENA_ALIGN(pool, nb); /* force alignment */

    /* attempt to allocate from arenas at pool->current */
    {
        a = pool->current;
        do {
            if (a->avail + nb <= a->limit)  {
                pool->current = a;
                rp = (char *)a->avail;
                a->avail += nb;
                VALGRIND_MEMPOOL_ALLOC(a->base, rp, nb);
                return rp;
            }
        } while (NULL != (a = a->next));
    }

    /* attempt to allocate from arena_freelist */
    {
        Arena *p; /* previous pointer, for unlinking from freelist */

        for (a = p = arena_freelist; a != NULL; p = a, a = a->next) {
            if (a->base + nb <= a->limit)  {
                if (p == arena_freelist) {
                    arena_freelist = a->next;
                } else {
                    p->next = a->next;
                }
                a->avail = a->base;
                rp = (char *)a->avail;
                a->avail += nb;
                VALGRIND_MEMPOOL_ALLOC(a->base, rp, nb);
                /* the newly allocated arena is linked after pool->current
                 *  and becomes pool->current */
                a->next = pool->current->next;
                pool->current->next = a;
                pool->current = a;
                if (0 == pool->first.next) {
                    pool->first.next = a;
                }
                freelist_count--;
                return (rp);
            }
        }
    }

    /* attempt to allocate from the heap */
    {
        unsigned int sz;
#if HAVE_MMAP
        if (pool->cumul > pool->largealloc) {
            // High memory pressure. Switch to a fractional allocation strategy
            // so that malloc gets a chance to successfully trim us down when it's over.
            sz = qMin(pool->cumul / 12, MAX_DISCRETE_ALLOCATION(pool));
#ifdef DEBUG_ARENA_MALLOC
            printf("allocating %d bytes (fractional strategy)\n", sz);
#endif
        } else
#endif
            sz = pool->arenasize > nb ? pool->arenasize : nb;
        sz += sizeof * a + pool->mask; /* header and alignment slop */
        pool->cumul += sz;
#ifdef DEBUG_ARENA_MALLOC
        i++;
        printf("Malloc: %d\n", i);
#endif
        a = (Arena *)malloc(sz);
        if (a)  {
            a->limit = (uword)a + sz;
            a->base = a->avail = (uword)ARENA_ALIGN(pool, a + 1);
            VALGRIND_CREATE_MEMPOOL(a->base, 0, 0);
            rp = (char *)a->avail;
            a->avail += nb;
            VALGRIND_MEMPOOL_ALLOC(a->base, rp, nb);

            /* the newly allocated arena is linked after pool->current
            *  and becomes pool->current */
            a->next = pool->current->next;
            pool->current->next = a;
            pool->current = a;
            if (!pool->first.next) {
                pool->first.next = a;
            }
            return (rp);
        }
    }

    /* we got to here, and there's no memory to allocate */
    return (0);
} /* --- end ArenaAllocate() --- */
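Design note: relative to Example #6, this khtml variant adds Valgrind mempool annotations (VALGRIND_CREATE_MEMPOOL / VALGRIND_MEMPOOL_ALLOC) and, when HAVE_MMAP is set and pool->cumul exceeds pool->largealloc, switches to a fractional growth strategy (pool->cumul / 12, capped by MAX_DISCRETE_ALLOCATION) so that malloc has a chance to trim the pool back down once memory pressure subsides.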