Ejemplo n.º 1
0
/* Compact the buffer by trying both shrinking and moving.
 *
 * Try to move first. If unsuccessful, try to shrink. If that was successful
 * try to move once more as there might be more room now.
 *
 * ctx          - the buflib context to compact
 * shrink_hints - hint word handed to each block's shrink_callback
 *
 * Returns true if any space was recovered.
 */
static bool
buflib_compact_and_shrink(struct buflib_context *ctx, unsigned shrink_hints)
{
    bool result = false;
    /* if something compacted before already there will be no further gain */
    if (!ctx->compact)
        result = buflib_compact(ctx);
    if (!result)
    {
        /* 'before' trails one block behind 'this' so the walk can restart
         * from a known-good block if the callback frees this handle */
        union buflib_data *this, *before;
        for(this = ctx->buf_start, before = this;
            this < ctx->alloc_end;
            before = this, this += abs(this->val))
        {
            /* only allocated blocks (val > 0) that offer a shrink callback */
            if (this->val > 0 && this[2].ops
                              && this[2].ops->shrink_callback)
            {
                int ret;
                int handle = ctx->handle_table - this[1].handle;
                char* data = this[1].handle->alloc;
                /* remember whether this block ends at alloc_end; if the
                 * callback shrinks it from the back, alloc_end must follow */
                bool last = (this + this->val) == ctx->alloc_end;
                ret = this[2].ops->shrink_callback(handle, shrink_hints,
                                            data, (char*)(this+this->val)-data);
                result |= (ret == BUFLIB_CB_OK);
                /* this might have changed in the callback (if
                 * it shrinked from the top), get it again */
                this = handle_to_block(ctx, handle);
                /* BUG FIX: the callback may have freed the handle entirely,
                 * in which case handle_to_block() returns NULL and the loop
                 * increment would dereference it. Re-run from the previous
                 * block instead. */
                if (!this)
                    this = before;
                /* BUG FIX: the block could also have shrunk from the back;
                 * keep alloc_end current so the loop bound stays valid */
                else if (last)
                    ctx->alloc_end = this + this->val;
            }
        }
        /* shrinking was successful at least once, try compaction again */
        if (result)
            result |= buflib_compact(ctx);
    }

    return result;
}
Ejemplo n.º 2
0
/* Compact the buffer by trying both shrinking and moving.
 *
 * Try to move first. If unsuccessful, try to shrink. If that was successful
 * try to move once more as there might be more room now.
 *
 * ctx          - the buflib context to compact
 * shrink_hints - hint word handed to each block's shrink_callback; carries
 *                a position part (BUFLIB_SHRINK_POS_MASK) and a size part
 *                (BUFLIB_SHRINK_SIZE_MASK)
 *
 * Returns true if any space was recovered (by moving or shrinking).
 */
static bool
buflib_compact_and_shrink(struct buflib_context *ctx, unsigned shrink_hints)
{
    bool result = false;
    /* if something compacted before already there will be no further gain */
    if (!ctx->compact)
        result = buflib_compact(ctx);
    if (!result)
    {
        /* 'before' trails one block behind 'this' so the walk can restart
         * from a known-good block if the callback frees this handle */
        union buflib_data *this, *before;
        for(this = ctx->buf_start, before = this;
            this < ctx->alloc_end;
            before = this, this += abs(this->val))
        {
            /* only allocated blocks (val > 0) that offer a shrink callback */
            if (this->val > 0 && this[2].ops
                              && this[2].ops->shrink_callback)
            {
                int ret;
                int handle = ctx->handle_table - this[1].handle;
                char* data = this[1].handle->alloc;
                /* does this block end exactly at alloc_end? needed below to
                 * fix up alloc_end after a shrink from the back */
                bool last = (this+this->val) == ctx->alloc_end;
                unsigned pos_hints = shrink_hints & BUFLIB_SHRINK_POS_MASK;
                /* adjust what we ask for if there's free space in the front
                 * this isn't too unlikely assuming this block is
                 * shrinkable but not movable */
                if (pos_hints == BUFLIB_SHRINK_POS_FRONT
                    && before != this && before->val < 0)
                {
                    /* a free block (negative length) sits right in front */
                    size_t free_space = (-before->val) * sizeof(union buflib_data);
                    size_t wanted = shrink_hints & BUFLIB_SHRINK_SIZE_MASK;
                    if (wanted < free_space) /* no shrink needed? */
                        continue;
                    wanted -= free_space;
                    /* NOTE(review): shrink_hints is rewritten in place, so
                     * blocks later in this walk see the reduced size request
                     * rather than the caller's original — confirm intended */
                    shrink_hints = pos_hints | wanted;
                }
                ret = this[2].ops->shrink_callback(handle, shrink_hints,
                                            data, (char*)(this+this->val)-data);
                result |= (ret == BUFLIB_CB_OK);
                /* 'this' might have changed in the callback (if it shrinked
                 * from the top or even freed the handle), get it again */
                this = handle_to_block(ctx, handle);
                /* The handle was possibly be freed in the callback,
                 * re-run the loop with the handle before */
                if (!this)
                    this = before;
                /* could also change with shrinking from back */
                else if (last)
                    ctx->alloc_end = this + this->val;
            }
        }
        /* shrinking was successful at least once, try compaction again */
        if (result)
            result |= buflib_compact(ctx);
    }

    return result;
}
Ejemplo n.º 3
0
/* Shift buffered items up by size bytes, or as many as possible if size == 0.
 * Set size to the number of bytes freed.
 *
 * Returns a pointer to the start of the buffer region that was given up.
 */
void*
buflib_buffer_out(struct buflib_context *ctx, size_t *size)
{
    /* compact first so the gap between allocations and handles is maximal */
    if (!ctx->compact)
        buflib_compact(ctx);

    const size_t word = sizeof(union buflib_data);
    size_t words = ctx->last_handle - ctx->alloc_end;
    size_t wanted = *size;

    /* honor a non-zero request smaller than what's available, rounded up
     * to whole buflib words */
    if (wanted != 0 && wanted < words * word)
        words = (wanted + word - 1) / word;

    *size = words * word;
    void *buf = ctx->buf_start;
    buflib_buffer_shift(ctx, words);
    return buf;
}
Ejemplo n.º 4
0
/* Return the maximum allocatable contiguous memory in bytes */
size_t
buflib_allocatable(struct buflib_context* ctx)
{
    union buflib_data *block;
    size_t run = 0;   /* free words in the current compactable run */
    size_t best = 0;  /* largest run seen so far, in words */

    /* make sure buffer is as contiguous as possible  */
    if (!ctx->compact)
        buflib_compact(ctx);

    /* walk the remaining holes; a run of free space is only broken by a
     * block that cannot be moved out of the way */
    for (block = find_first_free(ctx); block < ctx->alloc_end;
         block += abs(block->val))
    {
        if (block->val < 0)
        {
            /* free block: its (negated) length joins the current run */
            run += -block->val;
        }
        else if (!IS_MOVABLE(block))
        {
            /* unmovable allocation ends the run; keep the best one */
            if (best < run)
                best = run;
            run = 0;
        }
    }

    /* pick the larger of the interior runs and the trailing run, then
     * convert words to bytes */
    best = MAX(best, run) * sizeof(union buflib_data);
    /* the gap just before the handle table may beat any interior hole */
    return MAX(best, free_space_at_end(ctx));
}
Ejemplo n.º 5
0
int
buflib_alloc_ex(struct buflib_context *ctx, size_t size, const char *name,
                struct buflib_callbacks *ops)
{
    /* busy wait if there's a thread owning the lock */
    while (ctx->handle_lock != 0) YIELD();

    union buflib_data *handle, *block;
    size_t name_len = name ? B_ALIGN_UP(strlen(name)+1) : 0;
    bool last;
    /* This really is assigned a value before use */
    int block_len;
    size += name_len;
    size = (size + sizeof(union buflib_data) - 1) /
           sizeof(union buflib_data)
           /* add 4 objects for alloc len, pointer to handle table entry and
            * name length, and the ops pointer */
           + 4;
handle_alloc:
    handle = handle_alloc(ctx);
    if (!handle)
    {
        /* If allocation has failed, and compaction has succeded, it may be
         * possible to get a handle by trying again.
         */
        if (!ctx->compact && buflib_compact(ctx))
            goto handle_alloc;
        else
        {   /* first try to shrink the alloc before the handle table
             * to make room for new handles */
            int handle = ctx->handle_table - ctx->last_handle;
            union buflib_data* last_block = handle_to_block(ctx, handle);
            struct buflib_callbacks* ops = last_block[2].ops;
            if (ops && ops->shrink_callback)
            {
                char *data = buflib_get_data(ctx, handle);
                unsigned hint = BUFLIB_SHRINK_POS_BACK | 10*sizeof(union buflib_data);
                if (ops->shrink_callback(handle, hint, data, 
                        (char*)(last_block+last_block->val)-data) == BUFLIB_CB_OK)
                {   /* retry one more time */
                    goto handle_alloc;
                }
            }
            return 0;
        }
    }

buffer_alloc:
    /* need to re-evaluate last before the loop because the last allocation
     * possibly made room in its front to fit this, so last would be wrong */
    last = false;
    for (block = ctx->first_free_block;;block += block_len)
    {
        /* If the last used block extends all the way to the handle table, the
         * block "after" it doesn't have a header. Because of this, it's easier
         * to always find the end of allocation by saving a pointer, and always
         * calculate the free space at the end by comparing it to the
         * last_handle pointer.
         */
        if(block == ctx->alloc_end)
        {
            last = true;
            block_len = ctx->last_handle - block;
            if ((size_t)block_len < size)
                block = NULL;
            break;
        }
        block_len = block->val;
        /* blocks with positive length are already allocated. */
        if(block_len > 0)
            continue;
        block_len = -block_len;
        /* The search is first-fit, any fragmentation this causes will be 
         * handled at compaction.
         */
        if ((size_t)block_len >= size)
            break;
    }
    if (!block)
    {
        /* Try compacting if allocation failed */
        if (buflib_compact_and_shrink(ctx,
                    (size*sizeof(union buflib_data))&BUFLIB_SHRINK_SIZE_MASK))
        {
            goto buffer_alloc;
        } else {
            handle->val=1;
            handle_free(ctx, handle);
            return 0;
        }
    }

    /* Set up the allocated block, by marking the size allocated, and storing
     * a pointer to the handle.
     */
    union buflib_data *name_len_slot;
    block->val = size;
    block[1].handle = handle;
    block[2].ops = ops ?: &default_callbacks;
    strcpy(block[3].name, name);
    name_len_slot = (union buflib_data*)B_ALIGN_UP(block[3].name + name_len);
    name_len_slot->val = 1 + name_len/sizeof(union buflib_data);
    handle->alloc = (char*)(name_len_slot + 1);
    /* If we have just taken the first free block, the next allocation search
     * can save some time by starting after this block.
     */
    if (block == ctx->first_free_block)
        ctx->first_free_block += size;
    block += size;
    /* alloc_end must be kept current if we're taking the last block. */
    if (last)
        ctx->alloc_end = block;
    /* Only free blocks *before* alloc_end have tagged length. */
    else if ((size_t)block_len > size)
        block->val = size - block_len;
    /* Return the handle index as a positive integer. */
    return ctx->handle_table - handle;
}