Example 1
/* Self-locked. This function is not multiple-caller safe */
static void memblock_replace_import(pa_memblock *b) {
    pa_memimport_segment *segment;
    pa_memimport *import;

    pa_assert(b);
    pa_assert(b->type == PA_MEMBLOCK_IMPORTED);

    /* The block stops counting as imported once it has been replaced
     * by a local copy, so update the pool statistics first. The
     * counters are atomic and need no lock. */
    pa_assert(pa_atomic_load(&b->pool->stat.n_imported) > 0);
    pa_assert(pa_atomic_load(&b->pool->stat.imported_size) >= (int) b->length);
    pa_atomic_dec(&b->pool->stat.n_imported);
    pa_atomic_sub(&b->pool->stat.imported_size, (int) b->length);

    /* pa_assert_se() always evaluates its argument, even when assertion
     * checking is compiled out, so these assignments are never optimized away */
    pa_assert_se(segment = b->per_type.imported.segment);
    pa_assert_se(import = segment->import);

    pa_mutex_lock(import->mutex);

    pa_assert_se(pa_hashmap_remove(import->blocks, PA_UINT32_TO_PTR(b->per_type.imported.id)));

    /* Copy the data into locally owned memory, so the block no longer
     * references the imported segment */
    memblock_make_local(b);

    pa_assert(segment->n_blocks >= 1);
    if (-- segment->n_blocks <= 0)
        segment_detach(segment);

    pa_mutex_unlock(import->mutex);
}
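
Two idioms are worth noting here: the pool statistics are maintained with atomic operations outside any lock, while the structural bookkeeping (the segment's block count) is serialized by a mutex the function takes itself, which is what the "self-locked" comment refers to. Below is a minimal, self-contained sketch of the same shape using C11 atomics and pthreads; import_t and block_release() are invented names for illustration, not PulseAudio API.

#include <assert.h>
#include <pthread.h>
#include <stdatomic.h>
#include <stdio.h>

/* Invented stand-in for pa_memimport and its statistics */
typedef struct import {
    pthread_mutex_t mutex;  /* serializes structural changes, like import->mutex */
    int n_blocks;           /* blocks still referencing this import */
    atomic_int n_imported;  /* statistics counter, updated lock-free */
} import_t;

/* Self-locked, like memblock_replace_import(): the caller holds no
 * lock, the function takes the mutex it needs itself. */
static void block_release(import_t *imp) {
    /* Statistics are adjusted with atomics, outside the mutex */
    assert(atomic_load(&imp->n_imported) > 0);
    atomic_fetch_sub(&imp->n_imported, 1);

    /* The structural change (possibly detaching the segment) needs the lock */
    pthread_mutex_lock(&imp->mutex);

    assert(imp->n_blocks >= 1);
    if (--imp->n_blocks <= 0)
        printf("last block released, detaching\n");

    pthread_mutex_unlock(&imp->mutex);
}

int main(void) {
    import_t imp = { PTHREAD_MUTEX_INITIALIZER, 1, 1 };
    block_release(&imp);
    return 0;
}

Keeping the statistics out from under the mutex means that frequent counter updates never contend with the much rarer structural change of detaching the last segment.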
Example 2
static void memblock_free(pa_memblock *b) {
    pa_assert(b);

    pa_assert(pa_atomic_load(&b->n_acquired) == 0);

    stat_remove(b);

    switch (b->type) {
        case PA_MEMBLOCK_USER:
            pa_assert(b->per_type.user.free_cb);
            b->per_type.user.free_cb(pa_atomic_ptr_load(&b->data));

            /* Fall through */

        case PA_MEMBLOCK_FIXED:
            if (pa_flist_push(PA_STATIC_FLIST_GET(unused_memblocks), b) < 0)
                pa_xfree(b);

            break;

        case PA_MEMBLOCK_APPENDED:

            /* We could attach it to unused_memblocks, but that would
             * probably waste some considerable amount of memory */
            pa_xfree(b);
            break;

        case PA_MEMBLOCK_IMPORTED: {
            pa_memimport_segment *segment;
            pa_memimport *import;

            /* FIXME! This should be implemented lock-free */

            pa_assert_se(segment = b->per_type.imported.segment);
            pa_assert_se(import = segment->import);

            pa_mutex_lock(import->mutex);

            pa_assert_se(pa_hashmap_remove(import->blocks, PA_UINT32_TO_PTR(b->per_type.imported.id)));

            pa_assert(segment->n_blocks >= 1);
            if (-- segment->n_blocks <= 0)
                segment_detach(segment);

            pa_mutex_unlock(import->mutex);

            /* Tell the exporting side that the block is gone; this runs
             * after the mutex has been dropped */
            import->release_cb(import, b->per_type.imported.id, import->userdata);

            if (pa_flist_push(PA_STATIC_FLIST_GET(unused_memblocks), b) < 0)
                pa_xfree(b);

            break;
        }

        case PA_MEMBLOCK_POOL_EXTERNAL:
        case PA_MEMBLOCK_POOL: {
            struct mempool_slot *slot;
            bool call_free;

            pa_assert_se(slot = mempool_slot_by_ptr(b->pool, pa_atomic_ptr_load(&b->data)));

            call_free = b->type == PA_MEMBLOCK_POOL_EXTERNAL;

/* #ifdef HAVE_VALGRIND_MEMCHECK_H */
/*             if (PA_UNLIKELY(pa_in_valgrind())) { */
/*                 VALGRIND_FREELIKE_BLOCK(slot, b->pool->block_size); */
/*             } */
/* #endif */

            /* The free list dimensions should easily allow all slots
             * to fit in, hence try harder if pushing this slot into
             * the free list fails */
            while (pa_flist_push(b->pool->free_slots, slot) < 0)
                ;

            if (call_free)
                if (pa_flist_push(PA_STATIC_FLIST_GET(unused_memblocks), b) < 0)
                    pa_xfree(b);

            break;
        }

        case PA_MEMBLOCK_TYPE_MAX:
        default:
            pa_assert_not_reached();
    }
}
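
Several branches above share the same recycling idiom: try to push the freed header onto a bounded free list for later reuse, and fall back to pa_xfree() only when the push fails because the list is full (pa_flist_push() returns a negative value). A simplified stand-alone sketch of that fallback follows; the mutex-guarded fixed-size stack is an invented stand-in for the lock-free pa_flist.

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

#define FLIST_SIZE 4

/* Toy stand-in for pa_flist: a bounded stack of recycled objects.
 * The real pa_flist is lock-free; a mutex keeps the sketch short. */
static void *flist[FLIST_SIZE];
static int flist_len;
static pthread_mutex_t flist_mutex = PTHREAD_MUTEX_INITIALIZER;

/* Returns 0 on success, -1 if the list is full, like pa_flist_push() */
static int flist_push(void *p) {
    int r = -1;
    pthread_mutex_lock(&flist_mutex);
    if (flist_len < FLIST_SIZE) {
        flist[flist_len++] = p;
        r = 0;
    }
    pthread_mutex_unlock(&flist_mutex);
    return r;
}

static void block_free(void *b) {
    /* Prefer recycling; hand the header back to the allocator only
     * when the free list is already full, mirroring the pattern above */
    if (flist_push(b) < 0)
        free(b);
}

int main(void) {
    for (int i = 0; i < 8; i++)
        block_free(malloc(64));   /* first 4 recycled, the rest freed */
    printf("%d blocks kept for reuse\n", flist_len);
    return 0;
}

Note the contrast with the pool-slot path above, which spins until the push succeeds: a dropped header merely costs one future allocation, whereas a dropped pool slot would leak a piece of the pool, so only the slot push is worth retrying.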
Example 3
static void memblock_free(pa_memblock *b) {
    pa_assert(b);

    pa_assert(pa_atomic_load(&b->n_acquired) == 0);

    stat_remove(b);

    switch (b->type) {
        case PA_MEMBLOCK_USER:
            pa_assert(b->per_type.user.free_cb);
            b->per_type.user.free_cb(pa_atomic_ptr_load(&b->data));

            /* Fall through */

        case PA_MEMBLOCK_FIXED:
        case PA_MEMBLOCK_APPENDED:
            if (pa_flist_push(PA_STATIC_FLIST_GET(unused_memblocks), b) < 0)
                pa_xfree(b);

            break;

        case PA_MEMBLOCK_IMPORTED: {
            pa_memimport_segment *segment;
            pa_memimport *import;

            /* FIXME! This should be implemented lock-free */

            segment = b->per_type.imported.segment;
            pa_assert(segment);
            import = segment->import;
            pa_assert(import);

            pa_mutex_lock(import->mutex);
            pa_hashmap_remove(import->blocks, PA_UINT32_TO_PTR(b->per_type.imported.id));
            if (-- segment->n_blocks <= 0)
                segment_detach(segment);

            pa_mutex_unlock(import->mutex);

            import->release_cb(import, b->per_type.imported.id, import->userdata);

            if (pa_flist_push(PA_STATIC_FLIST_GET(unused_memblocks), b) < 0)
                pa_xfree(b);
            break;
        }

        case PA_MEMBLOCK_POOL_EXTERNAL:
        case PA_MEMBLOCK_POOL: {
            struct mempool_slot *slot;
            int call_free;

            slot = mempool_slot_by_ptr(b->pool, pa_atomic_ptr_load(&b->data));
            pa_assert(slot);

            call_free = b->type == PA_MEMBLOCK_POOL_EXTERNAL;

            /* The free list dimensions should easily allow all slots
             * to fit in, hence try harder if pushing this slot into
             * the free list fails */
            while (pa_flist_push(b->pool->free_slots, slot) < 0)
                ;

            if (call_free)
                if (pa_flist_push(PA_STATIC_FLIST_GET(unused_memblocks), b) < 0)
                    pa_xfree(b);

            break;
        }

        case PA_MEMBLOCK_TYPE_MAX:
        default:
            pa_assert_not_reached();
    }
}
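
Examples 2 and 3 appear to be two revisions of the same function: the variant in Example 2 additionally asserts that the hashmap removal succeeds and that segment->n_blocks is still positive, and frees PA_MEMBLOCK_APPENDED headers directly rather than recycling them. Both versions rely on deliberate switch fall-through, so that PA_MEMBLOCK_USER first runs the owner's free callback and then continues into the shared header-release path. A minimal illustration of that dispatch pattern follows, with invented types in place of pa_memblock.

#include <assert.h>
#include <stdio.h>
#include <stdlib.h>

/* Invented block types, loosely modeled on pa_memblock_type_t */
typedef enum { BLOCK_USER, BLOCK_FIXED, BLOCK_APPENDED, BLOCK_TYPE_MAX } block_type_t;

typedef struct {
    block_type_t type;
    void (*free_cb)(void *data);  /* only set for BLOCK_USER */
    void *data;
} block_t;

static void block_free(block_t *b) {
    switch (b->type) {
        case BLOCK_USER:
            /* User-supplied memory: run the owner's callback first... */
            assert(b->free_cb);
            b->free_cb(b->data);

            /* Fall through: ...then release the header like BLOCK_FIXED */

        case BLOCK_FIXED:
        case BLOCK_APPENDED:
            free(b);
            break;

        default:
            abort();  /* like pa_assert_not_reached() */
    }
}

int main(void) {
    block_t *b = malloc(sizeof *b);
    b->type = BLOCK_USER;
    b->free_cb = free;
    b->data = malloc(16);
    block_free(b);   /* frees both the user data and the header */
    return 0;
}

The fall-through keeps the cleanup for the header in one place while still letting each block type prepend its own type-specific teardown.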