static void
list_remove_empty_desc (MonoLockFreeAllocSizeClass *sc)
{
	int num_non_empty = 0;

	for (;;) {
		Descriptor *desc = (Descriptor*) mono_lock_free_queue_dequeue (&sc->partial);
		if (!desc)
			return;

		/*
		 * We don't need to read atomically because we're the
		 * only thread that references this descriptor.
		 */
		if (desc->anchor.data.state == STATE_EMPTY) {
			desc_retire (desc);
		} else {
			g_assert (desc->heap->sc == sc);
			mono_thread_hazardous_try_free (desc, desc_put_partial);
			if (++num_non_empty >= 2)
				return;
		}
	}
}
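/*
 * A standalone toy model (not part of this allocator) of the cleanup
 * policy above: empty descriptors are retired, but we stop after
 * re-enqueuing two non-empty ones, so each call does only a bounded
 * amount of work instead of walking the whole partial list.
 */
#include <stdio.h>

enum { TOY_STATE_FULL, TOY_STATE_PARTIAL, TOY_STATE_EMPTY };

int
main (void)
{
	int states [] = { TOY_STATE_EMPTY, TOY_STATE_PARTIAL, TOY_STATE_EMPTY, TOY_STATE_PARTIAL, TOY_STATE_PARTIAL };
	int n = sizeof (states) / sizeof (states [0]);
	int i, num_non_empty = 0;

	for (i = 0; i < n; ++i) {
		if (states [i] == TOY_STATE_EMPTY) {
			printf ("retire descriptor %d\n", i);
		} else {
			printf ("re-enqueue descriptor %d\n", i);
			if (++num_non_empty >= 2)
				break;	/* leave the rest for later calls */
		}
	}
	return 0;
}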
static gpointer
alloc_from_new_sb (MonoLockFreeAllocator *heap)
{
	unsigned int slot_size, block_size, count, i;
	Descriptor *desc = desc_alloc ();

	slot_size = desc->slot_size = heap->sc->slot_size;
	block_size = desc->block_size = heap->sc->block_size;
	count = LOCK_FREE_ALLOC_SB_USABLE_SIZE (block_size) / slot_size;

	desc->heap = heap;
	/*
	 * Setting avail to 1 because 0 is the block we're allocating
	 * right away.
	 */
	desc->anchor.data.avail = 1;
	desc->max_count = count;

	desc->anchor.data.count = desc->max_count - 1;
	desc->anchor.data.state = STATE_PARTIAL;

	desc->sb = alloc_sb (desc);

	/* Organize blocks into linked list. */
	for (i = 1; i < count - 1; ++i)
		*(unsigned int*)((char*)desc->sb + i * slot_size) = i + 1;

	mono_memory_write_barrier ();

	/* Make it active or free it again. */
	if (mono_atomic_cas_ptr ((volatile gpointer *)&heap->active, desc, NULL) == NULL) {
		return desc->sb;
	} else {
		desc->anchor.data.state = STATE_EMPTY;
		desc_retire (desc);
		return NULL;
	}
}
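/*
 * A minimal, self-contained sketch (not Mono code) of the free-list
 * trick used in alloc_from_new_sb: each free slot's first word holds
 * the index of the next free slot, so the superblock needs no separate
 * list nodes. The last slot's link is never followed because the
 * allocator's count runs out first.
 */
#include <stdio.h>
#include <stdlib.h>

#define SLOT_SIZE	32
#define COUNT		8

int
main (void)
{
	char *sb = calloc (COUNT, SLOT_SIZE);
	unsigned int avail, i;

	/* Slot 0 is handed out immediately; thread slots 1..COUNT-2. */
	for (i = 1; i < COUNT - 1; ++i)
		*(unsigned int*)(sb + i * SLOT_SIZE) = i + 1;

	/* Pop the remaining COUNT-1 slots by following the stored indices. */
	avail = 1;
	for (i = 0; i < COUNT - 1; ++i) {
		char *slot = sb + avail * SLOT_SIZE;
		printf ("allocated slot %u\n", avail);
		avail = *(unsigned int*)slot;
	}
	free (sb);
	return 0;
}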
void
mono_lock_free_free (gpointer ptr, size_t block_size)
{
	Anchor old_anchor, new_anchor;
	Descriptor *desc;
	gpointer sb;
	MonoLockFreeAllocator *heap = NULL;

	desc = *(Descriptor**) sb_header_for_addr (ptr, block_size);
	g_assert (block_size == desc->block_size);

	sb = desc->sb;

	do {
		new_anchor.value = old_anchor.value = ((volatile Anchor*)&desc->anchor)->value;
		/* Thread the freed slot onto the superblock's free list. */
		*(unsigned int*)ptr = old_anchor.data.avail;
		new_anchor.data.avail = ((char*)ptr - (char*)sb) / desc->slot_size;
		g_assert (new_anchor.data.avail < LOCK_FREE_ALLOC_SB_USABLE_SIZE (block_size) / desc->slot_size);

		if (old_anchor.data.state == STATE_FULL)
			new_anchor.data.state = STATE_PARTIAL;

		if (++new_anchor.data.count == desc->max_count) {
			heap = desc->heap;
			new_anchor.data.state = STATE_EMPTY;
		}
	} while (!set_anchor (desc, old_anchor, new_anchor));

	if (new_anchor.data.state == STATE_EMPTY) {
		g_assert (old_anchor.data.state != STATE_EMPTY);

		if (mono_atomic_cas_ptr ((volatile gpointer *)&heap->active, NULL, desc) == desc) {
			/*
			 * We own desc, check if it's still empty, in which case we retire it.
			 * If it's partial we need to put it back either on the active slot or
			 * on the partial list.
			 */
			if (desc->anchor.data.state == STATE_EMPTY) {
				desc_retire (desc);
			} else if (desc->anchor.data.state == STATE_PARTIAL) {
				if (mono_atomic_cas_ptr ((volatile gpointer *)&heap->active, desc, NULL) != NULL)
					heap_put_partial (desc);
			}
		} else {
			/*
			 * Somebody else must free it, so we do some
			 * freeing for others.
			 */
			list_remove_empty_desc (heap->sc);
		}
	} else if (old_anchor.data.state == STATE_FULL) {
		/*
		 * Nobody owned it, now we do, so we need to give it
		 * back.
		 */
		g_assert (new_anchor.data.state == STATE_PARTIAL);

		if (mono_atomic_cas_ptr ((volatile gpointer *)&desc->heap->active, desc, NULL) != NULL)
			heap_put_partial (desc);
	}
}
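/*
 * A standalone C11 sketch (not Mono code) of the anchor-CAS pattern in
 * mono_lock_free_free: avail, count and state are packed into a single
 * word so all three can be updated together with one compare-and-swap,
 * retrying if another thread modified the anchor in between.
 */
#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

enum { TOY_STATE_FULL, TOY_STATE_PARTIAL, TOY_STATE_EMPTY };

typedef union {
	struct {
		uint32_t avail : 15;
		uint32_t count : 15;
		uint32_t state : 2;
	} data;
	uint32_t value;
} ToyAnchor;

static _Atomic uint32_t anchor_word;

/* Analogous to set_anchor (): succeeds only if the anchor is unchanged
 * since we read it. */
static int
toy_set_anchor (uint32_t old_value, uint32_t new_value)
{
	return atomic_compare_exchange_weak (&anchor_word, &old_value, new_value);
}

int
main (void)
{
	ToyAnchor old_anchor, new_anchor;

	do {
		new_anchor.value = old_anchor.value = atomic_load (&anchor_word);
		new_anchor.data.count++;	/* one more free slot */
		if (old_anchor.data.state == TOY_STATE_FULL)
			new_anchor.data.state = TOY_STATE_PARTIAL;
	} while (!toy_set_anchor (old_anchor.value, new_anchor.value));

	printf ("count is now %u, state %u\n", new_anchor.data.count, new_anchor.data.state);
	return 0;
}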