/*
 * Pop one empty major-heap block from the global lock-free
 * `empty_blocks' free list, refilling the list from the OS when it is
 * exhausted.  The first word of each free block is used as the
 * intrusive next-pointer of the list.  The returned block is aligned
 * to MS_BLOCK_SIZE and has its link word cleared.
 */
static void*
ms_get_empty_block (void)
{
	char *p;
	int i;
	void *block, *empty, *next;

 retry:
	if (!empty_blocks) {
		/*
		 * Refill: grab MS_BLOCK_ALLOC_NUM blocks from the OS in one
		 * aligned allocation, then push them one by one.
		 */
		p = mono_sgen_alloc_os_memory_aligned (MS_BLOCK_SIZE * MS_BLOCK_ALLOC_NUM, MS_BLOCK_SIZE, TRUE);
		for (i = 0; i < MS_BLOCK_ALLOC_NUM; ++i) {
			block = p;
			/*
			 * We do the free list update one after the
			 * other so that other threads can use the new
			 * blocks as quickly as possible.
			 */
			do {
				empty = empty_blocks;
				*(void**)block = empty;
			} while (SGEN_CAS_PTR (&empty_blocks, block, empty) != empty);
			p += MS_BLOCK_SIZE;
		}

		SGEN_ATOMIC_ADD (num_empty_blocks, MS_BLOCK_ALLOC_NUM);

		stat_major_blocks_alloced += MS_BLOCK_ALLOC_NUM;
	}

	/*
	 * Lock-free pop.  If the list went empty under us (another thread
	 * drained it), go back and refill.
	 */
	do {
		empty = empty_blocks;
		if (!empty)
			goto retry;
		block = empty;
		next = *(void**)block;
	} while (SGEN_CAS_PTR (&empty_blocks, next, empty) != empty);

	SGEN_ATOMIC_ADD (num_empty_blocks, -1);

	/* Clear the link word so the block starts out fully zeroed for use. */
	*(void**)block = NULL;

	g_assert (!((mword)block & (MS_BLOCK_SIZE - 1)));

	mono_sgen_update_heap_boundaries ((mword)block, (mword)block + MS_BLOCK_SIZE);

	return block;
}
/*
 * Allocate `objsize' bytes of to-space for promoting `obj' during a
 * parallel collection.  Objects that have reached `promote_age' go
 * straight to the major heap; younger objects are bump-allocated from
 * the age-indexed buffer for their age.  Falls back to the locked slow
 * path, and ultimately to the major collector, when the buffer cannot
 * satisfy the request.  Returns a pointer to the allocated space.
 */
static inline char*
par_alloc_for_promotion (char *obj, size_t objsize, gboolean has_references)
{
	char *p;
	int age;

	age = get_object_age (obj);
	if (age >= promote_age)
		return major_collector.par_alloc_object (objsize, has_references);

 restart:
	p = age_alloc_buffers [age].next;
	LOAD_LOAD_FENCE; /* The read of ->next must happen before ->end */
	if (G_LIKELY (p + objsize <= age_alloc_buffers [age].end)) {
		/* Fast path: CAS-bump ->next; retry if another thread raced us. */
		if (SGEN_CAS_PTR ((void*)&age_alloc_buffers [age].next, p + objsize, p) != p)
			goto restart;
	} else {
		p = par_alloc_for_promotion_slow_path (age, objsize);

		/* Have we failed to promote to the nursery, lets just evacuate it to old gen. */
		if (!p)
			p = major_collector.par_alloc_object (objsize, has_references);
	}

	return p;
}
/*
 * Slow path for par_alloc_for_promotion: refill the age buffer for
 * `age' under par_alloc_buffer_refill_mutex.  Re-tries the lock-free
 * fast path first (another thread may have refilled while we waited
 * for the lock), otherwise retires the current buffer and installs a
 * fresh range from `collector_allocator'.  Returns NULL if no range
 * could be obtained.
 */
static char*
par_alloc_for_promotion_slow_path (int age, size_t objsize)
{
	char *p;
	size_t allocated_size;
	size_t aligned_objsize = (size_t)align_up (objsize, SGEN_TO_SPACE_GRANULE_BITS);

	mono_mutex_lock (&par_alloc_buffer_refill_mutex);

 restart:
	p = age_alloc_buffers [age].next;
	if (G_LIKELY (p + objsize <= age_alloc_buffers [age].end)) {
		if (SGEN_CAS_PTR ((void*)&age_alloc_buffers [age].next, p + objsize, p) != p)
			goto restart;
	} else {
		/* Reclaim remaining space - if we OOMd the nursery nothing to see here. */
		char *end = age_alloc_buffers [age].end;
		if (end) {
			/* Atomically advance ->next to ->end, then clear the leftover tail. */
			do {
				p = age_alloc_buffers [age].next;
			} while (SGEN_CAS_PTR ((void*)&age_alloc_buffers [age].next, end, p) != p);
			sgen_clear_range (p, end);
		}

		/* By setting end to NULL we make sure no other thread can advance while we're updating.*/
		age_alloc_buffers [age].end = NULL;
		STORE_STORE_FENCE;

		p = sgen_fragment_allocator_par_range_alloc (
			&collector_allocator,
			MAX (aligned_objsize, AGE_ALLOC_BUFFER_DESIRED_SIZE),
			MAX (aligned_objsize, AGE_ALLOC_BUFFER_MIN_SIZE),
			&allocated_size);
		if (p) {
			/* Carve out our own object, then publish the new buffer. */
			set_age_in_range (p, p + allocated_size, age);
			age_alloc_buffers [age].next = p + objsize;
			STORE_STORE_FENCE; /* Next must arrive before the new value for next. */
			age_alloc_buffers [age].end = p + allocated_size;
		}
	}

	mono_mutex_unlock (&par_alloc_buffer_refill_mutex);

	return p;
}
/*
 * Return `block' to the global lock-free `empty_blocks' free list and
 * bump the empty-block counter.  The block is zeroed first; its first
 * word then serves as the intrusive next-pointer of the list.
 */
static void
ms_free_block (void *block)
{
	void *old_head;

	memset (block, 0, MS_BLOCK_SIZE);

	/* Lock-free push: link to the current head, then CAS it in. */
	for (;;) {
		old_head = empty_blocks;
		*(void**)block = old_head;
		if (SGEN_CAS_PTR (&empty_blocks, block, old_head) == old_head)
			break;
	}

	SGEN_ATOMIC_ADD (num_empty_blocks, 1);
}
/*
 * Return `block' to the global lock-free `empty_blocks' free list.
 * This variant also returns the block's space to the major-heap space
 * accounting before zeroing it.  The block's first word is used as the
 * intrusive next-pointer of the free list.
 */
static void
ms_free_block (void *block)
{
	void *empty;

	mono_sgen_release_space (MS_BLOCK_SIZE, SPACE_MAJOR);
	memset (block, 0, MS_BLOCK_SIZE);

	/* Lock-free push onto the free list. */
	do {
		empty = empty_blocks;
		*(void**)block = empty;
	} while (SGEN_CAS_PTR (&empty_blocks, block, empty) != empty);

	SGEN_ATOMIC_ADD (num_empty_blocks, 1);
}
/*
 * Allocate one zeroed object of size class `slot' from the pinned
 * allocator `alc'.  Sources are tried in order:
 *   1. the lock-free delayed-free list for the slot,
 *   2. the per-slot free list of a chunk on alc->free_lists,
 *   3. populating a fresh page in an existing chunk,
 *   4. allocating a brand-new pinned chunk.
 */
static void*
alloc_from_slot (SgenPinnedAllocator *alc, int slot)
{
	SgenPinnedChunk *pchunk;
	size_t size = freelist_sizes [slot];

	if (alc->delayed_free_lists [slot]) {
		void **p;
		/* Lock-free pop from the delayed-free list. */
		do {
			p = alc->delayed_free_lists [slot];
		} while (SGEN_CAS_PTR (&alc->delayed_free_lists [slot], *p, p) != p);
		memset (p, 0, size);
		return p;
	}

 restart:
	pchunk = alc->free_lists [slot];
	if (pchunk) {
		void **p = pchunk->free_list [slot];
		void *next;

		g_assert (p);

		next = *p;
		pchunk->free_list [slot] = next;
		if (!next) {
			/*
			 * This chunk's free list for the slot is now empty:
			 * unlink the chunk from the allocator's list of chunks
			 * with free entries for this slot.
			 */
			alc->free_lists [slot] = pchunk->free_list_nexts [slot];
			pchunk->free_list_nexts [slot] = NULL;
		}

		memset (p, 0, size);
		return p;
	}

	/* No chunk with a free entry: try to populate a page in an existing chunk. */
	for (pchunk = alc->chunk_list; pchunk; pchunk = pchunk->block.next) {
		if (populate_chunk_page (alc, pchunk, slot))
			goto restart;
	}

	pchunk = alloc_pinned_chunk (alc);
	/* FIXME: handle OOM */
	if (pchunk->free_list [slot])
		goto restart;
	if (!populate_chunk_page (alc, pchunk, slot))
		g_assert_not_reached ();
	goto restart;
}
/*
 * Parallel copy-or-mark of the object at `*ptr' during an old-
 * generation collection.  Nursery objects are copied into the major
 * heap, with a forwarding pointer installed via CAS so that exactly
 * one racing worker performs the copy; `*ptr' is updated to the new
 * location either way.  Objects already in the major heap are marked
 * (small-object blocks) or pinned in place via a CAS on the vtable
 * word; objects with references are pushed onto `queue'.
 */
static void
major_copy_or_mark_object (void **ptr, SgenGrayQueue *queue)
{
	void *obj = *ptr;
	mword vtable_word = *(mword*)obj;
	/* Strip the forwarding/pinned tag bits to recover the real vtable. */
	MonoVTable *vt = (MonoVTable*)(vtable_word & ~SGEN_VTABLE_BITS_MASK);
	mword objsize;
	MSBlockInfo *block;

	HEAVY_STAT (++stat_copy_object_called_major);

	DEBUG (9, g_assert (obj));
	DEBUG (9, g_assert (current_collection_generation == GENERATION_OLD));

	if (ptr_in_nursery (obj)) {
		int word, bit;
		gboolean has_references;
		void *destination;

		/* Already forwarded by another worker: just follow the pointer. */
		if (vtable_word & SGEN_FORWARDED_BIT) {
			*ptr = (void*)vt;
			return;
		}

		if (vtable_word & SGEN_PINNED_BIT)
			return;

		HEAVY_STAT (++stat_objects_copied_major);

		objsize = SGEN_ALIGN_UP (mono_sgen_par_object_get_size (vt, (MonoObject*)obj));
		has_references = SGEN_VTABLE_HAS_REFERENCES (vt);

		destination = major_alloc_object (objsize, has_references);

		/* Race to install the forwarding pointer; the winner does the copy. */
		if (SGEN_CAS_PTR (obj, (void*)((mword)destination | SGEN_FORWARDED_BIT), vt) == vt) {
			gboolean was_marked;

			par_copy_object_no_checks (destination, vt, obj, objsize, has_references ? queue : NULL);
			obj = destination;
			*ptr = obj;

			/*
			 * FIXME: If we make major_alloc_object() give
			 * us the block info, too, we won't have to
			 * re-fetch it here.
			 */
			block = MS_BLOCK_FOR_OBJ (obj);
			MS_CALC_MARK_BIT (word, bit, obj);
			DEBUG (9, g_assert (!MS_MARK_BIT (block, word, bit)));
			MS_PAR_SET_MARK_BIT (was_marked, block, word, bit);
		} else {
			/*
			 * FIXME: We have allocated destination, but
			 * we cannot use it.  Give it back to the
			 * allocator.
			 */
			*(void**)destination = NULL;

			/* We lost the race: follow the winner's forwarding pointer. */
			vtable_word = *(mword*)obj;
			g_assert (vtable_word & SGEN_FORWARDED_BIT);

			obj = (void*)(vtable_word & ~SGEN_VTABLE_BITS_MASK);

			*ptr = obj;
		}
	} else {
#ifdef FIXED_HEAP
		if (MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
#else
		objsize = SGEN_ALIGN_UP (mono_sgen_par_object_get_size (vt, (MonoObject*)obj));

		if (objsize <= SGEN_MAX_SMALL_OBJ_SIZE)
#endif
		{
			/* Small major-heap object: set its mark bit and enqueue if needed. */
			block = MS_BLOCK_FOR_OBJ (obj);
			MS_PAR_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
		} else {
			/* Object outside the small-object blocks: pin it in place. */
			if (vtable_word & SGEN_PINNED_BIT)
				return;
			binary_protocol_pin (obj, vt, mono_sgen_safe_object_get_size ((MonoObject*)obj));
			/* CAS the pinned bit in; only the winner enqueues the object. */
			if (SGEN_CAS_PTR (obj, (void*)(vtable_word | SGEN_PINNED_BIT), (void*)vtable_word) == (void*)vtable_word) {
				if (SGEN_VTABLE_HAS_REFERENCES (vt))
					GRAY_OBJECT_ENQUEUE (queue, obj);
			} else {
				g_assert (SGEN_OBJECT_IS_PINNED (obj));
			}
		}
	}
}