/*
 * Register a fragment of free nursery memory with ALLOCATOR.
 *
 * Fragments of at least SGEN_MAX_NURSERY_WASTE bytes are cleared (or
 * poisoned with 0xff under the debug clear policy) and added to the
 * allocator's fragment list; smaller ones are not worth managing, so
 * they are only cleared — pinning depends on that — and counted as
 * wasted space.
 */
static void
add_nursery_frag (SgenFragmentAllocator *allocator, size_t frag_size, char* frag_start, char* frag_end)
{
	SGEN_LOG (4, "Found empty fragment: %p-%p, size: %zd", frag_start, frag_end, frag_size);
	binary_protocol_empty (frag_start, frag_size);

	/* Not worth dealing with smaller fragments: need to tune */
	if (frag_size < SGEN_MAX_NURSERY_WASTE) {
		/* Clear unused fragments, pinning depends on this */
		sgen_clear_range (frag_start, frag_end);
		HEAVY_STAT (stat_wasted_bytes_small_areas += frag_size);
		return;
	}

	/* memsetting just the first chunk start is bound to provide better cache locality */
	if (sgen_get_nursery_clear_policy () == CLEAR_AT_GC)
		memset (frag_start, 0, frag_size);
	else if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG)
		memset (frag_start, 0xff, frag_size);

#ifdef NALLOC_DEBUG
	/* XXX convert this into a flight record entry
	printf ("\tfragment [%p %p] size %zd\n", frag_start, frag_end, frag_size);
	*/
#endif
	sgen_fragment_allocator_add (allocator, frag_start, frag_end);
	fragment_total += frag_size;
}
static void prepare_to_space (char *to_space_bitmap, int space_bitmap_size) { SgenFragment **previous, *frag; memset (to_space_bitmap, 0, space_bitmap_size); memset (age_alloc_buffers, 0, sizeof (age_alloc_buffers)); previous = &collector_allocator.alloc_head; for (frag = *previous; frag; frag = *previous) { char *start = align_up (frag->fragment_next, SGEN_TO_SPACE_GRANULE_BITS); char *end = align_down (frag->fragment_end, SGEN_TO_SPACE_GRANULE_BITS); /* Fragment is too small to be usable. */ if ((end - start) < SGEN_MAX_NURSERY_WASTE) { sgen_clear_range (frag->fragment_next, frag->fragment_end); frag->fragment_next = frag->fragment_end = frag->fragment_start; *previous = frag->next; continue; } /* We need to insert 3 phony objects so the fragments build step can correctly walk the nursery. */ /* Clean the fragment range. */ sgen_clear_range (start, end); /* We need a phony object in between the original fragment start and the effective one. */ if (start != frag->fragment_next) sgen_clear_range (frag->fragment_next, start); /* We need an phony object in between the new fragment end and the original fragment end. */ if (end != frag->fragment_end) sgen_clear_range (end, frag->fragment_end); frag->fragment_start = frag->fragment_next = start; frag->fragment_end = end; mark_bits_in_range (to_space_bitmap, start, end); previous = &frag->next; } }
/*
 * Flush the per-age allocation buffers and return the fragment region
 * head that the build-fragments step should exclude.
 *
 * Any space still unused in an age buffer is cleared so the nursery
 * stays walkable.
 */
static SgenFragment*
build_fragments_get_exclude_head (void)
{
	int age;

	for (age = 0; age < MAX_AGE; ++age) {
		/* If we OOM'd on the last collection ->end might be null while ->next not. */
		if (age_alloc_buffers [age].end)
			sgen_clear_range (age_alloc_buffers [age].next, age_alloc_buffers [age].end);
	}

	return collector_allocator.region_head;
}
/*
 * Clear the unused tail of every fragment in ALLOCATOR.
 *
 * For each fragment, the not-yet-allocated range
 * [fragment_next, fragment_end) is cleared.  List pointers are unmasked
 * before being followed, since fragments may carry deletion marks.
 */
void
sgen_clear_allocator_fragments (SgenFragmentAllocator *allocator)
{
	SgenFragment *cur = (SgenFragment *)unmask (allocator->alloc_head);

	while (cur) {
		SGEN_LOG (4, "Clear nursery frag %p-%p", cur->fragment_next, cur->fragment_end);
		sgen_clear_range (cur->fragment_next, cur->fragment_end);
#ifdef NALLOC_DEBUG
		add_alloc_record (cur->fragment_next, cur->fragment_end - cur->fragment_next, CLEAR_NURSERY_FRAGS);
#endif
		cur = (SgenFragment *)unmask (cur->next);
	}
}
/*
 * Parallel slow path for promotion allocation: allocate OBJSIZE bytes for
 * an object of age AGE from the shared per-age allocation buffer,
 * refilling the buffer from the collector allocator when it is exhausted.
 *
 * Serialized by par_alloc_buffer_refill_mutex so only one thread refills
 * a given buffer at a time; other threads may still advance ->next
 * concurrently via CAS on the fast path, hence the CAS loops below.
 *
 * Returns the address of the new object, or NULL when no range could be
 * obtained (nursery exhausted).
 */
static char*
par_alloc_for_promotion_slow_path (int age, size_t objsize)
{
	char *p;
	size_t allocated_size;
	size_t aligned_objsize = (size_t)align_up (objsize, SGEN_TO_SPACE_GRANULE_BITS);

	mono_mutex_lock (&par_alloc_buffer_refill_mutex);

restart:
	p = age_alloc_buffers [age].next;
	if (G_LIKELY (p + objsize <= age_alloc_buffers [age].end)) {
		/* The buffer has room (possibly refilled by another thread while we
		 * waited for the lock): bump ->next just like the fast path would. */
		if (SGEN_CAS_PTR ((void*)&age_alloc_buffers [age].next, p + objsize, p) != p)
			goto restart;
	} else {
		/* Reclaim remaining space - if we OOMd the nursery nothing to see here. */
		char *end = age_alloc_buffers [age].end;
		if (end) {
			/* Atomically claim everything up to ->end, then clear it. */
			do {
				p = age_alloc_buffers [age].next;
			} while (SGEN_CAS_PTR ((void*)&age_alloc_buffers [age].next, end, p) != p);
			sgen_clear_range (p, end);
		}

		/* By setting end to NULL we make sure no other thread can advance while we're updating.*/
		age_alloc_buffers [age].end = NULL;
		STORE_STORE_FENCE;

		p = sgen_fragment_allocator_par_range_alloc (
			&collector_allocator,
			MAX (aligned_objsize, AGE_ALLOC_BUFFER_DESIRED_SIZE),
			MAX (aligned_objsize, AGE_ALLOC_BUFFER_MIN_SIZE),
			&allocated_size);
		if (p) {
			set_age_in_range (p, p + allocated_size, age);
			age_alloc_buffers [age].next = p + objsize;
			STORE_STORE_FENCE; /* Next must arrive before the new value for next. */
			age_alloc_buffers [age].end = p + allocated_size;
		}
	}

	mono_mutex_unlock (&par_alloc_buffer_refill_mutex);
	return p;
}
static char* alloc_for_promotion_slow_path (int age, size_t objsize) { char *p; size_t allocated_size; size_t aligned_objsize = (size_t)align_up (objsize, SGEN_TO_SPACE_GRANULE_BITS); p = sgen_fragment_allocator_serial_range_alloc ( &collector_allocator, MAX (aligned_objsize, AGE_ALLOC_BUFFER_DESIRED_SIZE), MAX (aligned_objsize, AGE_ALLOC_BUFFER_MIN_SIZE), &allocated_size); if (p) { set_age_in_range (p, p + allocated_size, age); sgen_clear_range (age_alloc_buffers [age].next, age_alloc_buffers [age].end); age_alloc_buffers [age].next = p + objsize; age_alloc_buffers [age].end = p + allocated_size; } return p; }
/*
 * Lock-free allocation of SIZE bytes from FRAG.
 *
 * Bumps frag->fragment_next with a CAS; returns the old value on success
 * or NULL if the fragment is too small or another thread won the race.
 * When the winning allocation leaves less than SGEN_MAX_NURSERY_WASTE
 * behind, the dying fragment is unlinked from ALLOCATOR's list using
 * Michael's lock-free list removal (mark the node's next pointer, then
 * CAS it out of the predecessor).
 */
static void*
par_alloc_from_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag, size_t size)
{
	char *p = frag->fragment_next;
	char *end = p + size;

	if (end > frag->fragment_end)
		return NULL;

	/* p = frag->fragment_next must happen before */
	mono_memory_barrier ();

	if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->fragment_next, end, p) != p)
		return NULL;

	if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
		SgenFragment *next, **prev_ptr;

		/*
		 * Before we clean the remaining nursery, we must claim the remaining space
		 * as it could end up been used by the range allocator since it can end up
		 * allocating from this dying fragment as it doesn't respect SGEN_MAX_NURSERY_WASTE
		 * when doing second chance allocation.
		 */
		if ((sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) && claim_remaining_size (frag, end)) {
			sgen_clear_range (end, frag->fragment_end);
			HEAVY_STAT (stat_wasted_bytes_trailer += frag->fragment_end - end);
#ifdef NALLOC_DEBUG
			add_alloc_record (end, frag->fragment_end - end, BLOCK_ZEROING);
#endif
		}

		prev_ptr = find_previous_pointer_fragment (allocator, frag);

		/*Use Michaels linked list remove*/

		/*prev_ptr will be null if the fragment was removed concurrently */
		while (prev_ptr) {
			next = frag->next;

			/*already deleted*/
			if (!get_mark (next)) {
				/*frag->next read must happen before the first CAS*/
				mono_memory_write_barrier ();

				/*Fail if the next node is removed concurrently and its CAS wins */
				if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->next, mask (next, 1), next) != next) {
					continue;
				}
			}

			/* The second CAS must happen after the first CAS or frag->next. */
			mono_memory_write_barrier ();

			/* Fail if the previous node was deleted and its CAS wins */
			if (InterlockedCompareExchangePointer ((volatile gpointer*)prev_ptr, unmask (next), frag) != frag) {
				/* Predecessor changed under us: re-locate it and retry. */
				prev_ptr = find_previous_pointer_fragment (allocator, frag);
				continue;
			}
			break;
		}
	}

	return p;
}