/*
 * obj is some object.  If it's not in the major heap (i.e. if it's in
 * the nursery or LOS), return FALSE.  Otherwise return whether it's
 * been marked or copied.
 *
 * Only objects small enough to live in a major block can be "live"
 * here: with FIXED_HEAP the address range decides, otherwise the
 * object's (aligned) size is compared against the small-object limit.
 */
static gboolean
major_is_object_live (char *obj)
{
	MSBlockInfo *block;
	int word, bit;
#ifndef FIXED_HEAP
	mword objsize;
#endif

	/* Nursery objects are never tracked by the major collector. */
	if (ptr_in_nursery (obj))
		return FALSE;

#ifdef FIXED_HEAP
	/* LOS */
	if (!MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
		return FALSE;
#else
	objsize = SGEN_ALIGN_UP (mono_sgen_safe_object_get_size ((MonoObject*)obj));

	/* LOS */
	if (objsize > SGEN_MAX_SMALL_OBJ_SIZE)
		return FALSE;
#endif

	/* now we know it's in a major block */
	block = MS_BLOCK_FOR_OBJ (obj);
	/* Pinned blocks are not expected here — presumably their
	 * liveness is handled elsewhere; TODO confirm against callers. */
	DEBUG (9, g_assert (!block->pinned));
	MS_CALC_MARK_BIT (word, bit, obj);
	/* Liveness == the object's mark bit in its block's bitmap. */
	return MS_MARK_BIT (block, word, bit) ? TRUE : FALSE;
}
/*
 * Serial copy-or-mark for a major collection.
 *
 * *ptr is a slot holding an object reference.  Nursery objects are
 * evacuated into the major heap (following a forwarding pointer or
 * honoring pinning first) and the slot is updated; small major-heap
 * objects are marked and enqueued; large (LOS) objects are pinned and
 * enqueued instead of being moved.
 */
static void
major_copy_or_mark_object (void **ptr, SgenGrayQueue *queue)
{
	void *obj = *ptr;
	MSBlockInfo *block;

	HEAVY_STAT (++stat_copy_object_called_major);

	DEBUG (9, g_assert (obj));
	DEBUG (9, g_assert (current_collection_generation == GENERATION_OLD));

	if (ptr_in_nursery (obj)) {
		int word, bit;
		char *forwarded;

		/* Already copied by an earlier visit: just redirect the slot. */
		if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
			*ptr = forwarded;
			return;
		}
		/* Pinned nursery objects stay where they are. */
		if (SGEN_OBJECT_IS_PINNED (obj))
			return;

		HEAVY_STAT (++stat_objects_copied_major);

		obj = copy_object_no_checks (obj, queue);
		*ptr = obj;

		/*
		 * FIXME: See comment for copy_object_no_checks().  If
		 * we have that, we can let the allocation function
		 * give us the block info, too, and we won't have to
		 * re-fetch it.
		 */
		block = MS_BLOCK_FOR_OBJ (obj);
		MS_CALC_MARK_BIT (word, bit, obj);
		/* Freshly copied objects must not be marked yet. */
		DEBUG (9, g_assert (!MS_MARK_BIT (block, word, bit)));
		MS_SET_MARK_BIT (block, word, bit);
	} else {
#ifdef FIXED_HEAP
		if (MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
#else
		mword objsize;

		objsize = SGEN_ALIGN_UP (mono_sgen_safe_object_get_size ((MonoObject*)obj));

		if (objsize <= SGEN_MAX_SMALL_OBJ_SIZE)
#endif
		{
			/* Small major-heap object: mark in place and enqueue. */
			block = MS_BLOCK_FOR_OBJ (obj);
			MS_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
		} else {
			/* LOS object: pin instead of marking in a block. */
			if (SGEN_OBJECT_IS_PINNED (obj))
				return;
			binary_protocol_pin (obj, (gpointer)SGEN_LOAD_VTABLE (obj), mono_sgen_safe_object_get_size ((MonoObject*)obj));
			SGEN_PIN_OBJECT (obj);
			/* FIXME: only enqueue if object has references */
			GRAY_OBJECT_ENQUEUE (queue, obj);
		}
	}
}
/*
 * Return a single object's slot to its block's free list.
 *
 * We're not freeing the block if it's empty — that work is left for
 * the next major collection's sweep.
 *
 * This is just called from the domain clearing code, which runs in a
 * single thread and has the GC lock, so we don't need an extra lock.
 */
static void
free_object (char *obj, size_t size, gboolean pinned)
{
	MSBlockInfo *block = MS_BLOCK_FOR_OBJ (obj);
	int mark_word, mark_bit;

	/* The caller's pinned flag must agree with the block's flavor. */
	DEBUG (9, g_assert ((pinned && block->pinned) || (!pinned && !block->pinned)));
	DEBUG (9, g_assert (MS_OBJ_ALLOCED (obj, block)));
	MS_CALC_MARK_BIT (mark_word, mark_bit, obj);
	DEBUG (9, g_assert (!MS_MARK_BIT (block, mark_word, mark_bit)));

	/*
	 * A block with a NULL free list was fully occupied, so it is
	 * currently on no free-block list.  This free makes it usable
	 * again: hook it onto the list matching its pinned/references
	 * flavor and its size class.
	 */
	if (block->free_list == NULL) {
		MSBlockInfo **lists = FREE_BLOCKS (pinned, block->has_references);
		int idx = MS_BLOCK_OBJ_SIZE_INDEX (size);
		DEBUG (9, g_assert (!block->next_free));
		block->next_free = lists [idx];
		lists [idx] = block;
	}

	/* Zero the slot, then push it onto the block's free list. */
	memset (obj, 0, size);
	*(void**)obj = block->free_list;
	block->free_list = (void**)obj;
}
/*
 * Sweep phase: walk every major block, reclaim unmarked objects, and
 * rebuild the per-size free-block lists from scratch.  Blocks with no
 * live objects at all are released back to the OS/allocator.
 */
static void
major_sweep (void)
{
	int i;
#ifdef FIXED_HEAP
	int j;
#else
	MSBlockInfo **iter;
#endif

	/* clear all the free lists */
	for (i = 0; i < MS_BLOCK_TYPE_MAX; ++i) {
		MSBlockInfo **free_blocks = free_block_lists [i];
		int j;
		for (j = 0; j < num_block_obj_sizes; ++j)
			free_blocks [j] = NULL;
	}

	/* traverse all blocks, free and zero unmarked objects */
#ifdef FIXED_HEAP
	/* Fixed heap: blocks live in a flat array; skip unused slots. */
	for (j = 0; j < ms_heap_num_blocks; ++j) {
		MSBlockInfo *block = &block_infos [j];
#else
	/*
	 * Linked-list heap: iter is a pointer to the link that points
	 * at the current block, so a dead block can be unlinked by
	 * overwriting *iter.  Note the advance happens only in the
	 * have_live branch below.
	 */
	iter = &all_blocks;
	while (*iter) {
		MSBlockInfo *block = *iter;
#endif
		int count;
		gboolean have_live = FALSE;
		int obj_index;

#ifdef FIXED_HEAP
		if (!block->used)
			continue;
#endif

		count = MS_BLOCK_FREE / block->obj_size;
		/* Rebuild the block's object free list from scratch. */
		block->free_list = NULL;

		for (obj_index = 0; obj_index < count; ++obj_index) {
			int word, bit;
			void *obj = MS_BLOCK_OBJ (block, obj_index);
			MS_CALC_MARK_BIT (word, bit, obj);
			if (MS_MARK_BIT (block, word, bit)) {
				/* Marked implies it was allocated. */
				DEBUG (9, g_assert (MS_OBJ_ALLOCED (obj, block)));
				have_live = TRUE;
			} else {
				/* an unmarked object */
				if (MS_OBJ_ALLOCED (obj, block)) {
					binary_protocol_empty (obj, block->obj_size);
					memset (obj, 0, block->obj_size);
				}
				/* Push the (now dead or never-used) slot
				 * onto the free list. */
				*(void**)obj = block->free_list;
				block->free_list = obj;
			}
		}

		/* reset mark bits */
		memset (block->mark_words, 0, sizeof (mword) * MS_NUM_MARK_WORDS);

		/*
		 * FIXME: reverse free list so that it's in address
		 * order
		 */

		if (have_live) {
#ifndef FIXED_HEAP
			/* Keep the block: advance past it. */
			iter = &block->next;
#endif

			/*
			 * If there are free slots in the block, add
			 * the block to the corresponding free list.
			 */
			if (block->free_list) {
				MSBlockInfo **free_blocks = FREE_BLOCKS (block->pinned, block->has_references);
				int index = MS_BLOCK_OBJ_SIZE_INDEX (block->obj_size);
				block->next_free = free_blocks [index];
				free_blocks [index] = block;
			}
		} else {
			/*
			 * Blocks without live objects are removed from the
			 * block list and freed.
			 */
#ifdef FIXED_HEAP
			ms_free_block (block);
#else
			/* Unlink before freeing; iter now points at the
			 * successor, so no advance is needed. */
			*iter = block->next;
			ms_free_block (block->block);
			mono_sgen_free_internal (block, INTERNAL_MEM_MS_BLOCK_INFO);
#endif

			--num_major_sections;
		}
	}
}

/* Heap-composition counters filled in by the counting callbacks. */
static int count_pinned_ref;
static int count_pinned_nonref;
static int count_nonpinned_ref;
static int count_nonpinned_nonref;

/*
 * Per-object callback: classify a non-pinned object by whether its
 * class contains reference fields.  size/data are unused.
 */
static void
count_nonpinned_callback (char *obj, size_t size, void *data)
{
	MonoVTable *vtable = (MonoVTable*)LOAD_VTABLE (obj);

	if (vtable->klass->has_references)
		++count_nonpinned_ref;
	else
		++count_nonpinned_nonref;
}
/*
 * Parallel copy-or-mark for a major collection.
 *
 * Lock-free variant: forwarding and pinning are encoded as low bits in
 * the object's vtable word, and all state transitions are done with
 * compare-and-swap so multiple collector threads can race on the same
 * object.  Exactly one thread wins the CAS and performs the copy; the
 * losers follow the forwarding pointer the winner installed.
 */
static void
major_copy_or_mark_object (void **ptr, SgenGrayQueue *queue)
{
	void *obj = *ptr;
	/* Racy read of the vtable word — revalidated by CAS below. */
	mword vtable_word = *(mword*)obj;
	MonoVTable *vt = (MonoVTable*)(vtable_word & ~SGEN_VTABLE_BITS_MASK);
	mword objsize;
	MSBlockInfo *block;

	HEAVY_STAT (++stat_copy_object_called_major);

	DEBUG (9, g_assert (obj));
	DEBUG (9, g_assert (current_collection_generation == GENERATION_OLD));

	if (ptr_in_nursery (obj)) {
		int word, bit;
		gboolean has_references;
		void *destination;

		/* Already forwarded: vt is actually the new address. */
		if (vtable_word & SGEN_FORWARDED_BIT) {
			*ptr = (void*)vt;
			return;
		}

		if (vtable_word & SGEN_PINNED_BIT)
			return;

		HEAVY_STAT (++stat_objects_copied_major);

		objsize = SGEN_ALIGN_UP (mono_sgen_par_object_get_size (vt, (MonoObject*)obj));
		has_references = SGEN_VTABLE_HAS_REFERENCES (vt);

		/* Allocate the destination before trying to claim the
		 * object — the claim is the CAS installing the
		 * forwarding pointer. */
		destination = major_alloc_object (objsize, has_references);

		if (SGEN_CAS_PTR (obj, (void*)((mword)destination | SGEN_FORWARDED_BIT), vt) == vt) {
			/* We won the race: do the actual copy. */
			gboolean was_marked;

			par_copy_object_no_checks (destination, vt, obj, objsize, has_references ? queue : NULL);
			obj = destination;
			*ptr = obj;

			/*
			 * FIXME: If we make major_alloc_object() give
			 * us the block info, too, we won't have to
			 * re-fetch it here.
			 */
			block = MS_BLOCK_FOR_OBJ (obj);
			MS_CALC_MARK_BIT (word, bit, obj);
			DEBUG (9, g_assert (!MS_MARK_BIT (block, word, bit)));
			/* Parallel-safe mark; was_marked is unused here. */
			MS_PAR_SET_MARK_BIT (was_marked, block, word, bit);
		} else {
			/*
			 * FIXME: We have allocated destination, but
			 * we cannot use it.  Give it back to the
			 * allocator.
			 */
			*(void**)destination = NULL;

			/* Another thread forwarded the object; re-read
			 * the vtable word to find where it went. */
			vtable_word = *(mword*)obj;
			g_assert (vtable_word & SGEN_FORWARDED_BIT);

			obj = (void*)(vtable_word & ~SGEN_VTABLE_BITS_MASK);

			*ptr = obj;
		}
	} else {
#ifdef FIXED_HEAP
		if (MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
#else
		objsize = SGEN_ALIGN_UP (mono_sgen_par_object_get_size (vt, (MonoObject*)obj));

		if (objsize <= SGEN_MAX_SMALL_OBJ_SIZE)
#endif
		{
			/* Small major-heap object: parallel mark + enqueue. */
			block = MS_BLOCK_FOR_OBJ (obj);
			MS_PAR_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
		} else {
			/* LOS object: pin via CAS on the vtable word. */
			if (vtable_word & SGEN_PINNED_BIT)
				return;
			binary_protocol_pin (obj, vt, mono_sgen_safe_object_get_size ((MonoObject*)obj));
			if (SGEN_CAS_PTR (obj, (void*)(vtable_word | SGEN_PINNED_BIT), (void*)vtable_word) == (void*)vtable_word) {
				/* We pinned it: only enqueue if it can
				 * contain references. */
				if (SGEN_VTABLE_HAS_REFERENCES (vt))
					GRAY_OBJECT_ENQUEUE (queue, obj);
			} else {
				/* Lost the race — someone else must have
				 * pinned it (forwarding LOS objects is not
				 * expected). */
				g_assert (SGEN_OBJECT_IS_PINNED (obj));
			}
		}
	}
}
/*
 * Sweep phase with evacuation statistics: besides reclaiming unmarked
 * objects and rebuilding the free-block lists, this records per-size
 * occupancy so the next collection can decide which object-size
 * classes are fragmented enough to evacuate.
 */
static void
ms_sweep (void)
{
	int i;
	MSBlockInfo **iter;

	/* statistics for evacuation */
	/* NOTE(review): stack (alloca) arrays sized by the number of
	 * size classes — presumably small and bounded; confirm. */
	int *slots_available = alloca (sizeof (int) * num_block_obj_sizes);
	int *slots_used = alloca (sizeof (int) * num_block_obj_sizes);
	int *num_blocks = alloca (sizeof (int) * num_block_obj_sizes);
	for (i = 0; i < num_block_obj_sizes; ++i)
		slots_available [i] = slots_used [i] = num_blocks [i] = 0;

	/* clear all the free lists */
	for (i = 0; i < MS_BLOCK_TYPE_MAX; ++i) {
		MSBlockInfo **free_blocks = free_block_lists [i];
		int j;
		for (j = 0; j < num_block_obj_sizes; ++j)
			free_blocks [j] = NULL;
	}

	/* traverse all blocks, free and zero unmarked objects */
	/*
	 * iter points at the link that points at the current block so
	 * dead blocks can be unlinked by overwriting *iter; it is only
	 * advanced when the block survives.
	 */
	iter = &all_blocks;
	while (*iter) {
		MSBlockInfo *block = *iter;
		int count;
		gboolean have_live = FALSE;
		gboolean has_pinned;
		int obj_index;
		int obj_size_index;

		obj_size_index = block->obj_size_index;

		/* Remember whether the block held pinned objects during
		 * this cycle, then reset the flag for the next one. */
		has_pinned = block->has_pinned;
		block->has_pinned = block->pinned;

		/* To-space status does not carry over to the next cycle. */
		block->is_to_space = FALSE;

		count = MS_BLOCK_FREE / block->obj_size;
		block->free_list = NULL;

		for (obj_index = 0; obj_index < count; ++obj_index) {
			int word, bit;
			void *obj = MS_BLOCK_OBJ (block, obj_index);
			MS_CALC_MARK_BIT (word, bit, obj);
			if (MS_MARK_BIT (block, word, bit)) {
				DEBUG (9, g_assert (MS_OBJ_ALLOCED (obj, block)));
				have_live = TRUE;
				/* Pinned blocks are excluded from the
				 * evacuation statistics. */
				if (!has_pinned)
					++slots_used [obj_size_index];
			} else {
				/* an unmarked object */
				if (MS_OBJ_ALLOCED (obj, block)) {
					binary_protocol_empty (obj, block->obj_size);
					memset (obj, 0, block->obj_size);
				}
				*(void**)obj = block->free_list;
				block->free_list = obj;
			}
		}

		/* reset mark bits */
		memset (block->mark_words, 0, sizeof (mword) * MS_NUM_MARK_WORDS);

		/*
		 * FIXME: reverse free list so that it's in address
		 * order
		 */

		if (have_live) {
			if (!has_pinned) {
				++num_blocks [obj_size_index];
				slots_available [obj_size_index] += count;
			}

			/* Keep the block: advance past it. */
			iter = &block->next;

			/*
			 * If there are free slots in the block, add
			 * the block to the corresponding free list.
			 */
			if (block->free_list) {
				MSBlockInfo **free_blocks = FREE_BLOCKS (block->pinned, block->has_references);
				int index = MS_BLOCK_OBJ_SIZE_INDEX (block->obj_size);
				block->next_free = free_blocks [index];
				free_blocks [index] = block;
			}

			update_heap_boundaries_for_block (block);
		} else {
			/*
			 * Blocks without live objects are removed from the
			 * block list and freed.
			 */
			*iter = block->next;
#ifdef FIXED_HEAP
			ms_free_block (block);
#else
			ms_free_block (block->block);
			mono_sgen_free_internal (block, INTERNAL_MEM_MS_BLOCK_INFO);
#endif

			--num_major_sections;
		}
	}

	/*
	 * Decide, per size class, whether the next collection should
	 * evacuate: enough blocks and low enough occupancy.
	 * NOTE(review): when slots_available[i] is 0, usage is NaN/inf;
	 * harmless here because num_blocks[i] is then also 0 and the
	 * "> 5" test fails first — but the division still happens.
	 */
	for (i = 0; i < num_block_obj_sizes; ++i) {
		float usage = (float)slots_used [i] / (float)slots_available [i];
		if (num_blocks [i] > 5 && usage < evacuation_threshold) {
			evacuate_block_obj_sizes [i] = TRUE;
			/*
			g_print ("slot size %d - %d of %d used\n",
					block_obj_sizes [i], slots_used [i], slots_available [i]);
			*/
		} else {
			evacuate_block_obj_sizes [i] = FALSE;
		}
	}

	have_swept = TRUE;
}
/*
 * Serial copy-or-mark with evacuation support.
 *
 * Like the plain serial variant, but small major-heap objects whose
 * size class was flagged fragmented by the last sweep are themselves
 * copied (evacuated) into fresh blocks via the do_copy_object path.
 * If a copy fails (allocation could not move the object), evacuation
 * is switched off for that whole size class.
 */
static void
major_copy_or_mark_object (void **ptr, SgenGrayQueue *queue)
{
	void *obj = *ptr;
	MSBlockInfo *block;

	HEAVY_STAT (++stat_copy_object_called_major);

	DEBUG (9, g_assert (obj));
	DEBUG (9, g_assert (current_collection_generation == GENERATION_OLD));

	if (ptr_in_nursery (obj)) {
		int word, bit;
		char *forwarded, *old_obj;

		/* Already copied: just redirect the slot. */
		if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
			*ptr = forwarded;
			return;
		}
		if (SGEN_OBJECT_IS_PINNED (obj))
			return;

		HEAVY_STAT (++stat_objects_copied_major);

	do_copy_object:
		/* Also reached via goto below when evacuating a
		 * major-heap object. */
		old_obj = obj;
		obj = copy_object_no_checks (obj, queue);
		if (G_UNLIKELY (old_obj == obj)) {
			/*If we fail to evacuate an object we just stop doing it for a given block size as all other will surely fail too.*/
			/*FIXME figure out if we can somehow keep going on failure until we find a to space block of same size and satisfy the allocation*/
			if (!ptr_in_nursery (obj)) {
				int size_index;
				block = MS_BLOCK_FOR_OBJ (obj);
				size_index = block->obj_size_index;
				/* Disable evacuation for this size class
				 * and fall back to marking in place. */
				evacuate_block_obj_sizes [size_index] = FALSE;
				MS_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
			}
			return;
		}
		*ptr = obj;

		/*
		 * FIXME: See comment for copy_object_no_checks().  If
		 * we have that, we can let the allocation function
		 * give us the block info, too, and we won't have to
		 * re-fetch it.
		 */
		block = MS_BLOCK_FOR_OBJ (obj);
		MS_CALC_MARK_BIT (word, bit, obj);
		DEBUG (9, g_assert (!MS_MARK_BIT (block, word, bit)));
		MS_SET_MARK_BIT (block, word, bit);
	} else {
		char *forwarded;
#ifndef FIXED_HEAP
		mword objsize;
#endif

		/* Major-heap objects can be forwarded too when their
		 * size class is being evacuated. */
		if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
			*ptr = forwarded;
			return;
		}

#ifdef FIXED_HEAP
		if (MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
#else
		objsize = SGEN_ALIGN_UP (mono_sgen_safe_object_get_size ((MonoObject*)obj));

		if (objsize <= SGEN_MAX_SMALL_OBJ_SIZE)
#endif
		{
			int size_index;

			block = MS_BLOCK_FOR_OBJ (obj);
			size_index = block->obj_size_index;

			if (!block->has_pinned && evacuate_block_obj_sizes [size_index]) {
				/* NOTE(review): objects already in a
				 * to-space block are left as-is here
				 * without an explicit mark — presumably
				 * handled at allocation; confirm. */
				if (block->is_to_space)
					return;
				HEAVY_STAT (++stat_major_objects_evacuated);
				goto do_copy_object;
			} else {
				MS_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
			}
		} else {
			/* LOS object: pin and enqueue. */
			if (SGEN_OBJECT_IS_PINNED (obj))
				return;
			binary_protocol_pin (obj, (gpointer)SGEN_LOAD_VTABLE (obj), mono_sgen_safe_object_get_size ((MonoObject*)obj));
			SGEN_PIN_OBJECT (obj);
			/* FIXME: only enqueue if object has references */
			GRAY_OBJECT_ENQUEUE (queue, obj);
		}
	}
}