/*
 * Serial copy-or-mark function for the mark&sweep major collector.
 *
 * *PTR is a reference encountered during a major collection.  If it
 * points into the nursery, the object is copied to the major heap
 * (unless it is pinned or already forwarded) and *PTR is updated.
 * If it already lives in the major heap it is marked and enqueued;
 * large (LOS) objects are pinned instead of copied.
 */
static void
major_copy_or_mark_object (void **ptr, SgenGrayQueue *queue)
{
	void *obj = *ptr;
	MSBlockInfo *block;

	HEAVY_STAT (++stat_copy_object_called_major);

	DEBUG (9, g_assert (obj));
	DEBUG (9, g_assert (current_collection_generation == GENERATION_OLD));

	if (ptr_in_nursery (obj)) {
		int word, bit;
		char *forwarded;

		/* Already copied by an earlier reference - just follow the forwarding pointer. */
		if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
			*ptr = forwarded;
			return;
		}
		/* Pinned nursery objects must not move. */
		if (SGEN_OBJECT_IS_PINNED (obj))
			return;

		HEAVY_STAT (++stat_objects_copied_major);

		obj = copy_object_no_checks (obj, queue);
		*ptr = obj;

		/*
		 * FIXME: See comment for copy_object_no_checks().  If
		 * we have that, we can let the allocation function
		 * give us the block info, too, and we won't have to
		 * re-fetch it.
		 */
		/* Mark the freshly copied object in its new major block. */
		block = MS_BLOCK_FOR_OBJ (obj);
		MS_CALC_MARK_BIT (word, bit, obj);
		DEBUG (9, g_assert (!MS_MARK_BIT (block, word, bit)));
		MS_SET_MARK_BIT (block, word, bit);
	} else {
#ifdef FIXED_HEAP
		if (MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
#else
		mword objsize;

		objsize = SGEN_ALIGN_UP (mono_sgen_safe_object_get_size ((MonoObject*)obj));

		/* Small objects live in major blocks; anything bigger is LOS. */
		if (objsize <= SGEN_MAX_SMALL_OBJ_SIZE)
#endif
		{
			block = MS_BLOCK_FOR_OBJ (obj);
			MS_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
		} else {
			/* LOS object: pin it instead of copying; already-pinned means already handled. */
			if (SGEN_OBJECT_IS_PINNED (obj))
				return;
			binary_protocol_pin (obj, (gpointer)SGEN_LOAD_VTABLE (obj), mono_sgen_safe_object_get_size ((MonoObject*)obj));
			SGEN_PIN_OBJECT (obj);
			/* FIXME: only enqueue if object has references */
			GRAY_OBJECT_ENQUEUE (queue, obj);
		}
	}
}
/*
 * Callback invoked for every object found in a pinned-chunk slot
 * during a collection: it records the pin in the binary protocol and
 * the pin statistics, sets the object's pin bit and pushes the object
 * onto the gray queue so its references get scanned.
 */
static void
pin_pinned_object_callback (void *addr, size_t slot_size, SgenGrayQueue *queue)
{
	mword size = mono_sgen_safe_object_get_size ((MonoObject*)addr);

	binary_protocol_pin (addr, (gpointer)SGEN_LOAD_VTABLE (addr), size);
	/* Only count the object the first time it gets pinned. */
	if (!SGEN_OBJECT_IS_PINNED (addr))
		mono_sgen_pin_stats_register_object ((char*)addr, size);
	SGEN_PIN_OBJECT (addr);
	GRAY_OBJECT_ENQUEUE (queue, addr);
	DEBUG (6, fprintf (gc_debug_file, "Marked pinned object %p (%s) from roots\n", addr, mono_sgen_safe_name (addr)));
}
/* * obj is some object. If it's not in the major heap (i.e. if it's in * the nursery or LOS), return FALSE. Otherwise return whether it's * been marked or copied. */ static gboolean major_is_object_live (char *obj) { MSBlockInfo *block; int word, bit; #ifndef FIXED_HEAP mword objsize; #endif if (ptr_in_nursery (obj)) return FALSE; #ifdef FIXED_HEAP /* LOS */ if (!MS_PTR_IN_SMALL_MAJOR_HEAP (obj)) return FALSE; #else objsize = SGEN_ALIGN_UP (mono_sgen_safe_object_get_size ((MonoObject*)obj)); /* LOS */ if (objsize > SGEN_MAX_SMALL_OBJ_SIZE) return FALSE; #endif /* now we know it's in a major block */ block = MS_BLOCK_FOR_OBJ (obj); DEBUG (9, g_assert (!block->pinned)); MS_CALC_MARK_BIT (word, bit, obj); return MS_MARK_BIT (block, word, bit) ? TRUE : FALSE; }
void mono_sgen_nursery_allocator_prepare_for_pinning (void) { Fragment *frag; /* * The code below starts the search from an entry in scan_starts, which might point into a nursery * fragment containing random data. Clearing the nursery fragments takes a lot of time, and searching * though them too, so lay arrays at each location inside a fragment where a search can start: * - scan_locations[i] * - start_nursery * - the start of each fragment (the last_obj + last_obj case) * The third encompasses the first two, since scan_locations [i] can't point inside a nursery fragment. */ for (frag = unmask (nursery_fragments); frag; frag = unmask (frag->next)) { MonoArray *o; g_assert (frag->fragment_end - frag->fragment_next >= sizeof (MonoArray)); o = (MonoArray*)frag->fragment_next; memset (o, 0, sizeof (MonoArray)); g_assert (mono_sgen_get_array_fill_vtable ()); o->obj.vtable = mono_sgen_get_array_fill_vtable (); /* Mark this as not a real object */ o->obj.synchronisation = GINT_TO_POINTER (-1); o->max_length = (frag->fragment_end - frag->fragment_next) - sizeof (MonoArray); g_assert (frag->fragment_next + mono_sgen_safe_object_get_size ((MonoObject*)o) == frag->fragment_end); } }
/*
 * Rebuild the list of free nursery fragments after a collection.
 *
 * START is the sorted pin queue with NUM_ENTRIES surviving pinned
 * objects; the gaps between consecutive pinned objects (and before the
 * first/after the last) become the new allocation fragments.  Also
 * resets the section's scan starts, pointing each affected entry at a
 * pinned object.  Returns the total size of all fragments created
 * (fragment_total).
 */
mword
mono_sgen_build_nursery_fragments (GCMemSection *nursery_section, void **start, int num_entries)
{
	char *frag_start, *frag_end;
	size_t frag_size;
	int i;

#ifdef NALLOC_DEBUG
	reset_alloc_records ();
#endif

	/* Return all current fragments to the freelist before rebuilding. */
	while (unmask (nursery_fragments)) {
		Fragment *nf = unmask (nursery_fragments);
		Fragment *next = unmask (nf->next);

		nf->next_free = fragment_freelist;
		fragment_freelist = nf;
		nursery_fragments = next;
	}
	frag_start = nursery_start;
	fragment_total = 0;
	/* clear scan starts */
	memset (nursery_section->scan_starts, 0, nursery_section->num_scan_start * sizeof (gpointer));
	for (i = 0; i < num_entries; ++i) {
		frag_end = start [i];
		/* remove the pin bit from pinned objects */
		SGEN_UNPIN_OBJECT (frag_end);
		/* Pinned objects are valid scan-start points for their slot. */
		nursery_section->scan_starts [((char*)frag_end - (char*)nursery_section->data)/SGEN_SCAN_START_SIZE] = frag_end;
		/* The gap between the previous pinned object and this one becomes a fragment. */
		frag_size = frag_end - frag_start;
		if (frag_size)
			add_nursery_frag (frag_size, frag_start, frag_end);
		frag_size = SGEN_ALIGN_UP (mono_sgen_safe_object_get_size ((MonoObject*)start [i]));
#ifdef NALLOC_DEBUG
		add_alloc_record (start [i], frag_size, PINNING);
#endif
		/* Next fragment can begin right after this pinned object. */
		frag_start = (char*)start [i] + frag_size;
	}
	nursery_last_pinned_end = frag_start;
	/* The tail of the nursery, after the last pinned object, is also a fragment. */
	frag_end = nursery_end;
	frag_size = frag_end - frag_start;
	if (frag_size)
		add_nursery_frag (frag_size, frag_start, frag_end);
	/* No fragments at all means the nursery is fully pinned - log the culprits. */
	if (!unmask (nursery_fragments)) {
		DEBUG (1, fprintf (gc_debug_file, "Nursery fully pinned (%d)\n", num_entries));
		for (i = 0; i < num_entries; ++i) {
			DEBUG (3, fprintf (gc_debug_file, "Bastard pinning obj %p (%s), size: %d\n", start [i], mono_sgen_safe_name (start [i]), mono_sgen_safe_object_get_size (start [i])));
		}
	}
	return fragment_total;
}
/*
 * Decode one remset record at P and check whether it covers the
 * location ADDR.  Sets *FOUND to TRUE if it does (never clears it, so
 * the caller can probe several records with one flag).  Returns a
 * pointer to the next record, which depends on the record type's size.
 */
static mword*
find_in_remset_loc (mword *p, char *addr, gboolean *found)
{
	void **ptr;
	mword count, desc;
	size_t skip_size;

	switch ((*p) & REMSET_TYPE_MASK) {
	case REMSET_LOCATION:
		/* A single remembered slot: match it exactly. */
		if (*p == (mword)addr)
			*found = TRUE;
		return p + 1;
	case REMSET_RANGE:
		/* A range of COUNT pointer-sized slots starting at ptr. */
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		count = p [1];
		if ((void**)addr >= ptr && (void**)addr < ptr + count)
			*found = TRUE;
		return p + 2;
	case REMSET_OBJECT:
		/* A whole object: covers all words within its (aligned) size. */
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		count = mono_sgen_safe_object_get_size ((MonoObject*)ptr);
		count = SGEN_ALIGN_UP (count);
		count /= sizeof (mword);
		if ((void**)addr >= ptr && (void**)addr < ptr + count)
			*found = TRUE;
		return p + 1;
	case REMSET_VTYPE:
		/* An array of COUNT value-type elements of SKIP_SIZE bytes each. */
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		desc = p [1];
		count = p [2];
		skip_size = p [3];

		/* The descriptor includes the size of MonoObject */
		skip_size -= sizeof (MonoObject);
		skip_size *= count;
		if ((void**)addr >= ptr && (void**)addr < ptr + (skip_size / sizeof (gpointer)))
			*found = TRUE;

		return p + 4;
	default:
		g_assert_not_reached ();
	}
	return NULL;
}
static gboolean major_is_object_live (char *obj) { mword objsize; /* nursery */ if (ptr_in_nursery (obj)) return FALSE; objsize = SGEN_ALIGN_UP (mono_sgen_safe_object_get_size ((MonoObject*)obj)); /* LOS */ if (objsize > SGEN_MAX_SMALL_OBJ_SIZE) return FALSE; /* pinned chunk */ if (obj_is_from_pinned_alloc (obj)) return FALSE; /* now we know it's in a major heap section */ return MAJOR_SECTION_FOR_OBJECT (obj)->is_to_space; }
/* FIXME: later reduce code duplication here with build_nursery_fragments().
 * We don't keep track of section fragments for non-nursery sections yet, so
 * just memset to 0.
 */
/*
 * Zero the unused gaps between pinned objects in a major heap SECTION
 * and rebuild its scan starts.  Unlike the nursery variant this does
 * not create fragment records - free space is simply memset to 0 -
 * and it also updates section->next_data past the last pinned object.
 */
static void
build_section_fragments (GCMemSection *section)
{
	int i;
	char *frag_start, *frag_end;
	size_t frag_size;

	/* clear scan starts */
	memset (section->scan_starts, 0, section->num_scan_start * sizeof (gpointer));
	frag_start = section->data;
	section->next_data = section->data;
	for (i = 0; i < section->pin_queue_num_entries; ++i) {
		frag_end = section->pin_queue_start [i];
		/* remove the pin bit from pinned objects */
		SGEN_UNPIN_OBJECT (frag_end);
		if (frag_end >= section->data + section->size) {
			/* Entry points past the section's end - clamp the gap to the section. */
			frag_end = section->data + section->size;
		} else {
			/* Pinned objects are valid scan-start points for their slot. */
			section->scan_starts [((char*)frag_end - (char*)section->data)/SGEN_SCAN_START_SIZE] = frag_end;
		}
		/* Zero the gap between the previous pinned object and this one. */
		frag_size = frag_end - frag_start;
		if (frag_size) {
			binary_protocol_empty (frag_start, frag_size);
			memset (frag_start, 0, frag_size);
		}
		frag_size = SGEN_ALIGN_UP (mono_sgen_safe_object_get_size ((MonoObject*)section->pin_queue_start [i]));
		frag_start = (char*)section->pin_queue_start [i] + frag_size;
		section->next_data = MAX (section->next_data, frag_start);
	}
	/* Zero the tail after the last pinned object. */
	frag_end = section->end_data;
	frag_size = frag_end - frag_start;
	if (frag_size) {
		binary_protocol_empty (frag_start, frag_size);
		memset (frag_start, 0, frag_size);
	}
}
/*
 * Replay one remembered-set record at P during a minor collection.
 *
 * START_NURSERY/END_NURSERY delimit the nursery; pointers to locations
 * inside the nursery are skipped since the objects holding them will
 * be scanned anyway.  GLOBAL says whether this is a global remset
 * record (affects statistics and whether we may need to re-add the
 * location to the global remset for pinned targets).  Returns a
 * pointer to the next record.
 */
static mword*
handle_remset (mword *p, void *start_nursery, void *end_nursery, gboolean global, SgenGrayQueue *queue)
{
	void **ptr;
	mword count;
	mword desc;

	if (global)
		HEAVY_STAT (++stat_global_remsets_processed);
	else
		HEAVY_STAT (++stat_local_remsets_processed);

	/* FIXME: exclude stack locations */
	switch ((*p) & REMSET_TYPE_MASK) {
	case REMSET_LOCATION:
		/* A single slot holding a possibly-nursery pointer. */
		ptr = (void**)(*p);
		//__builtin_prefetch (ptr);
		if (((void*)ptr < start_nursery || (void*)ptr >= end_nursery)) {
			gpointer old = *ptr;
			major_collector.copy_object (ptr, queue);
			DEBUG (9, fprintf (gc_debug_file, "Overwrote remset at %p with %p\n", ptr, *ptr));
			if (old)
				binary_protocol_ptr_update (ptr, old, *ptr, (gpointer)LOAD_VTABLE (*ptr), mono_sgen_safe_object_get_size (*ptr));
			if (!global && *ptr >= start_nursery && *ptr < end_nursery) {
				/*
				 * If the object is pinned, each reference to it from nonpinned objects
				 * becomes part of the global remset, which can grow very large.
				 */
				DEBUG (9, fprintf (gc_debug_file, "Add to global remset because of pinning %p (%p %s)\n", ptr, *ptr, mono_sgen_safe_name (*ptr)));
				mono_sgen_add_to_global_remset (ptr);
			}
		} else {
			DEBUG (9, fprintf (gc_debug_file, "Skipping remset at %p holding %p\n", ptr, *ptr));
		}
		return p + 1;
	case REMSET_RANGE:
		/* A range of COUNT consecutive pointer slots. */
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		if (((void*)ptr >= start_nursery && (void*)ptr < end_nursery))
			return p + 2;
		count = p [1];
		while (count-- > 0) {
			major_collector.copy_object (ptr, queue);
			DEBUG (9, fprintf (gc_debug_file, "Overwrote remset at %p with %p (count: %d)\n", ptr, *ptr, (int)count));
			if (!global && *ptr >= start_nursery && *ptr < end_nursery)
				mono_sgen_add_to_global_remset (ptr);
			++ptr;
		}
		return p + 2;
	case REMSET_OBJECT:
		/* A whole object: scan it with the minor scan function. */
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		if (((void*)ptr >= start_nursery && (void*)ptr < end_nursery))
			return p + 1;
		mono_sgen_get_minor_scan_object () ((char*)ptr, queue);
		return p + 1;
	case REMSET_VTYPE: {
		/* COUNT value-type elements of SKIP_SIZE bytes, each scanned by DESC. */
		ScanVTypeFunc scan_vtype = mono_sgen_get_minor_scan_vtype ();
		size_t skip_size;

		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		if (((void*)ptr >= start_nursery && (void*)ptr < end_nursery))
			return p + 4;
		desc = p [1];
		count = p [2];
		skip_size = p [3];
		while (count-- > 0) {
			scan_vtype ((char*)ptr, desc, queue);
			ptr = (void**)((char*)ptr + skip_size);
		}
		return p + 4;
	}
	default:
		g_assert_not_reached ();
	}
	return NULL;
}
/*
 * Parallel copy-or-mark function for the mark&sweep major collector.
 *
 * Like the serial variant, but multiple collector threads may race on
 * the same object, so forwarding, pinning and marking are all done
 * with CAS on the object's vtable word / mark bits.  Only the thread
 * that wins the forwarding CAS copies the object; losers follow the
 * winner's forwarding pointer.
 */
static void
major_copy_or_mark_object (void **ptr, SgenGrayQueue *queue)
{
	void *obj = *ptr;
	mword vtable_word = *(mword*)obj;
	/* Strip the forwarding/pinning tag bits to get the real vtable. */
	MonoVTable *vt = (MonoVTable*)(vtable_word & ~SGEN_VTABLE_BITS_MASK);
	mword objsize;
	MSBlockInfo *block;

	HEAVY_STAT (++stat_copy_object_called_major);

	DEBUG (9, g_assert (obj));
	DEBUG (9, g_assert (current_collection_generation == GENERATION_OLD));

	if (ptr_in_nursery (obj)) {
		int word, bit;
		gboolean has_references;
		void *destination;

		/* Another thread (or an earlier reference) already forwarded it. */
		if (vtable_word & SGEN_FORWARDED_BIT) {
			*ptr = (void*)vt;
			return;
		}

		/* Pinned nursery objects must not move. */
		if (vtable_word & SGEN_PINNED_BIT)
			return;

		HEAVY_STAT (++stat_objects_copied_major);

		objsize = SGEN_ALIGN_UP (mono_sgen_par_object_get_size (vt, (MonoObject*)obj));
		has_references = SGEN_VTABLE_HAS_REFERENCES (vt);

		/* Speculatively allocate space; install the forwarding pointer with CAS. */
		destination = major_alloc_object (objsize, has_references);

		if (SGEN_CAS_PTR (obj, (void*)((mword)destination | SGEN_FORWARDED_BIT), vt) == vt) {
			gboolean was_marked;

			/* We won the race: copy the object and publish the new address. */
			par_copy_object_no_checks (destination, vt, obj, objsize, has_references ? queue : NULL);
			obj = destination;
			*ptr = obj;

			/*
			 * FIXME: If we make major_alloc_object() give
			 * us the block info, too, we won't have to
			 * re-fetch it here.
			 */
			block = MS_BLOCK_FOR_OBJ (obj);
			MS_CALC_MARK_BIT (word, bit, obj);
			DEBUG (9, g_assert (!MS_MARK_BIT (block, word, bit)));
			MS_PAR_SET_MARK_BIT (was_marked, block, word, bit);
		} else {
			/*
			 * FIXME: We have allocated destination, but
			 * we cannot use it.  Give it back to the
			 * allocator.
			 */
			*(void**)destination = NULL;

			/* Another thread forwarded it first - use its copy. */
			vtable_word = *(mword*)obj;
			g_assert (vtable_word & SGEN_FORWARDED_BIT);

			obj = (void*)(vtable_word & ~SGEN_VTABLE_BITS_MASK);

			*ptr = obj;
		}
	} else {
#ifdef FIXED_HEAP
		if (MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
#else
		objsize = SGEN_ALIGN_UP (mono_sgen_par_object_get_size (vt, (MonoObject*)obj));

		/* Small objects live in major blocks; anything bigger is LOS. */
		if (objsize <= SGEN_MAX_SMALL_OBJ_SIZE)
#endif
		{
			block = MS_BLOCK_FOR_OBJ (obj);
			MS_PAR_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
		} else {
			/* LOS object: pin it with a CAS on the vtable word. */
			if (vtable_word & SGEN_PINNED_BIT)
				return;
			binary_protocol_pin (obj, vt, mono_sgen_safe_object_get_size ((MonoObject*)obj));
			if (SGEN_CAS_PTR (obj, (void*)(vtable_word | SGEN_PINNED_BIT), (void*)vtable_word) == (void*)vtable_word) {
				if (SGEN_VTABLE_HAS_REFERENCES (vt))
					GRAY_OBJECT_ENQUEUE (queue, obj);
			} else {
				/* Somebody else pinned it concurrently - nothing left to do. */
				g_assert (SGEN_OBJECT_IS_PINNED (obj));
			}
		}
	}
}
/*
 * Copy-or-mark function for the copying major collector: copies OBJ
 * into to-space and updates *OBJ_SLOT, unless the object is pinned,
 * already forwarded, a LOS/pinned-chunk object (which gets pinned and
 * enqueued instead), or already in to-space.
 */
static void
major_copy_or_mark_object (void **obj_slot, SgenGrayQueue *queue)
{
	char *forwarded;
	char *obj = *obj_slot;
	mword objsize;

	DEBUG (9, g_assert (current_collection_generation == GENERATION_OLD));

	HEAVY_STAT (++stat_copy_object_called_major);

	DEBUG (9, fprintf (gc_debug_file, "Precise copy of %p from %p", obj, obj_slot));

	/*
	 * obj must belong to one of:
	 *
	 * 1. the nursery
	 * 2. the LOS
	 * 3. a pinned chunk
	 * 4. a non-to-space section of the major heap
	 * 5. a to-space section of the major heap
	 *
	 * In addition, objects in 1, 2 and 4 might also be pinned.
	 * Objects in 1 and 4 might be forwarded.
	 *
	 * Before we can copy the object we must make sure that we are
	 * allowed to, i.e. that the object not pinned, not already
	 * forwarded and doesn't belong to the LOS, a pinned chunk, or
	 * a to-space section.
	 *
	 * We are usually called for to-space objects (5) when we have
	 * two remset entries for the same reference.  The first entry
	 * copies the object and updates the reference and the second
	 * calls us with the updated reference that points into
	 * to-space.  There might also be other circumstances where we
	 * get to-space objects.
	 */

	if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
		DEBUG (9, g_assert (((MonoVTable*)SGEN_LOAD_VTABLE(obj))->gc_descr));
		DEBUG (9, fprintf (gc_debug_file, " (already forwarded to %p)\n", forwarded));
		HEAVY_STAT (++stat_major_copy_object_failed_forwarded);
		*obj_slot = forwarded;
		return;
	}
	if (SGEN_OBJECT_IS_PINNED (obj)) {
		DEBUG (9, g_assert (((MonoVTable*)SGEN_LOAD_VTABLE(obj))->gc_descr));
		DEBUG (9, fprintf (gc_debug_file, " (pinned, no change)\n"));
		HEAVY_STAT (++stat_major_copy_object_failed_pinned);
		return;
	}

	/* Nursery objects (1) are always copied - no further checks needed. */
	if (ptr_in_nursery (obj))
		goto copy;

	/*
	 * At this point we know obj is not pinned, not forwarded and
	 * belongs to 2, 3, 4, or 5.
	 *
	 * LOS object (2) are simple, at least until we always follow
	 * the rule: if objsize > SGEN_MAX_SMALL_OBJ_SIZE, pin the
	 * object and return it.  At the end of major collections, we
	 * walk the los list and if the object is pinned, it is
	 * marked, otherwise it can be freed.
	 *
	 * Pinned chunks (3) and major heap sections (4, 5) both
	 * reside in blocks, which are always aligned, so once we've
	 * eliminated LOS objects, we can just access the block and
	 * see whether it's a pinned chunk or a major heap section.
	 */
	objsize = SGEN_ALIGN_UP (mono_sgen_safe_object_get_size ((MonoObject*)obj));
	if (G_UNLIKELY (objsize > SGEN_MAX_SMALL_OBJ_SIZE || obj_is_from_pinned_alloc (obj))) {
		if (SGEN_OBJECT_IS_PINNED (obj))
			return;
		DEBUG (9, fprintf (gc_debug_file, " (marked LOS/Pinned %p (%s), size: %zd)\n", obj, mono_sgen_safe_name (obj), objsize));
		binary_protocol_pin (obj, (gpointer)SGEN_LOAD_VTABLE (obj), mono_sgen_safe_object_get_size ((MonoObject*)obj));
		SGEN_PIN_OBJECT (obj);
		GRAY_OBJECT_ENQUEUE (queue, obj);
		HEAVY_STAT (++stat_major_copy_object_failed_large_pinned);
		return;
	}

	/*
	 * Now we know the object is in a major heap section.  All we
	 * need to do is check whether it's already in to-space (5) or
	 * not (4).
	 */
	if (MAJOR_OBJ_IS_IN_TO_SPACE (obj)) {
		DEBUG (9, g_assert (objsize <= SGEN_MAX_SMALL_OBJ_SIZE));
		DEBUG (9, fprintf (gc_debug_file, " (already copied)\n"));
		HEAVY_STAT (++stat_major_copy_object_failed_to_space);
		return;
	}

 copy:
	HEAVY_STAT (++stat_objects_copied_major);

	*obj_slot = copy_object_no_checks (obj, queue);
}
/*
 * Copy-or-mark function for the mark&sweep collector with block
 * evacuation.  Nursery objects are copied to the major heap; major
 * heap objects whose block size class is being evacuated are copied
 * to a fresh block (falling back to in-place marking if the copy
 * fails), others are simply marked and enqueued.  LOS objects are
 * pinned.
 */
static void
major_copy_or_mark_object (void **ptr, SgenGrayQueue *queue)
{
	void *obj = *ptr;
	MSBlockInfo *block;

	HEAVY_STAT (++stat_copy_object_called_major);

	DEBUG (9, g_assert (obj));
	DEBUG (9, g_assert (current_collection_generation == GENERATION_OLD));

	if (ptr_in_nursery (obj)) {
		int word, bit;
		char *forwarded, *old_obj;

		/* Already copied by an earlier reference - follow the forwarding pointer. */
		if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
			*ptr = forwarded;
			return;
		}
		/* Pinned nursery objects must not move. */
		if (SGEN_OBJECT_IS_PINNED (obj))
			return;

		HEAVY_STAT (++stat_objects_copied_major);

	do_copy_object:
		old_obj = obj;
		obj = copy_object_no_checks (obj, queue);
		if (G_UNLIKELY (old_obj == obj)) {
			/*If we fail to evacuate an object we just stop doing it for a given block size as all other will surely fail too.*/
			if (!ptr_in_nursery (obj)) {
				int size_index;
				block = MS_BLOCK_FOR_OBJ (obj);
				size_index = block->obj_size_index;
				evacuate_block_obj_sizes [size_index] = FALSE;
				/* Fall back to marking the object in place. */
				MS_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
			}
			return;
		}
		*ptr = obj;

		/*
		 * FIXME: See comment for copy_object_no_checks().  If
		 * we have that, we can let the allocation function
		 * give us the block info, too, and we won't have to
		 * re-fetch it.
		 */
		block = MS_BLOCK_FOR_OBJ (obj);
		MS_CALC_MARK_BIT (word, bit, obj);
		DEBUG (9, g_assert (!MS_MARK_BIT (block, word, bit)));
		MS_SET_MARK_BIT (block, word, bit);
	} else {
		char *forwarded;
#ifndef FIXED_HEAP
		mword objsize;
#endif

		/* Major heap objects can be forwarded too, when they were evacuated. */
		if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
			*ptr = forwarded;
			return;
		}

#ifdef FIXED_HEAP
		if (MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
#else
		objsize = SGEN_ALIGN_UP (mono_sgen_safe_object_get_size ((MonoObject*)obj));

		/* Small objects live in major blocks; anything bigger is LOS. */
		if (objsize <= SGEN_MAX_SMALL_OBJ_SIZE)
#endif
		{
			int size_index;

			block = MS_BLOCK_FOR_OBJ (obj);
			size_index = block->obj_size_index;

			/* Evacuate the object if its size class is being defragmented. */
			if (!block->has_pinned && evacuate_block_obj_sizes [size_index]) {
				/* Objects already in to-space blocks stay put. */
				if (block->is_to_space)
					return;
				HEAVY_STAT (++stat_major_objects_evacuated);
				goto do_copy_object;
			} else {
				MS_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
			}
		} else {
			/* LOS object: pin it instead of copying. */
			if (SGEN_OBJECT_IS_PINNED (obj))
				return;
			binary_protocol_pin (obj, (gpointer)SGEN_LOAD_VTABLE (obj), mono_sgen_safe_object_get_size ((MonoObject*)obj));
			SGEN_PIN_OBJECT (obj);
			/* FIXME: only enqueue if object has references */
			GRAY_OBJECT_ENQUEUE (queue, obj);
		}
	}
}