Example #1
/*
 * Descriptor builders.
 */
SgenDescriptor
mono_gc_make_descr_for_object (gsize *bitmap, int numbits, size_t obj_size)
{
	int first_set = -1, num_set = 0, last_set = -1, i;
	SgenDescriptor desc = 0;
	size_t stored_size = SGEN_ALIGN_UP (obj_size);

	for (i = 0; i < numbits; ++i) {
		if (bitmap [i / GC_BITS_PER_WORD] & ((gsize)1 << (i % GC_BITS_PER_WORD))) {
			if (first_set < 0)
				first_set = i;
			last_set = i;
			num_set++;
		}
	}

	if (first_set < 0) {
		SGEN_LOG (6, "Ptrfree descriptor %p, size: %zd", (void*)desc, stored_size);
		if (stored_size <= MAX_RUNLEN_OBJECT_SIZE && stored_size <= SGEN_MAX_SMALL_OBJ_SIZE)
			return DESC_TYPE_SMALL_PTRFREE | stored_size;
		return DESC_TYPE_COMPLEX_PTRFREE;
	}

	g_assert (!(stored_size & 0x7));

	SGEN_ASSERT (5, stored_size == SGEN_ALIGN_UP (stored_size), "Size is not aligned");

	/* we know the 2-word header is ptr-free */
	if (last_set < BITMAP_NUM_BITS + OBJECT_HEADER_WORDS && stored_size <= SGEN_MAX_SMALL_OBJ_SIZE) {
		desc = DESC_TYPE_BITMAP | ((*bitmap >> OBJECT_HEADER_WORDS) << LOW_TYPE_BITS);
		SGEN_LOG (6, "Largebitmap descriptor %p, size: %zd, last set: %d", (void*)desc, stored_size, last_set);
		return desc;
	}
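A hypothetical usage sketch for the builder above: describe an object whose pointer slots 2 and 3 hold references, with slots 0 and 1 being the two header words the code treats as ptr-free. All names and sizes here are illustrative, not from the Mono sources.

static SgenDescriptor
make_two_ref_descriptor (void)
{
	gsize bitmap [1] = { 0 };

	bitmap [0] |= (gsize)1 << 2;	/* first reference slot */
	bitmap [0] |= (gsize)1 << 3;	/* second reference slot */

	/* 4 pointer-sized slots in total; the size gets SGEN_ALIGN_UP'd. */
	return mono_gc_make_descr_for_object (bitmap, 4, 4 * sizeof (gpointer));
}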
Example #2
/*
 * obj is some object.  If it's not in the major heap (i.e. if it's in
 * the nursery or LOS), return FALSE.  Otherwise return whether it's
 * been marked or copied.
 */
static gboolean
major_is_object_live (char *obj)
{
	MSBlockInfo *block;
	int word, bit;
#ifndef FIXED_HEAP
	mword objsize;
#endif

	if (ptr_in_nursery (obj))
		return FALSE;

#ifdef FIXED_HEAP
	/* LOS */
	if (!MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
		return FALSE;
#else
	objsize = SGEN_ALIGN_UP (mono_sgen_safe_object_get_size ((MonoObject*)obj));

	/* LOS */
	if (objsize > SGEN_MAX_SMALL_OBJ_SIZE)
		return FALSE;
#endif

	/* now we know it's in a major block */
	block = MS_BLOCK_FOR_OBJ (obj);
	DEBUG (9, g_assert (!block->pinned));
	MS_CALC_MARK_BIT (word, bit, obj);
	return MS_MARK_BIT (block, word, bit) ? TRUE : FALSE;
}
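The mark-bit macros themselves are not shown in these snippets. A plausible expansion, assuming 16-byte object granularity inside a block and a mark_words array in MSBlockInfo; MS_BLOCK_DATA is likewise an assumption, standing for the start of the block payload.

/* Sketch only: map an object to (word, bit) in its block's mark bitmap. */
static inline void
my_calc_mark_bit (int *word, int *bit, MSBlockInfo *block, char *obj)
{
	size_t index = (size_t)(obj - MS_BLOCK_DATA (block)) >> 4;	/* 16-byte slots */

	*word = (int)(index / (sizeof (mword) * 8));
	*bit = (int)(index % (sizeof (mword) * 8));
}

/* Testing the bit then mirrors MS_MARK_BIT (block, word, bit):
 *   block->mark_words [word] & ((mword)1 << bit)
 */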
Example #3
static void
major_copy_or_mark_object (void **ptr, SgenGrayQueue *queue)
{
	void *obj = *ptr;
	MSBlockInfo *block;

	HEAVY_STAT (++stat_copy_object_called_major);

	DEBUG (9, g_assert (obj));
	DEBUG (9, g_assert (current_collection_generation == GENERATION_OLD));

	if (ptr_in_nursery (obj)) {
		int word, bit;
		char *forwarded;

		if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
			*ptr = forwarded;
			return;
		}
		if (SGEN_OBJECT_IS_PINNED (obj))
			return;

		HEAVY_STAT (++stat_objects_copied_major);

		obj = copy_object_no_checks (obj, queue);
		*ptr = obj;

		/*
		 * FIXME: See comment for copy_object_no_checks().  If
		 * we have that, we can let the allocation function
		 * give us the block info, too, and we won't have to
		 * re-fetch it.
		 */
		block = MS_BLOCK_FOR_OBJ (obj);
		MS_CALC_MARK_BIT (word, bit, obj);
		DEBUG (9, g_assert (!MS_MARK_BIT (block, word, bit)));
		MS_SET_MARK_BIT (block, word, bit);
	} else {
#ifdef FIXED_HEAP
		if (MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
#else
		mword objsize;

		objsize = SGEN_ALIGN_UP (mono_sgen_safe_object_get_size ((MonoObject*)obj));

		if (objsize <= SGEN_MAX_SMALL_OBJ_SIZE)
#endif
		{
			block = MS_BLOCK_FOR_OBJ (obj);
			MS_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
		} else {
			if (SGEN_OBJECT_IS_PINNED (obj))
				return;
			binary_protocol_pin (obj, (gpointer)SGEN_LOAD_VTABLE (obj), mono_sgen_safe_object_get_size ((MonoObject*)obj));
			SGEN_PIN_OBJECT (obj);
			/* FIXME: only enqueue if object has references */
			GRAY_OBJECT_ENQUEUE (queue, obj);
		}
	}
}
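MS_MARK_OBJECT_AND_ENQUEUE is not shown either. A sketch of what it plausibly does with the helpers seen above: set the mark bit, and gray-queue the object only on the first transition, so each object is scanned once.

static inline void
my_mark_object_and_enqueue (char *obj, MSBlockInfo *block, SgenGrayQueue *queue)
{
	int word, bit;

	MS_CALC_MARK_BIT (word, bit, obj);
	if (!MS_MARK_BIT (block, word, bit)) {
		MS_SET_MARK_BIT (block, word, bit);
		/* as the FIXME above notes, ideally only if it has references */
		GRAY_OBJECT_ENQUEUE (queue, obj);
	}
}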
Example #4
void*
mono_sgen_nursery_alloc (size_t size)
{
	Fragment *frag;
	DEBUG (4, fprintf (gc_debug_file, "Searching nursery for size: %zd\n", size));
	size = SGEN_ALIGN_UP (size);

	HEAVY_STAT (InterlockedIncrement (&stat_nursery_alloc_requests));

#ifdef NALLOC_DEBUG
	InterlockedIncrement (&alloc_count);
#endif

restart:
	for (frag = unmask (nursery_fragments); frag; frag = unmask (frag->next)) {
		HEAVY_STAT (InterlockedIncrement (&stat_alloc_iterations));

		if (size <= (frag->fragment_end - frag->fragment_next)) {
			void *p = alloc_from_fragment (frag, size);
			if (!p) {
				HEAVY_STAT (InterlockedIncrement (&stat_alloc_retries));
				goto restart;
			}
#ifdef NALLOC_DEBUG
			add_alloc_record (p, size, FIXED_ALLOC);
#endif
			return p;
		}
	}
	return NULL;
}
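The restart path only makes sense if alloc_from_fragment can fail under concurrent allocation. A lock-free bump-pointer sketch consistent with that, assuming char* fragment cursors and a Mono-style InterlockedCompareExchangePointer; illustrative, not the actual implementation.

static void*
my_alloc_from_fragment (Fragment *frag, size_t size)
{
	char *p = frag->fragment_next;
	char *end = p + size;

	if (end > frag->fragment_end)
		return NULL;

	/* Publish the new bump pointer; fail if another mutator moved it
	 * first, so the caller rescans the fragment list. */
	if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->fragment_next, end, p) != (gpointer)p)
		return NULL;
	return p;
}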
Example #5
void*
mono_gc_alloc_array (MonoVTable *vtable, size_t size, uintptr_t max_length, uintptr_t bounds_size)
{
	MonoArray *arr;
	MonoArrayBounds *bounds;
	TLAB_ACCESS_INIT;

	if (!SGEN_CAN_ALIGN_UP (size))
		return NULL;

#ifndef DISABLE_CRITICAL_REGION
	ENTER_CRITICAL_REGION;
	arr = mono_gc_try_alloc_obj_nolock (vtable, size);
	if (arr) {
		/*This doesn't require fencing since EXIT_CRITICAL_REGION already does it for us*/
		arr->max_length = (mono_array_size_t)max_length;

		bounds = (MonoArrayBounds*)((char*)arr + size - bounds_size);
		arr->bounds = bounds;
		EXIT_CRITICAL_REGION;
		goto done;
	}
	EXIT_CRITICAL_REGION;
#endif

	LOCK_GC;

	arr = mono_gc_alloc_obj_nolock (vtable, size);
	if (G_UNLIKELY (!arr)) {
		UNLOCK_GC;
		return mono_gc_out_of_memory (size);
	}

	arr->max_length = (mono_array_size_t)max_length;

	bounds = (MonoArrayBounds*)((char*)arr + size - bounds_size);
	arr->bounds = bounds;

	UNLOCK_GC;

 done:
	SGEN_ASSERT (6, SGEN_ALIGN_UP (size) == SGEN_ALIGN_UP (sgen_par_object_get_size (vtable, (MonoObject*)arr)), "Array has incorrect size.");
	return arr;
}
Example #6
gboolean
mono_sgen_can_alloc_size (size_t size)
{
	Fragment *frag;
	size = SGEN_ALIGN_UP (size);

	for (frag = unmask (nursery_fragments); frag; frag = unmask (frag->next)) {
		if ((frag->fragment_end - frag->fragment_next) >= size)
			return TRUE;
	}
	return FALSE;
}
Example #7
void*
sgen_nursery_alloc (size_t size)
{
	SGEN_ASSERT (1, size >= sizeof (MonoObject) && size <= SGEN_MAX_SMALL_OBJ_SIZE, "Invalid nursery object size");

	SGEN_LOG (4, "Searching nursery for size: %zd", size);
	size = SGEN_ALIGN_UP (size);

	HEAVY_STAT (InterlockedIncrement (&stat_nursery_alloc_requests));

	return sgen_fragment_allocator_par_alloc (&mutator_allocator, size);
}
Example #8
void*
sgen_nursery_alloc (size_t size)
{
	SGEN_ASSERT (1, size >= (SGEN_CLIENT_MINIMUM_OBJECT_SIZE + CANARY_SIZE) && size <= (SGEN_MAX_SMALL_OBJ_SIZE + CANARY_SIZE), "Invalid nursery object size");

	SGEN_LOG (4, "Searching nursery for size: %zd", size);
	size = SGEN_ALIGN_UP (size);

	HEAVY_STAT (++stat_nursery_alloc_requests);

	return sgen_fragment_allocator_par_alloc (&mutator_allocator, size);
}
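This variant differs from the previous one only in budgeting for CANARY_SIZE: each nursery object can be followed by a small guard pattern used to catch overruns. A sketch of the idea with a hypothetical pattern and helper names; the real CANARIFY_* machinery lives elsewhere in SGen.

#define MY_CANARY_SIZE 8
static const char my_canary [MY_CANARY_SIZE] = "CANARY!";	/* 7 chars + NUL */

/* Write the pattern into the slack just past the object. */
static void
my_canarify (char *obj, size_t alloc_size)
{
	memcpy (obj + alloc_size - MY_CANARY_SIZE, my_canary, MY_CANARY_SIZE);
}

/* Verify it on a later sweep or in debug checks. */
static gboolean
my_canary_intact (char *obj, size_t alloc_size)
{
	return memcmp (obj + alloc_size - MY_CANARY_SIZE, my_canary, MY_CANARY_SIZE) == 0;
}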
Example #9
mword
mono_sgen_build_nursery_fragments (GCMemSection *nursery_section, void **start, int num_entries)
{
	char *frag_start, *frag_end;
	size_t frag_size;
	int i;

#ifdef NALLOC_DEBUG
	reset_alloc_records ();
#endif

	while (unmask (nursery_fragments)) {
		Fragment *nf = unmask (nursery_fragments);
		Fragment *next = unmask (nf->next);

		nf->next_free = fragment_freelist;
		fragment_freelist = nf;
		nursery_fragments = next;
	}
	frag_start = nursery_start;
	fragment_total = 0;
	/* clear scan starts */
	memset (nursery_section->scan_starts, 0, nursery_section->num_scan_start * sizeof (gpointer));
	for (i = 0; i < num_entries; ++i) {
		frag_end = start [i];
		/* remove the pin bit from pinned objects */
		SGEN_UNPIN_OBJECT (frag_end);
		nursery_section->scan_starts [((char*)frag_end - (char*)nursery_section->data)/SGEN_SCAN_START_SIZE] = frag_end;
		frag_size = frag_end - frag_start;
		if (frag_size)
			add_nursery_frag (frag_size, frag_start, frag_end);
		frag_size = SGEN_ALIGN_UP (mono_sgen_safe_object_get_size ((MonoObject*)start [i]));
#ifdef NALLOC_DEBUG
		add_alloc_record (start [i], frag_size, PINNING);
#endif
		frag_start = (char*)start [i] + frag_size;
	}
	nursery_last_pinned_end = frag_start;
	frag_end = nursery_end;
	frag_size = frag_end - frag_start;
	if (frag_size)
		add_nursery_frag (frag_size, frag_start, frag_end);
	if (!unmask (nursery_fragments)) {
		DEBUG (1, fprintf (gc_debug_file, "Nursery fully pinned (%d)\n", num_entries));
		for (i = 0; i < num_entries; ++i) {
			DEBUG (3, fprintf (gc_debug_file, "Bastard pinning obj %p (%s), size: %zd\n", start [i], mono_sgen_safe_name (start [i]), (size_t)mono_sgen_safe_object_get_size (start [i])));
		}
	}

	return fragment_total;
}
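add_nursery_frag is not shown in this snippet. A sketch consistent with the free-list recycling visible at the top of the function, ignoring the pointer masking the real list uses; my_alloc_fragment stands in for whatever slow path allocates fresh Fragment structs.

static void
my_add_nursery_frag (size_t frag_size, char *start, char *end)
{
	Fragment *frag = fragment_freelist;	/* reuse a retired fragment */

	if (frag)
		fragment_freelist = frag->next_free;
	else
		frag = my_alloc_fragment ();	/* hypothetical */

	frag->fragment_next = start;
	frag->fragment_end = end;
	frag->next = nursery_fragments;
	nursery_fragments = frag;
	fragment_total += frag_size;
}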
Example #10
static SgenPinnedChunk*
alloc_pinned_chunk (SgenPinnedAllocator *alc)
{
	SgenPinnedChunk *chunk;
	int offset;
	int size = SGEN_PINNED_CHUNK_SIZE;

	chunk = sgen_alloc_os_memory_aligned (size, size, TRUE);
	chunk->block.role = MEMORY_ROLE_PINNED;

	sgen_update_heap_boundaries ((mword)chunk, ((mword)chunk + size));

	pinned_chunk_bytes_alloced += size;

	/* set up the bookkeeping fields */
	chunk->num_pages = size / FREELIST_PAGESIZE;
	offset = G_STRUCT_OFFSET (SgenPinnedChunk, data);
	chunk->page_sizes = (void*)((char*)chunk + offset);
	offset += sizeof (int) * chunk->num_pages;
	offset = SGEN_ALIGN_UP (offset);
	chunk->free_list = (void*)((char*)chunk + offset);
	offset += sizeof (void*) * SGEN_PINNED_FREELIST_NUM_SLOTS;
	offset = SGEN_ALIGN_UP (offset);
	chunk->start_data = (void*)((char*)chunk + offset);

	/* allocate the first page to the freelist */
	chunk->page_sizes [0] = PINNED_FIRST_SLOT_SIZE;
	build_freelist (alc, chunk, slot_for_size (PINNED_FIRST_SLOT_SIZE), PINNED_FIRST_SLOT_SIZE,
			chunk->start_data, ((char*)chunk + FREELIST_PAGESIZE));
	sgen_debug_printf (4, "Allocated pinned chunk %p, size: %d\n", chunk, size);

	chunk->block.next = alc->chunk_list;
	alc->chunk_list = chunk;

	chunk->allocator = alc;

	return chunk;
}
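For reference, the layout the offset arithmetic above produces, read directly off the code (each boundary rounded up by SGEN_ALIGN_UP):

/*
 *   (char*)chunk
 *   +-- SgenPinnedChunk header (up to G_STRUCT_OFFSET (SgenPinnedChunk, data))
 *   +-- chunk->page_sizes: num_pages ints
 *   +-- chunk->free_list:  SGEN_PINNED_FREELIST_NUM_SLOTS pointers
 *   +-- chunk->start_data: object storage, first page pre-built as a freelist
 *   (char*)chunk + SGEN_PINNED_CHUNK_SIZE
 */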
Example #11
gboolean
sgen_can_alloc_size (size_t size)
{
	SgenFragment *frag;

	if (!SGEN_CAN_ALIGN_UP (size))
		return FALSE;

	size = SGEN_ALIGN_UP (size);

	for (frag = (SgenFragment *)unmask (mutator_allocator.alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) {
		if ((size_t)(frag->fragment_end - frag->fragment_next) >= size)
			return TRUE;
	}
	return FALSE;
}
Example #12
static mword*
find_in_remset_loc (mword *p, char *addr, gboolean *found)
{
	void **ptr;
	mword count, desc;
	size_t skip_size;

	switch ((*p) & REMSET_TYPE_MASK) {
	case REMSET_LOCATION:
		if (*p == (mword)addr)
			*found = TRUE;
		return p + 1;
	case REMSET_RANGE:
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		count = p [1];
		if ((void**)addr >= ptr && (void**)addr < ptr + count)
			*found = TRUE;
		return p + 2;
	case REMSET_OBJECT:
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		count = mono_sgen_safe_object_get_size ((MonoObject*)ptr); 
		count = SGEN_ALIGN_UP (count);
		count /= sizeof (mword);
		if ((void**)addr >= ptr && (void**)addr < ptr + count)
			*found = TRUE;
		return p + 1;
	case REMSET_VTYPE:
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		desc = p [1];
		count = p [2];
		skip_size = p [3];

		/* The descriptor includes the size of MonoObject */
		skip_size -= sizeof (MonoObject);
		skip_size *= count;
		if ((void**)addr >= ptr && (void**)addr < ptr + (skip_size / sizeof (gpointer)))
			*found = TRUE;

		return p + 4;
	default:
		g_assert_not_reached ();
	}
	return NULL;
}
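The decoder above implies a compact encoding: a type tag in the low pointer bits plus zero or more payload words. A sketch of the matching writer for REMSET_RANGE under that assumption; my_remset_add_range and the buffer-cursor convention are hypothetical.

static mword*
my_remset_add_range (mword *next, void **start, mword count)
{
	next [0] = (mword)start | REMSET_RANGE;	/* tag lives in the low bits */
	next [1] = count;
	return next + 2;	/* same stride find_in_remset_loc walks */
}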
Example #13
static gboolean
major_is_object_live (char *obj)
{
	mword objsize;

	/* nursery */
	if (ptr_in_nursery (obj))
		return FALSE;

	objsize = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)obj));

	/* LOS */
	if (objsize > SGEN_MAX_SMALL_OBJ_SIZE)
		return FALSE;

	/* pinned chunk */
	if (obj_is_from_pinned_alloc (obj))
		return FALSE;

	/* now we know it's in a major heap section */
	return MAJOR_SECTION_FOR_OBJECT (obj)->is_to_space;
}
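MAJOR_SECTION_FOR_OBJECT is not shown; with power-of-two-aligned sections it can be a plain address mask, which is also why the LOS and pinned-chunk cases must be excluded first. A sketch under that assumption:

#define MY_SECTION_FOR_OBJECT(obj)	\
	((GCMemSection*)((mword)(obj) & ~(mword)(MAJOR_SECTION_SIZE - 1)))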
Example #14
/* FIXME: later reduce code duplication here with build_nursery_fragments().
 * We don't keep track of section fragments for non-nursery sections yet, so
 * just memset to 0.
 */
static void
build_section_fragments (GCMemSection *section)
{
	int i;
	char *frag_start, *frag_end;
	size_t frag_size;

	/* clear scan starts */
	memset (section->scan_starts, 0, section->num_scan_start * sizeof (gpointer));
	frag_start = section->data;
	section->next_data = section->data;
	for (i = 0; i < section->pin_queue_num_entries; ++i) {
		frag_end = section->pin_queue_start [i];
		/* remove the pin bit from pinned objects */
		SGEN_UNPIN_OBJECT (frag_end);
		if (frag_end >= section->data + section->size) {
			frag_end = section->data + section->size;
		} else {
			section->scan_starts [((char*)frag_end - (char*)section->data)/SGEN_SCAN_START_SIZE] = frag_end;
		}
		frag_size = frag_end - frag_start;
		if (frag_size) {
			binary_protocol_empty (frag_start, frag_size);
			memset (frag_start, 0, frag_size);
		}
		frag_size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)section->pin_queue_start [i]));
		frag_start = (char*)section->pin_queue_start [i] + frag_size;
		section->next_data = MAX (section->next_data, frag_start);
	}
	frag_end = section->end_data;
	frag_size = frag_end - frag_start;
	if (frag_size) {
		binary_protocol_empty (frag_start, frag_size);
		memset (frag_start, 0, frag_size);
	}
}
Example #15
mword
sgen_build_nursery_fragments (GCMemSection *nursery_section, SgenGrayQueue *unpin_queue)
{
	char *frag_start, *frag_end;
	size_t frag_size;
	SgenFragment *frags_ranges;
	void **pin_start, **pin_entry, **pin_end;

#ifdef NALLOC_DEBUG
	reset_alloc_records ();
#endif
	/*The mutator fragments are done. We no longer need them. */
	sgen_fragment_allocator_release (&mutator_allocator);

	frag_start = sgen_nursery_start;
	fragment_total = 0;

	/* The current nursery might give us a fragment list to exclude [start, next) */
	frags_ranges = sgen_minor_collector.build_fragments_get_exclude_head ();

	/* clear scan starts */
	memset (nursery_section->scan_starts, 0, nursery_section->num_scan_start * sizeof (gpointer));

	pin_start = pin_entry = sgen_pinning_get_entry (nursery_section->pin_queue_first_entry);
	pin_end = sgen_pinning_get_entry (nursery_section->pin_queue_last_entry);

	while (pin_entry < pin_end || frags_ranges) {
		char *addr0, *addr1;
		size_t size;

		addr0 = addr1 = sgen_nursery_end;
		if (pin_entry < pin_end)
			addr0 = (char *)*pin_entry;
		if (frags_ranges)
			addr1 = frags_ranges->fragment_start;

		if (addr0 < addr1) {
			if (unpin_queue)
				GRAY_OBJECT_ENQUEUE (unpin_queue, (GCObject*)addr0, sgen_obj_get_descriptor_safe ((GCObject*)addr0));
			else
				SGEN_UNPIN_OBJECT (addr0);
			size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((GCObject*)addr0));
			CANARIFY_SIZE (size);
			sgen_set_nursery_scan_start (addr0);
			frag_end = addr0;
			++pin_entry;
		} else {
			frag_end = addr1;
			size = frags_ranges->fragment_next - addr1;
			frags_ranges = frags_ranges->next_in_order;
		}

		frag_size = frag_end - frag_start;

		if (size == 0)
			continue;

		g_assert (frag_size >= 0);
		g_assert (size > 0);
		if (frag_size && size)
			add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);	

		frag_size = size;
#ifdef NALLOC_DEBUG
		add_alloc_record (frag_end, frag_size, PINNING);	/* frag_end marks the range just skipped; pin_entry has already advanced */
#endif
		frag_start = frag_end + frag_size;
	}

	nursery_last_pinned_end = frag_start;
	frag_end = sgen_nursery_end;
	frag_size = frag_end - frag_start;
	if (frag_size)
		add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);

	/* Now it's safe to release the fragments exclude list. */
	sgen_minor_collector.build_fragments_release_exclude_head ();

	/* First we reorder the fragment list to be in ascending address order. This makes H/W prefetchers happier. */
	fragment_list_reverse (&mutator_allocator);

	/*The collector might want to do something with the final nursery fragment list.*/
	sgen_minor_collector.build_fragments_finish (&mutator_allocator);

	if (!unmask (mutator_allocator.alloc_head)) {
		SGEN_LOG (1, "Nursery fully pinned");
		for (pin_entry = pin_start; pin_entry < pin_end; ++pin_entry) {
			GCObject *p = (GCObject *)*pin_entry;
			SGEN_LOG (3, "Bastard pinning obj %p (%s), size: %zd", p, sgen_client_vtable_get_name (SGEN_LOAD_VTABLE (p)), sgen_safe_object_get_size (p));
		}
	}
	return fragment_total;
}
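fragment_list_reverse is not shown. A sketch of the standard in-place list reversal it suggests, ignoring the masking the allocator applies to its next links:

static void
my_fragment_list_reverse (SgenFragmentAllocator *allocator)
{
	SgenFragment *prev = NULL, *cur = allocator->alloc_head;

	while (cur) {
		SgenFragment *next = cur->next;
		cur->next = prev;
		prev = cur;
		cur = next;
	}
	allocator->alloc_head = prev;
}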
Example #16
static void
major_copy_or_mark_object (void **ptr, SgenGrayQueue *queue)
{
	void *obj = *ptr;
	MSBlockInfo *block;

	HEAVY_STAT (++stat_copy_object_called_major);

	DEBUG (9, g_assert (obj));
	DEBUG (9, g_assert (current_collection_generation == GENERATION_OLD));

	if (ptr_in_nursery (obj)) {
		int word, bit;
		char *forwarded, *old_obj;

		if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
			*ptr = forwarded;
			return;
		}
		if (SGEN_OBJECT_IS_PINNED (obj))
			return;

		HEAVY_STAT (++stat_objects_copied_major);

	do_copy_object:
		old_obj = obj;
		obj = copy_object_no_checks (obj, queue);
		if (G_UNLIKELY (old_obj == obj)) {
			/* If we fail to evacuate an object we just stop trying for its block size, as all others will surely fail too. */
			if (!ptr_in_nursery (obj)) {
				int size_index;
				block = MS_BLOCK_FOR_OBJ (obj);
				size_index = block->obj_size_index;
				evacuate_block_obj_sizes [size_index] = FALSE;
				MS_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
			}
			return;
		}
		*ptr = obj;

		/*
		 * FIXME: See comment for copy_object_no_checks().  If
		 * we have that, we can let the allocation function
		 * give us the block info, too, and we won't have to
		 * re-fetch it.
		 */
		block = MS_BLOCK_FOR_OBJ (obj);
		MS_CALC_MARK_BIT (word, bit, obj);
		DEBUG (9, g_assert (!MS_MARK_BIT (block, word, bit)));
		MS_SET_MARK_BIT (block, word, bit);
	} else {
		char *forwarded;
#ifndef FIXED_HEAP
		mword objsize;
#endif

		if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
			*ptr = forwarded;
			return;
		}

#ifdef FIXED_HEAP
		if (MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
#else
		objsize = SGEN_ALIGN_UP (mono_sgen_safe_object_get_size ((MonoObject*)obj));

		if (objsize <= SGEN_MAX_SMALL_OBJ_SIZE)
#endif
		{
			int size_index;

			block = MS_BLOCK_FOR_OBJ (obj);
			size_index = block->obj_size_index;

			if (!block->has_pinned && evacuate_block_obj_sizes [size_index]) {
				if (block->is_to_space)
					return;
				HEAVY_STAT (++stat_major_objects_evacuated);
				goto do_copy_object;
			} else {
				MS_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
			}
		} else {
			if (SGEN_OBJECT_IS_PINNED (obj))
				return;
			binary_protocol_pin (obj, (gpointer)SGEN_LOAD_VTABLE (obj), mono_sgen_safe_object_get_size ((MonoObject*)obj));
			SGEN_PIN_OBJECT (obj);
			/* FIXME: only enqueue if object has references */
			GRAY_OBJECT_ENQUEUE (queue, obj);
		}
	}
}
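The evacuate_block_obj_sizes table consulted above has to be populated between collections. A hypothetical policy sketch, assuming per-size-class occupancy statistics are gathered during sweep; the threshold and names are illustrative only.

static void
my_update_evacuation_plan (int num_block_obj_sizes, float *occupancy)
{
	int i;

	for (i = 0; i < num_block_obj_sizes; ++i) {
		/* Sparsely populated size classes are worth compacting. */
		evacuate_block_obj_sizes [i] = occupancy [i] < 0.666f;
	}
}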
Example #17
mword
sgen_build_nursery_fragments (GCMemSection *nursery_section, void **start, size_t num_entries, SgenGrayQueue *unpin_queue)
{
	char *frag_start, *frag_end;
	size_t frag_size;
	size_t i = 0;
	SgenFragment *frags_ranges;

#ifdef NALLOC_DEBUG
	reset_alloc_records ();
#endif
	/*The mutator fragments are done. We no longer need them. */
	sgen_fragment_allocator_release (&mutator_allocator);

	frag_start = sgen_nursery_start;
	fragment_total = 0;

	/* The current nursery might give us a fragment list to exclude [start, next) */
	frags_ranges = sgen_minor_collector.build_fragments_get_exclude_head ();

	/* clear scan starts */
	memset (nursery_section->scan_starts, 0, nursery_section->num_scan_start * sizeof (gpointer));

	while (i < num_entries || frags_ranges) {
		char *addr0, *addr1;
		size_t size;
		SgenFragment *last_frag = NULL;

		addr0 = addr1 = sgen_nursery_end;
		if (i < num_entries)
			addr0 = start [i];
		if (frags_ranges)
			addr1 = frags_ranges->fragment_start;

		if (addr0 < addr1) {
			if (unpin_queue)
				GRAY_OBJECT_ENQUEUE (unpin_queue, addr0);
			else
				SGEN_UNPIN_OBJECT (addr0);
			sgen_set_nursery_scan_start (addr0);
			frag_end = addr0;
			size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)addr0));
			++i;
		} else {
			frag_end = addr1;
			size = frags_ranges->fragment_next - addr1;
			last_frag = frags_ranges;
			frags_ranges = frags_ranges->next_in_order;
		}

		frag_size = frag_end - frag_start;

		if (size == 0)
			continue;

		g_assert (frag_size >= 0);
		g_assert (size > 0);
		if (frag_size && size)
			add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);	

		frag_size = size;
#ifdef NALLOC_DEBUG
		add_alloc_record (frag_end, frag_size, PINNING);	/* frag_end marks the range just skipped; i has already advanced */
#endif
		frag_start = frag_end + frag_size;
	}

	nursery_last_pinned_end = frag_start;
	frag_end = sgen_nursery_end;
	frag_size = frag_end - frag_start;
	if (frag_size)
		add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);

	/* Now it's safe to release the fragments exclude list. */
	sgen_minor_collector.build_fragments_release_exclude_head ();

	/* First we reorder the fragment list to be in ascending address order. This makes H/W prefetchers happier. */
	fragment_list_reverse (&mutator_allocator);

	/*The collector might want to do something with the final nursery fragment list.*/
	sgen_minor_collector.build_fragments_finish (&mutator_allocator);

	if (!unmask (mutator_allocator.alloc_head)) {
		SGEN_LOG (1, "Nursery fully pinned (%zd)", num_entries);
		for (i = 0; i < num_entries; ++i) {
			SGEN_LOG (3, "Bastard pinning obj %p (%s), size: %zd", start [i], sgen_safe_name (start [i]), sgen_safe_object_get_size (start [i]));
		}
	}
	return fragment_total;
}
Example #18
static void
major_copy_or_mark_object (void **ptr, SgenGrayQueue *queue)
{
	void *obj = *ptr;
	mword vtable_word = *(mword*)obj;
	MonoVTable *vt = (MonoVTable*)(vtable_word & ~SGEN_VTABLE_BITS_MASK);
	mword objsize;
	MSBlockInfo *block;

	HEAVY_STAT (++stat_copy_object_called_major);

	DEBUG (9, g_assert (obj));
	DEBUG (9, g_assert (current_collection_generation == GENERATION_OLD));

	if (ptr_in_nursery (obj)) {
		int word, bit;
		gboolean has_references;
		void *destination;

		if (vtable_word & SGEN_FORWARDED_BIT) {
			*ptr = (void*)vt;
			return;
		}

		if (vtable_word & SGEN_PINNED_BIT)
			return;

		HEAVY_STAT (++stat_objects_copied_major);

		objsize = SGEN_ALIGN_UP (mono_sgen_par_object_get_size (vt, (MonoObject*)obj));
		has_references = SGEN_VTABLE_HAS_REFERENCES (vt);

		destination = major_alloc_object (objsize, has_references);

		if (SGEN_CAS_PTR (obj, (void*)((mword)destination | SGEN_FORWARDED_BIT), vt) == vt) {
			gboolean was_marked;

			par_copy_object_no_checks (destination, vt, obj, objsize, has_references ? queue : NULL);
			obj = destination;
			*ptr = obj;

			/*
			 * FIXME: If we make major_alloc_object() give
			 * us the block info, too, we won't have to
			 * re-fetch it here.
			 */
			block = MS_BLOCK_FOR_OBJ (obj);
			MS_CALC_MARK_BIT (word, bit, obj);
			DEBUG (9, g_assert (!MS_MARK_BIT (block, word, bit)));
			MS_PAR_SET_MARK_BIT (was_marked, block, word, bit);
		} else {
			/*
			 * FIXME: We have allocated destination, but
			 * we cannot use it.  Give it back to the
			 * allocator.
			 */
			*(void**)destination = NULL;

			vtable_word = *(mword*)obj;
			g_assert (vtable_word & SGEN_FORWARDED_BIT);

			obj = (void*)(vtable_word & ~SGEN_VTABLE_BITS_MASK);

			*ptr = obj;
		}
	} else {
#ifdef FIXED_HEAP
		if (MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
#else
		objsize = SGEN_ALIGN_UP (mono_sgen_par_object_get_size (vt, (MonoObject*)obj));

		if (objsize <= SGEN_MAX_SMALL_OBJ_SIZE)
#endif
		{
			block = MS_BLOCK_FOR_OBJ (obj);
			MS_PAR_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
		} else {
			if (vtable_word & SGEN_PINNED_BIT)
				return;
			binary_protocol_pin (obj, vt, mono_sgen_safe_object_get_size ((MonoObject*)obj));
			if (SGEN_CAS_PTR (obj, (void*)(vtable_word | SGEN_PINNED_BIT), (void*)vtable_word) == (void*)vtable_word) {
				if (SGEN_VTABLE_HAS_REFERENCES (vt))
					GRAY_OBJECT_ENQUEUE (queue, obj);
			} else {
				g_assert (SGEN_OBJECT_IS_PINNED (obj));
			}
		}
	}
}
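This parallel variant manipulates the vtable word directly, which also shows what SGEN_OBJECT_IS_FORWARDED amounts to. A sketch of the decode, assuming the bit layout used above:

static inline void*
my_object_forwarded_to (void *obj)
{
	mword vtable_word = *(mword*)obj;

	if (vtable_word & SGEN_FORWARDED_BIT)
		return (void*)(vtable_word & ~SGEN_VTABLE_BITS_MASK);
	return NULL;	/* not forwarded */
}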
Example #19
static void
major_copy_or_mark_object (void **obj_slot, SgenGrayQueue *queue)
{
	char *forwarded;
	char *obj = *obj_slot;
	mword objsize;

	DEBUG (9, g_assert (current_collection_generation == GENERATION_OLD));

	HEAVY_STAT (++stat_copy_object_called_major);

	DEBUG (9, fprintf (gc_debug_file, "Precise copy of %p from %p", obj, obj_slot));

	/*
	 * obj must belong to one of:
	 *
	 * 1. the nursery
	 * 2. the LOS
	 * 3. a pinned chunk
	 * 4. a non-to-space section of the major heap
	 * 5. a to-space section of the major heap
	 *
	 * In addition, objects in 1, 2 and 4 might also be pinned.
	 * Objects in 1 and 4 might be forwarded.
	 *
	 * Before we can copy the object we must make sure that we are
	 * allowed to, i.e. that the object is not pinned, not already
	 * forwarded, not in the nursery to-space, and doesn't belong
	 * to the LOS, a pinned chunk, or a to-space section.
	 *
	 * We are usually called for to-space objects (5) when we have
	 * two remset entries for the same reference.  The first entry
	 * copies the object and updates the reference and the second
	 * calls us with the updated reference that points into
	 * to-space.  There might also be other circumstances where we
	 * get to-space objects.
	 */

	if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
		DEBUG (9, g_assert (((MonoVTable*)SGEN_LOAD_VTABLE(obj))->gc_descr));
		DEBUG (9, fprintf (gc_debug_file, " (already forwarded to %p)\n", forwarded));
		HEAVY_STAT (++stat_major_copy_object_failed_forwarded);
		*obj_slot = forwarded;
		return;
	}
	if (SGEN_OBJECT_IS_PINNED (obj)) {
		DEBUG (9, g_assert (((MonoVTable*)SGEN_LOAD_VTABLE(obj))->gc_descr));
		DEBUG (9, fprintf (gc_debug_file, " (pinned, no change)\n"));
		HEAVY_STAT (++stat_major_copy_object_failed_pinned);
		return;
	}

	if (ptr_in_nursery (obj)) {
		/* A To Space object is already on its final destination for the current collection. */
		if (sgen_nursery_is_to_space (obj))
			return;
		goto copy;
	}

	/*
	 * At this point we know obj is not pinned, not forwarded and
	 * belongs to 2, 3, 4, or 5.
	 *
	 * LOS objects (2) are simple, as long as we always follow
	 * the rule: if objsize > SGEN_MAX_SMALL_OBJ_SIZE, pin the
	 * object and return it.  At the end of major collections, we
	 * walk the los list and if the object is pinned, it is
	 * marked, otherwise it can be freed.
	 *
	 * Pinned chunks (3) and major heap sections (4, 5) both
	 * reside in blocks, which are always aligned, so once we've
	 * eliminated LOS objects, we can just access the block and
	 * see whether it's a pinned chunk or a major heap section.
	 */

	objsize = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)obj));

	if (G_UNLIKELY (objsize > SGEN_MAX_SMALL_OBJ_SIZE || obj_is_from_pinned_alloc (obj))) {
		if (SGEN_OBJECT_IS_PINNED (obj))
			return;
		DEBUG (9, fprintf (gc_debug_file, " (marked LOS/Pinned %p (%s), size: %td)\n", obj, sgen_safe_name (obj), objsize));
		binary_protocol_pin (obj, (gpointer)SGEN_LOAD_VTABLE (obj), sgen_safe_object_get_size ((MonoObject*)obj));
		SGEN_PIN_OBJECT (obj);
		GRAY_OBJECT_ENQUEUE (queue, obj);
		HEAVY_STAT (++stat_major_copy_object_failed_large_pinned);
		return;
	}

	/*
	 * Now we know the object is in a major heap section.  All we
	 * need to do is check whether it's already in to-space (5) or
	 * not (4).
	 */
	if (MAJOR_OBJ_IS_IN_TO_SPACE (obj)) {
		DEBUG (9, g_assert (objsize <= SGEN_MAX_SMALL_OBJ_SIZE));
		DEBUG (9, fprintf (gc_debug_file, " (already copied)\n"));
		HEAVY_STAT (++stat_major_copy_object_failed_to_space);
		return;
	}

 copy:
	HEAVY_STAT (++stat_objects_copied_major);

	*obj_slot = copy_object_no_checks (obj, queue);
}