Example #1
gboolean
sgen_cement_lookup_or_register (char *obj, gboolean concurrent_cementing)
{
	int i;
	CementHashEntry *hash;

	if (!cement_enabled)
		return FALSE;

	if (concurrent_cementing)
		SGEN_ASSERT (5, cement_concurrent, "Cementing wasn't inited with concurrent flag");

	if (concurrent_cementing)
		hash = cement_hash_concurrent;
	else
		hash = cement_hash;

	/*
	 * We use modulus hashing, which is fine with constants as gcc
	 * can optimize them to multiplication, but with variable
	 * values it would be a bad idea given armv7 has no hardware
	 * for division, making it 20x slower than a multiplication.
	 *
	 * This code path can be quite hot, depending on the workload,
	 * so if we make the hash size user-adjustable we should
	 * figure out something not involving division.
	 */
	i = mono_aligned_addr_hash (obj) % SGEN_CEMENT_HASH_SIZE;

	SGEN_ASSERT (5, sgen_ptr_in_nursery (obj), "Can only cement pointers to nursery objects");

	if (!hash [i].obj) {
		SGEN_ASSERT (5, !hash [i].count, "Cementing hash inconsistent");
		hash [i].obj = obj;
	} else if (hash [i].obj != obj) {
		return FALSE;
	}

	if (hash [i].count >= SGEN_CEMENT_THRESHOLD)
		return TRUE;

	++hash [i].count;
	if (hash [i].count == SGEN_CEMENT_THRESHOLD) {
		if (G_UNLIKELY (MONO_GC_OBJ_CEMENTED_ENABLED())) {
			MonoVTable *vt = (MonoVTable*)SGEN_LOAD_VTABLE (obj);
			MONO_GC_OBJ_CEMENTED ((mword)obj, sgen_safe_object_get_size ((MonoObject*)obj),
					vt->klass->name_space, vt->klass->name);
		}
#ifdef SGEN_BINARY_PROTOCOL
		binary_protocol_cement (obj, (gpointer)SGEN_LOAD_VTABLE (obj),
				sgen_safe_object_get_size ((MonoObject*)obj));
#endif
	}

	return FALSE;
}
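The comment in this version asks for a hashing scheme that avoids division on targets like armv7. The usual answer, and what later versions appear to adopt via the SGEN_CEMENT_HASH macro (see Example #3), is to keep the table size a power of two and mask instead of taking a modulus. A minimal sketch of that idea, using illustrative names rather than the real SGen definitions:

#include <stdint.h>

#define CEMENT_HASH_SIZE 256	/* illustrative; must be a power of two for the mask trick */

static unsigned
aligned_addr_hash (void *addr)
{
	/* Drop the low bits that are always zero due to object alignment. */
	return (unsigned)((uintptr_t)addr >> 3);
}

static int
cement_hash_index (void *addr)
{
	/* x % 2^k == x & (2^k - 1): a single AND instead of a division. */
	return aligned_addr_hash (addr) & (CEMENT_HASH_SIZE - 1);
}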
Example #2
static void
pin_pinned_object_callback (void *addr, size_t slot_size, SgenGrayQueue *queue)
{
	binary_protocol_pin (addr, (gpointer)SGEN_LOAD_VTABLE (addr), sgen_safe_object_get_size ((MonoObject*)addr));
	if (!SGEN_OBJECT_IS_PINNED (addr))
		sgen_pin_stats_register_object ((char*) addr, sgen_safe_object_get_size ((MonoObject*) addr));
	SGEN_PIN_OBJECT (addr);
	GRAY_OBJECT_ENQUEUE (queue, addr);
	DEBUG (6, fprintf (gc_debug_file, "Marked pinned object %p (%s) from roots\n", addr, sgen_safe_name (addr)));
}
Example #3
gboolean
sgen_cement_lookup_or_register (char *obj)
{
	guint hv;
	int i;
	CementHashEntry *hash;
	gboolean concurrent_cementing = sgen_concurrent_collection_in_progress ();

	if (!cement_enabled)
		return FALSE;

	if (concurrent_cementing)
		SGEN_ASSERT (5, cement_concurrent, "Cementing wasn't inited with concurrent flag");

	if (concurrent_cementing)
		hash = cement_hash_concurrent;
	else
		hash = cement_hash;

	hv = mono_aligned_addr_hash (obj);
	i = SGEN_CEMENT_HASH (hv);

	SGEN_ASSERT (5, sgen_ptr_in_nursery (obj), "Can only cement pointers to nursery objects");

	if (!hash [i].obj) {
		SGEN_ASSERT (5, !hash [i].count, "Cementing hash inconsistent");
		hash [i].obj = obj;
	} else if (hash [i].obj != obj) {
		return FALSE;
	}

	if (hash [i].count >= SGEN_CEMENT_THRESHOLD)
		return TRUE;

	++hash [i].count;
	if (hash [i].count == SGEN_CEMENT_THRESHOLD) {
		SGEN_ASSERT (9, SGEN_OBJECT_IS_PINNED (obj), "Can only cement pinned objects");
		SGEN_CEMENT_OBJECT (obj);

		if (G_UNLIKELY (MONO_GC_OBJ_CEMENTED_ENABLED())) {
			MonoVTable *vt G_GNUC_UNUSED = (MonoVTable*)SGEN_LOAD_VTABLE (obj);
			MONO_GC_OBJ_CEMENTED ((mword)obj, sgen_safe_object_get_size ((MonoObject*)obj),
					vt->klass->name_space, vt->klass->name);
		}
		binary_protocol_cement (obj, (gpointer)SGEN_LOAD_VTABLE (obj),
				(int)sgen_safe_object_get_size ((MonoObject*)obj));
	}

	return FALSE;
}
Example #4
/*
 * The pin_queue should be full and sorted, without entries for the cemented
 * objects themselves. We traverse the cement hash and check whether each
 * cemented object is also pinned, i.e. whether the pin_queue holds an entry
 * in [obj, obj + object size).
 */
void
sgen_cement_force_pinned (void)
{
	int i;

	if (!cement_enabled)
		return;

	for (i = 0; i < SGEN_CEMENT_HASH_SIZE; i++) {
		GCObject *obj = cement_hash [i].obj;
		size_t index;
		if (!obj)
			continue;
		if (cement_hash [i].count < SGEN_CEMENT_THRESHOLD)
			continue;
		SGEN_ASSERT (0, !cement_hash [i].forced, "Why do we have a forced cemented object before forcing?");

		/* Returns the index of the target or of the first element greater than it */
		index = sgen_pointer_queue_search (&pin_queue, obj);
		if (index == pin_queue.next_slot)
			continue;
		SGEN_ASSERT (0, pin_queue.data [index] >= (gpointer)obj, "Binary search should return a pointer greater than or equal to the search target");
		if (pin_queue.data [index] < (gpointer)((char*)obj + sgen_safe_object_get_size (obj)))
			cement_hash [i].forced = TRUE;
	}
}
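The comment above documents the contract of sgen_pointer_queue_search: it returns the index of the target, or of the first element greater than it, which is a lower-bound binary search over the sorted pin queue. A minimal sketch under that assumption, with a hypothetical name standing in for the real function:

#include <stddef.h>

/*
 * Hypothetical stand-in for sgen_pointer_queue_search: lower-bound search
 * over an ascending pointer array. Returns the index of `target`, or of the
 * first element greater than it, or `count` if every element is smaller.
 */
static size_t
pointer_array_search (void **data, size_t count, void *target)
{
	size_t lo = 0, hi = count;

	while (lo < hi) {
		size_t mid = lo + (hi - lo) / 2;

		if (data [mid] < target)
			lo = mid + 1;
		else
			hi = mid;
	}
	return lo;	/* callers compare against count, like pin_queue.next_slot above */
}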
Example #5
void
sgen_dump_pin_queue (void)
{
	int i;

	for (i = 0; i < last_num_pinned; ++i) {
		void *ptr = pin_queue.data [i];
		SGEN_LOG (3, "Bastard pinning obj %p (%s), size: %zd", ptr, sgen_safe_name (ptr), sgen_safe_object_get_size (ptr));
	}
}
Example #6
/*
 * Mark a given range of memory as invalid.
 *
 * This can be done either by zeroing memory or by placing
 * a phony byte[] array. This keeps the heap forward walkable.
 *
 * This function ignores calls with a zero range, even if
 * both start and end are NULL.
 */
void
sgen_clear_range (char *start, char *end)
{
	size_t size = end - start;

	if ((start && !end) || (start > end))
		g_error ("Invalid range [%p %p]", start, end);

	if (sgen_client_array_fill_range (start, size)) {
		sgen_set_nursery_scan_start (start);
		SGEN_ASSERT (0, start + sgen_safe_object_get_size ((GCObject*)start) == end, "Array fill produced wrong size");
	}
}
Example #7
static mword*
find_in_remset_loc (mword *p, char *addr, gboolean *found)
{
	void **ptr;
	mword count, desc;
	size_t skip_size;

	switch ((*p) & REMSET_TYPE_MASK) {
	case REMSET_LOCATION:
		if (*p == (mword)addr)
			*found = TRUE;
		return p + 1;
	case REMSET_RANGE:
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		count = p [1];
		if ((void**)addr >= ptr && (void**)addr < ptr + count)
			*found = TRUE;
		return p + 2;
	case REMSET_OBJECT:
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		count = sgen_safe_object_get_size ((MonoObject*)ptr); 
		count = SGEN_ALIGN_UP (count);
		count /= sizeof (mword);
		if ((void**)addr >= ptr && (void**)addr < ptr + count)
			*found = TRUE;
		return p + 1;
	case REMSET_VTYPE:
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		desc = p [1];
		count = p [2];
		skip_size = p [3];

		/* The descriptor includes the size of MonoObject */
		skip_size -= sizeof (MonoObject);
		skip_size *= count;
		if ((void**)addr >= ptr && (void**)addr < ptr + (skip_size / sizeof (gpointer)))
			*found = TRUE;

		return p + 4;
	default:
		g_assert_not_reached ();
	}
	return NULL;
}
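Each case above strips REMSET_TYPE_MASK before dereferencing, because remset entries store a type tag in the low bits left free by pointer alignment. A minimal sketch of that encoding, with an illustrative mask value (the real mask and tag layout live in the SGen headers):

#include <stdint.h>
#include <assert.h>

#define TYPE_MASK 0x3	/* illustrative: low 2 bits are free on 4-byte-aligned pointers */

/* Pack a small type tag into the alignment bits of a pointer. */
static uintptr_t
remset_encode (void *ptr, unsigned type)
{
	assert (((uintptr_t)ptr & TYPE_MASK) == 0);	/* pointer must be aligned */
	assert (type <= TYPE_MASK);
	return (uintptr_t)ptr | type;
}

static void *
remset_ptr (uintptr_t entry)
{
	return (void *)(entry & ~(uintptr_t)TYPE_MASK);
}

static unsigned
remset_type (uintptr_t entry)
{
	return (unsigned)(entry & TYPE_MASK);
}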
Example #8
gboolean
sgen_cement_lookup_or_register (GCObject *obj)
{
	guint hv;
	int i;
	CementHashEntry *hash = cement_hash;

	if (!cement_enabled)
		return FALSE;

	hv = sgen_aligned_addr_hash (obj);
	i = SGEN_CEMENT_HASH (hv);

	SGEN_ASSERT (5, sgen_ptr_in_nursery (obj), "Can only cement pointers to nursery objects");

	if (!hash [i].obj) {
		GCObject *old_obj;
		old_obj = InterlockedCompareExchangePointer ((gpointer*)&hash [i].obj, obj, NULL);
		/* Check if the slot was occupied by some other object */
		if (old_obj != NULL && old_obj != obj)
			return FALSE;
	} else if (hash [i].obj != obj) {
		return FALSE;
	}

	if (hash [i].count >= SGEN_CEMENT_THRESHOLD)
		return TRUE;

	if (InterlockedIncrement ((gint32*)&hash [i].count) == SGEN_CEMENT_THRESHOLD) {
		SGEN_ASSERT (9, sgen_get_current_collection_generation () >= 0, "We can only cement objects when we're in a collection pause.");
		SGEN_ASSERT (9, SGEN_OBJECT_IS_PINNED (obj), "Can only cement pinned objects");
		SGEN_CEMENT_OBJECT (obj);

		binary_protocol_cement (obj, (gpointer)SGEN_LOAD_VTABLE (obj),
				(int)sgen_safe_object_get_size (obj));
	}

	return FALSE;
}
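This version makes registration lock-free: an empty slot is claimed with a compare-and-swap, and the count is bumped with an atomic increment so that exactly one thread observes the crossing of SGEN_CEMENT_THRESHOLD. A standalone sketch of that claim-then-count pattern using C11 atomics (the Interlocked* calls above are the Windows-style equivalents); the names and return convention here are illustrative, not the SGen API:

#include <stdatomic.h>
#include <stdbool.h>

typedef struct {
	_Atomic (void *) obj;
	atomic_int count;
} Slot;

#define THRESHOLD 1000	/* stand-in for SGEN_CEMENT_THRESHOLD */

/* Returns true exactly once per slot: for the increment that hits THRESHOLD. */
static bool
slot_register (Slot *slot, void *obj)
{
	void *expected = NULL;

	/* Claim an empty slot; if another thread already claimed it with a
	 * different object, give up, mirroring the original's early return. */
	if (!atomic_compare_exchange_strong (&slot->obj, &expected, obj)
	    && expected != obj)
		return false;

	/* fetch_add returns the old value, so old + 1 == THRESHOLD means
	 * this thread performed the crossing increment. */
	return atomic_fetch_add (&slot->count, 1) + 1 == THRESHOLD;
}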
Example #9
static gboolean
major_is_object_live (char *obj)
{
	mword objsize;

	/* nursery */
	if (ptr_in_nursery (obj))
		return FALSE;

	objsize = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)obj));

	/* LOS */
	if (objsize > SGEN_MAX_SMALL_OBJ_SIZE)
		return FALSE;

	/* pinned chunk */
	if (obj_is_from_pinned_alloc (obj))
		return FALSE;

	/* now we know it's in a major heap section */
	return MAJOR_SECTION_FOR_OBJECT (obj)->is_to_space;
}
Example #10
/*
 * Mark a given range of memory as invalid.
 *
 * This can be done either by zeroing memory or by placing
 * a phony byte[] array. This keeps the heap forward walkable.
 *
 * This function ignores calls with a zero range, even if
 * both start and end are NULL.
 */
void
sgen_clear_range (char *start, char *end)
{
	MonoArray *o;
	size_t size = end - start;

	if ((start && !end) || (start > end))
		g_error ("Invalid range [%p %p]", start, end);

	if (size < sizeof (MonoArray)) {
		memset (start, 0, size);
		return;
	}

	o = (MonoArray*)start;
	o->obj.vtable = sgen_get_array_fill_vtable ();
	/* Mark this as not a real object */
	o->obj.synchronisation = GINT_TO_POINTER (-1);
	o->bounds = NULL;
	o->max_length = (mono_array_size_t)(size - sizeof (MonoArray));
	sgen_set_nursery_scan_start (start);
	g_assert (start + sgen_safe_object_get_size ((MonoObject*)o) == end);
}
Example #11
/* FIXME: later reduce code duplication here with build_nursery_fragments().
 * We don't keep track of section fragments for non-nursery sections yet, so
 * just memset to 0.
 */
static void
build_section_fragments (GCMemSection *section)
{
	int i;
	char *frag_start, *frag_end;
	size_t frag_size;

	/* clear scan starts */
	memset (section->scan_starts, 0, section->num_scan_start * sizeof (gpointer));
	frag_start = section->data;
	section->next_data = section->data;
	for (i = 0; i < section->pin_queue_num_entries; ++i) {
		frag_end = section->pin_queue_start [i];
		/* remove the pin bit from pinned objects */
		SGEN_UNPIN_OBJECT (frag_end);
		if (frag_end >= section->data + section->size) {
			frag_end = section->data + section->size;
		} else {
			section->scan_starts [((char*)frag_end - (char*)section->data)/SGEN_SCAN_START_SIZE] = frag_end;
		}
		frag_size = frag_end - frag_start;
		if (frag_size) {
			binary_protocol_empty (frag_start, frag_size);
			memset (frag_start, 0, frag_size);
		}
		frag_size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)section->pin_queue_start [i]));
		frag_start = (char*)section->pin_queue_start [i] + frag_size;
		section->next_data = MAX (section->next_data, frag_start);
	}
	frag_end = section->end_data;
	frag_size = frag_end - frag_start;
	if (frag_size) {
		binary_protocol_empty (frag_start, frag_size);
		memset (frag_start, 0, frag_size);
	}
}
Example #12
mword
sgen_build_nursery_fragments (GCMemSection *nursery_section, SgenGrayQueue *unpin_queue)
{
	char *frag_start, *frag_end;
	size_t frag_size;
	SgenFragment *frags_ranges;
	void **pin_start, **pin_entry, **pin_end;

#ifdef NALLOC_DEBUG
	reset_alloc_records ();
#endif
	/*The mutator fragments are done. We no longer need them. */
	sgen_fragment_allocator_release (&mutator_allocator);

	frag_start = sgen_nursery_start;
	fragment_total = 0;

	/* The current nursery might give us a fragment list to exclude [start, next[*/
	frags_ranges = sgen_minor_collector.build_fragments_get_exclude_head ();

	/* clear scan starts */
	memset (nursery_section->scan_starts, 0, nursery_section->num_scan_start * sizeof (gpointer));

	pin_start = pin_entry = sgen_pinning_get_entry (nursery_section->pin_queue_first_entry);
	pin_end = sgen_pinning_get_entry (nursery_section->pin_queue_last_entry);

	while (pin_entry < pin_end || frags_ranges) {
		char *addr0, *addr1;
		size_t size;

		addr0 = addr1 = sgen_nursery_end;
		if (pin_entry < pin_end)
			addr0 = (char *)*pin_entry;
		if (frags_ranges)
			addr1 = frags_ranges->fragment_start;

		if (addr0 < addr1) {
			if (unpin_queue)
				GRAY_OBJECT_ENQUEUE (unpin_queue, (GCObject*)addr0, sgen_obj_get_descriptor_safe ((GCObject*)addr0));
			else
				SGEN_UNPIN_OBJECT (addr0);
			size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((GCObject*)addr0));
			CANARIFY_SIZE (size);
			sgen_set_nursery_scan_start (addr0);
			frag_end = addr0;
			++pin_entry;
		} else {
			frag_end = addr1;
			size = frags_ranges->fragment_next - addr1;
			frags_ranges = frags_ranges->next_in_order;
		}

		frag_size = frag_end - frag_start;

		if (size == 0)
			continue;

		g_assert (frag_size >= 0);
		g_assert (size > 0);
		if (frag_size && size)
			add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);	

		frag_size = size;
#ifdef NALLOC_DEBUG
		add_alloc_record (*pin_entry, frag_size, PINNING);
#endif
		frag_start = frag_end + frag_size;
	}

	nursery_last_pinned_end = frag_start;
	frag_end = sgen_nursery_end;
	frag_size = frag_end - frag_start;
	if (frag_size)
		add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);

	/* Now it's safe to release the fragments exclude list. */
	sgen_minor_collector.build_fragments_release_exclude_head ();

	/* First we reorder the fragment list to be in ascending address order. This makes H/W prefetchers happier. */
	fragment_list_reverse (&mutator_allocator);

	/*The collector might want to do something with the final nursery fragment list.*/
	sgen_minor_collector.build_fragments_finish (&mutator_allocator);

	if (!unmask (mutator_allocator.alloc_head)) {
		SGEN_LOG (1, "Nursery fully pinned");
		for (pin_entry = pin_start; pin_entry < pin_end; ++pin_entry) {
			GCObject *p = (GCObject *)*pin_entry;
			SGEN_LOG (3, "Bastard pinning obj %p (%s), size: %zd", p, sgen_client_vtable_get_name (SGEN_LOAD_VTABLE (p)), sgen_safe_object_get_size (p));
		}
	}
	return fragment_total;
}
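The main loop above is a two-pointer merge over two address-sorted streams, pinned entries and excluded ranges, always consuming whichever starts lower so fragments come out in ascending address order. A stripped-down sketch of that merge shape, with a hypothetical emit callback in place of the fragment bookkeeping:

#include <stddef.h>

/* Sketch: merge two ascending address streams, consuming the lower one each
 * round, the same shape as the pin-entry/exclude-range loop above. */
static void
merge_addresses (char **pins, size_t npins, char **ranges, size_t nranges,
		char *end, void (*emit) (char *addr, int from_pins))
{
	size_t i = 0, j = 0;

	while (i < npins || j < nranges) {
		/* An exhausted stream contributes the end sentinel, so the
		 * other stream drains naturally. */
		char *a = i < npins ? pins [i] : end;
		char *b = j < nranges ? ranges [j] : end;

		if (a < b) {
			emit (a, 1);
			++i;
		} else {
			emit (b, 0);
			++j;
		}
	}
}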
Example #13
static mword*
handle_remset (mword *p, void *start_nursery, void *end_nursery, gboolean global, SgenGrayQueue *queue)
{
	void **ptr;
	mword count;
	mword desc;

	if (global)
		HEAVY_STAT (++stat_global_remsets_processed);
	else
		HEAVY_STAT (++stat_local_remsets_processed);

	/* FIXME: exclude stack locations */
	switch ((*p) & REMSET_TYPE_MASK) {
	case REMSET_LOCATION:
		ptr = (void**)(*p);
		//__builtin_prefetch (ptr);
		if (((void*)ptr < start_nursery || (void*)ptr >= end_nursery)) {
			gpointer old = *ptr;

			sgen_get_current_object_ops ()->copy_or_mark_object (ptr, queue);
			SGEN_LOG (9, "Overwrote remset at %p with %p", ptr, *ptr);
			if (old)
				binary_protocol_ptr_update (ptr, old, *ptr, (gpointer)SGEN_LOAD_VTABLE (*ptr), sgen_safe_object_get_size (*ptr));
			if (!global && *ptr >= start_nursery && *ptr < end_nursery) {
				/*
				 * If the object is pinned, each reference to it from nonpinned objects
				 * becomes part of the global remset, which can grow very large.
				 */
				SGEN_LOG (9, "Add to global remset because of pinning %p (%p %s)", ptr, *ptr, sgen_safe_name (*ptr));
				sgen_add_to_global_remset (ptr);
			}
		} else {
			SGEN_LOG (9, "Skipping remset at %p holding %p", ptr, *ptr);
		}
		return p + 1;
	case REMSET_RANGE: {
		CopyOrMarkObjectFunc copy_func = sgen_get_current_object_ops ()->copy_or_mark_object;

		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		if (((void*)ptr >= start_nursery && (void*)ptr < end_nursery))
			return p + 2;
		count = p [1];
		while (count-- > 0) {
			copy_func (ptr, queue);
			SGEN_LOG (9, "Overwrote remset at %p with %p (count: %d)", ptr, *ptr, (int)count);
			if (!global && *ptr >= start_nursery && *ptr < end_nursery)
				sgen_add_to_global_remset (ptr);
			++ptr;
		}
		return p + 2;
	}
	case REMSET_OBJECT:
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		if (((void*)ptr >= start_nursery && (void*)ptr < end_nursery))
			return p + 1;
		sgen_get_current_object_ops ()->scan_object ((char*)ptr, queue);
		return p + 1;
	case REMSET_VTYPE: {
		size_t skip_size;

		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		if (((void*)ptr >= start_nursery && (void*)ptr < end_nursery))
			return p + 4;
		desc = p [1];
		count = p [2];
		skip_size = p [3];
		while (count-- > 0) {
			sgen_get_current_object_ops ()->scan_vtype ((char*)ptr, desc, queue);
			ptr = (void**)((char*)ptr + skip_size);
		}
		return p + 4;
	}
	default:
		g_assert_not_reached ();
	}
	return NULL;
}
Example #14
static void
major_copy_or_mark_object (void **obj_slot, SgenGrayQueue *queue)
{
	char *forwarded;
	char *obj = *obj_slot;
	mword objsize;

	DEBUG (9, g_assert (current_collection_generation == GENERATION_OLD));

	HEAVY_STAT (++stat_copy_object_called_major);

	DEBUG (9, fprintf (gc_debug_file, "Precise copy of %p from %p", obj, obj_slot));

	/*
	 * obj must belong to one of:
	 *
	 * 1. the nursery
	 * 2. the LOS
	 * 3. a pinned chunk
	 * 4. a non-to-space section of the major heap
	 * 5. a to-space section of the major heap
	 *
	 * In addition, objects in 1, 2 and 4 might also be pinned.
	 * Objects in 1 and 4 might be forwarded.
	 *
	 * Before we can copy the object we must make sure that we are
	 * allowed to, i.e. that the object is not pinned, not already
	 * forwarded, not in the nursery To Space and doesn't belong
	 * to the LOS, a pinned chunk, or a to-space section.
	 *
	 * We are usually called for to-space objects (5) when we have
	 * two remset entries for the same reference.  The first entry
	 * copies the object and updates the reference and the second
	 * calls us with the updated reference that points into
	 * to-space.  There might also be other circumstances where we
	 * get to-space objects.
	 */

	if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
		DEBUG (9, g_assert (((MonoVTable*)SGEN_LOAD_VTABLE(obj))->gc_descr));
		DEBUG (9, fprintf (gc_debug_file, " (already forwarded to %p)\n", forwarded));
		HEAVY_STAT (++stat_major_copy_object_failed_forwarded);
		*obj_slot = forwarded;
		return;
	}
	if (SGEN_OBJECT_IS_PINNED (obj)) {
		DEBUG (9, g_assert (((MonoVTable*)SGEN_LOAD_VTABLE(obj))->gc_descr));
		DEBUG (9, fprintf (gc_debug_file, " (pinned, no change)\n"));
		HEAVY_STAT (++stat_major_copy_object_failed_pinned);
		return;
	}

	if (ptr_in_nursery (obj)) {
		/* A To Space object is already on its final destination for the current collection. */
		if (sgen_nursery_is_to_space (obj))
			return;
		goto copy;
	}

	/*
	 * At this point we know obj is not pinned, not forwarded and
	 * belongs to 2, 3, 4, or 5.
	 *
	 * LOS objects (2) are simple, as long as we always follow
	 * the rule: if objsize > SGEN_MAX_SMALL_OBJ_SIZE, pin the
	 * object and return it.  At the end of major collections, we
	 * walk the los list and if the object is pinned, it is
	 * marked, otherwise it can be freed.
	 *
	 * Pinned chunks (3) and major heap sections (4, 5) both
	 * reside in blocks, which are always aligned, so once we've
	 * eliminated LOS objects, we can just access the block and
	 * see whether it's a pinned chunk or a major heap section.
	 */

	objsize = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)obj));

	if (G_UNLIKELY (objsize > SGEN_MAX_SMALL_OBJ_SIZE || obj_is_from_pinned_alloc (obj))) {
		if (SGEN_OBJECT_IS_PINNED (obj))
			return;
		DEBUG (9, fprintf (gc_debug_file, " (marked LOS/Pinned %p (%s), size: %td)\n", obj, sgen_safe_name (obj), objsize));
		binary_protocol_pin (obj, (gpointer)SGEN_LOAD_VTABLE (obj), sgen_safe_object_get_size ((MonoObject*)obj));
		SGEN_PIN_OBJECT (obj);
		GRAY_OBJECT_ENQUEUE (queue, obj);
		HEAVY_STAT (++stat_major_copy_object_failed_large_pinned);
		return;
	}

	/*
	 * Now we know the object is in a major heap section.  All we
	 * need to do is check whether it's already in to-space (5) or
	 * not (4).
	 */
	if (MAJOR_OBJ_IS_IN_TO_SPACE (obj)) {
		DEBUG (9, g_assert (objsize <= SGEN_MAX_SMALL_OBJ_SIZE));
		DEBUG (9, fprintf (gc_debug_file, " (already copied)\n"));
		HEAVY_STAT (++stat_major_copy_object_failed_to_space);
		return;
	}

 copy:
	HEAVY_STAT (++stat_objects_copied_major);

	*obj_slot = copy_object_no_checks (obj, queue);
}
Example #15
void
sgen_dump_pin_queue (void)
{
	int i;

	for (i = 0; i < last_num_pinned; ++i) {
		SGEN_LOG (3, "Bastard pinning obj %p (%s), size: %zd", pin_queue [i], sgen_safe_name (pin_queue [i]), sgen_safe_object_get_size (pin_queue [i]));
	}
}
Example #16
mword
sgen_build_nursery_fragments (GCMemSection *nursery_section, void **start, size_t num_entries, SgenGrayQueue *unpin_queue)
{
	char *frag_start, *frag_end;
	size_t frag_size;
	size_t i = 0;
	SgenFragment *frags_ranges;

#ifdef NALLOC_DEBUG
	reset_alloc_records ();
#endif
	/*The mutator fragments are done. We no longer need them. */
	sgen_fragment_allocator_release (&mutator_allocator);

	frag_start = sgen_nursery_start;
	fragment_total = 0;

	/* The current nursery might give us a fragment list to exclude [start, next[*/
	frags_ranges = sgen_minor_collector.build_fragments_get_exclude_head ();

	/* clear scan starts */
	memset (nursery_section->scan_starts, 0, nursery_section->num_scan_start * sizeof (gpointer));

	while (i < num_entries || frags_ranges) {
		char *addr0, *addr1;
		size_t size;
		SgenFragment *last_frag = NULL;

		addr0 = addr1 = sgen_nursery_end;
		if (i < num_entries)
			addr0 = start [i];
		if (frags_ranges)
			addr1 = frags_ranges->fragment_start;

		if (addr0 < addr1) {
			if (unpin_queue)
				GRAY_OBJECT_ENQUEUE (unpin_queue, addr0);
			else
				SGEN_UNPIN_OBJECT (addr0);
			sgen_set_nursery_scan_start (addr0);
			frag_end = addr0;
			size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)addr0));
			++i;
		} else {
			frag_end = addr1;
			size = frags_ranges->fragment_next - addr1;
			last_frag = frags_ranges;
			frags_ranges = frags_ranges->next_in_order;
		}

		frag_size = frag_end - frag_start;

		if (size == 0)
			continue;

		g_assert (frag_size >= 0);
		g_assert (size > 0);
		if (frag_size && size)
			add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);	

		frag_size = size;
#ifdef NALLOC_DEBUG
		add_alloc_record (start [i], frag_size, PINNING);
#endif
		frag_start = frag_end + frag_size;
	}

	nursery_last_pinned_end = frag_start;
	frag_end = sgen_nursery_end;
	frag_size = frag_end - frag_start;
	if (frag_size)
		add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);

	/* Now it's safe to release the fragments exclude list. */
	sgen_minor_collector.build_fragments_release_exclude_head ();

	/* First we reorder the fragment list to be in ascending address order. This makes H/W prefetchers happier. */
	fragment_list_reverse (&mutator_allocator);

	/*The collector might want to do something with the final nursery fragment list.*/
	sgen_minor_collector.build_fragments_finish (&mutator_allocator);

	if (!unmask (mutator_allocator.alloc_head)) {
		SGEN_LOG (1, "Nursery fully pinned (%zd)", num_entries);
		for (i = 0; i < num_entries; ++i) {
			SGEN_LOG (3, "Bastard pinning obj %p (%s), size: %zd", start [i], sgen_safe_name (start [i]), sgen_safe_object_get_size (start [i]));
		}
	}
	return fragment_total;
}
Example #17
void
sgen_dump_pin_queue (void)
{
	int i;

	for (i = 0; i < last_num_pinned; ++i) {
		GCObject *ptr = (GCObject *)pin_queue.data [i];
		SGEN_LOG (3, "Bastard pinning obj %p (%s), size: %zd", ptr, sgen_client_vtable_get_name (SGEN_LOAD_VTABLE (ptr)), sgen_safe_object_get_size (ptr));
	}
}