/*
 * sgen_need_major_collection:
 *
 * Decide whether a major collection should be triggered, given that the
 * mutator wants SPACE_NEEDED more bytes.  Returns TRUE when the heap has
 * grown past the major trigger size or the requested space cannot be
 * satisfied from the remaining budget.
 */
gboolean
sgen_need_major_collection (mword space_needed)
{
	if (sgen_concurrent_collection_in_progress ()) {
		size_t current_heap_size = get_heap_size ();

		if (current_heap_size <= major_collection_trigger_size)
			return FALSE;

		/*
		 * The more the heap grows, the more we need to decrease the allowance above,
		 * in order to have similar trigger sizes as the synchronous collector.
		 * If the heap grows so much that we would need to have a negative allowance,
		 * we force the finishing of the collection, to avoid increased memory usage.
		 */
		return (current_heap_size - major_start_heap_size) > major_start_heap_size * SGEN_DEFAULT_ALLOWANCE_HEAP_SIZE_RATIO;
	}

	/* FIXME: This is a cop-out. We should have some way of figuring this out. */
	if (!major_collector.have_swept ())
		return FALSE;

	/* Not enough free space left to satisfy the request: collect now. */
	if (space_needed > sgen_memgov_available_free_space ())
		return TRUE;

	/* Refresh the minor allowance before re-reading the heap size. */
	sgen_memgov_calculate_minor_collection_allowance ();

	return get_heap_size () > major_collection_trigger_size;
}
/*
 * sgen_need_major_collection:
 *
 * Returns TRUE when a major collection is called for: either SPACE_NEEDED
 * exceeds the available free space, or the memory allocated since the last
 * collection (minor sections plus LOS growth) has exceeded the minor
 * collection allowance.
 */
gboolean
sgen_need_major_collection (mword space_needed)
{
	mword los_growth;
	mword sections_bytes;

	/*
	 * LOS usage may have shrunk below its value at the last collection;
	 * treat that as zero growth instead of letting the unsigned
	 * subtraction wrap around.
	 */
	if (los_memory_usage > last_collection_los_memory_usage)
		los_growth = los_memory_usage - last_collection_los_memory_usage;
	else
		los_growth = 0;

	if (space_needed > sgen_memgov_available_free_space ())
		return TRUE;

	sections_bytes = minor_collection_sections_alloced * major_collector.section_size;
	return sections_bytes + los_growth > minor_collection_allowance;
}
/*
 * sgen_memgov_try_alloc_space:
 *
 * Try to reserve SIZE bytes of heap budget.  On success, atomically bumps
 * the allocated-heap counter, notifies the runtime resource-limit check,
 * and returns TRUE.  Returns FALSE if the budget would be exceeded.
 * NOTE(review): the SPACE parameter is unused here — presumably kept for
 * interface compatibility; confirm against callers.
 */
gboolean
sgen_memgov_try_alloc_space (mword size, int space)
{
	if (sgen_memgov_available_free_space () >= size) {
		SGEN_ATOMIC_ADD_P (allocated_heap, size);
		mono_runtime_resource_check_limit (MONO_RESOURCE_GC_HEAP, allocated_heap);
		return TRUE;
	}

	return FALSE;
}
/*
 * sgen_memgov_try_alloc_space:
 *
 * Try to reserve SIZE bytes of heap budget.  On success, atomically bumps
 * the allocated-heap counter, notifies the client of the new total, and
 * returns TRUE.  Returns FALSE if the budget would be exceeded; in that
 * case it asserts that we are not on a worker thread, since workers are
 * not expected to exhaust the heap.
 * NOTE(review): the SPACE parameter is unused here — presumably kept for
 * interface compatibility; confirm against callers.
 */
gboolean
sgen_memgov_try_alloc_space (mword size, int space)
{
	if (sgen_memgov_available_free_space () >= size) {
		SGEN_ATOMIC_ADD_P (allocated_heap, size);
		sgen_client_total_allocated_heap_changed (allocated_heap);
		return TRUE;
	}

	/* Running out of budget on a worker thread indicates a governor bug. */
	SGEN_ASSERT (4, !sgen_thread_pool_is_thread_pool_thread (mono_native_thread_id_get ()), "Memory shouldn't run out in worker thread");
	return FALSE;
}