/*
 * Reserve one contiguous, aligned OS region holding the nursery followed by
 * the fixed-size major heap, and chain all major-heap block descriptors into
 * a single free list.
 *
 * Initializes the module globals nursery_start, nursery_end, nursery_bits,
 * ms_heap_end, block_infos and empty_blocks.
 *
 * Returns the start of the nursery.
 */
static void*
major_alloc_heap (mword nursery_size, mword nursery_align, int the_nursery_bits)
{
	char *heap_start;
	mword major_heap_size = ms_heap_num_blocks * MS_BLOCK_SIZE;
	mword alloc_size = nursery_size + major_heap_size;
	int i;

	g_assert (ms_heap_num_blocks > 0);
	/* Both regions must be made of whole MS_BLOCK_SIZE blocks. */
	g_assert (nursery_size % MS_BLOCK_SIZE == 0);
	if (nursery_align)
		g_assert (nursery_align % MS_BLOCK_SIZE == 0);

	/*
	 * One OS allocation covers both the nursery and the major heap; if no
	 * explicit nursery alignment was requested we fall back to block
	 * alignment so the major-heap blocks are block-aligned too.
	 */
	nursery_start = mono_sgen_alloc_os_memory_aligned (alloc_size, nursery_align ? nursery_align : MS_BLOCK_SIZE, TRUE);
	/* The major heap begins immediately after the nursery. */
	nursery_end = heap_start = nursery_start + nursery_size;
	nursery_bits = the_nursery_bits;

	ms_heap_end = heap_start + major_heap_size;

	block_infos = mono_sgen_alloc_internal_dynamic (sizeof (MSBlockInfo) * ms_heap_num_blocks, INTERNAL_MEM_MS_BLOCK_INFO);

	/* Point each descriptor at its block and link them all into a free list. */
	for (i = 0; i < ms_heap_num_blocks; ++i) {
		block_infos [i].block = heap_start + i * MS_BLOCK_SIZE;
		if (i < ms_heap_num_blocks - 1)
			block_infos [i].next_free = &block_infos [i + 1];
		else
			block_infos [i].next_free = NULL;
	}

	empty_blocks = &block_infos [0];

	return nursery_start;
}
/*
 * Allocate a new section of memory to be used as old generation.
 *
 * The section is a single MAJOR_SECTION_SIZE allocation aligned to its own
 * size; the GCMemSection header sits at the start and object data follows
 * it.  The new section is prepended to section_list and num_major_sections
 * is incremented.
 */
static GCMemSection*
alloc_major_section (void)
{
	GCMemSection *section;
	int scan_starts;

	section = mono_sgen_alloc_os_memory_aligned (MAJOR_SECTION_SIZE, MAJOR_SECTION_SIZE, TRUE);
	/* Object data starts right after the section header. */
	section->next_data = section->data = (char*)section + SGEN_SIZEOF_GC_MEM_SECTION;
	/* Object data must be 8-byte aligned. */
	g_assert (!((mword)section->data & 7));
	section->size = MAJOR_SECTION_SIZE - SGEN_SIZEOF_GC_MEM_SECTION;
	section->end_data = section->data + section->size;
	mono_sgen_update_heap_boundaries ((mword)section->data, (mword)section->end_data);
	DEBUG (3, fprintf (gc_debug_file, "New major heap section: (%p-%p), total: %ld\n", section->data, section->end_data, mono_gc_get_heap_size ()));
	/* One scan-start slot per SGEN_SCAN_START_SIZE chunk of data, rounded up. */
	scan_starts = (section->size + SGEN_SCAN_START_SIZE - 1) / SGEN_SCAN_START_SIZE;
	section->scan_starts = mono_sgen_alloc_internal_dynamic (sizeof (char*) * scan_starts, INTERNAL_MEM_SCAN_STARTS);
	section->num_scan_start = scan_starts;
	section->block.role = MEMORY_ROLE_GEN1;
	section->is_to_space = TRUE;

	/* add to the section list */
	section->block.next = section_list;
	section_list = section;

	++num_major_sections;

	return section;
}
/*
 * Resize the disappearing-link hash table to a prime close to its current
 * entry count and redistribute every link into the new buckets.
 *
 * LOCKING: assumes the GC lock is held.
 */
static void
rehash_dislink (DisappearingLinkHashTable *hash_table)
{
	DisappearingLink **old_table = hash_table->table;
	int old_size = hash_table->size;
	int new_size = g_spaced_primes_closest (hash_table->num_links);
	DisappearingLink **new_table;
	DisappearingLink *link, *following;
	unsigned int bucket;
	int i;

	new_table = mono_sgen_alloc_internal_dynamic (new_size * sizeof (DisappearingLink*), INTERNAL_MEM_DISLINK_TABLE);

	/* Re-bucket every chained entry, prepending it to its new chain. */
	for (i = 0; i < old_size; ++i) {
		link = old_table [i];
		while (link) {
			following = link->next;
			bucket = mono_aligned_addr_hash (link->link) % new_size;
			link->next = new_table [bucket];
			new_table [bucket] = link;
			link = following;
		}
	}

	mono_sgen_free_internal_dynamic (old_table, old_size * sizeof (DisappearingLink*), INTERNAL_MEM_DISLINK_TABLE);

	hash_table->table = new_table;
	hash_table->size = new_size;
}
/*
 * Allocate a remembered set with room for SIZE gpointer entries; ID is only
 * used for debug output, GLOBAL only selects the debug message wording.
 *
 * FIXME: later choose a size that takes into account the RememberedSet
 * struct and doesn't waste any alloc padding space.
 */
static RememberedSet*
mono_sgen_alloc_remset (int size, gpointer id, gboolean global)
{
	mword alloc_size = sizeof (RememberedSet) + size * sizeof (gpointer);
	RememberedSet *remset = mono_sgen_alloc_internal_dynamic (alloc_size, INTERNAL_MEM_REMSET);

	remset->next = NULL;
	remset->store_next = remset->data;
	remset->end_set = remset->data + size;
	DEBUG (4, fprintf (gc_debug_file, "Allocated%s remset size %d at %p for %p\n", global ? " global" : "", size, remset->data, id));
	return remset;
}
static void realloc_pin_queue (void) { int new_size = pin_queue_size? pin_queue_size + pin_queue_size/2: 1024; void **new_pin = mono_sgen_alloc_internal_dynamic (sizeof (void*) * new_size, INTERNAL_MEM_PIN_QUEUE); memcpy (new_pin, pin_queue, sizeof (void*) * next_pin_slot); mono_sgen_free_internal_dynamic (pin_queue, sizeof (void*) * pin_queue_size, INTERNAL_MEM_PIN_QUEUE); pin_queue = new_pin; pin_queue_size = new_size; DEBUG (4, fprintf (gc_debug_file, "Reallocated pin queue to size: %d\n", new_size)); }
/*
 * Gather statistics over the remembered sets of every live thread, of freed
 * threads and of the global remset: adds the total number of stored entries
 * to stat_store_remsets and the number of distinct addresses to
 * stat_store_remsets_unique.
 */
static void
remset_stats (void)
{
	RememberedSet *remset;
	int size = 0;
	SgenThreadInfo *info;
	mword *addresses, *bumper, *p, *r;

	/* First pass: compute an upper bound on the total number of entries. */
	FOREACH_THREAD (info) {
		for (remset = info->remset; remset; remset = remset->next)
			size += remset->store_next - remset->data;
	} END_FOREACH_THREAD
	for (remset = freed_thread_remsets; remset; remset = remset->next)
		size += remset->store_next - remset->data;
	for (remset = global_remset; remset; remset = remset->next)
		size += remset->store_next - remset->data;

	bumper = addresses = mono_sgen_alloc_internal_dynamic (sizeof (mword) * size, INTERNAL_MEM_STATISTICS);

	/* Second pass: copy all addresses into the buffer via a bump pointer. */
	FOREACH_THREAD (info) {
		for (remset = info->remset; remset; remset = remset->next)
			bumper = collect_store_remsets (remset, bumper);
	} END_FOREACH_THREAD
	for (remset = global_remset; remset; remset = remset->next)
		bumper = collect_store_remsets (remset, bumper);
	for (remset = freed_thread_remsets; remset; remset = remset->next)
		bumper = collect_store_remsets (remset, bumper);

	g_assert (bumper <= addresses + size);

	stat_store_remsets += bumper - addresses;

	/* Sort, then compact duplicates in place to count unique addresses. */
	mono_sgen_sort_addresses ((void**)addresses, bumper - addresses);
	p = addresses;
	r = addresses + 1;
	while (r < bumper) {
		if (*r != *p)
			*++p = *r;
		++r;
	}

	/*
	 * NOTE(review): after the compaction loop, p points AT the last unique
	 * element (not one past it), so a non-empty buffer with k unique
	 * addresses adds k-1 here; an empty buffer adds 0.  Looks like an
	 * off-by-one in the unique count — confirm whether this is intended.
	 */
	stat_store_remsets_unique += p - addresses;

	mono_sgen_free_internal_dynamic (addresses, sizeof (mword) * size, INTERNAL_MEM_STATISTICS);
}
/*
 * Initialize the parallel-collection worker infrastructure: per-worker data,
 * the shared "distribute" gray queue, semaphores, the job queue lock and the
 * worker-related counters.  A no-op when the major collector is not parallel.
 */
static void
workers_init (int num_workers)
{
	int i;

	if (!major_collector.is_parallel)
		return;

	//g_print ("initing %d workers\n", num_workers);

	workers_num = num_workers;

	workers_data = mono_sgen_alloc_internal_dynamic (sizeof (WorkerData) * num_workers, INTERNAL_MEM_WORKER_DATA);
	memset (workers_data, 0, sizeof (WorkerData) * num_workers);

	MONO_SEM_INIT (&workers_waiting_sem, 0);
	MONO_SEM_INIT (&workers_done_sem, 0);

	/*
	 * The distribute queue hands filled gray-queue sections over to the
	 * workers via the share-redirect callback.
	 */
	gray_object_queue_init_with_alloc_prepare (&workers_distribute_gray_queue, workers_gray_queue_share_redirect, &workers_gc_thread_data);
	pthread_mutex_init (&workers_gc_thread_data.stealable_stack_mutex, NULL);
	workers_gc_thread_data.stealable_stack_fill = 0;
	if (major_collector.alloc_worker_data)
		workers_gc_thread_data.major_collector_data = major_collector.alloc_worker_data ();

	for (i = 0; i < workers_num; ++i) {
		/* private gray queue is inited by the thread itself */
		pthread_mutex_init (&workers_data [i].stealable_stack_mutex, NULL);
		workers_data [i].stealable_stack_fill = 0;
		if (major_collector.alloc_worker_data)
			workers_data [i].major_collector_data = major_collector.alloc_worker_data ();
	}

	LOCK_INIT (workers_job_queue_mutex);

	mono_sgen_register_fixed_internal_mem_type (INTERNAL_MEM_JOB_QUEUE_ENTRY, sizeof (JobQueueEntry));

	mono_counters_register ("Stolen from self lock", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_workers_stolen_from_self_lock);
	mono_counters_register ("Stolen from self no lock", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_workers_stolen_from_self_no_lock);
	mono_counters_register ("Stolen from others", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_workers_stolen_from_others);
	mono_counters_register ("# workers waited", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_workers_num_waited);
}
/*
 * Resize the finalizable-object hash table to a prime close to its number of
 * registered entries and redistribute every entry into the new buckets.
 *
 * LOCKING: requires that the GC lock is held.
 */
static void
rehash_fin_table (FinalizeEntryHashTable *hash_table)
{
	FinalizeEntry **old_table = hash_table->table;
	mword old_size = hash_table->size;
	int new_size = g_spaced_primes_closest (hash_table->num_registered);
	FinalizeEntry **new_table;
	FinalizeEntry *cur, *following;
	unsigned int bucket;
	int i;

	new_table = mono_sgen_alloc_internal_dynamic (new_size * sizeof (FinalizeEntry*), INTERNAL_MEM_FIN_TABLE);

	/* Re-bucket every chained entry, prepending it to its new chain. */
	for (i = 0; i < old_size; ++i) {
		cur = old_table [i];
		while (cur) {
			following = cur->next;
			bucket = mono_object_hash (cur->object) % new_size;
			cur->next = new_table [bucket];
			new_table [bucket] = cur;
			cur = following;
		}
	}

	mono_sgen_free_internal_dynamic (old_table, old_size * sizeof (FinalizeEntry*), INTERNAL_MEM_FIN_TABLE);

	hash_table->table = new_table;
	hash_table->size = new_size;
}
/*
 * Initialize the mark&sweep major collector and fill in the
 * SgenMajorCollector vtable.  The exported entry-point name depends on the
 * build configuration (parallel mark and/or fixed heap).
 */
void
#ifdef SGEN_PARALLEL_MARK
#ifdef FIXED_HEAP
mono_sgen_marksweep_fixed_par_init
#else
mono_sgen_marksweep_par_init
#endif
#else
#ifdef FIXED_HEAP
mono_sgen_marksweep_fixed_init
#else
mono_sgen_marksweep_init
#endif
#endif
	(SgenMajorCollector *collector)
{
	int i;

#ifndef FIXED_HEAP
	mono_sgen_register_fixed_internal_mem_type (INTERNAL_MEM_MS_BLOCK_INFO, sizeof (MSBlockInfo));
#endif

	/* First call counts the size classes, second fills the table. */
	num_block_obj_sizes = ms_calculate_block_obj_sizes (MS_BLOCK_OBJ_SIZE_FACTOR, NULL);
	block_obj_sizes = mono_sgen_alloc_internal_dynamic (sizeof (int) * num_block_obj_sizes, INTERNAL_MEM_MS_TABLES);
	ms_calculate_block_obj_sizes (MS_BLOCK_OBJ_SIZE_FACTOR, block_obj_sizes);

	/*
	{
		int i;
		g_print ("block object sizes:\n");
		for (i = 0; i < num_block_obj_sizes; ++i)
			g_print ("%d\n", block_obj_sizes [i]);
	}
	*/

	/* One free-block list per block type, each indexed by size class. */
	for (i = 0; i < MS_BLOCK_TYPE_MAX; ++i)
		free_block_lists [i] = mono_sgen_alloc_internal_dynamic (sizeof (MSBlockInfo*) * num_block_obj_sizes, INTERNAL_MEM_MS_TABLES);

	for (i = 0; i < MS_NUM_FAST_BLOCK_OBJ_SIZE_INDEXES; ++i)
		fast_block_obj_size_indexes [i] = ms_find_block_obj_size_index (i * 8);
	/* Sanity-check the fast index table against the slow lookup. */
	for (i = 0; i < MS_NUM_FAST_BLOCK_OBJ_SIZE_INDEXES * 8; ++i)
		g_assert (MS_BLOCK_OBJ_SIZE_INDEX (i) == ms_find_block_obj_size_index (i));

	LOCK_INIT (ms_block_list_mutex);

	mono_counters_register ("# major blocks allocated", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_blocks_alloced);
	mono_counters_register ("# major blocks freed", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_blocks_freed);

	collector->section_size = MAJOR_SECTION_SIZE;
#ifdef SGEN_PARALLEL_MARK
	collector->is_parallel = TRUE;
#else
	collector->is_parallel = FALSE;
#endif

	/* Fill in the collector vtable. */
	collector->alloc_heap = major_alloc_heap;
	collector->is_object_live = major_is_object_live;
	collector->alloc_small_pinned_obj = major_alloc_small_pinned_obj;
	collector->alloc_degraded = major_alloc_degraded;
	collector->copy_or_mark_object = major_copy_or_mark_object;
	collector->alloc_object = major_alloc_object;
	collector->free_pinned_object = free_pinned_object;
	collector->iterate_objects = major_iterate_objects;
	collector->free_non_pinned_object = major_free_non_pinned_object;
	collector->find_pin_queue_start_ends = major_find_pin_queue_start_ends;
	collector->pin_objects = major_pin_objects;
	collector->init_to_space = major_init_to_space;
	collector->sweep = major_sweep;
	collector->check_scan_starts = major_check_scan_starts;
	collector->dump_heap = major_dump_heap;
	collector->get_used_size = major_get_used_size;
	collector->start_nursery_collection = major_start_nursery_collection;
	collector->finish_nursery_collection = major_finish_nursery_collection;
	collector->finish_major_collection = major_finish_major_collection;
	collector->ptr_is_in_non_pinned_space = major_ptr_is_in_non_pinned_space;
	collector->obj_is_from_pinned_alloc = obj_is_from_pinned_alloc;
	collector->report_pinned_memory_usage = major_report_pinned_memory_usage;
	collector->get_num_major_sections = get_num_major_sections;
	/* GC-param handling only exists for the fixed-heap configuration. */
#ifdef FIXED_HEAP
	collector->handle_gc_param = major_handle_gc_param;
	collector->print_gc_param_usage = major_print_gc_param_usage;
#else
	collector->handle_gc_param = NULL;
	collector->print_gc_param_usage = NULL;
#endif

	FILL_COLLECTOR_COPY_OBJECT (collector);
	FILL_COLLECTOR_SCAN_OBJECT (collector);
}