/* used for the GC-internal data structures */
/*
 * Allocate SIZE bytes of pinned memory from ALC.  Requests that fit one of
 * the fixed freelist slots come from the slot allocator; anything larger is
 * served directly from the OS with a LargePinnedMemHeader prepended.
 */
void*
sgen_alloc_pinned (SgenPinnedAllocator *alc, size_t size)
{
	void *result;
	int slot;

	HEAVY_STAT (++stat_pinned_alloc);

	if (size > freelist_sizes [SGEN_PINNED_FREELIST_NUM_SLOTS - 1]) {
		/* Oversized request: raw OS allocation, tagged with a header. */
		LargePinnedMemHeader *header;

		size += sizeof (LargePinnedMemHeader);
		header = sgen_alloc_os_memory (size, TRUE);
		header->magic = LARGE_PINNED_MEM_HEADER_MAGIC;
		header->size = size;
		/* FIXME: do a CAS here */
		large_pinned_bytes_alloced += size;
		return header->data;
	}

	slot = slot_for_size (size);
	g_assert (size <= freelist_sizes [slot]);
	result = alloc_from_slot (alc, slot);
	return result;
}
/*
 * Allocate SIZE bytes of zeroed GC-internal memory.  Small requests come
 * from the per-size lock-free allocators (and are memset to zero here);
 * requests larger than the biggest bucket go straight to the OS.  On
 * failure sgen_assert_memory_alloc is invoked with a description of TYPE.
 * (assumes OS-mapped memory is already zeroed — the OS path does not
 * memset; confirm against sgen_alloc_os_memory.)
 */
void*
sgen_alloc_internal_dynamic (size_t size, int type, gboolean assert_on_failure)
{
	void *mem;

	if (size > allocator_sizes [NUM_ALLOCATORS - 1]) {
		/* Too large for any allocator bucket: map fresh OS memory. */
		mem = sgen_alloc_os_memory (size, (SgenAllocFlags)(SGEN_ALLOC_INTERNAL | SGEN_ALLOC_ACTIVATE), NULL, MONO_MEM_ACCOUNT_SGEN_INTERNAL);
		if (!mem)
			sgen_assert_memory_alloc (NULL, size, description_for_type (type));
	} else {
		int bucket = index_for_size (size);
#ifdef HEAVY_STATISTICS
		++ allocator_sizes_stats [bucket];
#endif
		mem = mono_lock_free_alloc (&allocators [bucket]);
		if (!mem)
			sgen_assert_memory_alloc (NULL, size, description_for_type (type));
		memset (mem, 0, size);
	}

	SGEN_ASSERT (0, !(((mword)mem) & (sizeof(gpointer) - 1)), "Why do we allocate unaligned addresses ?");
	return mem;
}
/*
 * One-time initialization of the nursery allocator: registers the fixed
 * internal-memory size used for SgenFragment records, and (in debug
 * builds) reserves the allocation-record ring.
 */
void
sgen_init_nursery_allocator (void)
{
	sgen_register_fixed_internal_mem_type (INTERNAL_MEM_FRAGMENT, sizeof (SgenFragment));
#ifdef NALLOC_DEBUG
	/* Debug-only trace of nursery allocations. */
	alloc_records = sgen_alloc_os_memory (sizeof (AllocRecord) * ALLOC_RECORD_COUNT, SGEN_ALLOC_INTERNAL | SGEN_ALLOC_ACTIVATE, "debugging memory");
#endif
}
/*
 * Reserve the nursery from the OS (optionally aligned to NURSERY_ALIGN)
 * and record its bounds and size bits in the module-level globals.
 * Returns the start address of the nursery.
 */
static void*
major_alloc_heap (mword nursery_size, mword nursery_align, int the_nursery_bits)
{
	nursery_start = nursery_align
		? sgen_alloc_os_memory_aligned (nursery_size, nursery_align, TRUE)
		: sgen_alloc_os_memory (nursery_size, TRUE);

	nursery_end = nursery_start + nursery_size;
	nursery_bits = the_nursery_bits;
	return nursery_start;
}
/*
 * Reserve the nursery from the OS (optionally aligned to NURSERY_ALIGN),
 * using the flags-based allocation API, and record its bounds and size
 * bits in the module-level globals.  Returns the nursery start address.
 */
static void*
major_alloc_heap (mword nursery_size, mword nursery_align, int the_nursery_bits)
{
	nursery_start = nursery_align
		? sgen_alloc_os_memory_aligned (nursery_size, nursery_align, SGEN_ALLOC_HEAP | SGEN_ALLOC_ACTIVATE, "nursery")
		: sgen_alloc_os_memory (nursery_size, SGEN_ALLOC_HEAP | SGEN_ALLOC_ACTIVATE, "nursery");

	nursery_end = nursery_start + nursery_size;
	nursery_bits = the_nursery_bits;
	return nursery_start;
}
/*
 * Allocate SIZE bytes of zeroed GC-internal memory.  Small requests come
 * from the per-size lock-free allocators; requests larger than the biggest
 * bucket go straight to the OS.  TYPE is currently unused in this variant.
 * Returns NULL if the lock-free allocator fails.
 */
void*
sgen_alloc_internal_dynamic (size_t size, int type)
{
	int index;
	void *p;

	if (size > allocator_sizes [NUM_ALLOCATORS - 1])
		return sgen_alloc_os_memory (size, TRUE);

	index = index_for_size (size);
	p = mono_lock_free_alloc (&allocators [index]);
	/*
	 * Fix: mono_lock_free_alloc can fail (the sibling versions of this
	 * function all check it); memset on a NULL pointer is undefined
	 * behavior, so propagate the failure to the caller instead.
	 */
	if (!p)
		return NULL;
	memset (p, 0, size);
	return p;
}
/* FIXME: -This heap checker is racy regarding inlined write barriers and other JIT tricks that depend on OP_DUMMY_USE. */
/*
 * Debug-only whole-heap consistency check: records every valid nursery
 * object, then verifies object pointers across the nursery, the major
 * heap, and the large-object space, asserting if any corruption is found.
 * The two scan passes must run in this order — the first populates
 * valid_nursery_objects, which the verification callbacks presumably
 * consult (verify against the callback implementations).
 */
void
sgen_check_whole_heap (void)
{
	/*setup valid_nursery_objects*/
	/* Lazily allocate the scratch table on first use; never freed. */
	if (!valid_nursery_objects)
		valid_nursery_objects = sgen_alloc_os_memory (DEFAULT_NURSERY_SIZE, SGEN_ALLOC_INTERNAL | SGEN_ALLOC_ACTIVATE, "debugging data");
	valid_nursery_object_count = 0;
	/* Pass 1: enumerate the nursery to record currently-valid objects. */
	sgen_scan_area_with_callback (nursery_section->data, nursery_section->end_data, setup_mono_sgen_scan_area_with_callback, NULL, FALSE);

	broken_heap = FALSE;
	/* Pass 2: verify pointers in the nursery, major heap, and LOS. */
	sgen_scan_area_with_callback (nursery_section->data, nursery_section->end_data, verify_object_pointers_callback, NULL, FALSE);
	major_collector.iterate_objects (TRUE, TRUE, verify_object_pointers_callback, NULL);
	sgen_los_iterate_objects (verify_object_pointers_callback, NULL);
	/* The callbacks set broken_heap on any bad pointer; fail hard here. */
	g_assert (!broken_heap);
}
/*
 * Return a binary-protocol buffer with at least LENGTH bytes of free
 * space, lock-free.  If the current head buffer is full (or none exists),
 * a fresh buffer is allocated and published with a CAS; the loser of a
 * publication race frees its buffer and retries.
 */
static BinaryProtocolBuffer*
binary_protocol_get_buffer (int length)
{
	BinaryProtocolBuffer *buffer, *new_buffer;
 retry:
	buffer = binary_protocol_buffers;
	/* Fast path: the head buffer still has room for LENGTH bytes. */
	if (buffer && buffer->index + length <= BINARY_PROTOCOL_BUFFER_SIZE)
		return buffer;
	/* Slow path: allocate a new head buffer chained to the old one. */
	new_buffer = (BinaryProtocolBuffer *)sgen_alloc_os_memory (sizeof (BinaryProtocolBuffer), (SgenAllocFlags)(SGEN_ALLOC_INTERNAL | SGEN_ALLOC_ACTIVATE), "debugging memory");
	new_buffer->next = buffer;
	new_buffer->index = 0;
	/* Publish atomically; if another thread raced us and installed its
	 * own buffer first, discard ours and start over. */
	if (InterlockedCompareExchangePointer ((void**)&binary_protocol_buffers, new_buffer, buffer) != buffer) {
		sgen_free_os_memory (new_buffer, sizeof (BinaryProtocolBuffer), SGEN_ALLOC_INTERNAL);
		goto retry;
	}
	return new_buffer;
}
/*
 * Allocate SIZE bytes of zeroed GC-internal memory.  Small requests come
 * from the per-size lock-free allocators (memset to zero here); requests
 * larger than the biggest bucket are mapped directly from the OS.  On
 * failure sgen_assert_memory_alloc is called with a description of TYPE.
 */
void*
sgen_alloc_internal_dynamic (size_t size, int type, gboolean assert_on_failure)
{
	void *mem;
	int bucket;

	if (size > allocator_sizes [NUM_ALLOCATORS - 1]) {
		/* Oversized request: bypass the bucket allocators entirely. */
		mem = sgen_alloc_os_memory (size, SGEN_ALLOC_INTERNAL | SGEN_ALLOC_ACTIVATE, NULL);
		if (!mem)
			sgen_assert_memory_alloc (NULL, description_for_type (type));
		return mem;
	}

	bucket = index_for_size (size);
	mem = mono_lock_free_alloc (&allocators [bucket]);
	if (!mem)
		sgen_assert_memory_alloc (NULL, description_for_type (type));
	memset (mem, 0, size);
	return mem;
}
/*
 * Allocate SIZE bytes of zeroed GC-internal memory.  Small requests come
 * from the per-size lock-free allocators (memset to zero here); requests
 * larger than the biggest bucket are mapped directly from the OS.  On
 * failure sgen_assert_memory_alloc is called with SIZE and a description
 * of TYPE.
 */
void*
sgen_alloc_internal_dynamic (size_t size, int type, gboolean assert_on_failure)
{
	void *mem;
	int bucket;

	if (size > allocator_sizes [NUM_ALLOCATORS - 1]) {
		/* Oversized request: bypass the bucket allocators entirely. */
		mem = sgen_alloc_os_memory (size, (SgenAllocFlags)(SGEN_ALLOC_INTERNAL | SGEN_ALLOC_ACTIVATE), NULL);
		if (!mem)
			sgen_assert_memory_alloc (NULL, size, description_for_type (type));
		return mem;
	}

	bucket = index_for_size (size);
#ifdef HEAVY_STATISTICS
	++ allocator_sizes_stats [bucket];
#endif
	mem = mono_lock_free_alloc (&allocators [bucket]);
	if (!mem)
		sgen_assert_memory_alloc (NULL, size, description_for_type (type));
	memset (mem, 0, size);
	return mem;
}