/*
 * mono_gc_alloc_string:
 *
 * Allocate a string object of SIZE bytes for VTABLE and initialize its
 * length field to LEN.  Fast path tries a lock-free TLAB allocation inside
 * a critical region; slow path falls back to the global GC lock and reports
 * out-of-memory on failure.
 */
void*
mono_gc_alloc_string (MonoVTable *vtable, size_t size, gint32 len)
{
	MonoString *str;
	TLAB_ACCESS_INIT;

	/* Reject sizes that would overflow when aligned up. */
	if (!SGEN_CAN_ALIGN_UP (size))
		return NULL;

#ifndef DISABLE_CRITICAL_REGION
	ENTER_CRITICAL_REGION;
	str = mono_gc_try_alloc_obj_nolock (vtable, size);
	if (str) {
		/*This doesn't require fencing since EXIT_CRITICAL_REGION already does it for us*/
		str->length = len;
		EXIT_CRITICAL_REGION;
		return str;
	}
	EXIT_CRITICAL_REGION;
#endif

	/* Slow path: allocate under the global GC lock. */
	LOCK_GC;
	str = mono_gc_alloc_obj_nolock (vtable, size);
	if (G_UNLIKELY (!str)) {
		UNLOCK_GC;
		return mono_gc_out_of_memory (size);
	}
	/* Length is published before the lock is released. */
	str->length = len;
	UNLOCK_GC;
	return str;
}
/*
 * To be used for interned strings and possibly MonoThread, reflection handles.
 * We may want to explicitly free these objects.
 *
 * mono_gc_alloc_pinned_obj:
 *
 * Allocate a pinned object of SIZE bytes for VTABLE under the global GC
 * lock.  Objects larger than SGEN_MAX_SMALL_OBJ_SIZE go to the LOS (large
 * object space), which pins implicitly; small objects go through the major
 * collector's pinned allocator.  Returns NULL on overflow or allocation
 * failure.
 */
void*
mono_gc_alloc_pinned_obj (MonoVTable *vtable, size_t size)
{
	void **p;
	gboolean is_large;

	/* Reject sizes that would overflow when aligned up. */
	if (!SGEN_CAN_ALIGN_UP (size))
		return NULL;
	size = ALIGN_UP (size);

	/*
	 * Hoisted: the large/small decision was previously computed twice,
	 * once for the allocation path and once for the probe below.
	 */
	is_large = size > SGEN_MAX_SMALL_OBJ_SIZE;

	LOCK_GC;

	if (is_large) {
		/* large objects are always pinned anyway */
		p = sgen_los_alloc_large_inner (vtable, size);
	} else {
		SGEN_ASSERT (9, vtable->klass->inited, "class %s:%s is not initialized", vtable->klass->name_space, vtable->klass->name);
		p = major_collector.alloc_small_pinned_obj (vtable, size, SGEN_VTABLE_HAS_REFERENCES (vtable));
	}
	if (G_LIKELY (p)) {
		SGEN_LOG (6, "Allocated pinned object %p, vtable: %p (%s), size: %zd", p, vtable, vtable->klass->name, size);
		if (is_large)
			MONO_GC_MAJOR_OBJ_ALLOC_LARGE ((mword)p, size, vtable->klass->name_space, vtable->klass->name);
		else
			MONO_GC_MAJOR_OBJ_ALLOC_PINNED ((mword)p, size, vtable->klass->name_space, vtable->klass->name);
		binary_protocol_alloc_pinned (p, vtable, size);
	}
	UNLOCK_GC;
	return p;
}
/*
 * To be used for interned strings and possibly MonoThread, reflection handles.
 * We may want to explicitly free these objects.
 *
 * sgen_alloc_obj_pinned:
 *
 * Allocate a pinned object of SIZE bytes under the global GC lock.  Small
 * objects use the major collector's pinned allocator; anything above
 * SGEN_MAX_SMALL_OBJ_SIZE goes to the large object space, which is pinned
 * by construction.  Returns NULL on overflow or allocation failure.
 */
GCObject*
sgen_alloc_obj_pinned (GCVTable vtable, size_t size)
{
	GCObject *obj;

	if (!SGEN_CAN_ALIGN_UP (size))
		return NULL;
	size = ALIGN_UP (size);

	LOCK_GC;

	if (size <= SGEN_MAX_SMALL_OBJ_SIZE) {
		SGEN_ASSERT (9, sgen_client_vtable_is_inited (vtable), "class %s:%s is not initialized", sgen_client_vtable_get_namespace (vtable), sgen_client_vtable_get_name (vtable));
		obj = sgen_major_collector.alloc_small_pinned_obj (vtable, size, SGEN_VTABLE_HAS_REFERENCES (vtable));
	} else {
		/* large objects are always pinned anyway */
		obj = (GCObject *)sgen_los_alloc_large_inner (vtable, size);
	}

	if (G_LIKELY (obj)) {
		SGEN_LOG (6, "Allocated pinned object %p, vtable: %p (%s), size: %zd", obj, vtable, sgen_client_vtable_get_name (vtable), size);
		sgen_binary_protocol_alloc_pinned (obj, vtable, size, sgen_client_get_provenance ());
	}

	UNLOCK_GC;
	return obj;
}
/*
 * mono_gc_alloc_vector:
 *
 * Allocate a one-dimensional array (vector) of SIZE bytes for VTABLE and
 * initialize its max_length field.  Fast path tries a lock-free TLAB
 * allocation inside a critical region; slow path takes the global GC lock
 * and reports out-of-memory on failure.
 */
void*
mono_gc_alloc_vector (MonoVTable *vtable, size_t size, uintptr_t max_length)
{
	MonoArray *arr;
	TLAB_ACCESS_INIT;

	/* Reject sizes that would overflow when aligned up. */
	if (!SGEN_CAN_ALIGN_UP (size))
		return NULL;

#ifndef DISABLE_CRITICAL_REGION
	ENTER_CRITICAL_REGION;
	arr = mono_gc_try_alloc_obj_nolock (vtable, size);
	if (arr) {
		/*This doesn't require fencing since EXIT_CRITICAL_REGION already does it for us*/
		arr->max_length = (mono_array_size_t)max_length;
		EXIT_CRITICAL_REGION;
		return arr;
	}
	EXIT_CRITICAL_REGION;
#endif

	/* Slow path: allocate under the global GC lock. */
	LOCK_GC;
	arr = mono_gc_alloc_obj_nolock (vtable, size);
	if (G_UNLIKELY (!arr)) {
		UNLOCK_GC;
		return mono_gc_out_of_memory (size);
	}
	/* Length is published before the lock is released. */
	arr->max_length = (mono_array_size_t)max_length;
	UNLOCK_GC;
	return arr;
}
/*
 * sgen_alloc_obj_mature:
 *
 * Allocate an object of SIZE bytes directly in the mature (major) heap,
 * bypassing the nursery, via the degraded-allocation path under the global
 * GC lock.  Returns NULL on overflow or allocation failure.
 */
GCObject*
sgen_alloc_obj_mature (GCVTable vtable, size_t size)
{
	GCObject *obj;

	if (!SGEN_CAN_ALIGN_UP (size))
		return NULL;

	size = ALIGN_UP (size);

	LOCK_GC;
	obj = alloc_degraded (vtable, size, TRUE);
	UNLOCK_GC;

	return obj;
}
/*
 * sgen_can_alloc_size:
 *
 * Return TRUE if some fragment in the mutator allocator's free list has at
 * least SIZE bytes available (after alignment), FALSE otherwise.  Walks the
 * fragment list without taking it; pointers are unmasked before use.
 */
gboolean
sgen_can_alloc_size (size_t size)
{
	SgenFragment *fragment;

	if (!SGEN_CAN_ALIGN_UP (size))
		return FALSE;

	size = SGEN_ALIGN_UP (size);

	fragment = (SgenFragment *)unmask (mutator_allocator.alloc_head);
	while (fragment) {
		size_t available = (size_t)(fragment->fragment_end - fragment->fragment_next);
		if (available >= size)
			return TRUE;
		fragment = (SgenFragment *)unmask (fragment->next);
	}
	return FALSE;
}
/*
 * mono_gc_alloc_mature:
 *
 * Allocate an instance of VTABLE's class directly in the mature heap via
 * the degraded-allocation path, registering a finalizer if the class has
 * one.  Returns NULL on overflow or allocation failure.
 */
void*
mono_gc_alloc_mature (MonoVTable *vtable)
{
	void **res;
	size_t size = vtable->klass->instance_size;

	/* Reject sizes that would overflow when aligned up. */
	if (!SGEN_CAN_ALIGN_UP (size))
		return NULL;
	size = ALIGN_UP (size);

	LOCK_GC;
	res = alloc_degraded (vtable, size, TRUE);
	UNLOCK_GC;

	/*
	 * FIX: alloc_degraded can fail and return NULL; previously a NULL
	 * result was passed straight to mono_object_register_finalizer.
	 * Only register the finalizer for a successful allocation.
	 */
	if (G_UNLIKELY (res && vtable->klass->has_finalize))
		mono_object_register_finalizer ((MonoObject*)res);

	return res;
}
/*
 * mono_gc_alloc_obj:
 *
 * Allocate an object of SIZE bytes for VTABLE.  When debugging options are
 * active, periodically verifies the heap or forces a nursery collection
 * before allocating.  Fast path tries a lock-free TLAB allocation inside a
 * critical region; slow path takes the global GC lock and reports
 * out-of-memory on failure.
 */
void*
mono_gc_alloc_obj (MonoVTable *vtable, size_t size)
{
	void *res;
	/* NOTE(review): presumably consumed by the TLAB access macros below —
	 * confirm against the TLAB_ACCESS_* macro definitions. */
	SgenThreadInfo *__thread_info__;

	/* Reject sizes that would overflow when aligned up. */
	if (!SGEN_CAN_ALIGN_UP (size))
		return NULL;

#ifndef DISABLE_CRITICAL_REGION
	TLAB_ACCESS_INIT;

	if (G_UNLIKELY (has_per_allocation_action)) {
		/* Shared counter drives the every-Nth-allocation debug hooks. */
		static int alloc_count;
		int current_alloc = InterlockedIncrement (&alloc_count);

		if (verify_before_allocs) {
			if ((current_alloc % verify_before_allocs) == 0)
				sgen_check_whole_heap_stw ();
		}
		if (collect_before_allocs) {
			if (((current_alloc % collect_before_allocs) == 0) && nursery_section) {
				LOCK_GC;
				sgen_perform_collection (0, GENERATION_NURSERY, "collect-before-alloc-triggered", TRUE);
				UNLOCK_GC;
			}
		}
	}

	ENTER_CRITICAL_REGION;
	res = mono_gc_try_alloc_obj_nolock (vtable, size);
	if (res) {
		EXIT_CRITICAL_REGION;
		return res;
	}
	EXIT_CRITICAL_REGION;
#endif

	/* Slow path: allocate under the global GC lock. */
	LOCK_GC;
	res = mono_gc_alloc_obj_nolock (vtable, size);
	UNLOCK_GC;
	if (G_UNLIKELY (!res))
		return mono_gc_out_of_memory (size);
	return res;
}
/*
 * mono_gc_alloc_array:
 *
 * Allocate a multi-dimensional array of SIZE bytes for VTABLE, setting its
 * max_length and pointing its bounds at the trailing BOUNDS_SIZE bytes of
 * the same allocation.  Fast path tries a lock-free TLAB allocation inside
 * a critical region; slow path takes the global GC lock.
 */
void*
mono_gc_alloc_array (MonoVTable *vtable, size_t size, uintptr_t max_length, uintptr_t bounds_size)
{
	MonoArray *arr;
	MonoArrayBounds *bounds;
	TLAB_ACCESS_INIT;

	/* Reject sizes that would overflow when aligned up. */
	if (!SGEN_CAN_ALIGN_UP (size))
		return NULL;

#ifndef DISABLE_CRITICAL_REGION
	ENTER_CRITICAL_REGION;
	arr = mono_gc_try_alloc_obj_nolock (vtable, size);
	if (arr) {
		/*This doesn't require fencing since EXIT_CRITICAL_REGION already does it for us*/
		arr->max_length = (mono_array_size_t)max_length;
		/* Bounds live at the tail of the array allocation itself. */
		bounds = (MonoArrayBounds*)((char*)arr + size - bounds_size);
		arr->bounds = bounds;
		EXIT_CRITICAL_REGION;
		goto done;
	}
	EXIT_CRITICAL_REGION;
#endif

	/* Slow path: allocate under the global GC lock. */
	LOCK_GC;
	arr = mono_gc_alloc_obj_nolock (vtable, size);
	if (G_UNLIKELY (!arr)) {
		UNLOCK_GC;
		return mono_gc_out_of_memory (size);
	}
	arr->max_length = (mono_array_size_t)max_length;
	bounds = (MonoArrayBounds*)((char*)arr + size - bounds_size);
	arr->bounds = bounds;
	UNLOCK_GC;

	/* NOTE(review): when DISABLE_CRITICAL_REGION is defined this label has
	 * no goto and may trigger an unused-label warning. */
 done:
	SGEN_ASSERT (6, SGEN_ALIGN_UP (size) == SGEN_ALIGN_UP (sgen_par_object_get_size (vtable, (MonoObject*)arr)), "Array has incorrect size.");
	return arr;
}
/*
 * mono_gc_alloc_array:
 *
 * Allocate a multi-dimensional array of SIZE bytes for VTABLE, setting its
 * max_length and pointing its bounds at the trailing BOUNDS_SIZE bytes of
 * the same allocation.  Fast path tries a lock-free TLAB allocation inside
 * a critical region; slow path takes the global GC lock and reports
 * out-of-memory on failure.
 */
void*
mono_gc_alloc_array (MonoVTable *vtable, size_t size, uintptr_t max_length, uintptr_t bounds_size)
{
	MonoArray *arr;
	MonoArrayBounds *bounds;
	/* NOTE(review): presumably consumed by the TLAB access macros below —
	 * confirm against the TLAB_ACCESS_* macro definitions. */
	SgenThreadInfo *__thread_info__;

	/* Reject sizes that would overflow when aligned up. */
	if (!SGEN_CAN_ALIGN_UP (size))
		return NULL;

#ifndef DISABLE_CRITICAL_REGION
	TLAB_ACCESS_INIT;
	ENTER_CRITICAL_REGION;
	arr = mono_gc_try_alloc_obj_nolock (vtable, size);
	if (arr) {
		/*This doesn't require fencing since EXIT_CRITICAL_REGION already does it for us*/
		arr->max_length = max_length;
		/* Bounds live at the tail of the array allocation itself. */
		bounds = (MonoArrayBounds*)((char*)arr + size - bounds_size);
		arr->bounds = bounds;
		EXIT_CRITICAL_REGION;
		return arr;
	}
	EXIT_CRITICAL_REGION;
#endif

	/* Slow path: allocate under the global GC lock. */
	LOCK_GC;
	arr = mono_gc_alloc_obj_nolock (vtable, size);
	if (G_UNLIKELY (!arr)) {
		UNLOCK_GC;
		return mono_gc_out_of_memory (size);
	}
	arr->max_length = max_length;
	bounds = (MonoArrayBounds*)((char*)arr + size - bounds_size);
	arr->bounds = bounds;
	UNLOCK_GC;
	return arr;
}
/*
 * sgen_alloc_obj:
 *
 * Allocate an object of SIZE bytes for VTABLE.  When debugging options are
 * active, periodically verifies the heap or forces a nursery collection
 * before allocating.  Fast path tries a lock-free TLAB allocation inside a
 * critical region; slow path takes the global GC lock.  May return NULL;
 * out-of-memory handling is left to the caller.
 */
GCObject*
sgen_alloc_obj (GCVTable vtable, size_t size)
{
	GCObject *res;
	TLAB_ACCESS_INIT;

	/* Reject sizes that would overflow when aligned up. */
	if (!SGEN_CAN_ALIGN_UP (size))
		return NULL;

	if (G_UNLIKELY (sgen_has_per_allocation_action)) {
		/* Shared counter drives the every-Nth-allocation debug hooks. */
		static int alloc_count;
		int current_alloc = mono_atomic_inc_i32 (&alloc_count);

		if (sgen_verify_before_allocs) {
			if ((current_alloc % sgen_verify_before_allocs) == 0) {
				LOCK_GC;
				sgen_check_whole_heap_stw ();
				UNLOCK_GC;
			}
		}
		if (sgen_collect_before_allocs) {
			if (((current_alloc % sgen_collect_before_allocs) == 0) && sgen_nursery_section) {
				LOCK_GC;
				sgen_perform_collection (0, GENERATION_NURSERY, "collect-before-alloc-triggered", TRUE, TRUE);
				UNLOCK_GC;
			}
		}
	}

	ENTER_CRITICAL_REGION;
	res = sgen_try_alloc_obj_nolock (vtable, size);
	if (res) {
		EXIT_CRITICAL_REGION;
		return res;
	}
	EXIT_CRITICAL_REGION;

	/* Slow path: allocate under the global GC lock. */
	LOCK_GC;
	res = sgen_alloc_obj_nolock (vtable, size);
	UNLOCK_GC;
	return res;
}