/* Slow-path allocator: called when the fast bump-pointer path cannot
 * satisfy a request.  Runs a garbage collection and retries a bump
 * allocation from the from-space; aborts the process via sml_fatal if
 * the heap is still exhausted afterwards.
 *
 * obj_size : request size in bytes (header included by the caller).
 * Returns a pointer to the newly reserved, uninitialized object.
 *
 * The giant lock is released before returning; finalizers run after
 * the lock is dropped (skipped under FAIR_COMPARISON builds). */
static NOINLINE void * slow_alloc(size_t obj_size)
{
	void *obj;

	GCSTAT_TRIGGER(obj_size);
	do_gc();

	if (HEAP_REST(sml_heap_from_space) >= obj_size) {
		/* bump-pointer allocation from the freshly collected from-space */
		obj = sml_heap_from_space.free;
		sml_heap_from_space.free += obj_size;
#ifdef GC_STAT
		/* NOTE(review): this guard is GC_STAT while the rest of the
		 * function uses GCSTAT -- confirm the macro name is intended
		 * and not a misspelling that disables this counter. */
		sml_heap_alloced(obj_size);
#endif /* GC_STAT */
	} else {
#ifdef GCSTAT
		stat_notice("---");
		stat_notice("event: error");
		/* fixed typo: "intented" -> "intended" (matches sml_fatal below) */
		stat_notice("heap exceeded: intended to allocate %lu bytes.",
			    (unsigned long)obj_size);
		if (gcstat.file)
			fclose(gcstat.file);
#endif /* GCSTAT */
		sml_fatal(0, "heap exceeded: intended to allocate %lu bytes.",
			  (unsigned long)obj_size);
	}

	GIANT_UNLOCK();
#ifndef FAIR_COMPARISON
	/* runs outside the giant lock */
	sml_run_finalizer();
#endif /* FAIR_COMPARISON */
	return obj;
}
/* Trigger a garbage collection explicitly.  The giant lock is held for
 * the duration of the collection; pending finalizers are run only after
 * the lock is released (finalization is compiled out under
 * FAIR_COMPARISON builds). */
void sml_heap_gc(void)
{
	GIANT_LOCK();
	do_gc();
	GIANT_UNLOCK();
#ifndef FAIR_COMPARISON
	sml_run_finalizer();
#endif /* FAIR_COMPARISON */
}
/* Perform a full (MAJOR) collection with all mutator threads stopped.
 * Phase order is load-bearing: stop the world, trace the root set,
 * trace malloc'ed objects, handle finalizable objects, sweep the malloc
 * heap, then resume the world.  Finalizers run last, after both the
 * world is resumed and the giant lock is released.
 * NOTE(review): declared `()` here vs `(void)` in sibling variants --
 * confirm the unprototyped form is intentional. */
void sml_heap_gc()
{
	GIANT_LOCK(NULL);
	STOP_THE_WORLD();

	/* mark phase */
	sml_rootset_enum_ptr(trace, MAJOR);
	sml_malloc_pop_and_mark(trace, MAJOR);
	sml_check_finalizer(trace, MAJOR);

	/* sweep phase: reclaim unmarked malloc'ed blocks */
	sml_malloc_sweep(MAJOR);

	RUN_THE_WORLD();
	GIANT_UNLOCK();

	sml_run_finalizer(NULL);
}
/* Full (MAJOR) mark-sweep collection for the bitmap-marking heap.
 * Sequence: clear all mark bitmaps, mark from the root set, drain the
 * explicit marking stack, mark malloc'ed and finalizable objects, then
 * sweep the malloc heap.  Optional instrumentation is compiled in via
 * GCTIME (timing), GCSTAT (event log) and PRINT_ALLOC_TIME (live-size
 * accounting).  Finalizers are started at the very end. */
void sml_heap_gc(void)
{
#ifdef GCTIME
	sml_timer_t b_start, b_end;
	sml_time_t gctime;
/* NOTE(review): the two commented-out directives below fold the former
 * GCSTAT declaration block into this GCTIME block; the closing
 * `#endif` is labeled GCSTAT but actually terminates `#ifdef GCTIME`.
 * cleartime/t/b_cleared are therefore only declared when GCTIME is
 * defined -- the GCSTAT reporting block further down uses `t` and will
 * not compile with GCSTAT alone.  Confirm GCSTAT implies GCTIME. */
//#endif /* GCTIME */
//#ifdef GCSTAT
	sml_time_t cleartime,t;
	sml_timer_t b_cleared;
#endif /* GCSTAT */

#ifdef GCSTAT
	/* log a "start gc" event with current allocation/heap statistics */
	if (gcstat.verbose >= GCSTAT_VERBOSE_COUNT) {
		stat_notice("---");
		stat_notice("event: start gc");
		if (gcstat.last.trigger)
			stat_notice("trigger: %u", gcstat.last.trigger);
		print_alloc_count();
		print_heap_occupancy();
	}
	clear_last_counts();
#endif /* GCSTAT */

	DBG(("start gc"));

#ifdef GCTIME
	gcstat.gc.count++;
	sml_timer_now(b_start);
#endif /* GCTIME */

#ifdef PRINT_ALLOC_TIME
	live_tmp = 0;
	count_gc++;
	double st;
	getRusage(st);
#endif /* PRINT_ALLOC_TIME */

	/* reset every mark bitmap before marking begins */
	all_bitmaps_space_clear();

/* NOTE(review): guard below is GCTIME with a trailing `//GCSTAT`
 * comment, and its `#endif` is labeled GCSTAT -- the timestamp is
 * actually taken under GCTIME.  Confirm the labels. */
#ifdef GCTIME//GCSTAT
	sml_timer_now(b_cleared);
#endif /* GCSTAT */

#ifdef PRINT_ALLOC_TIME
	double en;
	getRusage(en);
	all_time_bit_clear += (en - st);
#endif /* PRINT_ALLOC_TIME */

	/* mark root objects */
	sml_rootset_enum_ptr(mark, MAJOR);
	DBG(("marking root objects completed"));

	/* STACK POP: drain the explicit marking stack until empty */
	while (marking_stack.bottom != marking_stack.top) {
		marking_stack.top--;
		mark_children((*(marking_stack.top)));
	}

	sml_malloc_pop_and_mark(mark_all, MAJOR);
	DBG(("marking completed"));

#ifdef CHECK
	clear_heap();
#endif /* CHECK */

	/* check finalization */
	sml_check_finalizer(mark_all, MAJOR);

	/* sweep malloc heap */
	sml_malloc_sweep(MAJOR);

#ifdef GCTIME
	sml_timer_now(b_end);
#endif /* GCTIME */

	DBG(("gc finished."));

#ifdef GCTIME
	/* accumulate total GC time and bitmap-clear time */
	sml_timer_dif(b_start, b_end, gctime);
	sml_time_accum(gctime, gcstat.gc.total_time);
	sml_timer_dif(b_start, b_cleared, cleartime);
	sml_time_accum(cleartime, gcstat.gc.clear_time);
#endif

#ifdef GCSTAT
	/* log an "end gc" event with per-collection timing and counters */
	if (gcstat.verbose >= GCSTAT_VERBOSE_GC) {
		sml_timer_dif(gcstat.exec_begin, b_start, t);
		stat_notice("time: "TIMEFMT, TIMEARG(t));
		stat_notice("---");
		stat_notice("event: end gc");
		sml_timer_dif(gcstat.exec_begin, b_end, t);
		stat_notice("time: "TIMEFMT, TIMEARG(t));
		stat_notice("duration: "TIMEFMT, TIMEARG(gctime));
		stat_notice("clear_time: "TIMEFMT,
			    TIMEARG(cleartime));
		stat_notice("clear_bytes: %lu", gcstat.last.clear_bytes);
		stat_notice("push: %u", gcstat.last.push_count);
		stat_notice("trace: %u", gcstat.last.trace_count);
		print_heap_occupancy();
	}
#endif /* GCSTAT */

#ifdef PRINT_ALLOC_TIME
	/* track min/max/total live bytes and per-block-size peak live data */
	if(live_tmp > live_max) live_max = live_tmp;
	if(live_tmp < live_min) live_min = live_tmp;
	live_all += live_tmp;
	unsigned int i;
	for(i=0; i<THE_NUMBER_OF_FIXED_BLOCK; i++) {
		if(((print_info[i].count_mark - print_info[i].tmp_mark) * print_info[i].block_size) > print_info[i].max_live)
			print_info[i].max_live = ((print_info[i].count_mark - print_info[i].tmp_mark) * print_info[i].block_size);
		print_info[i].tmp_mark=print_info[i].count_mark;
	}
#endif /* PRINT_ALLOC_TIME */

	/* start finalizers */
	sml_run_finalizer(NULL);
}
/* Allocate an object of `objsize` payload bytes from the segmented
 * bitmap heap.  Paths, in order of preference:
 *   1. oversized requests (> BLOCKSIZE_MAX) go to sml_obj_malloc;
 *   2. fast path: the current free bit of the size-class allocation
 *      pointer is clear -- bump-allocate without saving the frame;
 *   3. slow path: scan the current segment's bitmap, then other
 *      segments, then GC, then extend the heap; fatal if all fail.
 * frame_pointer is saved before any path that may trigger GC so the
 * collector can find the caller's roots.  The returned object has its
 * header zeroed; the caller fills it in. */
SML_PRIMITIVE void *
sml_alloc(unsigned int objsize, void *frame_pointer)
{
	size_t alloc_size;
	unsigned int blocksize_log2;
	struct alloc_ptr *ptr;
	void *obj;

	/* ensure that alloc_size is at least BLOCKSIZE_MIN. */
	alloc_size = ALIGNSIZE(OBJ_HEADER_SIZE + objsize, BLOCKSIZE_MIN);

	if (alloc_size > BLOCKSIZE_MAX) {
		/* too large for any block size class: delegate to malloc heap */
		GCSTAT_ALLOC_COUNT(malloc, 0, alloc_size);
		sml_save_frame_pointer(frame_pointer);
		return sml_obj_malloc(alloc_size);
	}

	/* size class = ceil(log2(alloc_size)) */
	blocksize_log2 = CEIL_LOG2(alloc_size);
	ASSERT(BLOCKSIZE_MIN_LOG2 <= blocksize_log2
	       && blocksize_log2 <= BLOCKSIZE_MAX_LOG2);

	ptr = &ALLOC_PTR_SET()->alloc_ptr[blocksize_log2];

	if (!BITPTR_TEST(ptr->freebit)) {
		/* fast path: current block is free; claim it and advance */
		GCSTAT_ALLOC_COUNT(fast, blocksize_log2, alloc_size);
		BITPTR_INC(ptr->freebit);
		obj = ptr->free;
		ptr->free += ptr->blocksize_bytes;
		goto alloced;
	}

	/* slow path may collect: make the frame visible to the GC */
	sml_save_frame_pointer(frame_pointer);

	/* search the current segment's bitmap, then other segments */
	if (ptr->free != NULL) {
		obj = find_bitmap(ptr);
		if (obj) goto alloced;
	}
	obj = find_segment(ptr);
	if (obj) goto alloced;

	/* still nothing: collect, retry, then grow the heap and retry */
	GCSTAT_TRIGGER(blocksize_log2);
	do_gc(MAJOR);
	obj = find_segment(ptr);
	if (obj) goto alloced_major;

	extend_heap(heap_space.extend_step);
	obj = find_segment(ptr);
	if (obj) goto alloced_major;

	sml_fatal(0, "heap exceeded: intended to allocate %u bytes.",
		  ptr->blocksize_bytes);

 alloced_major:
	ASSERT(check_newobj(obj));
	/* NOTE: sml_run_finalizer may cause garbage collection. */
	obj = sml_run_finalizer(obj);
	goto finished;
 alloced:
	ASSERT(check_newobj(obj));
 finished:
	OBJ_HEADER(obj) = 0;
	return obj;
}
/* Trigger a garbage collection explicitly, then run pending finalizers.
 * Minimal variant: no locking or world-stopping here (contrast with the
 * GIANT_LOCK/STOP_THE_WORLD variants of this function). */
void sml_heap_gc()
{
	do_gc();
	sml_run_finalizer(NULL);
}