/*
 * Mark every GC root reachable from the interpreter state.
 *
 * Roots marked here: global variables, objects pinned in the GC arena,
 * the base of the class hierarchy, top_self, the pending exception, and
 * the root/current execution contexts (plus the root fiber when present).
 */
static void
root_scan_phase(mrb_state *mrb)
{
  size_t idx;
  size_t pinned;

  /* a full GC restarts with empty gray lists; a minor GC keeps them */
  if (!is_minor_gc(mrb)) {
    mrb->gray_list = NULL;
    mrb->atomic_gray_list = NULL;
  }

  mrb_gc_mark_gv(mrb);

  /* arena entries are temporarily pinned objects; they always survive */
  pinned = mrb->arena_idx;
  for (idx = 0; idx < pinned; idx++) {
    mrb_gc_mark(mrb, mrb->arena[idx]);
  }

  /* root of the class hierarchy */
  mrb_gc_mark(mrb, (struct RBasic*)mrb->object_class);
  /* toplevel self object */
  mrb_gc_mark(mrb, (struct RBasic*)mrb->top_self);
  /* currently raised exception, if any */
  mrb_gc_mark(mrb, (struct RBasic*)mrb->exc);

  mark_context(mrb, mrb->root_c);
  if (mrb->root_c->fib) {
    mrb_gc_mark(mrb, (struct RBasic*)mrb->root_c->fib);
  }
  if (mrb->root_c != mrb->c) {
    mark_context(mrb, mrb->c);
  }
}
/*
 * Advance the incremental GC by one slice.
 *
 * In generational (minor) mode the collector is driven through a complete
 * cycle back to the ROOT state; otherwise only a single incremental step
 * runs.  When a cycle completes, the allocation threshold for the next
 * cycle is recomputed and major/minor mode transitions are handled.
 */
MRB_API void
mrb_incremental_gc(mrb_state *mrb)
{
  mrb_gc *gc = &mrb->gc;

  /* nothing to do while GC is switched off or an iterator walks the heap */
  if (gc->disabled || gc->iterating) return;

  GC_INVOKE_TIME_REPORT("mrb_incremental_gc()");
  GC_TIME_START;

  if (is_minor_gc(gc)) {
    incremental_gc_until(mrb, gc, MRB_GC_STATE_ROOT);
  }
  else {
    incremental_gc_step(mrb, gc);
  }

  if (gc->state == MRB_GC_STATE_ROOT) {
    /* a full cycle just finished: recompute the trigger threshold */
    mrb_assert(gc->live >= gc->live_after_mark);
    gc->threshold = (gc->live_after_mark/100) * gc->interval_ratio;
    if (gc->threshold < GC_STEP_SIZE) {
      gc->threshold = GC_STEP_SIZE;
    }

    if (is_major_gc(gc)) {
      size_t window = gc->live_after_mark/100 * MAJOR_GC_INC_RATIO;

      gc->full = FALSE;
      if (window < MAJOR_GC_TOOMANY) {
        gc->majorgc_old_threshold = window;
      }
      else {
        /* too many objects allocated during incremental GC, */
        /* instead of increasing threshold, invoke full GC. */
        mrb_full_gc(mrb);
      }
    }
    else if (is_minor_gc(gc)) {
      if (gc->live > gc->majorgc_old_threshold) {
        /* old generation outgrew its budget: escalate to a major GC */
        clear_all_old(mrb, gc);
        gc->full = TRUE;
      }
    }
  }

  GC_TIME_STOP_AND_REPORT;
}
/*
 * Run one slice of the incremental GC.
 *
 * Minor mode loops the collector until the cycle ends; otherwise a bounded
 * amount of work (derived from gc_step_ratio) is performed.  On cycle
 * completion the next trigger threshold is recomputed; mid-cycle, the
 * trigger is simply pushed one step further out.
 */
void
mrb_incremental_gc(mrb_state *mrb)
{
  if (mrb->gc_disabled) return;

  GC_INVOKE_TIME_REPORT("mrb_incremental_gc()");
  GC_TIME_START;

  if (is_minor_gc(mrb)) {
    /* minor GC: drive the collector through a complete cycle */
    do {
      incremental_gc(mrb, ~0);
    } while (mrb->gc_state != GC_STATE_NONE);
  }
  else {
    /* incremental mode: spend at most `budget` units of work per call */
    size_t budget = (GC_STEP_SIZE/100) * mrb->gc_step_ratio;
    size_t spent = 0;

    while (spent < budget) {
      spent += incremental_gc(mrb, budget);
      if (mrb->gc_state == GC_STATE_NONE) break;
    }
  }

  if (mrb->gc_state == GC_STATE_NONE) {
    /* cycle finished: derive the next threshold from surviving objects */
    gc_assert(mrb->live >= mrb->gc_live_after_mark);
    mrb->gc_threshold = (mrb->gc_live_after_mark/100) * mrb->gc_interval_ratio;
    if (mrb->gc_threshold < GC_STEP_SIZE) {
      mrb->gc_threshold = GC_STEP_SIZE;
    }

    if (is_major_gc(mrb)) {
      mrb->majorgc_old_threshold = mrb->gc_live_after_mark/100 * DEFAULT_MAJOR_GC_INC_RATIO;
      mrb->gc_full = FALSE;
    }
    else if (is_minor_gc(mrb)) {
      if (mrb->live > mrb->majorgc_old_threshold) {
        clear_all_old(mrb);
        mrb->gc_full = TRUE;
      }
    }
  }
  else {
    /* cycle still in progress: delay the next trigger by one step */
    mrb->gc_threshold = mrb->live + GC_STEP_SIZE;
  }

  GC_TIME_STOP_AND_REPORT;
}
/*
 * Mark every GC root held by the interpreter state.
 *
 * Roots marked here: global variables, the GC arena, all built-in core
 * classes/modules, top_self, the pending exception, the pre-allocated
 * error objects, and the current/root execution contexts.
 */
static void
root_scan_phase(mrb_state *mrb, mrb_gc *gc)
{
  int i, e;

  /* a full GC starts from empty gray lists; a minor GC keeps them */
  if (!is_minor_gc(gc)) {
    gc->gray_list = NULL;
    gc->atomic_gray_list = NULL;
  }

  mrb_gc_mark_gv(mrb);

  /* mark arena (objects temporarily pinned by C code) */
  for (i=0,e=gc->arena_idx; i<e; i++) {
    mrb_gc_mark(mrb, gc->arena[i]);
  }

  /* mark class hierarchy */
  mrb_gc_mark(mrb, (struct RBasic*)mrb->object_class);

  /* mark built-in classes */
  mrb_gc_mark(mrb, (struct RBasic*)mrb->class_class);
  mrb_gc_mark(mrb, (struct RBasic*)mrb->module_class);
  mrb_gc_mark(mrb, (struct RBasic*)mrb->proc_class);
  mrb_gc_mark(mrb, (struct RBasic*)mrb->string_class);
  mrb_gc_mark(mrb, (struct RBasic*)mrb->array_class);
  mrb_gc_mark(mrb, (struct RBasic*)mrb->hash_class);
  mrb_gc_mark(mrb, (struct RBasic*)mrb->range_class);
#ifndef MRB_WITHOUT_FLOAT
  /* float_class only exists when floats are compiled in */
  mrb_gc_mark(mrb, (struct RBasic*)mrb->float_class);
#endif
  mrb_gc_mark(mrb, (struct RBasic*)mrb->fixnum_class);
  mrb_gc_mark(mrb, (struct RBasic*)mrb->true_class);
  mrb_gc_mark(mrb, (struct RBasic*)mrb->false_class);
  mrb_gc_mark(mrb, (struct RBasic*)mrb->nil_class);
  mrb_gc_mark(mrb, (struct RBasic*)mrb->symbol_class);
  mrb_gc_mark(mrb, (struct RBasic*)mrb->kernel_module);
  mrb_gc_mark(mrb, (struct RBasic*)mrb->eException_class);
  mrb_gc_mark(mrb, (struct RBasic*)mrb->eStandardError_class);

  /* mark top_self */
  mrb_gc_mark(mrb, (struct RBasic*)mrb->top_self);
  /* mark exception */
  mrb_gc_mark(mrb, (struct RBasic*)mrb->exc);
  /* mark pre-allocated exception (must survive even under memory pressure) */
  mrb_gc_mark(mrb, (struct RBasic*)mrb->nomem_err);
  mrb_gc_mark(mrb, (struct RBasic*)mrb->stack_err);
#ifdef MRB_GC_FIXED_ARENA
  /* arena-overflow error object exists only with a fixed-size arena */
  mrb_gc_mark(mrb, (struct RBasic*)mrb->arena_err);
#endif

  /* mark the current context, then the root context if it differs */
  mark_context(mrb, mrb->c);
  if (mrb->root_c != mrb->c) {
    mark_context(mrb, mrb->root_c);
  }
}
/*
 * Mark every GC root reachable from the interpreter state.
 *
 * Roots: global variables, the GC arena, the class hierarchy root, the
 * pending exception, the live portion of the VM value stack, the ensure
 * stack, per-callinfo closures, and literals in every loaded irep pool.
 */
static void
root_scan_phase(mrb_state *mrb)
{
  int i, j, e;
  mrb_callinfo *ci;

  /* a full GC starts from empty gray lists; a minor GC keeps them */
  if (!is_minor_gc(mrb)) {
    mrb->gray_list = 0;
    mrb->variable_gray_list = 0;
  }

  mrb_gc_mark_gv(mrb);

  /* mark arena (objects temporarily pinned by C code) */
  for (i=0,e=mrb->arena_idx; i<e; i++) {
    mrb_gc_mark(mrb, mrb->arena[i]);
  }

  /* mark class hierarchy */
  mrb_gc_mark(mrb, (struct RBasic*)mrb->object_class);
  /* mark exception */
  mrb_gc_mark(mrb, (struct RBasic*)mrb->exc);

  /* mark stack: live extent is current stack depth plus the register
   * window of the active call frame, clamped to the allocated stack end */
  e = mrb->stack - mrb->stbase;
  if (mrb->ci) e += mrb->ci->nregs;
  if (mrb->stbase + e > mrb->stend) e = mrb->stend - mrb->stbase;
  for (i=0; i<e; i++) {
    mrb_gc_mark_value(mrb, mrb->stbase[i]);
  }

  /* mark ensure stack (pending ensure handlers of the active frame) */
  e = (mrb->ci) ? mrb->ci->eidx : 0;
  for (i=0; i<e; i++) {
    mrb_gc_mark(mrb, (struct RBasic*)mrb->ensure[i]);
  }

  /* mark closure: env, proc and target class of every call frame */
  for (ci = mrb->cibase; ci <= mrb->ci; ci++) {
    if (!ci) continue;
    mrb_gc_mark(mrb, (struct RBasic*)ci->env);
    mrb_gc_mark(mrb, (struct RBasic*)ci->proc);
    mrb_gc_mark(mrb, (struct RBasic*)ci->target_class);
  }

  /* mark irep pool (literal values referenced by compiled code) */
  if (mrb->irep) {
    size_t len = mrb->irep_len;
    /* clamp in case irep_len runs ahead of the allocated capacity */
    if (len > mrb->irep_capa) len = mrb->irep_capa;
    for (i=0; i<len; i++) {
      mrb_irep *irep = mrb->irep[i];
      if (!irep) continue;
      for (j=0; j<irep->plen; j++) {
        mrb_gc_mark_value(mrb, irep->pool[j]);
      }
    }
  }
}
/*
 * Advance the incremental GC by one slice.
 *
 * Minor mode drives a complete cycle back to the ROOT state; otherwise a
 * single incremental step runs.  When the cycle completes, the allocation
 * threshold for the next cycle is recomputed and major/minor generational
 * transitions are applied.
 */
MRB_API void
mrb_incremental_gc(mrb_state *mrb)
{
  mrb_gc *gcp = &mrb->gc;

  /* bail out while GC is disabled or an iterator is walking the heap */
  if (gcp->disabled || gcp->iterating) return;

  GC_INVOKE_TIME_REPORT("mrb_incremental_gc()");
  GC_TIME_START;

  if (is_minor_gc(gcp)) {
    incremental_gc_until(mrb, gcp, MRB_GC_STATE_ROOT);
  }
  else {
    incremental_gc_step(mrb, gcp);
  }

  if (gcp->state == MRB_GC_STATE_ROOT) {
    size_t next_threshold;

    /* cycle finished: size the next trigger from surviving objects */
    mrb_assert(gcp->live >= gcp->live_after_mark);
    next_threshold = (gcp->live_after_mark/100) * gcp->interval_ratio;
    if (next_threshold < GC_STEP_SIZE) {
      next_threshold = GC_STEP_SIZE;
    }
    gcp->threshold = next_threshold;

    if (is_major_gc(gcp)) {
      gcp->majorgc_old_threshold = gcp->live_after_mark/100 * DEFAULT_MAJOR_GC_INC_RATIO;
      gcp->full = FALSE;
    }
    else if (is_minor_gc(gcp)) {
      if (gcp->live > gcp->majorgc_old_threshold) {
        /* old generation outgrew its budget: escalate to a major GC */
        clear_all_old(mrb, gcp);
        gcp->full = TRUE;
      }
    }
  }

  GC_TIME_STOP_AND_REPORT;
}
/*
 * Advance the incremental GC by one slice.
 *
 * Minor mode runs the collector until the cycle ends (state NONE);
 * otherwise one incremental step runs.  On cycle completion the trigger
 * threshold for the next cycle is recomputed and generational mode
 * transitions are applied.
 */
void
mrb_incremental_gc(mrb_state *mrb)
{
  if (mrb->gc_disabled) return;

  GC_INVOKE_TIME_REPORT("mrb_incremental_gc()");
  GC_TIME_START;

  if (is_minor_gc(mrb)) {
    incremental_gc_until(mrb, GC_STATE_NONE);
  }
  else {
    incremental_gc_step(mrb);
  }

  if (mrb->gc_state == GC_STATE_NONE) {
    size_t new_threshold;

    /* cycle finished: size the next trigger from surviving objects */
    mrb_assert(mrb->live >= mrb->gc_live_after_mark);
    new_threshold = (mrb->gc_live_after_mark/100) * mrb->gc_interval_ratio;
    if (new_threshold < GC_STEP_SIZE) {
      new_threshold = GC_STEP_SIZE;
    }
    mrb->gc_threshold = new_threshold;

    if (is_major_gc(mrb)) {
      mrb->gc_full = FALSE;
      mrb->majorgc_old_threshold = mrb->gc_live_after_mark/100 * DEFAULT_MAJOR_GC_INC_RATIO;
    }
    else if (is_minor_gc(mrb)) {
      if (mrb->live > mrb->majorgc_old_threshold) {
        /* old generation outgrew its budget: escalate to a major GC */
        clear_all_old(mrb);
        mrb->gc_full = TRUE;
      }
    }
  }

  GC_TIME_STOP_AND_REPORT;
}
/*
 * Mark every GC root reachable from the interpreter state.
 *
 * Roots: global variables, the GC arena, the class hierarchy root,
 * top_self, the pending exception, the current execution context, and
 * literal values held in every loaded irep pool.
 */
static void
root_scan_phase(mrb_state *mrb)
{
  size_t idx, n;
  int pi;

  /* a full GC starts from empty gray lists; a minor GC keeps them */
  if (!is_minor_gc(mrb)) {
    mrb->gray_list = 0;
    mrb->variable_gray_list = 0;
  }

  mrb_gc_mark_gv(mrb);

  /* arena entries are objects temporarily pinned by C code */
  for (idx = 0, n = mrb->arena_idx; idx < n; idx++) {
    mrb_gc_mark(mrb, mrb->arena[idx]);
  }

  mrb_gc_mark(mrb, (struct RBasic*)mrb->object_class);  /* class hierarchy root */
  mrb_gc_mark(mrb, (struct RBasic*)mrb->top_self);      /* toplevel self */
  mrb_gc_mark(mrb, (struct RBasic*)mrb->exc);           /* pending exception */

  mark_context(mrb, mrb->c);

  /* literals in each irep's pool must stay alive while code is loaded */
  if (mrb->irep) {
    size_t count = mrb->irep_len;

    /* clamp in case irep_len runs ahead of the allocated capacity */
    if (count > mrb->irep_capa) count = mrb->irep_capa;
    for (idx = 0; idx < count; idx++) {
      mrb_irep *rep = mrb->irep[idx];

      if (!rep) continue;
      for (pi = 0; pi < rep->plen; pi++) {
        mrb_gc_mark_value(mrb, rep->pool[pi]);
      }
    }
  }
}
/*
 * Sweep up to `limit` object slots, resuming from mrb->sweeps.
 *
 * Dead objects are finalized with obj_free() and spliced onto their
 * page's freelist.  A page that ends up entirely free is unlinked and
 * released back to the allocator.  Live objects are repainted white for
 * the next cycle (non-generational mode only).  Returns the number of
 * slots examined (in whole-page units), which the caller charges against
 * its work budget.
 */
static size_t
incremental_sweep_phase(mrb_state *mrb, size_t limit)
{
  struct heap_page *page = mrb->sweeps;  /* resume point from the last slice */
  size_t tried_sweep = 0;

  while (page && (tried_sweep < limit)) {
    RVALUE *p = page->objects;
    RVALUE *e = p + MRB_HEAP_PAGE_SIZE;
    size_t freed = 0;
    mrb_bool dead_slot = TRUE;               /* stays TRUE only if every object is dead */
    int full = (page->freelist == NULL);     /* page had no free slots before this sweep */

    if (is_minor_gc(mrb) && page->old) {
      /* skip a slot which doesn't contain any young object */
      p = e;
      dead_slot = FALSE;
    }
    while (p<e) {
      if (is_dead(mrb, &p->as.basic)) {
        if (p->as.basic.tt != MRB_TT_FREE) {
          /* finalize, then push the slot onto the page freelist */
          obj_free(mrb, &p->as.basic);
          p->as.free.next = page->freelist;
          page->freelist = (struct RBasic*)p;
          freed++;
        }
      }
      else {
        if (!is_generational(mrb))
          paint_partial_white(mrb, &p->as.basic); /* next gc target */
        dead_slot = 0;
      }
      p++;
    }

    /* free dead slot */
    if (dead_slot && freed < MRB_HEAP_PAGE_SIZE) {
      /* page holds no live objects: release it entirely */
      struct heap_page *next = page->next;
      unlink_heap_page(mrb, page);
      unlink_free_heap_page(mrb, page);
      mrb_free(mrb, page);
      page = next;
    }
    else {
      /* page regained free slots: make it allocatable again */
      if (full && freed > 0) {
        link_free_heap_page(mrb, page);
      }
      /* a fully-occupied page is "old" for minor GC and may be skipped */
      if (page->freelist == NULL && is_minor_gc(mrb))
        page->old = TRUE;
      else
        page->old = FALSE;
      page = page->next;
    }
    tried_sweep += MRB_HEAP_PAGE_SIZE;
    mrb->live -= freed;
    mrb->gc_live_after_mark -= freed;
  }
  mrb->sweeps = page;  /* remember where to resume on the next slice */
  return tried_sweep;
}
/*
 * Exercise the incremental GC state machine end to end.
 *
 * Phase 1 (non-generational): step the collector through
 * NONE -> MARK -> SWEEP -> NONE, verifying mark colors mid-sweep and that
 * the live-object count matches total slots minus the freelist length.
 * Phase 2 (generational): switch to generational mode, force a promotion
 * to major GC by zeroing majorgc_old_threshold, then run the major cycle
 * to completion.
 */
void
test_incremental_gc(void)
{
  mrb_state *mrb = mrb_open();
  size_t max = ~0, live = 0, total = 0, freed = 0;
  RVALUE *free;
  struct heap_page *page;

  puts("test_incremental_gc");
  /* start in plain incremental (non-generational) mode */
  change_gen_gc_mode(mrb, FALSE);

  puts(" in mrb_full_gc");
  mrb_full_gc(mrb);

  /* a full GC leaves the state machine idle */
  mrb_assert(mrb->gc_state == GC_STATE_NONE);
  puts(" in GC_STATE_NONE");
  /* one unbounded step from NONE performs the root scan -> MARK */
  incremental_gc(mrb, max);
  mrb_assert(mrb->gc_state == GC_STATE_MARK);
  puts(" in GC_STATE_MARK");
  incremental_gc_until(mrb, GC_STATE_SWEEP);
  mrb_assert(mrb->gc_state == GC_STATE_SWEEP);

  puts(" in GC_STATE_SWEEP");
  /* mid-sweep: count black (live) objects; any live gray object here
   * indicates a marking bug, so dump its address */
  page = mrb->heaps;
  while (page) {
    RVALUE *p = page->objects;
    RVALUE *e = p + MRB_HEAP_PAGE_SIZE;
    while (p<e) {
      if (is_black(&p->as.basic)) {
        live++;
      }
      if (is_gray(&p->as.basic) && !is_dead(mrb, &p->as.basic)) {
        printf("%p\n", &p->as.basic);
      }
      p++;
    }
    page = page->next;
    total += MRB_HEAP_PAGE_SIZE;
  }

  /* marking must be complete before sweeping */
  mrb_assert(mrb->gray_list == NULL);

  incremental_gc(mrb, max);
  mrb_assert(mrb->gc_state == GC_STATE_SWEEP);

  incremental_gc(mrb, max);
  mrb_assert(mrb->gc_state == GC_STATE_NONE);

  /* walk the first page's freelist to count reclaimed slots */
  free = (RVALUE*)mrb->heaps->freelist;
  while (free) {
    freed++;
    free = (RVALUE*)free->as.free.next;
  }

  /* live accounting must agree with both the mark colors and the freelist */
  mrb_assert(mrb->live == live);
  mrb_assert(mrb->live == total-freed);

  puts("test_incremental_gc(gen)");
  incremental_gc_until(mrb, GC_STATE_SWEEP);
  change_gen_gc_mode(mrb, TRUE);

  mrb_assert(mrb->gc_full == FALSE);
  mrb_assert(mrb->gc_state == GC_STATE_NONE);

  puts(" in minor");
  mrb_assert(is_minor_gc(mrb));
  mrb_assert(mrb->majorgc_old_threshold > 0);
  /* force the minor GC to decide the old generation is too big */
  mrb->majorgc_old_threshold = 0;
  mrb_incremental_gc(mrb);
  mrb_assert(mrb->gc_full == TRUE);
  mrb_assert(mrb->gc_state == GC_STATE_NONE);

  puts(" in major");
  mrb_assert(is_major_gc(mrb));
  /* run the promoted major collection to completion */
  do {
    mrb_incremental_gc(mrb);
  } while (mrb->gc_state != GC_STATE_NONE);
  /* a finished major cycle resets the full flag */
  mrb_assert(mrb->gc_full == FALSE);

  mrb_close(mrb);
}