// Runs a garbage collection chosen by `gc_algorithm` and `heap.next_gc`,
// then tries to satisfy an allocation of `size` bytes.
// Returns a pointer to the allocated space, or 0 if even a full GC plus
// heap extension could not free enough memory.
unsigned char *select_gc(int size) {
    // FIXME: missing information of finalizible objects added to evacuation area during allocation
    heap.old_objects.prev_pos = heap.old_objects.pos;
    unsigned char *res;
    // Low decimal digit of gc_algorithm forces a specific collector;
    // 0 means "use the adaptive choice in heap.next_gc" (second switch below).
    int alg = gc_algorithm % 10;
    switch (alg) {
        case 0: break;
        case 1: return full_gc(size);
        case 2: return slide_gc(size);
        default: abort();
    }
    GC_TYPE gc = heap.next_gc;
    TRACE2("gc.select", "starting gc = " << gc_name(gc));
    // Adaptive path: run whichever collector was scheduled for this cycle.
    switch(gc) {
        case GC_COPY: res = copy_gc(size); break;
        case GC_FULL: res = full_gc(size); break;
        case GC_SLIDE_COMPACT: res = slide_gc(size); break;
        default: abort();
    }
    // NOTE(review): gc_out is compared but the branch body is empty — looks
    // like a placeholder for handling a collector downgrade (e.g. copy GC
    // falling back because reserved space was too small); confirm intent.
    GC_TYPE gc_out = gc_type;
    if (gc_out != gc) {
        // too small reserved space or behaviour changed
    }
    if (!res) {
        TRACE2("gc.mem", "Not enough free memory after collection to allocate "
                << size << " bytes");
    }
    TRACE2("gc.mem", "select_gc = " << res);
    // First fallback: if the lighter collector did not free enough,
    // escalate to a full GC (unless we already ran one).
    if ((!res) && gc != GC_FULL) {
        TRACE2("gc.select", "no free mem after gc, trying full gc");
        heap.next_gc = GC_FULL;
        res = full_gc(size);
    }
    TRACE2("gc.mem", "select_gc2 = " << res);
    // Second fallback: grow the heap (rounded up to a 64 KiB multiple)
    // and retry the bump-pointer allocation directly.
    if (res == 0 && heap.size != heap.max_size) {
        assert(heap.pos_limit == heap.allocation_region_end());
        heap_extend(round_up(heap.size + size, 65536));
        if (heap.pos + size <= heap.pos_limit) {
            res = heap.pos;
            heap.pos += size;
        }
    }
    return res;
}
/*
 * Register a newly created object with the collector.
 *
 * The object is appended to generation 0 and the live-registration
 * counter is bumped.  Whenever the counter hits a multiple of one of
 * the configured thresholds, a collection is triggered; the highest
 * generation whose threshold matches wins (threshold2 > threshold1 >
 * threshold0 in priority), and at most one collection runs per call.
 */
void lgc_state_register(lgc_state_t* gc, lgc_object_t* obj) {
    lgc_list_push_back(gc->gen0, (lgc_list_t*)obj);
    ++gc->count;

    /* Check thresholds from the oldest generation down; first hit wins. */
    if (gc->count % gc->threshold2 == 0) {
        full_gc(gc, 2);
        return;
    }
    if (gc->count % gc->threshold1 == 0) {
        full_gc(gc, 1);
        return;
    }
    if (gc->count % gc->threshold0 == 0) {
        full_gc(gc, 0);
    }
}
// Handle an explicitly requested collection (e.g. System.gc()).
// The collector to run is picked from gc_algorithm: values below 10 use
// the default forced collection; otherwise the tens digit selects a
// specific collector.  Finalization is hinted to the VM afterwards.
void select_force_gc() {
    vm_gc_lock_enum();
    if (gc_algorithm < 10) {
        force_gc();
    } else {
        switch (gc_algorithm / 10) {
            case 2:
                full_gc(0);
                break;
            case 3:
                // Snapshot the old-objects position before an evacuating copy.
                heap.old_objects.prev_pos = heap.old_objects.pos;
                copy_gc(0);
                break;
            default:
                // Other tens digits: no forced collection.
                break;
        }
    }
    vm_gc_unlock_enum();

    // Run finalizers outside the GC lock, with suspension disabled.
    hythread_suspend_disable();
    vm_hint_finalize();
    hythread_suspend_enable();
}