/* At VM exit, hand every remaining object that has a finalizer to the VM for
 * finalization. Drains obj_with_fin_pool block by block, calling
 * vm_finalize_object() on each non-null slot, and recycles the emptied
 * vector blocks into free_pool. */
void put_all_fin_on_exit(GC *gc)
{
  Pool *fin_pool = gc->finref_metadata->obj_with_fin_pool;
  Pool *recycle_pool = gc->finref_metadata->free_pool;

  /* Because we are manipulating obj_with_fin_pool, the GC lock must be held
   * in case a GC happens concurrently. */
  vm_gc_lock_enum();
  /* FIXME: holding the gc lock is not enough; there may be mutators that are
   * still allocating objects with finalizers. Could be fixed as follows:
   * in fspace_alloc() and lspace_alloc(), hold the gc lock across both
   * allocating the memory and adding the finalizable object to the pool. */
  lock(gc->mutator_list_lock);
  gc_set_obj_with_fin(gc);
  unlock(gc->mutator_list_lock);

  for(Vector_Block *vb = pool_get_entry(fin_pool); vb; vb = pool_get_entry(fin_pool)){
    POINTER_SIZE_INT *cursor = vector_block_iterator_init(vb);
    for(; !vector_block_iterator_end(vb, cursor); cursor = vector_block_iterator_advance(vb, cursor)){
      Managed_Object_Handle handle = (Managed_Object_Handle)read_slot((REF*)cursor);
      if(handle)
        vm_finalize_object(handle);
    }
    vector_block_clear(vb);
    pool_put_entry(recycle_pool, vb);
  }
  vm_gc_unlock_enum();
}
// for the case concurrent marking is not finished before heap is exhausted static void gc_partial_con_PMSS(GC *gc) { INFO2("gc.con.info", "[PMSS] Heap has been exhuasted, current collection = " << gc->num_collections ); // wait concurrent marking finishes int64 wait_start = time_now(); gc_disable_alloc_obj_live(gc); // in the STW manner, so we can disable it at anytime before the mutators are resumed //in the stop the world phase (only conclctors is running at the moment), so the spin lock will not lose more performance while( gc->gc_concurrent_status == GC_CON_START_MARKERS || gc->gc_concurrent_status == GC_CON_TRACING || gc->gc_concurrent_status == GC_CON_TRACE_DONE) { vm_thread_yield(); //let the unfinished marker run } /*just debugging*/ gc_ms_get_current_heap_usage((GC_MS *)gc); int64 pause_time = time_now() - wait_start; INFO2("gc.con.info", "[PMSS]wait marking time="<<pause_time<<" us" ); Con_Collection_Statistics *con_collection_stat = gc_ms_get_con_collection_stat((GC_MS*)gc); unsigned int marking_time_shortage = (unsigned int)(con_collection_stat->marking_end_time - wait_start); INFO2("gc.con.info", "[PMSS] marking late time [" << marking_time_shortage << "] us" ); // start STW reclaiming heap gc_con_update_stat_heap_exhausted(gc); // calculate util rate gc_reset_mutator_context(gc); if(!IGNORE_FINREF ) gc_set_obj_with_fin(gc); gc_ms_reclaim_heap((GC_MS*)gc); // reset after partial stop the world collection gc_reset_after_con_collection(gc); set_con_nil(gc); }
// for the case pure stop the world static void gc_partial_con_PSTW( GC *gc) { int64 time_collection_start = time_now(); INFO2("gc.space.stat","Stop-the-world collection = "<<gc->num_collections<<""); INFO2("gc.con.info", "from last check point =" << (unsigned int)(time_collection_start -get_last_check_point()) ); // stop the world enumeration gc->num_collections++; int disable_count = hythread_reset_suspend_disable(); gc_set_rootset_type(ROOTSET_IS_REF); gc_prepare_rootset(gc); if(gc->cause != GC_CAUSE_RUNTIME_FORCE_GC ) { unsigned int new_obj_size = gc_get_mutator_new_obj_size(gc); Con_Collection_Statistics * con_collection_stat = gc_ms_get_con_collection_stat((GC_MS*)gc); con_collection_stat->heap_utilization_rate = (float)(con_collection_stat->surviving_size_at_gc_end + new_obj_size)/gc->committed_heap_size; } //reclaim heap gc_reset_mutator_context(gc); if(!IGNORE_FINREF ) gc_set_obj_with_fin(gc); gc_ms_reclaim_heap((GC_MS*)gc); //update live size gc_PSTW_update_stat_after_marking(gc); // reset the collection and resume mutators gc_reset_after_con_collection(gc); set_con_nil(gc); // concurrent scheduling will continue after mutators are resumed vm_resume_threads_after(); assert(hythread_is_suspend_enabled()); hythread_set_suspend_disable(disable_count); }
/* gc start sweeping phase */
/* Prepares reference processing before the concurrent sweep begins:
 * identifies finalizable references (or, when finref is ignored, the plain
 * weak-reference sets) and then the dead weak roots.
 * NOTE(review): the #ifndef BUILD_IN_REFERENT straddles the if/else braces —
 * when BUILD_IN_REFERENT is defined, the else-branch disappears entirely
 * and only the finref path remains. Be careful when editing. */
void gc_prepare_sweeping(GC *gc) {
  INFO2("gc.con.info", "Concurrent collection, current collection = " << gc->num_collections );
  /*FIXME: enable finref*/
  if(!IGNORE_FINREF ){
    /* Finref processing enabled: collect the objects with finalizers and
     * have collector 0 identify the finalizable-reference sets. */
    gc_set_obj_with_fin(gc);
    Collector* collector = gc->collectors[0];
    collector_identify_finref(collector);
#ifndef BUILD_IN_REFERENT
  } else {
    /* Finref ignored: gather the weak-reference sets recorded by the
     * concurrent collectors and update them, skipping finalizable refs. */
    conclctor_set_weakref_sets(gc);
    gc_update_weakref_ignore_finref(gc);
#endif
  }
  gc_identify_dead_weak_roots(gc);
}
/* Finishes a mostly-concurrent cycle when the heap fills up: terminates the
 * concurrent marking, performs a final stop-the-world mark, reclaims the
 * heap, and resumes the mutators. Returns GC_PARTIAL_PMSS. */
static unsigned int gc_con_heap_full_mostly_con( GC *gc )
{
  // we should enumerate the rootset only after the old rootset has been traced
  while( gc->gc_concurrent_status == GC_CON_START_MARKERS ) {
    vm_thread_yield();
  }

  int64 final_mark_begin = time_now();
  int suspend_disable_count = hythread_reset_suspend_disable();
  gc_set_rootset_type(ROOTSET_IS_OBJ);
  gc_prepare_rootset(gc);

  gc_set_barrier_function(WB_REM_NIL); // in the STW phase, so the write barrier can be removed at any time
  terminate_mostly_con_mark(); // terminate the current mostly-concurrent marking

  /* Stop-the-world phase (only the concurrent collectors are running at the
   * moment), so spinning here does not cost additional mutator performance. */
  while(gc->gc_concurrent_status == GC_CON_TRACING) {
    vm_thread_yield(); // let the unfinished markers run
  }

  // final marking phase
  gc_clear_conclctor_role(gc);
  wspace_mostly_con_final_mark(gc);

  /*just debugging*/
  int64 final_time = time_now() - final_mark_begin;
  INFO2("gc.scheduler", "[MOSTLY_CON] final marking time=" << final_time << " us");
  gc_ms_get_current_heap_usage((GC_MS *)gc);

  // start STW reclamation of the heap
  gc_con_update_stat_heap_exhausted(gc); // calculate util rate
  gc_reset_mutator_context(gc);
  if(!IGNORE_FINREF ) gc_set_obj_with_fin(gc);
  gc_ms_reclaim_heap((GC_MS*)gc);

  // reset after the partial stop-the-world collection
  gc_reset_after_con_collection(gc);
  set_con_nil(gc);

  vm_resume_threads_after();
  hythread_set_suspend_disable(suspend_disable_count);
  return GC_PARTIAL_PMSS;
}
// only when current sweep is set to false static void gc_partial_con_CMSS(GC *gc) { INFO2("gc.con.info", "[CMSS] Heap has been exhuasted, current collection = " << gc->num_collections ); gc_disable_alloc_obj_live(gc); // in the STW manner, so we can disable it at anytime before the mutators are resumed /*just debugging*/ gc_ms_get_current_heap_usage((GC_MS *)gc); Con_Collection_Statistics *con_collection_stat = gc_ms_get_con_collection_stat((GC_MS*)gc); unsigned int from_marking_end = (unsigned int)(time_now() - con_collection_stat->marking_end_time); INFO2("gc.con.info", "[CMSS] marking early time [" << from_marking_end << "] us" ); gc_con_update_stat_heap_exhausted(gc); // calculate util rate // start reclaiming heap, it will skip the marking phase gc_reset_mutator_context(gc); if(!IGNORE_FINREF ) gc_set_obj_with_fin(gc); gc_ms_reclaim_heap((GC_MS*)gc); // reset after partial stop the world collection gc_reset_after_con_collection(gc); set_con_nil(gc); }