Ejemplo n.º 1
0
/* Emit a human-readable summary of the last collection's statistics
 * via the "gc.space" trace channel. Prints NOS stats for a minor
 * collection, MOS stats otherwise, and appends LOS stats whenever the
 * large-object space was collected as part of this GC cycle.
 */
void gc_gen_stats_verbose(GC_Gen* gc)
{
  GC_Gen_Stats* stats = gc->stats;
  /* Cache once; also used below to decide whether to print LOS info.
   * (Previously this local was declared but never read.) */
  Boolean is_los_collected = stats->is_los_collected;

  if (collect_is_minor()){
    TRACE2("gc.space", "GC: NOS Collection stats: "
      <<"\nGC: " << (gc_is_gen_mode()?"generational":"nongenerational")
      <<"\nGC: collection algo: " << (minor_is_semispace()?"semi-space":"partial-forward")
      <<"\nGC: num surviving objs: " << stats->nos_surviving_obj_num_minor
      <<"\nGC: size surviving objs: " << verbose_print_size(stats->nos_surviving_obj_size_minor)
      <<"\nGC: surviving ratio: " << (int)(stats->nos_surviving_ratio_minor*100) << "%\n");
  }else{
    TRACE2("gc.space", "GC: MOS Collection stats: "
      <<"\nGC: collection algo: " << (major_is_marksweep()?"mark-sweep":"slide compact")
      <<"\nGC: num surviving objs: "<<stats->nos_mos_suviving_obj_num_major
      <<"\nGC: size surviving objs: "<<verbose_print_size(stats->nos_mos_suviving_obj_size_major)
      <<"\nGC: surviving ratio: "<<(int)(stats->nos_mos_suviving_ratio_major*100)<<"%\n");
  }

  if(is_los_collected) { /* if LOS was collected, also output LOS-related info */
    TRACE2("gc.space", "GC: Lspace Collection stats: "
      <<"\nGC: collection algo: "<<(collect_is_major()?"slide compact":"mark sweep")
      <<"\nGC: num surviving objs: "<<stats->los_suviving_obj_num
      <<"\nGC: size surviving objs: "<<verbose_print_size(stats->los_suviving_obj_size)
      <<"\nGC: surviving ratio: "<<(int)(stats->los_surviving_ratio*100)<<"%\n");
  }

}
Ejemplo n.º 2
0
/* In two cases mark-sweep needs fixing repointed refs:
 * 1. ms with compaction
 * 2. ms as a mos collection algorithm
 *
 * Rewrites the slot *p_ref so it points at the post-move location of the
 * referenced object. If p_ref itself lives inside a moved object (i.e. it
 * is a referent field of a moved reference object), the slot address is
 * first relocated before the slot contents are fixed.
 */
static inline void moving_mark_sweep_update_ref(GC *gc, REF *p_ref, Boolean double_fix)
{
  /* There are only two kinds of p_ref being added into finref_repset_pool:
   * 1. p_ref is in a vector block from one finref pool;
   * 2. p_ref is a referent field.
   * So if p_ref belongs to heap, it must be a referent field pointer.
   * Objects except a tree root which are resurrected need not be recorded in finref_repset_pool.
   */
  if(address_belongs_to_gc_heap((void*)p_ref, gc)){
    /* Recover the address of the reference object that contains this
     * referent field, so we can test whether that object was moved. */
    unsigned int offset = get_gc_referent_offset();
    Partial_Reveal_Object *p_old_ref = (Partial_Reveal_Object*)((POINTER_SIZE_INT)p_ref - offset);
    if(obj_is_fw_in_oi(p_old_ref)){
      /* The containing object was forwarded; follow its forwarding pointer. */
      Partial_Reveal_Object *p_new_ref = obj_get_fw_in_oi(p_old_ref);
      /* Only major collection in MS Gen GC might need double_fix.
       * Double fixing happens when both forwarding and compaction happen.
       */
      if(double_fix && obj_is_fw_in_oi(p_new_ref)){
        assert(major_is_marksweep());
        p_new_ref = obj_get_fw_in_oi(p_new_ref);
        assert(address_belongs_to_gc_heap(p_new_ref, gc));
      }
      /* Re-derive the slot address inside the moved containing object. */
      p_ref = (REF*)((POINTER_SIZE_INT)p_new_ref + offset);
    }
  }
  Partial_Reveal_Object *p_obj = read_slot(p_ref);
  /* assert(obj_need_move(gc, p_obj));
   * This assertion is commented out because it assert(!obj_is_dead(gc, p_obj)).
   * When gc_fix_rootset is invoked, mark bit and alloc bit have been flipped in Mark-Sweep,
   * so this assertion will fail.
   * But for sure p_obj here must be an one needing moving.
   */
  /* Follow the referenced object's forwarding pointer (it is known to have moved). */
  p_obj = obj_get_fw_in_oi(p_obj);
  /* Only major collection in MS Gen GC might need double_fix.
   * Double fixing happens when both forwarding and compaction happen.
   */
  if(double_fix && obj_is_fw_in_oi(p_obj)){
    assert(major_is_marksweep());
    p_obj = obj_get_fw_in_oi(p_obj);
    assert(address_belongs_to_gc_heap(p_obj, gc));
  }
  /* Store the updated (possibly twice-relocated) address back into the slot. */
  write_slot(p_ref, p_obj);
}
Ejemplo n.º 3
0
// Resurrect the obj tree whose root is the obj which p_ref points to.
// Works in three phases:
//   1. pick the trace_object() function matching the current collection kind;
//   2. seed the mark task pool with the root ref (or object);
//   3. drain the task pool, tracing each entry until empty or a
//      resurrection fallback forces an early return.
static inline void resurrect_obj_tree(Collector *collector, REF *p_ref)
{
  GC *gc = collector->gc;
  GC_Metadata *metadata = gc->metadata;
  Partial_Reveal_Object *p_obj = read_slot(p_ref);
  assert(p_obj && gc_obj_is_dead(gc, p_obj));
  
  /* Minor/fallback collections trace by slot address; major/mark-sweep
   * collections trace by object address (p_ref_or_obj is switched below). */
  void *p_ref_or_obj = p_ref;
  Trace_Object_Func trace_object;
  
  /* set trace_object() function */
  if(collect_is_minor()){
    if(gc_is_gen_mode()){
      if(minor_is_forward())
        trace_object = trace_obj_in_gen_fw;
      else if(minor_is_semispace())
        trace_object = trace_obj_in_gen_ss;
      else 
        assert(0);
    }else{
      if(minor_is_forward())
        trace_object = trace_obj_in_nongen_fw;
      else if(minor_is_semispace())
        trace_object = trace_obj_in_nongen_ss;
      else 
        assert(0);
    }
  } else if(collect_is_major_normal() || !gc_has_nos()){
    p_ref_or_obj = p_obj;
    if(gc_has_space_tuner(gc) && (gc->tuner->kind != TRANS_NOTHING)){
      trace_object = trace_obj_in_space_tune_marking;
      /* Space tuning needs live-size accounting for the resurrected root. */
      unsigned int obj_size = vm_object_size(p_obj);
#ifdef USE_32BITS_HASHCODE
      /* An object with a set hashcode carries one extra alignment slot. */
      obj_size += hashcode_is_set(p_obj) ? GC_OBJECT_ALIGNMENT : 0;
#endif
      if(!obj_belongs_to_space(p_obj, gc_get_los((GC_Gen*)gc))){
        collector->non_los_live_obj_size += obj_size;
        collector->segment_live_size[SIZE_TO_SEGMENT_INDEX(obj_size)] += obj_size;
      } else {
        collector->los_live_obj_size += round_up_to_size(obj_size, KB); 
      }
    } else if(!gc_has_nos()){
      trace_object = trace_obj_in_ms_marking;
    } else {
      trace_object = trace_obj_in_normal_marking;
    }
  } else if(collect_is_fallback()){
    if(major_is_marksweep())
      trace_object = trace_obj_in_ms_fallback_marking;
    else
      trace_object = trace_obj_in_fallback_marking;
  } else {
    assert(major_is_marksweep());
    p_ref_or_obj = p_obj;
   if( gc->gc_concurrent_status == GC_CON_NIL ) 
      trace_object = trace_obj_in_ms_marking;
    else
      trace_object = trace_obj_in_ms_concurrent_mark;
  }
  
  /* Seed the mark task pool with the root entry. */
  collector->trace_stack = free_task_pool_get_entry(metadata);
  collector_tracestack_push(collector, p_ref_or_obj);
  pool_put_entry(metadata->mark_task_pool, collector->trace_stack);
  
  /* Drain: trace_object() may push more entries onto collector->trace_stack,
   * which get recycled through mark_task_pool until it is empty. */
  collector->trace_stack = free_task_pool_get_entry(metadata);
  Vector_Block *task_block = pool_get_entry(metadata->mark_task_pool);
  while(task_block){
    POINTER_SIZE_INT *iter = vector_block_iterator_init(task_block);
    while(!vector_block_iterator_end(task_block, iter)){
      void *p_ref_or_obj = (void*)*iter;
      assert(((collect_is_minor()||collect_is_fallback()) && *(Partial_Reveal_Object **)p_ref_or_obj)
              || ((collect_is_major_normal()||major_is_marksweep()||!gc_has_nos()) && p_ref_or_obj));
      trace_object(collector, p_ref_or_obj);
      if(collector->result == FALSE)  break; /* Resurrection fallback happens; force return */
      
      iter = vector_block_iterator_advance(task_block, iter);
    }
    /* Block fully processed (or aborted): return it to the free pool. */
    vector_stack_clear(task_block);
    pool_put_entry(metadata->free_task_pool, task_block);
    
    if(collector->result == FALSE){
      /* Abort: discard all outstanding work before returning. */
      gc_task_pool_clear(metadata->mark_task_pool);
      break; /* force return */
    }
    
    task_block = pool_get_entry(metadata->mark_task_pool);
  }
  
  /* Release the collector's private trace stack. */
  task_block = (Vector_Block*)collector->trace_stack;
  vector_stack_clear(task_block);
  pool_put_entry(metadata->free_task_pool, task_block);
  collector->trace_stack = NULL;
}
Ejemplo n.º 4
0
/* Heap-verifier object scan: mark p_obj in its vtable, verify its header,
 * record verification info, then scan every reference slot it contains
 * (array elements or instance fields, plus the referent field of soft
 * references during minor collections).
 */
static FORCE_INLINE void scan_object(Heap_Verifier* heap_verifier, Partial_Reveal_Object *p_obj) 
{
  GC_Verifier* gc_verifier = heap_verifier->gc_verifier;

#if !defined(USE_UNIQUE_MARK_SWEEP_GC) && !defined(USE_UNIQUE_MOVE_COMPACT_GC)
  /* Before a fallback collection, a NOS object may already have been
   * forwarded; verify and follow the forwarding pointer first. */
  if(gc_verifier->is_before_fallback_collection) {
    if(obj_belongs_to_nos(p_obj) && obj_is_fw_in_oi(p_obj)){
      assert(obj_get_vt(p_obj) == obj_get_vt(obj_get_fw_in_oi(p_obj)));
      p_obj = obj_get_fw_in_oi(p_obj);
      assert(p_obj);
    }
  }
#endif
  
  /* Already visited in this verification pass: nothing to do. */
  if(!obj_mark_in_vt(p_obj)) return;

  if( !major_is_marksweep() && p_obj >= los_boundary ){
    Block_Header* block = GC_BLOCK_HEADER(p_obj);
    if( heap_verifier->is_before_gc)  block->num_live_objs++;
    /* we can't set block->num_live_objs = 0 if !is_before_gc, because some blocks may be freed hence not
        visited after GC. So we should reset it in GC space reset functions. */
  }

  verify_object_header(p_obj, heap_verifier); 
  verifier_update_verify_info(p_obj, heap_verifier);

   /*FIXME: */
  /* Objects without reference fields carry nothing more to scan. */
  if (!object_has_ref_field(p_obj)) return;
    
  REF* p_ref;

  if (object_is_array(p_obj)) {  
    /* Reference array: scan each element slot. */
    Partial_Reveal_Array* array = (Partial_Reveal_Array*)p_obj;
    unsigned int array_length = array->array_len; 
    p_ref = (REF*)((POINTER_SIZE_INT)array + (int)array_first_element_offset(array));

    for (unsigned int i = 0; i < array_length; i++) {
      scan_slot(heap_verifier, p_ref+i);
    }   

  }else{ 
    /* Regular object: iterate its declared reference fields. */
    unsigned int num_refs = object_ref_field_num(p_obj);
    int* ref_iterator = object_ref_iterator_init(p_obj);
 
    for(unsigned int i=0; i<num_refs; i++){  
      p_ref = object_ref_iterator_get(ref_iterator+i, p_obj);  
      scan_slot(heap_verifier, p_ref);
    }

#ifndef BUILD_IN_REFERENT
    /* Soft references: their referent field is treated as a normal
     * reference during minor collections, so scan it too. */
     WeakReferenceType type = special_reference_type(p_obj);
    if(type == SOFT_REFERENCE && verifier_collect_is_minor(gc_verifier)){
      p_ref = obj_get_referent_field(p_obj);
      scan_slot(heap_verifier, p_ref);
    } 
#endif  
  }
  return;
}