Example #1
  void ImmixGC::collect_finish(GCData* data) {
    collect_scan(data);

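    // Mark everything recorded in the marked set. The set is swapped out
    // first so it can simply be deleted once its entries are processed.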
    ObjectArray* marked_set = object_memory_->swap_marked_set();
    for(ObjectArray::iterator oi = marked_set->begin();
        oi != marked_set->end();
        ++oi) {
      Object* obj = *oi;
      if(obj) saw_object(obj);
    }
    delete marked_set;

    // Users manipulate values accessible from the data* within an
    // RData without running a write barrier. Thus, if we see any RData
    // we must always scan it again here, because it could contain new pointers.
    //
    // We do this in a loop because the scanning might generate new entries
    // on the mark stack.
    do {
      for(Allocator<capi::Handle>::Iterator i(data->handles()->allocator()); i.more(); i.advance()) {
        capi::Handle* hdl = i.current();
        if(!hdl->in_use_p()) continue;
        if(hdl->is_rdata()) {
          Object* obj = hdl->object();
          if(obj->marked_p(object_memory_->mark())) {
            scan_object(obj);
          }
        }
      }
    } while(process_mark_stack());

    // We've now finished marking the entire object graph.
    // Clean weakrefs before the finalizer pass keeps additional objects
    // alive, so that objects which survive only to be finalized can't be
    // reached through weakrefs.
    clean_weakrefs();

    // Marking objects that need to be finalized can keep additional
    // objects alive, so we must process the mark stack again.
    do {
      walk_finalizers();
      scan_fibers(data, true);
    } while(process_mark_stack());

    // Remove unreachable locked objects still in the list
    if(data->threads()) {
      for(std::list<ManagedThread*>::iterator i = data->threads()->begin();
          i != data->threads()->end();
          ++i) {
        clean_locked_objects(*i, false);
      }
    }

    // Clear unreachable objects from the various remember sets
    unsigned int mark = object_memory_->mark();
    object_memory_->unremember_objects(mark);
  }
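
Both do/while loops above rely on the same worklist pattern: scanning an object can push new entries onto the mark stack, so the pass repeats until process_mark_stack() reports no work left. Below is a minimal, self-contained sketch of that pattern; the Node type and mark_all function are illustrative assumptions, not part of the code above.

  #include <vector>

  // Illustrative object: a mark bit plus outgoing references.
  struct Node {
    bool marked = false;
    std::vector<Node*> children;
  };

  // Mark everything reachable from `roots` using an explicit mark stack.
  // Scanning a node may generate new entries, so the loop runs until the
  // stack drains, which is the role process_mark_stack() plays above.
  void mark_all(const std::vector<Node*>& roots) {
    std::vector<Node*> mark_stack(roots.begin(), roots.end());
    while(!mark_stack.empty()) {
      Node* obj = mark_stack.back();
      mark_stack.pop_back();
      if(!obj || obj->marked) continue;
      obj->marked = true;
      for(Node* child : obj->children) {
        mark_stack.push_back(child);
      }
    }
  }
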
Example #2
  /**
   * Perform garbage collection on the young objects.
   */
  void BakerGC::collect(GCData* data, YoungCollectStats* stats) {

#ifdef HAVE_VALGRIND_H
    (void)VALGRIND_MAKE_MEM_DEFINED(next->start().as_int(), next->size());
    (void)VALGRIND_MAKE_MEM_DEFINED(current->start().as_int(), current->size());
#endif
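    // Make sure both semispaces are readable and writable before copying.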
    mprotect(next->start(), next->size(), PROT_READ | PROT_WRITE);
    mprotect(current->start(), current->size(), PROT_READ | PROT_WRITE);

    check_growth_start();

    ObjectArray *current_rs = object_memory_->swap_remember_set();

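    // Reset the per-collection counters.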
    total_objects = 0;

    copy_spills_ = 0;
    reset_promoted();

    // Start by copying objects in the remember set
    for(ObjectArray::iterator oi = current_rs->begin();
        oi != current_rs->end();
        ++oi) {
      Object* tmp = *oi;
      // unremember_object writes a NULL into the set to remove an object,
      // so we don't have to compact the set in unremember
      if(tmp) {
        // Remove the Remember bit, since we're clearing the set.
        tmp->clear_remember();
        scan_object(tmp);
      }
    }

    delete current_rs;

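    // Scan the pending mark set and the mature generation's mark stack,
    // since either may reference young objects.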
    scan_mark_set();
    scan_mature_mark_stack();

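    // Scan the root set, updating each root with its object's new location.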
    for(Roots::Iterator i(data->roots()); i.more(); i.advance()) {
      i->set(saw_object(i->get()));
    }

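    // Scan the roots held by each managed thread.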
    if(data->threads()) {
      for(std::list<ManagedThread*>::iterator i = data->threads()->begin();
          i != data->threads()->end();
          ++i) {
        scan(*i, true);
      }
    }

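    // Walk the C-API handles, updating any that reference young objects.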
    for(Allocator<capi::Handle>::Iterator i(data->handles()->allocator()); i.more(); i.advance()) {
      if(!i->in_use_p()) continue;

      if(!i->weak_p() && i->object()->young_object_p()) {
        i->set_object(saw_object(i->object()));

      // Users manipulate values accessible from the data* within an
      // RData without running a write barrier. Thus, if we see a mature
      // RData, we must always scan it because it could contain
      // young pointers.
      } else if(!i->object()->young_object_p() && i->is_rdata()) {
        scan_object(i->object());
      }
    }

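    // Global C-API handle locations may also reference young objects, so
    // update them as well.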
    std::list<capi::GlobalHandle*>* gh = data->global_handle_locations();

    if(gh) {
      for(std::list<capi::GlobalHandle*>::iterator i = gh->begin();
          i != gh->end();
          ++i) {
        capi::GlobalHandle* global_handle = *i;
        capi::Handle** loc = global_handle->handle();
        if(capi::Handle* hdl = *loc) {
          if(!REFERENCE_P(hdl)) continue;
          if(hdl->valid_p()) {
            Object* obj = hdl->object();
            if(obj && obj->reference_p() && obj->young_object_p()) {
              hdl->set_object(saw_object(obj));
            }
          } else {
            std::cerr << "Detected bad handle checking global capi handles\n";
          }
        }
      }
    }

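    // Give the JIT a chance to scan the references it holds.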
#ifdef ENABLE_LLVM
    if(LLVMState* ls = data->llvm_state()) ls->gc_scan(this);
#endif

    // Handle all promotions to non-young space that occurred.
    handle_promotions();

    assert(fully_scanned_p());
    // We've now seen the entire object graph of normal, live references.
    // Next we handle the special references, such as weakrefs and finalizers.

    // Check any weakrefs and replace dead objects with nil. This must happen
    // before walking finalizers so objects kept alive only for finalization
    // can't be reached through weakrefs.
    clean_weakrefs(true);

    do {
      // Objects with finalizers must be kept alive until the finalizers have
      // run.
      walk_finalizers();
      // Scan any fibers that aren't running but still active
      scan_fibers(data, false);
      handle_promotions();
    } while(!promoted_stack_.empty() && !fully_scanned_p());

    // Remove unreachable locked objects still in the list
    if(data->threads()) {
      for(std::list<ManagedThread*>::iterator i = data->threads()->begin();
          i != data->threads()->end();
          ++i) {
        clean_locked_objects(*i, true);
      }
    }

    // Update the pending mark set to remove unreachable objects.
    update_mark_set();

    // Update the existing mark stack of the mature gen because young
    // objects might have moved.
    update_mature_mark_stack();

    // Update the weak ref set to remove unreachable weak refs.
    update_weak_refs_set();

    // Swap the two semispace halves: next, which now holds the survivors,
    // becomes current.
    Heap* x = next;
    next = current;
    current = x;

    if(stats) {
      stats->lifetime = lifetime_;
      stats->percentage_used = current->percentage_used();
      stats->promoted_objects = promoted_objects_;
      stats->excess_objects = copy_spills_;
    }

    // Tune the age at which promotion occurs
    if(autotune_lifetime_) {
      double used = current->percentage_used();
      if(used > cOverFullThreshold) {
        if(tune_threshold_ >= cOverFullTimes) {
          if(lifetime_ > cMinimumLifetime) lifetime_--;
        } else {
          tune_threshold_++;
        }
      } else if(used < cUnderFullThreshold) {
        if(tune_threshold_ <= cUnderFullTimes) {
          if(lifetime_ < cMaximumLifetime) lifetime_++;
        } else {
          tune_threshold_--;
        }
      } else if(tune_threshold_ > 0) {
        tune_threshold_--;
      } else if(tune_threshold_ < 0) {
        tune_threshold_++;
      } else if(tune_threshold_ == 0) {
        if(lifetime_ < original_lifetime_) {
          lifetime_++;
        } else if(lifetime_ > original_lifetime_) {
          lifetime_--;
        }
      }
    }

  }
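
The remember set consumed at the top of this collection is filled by a write barrier on pointer stores: when a mature object is made to point at a young object, the mature object is recorded so the next young collection can treat it as an extra root. The sketch below shows that barrier in its simplest form; Obj, remember_set, and write_barrier are illustrative assumptions, not names from the code above.

  #include <vector>

  // Illustrative two-generation object: an age flag, a remembered flag,
  // and outgoing references.
  struct Obj {
    bool young = true;
    bool remembered = false;
    std::vector<Obj*> fields;
  };

  std::vector<Obj*> remember_set;

  // Run on every pointer store of `value` into a field of `holder`.
  // If a mature object starts referencing a young object, record the
  // holder once so the young collector can scan it as an extra root,
  // as the loop over current_rs does at the top of BakerGC::collect.
  void write_barrier(Obj* holder, Obj* value) {
    if(!holder->young && value && value->young && !holder->remembered) {
      holder->remembered = true;
      remember_set.push_back(holder);
    }
  }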