Example #1
void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  EventMark m("1 mark object");
  TraceTime tm("phase 1", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace(" 1");

  SharedHeap* sh = SharedHeap::heap();

  sh->process_strong_roots(true,  // activate StrongRootsScope
                           true,  // Collecting permanent generation.
                           SharedHeap::SO_SystemClasses,
                           &GenMarkSweep::follow_root_closure,
                           &GenMarkSweep::follow_code_root_closure,
                           &GenMarkSweep::follow_root_closure);

  // Process reference objects found during marking
  ReferenceProcessor* rp = GenMarkSweep::ref_processor();
  rp->setup_policy(clear_all_softrefs);
  rp->process_discovered_references(&GenMarkSweep::is_alive,
                                    &GenMarkSweep::keep_alive,
                                    &GenMarkSweep::follow_stack_closure,
                                    NULL);

  // Follow system dictionary roots and unload classes
  bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);
  assert(GenMarkSweep::_marking_stack.is_empty(),
         "stack should be empty by now");

  // Follow code cache roots (has to be done after system dictionary,
  // assumes all live klasses are marked)
  CodeCache::do_unloading(&GenMarkSweep::is_alive,
                          &GenMarkSweep::keep_alive,
                          purged_class);
  GenMarkSweep::follow_stack();

  // Update subklass/sibling/implementor links of live klasses
  GenMarkSweep::follow_weak_klass_links();
  assert(GenMarkSweep::_marking_stack.is_empty(),
         "stack should be empty by now");

  // Visit memoized MDO's and clear any unmarked weak refs
  GenMarkSweep::follow_mdo_weak_refs();
  assert(GenMarkSweep::_marking_stack.is_empty(), "just drained");


  // Visit symbol and interned string tables and delete unmarked oops
  SymbolTable::unlink(&GenMarkSweep::is_alive);
  StringTable::unlink(&GenMarkSweep::is_alive);

  assert(GenMarkSweep::_marking_stack.is_empty(),
         "stack should be empty by now");
}
Example #2
void DefNewGeneration::collect(bool   full,
                               bool   clear_all_soft_refs,
                               size_t size,
                               bool   is_tlab) {
  assert(full || size > 0, "otherwise we don't want to collect");

  GenCollectedHeap* gch = GenCollectedHeap::heap();

  _gc_timer->register_gc_start();
  DefNewTracer gc_tracer;
  gc_tracer.report_gc_start(gch->gc_cause(), _gc_timer->gc_start());

  _next_gen = gch->next_gen(this);

  // If the next generation is too full to accommodate promotion
  // from this generation, pass on collection; let the next generation
  // do it.
  if (!collection_attempt_is_safe()) {
    if (Verbose && PrintGCDetails) {
      gclog_or_tty->print(" :: Collection attempt not safe :: ");
    }
    gch->set_incremental_collection_failed(); // Slight lie: we did not even attempt one
    return;
  }
  assert(to()->is_empty(), "Else not collection_attempt_is_safe");

  init_assuming_no_promotion_failure();

  GCTraceTime t1(GCCauseString("GC", gch->gc_cause()), PrintGC && !PrintGCDetails, true, NULL);
  // Capture heap used before collection (for printing).
  size_t gch_prev_used = gch->used();

  gch->trace_heap_before_gc(&gc_tracer);

  SpecializationStats::clear();

  // These can be shared for all code paths
  IsAliveClosure is_alive(this);
  ScanWeakRefClosure scan_weak_ref(this);

  age_table()->clear();
  to()->clear(SpaceDecorator::Mangle);

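  // Prepare the remembered set for a sequential (non-parallel)
  // iteration over younger-generation references.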
  gch->rem_set()->prepare_for_younger_refs_iterate(false);

  assert(gch->no_allocs_since_save_marks(0),
         "save marks have not been newly set.");

  // Not very pretty.
  CollectorPolicy* cp = gch->collector_policy();

  FastScanClosure fsc_with_no_gc_barrier(this, false);
  FastScanClosure fsc_with_gc_barrier(this, true);

  KlassScanClosure klass_scan_closure(&fsc_with_no_gc_barrier,
                                      gch->rem_set()->klass_rem_set());

  set_promo_failure_scan_stack_closure(&fsc_with_no_gc_barrier);
  FastEvacuateFollowersClosure evacuate_followers(gch, _level, this,
                                                  &fsc_with_no_gc_barrier,
                                                  &fsc_with_gc_barrier);

  assert(gch->no_allocs_since_save_marks(0),
         "save marks have not been newly set.");

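  // Combine the root-scanning options: all classes, interned strings,
  // and the code cache are treated as strong roots for this scavenge.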
  int so = SharedHeap::SO_AllClasses | SharedHeap::SO_Strings | SharedHeap::SO_CodeCache;

  gch->gen_process_strong_roots(_level,
                                true,  // Process younger gens, if any,
                                       // as strong roots.
                                true,  // activate StrongRootsScope
                                true,  // is scavenging
                                SharedHeap::ScanningOption(so),
                                &fsc_with_no_gc_barrier,
                                true,   // walk *all* scavengable nmethods
                                &fsc_with_gc_barrier,
                                &klass_scan_closure);

  // "evacuate followers".
  evacuate_followers.do_void();

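  // Process (weak) reference objects found during scavenge.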
  FastKeepAliveClosure keep_alive(this, &scan_weak_ref);
  ReferenceProcessor* rp = ref_processor();
  rp->setup_policy(clear_all_soft_refs);
  const ReferenceProcessorStats& stats =
    rp->process_discovered_references(&is_alive, &keep_alive, &evacuate_followers,
                                      NULL, _gc_timer);
  gc_tracer.report_gc_reference_stats(stats);

  if (!_promotion_failed) {
    // Swap the survivor spaces.
    eden()->clear(SpaceDecorator::Mangle);
    from()->clear(SpaceDecorator::Mangle);
    if (ZapUnusedHeapArea) {
      // This is now done here because of the piece-meal mangling which
      // can check for valid mangling at intermediate points in the
      // collection(s).  When a minor collection fails to collect
      // sufficient space, resizing of the young generation can occur
      // and redistribute the spaces in the young generation.  Mangle
      // here so that unzapped regions don't get distributed to
      // other spaces.
      to()->mangle_unused_area();
    }
    swap_spaces();

    assert(to()->is_empty(), "to space should be empty now");

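    // Recompute the tenuring threshold from the age table and the
    // current survivor capacity.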
    adjust_desired_tenuring_threshold();

    // A successful scavenge should restart the GC overhead limit
    // count, which is used for full GCs.
    AdaptiveSizePolicy* size_policy = gch->gen_policy()->size_policy();
    size_policy->reset_gc_overhead_limit_count();
    if (PrintGC && !PrintGCDetails) {
      gch->print_heap_change(gch_prev_used);
    }
    assert(!gch->incremental_collection_failed(), "Should be clear");
  } else {
    assert(_promo_failure_scan_stack.is_empty(), "post condition");
    _promo_failure_scan_stack.clear(true); // Clear cached segments.

    remove_forwarding_pointers();
    if (PrintGCDetails) {
      gclog_or_tty->print(" (promotion failed) ");
    }
    // Add to-space to the list of space to compact
    // when a promotion failure has occurred.  In that
    // case there can be live objects in to-space
    // as a result of a partial evacuation of eden
    // and from-space.
    swap_spaces();   // For uniformity wrt ParNewGeneration.
    from()->set_next_compaction_space(to());
    gch->set_incremental_collection_failed();

    // Inform the next generation that a promotion failure occurred.
    _next_gen->promotion_failure_occurred();
    gc_tracer.report_promotion_failed(_promotion_failed_info);

    // Reset the PromotionFailureALot counters.
    NOT_PRODUCT(Universe::heap()->reset_promotion_should_fail();)
  }
  // set new iteration safe limit for the survivor spaces
  from()->set_concurrent_iteration_safe_limit(from()->top());
  to()->set_concurrent_iteration_safe_limit(to()->top());
  SpecializationStats::print();

  // We need to use a monotonically non-decreasing time in ms
  // or we will see time-warp warnings and os::javaTimeMillis()
  // does not guarantee monotonicity.
  jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
  update_time_of_last_gc(now);

  gch->trace_heap_after_gc(&gc_tracer);
  gc_tracer.report_tenuring_threshold(tenuring_threshold());

  _gc_timer->register_gc_end();

  gc_tracer.report_gc_end(_gc_timer->gc_end(), _gc_timer->time_partitions());
}
Example #3
void DefNewGeneration::collect(bool   full,
                               bool   clear_all_soft_refs,
                               size_t size,
                               bool   is_tlab) {
  assert(full || size > 0, "otherwise we don't want to collect");
  GenCollectedHeap* gch = GenCollectedHeap::heap();
  _next_gen = gch->next_gen(this);
  assert(_next_gen != NULL,
    "This must be the youngest gen, and not the only gen");

  // If the next generation is too full to accommodate promotion
  // from this generation, pass on collection; let the next generation
  // do it.
  if (!collection_attempt_is_safe()) {
    gch->set_incremental_collection_will_fail();
    return;
  }
  assert(to()->is_empty(), "Else not collection_attempt_is_safe");

  init_assuming_no_promotion_failure();

  TraceTime t1("GC", PrintGC && !PrintGCDetails, true, gclog_or_tty);
  // Capture heap used before collection (for printing).
  size_t gch_prev_used = gch->used();

  SpecializationStats::clear();

  // These can be shared for all code paths
  IsAliveClosure is_alive(this);
  ScanWeakRefClosure scan_weak_ref(this);

  age_table()->clear();
  to()->clear(SpaceDecorator::Mangle);

  gch->rem_set()->prepare_for_younger_refs_iterate(false);

  assert(gch->no_allocs_since_save_marks(0),
         "save marks have not been newly set.");

  // Not very pretty.
  CollectorPolicy* cp = gch->collector_policy();

  FastScanClosure fsc_with_no_gc_barrier(this, false);
  FastScanClosure fsc_with_gc_barrier(this, true);

  set_promo_failure_scan_stack_closure(&fsc_with_no_gc_barrier);
  FastEvacuateFollowersClosure evacuate_followers(gch, _level, this,
                                                  &fsc_with_no_gc_barrier,
                                                  &fsc_with_gc_barrier);

  assert(gch->no_allocs_since_save_marks(0),
         "save marks have not been newly set.");

  gch->gen_process_strong_roots(_level,
                                true,  // Process younger gens, if any,
                                       // as strong roots.
                                true,  // activate StrongRootsScope
                                false, // not collecting perm generation.
                                SharedHeap::SO_AllClasses,
                                &fsc_with_no_gc_barrier,
                                true,   // walk *all* scavengable nmethods
                                &fsc_with_gc_barrier);

  // "evacuate followers".
  evacuate_followers.do_void();

  FastKeepAliveClosure keep_alive(this, &scan_weak_ref);
  ReferenceProcessor* rp = ref_processor();
  rp->setup_policy(clear_all_soft_refs);
  rp->process_discovered_references(&is_alive, &keep_alive, &evacuate_followers,
                                    NULL);
  if (!promotion_failed()) {
    // Swap the survivor spaces.
    eden()->clear(SpaceDecorator::Mangle);
    from()->clear(SpaceDecorator::Mangle);
    if (ZapUnusedHeapArea) {
      // This is now done here because of the piece-meal mangling which
      // can check for valid mangling at intermediate points in the
      // collection(s).  When a minor collection fails to collect
      // sufficient space, resizing of the young generation can occur
      // and redistribute the spaces in the young generation.  Mangle
      // here so that unzapped regions don't get distributed to
      // other spaces.
      to()->mangle_unused_area();
    }
    swap_spaces();

    assert(to()->is_empty(), "to space should be empty now");

    // Set the desired survivor size to half the real survivor space
    _tenuring_threshold =
      age_table()->compute_tenuring_threshold(to()->capacity()/HeapWordSize);

    if (PrintGC && !PrintGCDetails) {
      gch->print_heap_change(gch_prev_used);
    }
  } else {
    assert(HandlePromotionFailure,
      "Should not be here unless promotion failure handling is on");
    assert(_promo_failure_scan_stack != NULL &&
      _promo_failure_scan_stack->length() == 0, "post condition");

    // Deallocate the stack and its elements.
    delete _promo_failure_scan_stack;
    _promo_failure_scan_stack = NULL;

    remove_forwarding_pointers();
    if (PrintGCDetails) {
      gclog_or_tty->print(" (promotion failed) ");
    }
    // Add to-space to the list of space to compact
    // when a promotion failure has occurred.  In that
    // case there can be live objects in to-space
    // as a result of a partial evacuation of eden
    // and from-space.
    swap_spaces();   // For the sake of uniformity wrt ParNewGeneration::collect().
    from()->set_next_compaction_space(to());
    gch->set_incremental_collection_will_fail();

    // Inform the next generation that a promotion failure occurred.
    _next_gen->promotion_failure_occurred();

    // Reset the PromotionFailureALot counters.
    NOT_PRODUCT(Universe::heap()->reset_promotion_should_fail();)
  }
  // set new iteration safe limit for the survivor spaces
  from()->set_concurrent_iteration_safe_limit(from()->top());
  to()->set_concurrent_iteration_safe_limit(to()->top());
  SpecializationStats::print();
  update_time_of_last_gc(os::javaTimeMillis());
}
Example #4
void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  GCTraceTime(Info, gc, phases) tm("Phase 1: Mark live objects", gc_timer());

  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  MarkingCodeBlobClosure follow_code_closure(&GenMarkSweep::follow_root_closure, !CodeBlobToOopClosure::FixRelocations);
  {
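    // Serial full GC: a single worker thread processes the roots.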
    G1RootProcessor root_processor(g1h, 1);
    if (ClassUnloading) {
      root_processor.process_strong_roots(&GenMarkSweep::follow_root_closure,
                                          &GenMarkSweep::follow_cld_closure,
                                          &follow_code_closure);
    } else {
      root_processor.process_all_roots_no_string_table(
                                          &GenMarkSweep::follow_root_closure,
                                          &GenMarkSweep::follow_cld_closure,
                                          &follow_code_closure);
    }
  }

  {
    GCTraceTime(Debug, gc, phases) trace("Reference Processing", gc_timer());

    // Process reference objects found during marking
    ReferenceProcessor* rp = GenMarkSweep::ref_processor();
    assert(rp == g1h->ref_processor_stw(), "Sanity");

    rp->setup_policy(clear_all_softrefs);
    const ReferenceProcessorStats& stats =
        rp->process_discovered_references(&GenMarkSweep::is_alive,
                                          &GenMarkSweep::keep_alive,
                                          &GenMarkSweep::follow_stack_closure,
                                          NULL,
                                          gc_timer());
    gc_tracer()->report_gc_reference_stats(stats);
  }

  // This is the point where the entire marking should have completed.
  assert(GenMarkSweep::_marking_stack.is_empty(), "Marking should have completed");

  if (ClassUnloading) {
    GCTraceTime(Debug, gc, phases) trace("Class Unloading", gc_timer());

    // Unload classes and purge the SystemDictionary.
    bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);

    // Unload nmethods.
    CodeCache::do_unloading(&GenMarkSweep::is_alive, purged_class);

    // Prune dead klasses from subklass/sibling/implementor lists.
    Klass::clean_weak_klass_links(&GenMarkSweep::is_alive);
  }

  {
    GCTraceTime(Debug, gc, phases) trace("Scrub String and Symbol Tables", gc_timer());
    // Delete entries for dead interned strings and clean up unreferenced symbols in the symbol table.
    g1h->unlink_string_and_symbol_table(&GenMarkSweep::is_alive);
  }

  if (G1StringDedup::is_enabled()) {
    GCTraceTime(Debug, gc, phases) trace("String Deduplication Unlink", gc_timer());
    G1StringDedup::unlink(&GenMarkSweep::is_alive);
  }

  if (VerifyDuringGC) {
    HandleMark hm;  // handle scope
#if defined(COMPILER2) || INCLUDE_JVMCI
    DerivedPointerTableDeactivate dpt_deact;
#endif
    g1h->prepare_for_verify();
    // Note: we can verify only the heap here. When an object is
    // marked, the previous value of the mark word (including
    // identity hash values, ages, etc) is preserved, and the mark
    // word is set to markOop::marked_value - effectively removing
    // any hash values from the mark word. These hash values are
    // used when verifying the dictionaries and so removing them
    // from the mark word can make verification of the dictionaries
    // fail. At the end of the GC, the original mark word values
    // (including hash values) are restored to the appropriate
    // objects.
    GCTraceTime(Info, gc, verify)("During GC (full)");
    g1h->verify(VerifyOption_G1UseMarkWord);
  }

  gc_tracer()->report_object_count_after_gc(&GenMarkSweep::is_alive);
}
Example #5
void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
    // Recursively traverse all live objects and mark them
    GCTraceTime tm("phase 1", G1Log::fine() && Verbose, true, gc_timer());
    GenMarkSweep::trace(" 1");

    SharedHeap* sh = SharedHeap::heap();

    // Need cleared claim bits for the strong roots processing
    ClassLoaderDataGraph::clear_claimed_marks();

    sh->process_strong_roots(true,  // activate StrongRootsScope
                             false, // not scavenging.
                             SharedHeap::SO_SystemClasses,
                             &GenMarkSweep::follow_root_closure,
                             &GenMarkSweep::follow_code_root_closure,
                             &GenMarkSweep::follow_klass_closure);

    // Process reference objects found during marking
    ReferenceProcessor* rp = GenMarkSweep::ref_processor();
    assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Sanity");

    rp->setup_policy(clear_all_softrefs);
    const ReferenceProcessorStats& stats =
        rp->process_discovered_references(&GenMarkSweep::is_alive,
                                          &GenMarkSweep::keep_alive,
                                          &GenMarkSweep::follow_stack_closure,
                                          NULL,
                                          gc_timer());
    gc_tracer()->report_gc_reference_stats(stats);


    // This is the point where the entire marking should have completed.
    assert(GenMarkSweep::_marking_stack.is_empty(), "Marking should have completed");

    // Unload classes and purge the SystemDictionary.
    bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);

    // Unload nmethods.
    CodeCache::do_unloading(&GenMarkSweep::is_alive, purged_class);

    // Prune dead klasses from subklass/sibling/implementor lists.
    Klass::clean_weak_klass_links(&GenMarkSweep::is_alive);

    // Delete entries for dead interned strings.
    StringTable::unlink(&GenMarkSweep::is_alive);

    // Clean up unreferenced symbols in symbol table.
    SymbolTable::unlink();

    if (VerifyDuringGC) {
        HandleMark hm;  // handle scope
        COMPILER2_PRESENT(DerivedPointerTableDeactivate dpt_deact);
        Universe::heap()->prepare_for_verify();
        // Note: we can verify only the heap here. When an object is
        // marked, the previous value of the mark word (including
        // identity hash values, ages, etc) is preserved, and the mark
        // word is set to markOop::marked_value - effectively removing
        // any hash values from the mark word. These hash values are
        // used when verifying the dictionaries and so removing them
        // from the mark word can make verification of the dictionaries
        // fail. At the end of the GC, the original mark word values
        // (including hash values) are restored to the appropriate
        // objects.
        if (!VerifySilently) {
            gclog_or_tty->print(" VerifyDuringGC:(full)[Verifying ");
        }
        Universe::heap()->verify(VerifySilently, VerifyOption_G1UseMarkWord);
        if (!VerifySilently) {
            gclog_or_tty->print_cr("]");
        }
    }

    gc_tracer()->report_object_count_after_gc(&GenMarkSweep::is_alive);
}