< prev index next >

src/hotspot/share/gc/parallel/psParallelCompact.cpp

Print this page

 531 void ParallelCompactData::verify_clear() {
 532   for (uint cur_idx = 0; cur_idx < region_count(); ++cur_idx) {
 533     if (!region(cur_idx)->is_clear()) {
 534       log_warning(gc)("Uncleared Region: %u", cur_idx);
 535       region(cur_idx)->verify_clear();
 536     }
 537   }
 538 }
 539 #endif  // #ifdef ASSERT
 540 
// Static state shared by all PSParallelCompact full-collection invocations.
STWGCTimer          PSParallelCompact::_gc_timer;
ParallelOldTracer   PSParallelCompact::_gc_tracer;
elapsedTimer        PSParallelCompact::_accumulated_time;
unsigned int        PSParallelCompact::_maximum_compaction_gc_num = 0;
// Perf counters; allocated in post_initialize().
CollectorCounters*  PSParallelCompact::_counters = nullptr;
ParMarkBitMap       PSParallelCompact::_mark_bitmap;
ParallelCompactData PSParallelCompact::_summary_data;

// Liveness predicate backed by _mark_bitmap (see IsAliveClosure::do_object_b).
PSParallelCompact::IsAliveClosure PSParallelCompact::_is_alive_closure;
 550 
// Closure that adjusts every visited oop field by delegating to
// PSParallelCompact::adjust_pointer() (used when updating references
// to objects' new locations).
class PCAdjustPointerClosure: public BasicOopIterateClosure {
  template <typename T>
  void do_oop_work(T* p) { PSParallelCompact::adjust_pointer(p); }

public:
  virtual void do_oop(oop* p)                { do_oop_work(p); }
  virtual void do_oop(narrowOop* p)          { do_oop_work(p); }

  // DO_FIELDS: the fields of java.lang.ref.Reference objects are
  // visited like ordinary instance fields.
  virtual ReferenceIterationMode reference_iteration_mode() { return DO_FIELDS; }
};

// The closure is stateless, so one file-static instance suffices.
static PCAdjustPointerClosure pc_adjust_pointer_closure;
 563 
 564 bool PSParallelCompact::IsAliveClosure::do_object_b(oop p) { return mark_bitmap()->is_marked(p); }
 565 
// One-time setup that runs after the heap itself has been initialized:
// creates the reference processor and collector counters, and sets up
// ParCompactionManager's static state.
void PSParallelCompact::post_initialize() {
  ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
  // Reference discovery covers the entire reserved heap range.
  _span_based_discoverer.set_span(heap->reserved_region());
  _ref_processor =
    new ReferenceProcessor(&_span_based_discoverer,
                           ParallelGCThreads,   // mt processing degree
                           ParallelGCThreads,   // mt discovery degree
                           false,               // concurrent_discovery
                           &_is_alive_closure); // non-header is alive closure

  _counters = new CollectorCounters("Parallel full collection pauses", 1);

  // Initialize static fields in ParCompactionManager.
  ParCompactionManager::initialize(mark_bitmap());
}
 581 
 582 bool PSParallelCompact::initialize_aux_data() {

1039 
1040 #if COMPILER2_OR_JVMCI
1041     DerivedPointerTable::clear();
1042 #endif
1043 
1044     ref_processor()->start_discovery(clear_all_soft_refs);
1045 
1046     ClassUnloadingContext ctx(1 /* num_nmethod_unlink_workers */,
1047                               false /* unregister_nmethods_during_purge */,
1048                               false /* lock_nmethod_free_separately */);
1049 
1050     marking_phase(&_gc_tracer);
1051 
1052     summary_phase();
1053 
1054 #if COMPILER2_OR_JVMCI
1055     assert(DerivedPointerTable::is_active(), "Sanity");
1056     DerivedPointerTable::set_active(false);
1057 #endif
1058 


1059     forward_to_new_addr();
1060 
1061     adjust_pointers();
1062 
1063     compact();
1064 


1065     ParCompactionManager::_preserved_marks_set->restore(&ParallelScavengeHeap::heap()->workers());
1066 
1067     ParCompactionManager::verify_all_region_stack_empty();
1068 
1069     // Reset the mark bitmap, summary data, and do other bookkeeping.  Must be
1070     // done before resizing.
1071     post_compact();
1072 
1073     // Let the size policy know we're done
1074     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
1075 
1076     if (UseAdaptiveSizePolicy) {
1077       log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
1078       log_trace(gc, ergo)("old_gen_capacity: %zu young_gen_capacity: %zu",
1079                           old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
1080 
1081       // Don't check if the size_policy is ready here.  Let
1082       // the size_policy check that internally.
1083       if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
1084           AdaptiveSizePolicy::should_update_promo_stats(gc_cause)) {

// Debug-only check (compiled under the enclosing #ifdef ASSERT) that all
// region data has been reset: logs the index of any region that is not
// clear and re-runs the region-level verification for details.
void ParallelCompactData::verify_clear() {
  for (uint cur_idx = 0; cur_idx < region_count(); ++cur_idx) {
    if (!region(cur_idx)->is_clear()) {
      log_warning(gc)("Uncleared Region: %u", cur_idx);
      // Drill down into the offending region for details.
      region(cur_idx)->verify_clear();
    }
  }
}
 539 #endif  // #ifdef ASSERT
 540 
// File-wide static data for the parallel full collector.
STWGCTimer          PSParallelCompact::_gc_timer;
ParallelOldTracer   PSParallelCompact::_gc_tracer;
elapsedTimer        PSParallelCompact::_accumulated_time;
unsigned int        PSParallelCompact::_maximum_compaction_gc_num = 0;
// Created in post_initialize().
CollectorCounters*  PSParallelCompact::_counters = nullptr;
ParMarkBitMap       PSParallelCompact::_mark_bitmap;
ParallelCompactData PSParallelCompact::_summary_data;

// Liveness test: alive iff marked in _mark_bitmap (see do_object_b).
PSParallelCompact::IsAliveClosure PSParallelCompact::_is_alive_closure;
 550 
 551 class PCAdjustPointerClosureNew: public BasicOopIterateClosure {
 552   template <typename T>
 553   void do_oop_work(T* p) { PSParallelCompact::adjust_pointer(p); }
 554 
 555 public:
 556   virtual void do_oop(oop* p)                { do_oop_work(p); }
 557   virtual void do_oop(narrowOop* p)          { do_oop_work(p); }
 558 
 559   virtual ReferenceIterationMode reference_iteration_mode() { return DO_FIELDS; }
 560 };
 561 
 562 static PCAdjustPointerClosureNew pc_adjust_pointer_closure;
 563 
 564 bool PSParallelCompact::IsAliveClosure::do_object_b(oop p) { return mark_bitmap()->is_marked(p); }
 565 
 566 void PSParallelCompact::post_initialize() {
 567   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
 568   _span_based_discoverer.set_span(heap->reserved_region());
 569   _ref_processor =
 570     new ReferenceProcessor(&_span_based_discoverer,
 571                            ParallelGCThreads,   // mt processing degree
 572                            ParallelGCThreads,   // mt discovery degree
 573                            false,               // concurrent_discovery
 574                            &_is_alive_closure); // non-header is alive closure
 575 
 576   _counters = new CollectorCounters("Parallel full collection pauses", 1);
 577 
 578   // Initialize static fields in ParCompactionManager.
 579   ParCompactionManager::initialize(mark_bitmap());
 580 }
 581 
 582 bool PSParallelCompact::initialize_aux_data() {

1039 
1040 #if COMPILER2_OR_JVMCI
1041     DerivedPointerTable::clear();
1042 #endif
1043 
1044     ref_processor()->start_discovery(clear_all_soft_refs);
1045 
1046     ClassUnloadingContext ctx(1 /* num_nmethod_unlink_workers */,
1047                               false /* unregister_nmethods_during_purge */,
1048                               false /* lock_nmethod_free_separately */);
1049 
1050     marking_phase(&_gc_tracer);
1051 
1052     summary_phase();
1053 
1054 #if COMPILER2_OR_JVMCI
1055     assert(DerivedPointerTable::is_active(), "Sanity");
1056     DerivedPointerTable::set_active(false);
1057 #endif
1058 
1059     FullGCForwarding::begin();
1060 
1061     forward_to_new_addr();
1062 
1063     adjust_pointers();
1064 
1065     compact();
1066 
1067     FullGCForwarding::end();
1068 
1069     ParCompactionManager::_preserved_marks_set->restore(&ParallelScavengeHeap::heap()->workers());
1070 
1071     ParCompactionManager::verify_all_region_stack_empty();
1072 
1073     // Reset the mark bitmap, summary data, and do other bookkeeping.  Must be
1074     // done before resizing.
1075     post_compact();
1076 
1077     // Let the size policy know we're done
1078     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
1079 
1080     if (UseAdaptiveSizePolicy) {
1081       log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
1082       log_trace(gc, ergo)("old_gen_capacity: %zu young_gen_capacity: %zu",
1083                           old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
1084 
1085       // Don't check if the size_policy is ready here.  Let
1086       // the size_policy check that internally.
1087       if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
1088           AdaptiveSizePolicy::should_update_promo_stats(gc_cause)) {
< prev index next >