228 // Coming out of Full GC, we would not have any forwarded objects.
229 // This also prevents resolves with fwdptr from kicking in while adjusting pointers in phase3.
230 heap->set_has_forwarded_objects(false);
231
232 heap->set_full_gc_move_in_progress(true);
233
234 // Setup workers for the rest
235 OrderAccess::fence();
236
237 // Initialize worker slices
238 ShenandoahHeapRegionSet** worker_slices = NEW_C_HEAP_ARRAY(ShenandoahHeapRegionSet*, heap->max_workers(), mtGC);
239 for (uint i = 0; i < heap->max_workers(); i++) {
240 worker_slices[i] = new ShenandoahHeapRegionSet();
241 }
242
243 {
244 // The rest of code performs region moves, where region status is undefined
245 // until all phases run together.
246 ShenandoahHeapLocker lock(heap->lock());
247
248 phase2_calculate_target_addresses(worker_slices);
249
250 OrderAccess::fence();
251
252 phase3_update_references();
253
254 phase4_compact_objects(worker_slices);
255
256 phase5_epilog();
257 }
258
259 // Resize metaspace
260 MetaspaceGC::compute_new_size();
261
262 // Free worker slices
263 for (uint i = 0; i < heap->max_workers(); i++) {
264 delete worker_slices[i];
265 }
266 FREE_C_HEAP_ARRAY(ShenandoahHeapRegionSet*, worker_slices);
267
268 heap->set_full_gc_move_in_progress(false);
269 heap->set_full_gc_in_progress(false);
270
271 if (ShenandoahVerify) {
272 heap->verifier()->verify_after_fullgc();
273 }
274
275 if (VerifyAfterGC) {
276 Universe::verify();
334 }
335
  // Close out the current to-region: record the compaction high-water mark
  // so the region's new top reflects everything slid into it.
  void finish() {
    assert(_to_region != nullptr, "should not happen");
    _to_region->set_new_top(_compact_point);
  }
340
  // True when compaction ended up sliding objects within the from-region
  // itself (i.e. no separate target region was used).
  bool is_compact_same_region() {
    return _from_region == _to_region;
  }
344
  // Index of the next unclaimed entry in the empty-regions list; lets the
  // caller see how many of the supplied empty regions were actually consumed.
  int empty_regions_pos() {
    return _empty_regions_pos;
  }
348
349 void do_object(oop p) {
350 assert(_from_region != nullptr, "must set before work");
351 assert(_heap->complete_marking_context()->is_marked(p), "must be marked");
352 assert(!_heap->complete_marking_context()->allocated_after_mark_start(p), "must be truly marked");
353
354 size_t obj_size = p->size();
355 if (_compact_point + obj_size > _to_region->end()) {
356 finish();
357
358 // Object doesn't fit. Pick next empty region and start compacting there.
359 ShenandoahHeapRegion* new_to_region;
360 if (_empty_regions_pos < _empty_regions.length()) {
361 new_to_region = _empty_regions.at(_empty_regions_pos);
362 _empty_regions_pos++;
363 } else {
364 // Out of empty region? Compact within the same region.
365 new_to_region = _from_region;
366 }
367
368 assert(new_to_region != _to_region, "must not reuse same to-region");
369 assert(new_to_region != nullptr, "must not be null");
370 _to_region = new_to_region;
371 _compact_point = _to_region->bottom();
372 }
373
374 // Object fits into current region, record new location, if object does not move:
375 assert(_compact_point + obj_size <= _to_region->end(), "must fit");
376 shenandoah_assert_not_forwarded(nullptr, p);
377 if (_compact_point != cast_from_oop<HeapWord*>(p)) {
378 _preserved_marks->push_if_necessary(p, p->mark());
379 FullGCForwarding::forward_to(p, cast_to_oop(_compact_point));
380 }
381 _compact_point += obj_size;
382 }
383 };
384
385 class ShenandoahPrepareForCompactionTask : public WorkerTask {
386 private:
387 PreservedMarksSet* const _preserved_marks;
388 ShenandoahHeap* const _heap;
389 ShenandoahHeapRegionSet** const _worker_slices;
390
391 public:
871 private:
872 ShenandoahHeap* const _heap;
873 uint const _worker_id;
874
875 public:
  // Worker-local closure: caches the heap singleton and remembers which
  // worker drives it.
  ShenandoahCompactObjectsClosure(uint worker_id) :
    _heap(ShenandoahHeap::heap()), _worker_id(worker_id) {}
878
879 void do_object(oop p) {
880 assert(_heap->complete_marking_context()->is_marked(p), "must be marked");
881 size_t size = p->size();
882 if (FullGCForwarding::is_forwarded(p)) {
883 HeapWord* compact_from = cast_from_oop<HeapWord*>(p);
884 HeapWord* compact_to = cast_from_oop<HeapWord*>(FullGCForwarding::forwardee(p));
885 assert(compact_from != compact_to, "Forwarded object should move");
886 Copy::aligned_conjoint_words(compact_from, compact_to, size);
887 oop new_obj = cast_to_oop(compact_to);
888
889 ContinuationGCSupport::relativize_stack_chunk(new_obj);
890 new_obj->init_mark();
891 }
892 }
893 };
894
895 class ShenandoahCompactObjectsTask : public WorkerTask {
896 private:
897 ShenandoahHeap* const _heap;
898 ShenandoahHeapRegionSet** const _worker_slices;
899
900 public:
  // Parallel task: each worker compacts the regions in its pre-computed slice.
  ShenandoahCompactObjectsTask(ShenandoahHeapRegionSet** worker_slices) :
    WorkerTask("Shenandoah Compact Objects"),
    _heap(ShenandoahHeap::heap()),
    _worker_slices(worker_slices) {
  }
906
907 void work(uint worker_id) {
908 ShenandoahParallelWorkerSession worker_session(worker_id);
909 ShenandoahHeapRegionSetIterator slice(_worker_slices[worker_id]);
910
|
228 // Coming out of Full GC, we would not have any forwarded objects.
229 // This also prevents resolves with fwdptr from kicking in while adjusting pointers in phase3.
230 heap->set_has_forwarded_objects(false);
231
232 heap->set_full_gc_move_in_progress(true);
233
234 // Setup workers for the rest
235 OrderAccess::fence();
236
237 // Initialize worker slices
238 ShenandoahHeapRegionSet** worker_slices = NEW_C_HEAP_ARRAY(ShenandoahHeapRegionSet*, heap->max_workers(), mtGC);
239 for (uint i = 0; i < heap->max_workers(); i++) {
240 worker_slices[i] = new ShenandoahHeapRegionSet();
241 }
242
243 {
244 // The rest of code performs region moves, where region status is undefined
245 // until all phases run together.
246 ShenandoahHeapLocker lock(heap->lock());
247
248 FullGCForwarding::begin();
249
250 phase2_calculate_target_addresses(worker_slices);
251
252 OrderAccess::fence();
253
254 phase3_update_references();
255
256 phase4_compact_objects(worker_slices);
257
258 phase5_epilog();
259
260 FullGCForwarding::end();
261 }
262
263 // Resize metaspace
264 MetaspaceGC::compute_new_size();
265
266 // Free worker slices
267 for (uint i = 0; i < heap->max_workers(); i++) {
268 delete worker_slices[i];
269 }
270 FREE_C_HEAP_ARRAY(ShenandoahHeapRegionSet*, worker_slices);
271
272 heap->set_full_gc_move_in_progress(false);
273 heap->set_full_gc_in_progress(false);
274
275 if (ShenandoahVerify) {
276 heap->verifier()->verify_after_fullgc();
277 }
278
279 if (VerifyAfterGC) {
280 Universe::verify();
338 }
339
  // Close out the current to-region: record the compaction high-water mark
  // so the region's new top reflects everything slid into it.
  void finish() {
    assert(_to_region != nullptr, "should not happen");
    _to_region->set_new_top(_compact_point);
  }
344
  // True when compaction ended up sliding objects within the from-region
  // itself (i.e. no separate target region was used).
  bool is_compact_same_region() {
    return _from_region == _to_region;
  }
348
  // Index of the next unclaimed entry in the empty-regions list; lets the
  // caller see how many of the supplied empty regions were actually consumed.
  int empty_regions_pos() {
    return _empty_regions_pos;
  }
352
  // Sliding-compaction planner: computes the post-compaction address for each
  // live object and records it as a forwarding pointer. When the current
  // to-region fills up, continues in the next empty region, or in the
  // from-region itself if no empty regions remain.
  void do_object(oop p) {
    assert(_from_region != nullptr, "must set before work");
    assert(_heap->complete_marking_context()->is_marked(p), "must be marked");
    assert(!_heap->complete_marking_context()->allocated_after_mark_start(p), "must be truly marked");

    // A moved copy may need more space than the original occupies now
    // (copy_size accounts e.g. for an appended identity hash). Only when the
    // object would stay exactly in place is the old size guaranteed to apply.
    size_t old_size = p->size();
    size_t new_size = p->copy_size(old_size, p->mark());
    size_t obj_size = _compact_point == cast_from_oop<HeapWord*>(p) ? old_size : new_size;
    if (_compact_point + obj_size > _to_region->end()) {
      finish();

      // Object doesn't fit. Pick next empty region and start compacting there.
      ShenandoahHeapRegion* new_to_region;
      if (_empty_regions_pos < _empty_regions.length()) {
        new_to_region = _empty_regions.at(_empty_regions_pos);
        _empty_regions_pos++;
      } else {
        // Out of empty region? Compact within the same region.
        new_to_region = _from_region;
      }

      assert(new_to_region != _to_region, "must not reuse same to-region");
      assert(new_to_region != nullptr, "must not be null");
      _to_region = new_to_region;
      _compact_point = _to_region->bottom();
      // Region switched: re-evaluate whether the object still lands on itself,
      // since that determines which size (old vs. copy) applies.
      obj_size = _compact_point == cast_from_oop<HeapWord*>(p) ? old_size : new_size;
    }

    // Object fits into current region, record new location, if object does not move:
    assert(_compact_point + obj_size <= _to_region->end(), "must fit");
    shenandoah_assert_not_forwarded(nullptr, p);
    if (_compact_point != cast_from_oop<HeapWord*>(p)) {
      _preserved_marks->push_if_necessary(p, p->mark());
      FullGCForwarding::forward_to(p, cast_to_oop(_compact_point));
    }
    _compact_point += obj_size;
  }
390 };
391
392 class ShenandoahPrepareForCompactionTask : public WorkerTask {
393 private:
394 PreservedMarksSet* const _preserved_marks;
395 ShenandoahHeap* const _heap;
396 ShenandoahHeapRegionSet** const _worker_slices;
397
398 public:
878 private:
879 ShenandoahHeap* const _heap;
880 uint const _worker_id;
881
882 public:
  // Worker-local closure: caches the heap singleton and remembers which
  // worker drives it.
  ShenandoahCompactObjectsClosure(uint worker_id) :
    _heap(ShenandoahHeap::heap()), _worker_id(worker_id) {}
885
886 void do_object(oop p) {
887 assert(_heap->complete_marking_context()->is_marked(p), "must be marked");
888 size_t size = p->size();
889 if (FullGCForwarding::is_forwarded(p)) {
890 HeapWord* compact_from = cast_from_oop<HeapWord*>(p);
891 HeapWord* compact_to = cast_from_oop<HeapWord*>(FullGCForwarding::forwardee(p));
892 assert(compact_from != compact_to, "Forwarded object should move");
893 Copy::aligned_conjoint_words(compact_from, compact_to, size);
894 oop new_obj = cast_to_oop(compact_to);
895
896 ContinuationGCSupport::relativize_stack_chunk(new_obj);
897 new_obj->init_mark();
898 new_obj->initialize_hash_if_necessary(p);
899 }
900 }
901 };
902
903 class ShenandoahCompactObjectsTask : public WorkerTask {
904 private:
905 ShenandoahHeap* const _heap;
906 ShenandoahHeapRegionSet** const _worker_slices;
907
908 public:
  // Parallel task: each worker compacts the regions in its pre-computed slice.
  ShenandoahCompactObjectsTask(ShenandoahHeapRegionSet** worker_slices) :
    WorkerTask("Shenandoah Compact Objects"),
    _heap(ShenandoahHeap::heap()),
    _worker_slices(worker_slices) {
  }
914
915 void work(uint worker_id) {
916 ShenandoahParallelWorkerSession worker_session(worker_id);
917 ShenandoahHeapRegionSetIterator slice(_worker_slices[worker_id]);
918
|