@@ -102,6 +102,16 @@ ShenandoahGC::ShenandoahDegenPoint ShenandoahConcurrentGC::degen_point() const {
   return _degen_point;
 }
 
+void ShenandoahConcurrentGC::entry_concurrent_update_refs_prepare(ShenandoahHeap* const heap) {
+  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
+  const char* msg = conc_init_update_refs_event_message();
+  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_update_refs_prepare);
+  EventMark em("%s", msg);
+
+  // Evacuation is complete, retire gc labs and change gc state
+  heap->concurrent_prepare_for_update_refs();
+}
+
 bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
 
@@ -192,8 +202,7 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
     return false;
   }
 
-  // Evacuation is complete, retire gc labs
-  heap->concurrent_prepare_for_update_refs();
+  entry_concurrent_update_refs_prepare(heap);
 
   // Perform update-refs phase.
   if (ShenandoahVerify || ShenandoahPacing) {
@@ -216,24 +225,14 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
     // Update references freed up collection set, kick the cleanup to reclaim the space.
     entry_cleanup_complete();
   } else {
-    // We chose not to evacuate because we found sufficient immediate garbage.
-    // However, there may still be regions to promote in place, so do that now.
-    if (has_in_place_promotions(heap)) {
-      entry_promote_in_place();
-
-      // If the promote-in-place operation was cancelled, we can have the degenerated
-      // cycle complete the operation. It will see that no evacuations are in progress,
-      // and that there are regions wanting promotion. The risk with not handling the
-      // cancellation would be failing to restore top for these regions and leaving
-      // them unable to serve allocations for the old generation.
-      if (check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_evac)) {
-        return false;
-      }
+    if (!entry_final_roots()) {
+      assert(_degen_point != _degenerated_unset, "Need to know where to start degenerated cycle");
+      return false;
     }
 
-    // At this point, the cycle is effectively complete. If the cycle has been cancelled here,
-    // the control thread will detect it on its next iteration and run a degenerated young cycle.
-    vmop_entry_final_roots();
+    if (VerifyAfterGC) {
+      vmop_entry_verify_final_roots();
+    }
     _abbreviated = true;
   }
 
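Note on the new early return above: the assert on _degen_point is safe because check_cancellation_and_abort() (a pre-existing helper in this file, reproduced here from memory as a sketch rather than quoted from the patch) records the degeneration point before reporting the cancellation:

bool ShenandoahConcurrentGC::check_cancellation_and_abort(ShenandoahDegenPoint point) {
  if (ShenandoahHeap::heap()->cancelled_gc()) {
    // Remember where the degenerated cycle should pick up the work.
    _degen_point = point;
    return true;
  }
  return false;
}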
@@ -251,6 +250,52 @@ bool ShenandoahConcurrentGC::collect(GCCause::Cause cause) {
   return true;
 }
 
+bool ShenandoahConcurrentGC::complete_abbreviated_cycle() {
+  shenandoah_assert_generational();
+
+  ShenandoahGenerationalHeap* const heap = ShenandoahGenerationalHeap::heap();
+
+  // We chose not to evacuate because we found sufficient immediate garbage.
+  // However, there may still be regions to promote in place, so do that now.
+  if (heap->old_generation()->has_in_place_promotions()) {
+    entry_promote_in_place();
+
+    // If the promote-in-place operation was cancelled, we can have the degenerated
+    // cycle complete the operation. It will see that no evacuations are in progress,
+    // and that there are regions wanting promotion. The risk with not handling the
+    // cancellation would be failing to restore top for these regions and leaving
+    // them unable to serve allocations for the old generation. This will leave the weak
+    // roots flag set (the degenerated cycle will unset it).
+    if (check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_evac)) {
+      return false;
+    }
+  }
+
+  // At this point, the cycle is effectively complete. If the cycle has been cancelled here,
+  // the control thread will detect it on its next iteration and run a degenerated young cycle.
+  if (!_generation->is_old()) {
+    heap->update_region_ages(_generation->complete_marking_context());
+  }
+
+  if (!heap->is_concurrent_old_mark_in_progress()) {
+    heap->concurrent_final_roots();
+  } else {
+    // Since the cycle was shortened for having enough immediate garbage, this will be
+    // the last phase before concurrent marking of old resumes. We must be sure
+    // that old mark threads don't see any pointers to garbage in the SATB queues. Even
+    // though nothing was evacuated, overwriting unreachable weak roots with null may still
+    // put pointers to regions that become trash in the SATB queues. The following will
+    // piggyback flushing the thread local SATB queues on the same handshake that propagates
+    // the gc state change.
+    ShenandoahSATBMarkQueueSet& satb_queues = ShenandoahBarrierSet::satb_mark_queue_set();
+    ShenandoahFlushSATBHandshakeClosure complete_thread_local_satb_buffers(satb_queues);
+    heap->concurrent_final_roots(&complete_thread_local_satb_buffers);
+    heap->old_generation()->concurrent_transfer_pointers_from_satb();
+  }
+  return true;
+}
+
+
 void ShenandoahConcurrentGC::vmop_entry_init_mark() {
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
   TraceCollectorStats tcs(heap->monitoring_support()->stw_collection_counters());
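The SATB flush above leans on ShenandoahFlushSATBHandshakeClosure, defined earlier in this file. Roughly (a sketch of the existing closure; the exact thread-local queue accessor is an assumption), it drains each Java thread's local SATB buffer into the shared queue set while the handshake has the thread stopped:

class ShenandoahFlushSATBHandshakeClosure : public HandshakeClosure {
private:
  SATBMarkQueueSet& _qset;
public:
  explicit ShenandoahFlushSATBHandshakeClosure(SATBMarkQueueSet& qset) :
    HandshakeClosure("Shenandoah Flush SATB"),
    _qset(qset) {}

  void do_thread(Thread* thread) {
    // Hand the thread-local buffer to the global set so old-gen marking
    // never misses entries queued before the gc-state change.
    _qset.flush_queue(ShenandoahThreadLocalData::satb_mark_queue(thread));
  }
};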
@@ -291,7 +336,7 @@ void ShenandoahConcurrentGC::vmop_entry_final_update_refs() {
   VMThread::execute(&op);
 }
 
-void ShenandoahConcurrentGC::vmop_entry_final_roots() {
+void ShenandoahConcurrentGC::vmop_entry_verify_final_roots() {
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
   TraceCollectorStats tcs(heap->monitoring_support()->stw_collection_counters());
   ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_roots_gross);
@@ -347,12 +392,12 @@ void ShenandoahConcurrentGC::entry_final_update_refs() {
   op_final_update_refs();
 }
 
-void ShenandoahConcurrentGC::entry_final_roots() {
-  const char* msg = final_roots_event_message();
+void ShenandoahConcurrentGC::entry_verify_final_roots() {
+  const char* msg = verify_final_roots_event_message();
   ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::final_roots);
   EventMark em("%s", msg);
 
-  op_final_roots();
+  op_verify_final_roots();
 }
 
 void ShenandoahConcurrentGC::entry_reset() {
@@ -526,19 +571,12 @@ void ShenandoahConcurrentGC::entry_evacuate() {
   op_evacuate();
 }
 
-void ShenandoahConcurrentGC::entry_promote_in_place() {
+void ShenandoahConcurrentGC::entry_promote_in_place() const {
   shenandoah_assert_generational();
 
-  ShenandoahHeap* const heap = ShenandoahHeap::heap();
-  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
-
-  static const char* msg = "Promote in place";
-  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::promote_in_place);
-  EventMark em("%s", msg);
-
-  ShenandoahWorkerScope scope(heap->workers(),
-                              ShenandoahWorkerPolicy::calc_workers_for_conc_evac(),
-                              "promote in place");
+  ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::promote_in_place);
+  ShenandoahGCWorkerPhase worker_phase(ShenandoahPhaseTimings::promote_in_place);
+  EventMark em("%s", "Promote in place");
 
   ShenandoahGenerationalHeap::heap()->promote_regions_in_place(true);
 }
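entry_promote_in_place() can shed its own ShenandoahConcurrentPhase and ShenandoahWorkerScope because it no longer runs as a standalone phase: on the abbreviated path it is now nested under entry_final_roots(), which already opens both. The call chain, as it reads from this diff:

// collect()
//   entry_final_roots()              // opens ShenandoahConcurrentPhase + ShenandoahWorkerScope
//     complete_abbreviated_cycle()
//       entry_promote_in_place()     // adds only timing/worker-phase bookkeeping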
@@ -663,6 +701,7 @@ void ShenandoahConcurrentGC::op_init_mark() {
   }
 
   if (ShenandoahVerify) {
+    ShenandoahTimingsTracker v(ShenandoahPhaseTimings::init_mark_verify);
     heap->verifier()->verify_before_concmark();
   }
 
@@ -751,6 +790,7 @@ void ShenandoahConcurrentGC::op_final_mark() {
   }
 
   if (ShenandoahVerify) {
+    ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_mark_verify);
     heap->verifier()->verify_before_evacuation();
   }
 
@@ -767,6 +807,7 @@ void ShenandoahConcurrentGC::op_final_mark() {
     }
   } else {
     if (ShenandoahVerify) {
+      ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_mark_verify);
       if (has_in_place_promotions(heap)) {
         heap->verifier()->verify_after_concmark_with_promotions();
       } else {
@@ -1088,6 +1129,7 @@ void ShenandoahConcurrentGC::op_evacuate() {
 void ShenandoahConcurrentGC::op_init_update_refs() {
   ShenandoahHeap* const heap = ShenandoahHeap::heap();
   if (ShenandoahVerify) {
+    ShenandoahTimingsTracker v(ShenandoahPhaseTimings::init_update_refs_verify);
     heap->verifier()->verify_before_update_refs();
   }
   if (ShenandoahPacing) {
@@ -1175,6 +1217,7 @@ void ShenandoahConcurrentGC::op_final_update_refs() {
   }
 
   if (ShenandoahVerify) {
+    ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_update_refs_verify);
     heap->verifier()->verify_after_update_refs();
   }
 
@@ -1190,33 +1233,32 @@ void ShenandoahConcurrentGC::op_final_update_refs() {
   }
 }
 
-void ShenandoahConcurrentGC::op_final_roots() {
+bool ShenandoahConcurrentGC::entry_final_roots() {
+  ShenandoahHeap* const heap = ShenandoahHeap::heap();
+  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
 
-  ShenandoahHeap *heap = ShenandoahHeap::heap();
-  heap->set_concurrent_weak_root_in_progress(false);
-  heap->set_evacuation_in_progress(false);
 
-  if (heap->mode()->is_generational()) {
-    // If the cycle was shortened for having enough immediate garbage, this could be
-    // the last GC safepoint before concurrent marking of old resumes. We must be sure
-    // that old mark threads don't see any pointers to garbage in the SATB buffers.
-    if (heap->is_concurrent_old_mark_in_progress()) {
-      heap->old_generation()->transfer_pointers_from_satb();
-    }
+  const char* msg = conc_final_roots_event_message();
+  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_final_roots);
+  EventMark em("%s", msg);
+  ShenandoahWorkerScope scope(heap->workers(),
+                              ShenandoahWorkerPolicy::calc_workers_for_conc_evac(),
+                              msg);
 
-  if (!_generation->is_old()) {
-    ShenandoahGenerationalHeap::heap()->update_region_ages(_generation->complete_marking_context());
+  if (!heap->mode()->is_generational()) {
+    heap->concurrent_final_roots();
+  } else {
+    if (!complete_abbreviated_cycle()) {
+      return false;
     }
   }
+  return true;
+}
 
+void ShenandoahConcurrentGC::op_verify_final_roots() {
   if (VerifyAfterGC) {
     Universe::verify();
   }
-
-  {
-    ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_roots_propagate_gc_state);
-    heap->propagate_gc_state_to_all_threads();
-  }
 }
 
 void ShenandoahConcurrentGC::op_cleanup_complete() {
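The flag flips and gc-state propagation that op_final_roots() used to perform at a safepoint now live in ShenandoahHeap::concurrent_final_roots(), which is not shown in this hunk. A minimal sketch of what it plausibly does, assuming the handshake-based propagation this patch builds on (the parameter and the overload below are assumptions, not quoted code):

void ShenandoahHeap::concurrent_final_roots(HandshakeClosure* handshake_closure /* = nullptr */) {
  set_concurrent_weak_root_in_progress(false);
  set_evacuation_in_progress(false);
  // Publish the new gc state with a thread handshake instead of a pause;
  // callers may piggyback extra per-thread work (e.g. the SATB flush) on it.
  propagate_gc_state_to_all_threads(handshake_closure);  // hypothetical overload
}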
@@ -1301,11 +1343,19 @@ const char* ShenandoahConcurrentGC::conc_reset_after_collect_event_message() con
   }
 }
 
-const char* ShenandoahConcurrentGC::final_roots_event_message() const {
+const char* ShenandoahConcurrentGC::verify_final_roots_event_message() const {
+  if (ShenandoahHeap::heap()->unload_classes()) {
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Verify Final Roots", " (unload classes)");
+  } else {
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Verify Final Roots", "");
+  }
+}
+
+const char* ShenandoahConcurrentGC::conc_final_roots_event_message() const {
   if (ShenandoahHeap::heap()->unload_classes()) {
-    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Final Roots", " (unload classes)");
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent Final Roots", " (unload classes)");
   } else {
-    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Final Roots", "");
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent Final Roots", "");
   }
 }
 
@@ -1332,3 +1382,11 @@ const char* ShenandoahConcurrentGC::conc_cleanup_event_message() const {
     SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent cleanup", "");
   }
 }
+
+const char* ShenandoahConcurrentGC::conc_init_update_refs_event_message() const {
+  if (ShenandoahHeap::heap()->unload_classes()) {
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent Init Update Refs", " (unload classes)");
+  } else {
+    SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Concurrent Init Update Refs", "");
+  }
+}
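The new message helper follows the same pattern as its siblings: SHENANDOAH_RETURN_EVENT_MESSAGE splices the generation type into a static string so the returned pointer stays valid for JFR events. Schematically (a from-memory sketch of the macro, not part of this diff):

#define SHENANDOAH_RETURN_EVENT_MESSAGE(generation_type, prefix, postfix) \
  switch (generation_type) {                                              \
    case NON_GEN: return prefix postfix;                                  \
    case GLOBAL:  return prefix " (Global)" postfix;                      \
    case YOUNG:   return prefix " (Young)" postfix;                       \
    case OLD:     return prefix " (Old)" postfix;                         \
    default:      ShouldNotReachHere(); return prefix postfix;            \
  }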