8242101: Shenandoah: coalesce and parallelise heap region walks during the pauses

Reviewed-by: rkennke
This commit is contained in:
Aleksey Shipilev 2020-04-03 16:13:01 +02:00
parent 746d28d110
commit fe2a82031f
3 changed files with 76 additions and 40 deletions

View File

@ -1351,11 +1351,11 @@ void ShenandoahHeap::parallel_heap_region_iterate(ShenandoahHeapRegionClosure* b
}
}
class ShenandoahClearLivenessClosure : public ShenandoahHeapRegionClosure {
class ShenandoahInitMarkUpdateRegionStateClosure : public ShenandoahHeapRegionClosure {
private:
ShenandoahMarkingContext* const _ctx;
public:
ShenandoahClearLivenessClosure() : _ctx(ShenandoahHeap::heap()->marking_context()) {}
ShenandoahInitMarkUpdateRegionStateClosure() : _ctx(ShenandoahHeap::heap()->marking_context()) {}
void heap_region_do(ShenandoahHeapRegion* r) {
if (r->is_active()) {
@ -1395,9 +1395,9 @@ void ShenandoahHeap::op_init_mark() {
}
{
ShenandoahGCPhase phase(ShenandoahPhaseTimings::clear_liveness);
ShenandoahClearLivenessClosure clc;
parallel_heap_region_iterate(&clc);
ShenandoahGCPhase phase(ShenandoahPhaseTimings::init_update_region_states);
ShenandoahInitMarkUpdateRegionStateClosure cl;
parallel_heap_region_iterate(&cl);
}
// Make above changes visible to worker threads
@ -1426,19 +1426,43 @@ void ShenandoahHeap::op_mark() {
concurrent_mark()->mark_from_roots();
}
class ShenandoahCompleteLivenessClosure : public ShenandoahHeapRegionClosure {
class ShenandoahFinalMarkUpdateRegionStateClosure : public ShenandoahHeapRegionClosure {
private:
ShenandoahMarkingContext* const _ctx;
ShenandoahHeapLock* const _lock;
public:
ShenandoahCompleteLivenessClosure() : _ctx(ShenandoahHeap::heap()->complete_marking_context()) {}
ShenandoahFinalMarkUpdateRegionStateClosure() :
_ctx(ShenandoahHeap::heap()->complete_marking_context()), _lock(ShenandoahHeap::heap()->lock()) {}
void heap_region_do(ShenandoahHeapRegion* r) {
if (r->is_active()) {
// All allocations past TAMS are implicitly live, adjust the region data.
// Bitmaps/TAMS are swapped at this point, so we need to poll complete bitmap.
HeapWord *tams = _ctx->top_at_mark_start(r);
HeapWord *top = r->top();
if (top > tams) {
r->increase_live_data_alloc_words(pointer_delta(top, tams));
}
// We are about to select the collection set, make sure it knows about
// current pinning status. Also, this allows trashing more regions that
// now have their pinning status dropped.
if (r->is_pinned()) {
if (r->pin_count() == 0) {
ShenandoahHeapLocker locker(_lock);
r->make_unpinned();
}
} else {
if (r->pin_count() > 0) {
ShenandoahHeapLocker locker(_lock);
r->make_pinned();
}
}
// Remember limit for updating refs. It's guaranteed that we get no
// from-space-refs written from here on.
r->set_update_watermark(r->top());
} else {
assert(!r->has_live(), "Region " SIZE_FORMAT " should have no live data", r->index());
assert(_ctx->top_at_mark_start(r) == r->top(),
@ -1469,12 +1493,13 @@ void ShenandoahHeap::op_final_mark() {
if (ShenandoahVerify) {
verifier()->verify_roots_no_forwarded();
}
// All allocations past TAMS are implicitly live, adjust the region data.
// Bitmaps/TAMS are swapped at this point, so we need to poll complete bitmap.
{
ShenandoahGCPhase phase(ShenandoahPhaseTimings::complete_liveness);
ShenandoahCompleteLivenessClosure cl;
ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_update_region_states);
ShenandoahFinalMarkUpdateRegionStateClosure cl;
parallel_heap_region_iterate(&cl);
assert_pinned_region_status();
}
// Force the threads to reacquire their TLABs outside the collection set.
@ -1483,14 +1508,6 @@ void ShenandoahHeap::op_final_mark() {
make_parsable(true);
}
// We are about to select the collection set, make sure it knows about
// current pinning status. Also, this allows trashing more regions that
// now have their pinning status dropped.
{
ShenandoahGCPhase phase(ShenandoahPhaseTimings::sync_pinned);
sync_pinned_region_status();
}
{
ShenandoahGCPhase phase(ShenandoahPhaseTimings::choose_cset);
ShenandoahHeapLocker locker(lock());
@ -1518,13 +1535,6 @@ void ShenandoahHeap::op_final_mark() {
verifier()->verify_before_evacuation();
}
// Remember limit for updating refs. It's guaranteed that we get no from-space-refs written
// from here on.
for (uint i = 0; i < num_regions(); i++) {
ShenandoahHeapRegion* r = get_region(i);
r->set_update_watermark(r->top());
}
set_evacuation_in_progress(true);
// From here on, we need to update references.
set_has_forwarded_objects(true);
@ -2397,6 +2407,35 @@ void ShenandoahHeap::op_init_updaterefs() {
}
}
// Region closure for the final-update-refs pause: re-syncs each active region's
// "pinned" flag with its current critical-pin (CP) count, so regions whose pins
// were released since the last sync become eligible for trashing.
class ShenandoahFinalUpdateRefsUpdateRegionStateClosure : public ShenandoahHeapRegionClosure {
private:
// Heap lock guarding the region state transitions (make_pinned/make_unpinned).
ShenandoahHeapLock* const _lock;
public:
ShenandoahFinalUpdateRefsUpdateRegionStateClosure() : _lock(ShenandoahHeap::heap()->lock()) {}
void heap_region_do(ShenandoahHeapRegion* r) {
// Drop unnecessary "pinned" state from regions that do not have CP marks
// anymore, as this would allow trashing them.
if (r->is_active()) {
if (r->is_pinned()) {
if (r->pin_count() == 0) {
// Marked pinned but pin count dropped to zero: unpin under the heap lock.
ShenandoahHeapLocker locker(_lock);
r->make_unpinned();
}
} else {
if (r->pin_count() > 0) {
// Region acquired pins since the last sync: record it as pinned.
ShenandoahHeapLocker locker(_lock);
r->make_pinned();
}
}
}
}
// Safe for parallel_heap_region_iterate(): the only shared-state mutations
// (pin-state transitions) are done under the heap lock above.
bool is_thread_safe() { return true; }
};
void ShenandoahHeap::op_final_updaterefs() {
assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "must be at safepoint");
@ -2433,11 +2472,12 @@ void ShenandoahHeap::op_final_updaterefs() {
verifier()->verify_roots_in_to_space();
}
// Drop unnecessary "pinned" state from regions that do not have CP marks
// anymore, as this would allow trashing them below.
{
ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_update_refs_sync_pinned);
sync_pinned_region_status();
ShenandoahGCPhase phase(ShenandoahPhaseTimings::final_update_refs_update_region_states);
ShenandoahFinalUpdateRefsUpdateRegionStateClosure cl;
parallel_heap_region_iterate(&cl);
assert_pinned_region_status();
}
{

View File

@ -384,18 +384,15 @@ public:
size_t get_gclab_allocs() const;
HeapWord* get_update_watermark() const {
// Updates to the update-watermark only happen at safepoints or, when pushing
// back the watermark for evacuation regions, under the Shenandoah heap-lock.
// Consequently, we should access the field under the same lock. However, since
// those updates are only monotonically increasing, possibly reading a stale value
// is only conservative - we would not fail to update any fields.
// Updates to the update-watermark only happen at safepoints.
// Since those updates are only monotonically increasing, possibly reading
// a stale value is only conservative - we would not fail to update any fields.
HeapWord* watermark = _update_watermark;
assert(bottom() <= watermark && watermark <= top(), "within bounds");
return watermark;
}
void set_update_watermark(HeapWord* w) {
shenandoah_assert_heaplocked_or_safepoint();
assert(bottom() <= w && w <= top(), "within bounds");
_update_watermark = w;
}

View File

@ -63,7 +63,7 @@ class outputStream;
f(init_mark_gross, "Pause Init Mark (G)") \
f(init_mark, "Pause Init Mark (N)") \
f(make_parsable, " Make Parsable") \
f(clear_liveness, " Clear Liveness") \
f(init_update_region_states, " Update Region States") \
f(scan_roots, " Scan Roots") \
SHENANDOAH_GC_PAR_PHASE_DO(scan_, " S: ", f) \
f(resize_tlabs, " Resize TLABs") \
@ -80,9 +80,8 @@ class outputStream;
f(purge_par, " Parallel Cleanup") \
SHENANDOAH_GC_PAR_PHASE_DO(purge_par_roots, " PC: ", f) \
f(purge_cldg, " CLDG") \
f(complete_liveness, " Complete Liveness") \
f(final_update_region_states, " Update Region States") \
f(retire_tlabs, " Retire TLABs") \
f(sync_pinned, " Sync Pinned") \
f(choose_cset, " Choose Collection Set") \
f(final_rebuild_freeset, " Rebuild Free Set") \
f(init_evac, " Initial Evacuation") \
@ -98,7 +97,7 @@ class outputStream;
f(final_update_refs_finish_work, " Finish Work") \
f(final_update_refs_roots, " Update Roots") \
SHENANDOAH_GC_PAR_PHASE_DO(final_update_, " UR: ", f) \
f(final_update_refs_sync_pinned, " Sync Pinned") \
f(final_update_refs_update_region_states, " Update Region States") \
f(final_update_refs_trash_cset, " Trash Collection Set") \
f(final_update_refs_rebuild_freeset, " Rebuild Free Set") \
\