8238162: Shenandoah: Remove ShenandoahTaskTerminator wrapper

Reviewed-by: shade
Zhengyu Gu 2020-02-04 14:48:28 -05:00
parent b069da31e0
commit a88734eadc
6 changed files with 45 additions and 60 deletions
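The change is mechanical: the Shenandoah-specific ShenandoahTaskTerminator wrapper is deleted, the two headers gain an include of gc/shared/taskTerminator.hpp, and every field, parameter, and local that used the wrapper now uses the shared TaskTerminator directly. A minimal sketch of how one marking call site reads after the change, condensed from the hunks below (not standalone code; nworkers, task_queues() and the task class come from the surrounding Shenandoah sources shown in the diff):

#include "gc/shared/taskTerminator.hpp"

void ShenandoahConcurrentMark::mark_from_roots() {
  // ... worker setup elided; see the full hunk below ...
  TaskTerminator terminator(nworkers, task_queues());       // was: ShenandoahTaskTerminator
  ShenandoahConcurrentMarkingTask task(this, &terminator);  // tasks now take a TaskTerminator*
  workers->run_task(&task);
}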


@@ -155,10 +155,10 @@ public:
class ShenandoahConcurrentMarkingTask : public AbstractGangTask {
private:
ShenandoahConcurrentMark* _cm;
ShenandoahTaskTerminator* _terminator;
TaskTerminator* _terminator;
public:
ShenandoahConcurrentMarkingTask(ShenandoahConcurrentMark* cm, ShenandoahTaskTerminator* terminator) :
ShenandoahConcurrentMarkingTask(ShenandoahConcurrentMark* cm, TaskTerminator* terminator) :
AbstractGangTask("Root Region Scan"), _cm(cm), _terminator(terminator) {
}
@@ -202,11 +202,11 @@ public:
class ShenandoahFinalMarkingTask : public AbstractGangTask {
private:
ShenandoahConcurrentMark* _cm;
ShenandoahTaskTerminator* _terminator;
TaskTerminator* _terminator;
bool _dedup_string;
public:
ShenandoahFinalMarkingTask(ShenandoahConcurrentMark* cm, ShenandoahTaskTerminator* terminator, bool dedup_string) :
ShenandoahFinalMarkingTask(ShenandoahConcurrentMark* cm, TaskTerminator* terminator, bool dedup_string) :
AbstractGangTask("Shenandoah Final Marking"), _cm(cm), _terminator(terminator), _dedup_string(dedup_string) {
}
@@ -405,7 +405,7 @@ void ShenandoahConcurrentMark::mark_from_roots() {
{
ShenandoahTerminationTracker term(ShenandoahPhaseTimings::conc_termination);
ShenandoahTaskTerminator terminator(nworkers, task_queues());
TaskTerminator terminator(nworkers, task_queues());
ShenandoahConcurrentMarkingTask task(this, &terminator);
workers->run_task(&task);
}
@@ -440,7 +440,7 @@ void ShenandoahConcurrentMark::finish_mark_from_roots(bool full_gc) {
ShenandoahPhaseTimings::termination);
StrongRootsScope scope(nworkers);
ShenandoahTaskTerminator terminator(nworkers, task_queues());
TaskTerminator terminator(nworkers, task_queues());
ShenandoahFinalMarkingTask task(this, &terminator, ShenandoahStringDedup::is_enabled());
_heap->workers()->run_task(&task);
}
@@ -460,11 +460,11 @@ void ShenandoahConcurrentMark::finish_mark_from_roots(bool full_gc) {
// Weak Reference Closures
class ShenandoahCMDrainMarkingStackClosure: public VoidClosure {
uint _worker_id;
ShenandoahTaskTerminator* _terminator;
TaskTerminator* _terminator;
bool _reset_terminator;
public:
ShenandoahCMDrainMarkingStackClosure(uint worker_id, ShenandoahTaskTerminator* t, bool reset_terminator = false):
ShenandoahCMDrainMarkingStackClosure(uint worker_id, TaskTerminator* t, bool reset_terminator = false):
_worker_id(worker_id),
_terminator(t),
_reset_terminator(reset_terminator) {
@@ -552,11 +552,11 @@ public:
class ShenandoahRefProcTaskProxy : public AbstractGangTask {
private:
AbstractRefProcTaskExecutor::ProcessTask& _proc_task;
ShenandoahTaskTerminator* _terminator;
TaskTerminator* _terminator;
public:
ShenandoahRefProcTaskProxy(AbstractRefProcTaskExecutor::ProcessTask& proc_task,
ShenandoahTaskTerminator* t) :
TaskTerminator* t) :
AbstractGangTask("Process reference objects in parallel"),
_proc_task(proc_task),
_terminator(t) {
@@ -600,7 +600,7 @@ public:
/* do_check = */ false);
uint nworkers = _workers->active_workers();
cm->task_queues()->reserve(nworkers);
ShenandoahTaskTerminator terminator(nworkers, cm->task_queues());
TaskTerminator terminator(nworkers, cm->task_queues());
ShenandoahRefProcTaskProxy proc_task_proxy(task, &terminator);
_workers->run_task(&proc_task_proxy);
}
@@ -658,7 +658,7 @@ void ShenandoahConcurrentMark::weak_refs_work_doit(bool full_gc) {
// simplifies implementation. Since RP may decide to call complete_gc several
// times, we need to be able to reuse the terminator.
uint serial_worker_id = 0;
ShenandoahTaskTerminator terminator(1, task_queues());
TaskTerminator terminator(1, task_queues());
ShenandoahCMDrainMarkingStackClosure complete_gc(serial_worker_id, &terminator, /* reset_terminator = */ true);
ShenandoahRefProcTaskExecutor executor(workers);
@@ -703,7 +703,7 @@ public:
ShenandoahHeap* sh = ShenandoahHeap::heap();
ShenandoahConcurrentMark* scm = sh->concurrent_mark();
assert(sh->process_references(), "why else would we be here?");
ShenandoahTaskTerminator terminator(1, scm->task_queues());
TaskTerminator terminator(1, scm->task_queues());
ReferenceProcessor* rp = sh->ref_processor();
shenandoah_assert_rp_isalive_installed();
@@ -826,7 +826,7 @@ ShenandoahObjToScanQueue* ShenandoahConcurrentMark::get_queue(uint worker_id) {
}
template <bool CANCELLABLE>
void ShenandoahConcurrentMark::mark_loop_prework(uint w, ShenandoahTaskTerminator *t, ReferenceProcessor *rp,
void ShenandoahConcurrentMark::mark_loop_prework(uint w, TaskTerminator *t, ReferenceProcessor *rp,
bool strdedup) {
ShenandoahObjToScanQueue* q = get_queue(w);
@@ -876,7 +876,7 @@ void ShenandoahConcurrentMark::mark_loop_prework(uint w, ShenandoahTaskTerminato
}
template <class T, bool CANCELLABLE>
void ShenandoahConcurrentMark::mark_loop_work(T* cl, jushort* live_data, uint worker_id, ShenandoahTaskTerminator *terminator) {
void ShenandoahConcurrentMark::mark_loop_work(T* cl, jushort* live_data, uint worker_id, TaskTerminator *terminator) {
uintx stride = ShenandoahMarkLoopStride;
ShenandoahHeap* heap = ShenandoahHeap::heap();


@@ -1,5 +1,5 @@
/*
* Copyright (c) 2013, 2019, Red Hat, Inc. All rights reserved.
* Copyright (c) 2013, 2020, Red Hat, Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -26,6 +26,7 @@
#define SHARE_GC_SHENANDOAH_SHENANDOAHCONCURRENTMARK_HPP
#include "gc/shared/taskqueue.hpp"
#include "gc/shared/taskTerminator.hpp"
#include "gc/shenandoah/shenandoahOopClosures.hpp"
#include "gc/shenandoah/shenandoahPhaseTimings.hpp"
#include "gc/shenandoah/shenandoahTaskqueue.hpp"
@@ -57,13 +58,13 @@ private:
inline void count_liveness(jushort* live_data, oop obj);
template <class T, bool CANCELLABLE>
void mark_loop_work(T* cl, jushort* live_data, uint worker_id, ShenandoahTaskTerminator *t);
void mark_loop_work(T* cl, jushort* live_data, uint worker_id, TaskTerminator *t);
template <bool CANCELLABLE>
void mark_loop_prework(uint worker_id, ShenandoahTaskTerminator *terminator, ReferenceProcessor *rp, bool strdedup);
void mark_loop_prework(uint worker_id, TaskTerminator *terminator, ReferenceProcessor *rp, bool strdedup);
public:
void mark_loop(uint worker_id, ShenandoahTaskTerminator* terminator, ReferenceProcessor *rp,
void mark_loop(uint worker_id, TaskTerminator* terminator, ReferenceProcessor *rp,
bool cancellable, bool strdedup) {
if (cancellable) {
mark_loop_prework<true>(worker_id, terminator, rp, strdedup);


@@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2019, Red Hat, Inc. All rights reserved.
* Copyright (c) 2016, 2020, Red Hat, Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -50,9 +50,6 @@ bool ShenandoahObjToScanQueueSet::is_empty() {
return true;
}
ShenandoahTaskTerminator::ShenandoahTaskTerminator(uint n_threads, TaskQueueSetSuper* queue_set) :
_terminator(n_threads, queue_set) { }
#if TASKQUEUE_STATS
void ShenandoahObjToScanQueueSet::print_taskqueue_stats_hdr(outputStream* const st) {
st->print_raw_cr("GC Task Stats");


@@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2019, Red Hat, Inc. All rights reserved.
* Copyright (c) 2016, 2020, Red Hat, Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -339,18 +339,4 @@ public:
virtual bool should_exit_termination() { return _heap->cancelled_gc(); }
};
class ShenandoahTaskTerminator : public StackObj {
private:
TaskTerminator _terminator;
public:
ShenandoahTaskTerminator(uint n_threads, TaskQueueSetSuper* queue_set);
bool offer_termination(ShenandoahTerminatorTerminator* terminator) {
return _terminator.offer_termination(terminator);
}
void reset_for_reuse() { _terminator.reset_for_reuse(); }
bool offer_termination() { return offer_termination((ShenandoahTerminatorTerminator*)NULL); }
};
#endif // SHARE_GC_SHENANDOAH_SHENANDOAHTASKQUEUE_HPP
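The removed class above also shows why the wrapper is redundant: it held a TaskTerminator by value and merely forwarded offer_termination() and reset_for_reuse(), passing the ShenandoahTerminatorTerminator* through to the shared offer_termination() unchanged. A minimal sketch of the equivalence, using only the members visible in the removed code (tt stands for whatever ShenandoahTerminatorTerminator* a caller already has; n_threads and queue_set follow the removed constructor signature):

// Sketch only, not standalone HotSpot code.
TaskTerminator terminator(n_threads, queue_set);  // same (uint, TaskQueueSetSuper*) constructor
terminator.offer_termination(tt);                 // the wrapper forwarded this call unchanged
terminator.offer_termination();                   // stands in for the wrapper's no-argument overload
terminator.reset_for_reuse();                     // forwarded one-to-one as well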


@@ -200,10 +200,10 @@ public:
class ShenandoahConcurrentTraversalCollectionTask : public AbstractGangTask {
private:
ShenandoahTaskTerminator* _terminator;
TaskTerminator* _terminator;
ShenandoahHeap* _heap;
public:
ShenandoahConcurrentTraversalCollectionTask(ShenandoahTaskTerminator* terminator) :
ShenandoahConcurrentTraversalCollectionTask(TaskTerminator* terminator) :
AbstractGangTask("Shenandoah Concurrent Traversal Collection"),
_terminator(terminator),
_heap(ShenandoahHeap::heap()) {}
@@ -221,10 +221,10 @@ public:
class ShenandoahFinalTraversalCollectionTask : public AbstractGangTask {
private:
ShenandoahAllRootScanner* _rp;
ShenandoahTaskTerminator* _terminator;
TaskTerminator* _terminator;
ShenandoahHeap* _heap;
public:
ShenandoahFinalTraversalCollectionTask(ShenandoahAllRootScanner* rp, ShenandoahTaskTerminator* terminator) :
ShenandoahFinalTraversalCollectionTask(ShenandoahAllRootScanner* rp, TaskTerminator* terminator) :
AbstractGangTask("Shenandoah Final Traversal Collection"),
_rp(rp),
_terminator(terminator),
@@ -428,7 +428,7 @@ void ShenandoahTraversalGC::init_traversal_collection() {
}
}
void ShenandoahTraversalGC::main_loop(uint w, ShenandoahTaskTerminator* t, bool sts_yield) {
void ShenandoahTraversalGC::main_loop(uint w, TaskTerminator* t, bool sts_yield) {
ShenandoahObjToScanQueue* q = task_queues()->queue(w);
// Initialize live data.
@@ -482,7 +482,7 @@ void ShenandoahTraversalGC::main_loop(uint w, ShenandoahTaskTerminator* t, bool
}
template <class T>
void ShenandoahTraversalGC::main_loop_work(T* cl, jushort* live_data, uint worker_id, ShenandoahTaskTerminator* terminator, bool sts_yield) {
void ShenandoahTraversalGC::main_loop_work(T* cl, jushort* live_data, uint worker_id, TaskTerminator* terminator, bool sts_yield) {
ShenandoahObjToScanQueueSet* queues = task_queues();
ShenandoahObjToScanQueue* q = queues->queue(worker_id);
ShenandoahConcurrentMark* conc_mark = _heap->concurrent_mark();
@@ -546,7 +546,7 @@ void ShenandoahTraversalGC::main_loop_work(T* cl, jushort* live_data, uint worke
}
}
bool ShenandoahTraversalGC::check_and_handle_cancelled_gc(ShenandoahTaskTerminator* terminator, bool sts_yield) {
bool ShenandoahTraversalGC::check_and_handle_cancelled_gc(TaskTerminator* terminator, bool sts_yield) {
if (_heap->cancelled_gc()) {
return true;
}
@@ -560,7 +560,7 @@ void ShenandoahTraversalGC::concurrent_traversal_collection() {
task_queues()->reserve(nworkers);
ShenandoahTerminationTracker tracker(ShenandoahPhaseTimings::conc_traversal_termination);
ShenandoahTaskTerminator terminator(nworkers, task_queues());
TaskTerminator terminator(nworkers, task_queues());
ShenandoahConcurrentTraversalCollectionTask task(&terminator);
_heap->workers()->run_task(&task);
}
@@ -585,7 +585,7 @@ void ShenandoahTraversalGC::final_traversal_collection() {
ShenandoahAllRootScanner rp(nworkers, ShenandoahPhaseTimings::final_traversal_gc_work);
ShenandoahTerminationTracker term(ShenandoahPhaseTimings::final_traversal_gc_termination);
ShenandoahTaskTerminator terminator(nworkers, task_queues());
TaskTerminator terminator(nworkers, task_queues());
ShenandoahFinalTraversalCollectionTask task(&rp, &terminator);
_heap->workers()->run_task(&task);
#if COMPILER2_OR_JVMCI
@@ -776,7 +776,7 @@ public:
ShenandoahHeap* sh = ShenandoahHeap::heap();
ShenandoahTraversalGC* traversal_gc = sh->traversal_gc();
assert(sh->process_references(), "why else would we be here?");
ShenandoahTaskTerminator terminator(1, traversal_gc->task_queues());
TaskTerminator terminator(1, traversal_gc->task_queues());
shenandoah_assert_rp_isalive_installed();
traversal_gc->main_loop((uint) 0, &terminator, true);
}
@@ -943,11 +943,11 @@ void ShenandoahTraversalGC::preclean_weak_refs() {
// Weak Reference Closures
class ShenandoahTraversalDrainMarkingStackClosure: public VoidClosure {
uint _worker_id;
ShenandoahTaskTerminator* _terminator;
TaskTerminator* _terminator;
bool _reset_terminator;
public:
ShenandoahTraversalDrainMarkingStackClosure(uint worker_id, ShenandoahTaskTerminator* t, bool reset_terminator = false):
ShenandoahTraversalDrainMarkingStackClosure(uint worker_id, TaskTerminator* t, bool reset_terminator = false):
_worker_id(worker_id),
_terminator(t),
_reset_terminator(reset_terminator) {
@@ -971,11 +971,11 @@ public:
class ShenandoahTraversalSingleThreadedDrainMarkingStackClosure: public VoidClosure {
uint _worker_id;
ShenandoahTaskTerminator* _terminator;
TaskTerminator* _terminator;
bool _reset_terminator;
public:
ShenandoahTraversalSingleThreadedDrainMarkingStackClosure(uint worker_id, ShenandoahTaskTerminator* t, bool reset_terminator = false):
ShenandoahTraversalSingleThreadedDrainMarkingStackClosure(uint worker_id, TaskTerminator* t, bool reset_terminator = false):
_worker_id(worker_id),
_terminator(t),
_reset_terminator(reset_terminator) {
@@ -1019,11 +1019,11 @@ void ShenandoahTraversalGC::weak_refs_work() {
class ShenandoahTraversalRefProcTaskProxy : public AbstractGangTask {
private:
AbstractRefProcTaskExecutor::ProcessTask& _proc_task;
ShenandoahTaskTerminator* _terminator;
TaskTerminator* _terminator;
public:
ShenandoahTraversalRefProcTaskProxy(AbstractRefProcTaskExecutor::ProcessTask& proc_task,
ShenandoahTaskTerminator* t) :
TaskTerminator* t) :
AbstractGangTask("Process reference objects in parallel"),
_proc_task(proc_task),
_terminator(t) {
@@ -1064,7 +1064,7 @@ public:
/* do_check = */ false);
uint nworkers = _workers->active_workers();
traversal_gc->task_queues()->reserve(nworkers);
ShenandoahTaskTerminator terminator(nworkers, traversal_gc->task_queues());
TaskTerminator terminator(nworkers, traversal_gc->task_queues());
ShenandoahTraversalRefProcTaskProxy proc_task_proxy(task, &terminator);
_workers->run_task(&proc_task_proxy);
}
@@ -1092,7 +1092,7 @@ void ShenandoahTraversalGC::weak_refs_work_doit() {
// simplifies implementation. Since RP may decide to call complete_gc several
// times, we need to be able to reuse the terminator.
uint serial_worker_id = 0;
ShenandoahTaskTerminator terminator(1, task_queues());
TaskTerminator terminator(1, task_queues());
ShenandoahTraversalSingleThreadedDrainMarkingStackClosure complete_gc(serial_worker_id, &terminator, /* reset_terminator = */ true);
ShenandoahPushWorkerQueuesScope scope(workers, task_queues(), 1, /* do_check = */ false);


@@ -1,5 +1,5 @@
/*
* Copyright (c) 2018, 2019, Red Hat, Inc. All rights reserved.
* Copyright (c) 2018, 2020, Red Hat, Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -26,6 +26,7 @@
#define SHARE_GC_SHENANDOAH_SHENANDOAHTRAVERSALGC_HPP
#include "memory/allocation.hpp"
#include "gc/shared/taskTerminator.hpp"
#include "gc/shenandoah/shenandoahHeap.hpp"
#include "gc/shenandoah/shenandoahHeapRegionSet.hpp"
#include "gc/shenandoah/shenandoahTaskqueue.hpp"
@@ -52,17 +53,17 @@ public:
template <class T, bool STRING_DEDUP, bool DEGEN, bool ATOMIC_UPDATE>
inline void process_oop(T* p, Thread* thread, ShenandoahObjToScanQueue* queue, ShenandoahMarkingContext* const mark_context);
bool check_and_handle_cancelled_gc(ShenandoahTaskTerminator* terminator, bool sts_yield);
bool check_and_handle_cancelled_gc(TaskTerminator* terminator, bool sts_yield);
ShenandoahObjToScanQueueSet* task_queues();
void main_loop(uint worker_id, ShenandoahTaskTerminator* terminator, bool sts_yield);
void main_loop(uint worker_id, TaskTerminator* terminator, bool sts_yield);
private:
void prepare_regions();
template <class T>
void main_loop_work(T* cl, jushort* live_data, uint worker_id, ShenandoahTaskTerminator* terminator, bool sts_yield);
void main_loop_work(T* cl, jushort* live_data, uint worker_id, TaskTerminator* terminator, bool sts_yield);
void preclean_weak_refs();
void weak_refs_work();