8210713: Let CollectedHeap::ensure_parsability() take care of TLAB statistics gathering
Reviewed-by: eosterlund, sjohanss
commit a8703f9803
parent aafb2b0474
@@ -2484,7 +2484,6 @@ void G1CollectedHeap::gc_prologue(bool full) {
 
   // Fill TLAB's and such
   double start = os::elapsedTime();
-  accumulate_statistics_all_tlabs();
   ensure_parsability(true);
   g1_policy()->phase_times()->record_prepare_tlab_time_ms((os::elapsedTime() - start) * 1000.0);
 }
@@ -488,10 +488,6 @@ HeapWord* ParallelScavengeHeap::allocate_new_tlab(size_t min_size, size_t reques
   return result;
 }
 
-void ParallelScavengeHeap::accumulate_statistics_all_tlabs() {
-  CollectedHeap::accumulate_statistics_all_tlabs();
-}
-
 void ParallelScavengeHeap::resize_all_tlabs() {
   CollectedHeap::resize_all_tlabs();
 }
@@ -206,7 +206,6 @@ class ParallelScavengeHeap : public CollectedHeap {
   HeapWord** end_addr() const { return !UseNUMA ? young_gen()->end_addr() : (HeapWord**)-1; }
 
   void ensure_parsability(bool retire_tlabs);
-  void accumulate_statistics_all_tlabs();
   void resize_all_tlabs();
 
   bool supports_tlab_allocation() const { return true; }
@@ -150,7 +150,6 @@ bool PSMarkSweep::invoke_no_policy(bool clear_all_softrefs) {
   heap->trace_heap_before_gc(_gc_tracer);
 
   // Fill in TLABs
-  heap->accumulate_statistics_all_tlabs();
   heap->ensure_parsability(true);  // retire TLABs
 
   if (VerifyBeforeGC && heap->total_collections() >= VerifyGCStartAt) {
@@ -972,7 +972,6 @@ void PSParallelCompact::pre_compact()
   heap->trace_heap_before_gc(&_gc_tracer);
 
   // Fill in TLABs
-  heap->accumulate_statistics_all_tlabs();
   heap->ensure_parsability(true);  // retire TLABs
 
   if (VerifyBeforeGC && heap->total_collections() >= VerifyGCStartAt) {
@@ -279,7 +279,6 @@ bool PSScavenge::invoke_no_policy() {
   assert(!AlwaysTenure || _tenuring_threshold == 0, "Sanity");
 
   // Fill in TLABs
-  heap->accumulate_statistics_all_tlabs();
   heap->ensure_parsability(true);  // retire TLABs
 
   if (VerifyBeforeGC && heap->total_collections() >= VerifyGCStartAt) {
@@ -476,35 +476,31 @@ void CollectedHeap::ensure_parsability(bool retire_tlabs) {
   // started allocating (nothing much to verify) or we have
   // started allocating but are now a full-fledged JavaThread
   // (and have thus made our TLAB's) available for filling.
-  assert(SafepointSynchronize::is_at_safepoint() ||
-         !is_init_completed(),
+  assert(SafepointSynchronize::is_at_safepoint() || !is_init_completed(),
          "Should only be called at a safepoint or at start-up"
          " otherwise concurrent mutator activity may make heap "
          " unparsable again");
-  const bool use_tlab = UseTLAB;
+
+  if (UseTLAB && retire_tlabs) {
+    // Accumulate statistics before retiring
+    ThreadLocalAllocBuffer::accumulate_statistics_before_gc();
+  }
+
   // The main thread starts allocating via a TLAB even before it
   // has added itself to the threads list at vm boot-up.
   JavaThreadIteratorWithHandle jtiwh;
-  assert(!use_tlab || jtiwh.length() > 0,
+  assert(jtiwh.length() > 0,
          "Attempt to fill tlabs before main thread has been added"
          " to threads list is doomed to failure!");
   BarrierSet *bs = BarrierSet::barrier_set();
   for (; JavaThread *thread = jtiwh.next(); ) {
-    if (use_tlab) thread->tlab().make_parsable(retire_tlabs);
+    if (UseTLAB) {
+      thread->tlab().make_parsable(retire_tlabs);
+    }
     bs->make_parsable(thread);
   }
 }
 
-void CollectedHeap::accumulate_statistics_all_tlabs() {
-  if (UseTLAB) {
-    assert(SafepointSynchronize::is_at_safepoint() ||
-           !is_init_completed(),
-           "should only accumulate statistics on tlabs at safepoint");
-
-    ThreadLocalAllocBuffer::accumulate_statistics_before_gc();
-  }
-}
-
 void CollectedHeap::resize_all_tlabs() {
   assert(SafepointSynchronize::is_at_safepoint() || !is_init_completed(),
          "Should only resize tlabs at safepoint");
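
The hunk above is the heart of the change: ensure_parsability() now performs the statistics pass itself, guarded by UseTLAB && retire_tlabs, and it does so before any TLAB is made parsable, since retiring a buffer destroys the data the statistics need. A minimal stand-alone sketch of that ordering, using simplified stand-in types rather than the real CollectedHeap and ThreadLocalAllocBuffer classes:

// Minimal model of the consolidation: statistics are gathered inside
// ensure_parsability() itself, before the buffers are reset, so no caller
// can forget the accumulation or run it twice. All names here are
// simplified stand-ins for illustration, not HotSpot code.
#include <cstddef>
#include <cstdio>
#include <vector>

struct Tlab {
  std::size_t used = 0;
  void make_parsable(bool retire) {
    if (retire) {
      used = 0;  // model: retiring a TLAB discards its current fill state
    }
  }
};

struct Heap {
  std::vector<Tlab> tlabs;
  std::size_t used_before_gc = 0;

  // Single entry point; the statistics pass runs first because
  // make_parsable() destroys the data it needs.
  void ensure_parsability(bool retire_tlabs) {
    if (retire_tlabs) {
      for (const Tlab& t : tlabs) {
        used_before_gc += t.used;  // stood for accumulate_statistics_all_tlabs()
      }
    }
    for (Tlab& t : tlabs) {
      t.make_parsable(retire_tlabs);
    }
  }
};

int main() {
  Heap heap;
  heap.tlabs.resize(2);
  heap.tlabs[0].used = 128;
  heap.tlabs[1].used = 256;
  heap.ensure_parsability(true);  // one call, as in the new GC prologues
  std::printf("TLAB bytes used before GC: %zu\n", heap.used_before_gc);  // 384
  return 0;
}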
@@ -137,9 +137,6 @@ class CollectedHeap : public CHeapObj<mtInternal> {
                                       size_t requested_size,
                                       size_t* actual_size);
 
-  // Accumulate statistics on all tlabs.
-  virtual void accumulate_statistics_all_tlabs();
-
   // Reinitialize tlabs before resuming mutators.
   virtual void resize_all_tlabs();
 
@@ -1293,7 +1293,6 @@ void GenCollectedHeap::gc_prologue(bool full) {
   assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
 
   // Fill TLAB's and such
-  CollectedHeap::accumulate_statistics_all_tlabs();
   ensure_parsability(true);   // retire TLABs
 
   // Walk generations
@@ -57,7 +57,6 @@ public:
   static ZCollectedHeap* heap();
 
   using CollectedHeap::ensure_parsability;
-  using CollectedHeap::accumulate_statistics_all_tlabs;
   using CollectedHeap::resize_all_tlabs;
 
   ZCollectedHeap(ZCollectorPolicy* policy);
@@ -300,7 +300,6 @@ void ZObjectAllocator::retire_tlabs() {
   // Retire TLABs
   if (UseTLAB) {
     ZCollectedHeap* heap = ZCollectedHeap::heap();
-    heap->accumulate_statistics_all_tlabs();
     heap->ensure_parsability(true /* retire_tlabs */);
     heap->resize_all_tlabs();
   }
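
As the ZGC hunk shows, every call site collapses from a pair of coupled calls to a single one. A hedged sketch of that before/after call-site shape, with a hypothetical HeapApi type standing in for the real heap interface:

// Hypothetical stand-in interface, for illustration only (not CollectedHeap).
struct HeapApi {
  void accumulate_statistics_all_tlabs() {}          // public call removed by this change
  void ensure_parsability(bool /*retire_tlabs*/) {}  // now also gathers the statistics
};

// Old call-site shape: two calls that had to stay together, in this order,
// at every GC prologue.
void gc_prologue_old(HeapApi& heap) {
  heap.accumulate_statistics_all_tlabs();
  heap.ensure_parsability(true);  // retire TLABs
}

// New call-site shape: one call; the ordering constraint lives in the callee.
void gc_prologue_new(HeapApi& heap) {
  heap.ensure_parsability(true);  // retire TLABs
}

int main() {
  HeapApi heap;
  gc_prologue_new(heap);
  return 0;
}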