8239926: Shenandoah: Shenandoah needs to mark nmethod's metadata

Reviewed-by: rkennke, shade
Zhengyu Gu 2020-02-25 12:01:35 -05:00
parent 8b73900222
commit b4ff6abe63
9 changed files with 169 additions and 44 deletions


@@ -171,11 +171,54 @@ void ShenandoahCodeRoots::arm_nmethods() {
}
}
class ShenandoahDisarmNMethodClosure : public NMethodClosure {
private:
BarrierSetNMethod* const _bs;
public:
ShenandoahDisarmNMethodClosure() :
_bs(BarrierSet::barrier_set()->barrier_set_nmethod()) {
}
virtual void do_nmethod(nmethod* nm) {
_bs->disarm(nm);
}
};
class ShenandoahDisarmNMethodsTask : public AbstractGangTask {
private:
ShenandoahDisarmNMethodClosure _cl;
ShenandoahConcurrentNMethodIterator _iterator;
public:
ShenandoahDisarmNMethodsTask() :
AbstractGangTask("ShenandoahDisarmNMethodsTask"),
_iterator(ShenandoahCodeRoots::table()) {
MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
_iterator.nmethods_do_begin();
}
~ShenandoahDisarmNMethodsTask() {
MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
_iterator.nmethods_do_end();
}
virtual void work(uint worker_id) {
_iterator.nmethods_do(&_cl);
}
};
void ShenandoahCodeRoots::disarm_nmethods() {
ShenandoahDisarmNMethodsTask task;
ShenandoahHeap::heap()->workers()->run_task(&task);
}
class ShenandoahNMethodUnlinkClosure : public NMethodClosure {
private:
bool _unloading_occurred;
volatile bool _failed;
ShenandoahHeap* _heap;
bool _unloading_occurred;
volatile bool _failed;
ShenandoahHeap* const _heap;
BarrierSetNMethod* const _bs;
void set_failed() {
Atomic::store(&_failed, true);
@@ -201,7 +244,8 @@ public:
ShenandoahNMethodUnlinkClosure(bool unloading_occurred) :
_unloading_occurred(unloading_occurred),
_failed(false),
_heap(ShenandoahHeap::heap()) {}
_heap(ShenandoahHeap::heap()),
_bs(ShenandoahBarrierSet::barrier_set()->barrier_set_nmethod()) {}
virtual void do_nmethod(nmethod* nm) {
assert(_heap->is_concurrent_root_in_progress(), "Only this phase");
@@ -225,10 +269,10 @@ public:
ShenandoahReentrantLocker locker(nm_data->lock());
// Heal oops and disarm
if (_heap->is_evacuation_in_progress()) {
if (_bs->is_armed(nm)) {
ShenandoahNMethod::heal_nmethod(nm);
_bs->disarm(nm);
}
ShenandoahNMethod::disarm_nmethod(nm);
// Clear compiled ICs and exception caches
if (!nm->unload_nmethod_caches(_unloading_occurred)) {


@@ -1,5 +1,5 @@
/*
* Copyright (c) 2017, 2019, Red Hat, Inc. All rights reserved.
* Copyright (c) 2017, 2020, Red Hat, Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -111,6 +111,7 @@ public:
static void unlink(WorkGang* workers, bool unloading_occurred);
static void purge(WorkGang* workers);
static void arm_nmethods();
static void disarm_nmethods();
static int disarmed_value() { return _disarmed_value; }
static int* disarmed_value_address() { return &_disarmed_value; }


@@ -180,19 +180,30 @@ public:
}
};
class ShenandoahSATBThreadsClosure : public ThreadClosure {
class ShenandoahSATBAndRemarkCodeRootsThreadsClosure : public ThreadClosure {
private:
ShenandoahSATBBufferClosure* _satb_cl;
MarkingCodeBlobClosure* _code_cl;
uintx _claim_token;
public:
ShenandoahSATBThreadsClosure(ShenandoahSATBBufferClosure* satb_cl) :
_satb_cl(satb_cl),
ShenandoahSATBAndRemarkCodeRootsThreadsClosure(ShenandoahSATBBufferClosure* satb_cl, MarkingCodeBlobClosure* code_cl) :
_satb_cl(satb_cl), _code_cl(code_cl),
_claim_token(Threads::thread_claim_token()) {}
void do_thread(Thread* thread) {
if (thread->claim_threads_do(true, _claim_token)) {
ShenandoahThreadLocalData::satb_mark_queue(thread).apply_closure_and_empty(_satb_cl);
if (_code_cl != NULL && thread->is_Java_thread()) {
// In theory it should not be necessary to explicitly walk the nmethods to find roots for concurrent marking;
// however, oops reachable from nmethods have very complex lifecycles:
// * Alive if on the stack of an executing method
// * Weakly reachable otherwise
// Some objects reachable from nmethods, such as the class loader (or klass_holder) of the receiver, should be
// live by the SATB invariant, but other oops recorded in nmethods may behave differently.
JavaThread* jt = (JavaThread*)thread;
jt->nmethods_do(_code_cl);
}
}
}
};
@@ -212,6 +223,14 @@ public:
ShenandoahHeap* heap = ShenandoahHeap::heap();
ShenandoahParallelWorkerSession worker_session(worker_id);
ReferenceProcessor* rp;
if (heap->process_references()) {
rp = heap->ref_processor();
shenandoah_assert_rp_isalive_installed();
} else {
rp = NULL;
}
// First drain remaining SATB buffers.
// Notice that this is not strictly necessary for mark-compact. But since
// it requires a StrongRootsScope around the task, we need to claim the
@@ -219,19 +238,27 @@ public:
// full-gc.
{
ShenandoahObjToScanQueue* q = _cm->get_queue(worker_id);
ShenandoahSATBBufferClosure cl(q);
SATBMarkQueueSet& satb_mq_set = ShenandoahBarrierSet::satb_mark_queue_set();
while (satb_mq_set.apply_closure_to_completed_buffer(&cl));
ShenandoahSATBThreadsClosure tc(&cl);
Threads::threads_do(&tc);
}
ReferenceProcessor* rp;
if (heap->process_references()) {
rp = heap->ref_processor();
shenandoah_assert_rp_isalive_installed();
} else {
rp = NULL;
if (heap->unload_classes() && !ShenandoahConcurrentRoots::can_do_concurrent_class_unloading()) {
if (heap->has_forwarded_objects()) {
ShenandoahMarkResolveRefsClosure resolve_mark_cl(q, rp);
MarkingCodeBlobClosure blobsCl(&resolve_mark_cl, !CodeBlobToOopClosure::FixRelocations);
ShenandoahSATBAndRemarkCodeRootsThreadsClosure tc(&cl, &blobsCl);
Threads::threads_do(&tc);
} else {
ShenandoahMarkRefsClosure mark_cl(q, rp);
MarkingCodeBlobClosure blobsCl(&mark_cl, !CodeBlobToOopClosure::FixRelocations);
ShenandoahSATBAndRemarkCodeRootsThreadsClosure tc(&cl, &blobsCl);
Threads::threads_do(&tc);
}
} else {
ShenandoahSATBAndRemarkCodeRootsThreadsClosure tc(&cl, NULL);
Threads::threads_do(&tc);
}
}
if (heap->is_degenerated_gc_in_progress()) {


@@ -1420,6 +1420,13 @@ void ShenandoahHeap::op_init_mark() {
if (ShenandoahPacing) {
pacer()->setup_for_mark();
}
// Arm nmethods for concurrent marking. When an nmethod is about to be executed,
// we need to make sure that all its metadata are marked. The alternative is to remark
// thread roots at the final mark pause, but that could be a potential latency killer.
if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
ShenandoahCodeRoots::arm_nmethods();
}
}
void ShenandoahHeap::op_mark() {
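
For orientation, here is a minimal, hypothetical sketch of what the nmethod entry barrier is assumed to do once init mark has armed the nmethods. The function name on_nmethod_entry is illustrative only (the real hook lives in Shenandoah's BarrierSetNMethod implementation); the heal-then-disarm pattern mirrors ShenandoahNMethodUnlinkClosure above.

// Hypothetical sketch, not the actual HotSpot entry-barrier code: when an armed
// nmethod is about to execute, heal its oops (which also keeps its metadata alive
// during concurrent mark) and then clear the barrier.
static bool on_nmethod_entry(nmethod* nm) {
  BarrierSetNMethod* const bs = BarrierSet::barrier_set()->barrier_set_nmethod();
  if (!bs->is_armed(nm)) {
    return true;                                   // Fast path: barrier already cleared.
  }
  ShenandoahNMethod* const data = ShenandoahNMethod::gc_data(nm);
  ShenandoahReentrantLocker locker(data->lock());  // Serialize with concurrent unlinking/healing.
  ShenandoahNMethod::heal_nmethod(nm);             // Phase-dependent: mark metadata or evacuate/update oops.
  bs->disarm(nm);                                  // Subsequent entries take the fast path.
  return true;                                     // The nmethod is now safe to enter.
}

On a degenerated cycle the entry barrier may never fire, which is why op_degenerated() below disarms the nmethods explicitly.
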
@@ -1879,6 +1886,13 @@ void ShenandoahHeap::op_degenerated(ShenandoahDegenPoint point) {
return;
}
if (!has_forwarded_objects() && ShenandoahConcurrentRoots::can_do_concurrent_class_unloading()) {
// Disarm nmethods that were armed for concurrent mark. On a normal cycle, they would
// be disarmed while the conc-roots phase is running.
// TODO: Call op_conc_roots() here instead
ShenandoahCodeRoots::disarm_nmethods();
}
op_cleanup();
case _degenerated_evac:


@@ -175,15 +175,54 @@ ShenandoahNMethod* ShenandoahNMethod::for_nmethod(nmethod* nm) {
return new ShenandoahNMethod(nm, oops, non_immediate_oops);
}
template <bool HAS_FWD>
class ShenandoahKeepNMethodMetadataAliveClosure : public OopClosure {
private:
ShenandoahBarrierSet* const _bs;
public:
ShenandoahKeepNMethodMetadataAliveClosure() :
_bs(static_cast<ShenandoahBarrierSet*>(BarrierSet::barrier_set())) {
}
virtual void do_oop(oop* p) {
oop obj = RawAccess<>::oop_load(p);
if (!CompressedOops::is_null(obj)) {
if (HAS_FWD) {
obj = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
}
_bs->enqueue(obj);
}
}
virtual void do_oop(narrowOop* p) {
ShouldNotReachHere();
}
};
void ShenandoahNMethod::heal_nmethod(nmethod* nm) {
assert(ShenandoahHeap::heap()->is_concurrent_root_in_progress(), "Only this phase");
ShenandoahNMethod* data = gc_data(nm);
assert(data != NULL, "Sanity");
assert(data->lock()->owned_by_self(), "Must hold the lock");
ShenandoahEvacOOMScope evac_scope;
ShenandoahEvacuateUpdateRootsClosure<> cl;
data->oops_do(&cl, true /*fix relocation*/);
ShenandoahHeap* const heap = ShenandoahHeap::heap();
if (heap->is_concurrent_mark_in_progress()) {
if (heap->has_forwarded_objects()) {
ShenandoahKeepNMethodMetadataAliveClosure<true> cl;
data->oops_do(&cl);
} else {
ShenandoahKeepNMethodMetadataAliveClosure<false> cl;
data->oops_do(&cl);
}
} else if (heap->is_concurrent_root_in_progress()) {
ShenandoahEvacOOMScope evac_scope;
ShenandoahEvacuateUpdateRootsClosure<> cl;
data->oops_do(&cl, true /*fix relocation*/);
} else {
// There is a possibility that the GC is cancelled when it arrives at final mark.
// In this case, the concurrent root phase is skipped and a degenerated GC should
// follow, where nmethods are disarmed.
assert(heap->cancelled_gc(), "What else?");
}
}
#ifdef ASSERT


@@ -181,12 +181,12 @@ ShenandoahRootProcessor::ShenandoahRootProcessor(ShenandoahPhaseTimings::Phase p
ShenandoahRootEvacuator::ShenandoahRootEvacuator(uint n_workers,
ShenandoahPhaseTimings::Phase phase,
bool include_concurrent_roots,
bool include_concurrent_code_roots) :
bool stw_roots_processing,
bool stw_class_unloading) :
ShenandoahRootProcessor(phase),
_thread_roots(n_workers > 1),
_include_concurrent_roots(include_concurrent_roots),
_include_concurrent_code_roots(include_concurrent_code_roots) {
_stw_roots_processing(stw_roots_processing),
_stw_class_unloading(stw_class_unloading) {
}
void ShenandoahRootEvacuator::roots_do(uint worker_id, OopClosure* oops) {
@@ -199,15 +199,15 @@ void ShenandoahRootEvacuator::roots_do(uint worker_id, OopClosure* oops) {
_serial_roots.oops_do(oops, worker_id);
_serial_weak_roots.weak_oops_do(oops, worker_id);
if (_include_concurrent_roots) {
CLDToOopClosure clds(oops, ClassLoaderData::_claim_strong);
if (_stw_roots_processing) {
_vm_roots.oops_do<OopClosure>(oops, worker_id);
_cld_roots.cld_do(&clds, worker_id);
_weak_roots.oops_do<OopClosure>(oops, worker_id);
_dedup_roots.oops_do(&always_true, oops, worker_id);
}
if (_include_concurrent_code_roots) {
if (_stw_class_unloading) {
CLDToOopClosure clds(oops, ClassLoaderData::_claim_strong);
_cld_roots.cld_do(&clds, worker_id);
_code_roots.code_blobs_do(codes_cl, worker_id);
_thread_roots.oops_do(oops, NULL, worker_id);
} else {


@@ -288,11 +288,11 @@ private:
ShenandoahWeakRoots<false /*concurrent*/> _weak_roots;
ShenandoahStringDedupRoots _dedup_roots;
ShenandoahCodeCacheRoots<ShenandoahAllCodeRootsIterator> _code_roots;
bool _include_concurrent_roots;
bool _include_concurrent_code_roots;
bool _stw_roots_processing;
bool _stw_class_unloading;
public:
ShenandoahRootEvacuator(uint n_workers, ShenandoahPhaseTimings::Phase phase,
bool include_concurrent_roots, bool _include_concurrent_code_roots);
bool stw_roots_processing, bool stw_class_unloading);
void roots_do(uint worker_id, OopClosure* oops);
};


@@ -255,15 +255,16 @@ public:
// Step 1: Process GC roots.
// For oops in code roots, they are marked, evacuated, enqueued for further traversal,
// and the references to the oops are updated during init pause. New nmethods are handled
// in similar way during nmethod-register process. Therefore, we don't need to rescan code
// roots here.
// and the references to the oops are updated during init pause. We only need to rescan
// on-stack code roots when class unloading is enabled. Otherwise, code roots are
// scanned during init traversal, or a degenerated GC will update them at the end.
if (!_heap->is_degenerated_gc_in_progress()) {
ShenandoahTraversalRootsClosure roots_cl(q, rp);
ShenandoahTraversalSATBThreadsClosure tc(&satb_cl);
if (unload_classes) {
ShenandoahRemarkCLDClosure remark_cld_cl(&roots_cl);
_rp->strong_roots_do(worker_id, &roots_cl, &remark_cld_cl, NULL, &tc);
MarkingCodeBlobClosure code_cl(&roots_cl, CodeBlobToOopClosure::FixRelocations);
_rp->strong_roots_do(worker_id, &roots_cl, &remark_cld_cl, &code_cl, &tc);
} else {
CLDToOopClosure cld_cl(&roots_cl, ClassLoaderData::_claim_strong);
_rp->roots_do(worker_id, &roots_cl, &cld_cl, NULL, &tc);


@@ -1,5 +1,5 @@
/*
* Copyright (c) 2019, Red Hat, Inc. All rights reserved.
* Copyright (c) 2019, 2020, Red Hat, Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -45,12 +45,12 @@
class ShenandoahIsUnloadingOopClosure : public OopClosure {
private:
ShenandoahMarkingContext* _marking_context;
bool _is_unloading;
ShenandoahMarkingContext* const _marking_context;
bool _is_unloading;
public:
ShenandoahIsUnloadingOopClosure() :
_marking_context(ShenandoahHeap::heap()->marking_context()),
_marking_context(ShenandoahHeap::heap()->complete_marking_context()),
_is_unloading(false) {
}
@@ -61,7 +61,6 @@ public:
const oop o = RawAccess<>::oop_load(p);
if (!CompressedOops::is_null(o) &&
_marking_context->is_complete() &&
!_marking_context->is_marked(o)) {
_is_unloading = true;
}
@@ -80,7 +79,7 @@ class ShenandoahIsUnloadingBehaviour : public IsUnloadingBehaviour {
public:
virtual bool is_unloading(CompiledMethod* method) const {
nmethod* const nm = method->as_nmethod();
guarantee(ShenandoahHeap::heap()->is_concurrent_root_in_progress(), "Only this phase");
assert(ShenandoahHeap::heap()->is_concurrent_root_in_progress(), "Only for this phase");
ShenandoahNMethod* data = ShenandoahNMethod::gc_data(nm);
ShenandoahReentrantLocker locker(data->lock());
ShenandoahIsUnloadingOopClosure cl;