8310194: Generational ZGC: Lock-order asserts in JVMTI IterateThroughHeap

Reviewed-by: eosterlund, aboldtch
Stefan Karlsson 2023-06-20 13:46:49 +00:00
parent e1906e7641
commit 4e4e586dac
5 changed files with 166 additions and 100 deletions
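
The heart of the change visible in the diffs below is a new for_verify mode in ZHeapIterator: verification wants to visit objects as soon as they are marked, while JVMTI heap iteration defers visiting to the follow/drain stage, after root iteration has completed and its locks are no longer held, which is what avoids the lock-order asserts. The following is a minimal, self-contained sketch of that policy only, not the HotSpot code itself; the Mock* names are invented for illustration and do not exist in the JDK sources.

#include <cstdio>
#include <queue>

struct MockObject { int id; };                 // stand-in for oop

struct MockObjectClosure {                     // stand-in for ObjectClosure
  void do_object(const MockObject& obj) { std::printf("visit %d\n", obj.id); }
};

class MockHeapIterator {
private:
  const bool _for_verify;
  std::queue<MockObject> _queue;               // stand-in for the worker queue

  bool should_visit_object_at_mark() const   { return _for_verify;  }
  bool should_visit_object_at_follow() const { return !_for_verify; }

public:
  explicit MockHeapIterator(bool for_verify)
    : _for_verify(for_verify),
      _queue() {}

  // Called while root-iteration locks may still be held.
  void mark_visit_and_push(MockObjectClosure& cl, const MockObject& obj) {
    if (should_visit_object_at_mark()) {
      cl.do_object(obj);                       // verify mode: visit immediately
    }
    _queue.push(obj);
  }

  // Called after root iteration, with no root locks held.
  void drain(MockObjectClosure& cl) {
    while (!_queue.empty()) {
      MockObject obj = _queue.front();
      _queue.pop();
      if (should_visit_object_at_follow()) {
        cl.do_object(obj);                     // JVMTI mode: visit here instead
      }
      // ...follow the object's references and push newly marked objects...
    }
  }
};

int main() {
  MockObjectClosure cl;
  MockHeapIterator jvmti_iter(false /* for_verify */);
  jvmti_iter.mark_visit_and_push(cl, MockObject{1});  // pushed, not yet visited
  jvmti_iter.drain(cl);                               // "visit 1" printed here
  return 0;
}

Constructing the iterator with true /* for_verify */ instead makes the visit happen inside mark_visit_and_push, mirroring the behavior the verification path keeps in the real change.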


@@ -293,19 +293,19 @@ bool ZHeap::is_allocating(zaddress addr) const {
void ZHeap::object_iterate(ObjectClosure* object_cl, bool visit_weaks) {
assert(SafepointSynchronize::is_at_safepoint(), "Should be at safepoint");
ZHeapIterator iter(1 /* nworkers */, visit_weaks);
ZHeapIterator iter(1 /* nworkers */, visit_weaks, false /* for_verify */);
iter.object_iterate(object_cl, 0 /* worker_id */);
}
void ZHeap::object_and_field_iterate(ObjectClosure* object_cl, OopFieldClosure* field_cl, bool visit_weaks) {
void ZHeap::object_and_field_iterate_for_verify(ObjectClosure* object_cl, OopFieldClosure* field_cl, bool visit_weaks) {
assert(SafepointSynchronize::is_at_safepoint(), "Should be at safepoint");
ZHeapIterator iter(1 /* nworkers */, visit_weaks);
ZHeapIterator iter(1 /* nworkers */, visit_weaks, true /* for_verify */);
iter.object_and_field_iterate(object_cl, field_cl, 0 /* worker_id */);
}
ParallelObjectIteratorImpl* ZHeap::parallel_object_iterator(uint nworkers, bool visit_weaks) {
assert(SafepointSynchronize::is_at_safepoint(), "Should be at safepoint");
return new ZHeapIterator(nworkers, visit_weaks);
return new ZHeapIterator(nworkers, visit_weaks, false /* for_verify */);
}
void ZHeap::serviceability_initialize() {


@@ -118,7 +118,7 @@ public:
// Iteration
void object_iterate(ObjectClosure* object_cl, bool visit_weaks);
void object_and_field_iterate(ObjectClosure* object_cl, OopFieldClosure* field_cl, bool visit_weaks);
void object_and_field_iterate_for_verify(ObjectClosure* object_cl, OopFieldClosure* field_cl, bool visit_weaks);
ParallelObjectIteratorImpl* parallel_object_iterator(uint nworkers, bool visit_weaks);
void threads_do(ThreadClosure* tc) const;


@@ -55,21 +55,27 @@ public:
class ZHeapIteratorContext {
private:
ZHeapIterator* const _iter;
ZHeapIteratorQueue* const _queue;
ZHeapIteratorArrayQueue* const _array_queue;
const uint _worker_id;
ObjectClosure* _object_cl;
OopFieldClosure* _field_cl;
ObjectClosure* const _object_cl;
OopFieldClosure* const _field_cl;
const uint _worker_id;
ZHeapIteratorQueue* const _queue;
ZHeapIteratorArrayChunkQueue* const _array_chunk_queue;
public:
ZHeapIteratorContext(ZHeapIterator* iter, ObjectClosure* object_cl, OopFieldClosure* field_cl, uint worker_id)
: _iter(iter),
_queue(_iter->_queues.queue(worker_id)),
_array_queue(_iter->_array_queues.queue(worker_id)),
ZHeapIteratorContext(ObjectClosure* object_cl,
OopFieldClosure* field_cl,
uint worker_id,
ZHeapIteratorQueue* queue,
ZHeapIteratorArrayChunkQueue* array_chunk_queue)
: _object_cl(object_cl),
_field_cl(field_cl),
_worker_id(worker_id),
_object_cl(object_cl),
_field_cl(field_cl) {}
_queue(queue),
_array_chunk_queue(array_chunk_queue) {}
uint worker_id() const {
return _worker_id;
}
void visit_field(oop base, oop* p) const {
if (_field_cl != nullptr) {
@@ -81,41 +87,31 @@ public:
_object_cl->do_object(obj);
}
void mark_and_push(oop obj) const {
if (_iter->mark_object(obj)) {
visit_object(obj);
_queue->push(obj);
}
void push(oop obj) const {
_queue->push(obj);
}
void push_array(const ObjArrayTask& array) const {
_array_queue->push(array);
void push_array_chunk(const ObjArrayTask& array_chunk) const {
_array_chunk_queue->push(array_chunk);
}
bool pop(oop& obj) const {
return _queue->pop_overflow(obj) || _queue->pop_local(obj);
}
bool pop_array(ObjArrayTask& array) const {
return _array_queue->pop_overflow(array) || _array_queue->pop_local(array);
}
bool steal(oop& obj) const {
return _iter->_queues.steal(_worker_id, obj);
}
bool steal_array(ObjArrayTask& array) const {
return _iter->_array_queues.steal(_worker_id, array);
bool pop_array_chunk(ObjArrayTask& array_chunk) const {
return _array_chunk_queue->pop_overflow(array_chunk) || _array_chunk_queue->pop_local(array_chunk);
}
bool is_drained() const {
return _queue->is_empty() && _array_queue->is_empty();
return _queue->is_empty() && _array_chunk_queue->is_empty();
}
};
template <bool Weak>
class ZHeapIteratorColoredRootOopClosure : public OopClosure {
private:
ZHeapIterator* const _iter;
const ZHeapIteratorContext& _context;
oop load_oop(oop* p) {
@@ -127,13 +123,15 @@ private:
}
public:
ZHeapIteratorColoredRootOopClosure(const ZHeapIteratorContext& context)
: _context(context) {}
ZHeapIteratorColoredRootOopClosure(ZHeapIterator* iter,
const ZHeapIteratorContext& context)
: _iter(iter),
_context(context) {}
virtual void do_oop(oop* p) {
_context.visit_field(nullptr, p);
const oop obj = load_oop(p);
_context.mark_and_push(obj);
_iter->mark_visit_and_push(_context, obj);
}
virtual void do_oop(narrowOop* p) {
@@ -143,6 +141,7 @@ public:
class ZHeapIteratorUncoloredRootOopClosure : public OopClosure {
private:
ZHeapIterator* const _iter;
const ZHeapIteratorContext& _context;
oop load_oop(oop* p) {
@@ -152,13 +151,41 @@ private:
}
public:
ZHeapIteratorUncoloredRootOopClosure(const ZHeapIteratorContext& context)
: _context(context) {}
ZHeapIteratorUncoloredRootOopClosure(ZHeapIterator* iter,
const ZHeapIteratorContext& context)
: _iter(iter),
_context(context) {}
virtual void do_oop(oop* p) {
_context.visit_field(nullptr, p);
const oop obj = load_oop(p);
_context.mark_and_push(obj);
_iter->mark_visit_and_push(_context, obj);
}
virtual void do_oop(narrowOop* p) {
ShouldNotReachHere();
}
};
class ZHeapIteratorCLDOopClosure : public OopClosure {
private:
ZHeapIterator* const _iter;
const ZHeapIteratorContext& _context;
oop load_oop(oop* p) {
assert(!ZCollectedHeap::heap()->is_in(p), "Should not be in heap");
return NativeAccess<AS_NO_KEEPALIVE>::oop_load(p);
}
public:
ZHeapIteratorCLDOopClosure(ZHeapIterator* iter,
const ZHeapIteratorContext& context)
: _iter(iter),
_context(context) {}
virtual void do_oop(oop* p) {
const oop obj = load_oop(p);
_iter->mark_visit_and_push(_context, obj);
}
virtual void do_oop(narrowOop* p) {
@@ -169,6 +196,7 @@ public:
template <bool VisitReferents>
class ZHeapIteratorOopClosure : public OopIterateClosure {
private:
ZHeapIterator* const _iter;
const ZHeapIteratorContext& _context;
const oop _base;
@@ -183,8 +211,11 @@ private:
}
public:
ZHeapIteratorOopClosure(const ZHeapIteratorContext& context, oop base)
ZHeapIteratorOopClosure(ZHeapIterator* iter,
const ZHeapIteratorContext& context,
oop base)
: OopIterateClosure(),
_iter(iter),
_context(context),
_base(base) {}
@@ -195,7 +226,7 @@ public:
virtual void do_oop(oop* p) {
_context.visit_field(_base, p);
const oop obj = load_oop(p);
_context.mark_and_push(obj);
_iter->mark_visit_and_push(_context, obj);
}
virtual void do_oop(narrowOop* p) {
@@ -212,26 +243,7 @@ public:
}
virtual void do_cld(ClassLoaderData* cld) {
class NativeAccessClosure : public OopClosure {
private:
const ZHeapIteratorContext& _context;
public:
explicit NativeAccessClosure(const ZHeapIteratorContext& context)
: _context(context) {}
virtual void do_oop(oop* p) {
assert(!ZCollectedHeap::heap()->is_in(p), "Should not be in heap");
const oop obj = NativeAccess<AS_NO_KEEPALIVE>::oop_load(p);
_context.mark_and_push(obj);
}
virtual void do_oop(narrowOop* p) {
ShouldNotReachHere();
}
};
NativeAccessClosure cl(_context);
ZHeapIteratorCLDOopClosure cl(_iter, _context);
cld->oops_do(&cl, ClassLoaderData::_claim_other);
}
@@ -240,12 +252,15 @@ public:
virtual void do_method(Method* m) {}
};
ZHeapIterator::ZHeapIterator(uint nworkers, bool visit_weaks)
ZHeapIterator::ZHeapIterator(uint nworkers,
bool visit_weaks,
bool for_verify)
: _visit_weaks(visit_weaks),
_for_verify(for_verify),
_bitmaps(ZAddressOffsetMax),
_bitmaps_lock(),
_queues(nworkers),
_array_queues(nworkers),
_array_chunk_queues(nworkers),
_roots_colored(ZGenerationIdOptional::none),
_roots_uncolored(ZGenerationIdOptional::none),
_roots_weak_colored(ZGenerationIdOptional::none),
@@ -257,10 +272,10 @@ ZHeapIterator::ZHeapIterator(uint nworkers, bool visit_weaks)
_queues.register_queue(i, queue);
}
// Create array queues
for (uint i = 0; i < _array_queues.size(); i++) {
ZHeapIteratorArrayQueue* const array_queue = new ZHeapIteratorArrayQueue();
_array_queues.register_queue(i, array_queue);
// Create array chunk queues
for (uint i = 0; i < _array_chunk_queues.size(); i++) {
ZHeapIteratorArrayChunkQueue* const array_chunk_queue = new ZHeapIteratorArrayChunkQueue();
_array_chunk_queues.register_queue(i, array_chunk_queue);
}
}
@@ -271,9 +286,9 @@ ZHeapIterator::~ZHeapIterator() {
delete bitmap;
}
// Destroy array queues
for (uint i = 0; i < _array_queues.size(); i++) {
delete _array_queues.queue(i);
// Destroy array chunk queues
for (uint i = 0; i < _array_chunk_queues.size(); i++) {
delete _array_chunk_queues.queue(i);
}
// Destroy queues
@@ -312,6 +327,19 @@ ZHeapIteratorBitMap* ZHeapIterator::object_bitmap(oop obj) {
return bitmap;
}
bool ZHeapIterator::should_visit_object_at_mark() const {
// Verify wants to visit objects as soon as they are found.
return _for_verify;
}
bool ZHeapIterator::should_visit_object_at_follow() const {
// Non-verify code needs to be careful and visit the objects
// during the follow stage, where we've completed the root
// iteration. This prevents lock-ordering problems between
// the root iterator and the visit closures.
return !_for_verify;
}
bool ZHeapIterator::mark_object(oop obj) {
if (obj == nullptr) {
return false;
@@ -362,7 +390,7 @@ public:
void ZHeapIterator::push_strong_roots(const ZHeapIteratorContext& context) {
{
ZHeapIteratorColoredRootOopClosure<false /* Weak */> cl(context);
ZHeapIteratorColoredRootOopClosure<false /* Weak */> cl(this, context);
ZHeapIteratorCLDClosure cld_cl(&cl);
_roots_colored.apply(&cl,
@@ -370,7 +398,7 @@ void ZHeapIterator::push_strong_roots(const ZHeapIteratorContext& context) {
}
{
ZHeapIteratorUncoloredRootOopClosure cl(context);
ZHeapIteratorUncoloredRootOopClosure cl(this, context);
ZHeapIteratorNMethodClosure nm_cl(&cl);
ZHeapIteratorThreadClosure thread_cl(&cl, &nm_cl);
_roots_uncolored.apply(&thread_cl,
@@ -379,7 +407,7 @@ void ZHeapIterator::push_strong_roots(const ZHeapIteratorContext& context) {
}
void ZHeapIterator::push_weak_roots(const ZHeapIteratorContext& context) {
ZHeapIteratorColoredRootOopClosure<true /* Weak */> cl(context);
ZHeapIteratorColoredRootOopClosure<true /* Weak */> cl(this, context);
_roots_weak_colored.apply(&cl);
}
@@ -391,19 +419,28 @@ void ZHeapIterator::push_roots(const ZHeapIteratorContext& context) {
}
}
void ZHeapIterator::mark_visit_and_push(const ZHeapIteratorContext& context, oop obj) {
if (mark_object(obj)) {
if (should_visit_object_at_mark()) {
context.visit_object(obj);
}
context.push(obj);
}
}
template <bool VisitReferents>
void ZHeapIterator::follow_object(const ZHeapIteratorContext& context, oop obj) {
ZHeapIteratorOopClosure<VisitReferents> cl(context, obj);
ZHeapIteratorOopClosure<VisitReferents> cl(this, context, obj);
ZIterator::oop_iterate(obj, &cl);
}
void ZHeapIterator::follow_array(const ZHeapIteratorContext& context, oop obj) {
// Follow klass
ZHeapIteratorOopClosure<false /* VisitReferents */> cl(context, obj);
ZHeapIteratorOopClosure<false /* VisitReferents */> cl(this, context, obj);
cl.do_klass(obj->klass());
// Push array chunk
context.push_array(ObjArrayTask(obj, 0 /* index */));
context.push_array_chunk(ObjArrayTask(obj, 0 /* index */));
}
void ZHeapIterator::follow_array_chunk(const ZHeapIteratorContext& context, const ObjArrayTask& array) {
@@ -415,11 +452,11 @@ void ZHeapIterator::follow_array_chunk(const ZHeapIteratorContext& context, cons
// Push remaining array chunk first
if (end < length) {
context.push_array(ObjArrayTask(obj, end));
context.push_array_chunk(ObjArrayTask(obj, end));
}
// Follow array chunk
ZHeapIteratorOopClosure<false /* VisitReferents */> cl(context, obj);
ZHeapIteratorOopClosure<false /* VisitReferents */> cl(this, context, obj);
ZIterator::oop_iterate_range(obj, &cl, start, end);
}
@@ -433,6 +470,15 @@ void ZHeapIterator::follow(const ZHeapIteratorContext& context, oop obj) {
}
}
template <bool VisitWeaks>
void ZHeapIterator::visit_and_follow(const ZHeapIteratorContext& context, oop obj) {
if (should_visit_object_at_follow()) {
context.visit_object(obj);
}
follow<VisitWeaks>(context, obj);
}
template <bool VisitWeaks>
void ZHeapIterator::drain(const ZHeapIteratorContext& context) {
ObjArrayTask array;
@@ -440,10 +486,10 @@ void ZHeapIterator::drain(const ZHeapIteratorContext& context) {
do {
while (context.pop(obj)) {
follow<VisitWeaks>(context, obj);
visit_and_follow<VisitWeaks>(context, obj);
}
if (context.pop_array(array)) {
if (context.pop_array_chunk(array)) {
follow_array_chunk(context, array);
}
} while (!context.is_drained());
@@ -454,13 +500,21 @@ void ZHeapIterator::steal(const ZHeapIteratorContext& context) {
ObjArrayTask array;
oop obj;
if (context.steal_array(array)) {
if (steal_array_chunk(context, array)) {
follow_array_chunk(context, array);
} else if (context.steal(obj)) {
follow<VisitWeaks>(context, obj);
} else if (steal(context, obj)) {
visit_and_follow<VisitWeaks>(context, obj);
}
}
bool ZHeapIterator::steal(const ZHeapIteratorContext& context, oop& obj) {
return _queues.steal(context.worker_id(), obj);
}
bool ZHeapIterator::steal_array_chunk(const ZHeapIteratorContext& context, ObjArrayTask& array) {
return _array_chunk_queues.steal(context.worker_id(), array);
}
template <bool VisitWeaks>
void ZHeapIterator::drain_and_steal(const ZHeapIteratorContext& context) {
do {
@@ -476,17 +530,15 @@ void ZHeapIterator::object_iterate_inner(const ZHeapIteratorContext& context) {
}
void ZHeapIterator::object_iterate(ObjectClosure* object_cl, uint worker_id) {
const ZHeapIteratorContext context(this, object_cl, nullptr /* field_cl */, worker_id);
if (_visit_weaks) {
object_iterate_inner<true /* VisitWeaks */>(context);
} else {
object_iterate_inner<false /* VisitWeaks */>(context);
}
object_and_field_iterate(object_cl, nullptr /* field_cl */, worker_id);
}
void ZHeapIterator::object_and_field_iterate(ObjectClosure* object_cl, OopFieldClosure* field_cl, uint worker_id) {
const ZHeapIteratorContext context(this, object_cl, field_cl, worker_id);
const ZHeapIteratorContext context(object_cl,
field_cl,
worker_id,
_queues.queue(worker_id),
_array_chunk_queues.queue(worker_id));
if (_visit_weaks) {
object_iterate_inner<true /* VisitWeaks */>(context);


@@ -39,19 +39,22 @@ using ZHeapIteratorBitMaps = ZGranuleMap<ZHeapIteratorBitMap*>;
using ZHeapIteratorBitMapsIterator = ZGranuleMapIterator<ZHeapIteratorBitMap*, false /* Parallel */>;
using ZHeapIteratorQueue = OverflowTaskQueue<oop, mtGC>;
using ZHeapIteratorQueues = GenericTaskQueueSet<ZHeapIteratorQueue, mtGC>;
using ZHeapIteratorArrayQueue = OverflowTaskQueue<ObjArrayTask, mtGC>;
using ZHeapIteratorArrayQueues = GenericTaskQueueSet<ZHeapIteratorArrayQueue, mtGC>;
using ZHeapIteratorArrayChunkQueue = OverflowTaskQueue<ObjArrayTask, mtGC>;
using ZHeapIteratorArrayChunkQueues = GenericTaskQueueSet<ZHeapIteratorArrayChunkQueue, mtGC>;
class ZHeapIterator : public ParallelObjectIteratorImpl {
friend class ZHeapIteratorContext;
friend class ZHeapIteratorRootUncoloredOopClosure;
friend class ZHeapIteratorCLDOopClosure;
template <bool Weak> friend class ZHeapIteratorColoredRootOopClosure;
template <bool VisitReferents> friend class ZHeapIteratorOopClosure;
friend class ZHeapIteratorUncoloredRootOopClosure;
private:
const bool _visit_weaks;
const bool _for_verify;
ZHeapIteratorBitMaps _bitmaps;
ZLock _bitmaps_lock;
ZHeapIteratorQueues _queues;
ZHeapIteratorArrayQueues _array_queues;
ZHeapIteratorArrayChunkQueues _array_chunk_queues;
ZRootsIteratorStrongColored _roots_colored;
ZRootsIteratorStrongUncolored _roots_uncolored;
ZRootsIteratorWeakColored _roots_weak_colored;
@@ -59,6 +62,9 @@ private:
ZHeapIteratorBitMap* object_bitmap(oop obj);
bool should_visit_object_at_mark() const;
bool should_visit_object_at_follow() const;
bool mark_object(oop obj);
void push_strong_roots(const ZHeapIteratorContext& context);
@@ -67,6 +73,8 @@ private:
template <bool VisitWeaks>
void push_roots(const ZHeapIteratorContext& context);
void mark_visit_and_push(const ZHeapIteratorContext& context, oop obj);
template <bool VisitReferents>
void follow_object(const ZHeapIteratorContext& context, oop obj);
@@ -76,12 +84,18 @@ private:
template <bool VisitWeaks>
void follow(const ZHeapIteratorContext& context, oop obj);
template <bool VisitWeaks>
void visit_and_follow(const ZHeapIteratorContext& context, oop obj);
template <bool VisitWeaks>
void drain(const ZHeapIteratorContext& context);
template <bool VisitWeaks>
void steal(const ZHeapIteratorContext& context);
bool steal(const ZHeapIteratorContext& context, oop& obj);
bool steal_array_chunk(const ZHeapIteratorContext& context, ObjArrayTask& array);
template <bool VisitWeaks>
void drain_and_steal(const ZHeapIteratorContext& context);
@@ -89,7 +103,7 @@ private:
void object_iterate_inner(const ZHeapIteratorContext& context);
public:
ZHeapIterator(uint nworkers, bool visit_weaks);
ZHeapIterator(uint nworkers, bool visit_weaks, bool for_verify);
virtual ~ZHeapIterator();
virtual void object_iterate(ObjectClosure* object_cl, uint worker_id);


@@ -426,7 +426,7 @@ void ZVerify::objects(bool verify_weaks) {
threads_start_processing();
ZVerifyObjectClosure object_cl(verify_weaks);
ZHeap::heap()->object_and_field_iterate(&object_cl, &object_cl, verify_weaks);
ZHeap::heap()->object_and_field_iterate_for_verify(&object_cl, &object_cl, verify_weaks);
}
void ZVerify::before_zoperation() {