8317007: Add bulk removal of dead nmethods during class unloading
Reviewed-by: ayang, iwalulya
This commit is contained in:
parent
34351b7a79
commit
f553819502
src/hotspot/share
@ -164,7 +164,7 @@ RuntimeBlob::RuntimeBlob(
|
||||
void RuntimeBlob::free(RuntimeBlob* blob) {
|
||||
assert(blob != nullptr, "caller must check for nullptr");
|
||||
ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
|
||||
blob->purge();
|
||||
blob->purge(true /* free_code_cache_data */, true /* unregister_nmethod */);
|
||||
{
|
||||
MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
|
||||
CodeCache::free(blob);
|
||||
@ -173,7 +173,7 @@ void RuntimeBlob::free(RuntimeBlob* blob) {
|
||||
MemoryService::track_code_cache_memory_usage();
|
||||
}
|
||||
|
||||
void CodeBlob::purge(bool free_code_cache_data) {
|
||||
void CodeBlob::purge(bool free_code_cache_data, bool unregister_nmethod) {
|
||||
if (_oop_maps != nullptr) {
|
||||
delete _oop_maps;
|
||||
_oop_maps = nullptr;
|
||||
|
@ -143,7 +143,7 @@ public:
|
||||
static unsigned int align_code_offset(int offset);
|
||||
|
||||
// Deletion
|
||||
virtual void purge(bool free_code_cache_data = true);
|
||||
virtual void purge(bool free_code_cache_data, bool unregister_nmethod);
|
||||
|
||||
// Typing
|
||||
virtual bool is_buffer_blob() const { return false; }
|
||||
|
@ -174,7 +174,7 @@ protected:
|
||||
|
||||
void* _gc_data;
|
||||
|
||||
virtual void purge(bool free_code_cache_data = true) = 0;
|
||||
virtual void purge(bool free_code_cache_data, bool unregister_nmethod) = 0;
|
||||
|
||||
private:
|
||||
DeoptimizationStatus deoptimization_status() const {
|
||||
|
@ -1444,7 +1444,9 @@ void nmethod::unlink() {
|
||||
ClassUnloadingContext::context()->register_unlinked_nmethod(this);
|
||||
}
|
||||
|
||||
void nmethod::purge(bool free_code_cache_data) {
|
||||
void nmethod::purge(bool free_code_cache_data, bool unregister_nmethod) {
|
||||
assert(!free_code_cache_data, "must only call not freeing code cache data");
|
||||
|
||||
MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);
|
||||
|
||||
// completely deallocate this method
|
||||
@ -1464,13 +1466,13 @@ void nmethod::purge(bool free_code_cache_data) {
|
||||
ec = next;
|
||||
}
|
||||
|
||||
Universe::heap()->unregister_nmethod(this);
|
||||
if (unregister_nmethod) {
|
||||
Universe::heap()->unregister_nmethod(this);
|
||||
}
|
||||
|
||||
CodeCache::unregister_old_nmethod(this);
|
||||
|
||||
CodeBlob::purge();
|
||||
if (free_code_cache_data) {
|
||||
CodeCache::free(this);
|
||||
}
|
||||
CodeBlob::purge(free_code_cache_data, unregister_nmethod);
|
||||
}
|
||||
|
||||
oop nmethod::oop_at(int index) const {
|
||||
|
@ -522,7 +522,7 @@ public:
|
||||
void unlink();
|
||||
|
||||
// Deallocate this nmethod - called by the GC
|
||||
void purge(bool free_code_cache_data = true);
|
||||
void purge(bool free_code_cache_data, bool unregister_nmethod);
|
||||
|
||||
// See comment at definition of _last_seen_on_stack
|
||||
void mark_as_maybe_on_stack();
|
||||
|
@ -1788,7 +1788,7 @@ bool CompileBroker::init_compiler_runtime() {
|
||||
void CompileBroker::free_buffer_blob_if_allocated(CompilerThread* thread) {
|
||||
BufferBlob* blob = thread->get_buffer_blob();
|
||||
if (blob != nullptr) {
|
||||
blob->purge();
|
||||
blob->purge(true /* free_code_cache_data */, true /* unregister_nmethod */);
|
||||
MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
|
||||
CodeCache::free(blob);
|
||||
}
|
||||
|
@ -187,6 +187,15 @@ public:
|
||||
}
|
||||
}
|
||||
|
||||
// Removes dead/unlinked entries.
|
||||
void bulk_remove() {
|
||||
auto delete_check = [&] (nmethod** value) {
|
||||
return (*value)->is_unlinked();
|
||||
};
|
||||
|
||||
clean(delete_check);
|
||||
}
|
||||
|
||||
// Calculate the log2 of the table size we want to shrink to.
|
||||
size_t log2_target_shrink_size(size_t current_size) const {
|
||||
// A table with the new size should be at most filled by this factor. Otherwise
|
||||
@ -255,6 +264,11 @@ bool G1CodeRootSet::remove(nmethod* method) {
|
||||
return _table->remove(method);
|
||||
}
|
||||
|
||||
void G1CodeRootSet::bulk_remove() {
|
||||
assert(!_is_iterating, "should not mutate while iterating the table");
|
||||
_table->bulk_remove();
|
||||
}
|
||||
|
||||
bool G1CodeRootSet::contains(nmethod* method) {
|
||||
return _table->contains(method);
|
||||
}
|
||||
|
@ -44,6 +44,7 @@ class G1CodeRootSet {
|
||||
|
||||
void add(nmethod* method);
|
||||
bool remove(nmethod* method);
|
||||
void bulk_remove();
|
||||
bool contains(nmethod* method);
|
||||
void clear();
|
||||
|
||||
|
@ -2517,6 +2517,7 @@ void G1CollectedHeap::unload_classes_and_code(const char* description, BoolObjec
|
||||
GCTraceTime(Debug, gc, phases) debug(description, timer);
|
||||
|
||||
ClassUnloadingContext ctx(workers()->active_workers(),
|
||||
false /* unregister_nmethods_during_purge */,
|
||||
false /* lock_codeblob_free_separately */);
|
||||
{
|
||||
CodeCache::UnlinkingScope scope(is_alive);
|
||||
@ -2528,6 +2529,10 @@ void G1CollectedHeap::unload_classes_and_code(const char* description, BoolObjec
|
||||
GCTraceTime(Debug, gc, phases) t("Purge Unlinked NMethods", timer);
|
||||
ctx.purge_nmethods();
|
||||
}
|
||||
{
|
||||
GCTraceTime(Debug, gc, phases) ur("Unregister NMethods", timer);
|
||||
G1CollectedHeap::heap()->bulk_unregister_nmethods();
|
||||
}
|
||||
{
|
||||
GCTraceTime(Debug, gc, phases) t("Free Code Blobs", timer);
|
||||
ctx.free_code_blobs();
|
||||
@ -2539,6 +2544,33 @@ void G1CollectedHeap::unload_classes_and_code(const char* description, BoolObjec
|
||||
}
|
||||
}
|
||||
|
||||
class G1BulkUnregisterNMethodTask : public WorkerTask {
|
||||
HeapRegionClaimer _hrclaimer;
|
||||
|
||||
class UnregisterNMethodsHeapRegionClosure : public HeapRegionClosure {
|
||||
public:
|
||||
|
||||
bool do_heap_region(HeapRegion* hr) {
|
||||
hr->rem_set()->bulk_remove_code_roots();
|
||||
return false;
|
||||
}
|
||||
} _cl;
|
||||
|
||||
public:
|
||||
G1BulkUnregisterNMethodTask(uint num_workers)
|
||||
: WorkerTask("G1 Remove Unlinked NMethods From Code Root Set Task"),
|
||||
_hrclaimer(num_workers) { }
|
||||
|
||||
void work(uint worker_id) {
|
||||
G1CollectedHeap::heap()->heap_region_par_iterate_from_worker_offset(&_cl, &_hrclaimer, worker_id);
|
||||
}
|
||||
};
|
||||
|
||||
void G1CollectedHeap::bulk_unregister_nmethods() {
|
||||
uint num_workers = workers()->active_workers();
|
||||
G1BulkUnregisterNMethodTask t(num_workers);
|
||||
workers()->run_task(&t);
|
||||
}
|
||||
|
||||
bool G1STWSubjectToDiscoveryClosure::do_object_b(oop obj) {
|
||||
assert(obj != nullptr, "must not be null");
|
||||
@ -2963,31 +2995,6 @@ public:
|
||||
void do_oop(narrowOop* p) { ShouldNotReachHere(); }
|
||||
};
|
||||
|
||||
class UnregisterNMethodOopClosure: public OopClosure {
|
||||
G1CollectedHeap* _g1h;
|
||||
nmethod* _nm;
|
||||
|
||||
public:
|
||||
UnregisterNMethodOopClosure(G1CollectedHeap* g1h, nmethod* nm) :
|
||||
_g1h(g1h), _nm(nm) {}
|
||||
|
||||
void do_oop(oop* p) {
|
||||
oop heap_oop = RawAccess<>::oop_load(p);
|
||||
if (!CompressedOops::is_null(heap_oop)) {
|
||||
oop obj = CompressedOops::decode_not_null(heap_oop);
|
||||
HeapRegion* hr = _g1h->heap_region_containing(obj);
|
||||
assert(!hr->is_continues_humongous(),
|
||||
"trying to remove code root " PTR_FORMAT " in continuation of humongous region " HR_FORMAT
|
||||
" starting at " HR_FORMAT,
|
||||
p2i(_nm), HR_FORMAT_PARAMS(hr), HR_FORMAT_PARAMS(hr->humongous_start_region()));
|
||||
|
||||
hr->remove_code_root(_nm);
|
||||
}
|
||||
}
|
||||
|
||||
void do_oop(narrowOop* p) { ShouldNotReachHere(); }
|
||||
};
|
||||
|
||||
void G1CollectedHeap::register_nmethod(nmethod* nm) {
|
||||
guarantee(nm != nullptr, "sanity");
|
||||
RegisterNMethodOopClosure reg_cl(this, nm);
|
||||
@ -2995,9 +3002,8 @@ void G1CollectedHeap::register_nmethod(nmethod* nm) {
|
||||
}
|
||||
|
||||
void G1CollectedHeap::unregister_nmethod(nmethod* nm) {
|
||||
guarantee(nm != nullptr, "sanity");
|
||||
UnregisterNMethodOopClosure reg_cl(this, nm);
|
||||
nm->oops_do(®_cl, true);
|
||||
// We always unregister nmethods in bulk during code unloading only.
|
||||
ShouldNotReachHere();
|
||||
}
|
||||
|
||||
void G1CollectedHeap::update_used_after_gc(bool evacuation_failed) {
|
||||
|
@ -1270,6 +1270,8 @@ public:
|
||||
|
||||
void unload_classes_and_code(const char* description, BoolObjectClosure* cl, GCTimer* timer);
|
||||
|
||||
void bulk_unregister_nmethods();
|
||||
|
||||
// Verification
|
||||
|
||||
// Perform any cleanup actions necessary before allowing a verification.
|
||||
|
@ -115,6 +115,10 @@ void HeapRegionRemSet::remove_code_root(nmethod* nm) {
|
||||
guarantee(!_code_roots.contains(nm), "duplicate entry found");
|
||||
}
|
||||
|
||||
void HeapRegionRemSet::bulk_remove_code_roots() {
|
||||
_code_roots.bulk_remove();
|
||||
}
|
||||
|
||||
void HeapRegionRemSet::code_roots_do(CodeBlobClosure* blk) const {
|
||||
_code_roots.nmethods_do(blk);
|
||||
}
|
||||
|
@ -150,6 +150,7 @@ public:
|
||||
// the heap region that owns this RSet.
|
||||
void add_code_root(nmethod* nm);
|
||||
void remove_code_root(nmethod* nm);
|
||||
void bulk_remove_code_roots();
|
||||
|
||||
// Applies blk->do_code_blob() to each of the entries in _code_roots
|
||||
void code_roots_do(CodeBlobClosure* blk) const;
|
||||
|
@ -532,6 +532,14 @@ void ParallelScavengeHeap::resize_all_tlabs() {
|
||||
CollectedHeap::resize_all_tlabs();
|
||||
}
|
||||
|
||||
void ParallelScavengeHeap::prune_scavengable_nmethods() {
|
||||
ScavengableNMethods::prune_nmethods_not_into_young();
|
||||
}
|
||||
|
||||
void ParallelScavengeHeap::prune_unlinked_nmethods() {
|
||||
ScavengableNMethods::prune_unlinked_nmethods();
|
||||
}
|
||||
|
||||
// This method is used by System.gc() and JVMTI.
|
||||
void ParallelScavengeHeap::collect(GCCause::Cause cause) {
|
||||
assert(!Heap_lock->owned_by_self(),
|
||||
@ -863,10 +871,6 @@ void ParallelScavengeHeap::verify_nmethod(nmethod* nm) {
|
||||
ScavengableNMethods::verify_nmethod(nm);
|
||||
}
|
||||
|
||||
void ParallelScavengeHeap::prune_scavengable_nmethods() {
|
||||
ScavengableNMethods::prune_nmethods();
|
||||
}
|
||||
|
||||
GrowableArray<GCMemoryManager*> ParallelScavengeHeap::memory_managers() {
|
||||
GrowableArray<GCMemoryManager*> memory_managers(2);
|
||||
memory_managers.append(_young_manager);
|
||||
|
@ -176,6 +176,7 @@ class ParallelScavengeHeap : public CollectedHeap {
|
||||
void verify_nmethod(nmethod* nm) override;
|
||||
|
||||
void prune_scavengable_nmethods();
|
||||
void prune_unlinked_nmethods();
|
||||
|
||||
size_t max_capacity() const override;
|
||||
|
||||
|
@ -1769,6 +1769,7 @@ bool PSParallelCompact::invoke_no_policy(bool maximum_heap_compaction) {
|
||||
ref_processor()->start_discovery(maximum_heap_compaction);
|
||||
|
||||
ClassUnloadingContext ctx(1 /* num_nmethod_unlink_workers */,
|
||||
false /* unregister_nmethods_during_purge */,
|
||||
false /* lock_codeblob_free_separately */);
|
||||
|
||||
marking_phase(&_gc_tracer);
|
||||
@ -2078,6 +2079,10 @@ void PSParallelCompact::marking_phase(ParallelOldTracer *gc_tracer) {
|
||||
// Release unloaded nmethod's memory.
|
||||
ctx->purge_nmethods();
|
||||
}
|
||||
{
|
||||
GCTraceTime(Debug, gc, phases) ur("Unregister NMethods", &_gc_timer);
|
||||
ParallelScavengeHeap::heap()->prune_unlinked_nmethods();
|
||||
}
|
||||
{
|
||||
GCTraceTime(Debug, gc, phases) t("Free Code Blobs", gc_timer());
|
||||
ctx->free_code_blobs();
|
||||
|
@ -219,6 +219,10 @@ void GenMarkSweep::mark_sweep_phase1(bool clear_all_softrefs) {
|
||||
// Release unloaded nmethod's memory.
|
||||
ctx->purge_nmethods();
|
||||
}
|
||||
{
|
||||
GCTraceTime(Debug, gc, phases) ur("Unregister NMethods", gc_timer());
|
||||
gch->prune_unlinked_nmethods();
|
||||
}
|
||||
{
|
||||
GCTraceTime(Debug, gc, phases) t("Free Code Blobs", gc_timer());
|
||||
ctx->free_code_blobs();
|
||||
|
@ -28,6 +28,7 @@
|
||||
#include "gc/serial/tenuredGeneration.inline.hpp"
|
||||
#include "gc/shared/gcLocker.inline.hpp"
|
||||
#include "gc/shared/genMemoryPools.hpp"
|
||||
#include "gc/shared/scavengableNMethods.hpp"
|
||||
#include "gc/shared/strongRootsScope.hpp"
|
||||
#include "gc/shared/suspendibleThreadSet.hpp"
|
||||
#include "memory/universe.hpp"
|
||||
|
@ -32,10 +32,13 @@
|
||||
|
||||
ClassUnloadingContext* ClassUnloadingContext::_context = nullptr;
|
||||
|
||||
ClassUnloadingContext::ClassUnloadingContext(uint num_workers, bool lock_codeblob_free_separately) :
|
||||
ClassUnloadingContext::ClassUnloadingContext(uint num_workers,
|
||||
bool unregister_nmethods_during_purge,
|
||||
bool lock_codeblob_free_separately) :
|
||||
_cld_head(nullptr),
|
||||
_num_nmethod_unlink_workers(num_workers),
|
||||
_unlinked_nmethods(nullptr),
|
||||
_unregister_nmethods_during_purge(unregister_nmethods_during_purge),
|
||||
_lock_codeblob_free_separately(lock_codeblob_free_separately) {
|
||||
|
||||
assert(_context == nullptr, "context already set");
|
||||
@ -113,7 +116,7 @@ void ClassUnloadingContext::purge_nmethods() {
|
||||
NMethodSet* set = _unlinked_nmethods[i];
|
||||
for (nmethod* nm : *set) {
|
||||
freed_memory += nm->size();
|
||||
nm->purge(false /* free_code_cache_data */);
|
||||
nm->purge(false /* free_code_cache_data */, _unregister_nmethods_during_purge);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -42,6 +42,7 @@ class ClassUnloadingContext : public CHeapObj<mtGC> {
|
||||
using NMethodSet = GrowableArrayCHeap<nmethod*, mtGC>;
|
||||
NMethodSet** _unlinked_nmethods;
|
||||
|
||||
bool _unregister_nmethods_during_purge;
|
||||
bool _lock_codeblob_free_separately;
|
||||
|
||||
public:
|
||||
@ -49,10 +50,14 @@ public:
|
||||
|
||||
// Num_nmethod_unlink_workers configures the maximum numbers of threads unlinking
|
||||
// nmethods.
|
||||
// unregister_nmethods_during_purge determines whether unloaded nmethods should
|
||||
// be unregistered from the garbage collector during purge. If not, the caller
|
||||
// is responsible for doing that later.
|
||||
// lock_codeblob_free_separately determines whether freeing the code blobs takes
|
||||
// the CodeCache_lock during the whole operation (=false) or per code blob
|
||||
// free operation (=true).
|
||||
ClassUnloadingContext(uint num_nmethod_unlink_workers,
|
||||
bool unregister_nmethods_during_purge,
|
||||
bool lock_codeblob_free_separately);
|
||||
~ClassUnloadingContext();
|
||||
|
||||
|
@ -524,6 +524,7 @@ void GenCollectedHeap::do_collection(bool full,
|
||||
CodeCache::on_gc_marking_cycle_start();
|
||||
|
||||
ClassUnloadingContext ctx(1 /* num_nmethod_unlink_workers */,
|
||||
false /* unregister_nmethods_during_purge */,
|
||||
false /* lock_codeblob_free_separately */);
|
||||
|
||||
collect_generation(_old_gen,
|
||||
@ -582,7 +583,11 @@ void GenCollectedHeap::verify_nmethod(nmethod* nm) {
|
||||
}
|
||||
|
||||
void GenCollectedHeap::prune_scavengable_nmethods() {
|
||||
ScavengableNMethods::prune_nmethods();
|
||||
ScavengableNMethods::prune_nmethods_not_into_young();
|
||||
}
|
||||
|
||||
void GenCollectedHeap::prune_unlinked_nmethods() {
|
||||
ScavengableNMethods::prune_unlinked_nmethods();
|
||||
}
|
||||
|
||||
HeapWord* GenCollectedHeap::satisfy_failed_allocation(size_t size, bool is_tlab) {
|
||||
|
@ -182,6 +182,7 @@ public:
|
||||
void verify_nmethod(nmethod* nm) override;
|
||||
|
||||
void prune_scavengable_nmethods();
|
||||
void prune_unlinked_nmethods();
|
||||
|
||||
// Iteration functions.
|
||||
void object_iterate(ObjectClosure* cl) override;
|
||||
|
@ -59,18 +59,8 @@ void ScavengableNMethods::register_nmethod(nmethod* nm) {
|
||||
}
|
||||
|
||||
void ScavengableNMethods::unregister_nmethod(nmethod* nm) {
|
||||
assert_locked_or_safepoint(CodeCache_lock);
|
||||
|
||||
if (gc_data(nm).on_list()) {
|
||||
nmethod* prev = nullptr;
|
||||
for (nmethod* cur = _head; cur != nullptr; cur = gc_data(cur).next()) {
|
||||
if (cur == nm) {
|
||||
unlist_nmethod(cur, prev);
|
||||
return;
|
||||
}
|
||||
prev = cur;
|
||||
}
|
||||
}
|
||||
// All users of this method only unregister in bulk during code unloading.
|
||||
ShouldNotReachHere();
|
||||
}
|
||||
|
||||
#ifndef PRODUCT
|
||||
@ -172,10 +162,37 @@ void ScavengableNMethods::nmethods_do_and_prune(CodeBlobToOopClosure* cl) {
|
||||
debug_only(verify_unlisted_nmethods(nullptr));
|
||||
}
|
||||
|
||||
void ScavengableNMethods::prune_nmethods() {
|
||||
void ScavengableNMethods::prune_nmethods_not_into_young() {
|
||||
nmethods_do_and_prune(nullptr /* No closure */);
|
||||
}
|
||||
|
||||
void ScavengableNMethods::prune_unlinked_nmethods() {
|
||||
assert_locked_or_safepoint(CodeCache_lock);
|
||||
|
||||
debug_only(mark_on_list_nmethods());
|
||||
|
||||
nmethod* prev = nullptr;
|
||||
nmethod* cur = _head;
|
||||
while (cur != nullptr) {
|
||||
ScavengableNMethodsData data = gc_data(cur);
|
||||
debug_only(data.clear_marked());
|
||||
assert(data.on_list(), "else shouldn't be on this list");
|
||||
|
||||
nmethod* const next = data.next();
|
||||
|
||||
if (cur->is_unlinked()) {
|
||||
unlist_nmethod(cur, prev);
|
||||
} else {
|
||||
prev = cur;
|
||||
}
|
||||
|
||||
cur = next;
|
||||
}
|
||||
|
||||
// Check for stray marks.
|
||||
debug_only(verify_unlisted_nmethods(nullptr));
|
||||
}
|
||||
|
||||
// Walk the list of methods which might contain oops to the java heap.
|
||||
void ScavengableNMethods::nmethods_do(CodeBlobToOopClosure* cl) {
|
||||
nmethods_do_and_prune(cl);
|
||||
@ -218,8 +235,9 @@ void ScavengableNMethods::mark_on_list_nmethods() {
|
||||
nmethod* nm = iter.method();
|
||||
ScavengableNMethodsData data = gc_data(nm);
|
||||
assert(data.not_marked(), "clean state");
|
||||
if (data.on_list())
|
||||
if (data.on_list()) {
|
||||
data.set_marked();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -230,7 +248,10 @@ void ScavengableNMethods::verify_unlisted_nmethods(CodeBlobClosure* cl) {
|
||||
while(iter.next()) {
|
||||
nmethod* nm = iter.method();
|
||||
|
||||
verify_nmethod(nm);
|
||||
// Cannot verify already unlinked nmethods, as they are partially invalid by this point.
|
||||
if (!nm->is_unlinked()) {
|
||||
verify_nmethod(nm);
|
||||
}
|
||||
|
||||
if (cl != nullptr && !gc_data(nm).on_list()) {
|
||||
cl->do_code_blob(nm);
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2019, 2022, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2019, 2023, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -46,8 +46,10 @@ public:
|
||||
static void unregister_nmethod(nmethod* nm);
|
||||
static void verify_nmethod(nmethod* nm);
|
||||
|
||||
// Remove nmethods that no longer have scavengable oops.
|
||||
static void prune_nmethods();
|
||||
// Remove nmethods that no longer have oops into young gen.
|
||||
static void prune_nmethods_not_into_young();
|
||||
// Remove unlinked (dead) nmethods.
|
||||
static void prune_unlinked_nmethods();
|
||||
|
||||
// Apply closure to every scavengable nmethod.
|
||||
// Remove nmethods that no longer have scavengable oops.
|
||||
|
@ -1822,6 +1822,7 @@ void ShenandoahHeap::stop() {
|
||||
void ShenandoahHeap::stw_unload_classes(bool full_gc) {
|
||||
if (!unload_classes()) return;
|
||||
ClassUnloadingContext ctx(_workers->active_workers(),
|
||||
true /* unregister_nmethods_during_purge */,
|
||||
false /* lock_codeblob_free_separately */);
|
||||
|
||||
// Unload classes and purge SystemDictionary.
|
||||
|
@ -140,6 +140,7 @@ void ShenandoahUnload::unload() {
|
||||
assert(heap->is_concurrent_weak_root_in_progress(), "Filtered by caller");
|
||||
|
||||
ClassUnloadingContext ctx(heap->workers()->active_workers(),
|
||||
true /* unregister_nmethods_during_purge */,
|
||||
true /* lock_codeblob_free_separately */);
|
||||
|
||||
// Unlink stale metadata and nmethods
|
||||
|
@ -322,6 +322,7 @@ void XHeap::process_non_strong_references() {
|
||||
_weak_roots_processor.process_weak_roots();
|
||||
|
||||
ClassUnloadingContext ctx(_workers.active_workers(),
|
||||
true /* unregister_nmethods_during_purge */,
|
||||
true /* lock_codeblob_free_separately */);
|
||||
|
||||
// Unlink stale metadata and nmethods
|
||||
|
@ -1318,6 +1318,7 @@ void ZGenerationOld::process_non_strong_references() {
|
||||
_weak_roots_processor.process_weak_roots();
|
||||
|
||||
ClassUnloadingContext ctx(_workers.active_workers(),
|
||||
true /* unregister_nmethods_during_purge */,
|
||||
true /* lock_codeblob_free_separately */);
|
||||
|
||||
// Unlink stale metadata and nmethods
|
||||
|
Loading…
x
Reference in New Issue
Block a user