8306836: Remove pinned tag for G1 heap regions

Reviewed-by: ayang, cjplummer, sspitsyn
Thomas Schatzl 2023-05-03 13:50:02 +00:00
parent ccf91f881c
commit fc76687c2f
18 changed files with 31 additions and 57 deletions

@@ -568,7 +568,7 @@ bool G1CollectedHeap::alloc_archive_regions(MemRegion range) {
// Mark each G1 region touched by the range as old, add it to
// the old set, and set top.
auto set_region_to_old = [&] (HeapRegion* r, bool is_last) {
assert(r->is_empty() && !r->is_pinned(), "Region already in use (%u)", r->hrm_index());
assert(r->is_empty(), "Region already in use (%u)", r->hrm_index());
HeapWord* top = is_last ? last_address + 1 : r->end();
r->set_top(top);

@@ -82,11 +82,11 @@ void G1CollectionSetCandidates::verify() const {
HeapRegion *prev = nullptr;
for (; idx < _num_regions; idx++) {
HeapRegion *cur = _regions[idx];
guarantee(cur != nullptr, "Regions after _front_idx %u cannot be null but %u is", _front_idx, idx);
// The first disjunction filters out regions with objects that were explicitly
// pinned after being added to the collection set candidates.
guarantee(cur->is_pinned() ||
G1CollectionSetChooser::should_add(cur),
guarantee(cur != nullptr, "Regions after _front_idx %u cannot be NULL but %u is", _front_idx, idx);
// Currently the decision whether young gc moves region contents is determined
// at region allocation time. It is not possible that a region becomes non-movable
// at a later point, which means the condition below always holds true.
guarantee(G1CollectionSetChooser::should_add(cur),
"Region %u should be eligible for addition.", cur->hrm_index());
if (prev != nullptr) {
guarantee(prev->gc_efficiency() >= cur->gc_efficiency(),

@@ -252,7 +252,7 @@ uint G1CollectionSetChooser::calculate_work_chunk_size(uint num_workers, uint nu
bool G1CollectionSetChooser::should_add(HeapRegion* hr) {
return !hr->is_young() &&
!hr->is_pinned() &&
!hr->is_humongous() &&
region_occupancy_low_enough_for_evac(hr->live_bytes()) &&
hr->rem_set()->is_complete();
}

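To see the shape of the predicate change in isolation, here is a small self-contained sketch. It is not HotSpot code: the Region struct, its field names and the liveness cut-off are assumptions made for the example; only the structure of the filter follows the hunks above (pinned check dropped, humongous check added).

// Illustrative model only, not HotSpot code. Field names and the 25%
// liveness cut-off are assumptions for the example.
#include <cstddef>
#include <cstdio>

struct Region {
  bool   young;
  bool   humongous;
  bool   pinned;            // tag that existed before this change
  size_t live_bytes;
  size_t capacity_bytes;
  bool   remset_complete;
};

// Assumed stand-in for region_occupancy_low_enough_for_evac().
static bool occupancy_low_enough(const Region& r) {
  return r.live_bytes * 4 < r.capacity_bytes;   // assumed 25% cut-off
}

// Before: pinned regions (humongous regions were always pinned) were skipped.
static bool should_add_before(const Region& r) {
  return !r.young && !r.pinned && occupancy_low_enough(r) && r.remset_complete;
}

// After: only humongous regions are skipped; with the pinned tag gone,
// humongous is the one remaining kind of region that young gc never moves.
static bool should_add_after(const Region& r) {
  return !r.young && !r.humongous && occupancy_low_enough(r) && r.remset_complete;
}

int main() {
  Region old_region{false, false, false, 100, 1024, true};
  std::printf("candidate before: %d, after: %d\n",
              should_add_before(old_region), should_add_after(old_region));
  return 0;
}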
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2001, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2001, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -53,8 +53,8 @@ public:
}
// Determine whether to add the given region to the collection set candidates or
// not. Currently, we skip pinned regions and regions whose live
// bytes are over the threshold. Humongous regions may be reclaimed during cleanup.
// not. Currently, we skip regions that we will never move during young gc, and
// regions whose liveness is above the occupancy threshold.
// Regions also need a complete remembered set to be a candidate.
static bool should_add(HeapRegion* hr);

@@ -256,7 +256,9 @@ void G1FullCollector::complete_collection() {
void G1FullCollector::before_marking_update_attribute_table(HeapRegion* hr) {
if (hr->is_free()) {
_region_attr_table.set_free(hr->hrm_index());
} else if (hr->is_pinned()) {
} else if (hr->is_humongous()) {
// Humongous objects will never be moved in the "main" compaction phase, but
// afterwards in a special phase if needed.
_region_attr_table.set_skip_compacting(hr->hrm_index());
} else {
// Everything else should be compacted.

@@ -67,7 +67,6 @@ void G1FullGCCompactTask::copy_object_to_new_location(oop obj) {
}
void G1FullGCCompactTask::compact_region(HeapRegion* hr) {
assert(!hr->is_pinned(), "Should be no pinned region in compaction queue");
assert(!hr->is_humongous(), "Should be no humongous regions in compaction queue");
if (!collector()->is_free(hr->hrm_index())) {

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2020, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -34,7 +34,8 @@
// the table specifies whether a Full GC cycle should be compacting or skip
// compacting a region.
// Reasons for not compacting a region:
// (1) the HeapRegion itself has been pinned at the start of Full GC.
// (1) the HeapRegion itself cannot be moved during this phase of the full gc
// (e.g. Humongous regions).
// (2) the occupancy of the region is too high to be considered eligible for compaction.
class G1FullGCHeapRegionAttr : public G1BiasedMappedArray<uint8_t> {
static const uint8_t Compacting = 0; // Region will be compacted.

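As a reading aid for the attribute-table hunks above, the following self-contained sketch mirrors the three-way classification done in before_marking_update_attribute_table: free regions, skip-compacting for humongous regions, compacting for everything else. The RegionAttr and RegionInfo types are simplified stand-ins, not the real G1BiasedMappedArray-backed G1FullGCHeapRegionAttr.

// Simplified stand-in for the per-region attribute table, for illustration only.
#include <cstdint>
#include <vector>

enum class RegionAttr : uint8_t { Free, SkipCompacting, Compacting };

struct RegionInfo {
  bool is_free;
  bool is_humongous;
};

static RegionAttr classify(const RegionInfo& r) {
  if (r.is_free) {
    return RegionAttr::Free;
  }
  if (r.is_humongous) {
    // Humongous objects are not moved in the main compaction phase; if needed
    // they are dealt with in a separate phase afterwards.
    return RegionAttr::SkipCompacting;
  }
  return RegionAttr::Compacting;   // everything else is compacted
}

int main() {
  std::vector<RegionInfo> regions = {{true, false}, {false, true}, {false, false}};
  std::vector<RegionAttr> attr_table;
  for (const RegionInfo& r : regions) {
    attr_table.push_back(classify(r));
  }
  return attr_table[1] == RegionAttr::SkipCompacting ? 0 : 1;
}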
@@ -47,7 +47,7 @@ bool G1FullGCPrepareTask::G1CalculatePointersClosure::do_heap_region(HeapRegion*
uint region_idx = hr->hrm_index();
assert(_collector->is_compaction_target(region_idx), "must be");
assert(!hr->is_pinned(), "must be");
assert(!hr->is_humongous(), "must be");
prepare_for_compaction(hr);

@@ -40,9 +40,9 @@ void G1DetermineCompactionQueueClosure::free_empty_humongous_region(HeapRegion*
}
inline bool G1DetermineCompactionQueueClosure::should_compact(HeapRegion* hr) const {
// There is no need to iterate and forward objects in pinned regions ie.
// There is no need to iterate and forward objects in non-movable regions ie.
// prepare them for compaction.
if (hr->is_pinned()) {
if (hr->is_humongous()) {
return false;
}
size_t live_words = _collector->live_words(hr->hrm_index());

@@ -409,10 +409,7 @@ public:
assert(hr->containing_set() == _old_set, "Heap region %u is old but not in the old set.", hr->hrm_index());
_old_count++;
} else {
// There are no other valid region types. Check for one invalid
// one we can identify: pinned without old or humongous set.
assert(!hr->is_pinned(), "Heap region %u is pinned but not old or humongous.", hr->hrm_index());
ShouldNotReachHere();
fatal("Invalid region type for region %u (%s)", hr->hrm_index(), hr->get_short_type_str());
}
return false;
}

@@ -557,8 +557,7 @@ class FreeCSetClosure : public HeapRegionClosure {
stats()->account_failed_region(r);
G1GCPhaseTimes* p = _g1h->phase_times();
assert(!r->is_pinned(), "Unexpected pinned region at index %u", r->hrm_index());
assert(r->in_collection_set(), "bad CS");
assert(r->in_collection_set(), "Failed evacuation of region %u not in collection set", r->hrm_index());
p->record_or_add_thread_work_item(G1GCPhaseTimes::RestoreRetainedRegions,
_worker_id,

@@ -402,10 +402,6 @@ public:
bool is_old_or_humongous() const { return _type.is_old_or_humongous(); }
// A pinned region contains objects which are not moved by garbage collections.
// Humongous regions are pinned.
bool is_pinned() const { return _type.is_pinned(); }
void set_free();
void set_eden();

@@ -32,6 +32,7 @@
#include "gc/g1/g1CollectedHeap.inline.hpp"
#include "gc/g1/g1ConcurrentMarkBitMap.inline.hpp"
#include "gc/g1/g1MonotonicArena.inline.hpp"
#include "gc/g1/g1Policy.hpp"
#include "gc/g1/g1Predictions.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/atomic.hpp"
@@ -177,7 +178,7 @@ inline size_t HeapRegion::block_size(const HeapWord* p, HeapWord* const pb) cons
inline void HeapRegion::reset_compacted_after_full_gc(HeapWord* new_top) {
set_top(new_top);
// After a compaction the mark bitmap in a non-pinned regions is invalid.
// After a compaction the mark bitmap in a movable region is invalid.
// But all objects are live, we get this by setting TAMS to bottom.
init_top_at_mark_start();

@@ -52,11 +52,11 @@ private:
// 00001 1 [ 3] Survivor
//
// 00010 0 [ 4] Humongous Mask
// 00100 0 [ 8] Pinned Mask
// 00110 0 [12] Starts Humongous
// 00110 1 [13] Continues Humongous
// 00010 0 [ 4] Starts Humongous
// 00010 1 [ 5] Continues Humongous
//
// 01000 0 [16] Old Mask
// 00100 0 [ 8] Old Mask
// 00100 0 [ 8] Old
//
typedef enum {
FreeTag = 0,
@@ -66,11 +66,10 @@ private:
SurvTag = YoungMask + 1,
HumongousMask = 4,
PinnedMask = 8,
StartsHumongousTag = HumongousMask | PinnedMask,
ContinuesHumongousTag = HumongousMask | PinnedMask + 1,
StartsHumongousTag = HumongousMask,
ContinuesHumongousTag = HumongousMask + 1,
OldMask = 16,
OldMask = 8,
OldTag = OldMask
} Tag;
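Because the two hunks above interleave the removed and added lines of the tag table, the resulting encoding is easier to see in one piece. The following is a simplified, self-contained sketch of the tag values and mask checks after this change; the values are copied from the diff, but it is not the real HeapRegionType class.

// Self-contained sketch of the tag encoding after the PinnedMask removal.
// Values follow the diff above; this is not the real HeapRegionType class.
#include <cassert>

enum Tag {
  FreeTag               = 0,
  YoungMask             = 2,
  EdenTag               = YoungMask,
  SurvTag               = YoungMask + 1,
  HumongousMask         = 4,
  StartsHumongousTag    = HumongousMask,       //  4, was HumongousMask | PinnedMask == 12
  ContinuesHumongousTag = HumongousMask + 1,   //  5, was 13
  OldMask               = 8,                   //  was 16; the PinnedMask bit is gone
  OldTag                = OldMask
};

static bool is_humongous(Tag t)        { return (t & HumongousMask) != 0; }
static bool is_old(Tag t)              { return (t & OldMask) != 0; }
static bool is_old_or_humongous(Tag t) { return (t & (OldMask | HumongousMask)) != 0; }

int main() {
  assert(is_humongous(StartsHumongousTag));
  assert(is_humongous(ContinuesHumongousTag));
  assert(is_old(OldTag));
  assert(is_old_or_humongous(OldTag) && is_old_or_humongous(StartsHumongousTag));
  assert(!is_old(StartsHumongousTag));   // humongous regions are not tagged old
  return 0;
}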
@@ -117,13 +116,10 @@ public:
bool is_starts_humongous() const { return get() == StartsHumongousTag; }
bool is_continues_humongous() const { return get() == ContinuesHumongousTag; }
// is_old regions may or may not also be pinned
bool is_old() const { return (get() & OldMask) != 0; }
bool is_old_or_humongous() const { return (get() & (OldMask | HumongousMask)) != 0; }
bool is_pinned() const { return (get() & PinnedMask) != 0; }
// Setters
void set_free() { set(FreeTag); }

@@ -78,7 +78,6 @@
declare_constant(HeapRegionType::EdenTag) \
declare_constant(HeapRegionType::SurvTag) \
declare_constant(HeapRegionType::HumongousMask) \
declare_constant(HeapRegionType::PinnedMask) \
declare_constant(HeapRegionType::StartsHumongousTag) \
declare_constant(HeapRegionType::ContinuesHumongousTag) \
declare_constant(HeapRegionType::OldMask) \

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2000, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2000, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -1104,9 +1104,6 @@ public class HSDB implements ObjectHistogramPanel.Listener, SAListener {
} else if (region.isHumongous()) {
anno = "Humongous ";
bad = false;
} else if (region.isPinned()) {
anno = "Pinned ";
bad = false;
} else if (region.isOld()) {
anno = "Old ";
bad = false;

@@ -124,10 +124,6 @@ public class HeapRegion extends ContiguousSpace implements LiveRegionsProvider {
return type.isHumongous();
}
public boolean isPinned() {
return type.isPinned();
}
public boolean isOld() {
return type.isOld();
}

@@ -45,7 +45,6 @@ public class HeapRegionType extends VMObject {
private static int humongousMask;
private static int startsHumongousTag;
private static int continuesHumongousTag;
private static int pinnedMask;
private static int oldMask;
private static CIntegerField tagField;
private int tag;
@@ -70,7 +69,6 @@ public class HeapRegionType extends VMObject {
startsHumongousTag = db.lookupIntConstant("HeapRegionType::StartsHumongousTag");
continuesHumongousTag = db.lookupIntConstant("HeapRegionType::ContinuesHumongousTag");
humongousMask = db.lookupIntConstant("HeapRegionType::HumongousMask");
pinnedMask = db.lookupIntConstant("HeapRegionType::PinnedMask");
oldMask = db.lookupIntConstant("HeapRegionType::OldMask");
}
@@ -102,10 +100,6 @@ public class HeapRegionType extends VMObject {
return tagField.getValue(addr) == continuesHumongousTag;
}
public boolean isPinned() {
return (tagField.getValue(addr) & pinnedMask) != 0;
}
public boolean isOld() {
return (tagField.getValue(addr) & oldMask) != 0;
}
@@ -130,9 +124,6 @@ public class HeapRegionType extends VMObject {
if (isContinuesHumongous()) {
return "ContinuesHumongous";
}
if (isPinned()) {
return "Pinned";
}
if (isOld()) {
return "Old";
}