8278756: Parallel: Drop PSOldGen::_reserved
Reviewed-by: tschatzl, mli
parent 271d26f141
commit f15a59ce72
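In short, this change deletes the cached PSOldGen::_reserved field and computes the reserved region from the generation's PSVirtualSpace whenever it is needed. A minimal before/after sketch of the accessor, condensed from the psOldGen.hpp hunk below (not a complete class definition; member names as in the diff):

    // Before: a MemRegion cached in initialize_work() and used for containment tests.
    MemRegion _reserved;
    MemRegion reserved() const { return _reserved; }

    // After: derived on demand from the virtual-space boundaries, which are fixed
    // once the space is reserved, so there is no separate field to keep in sync.
    MemRegion reserved() const {
      return MemRegion((HeapWord*)(_virtual_space->low_boundary()),
                       (HeapWord*)(_virtual_space->high_boundary()));
    }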
@@ -41,6 +41,7 @@ class ObjectStartArray : public CHeapObj<mtGC> {
  private:
   PSVirtualSpace  _virtual_space;
   MemRegion       _reserved_region;
+  // The committed (old-gen heap) virtual space this object-start-array covers.
   MemRegion       _covered_region;
   MemRegion       _blocks_region;
   jbyte*          _raw_base;
@@ -645,8 +645,10 @@ void ParallelScavengeHeap::prepare_for_verify() {
 PSHeapSummary ParallelScavengeHeap::create_ps_heap_summary() {
   PSOldGen* old = old_gen();
   HeapWord* old_committed_end = (HeapWord*)old->virtual_space()->committed_high_addr();
-  VirtualSpaceSummary old_summary(old->reserved().start(), old_committed_end, old->reserved().end());
-  SpaceSummary old_space(old->reserved().start(), old_committed_end, old->used_in_bytes());
+  HeapWord* old_reserved_start = old->reserved().start();
+  HeapWord* old_reserved_end = old->reserved().end();
+  VirtualSpaceSummary old_summary(old_reserved_start, old_committed_end, old_reserved_end);
+  SpaceSummary old_space(old_reserved_start, old_committed_end, old->used_in_bytes());

   PSYoungGen* young = young_gen();
   VirtualSpaceSummary young_summary(young->reserved().start(),
@@ -50,10 +50,6 @@ void PSOldGen::initialize(ReservedSpace rs, size_t initial_size, size_t alignment,
   initialize_virtual_space(rs, initial_size, alignment);
   initialize_work(perf_data_name, level);

-  // The old gen can grow to max_gen_size().  _reserve reflects only
-  // the current maximum that can be committed.
-  assert(_reserved.byte_size() <= max_gen_size(), "Consistency check");
-
   initialize_performance_counters(perf_data_name, level);
 }
@@ -69,66 +65,51 @@ void PSOldGen::initialize_virtual_space(ReservedSpace rs,
 }

 void PSOldGen::initialize_work(const char* perf_data_name, int level) {
-  //
-  // Basic memory initialization
-  //
-
-  MemRegion limit_reserved((HeapWord*)virtual_space()->low_boundary(),
-                           heap_word_size(max_gen_size()));
-  assert(limit_reserved.byte_size() == max_gen_size(),
-         "word vs bytes confusion");
-  //
-  // Object start stuff
-  //
-
-  start_array()->initialize(limit_reserved);
-
-  _reserved = MemRegion((HeapWord*)virtual_space()->low_boundary(),
-                        (HeapWord*)virtual_space()->high_boundary());
-
-  //
-  // Card table stuff
-  //
-
-  MemRegion cmr((HeapWord*)virtual_space()->low(),
-                (HeapWord*)virtual_space()->high());
+  MemRegion const reserved_mr = reserved();
+  assert(reserved_mr.byte_size() == max_gen_size(), "invariant");
+
+  // Object start stuff: for all reserved memory
+  start_array()->initialize(reserved_mr);
+
+  // Card table stuff: for all committed memory
+  MemRegion committed_mr((HeapWord*)virtual_space()->low(),
+                         (HeapWord*)virtual_space()->high());
   if (ZapUnusedHeapArea) {
     // Mangle newly committed space immediately rather than
     // waiting for the initialization of the space even though
     // mangling is related to spaces.  Doing it here eliminates
     // the need to carry along information that a complete mangling
     // (bottom to end) needs to be done.
-    SpaceMangler::mangle_region(cmr);
+    SpaceMangler::mangle_region(committed_mr);
   }

   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
   PSCardTable* ct = heap->card_table();
-  ct->resize_covered_region(cmr);
+  ct->resize_covered_region(committed_mr);

   // Verify that the start and end of this generation is the start of a card.
   // If this wasn't true, a single card could span more than one generation,
   // which would cause problems when we commit/uncommit memory, and when we
   // clear and dirty cards.
-  guarantee(ct->is_card_aligned(_reserved.start()), "generation must be card aligned");
-  if (_reserved.end() != heap->reserved_region().end()) {
-    // Don't check at the very end of the heap as we'll assert that we're probing off
-    // the end if we try.
-    guarantee(ct->is_card_aligned(_reserved.end()), "generation must be card aligned");
-  }
+  guarantee(ct->is_card_aligned(reserved_mr.start()), "generation must be card aligned");
+  // Check the heap layout documented at `class ParallelScavengeHeap`.
+  assert(reserved_mr.end() != heap->reserved_region().end(), "invariant");
+  guarantee(ct->is_card_aligned(reserved_mr.end()), "generation must be card aligned");

   //
   // ObjectSpace stuff
   //

   _object_space = new MutableSpace(virtual_space()->alignment());
-  object_space()->initialize(cmr,
+  object_space()->initialize(committed_mr,
                              SpaceDecorator::Clear,
                              SpaceDecorator::Mangle,
                              MutableSpace::SetupPages,
                              &ParallelScavengeHeap::heap()->workers());

   // Update the start_array
-  start_array()->set_covered_region(cmr);
+  start_array()->set_covered_region(committed_mr);
 }

 void PSOldGen::initialize_performance_counters(const char* perf_data_name, int level) {
@@ -314,7 +295,6 @@ void PSOldGen::resize(size_t desired_free_space) {
   // Adjust according to our min and max
   new_size = clamp(new_size, min_gen_size(), max_gen_size());

-  assert(max_gen_size() >= reserved().byte_size(), "max new size problem?");
   new_size = align_up(new_size, alignment);

   const size_t current_size = capacity_in_bytes();
@@ -34,9 +34,7 @@
 class PSOldGen : public CHeapObj<mtGC> {
   friend class VMStructs;

  private:
-  MemRegion                _reserved;          // Used for simple containment tests
   PSVirtualSpace*          _virtual_space;     // Controls mapping and unmapping of virtual mem
   ObjectStartArray         _start_array;       // Keeps track of where objects start in a 512b block
   MutableSpace*            _object_space;      // Where all the objects live
@@ -99,16 +97,20 @@ class PSOldGen : public CHeapObj<mtGC> {
   PSOldGen(ReservedSpace rs, size_t initial_size, size_t min_size,
            size_t max_size, const char* perf_data_name, int level);

-  MemRegion reserved() const { return _reserved; }
+  MemRegion reserved() const {
+    return MemRegion((HeapWord*)(_virtual_space->low_boundary()),
+                     (HeapWord*)(_virtual_space->high_boundary()));
+  }

   size_t max_gen_size() const { return _max_gen_size; }
   size_t min_gen_size() const { return _min_gen_size; }

   bool is_in(const void* p) const {
-    return _virtual_space->contains((void *)p);
+    return _virtual_space->is_in_committed((void *)p);
   }

   bool is_in_reserved(const void* p) const {
-    return reserved().contains(p);
+    return _virtual_space->is_in_reserved(p);
   }

   MutableSpace* object_space() const { return _object_space; }
@@ -1041,9 +1041,9 @@ void PSParallelCompact::post_compact()
   PSCardTable* ct = heap->card_table();
   MemRegion old_mr = heap->old_gen()->reserved();
   if (young_gen_empty) {
-    ct->clear(MemRegion(old_mr.start(), old_mr.end()));
+    ct->clear(old_mr);
   } else {
-    ct->invalidate(MemRegion(old_mr.start(), old_mr.end()));
+    ct->invalidate(old_mr);
   }

   // Delete metaspaces for unloaded class loaders and clean up loader_data graph
@@ -66,11 +66,6 @@ PSVirtualSpace::~PSVirtualSpace() {
   release();
 }

-bool PSVirtualSpace::contains(void* p) const {
-  char* const cp = (char*)p;
-  return cp >= committed_low_addr() && cp < committed_high_addr();
-}
-
 void PSVirtualSpace::release() {
   DEBUG_ONLY(PSVirtualSpaceVerifier this_verifier(this));
   // This may not release memory it didn't reserve.
@@ -75,7 +75,13 @@ class PSVirtualSpace : public CHeapObj<mtGC> {
   PSVirtualSpace();
   void initialize(ReservedSpace rs);

-  bool contains(void* p) const;
+  bool is_in_committed(const void* p) const {
+    return (p >= committed_low_addr()) && (p < committed_high_addr());
+  }
+
+  bool is_in_reserved(const void* p) const {
+    return (p >= reserved_low_addr()) && (p < reserved_high_addr());
+  }

   // Accessors (all sizes are bytes).
   size_t alignment() const { return _alignment; }
@@ -85,6 +91,7 @@ class PSVirtualSpace : public CHeapObj<mtGC> {
   char* committed_high_addr() const { return _committed_high_addr; }
   bool special() const { return _special; }

+  // Return size in bytes
   inline size_t committed_size() const;
   inline size_t reserved_size() const;
   inline size_t uncommitted_size() const;
@@ -90,7 +90,7 @@ class PSYoungGen : public CHeapObj<mtGC> {
   MemRegion reserved() const { return _reserved; }

   bool is_in(const void* p) const {
-    return _virtual_space->contains((void *)p);
+    return _virtual_space->is_in_committed(p);
   }

   bool is_in_reserved(const void* p) const {
@@ -57,7 +57,6 @@
   nonstatic_field(PSYoungGen,      _min_gen_size,    const size_t)      \
   nonstatic_field(PSYoungGen,      _max_gen_size,    const size_t)      \
                                                                         \
-  nonstatic_field(PSOldGen,        _reserved,        MemRegion)         \
   nonstatic_field(PSOldGen,        _virtual_space,   PSVirtualSpace*)   \
   nonstatic_field(PSOldGen,        _object_space,    MutableSpace*)     \
   nonstatic_field(PSOldGen,        _min_gen_size,    const size_t)      \
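As a usage note (a sketch under the assumption of the surrounding HotSpot declarations; the variables below are hypothetical and not part of the commit): the removed PSVirtualSpace::contains() is replaced by two inline helpers, and the generation containment checks delegate to them.

    // Hypothetical caller; `old_gen` is a PSOldGen* and `p` is an arbitrary address.
    bool in_committed = old_gen->is_in(p);          // -> _virtual_space->is_in_committed(p)
    bool in_reserved  = old_gen->is_in_reserved(p); // -> _virtual_space->is_in_reserved(p)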