8259862: MutableSpace's end should be atomic

Make _end volatile and use atomic access

Reviewed-by: ayang, tschatzl
Kim Barrett 2021-02-05 07:24:09 +00:00
parent d2bd499222
commit 1e0a1013ef
4 changed files with 15 additions and 20 deletions
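Why the change is needed: per the new field comment below, expand() advances _end while other threads concurrently read end() during CAS-based allocation, so plain loads and stores of _end are a data race. The following is a minimal standalone analogy of the fixed pattern, not JDK code: std::atomic stands in for HotSpot's Atomic class, and ToySpace/HeapWordPtr are illustrative names (HotSpot's plain Atomic::load/store are, to my understanding, atomic without extra ordering; std::atomic's stronger defaults are used here only for brevity).

    #include <atomic>

    typedef char* HeapWordPtr;  // illustrative stand-in for HeapWord*

    // Minimal analogy of the fix: _end is read and written atomically,
    // mirroring Atomic::load(&_end) / Atomic::store(&_end, value).
    struct ToySpace {
      std::atomic<HeapWordPtr> _top{nullptr};  // bumped by allocating threads
      std::atomic<HeapWordPtr> _end{nullptr};  // advanced by expansion

      HeapWordPtr end() const         { return _end.load(); }
      void set_end(HeapWordPtr value) { _end.store(value); }
    };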

@@ -28,6 +28,7 @@
 #include "memory/allocation.hpp"
 #include "memory/iterator.hpp"
 #include "memory/memRegion.hpp"
+#include "runtime/atomic.hpp"
 #include "utilities/copy.hpp"
 #include "utilities/globalDefinitions.hpp"
 #include "utilities/macros.hpp"
@@ -42,9 +43,6 @@ class WorkGang;
 // page allocation time by having the memory pretouched (with
 // AlwaysPreTouch) and for optimizing page placement on NUMA systems
 // by making the underlying region interleaved (with UseNUMA).
-//
-// Invariant: bottom() <= top() <= end()
-// top() and end() are exclusive.
 class MutableSpaceMangler;
@@ -56,9 +54,11 @@ class MutableSpace: public CHeapObj<mtGC> {
 // The last region whose pages had been set up to be interleaved.
 MemRegion _last_setup_region;
 size_t _alignment;
-HeapWord* _bottom;
-HeapWord* volatile _top;
-HeapWord* _end;
+// Supports CAS-based allocation.
+// Invariant: bottom() <= top() <= end()
+HeapWord* _bottom;          // Start of the region.
+HeapWord* volatile _top;    // Current allocation pointer.
+HeapWord* volatile _end;    // Current allocation limit. expand() advances.
 MutableSpaceMangler* mangler() { return _mangler; }
@@ -67,21 +67,22 @@ class MutableSpace: public CHeapObj<mtGC> {
 void set_last_setup_region(MemRegion mr) { _last_setup_region = mr; }
 MemRegion last_setup_region() const { return _last_setup_region; }
+protected:
+HeapWord* volatile* top_addr() { return &_top; }
+HeapWord* volatile* end_addr() { return &_end; }
 public:
 virtual ~MutableSpace();
 MutableSpace(size_t page_size);
 // Accessors
 HeapWord* bottom() const { return _bottom; }
-HeapWord* top() const { return _top; }
-HeapWord* end() const { return _end; }
+HeapWord* top() const { return Atomic::load(&_top); }
+HeapWord* end() const { return Atomic::load(&_end); }
 void set_bottom(HeapWord* value) { _bottom = value; }
-virtual void set_top(HeapWord* value) { _top = value; }
-void set_end(HeapWord* value) { _end = value; }
-HeapWord* volatile* top_addr() { return &_top; }
-HeapWord** end_addr() { return &_end; }
+virtual void set_top(HeapWord* value) { Atomic::store(&_top, value); }
+void set_end(HeapWord* value) { Atomic::store(&_end, value); }
 size_t alignment() { return _alignment; }
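For the "Supports CAS-based allocation" comment above: allocating threads race to bump _top with a compare-and-swap while _end may move underneath them, which is why both fields now get atomic access. A hedged sketch of that bump-pointer pattern follows (std::atomic again; this cas_allocate is an illustration of the technique, not the JDK's MutableSpace implementation):

    #include <atomic>
    #include <cstddef>

    // Sketch of CAS bump-pointer allocation under the invariant
    // bottom() <= top() <= end(). Illustrative only.
    char* cas_allocate(std::atomic<char*>& top, std::atomic<char*>& end,
                       std::size_t bytes) {
      char* old_top = top.load();
      while (end.load() - old_top >= static_cast<std::ptrdiff_t>(bytes)) {
        // Try to claim [old_top, old_top + bytes); on failure the CAS
        // refreshes old_top and the capacity check runs again.
        if (top.compare_exchange_weak(old_top, old_top + bytes)) {
          return old_top;
        }
      }
      return nullptr;  // no room left: caller would expand or trigger a GC
    }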

@@ -200,9 +200,6 @@ class ParallelScavengeHeap : public CollectedHeap {
-bool supports_inline_contig_alloc() const { return !UseNUMA; }
-HeapWord* volatile* top_addr() const { return !UseNUMA ? young_gen()->top_addr() : (HeapWord* volatile*)-1; }
-HeapWord** end_addr() const { return !UseNUMA ? young_gen()->end_addr() : (HeapWord**)-1; }
 void ensure_parsability(bool retire_tlabs);
 void resize_all_tlabs();

@@ -133,9 +133,6 @@ class PSYoungGen : public CHeapObj<mtGC> {
 return result;
 }
-HeapWord* volatile* top_addr() const { return eden_space()->top_addr(); }
-HeapWord** end_addr() const { return eden_space()->end_addr(); }
 // Iteration.
 void oop_iterate(OopIterateClosure* cl);
 void object_iterate(ObjectClosure* cl);
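Note on the two removals above: with _end volatile, MutableSpace::end_addr() now returns HeapWord* volatile* and is protected, so the ParallelScavengeHeap and PSYoungGen accessors that forwarded it to callers as a plain HeapWord** no longer fit; dropping them (and, with them, the heap's inline contiguous allocation hook) appears to be the intent, though that reading is an inference from the diff rather than something the commit message states.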

@@ -46,7 +46,7 @@
 nonstatic_field(PSVirtualSpace, _committed_high_addr, char*) \
 \
 nonstatic_field(MutableSpace, _bottom, HeapWord*) \
-nonstatic_field(MutableSpace, _end, HeapWord*) \
+volatile_nonstatic_field(MutableSpace, _end, HeapWord*) \
 volatile_nonstatic_field(MutableSpace, _top, HeapWord*) \
 \
 nonstatic_field(PSYoungGen, _reserved, MemRegion) \
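The final hunk keeps the Serviceability Agent's view in sync: the VMStructs table mirrors HotSpot field declarations, so now that _end is declared volatile it is registered with volatile_nonstatic_field, matching the existing _top entry.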