8261213: [BACKOUT] MutableSpace's end should be atomic
Reviewed-by: tschatzl, ayang
parent 3495febf51
commit 224c166c30
@@ -28,7 +28,6 @@
 #include "memory/allocation.hpp"
 #include "memory/iterator.hpp"
 #include "memory/memRegion.hpp"
-#include "runtime/atomic.hpp"
 #include "utilities/copy.hpp"
 #include "utilities/globalDefinitions.hpp"
 #include "utilities/macros.hpp"
@@ -43,6 +42,9 @@ class WorkGang;
 // page allocation time by having the memory pretouched (with
 // AlwaysPretouch) and for optimizing page placement on NUMA systems
 // by make the underlying region interleaved (with UseNUMA).
+//
+// Invariant: bottom() <= top() <= end()
+// top() and end() are exclusive.
 
 class MutableSpaceMangler;
 
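The comment lines re-added above restore the space's basic invariant, bottom() <= top() <= end(), to the class-level documentation. As a standalone illustration only (not HotSpot code: SimpleSpace is a hypothetical stand-in for MutableSpace, char* stands in for HeapWord*, and <cassert> replaces HotSpot's assert machinery), the invariant amounts to:

// Standalone sketch of the bottom() <= top() <= end() invariant.
// SimpleSpace is a hypothetical stand-in for MutableSpace.
#include <cassert>

class SimpleSpace {
  char* _bottom;   // start of the region
  char* _top;      // current allocation pointer (exclusive: first free byte)
  char* _end;      // current allocation limit (exclusive)
public:
  SimpleSpace(char* bottom, char* end) : _bottom(bottom), _top(bottom), _end(end) {}
  char* bottom() const { return _bottom; }
  char* top() const    { return _top; }
  char* end() const    { return _end; }

  void verify() const {
    // The invariant from the class comment above.
    assert(bottom() <= top() && top() <= end());
  }
};

int main() {
  char backing[1024];
  SimpleSpace s(backing, backing + sizeof(backing));
  s.verify();   // holds trivially for an empty space
  return 0;
}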
@@ -54,11 +56,9 @@ class MutableSpace: public CHeapObj<mtGC> {
   // The last region which page had been setup to be interleaved.
   MemRegion _last_setup_region;
   size_t _alignment;
-  // Supports CAS-based allocation.
-  // Invariant: bottom() <= top() <= end()
-  HeapWord* _bottom;       // Start of the region.
-  HeapWord* volatile _top; // Current allocation pointer.
-  HeapWord* volatile _end; // Current allocation limit. expand() advances.
+  HeapWord* _bottom;
+  HeapWord* volatile _top;
+  HeapWord* _end;
 
   MutableSpaceMangler* mangler() { return _mangler; }
 
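The field comments dropped here came from the change now being backed out; the "Supports CAS-based allocation" note describes how parallel GC threads allocate from a MutableSpace: each thread races to advance _top with a compare-and-swap (MutableSpace::cas_allocate). A minimal standalone sketch of that pattern, with std::atomic standing in for HotSpot's Atomic wrappers and the CasSpace name purely illustrative:

// Standalone sketch of CAS-based (lock-free) bump-pointer allocation over a
// _top pointer, in the spirit of MutableSpace::cas_allocate. Uses std::atomic
// instead of HotSpot's Atomic:: wrappers; names and types are illustrative.
#include <atomic>
#include <cstddef>
#include <cstdio>

class CasSpace {
  char*              _bottom;
  std::atomic<char*> _top;    // racing allocators CAS this forward
  char*              _end;    // allocation limit
public:
  CasSpace(char* bottom, char* end) : _bottom(bottom), _top(bottom), _end(end) {}
  char* bottom() const { return _bottom; }

  // Returns nullptr when the space cannot satisfy the request.
  char* cas_allocate(size_t size) {
    char* old_top = _top.load();
    for (;;) {
      char* new_top = old_top + size;
      if (new_top > _end) return nullptr;              // out of space
      // compare_exchange_weak refreshes old_top on failure, so just retry.
      if (_top.compare_exchange_weak(old_top, new_top)) return old_top;
    }
  }
};

int main() {
  char backing[256];
  CasSpace s(backing, backing + sizeof(backing));
  std::printf("allocated at %p\n", static_cast<void*>(s.cas_allocate(64)));
  return 0;
}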
@@ -67,22 +67,21 @@ class MutableSpace: public CHeapObj<mtGC> {
   void set_last_setup_region(MemRegion mr) { _last_setup_region = mr; }
   MemRegion last_setup_region() const { return _last_setup_region; }
 
- protected:
-  HeapWord* volatile* top_addr() { return &_top; }
-  HeapWord* volatile* end_addr() { return &_end; }
-
  public:
   virtual ~MutableSpace();
   MutableSpace(size_t page_size);
 
   // Accessors
   HeapWord* bottom() const { return _bottom; }
-  HeapWord* top() const { return Atomic::load(&_top); }
-  HeapWord* end() const { return Atomic::load(&_end); }
+  HeapWord* top() const { return _top; }
+  HeapWord* end() const { return _end; }
 
   void set_bottom(HeapWord* value) { _bottom = value; }
-  virtual void set_top(HeapWord* value) { Atomic::store(&_top, value); }
-  void set_end(HeapWord* value) { Atomic::store(&_end, value); }
+  virtual void set_top(HeapWord* value) { _top = value; }
+  void set_end(HeapWord* value) { _end = value; }
 
+  HeapWord* volatile* top_addr() { return &_top; }
+  HeapWord** end_addr() { return &_end; }
 
   size_t alignment() { return _alignment; }
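This hunk is the core of the backout: top(), end(), set_top() and set_end() go back to plain loads and stores of the fields (with _top still declared volatile, while _end becomes a plain field again, as in the hunk above), and the public top_addr()/end_addr() accessors used outside the class return. For comparison only, and not as HotSpot code, the two shapes look roughly like this, using std::atomic with relaxed ordering as an analogy for Atomic::load/Atomic::store; the class names are illustrative:

// Analogy only: the accessor shape being removed vs. the shape being restored.
// char* stands in for HeapWord*.
#include <atomic>

class AtomicEndSpace {              // shape introduced by the backed-out change
  std::atomic<char*> _end;
public:
  char* end() const         { return _end.load(std::memory_order_relaxed); }
  void set_end(char* value) { _end.store(value, std::memory_order_relaxed); }
};

class PlainEndSpace {               // shape restored by this backout
  char* _end;
public:
  char* end() const         { return _end; }
  void set_end(char* value) { _end = value; }
  char** end_addr()         { return &_end; }   // raw address for external users
};

int main() {
  PlainEndSpace p; AtomicEndSpace a;
  (void)p; (void)a;
  return 0;
}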
@@ -200,6 +200,9 @@ class ParallelScavengeHeap : public CollectedHeap {
 
   bool supports_inline_contig_alloc() const { return !UseNUMA; }
 
+  HeapWord* volatile* top_addr() const { return !UseNUMA ? young_gen()->top_addr() : (HeapWord* volatile*)-1; }
+  HeapWord** end_addr() const { return !UseNUMA ? young_gen()->end_addr() : (HeapWord**)-1; }
+
   void ensure_parsability(bool retire_tlabs);
   void resize_all_tlabs();
 
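The two accessors restored to ParallelScavengeHeap above serve the inline contiguous allocation path: when supports_inline_contig_alloc() is true, callers are handed the raw addresses of eden's top and end words, and a (-1) sentinel is returned when UseNUMA disables that path. A standalone sketch of that delegation-plus-sentinel pattern, under the assumption that HeapLike, EdenLike and use_numa are hypothetical stand-ins and char* replaces HeapWord*:

// Sketch of the delegation above: only hand out eden's top/end addresses when
// inline contiguous allocation is supported, otherwise return a -1 sentinel.
struct EdenLike {
  char* volatile top_word;
  char*          end_word;
  char* volatile* top_addr() { return &top_word; }
  char**          end_addr() { return &end_word; }
};

struct HeapLike {
  bool     use_numa = false;   // stand-in for the UseNUMA flag
  EdenLike eden = {};

  bool supports_inline_contig_alloc() const { return !use_numa; }

  char* volatile* top_addr() {
    return !use_numa ? eden.top_addr() : (char* volatile*)-1;
  }
  char** end_addr() {
    return !use_numa ? eden.end_addr() : (char**)-1;
  }
};

int main() {
  HeapLike heap;
  return heap.supports_inline_contig_alloc() ? 0 : 1;
}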
@@ -133,6 +133,9 @@ class PSYoungGen : public CHeapObj<mtGC> {
     return result;
   }
 
+  HeapWord* volatile* top_addr() const { return eden_space()->top_addr(); }
+  HeapWord** end_addr() const { return eden_space()->end_addr(); }
+
   // Iteration.
   void oop_iterate(OopIterateClosure* cl);
   void object_iterate(ObjectClosure* cl);
@@ -46,7 +46,7 @@
   nonstatic_field(PSVirtualSpace, _committed_high_addr, char*) \
   \
   nonstatic_field(MutableSpace, _bottom, HeapWord*) \
-  volatile_nonstatic_field(MutableSpace, _end, HeapWord*) \
+  nonstatic_field(MutableSpace, _end, HeapWord*) \
   volatile_nonstatic_field(MutableSpace, _top, HeapWord*) \
   \
   nonstatic_field(PSYoungGen, _reserved, MemRegion) \
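The last hunk keeps the serviceability (vmStructs) description of MutableSpace::_end in sync with its declaration: with _end a plain field again, the entry goes back from volatile_nonstatic_field to nonstatic_field. As a rough standalone analogy only (not HotSpot's vmStructs machinery; MutableSpaceLike and the table layout are illustrative), such a table records each field's name, byte offset and volatility so an external tool can locate it in a target process:

// Rough analogy of a vmStructs-style field table.
#include <cstddef>
#include <cstdio>

struct MutableSpaceLike {
  char*          _bottom;
  char* volatile _top;
  char*          _end;      // plain (non-volatile) again after the backout
};

struct FieldEntry { const char* name; size_t offset; bool is_volatile; };

static const FieldEntry kFields[] = {
  { "_bottom", offsetof(MutableSpaceLike, _bottom), false },
  { "_top",    offsetof(MutableSpaceLike, _top),    true  },
  { "_end",    offsetof(MutableSpaceLike, _end),    false },
};

int main() {
  for (const FieldEntry& f : kFields) {
    std::printf("MutableSpaceLike::%s  offset=%zu  volatile=%d\n",
                f.name, f.offset, (int)f.is_volatile);
  }
  return 0;
}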