8337674: ZGC: Consistent style for naming private static constants

Reviewed-by: stefank, aboldtch, mli
Joel Sikström 2024-09-19 08:47:20 +00:00 committed by Hamlin Li
parent 118c9ade1a
commit 8908812d0a
24 changed files with 62 additions and 62 deletions
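The change is mechanical: ZGC's private static constants move from underscore-prefixed, field-style names to the CamelCase style already used for other ZGC constants. A minimal before/after sketch of the pattern (illustrative only; the class and member types below are simplified, not copies of any file in the diff):

#include <cstddef>

// Hypothetical sketch of the rename pattern applied by this commit.
class BufferBefore {
private:
  static const std::size_t _buffer_length = 32;  // old style: named like an instance field
  int _buffer[_buffer_length];
};

class BufferAfter {
private:
  static const std::size_t BufferLength = 32;    // new style: CamelCase, like other ZGC constants
  int _buffer[BufferLength];
};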

View File

@@ -93,7 +93,7 @@ static size_t probe_valid_max_address_bit() {
 }
 
 size_t ZPlatformAddressOffsetBits() {
-  const static size_t valid_max_address_offset_bits = probe_valid_max_address_bit() + 1;
+  static const size_t valid_max_address_offset_bits = probe_valid_max_address_bit() + 1;
   const size_t max_address_offset_bits = valid_max_address_offset_bits - 3;
   const size_t min_address_offset_bits = max_address_offset_bits - 2;
   const size_t address_offset = round_up_power_of_2(MaxHeapSize * ZVirtualToPhysicalRatio);

View File

@@ -90,7 +90,7 @@ static size_t probe_valid_max_address_bit() {
 }
 
 size_t ZPlatformAddressOffsetBits() {
-  const static size_t valid_max_address_offset_bits = probe_valid_max_address_bit() + 1;
+  static const size_t valid_max_address_offset_bits = probe_valid_max_address_bit() + 1;
   const size_t max_address_offset_bits = valid_max_address_offset_bits - 3;
   const size_t min_address_offset_bits = max_address_offset_bits - 2;
   const size_t address_offset = round_up_power_of_2(MaxHeapSize * ZVirtualToPhysicalRatio);

View File

@@ -92,7 +92,7 @@ static size_t probe_valid_max_address_bit() {
 }
 
 size_t ZPlatformAddressOffsetBits() {
-  const static size_t valid_max_address_offset_bits = probe_valid_max_address_bit() + 1;
+  static const size_t valid_max_address_offset_bits = probe_valid_max_address_bit() + 1;
   const size_t max_address_offset_bits = valid_max_address_offset_bits - 3;
   const size_t min_address_offset_bits = max_address_offset_bits - 2;
   const size_t address_offset = round_up_power_of_2(MaxHeapSize * ZVirtualToPhysicalRatio);

View File

@@ -636,7 +636,7 @@ void ZBarrierSetAssembler::copy_load_at(MacroAssembler* masm,
 
   // Remove metadata bits so that the store side (vectorized or non-vectorized) can
   // inject the store-good color with an or instruction.
-  __ andq(dst, _zpointer_address_mask);
+  __ andq(dst, ZPointerAddressMask);
 
   if ((decorators & ARRAYCOPY_CHECKCAST) != 0) {
     // The checkcast arraycopy needs to be able to dereference the oops in order to perform a typechecks.

View File

@@ -64,7 +64,7 @@ private:
   GrowableArrayCHeap<address, mtGC> _store_good_relocations;
 
 public:
-  static const int32_t _zpointer_address_mask = 0xFFFF0000;
+  static const int32_t ZPointerAddressMask = 0xFFFF0000;
 
   ZBarrierSetAssembler();

View File

@@ -141,7 +141,7 @@ instruct zLoadPNullCheck(rFlagsReg cr, memory op, immP0 zero)
   ins_encode %{
     // A null pointer will have all address bits 0. This mask sign extends
     // all address bits, so we can test if the address is 0.
-    __ testq($op$$Address, ZBarrierSetAssembler::_zpointer_address_mask);
+    __ testq($op$$Address, ZBarrierSetAssembler::ZPointerAddressMask);
   %}
   ins_pipe(ialu_cr_reg_imm);
 %}

View File

@@ -103,14 +103,14 @@
 #define ZFILENAME_HEAP "java_heap"
 
 // Preferred tmpfs mount points, ordered by priority
-static const char* z_preferred_tmpfs_mountpoints[] = {
+static const char* ZPreferredTmpfsMountpoints[] = {
   "/dev/shm",
   "/run/shm",
   nullptr
 };
 
 // Preferred hugetlbfs mount points, ordered by priority
-static const char* z_preferred_hugetlbfs_mountpoints[] = {
+static const char* ZPreferredHugetlbfsMountpoints[] = {
   "/dev/hugepages",
   "/hugepages",
   nullptr
@@ -226,8 +226,8 @@ int ZPhysicalMemoryBacking::create_file_fd(const char* name) const {
       ? ZFILESYSTEM_HUGETLBFS
       : ZFILESYSTEM_TMPFS;
   const char** const preferred_mountpoints = ZLargePages::is_explicit()
-      ? z_preferred_hugetlbfs_mountpoints
-      : z_preferred_tmpfs_mountpoints;
+      ? ZPreferredHugetlbfsMountpoints
+      : ZPreferredTmpfsMountpoints;
 
   // Find mountpoint
   ZMountPoint mountpoint(filesystem, preferred_mountpoints);

View File

@@ -46,7 +46,7 @@
 #include "utilities/growableArray.hpp"
 #include "utilities/macros.hpp"
 
-template<typename K, typename V, size_t _table_size>
+template<typename K, typename V, size_t TableSize>
 class ZArenaHashtable : public ResourceObj {
   class ZArenaHashtableEntry : public ResourceObj {
   public:
@@ -55,10 +55,10 @@ class ZArenaHashtable : public ResourceObj {
     V _value;
   };
 
-  static const size_t _table_mask = _table_size - 1;
+  static const size_t TableMask = TableSize - 1;
 
   Arena* _arena;
-  ZArenaHashtableEntry* _table[_table_size];
+  ZArenaHashtableEntry* _table[TableSize];
 
 public:
   class Iterator {
@@ -84,7 +84,7 @@ public:
       if (_current_entry != nullptr) {
        _current_entry = _current_entry->_next;
       }
-      while (_current_entry == nullptr && ++_current_index < _table_size) {
+      while (_current_entry == nullptr && ++_current_index < TableSize) {
        _current_entry = _table->_table[_current_index];
       }
     }
@@ -100,12 +100,12 @@ public:
     ZArenaHashtableEntry* entry = new (_arena) ZArenaHashtableEntry();
     entry->_key = key;
     entry->_value = value;
-    entry->_next = _table[key & _table_mask];
-    _table[key & _table_mask] = entry;
+    entry->_next = _table[key & TableMask];
+    _table[key & TableMask] = entry;
   }
 
   V* get(K key) const {
-    for (ZArenaHashtableEntry* e = _table[key & _table_mask]; e != nullptr; e = e->_next) {
+    for (ZArenaHashtableEntry* e = _table[key & TableMask]; e != nullptr; e = e->_next) {
       if (e->_key == key) {
         return &(e->_value);
       }

View File

@@ -155,7 +155,7 @@ public:
 };
 
 template<> struct BarrierSet::GetName<ZBarrierSet> {
-  static const BarrierSet::Name value = BarrierSet::ZBarrierSet;
+  static const BarrierSet::Name Value = BarrierSet::ZBarrierSet;
 };
 
 template<> struct BarrierSet::GetType<BarrierSet::ZBarrierSet> {

View File

@@ -839,7 +839,7 @@ void ZDirector::evaluate_rules() {
 }
 
 bool ZDirector::wait_for_tick() {
-  const uint64_t interval_ms = MILLIUNITS / decision_hz;
+  const uint64_t interval_ms = MILLIUNITS / DecisionHz;
 
   ZLocker<ZConditionLock> locker(&_monitor);

View File

@@ -29,7 +29,7 @@
 class ZDirector : public ZThread {
 private:
-  static const uint64_t decision_hz = 100;
+  static const uint64_t DecisionHz = 100;
 
   static ZDirector* _director;
 
   ZConditionLock _monitor;

View File

@@ -35,9 +35,9 @@
 static const ZStatCounter ZCounterMarkSeqNumResetContention("Contention", "Mark SeqNum Reset Contention", ZStatUnitOpsPerSecond);
 static const ZStatCounter ZCounterMarkSegmentResetContention("Contention", "Mark Segment Reset Contention", ZStatUnitOpsPerSecond);
 
-static size_t bitmap_size(uint32_t size, size_t nsegments) {
+static size_t bitmap_size(uint32_t size, size_t NumSegments) {
   // We need at least one bit per segment
-  return MAX2<size_t>(size, nsegments) * 2;
+  return MAX2<size_t>(size, NumSegments) * 2;
 }
 
 ZLiveMap::ZLiveMap(uint32_t size)
@@ -46,7 +46,7 @@ ZLiveMap::ZLiveMap(uint32_t size)
     _live_bytes(0),
     _segment_live_bits(0),
     _segment_claim_bits(0),
-    _bitmap(bitmap_size(size, nsegments)),
+    _bitmap(bitmap_size(size, NumSegments)),
     _segment_shift(log2i_exact(segment_size())) {}
 
 void ZLiveMap::reset(ZGenerationId id) {
@@ -127,7 +127,7 @@ void ZLiveMap::reset_segment(BitMap::idx_t segment) {
 }
 
 void ZLiveMap::resize(uint32_t size) {
-  const size_t new_bitmap_size = bitmap_size(size, nsegments);
+  const size_t new_bitmap_size = bitmap_size(size, NumSegments);
   if (_bitmap.size() != new_bitmap_size) {
     _bitmap.reinitialize(new_bitmap_size, false /* clear */);
     _segment_shift = log2i_exact(segment_size());

View File

@@ -35,7 +35,7 @@ class ZLiveMap {
   friend class ZLiveMapTest;
 
 private:
-  static const size_t nsegments = 64;
+  static const size_t NumSegments = 64;
 
   volatile uint32_t _seqnum;
   volatile uint32_t _live_objects;

View File

@@ -52,19 +52,19 @@ inline size_t ZLiveMap::live_bytes() const {
 }
 
 inline const BitMapView ZLiveMap::segment_live_bits() const {
-  return BitMapView(const_cast<BitMap::bm_word_t*>(&_segment_live_bits), nsegments);
+  return BitMapView(const_cast<BitMap::bm_word_t*>(&_segment_live_bits), NumSegments);
 }
 
 inline const BitMapView ZLiveMap::segment_claim_bits() const {
-  return BitMapView(const_cast<BitMap::bm_word_t*>(&_segment_claim_bits), nsegments);
+  return BitMapView(const_cast<BitMap::bm_word_t*>(&_segment_claim_bits), NumSegments);
 }
 
 inline BitMapView ZLiveMap::segment_live_bits() {
-  return BitMapView(&_segment_live_bits, nsegments);
+  return BitMapView(&_segment_live_bits, NumSegments);
 }
 
 inline BitMapView ZLiveMap::segment_claim_bits() {
-  return BitMapView(&_segment_claim_bits, nsegments);
+  return BitMapView(&_segment_claim_bits, NumSegments);
 }
 
 inline bool ZLiveMap::is_segment_live(BitMap::idx_t segment) const {
@@ -80,15 +80,15 @@ inline bool ZLiveMap::claim_segment(BitMap::idx_t segment) {
 }
 
 inline BitMap::idx_t ZLiveMap::first_live_segment() const {
-  return segment_live_bits().find_first_set_bit(0, nsegments);
+  return segment_live_bits().find_first_set_bit(0, NumSegments);
 }
 
 inline BitMap::idx_t ZLiveMap::next_live_segment(BitMap::idx_t segment) const {
-  return segment_live_bits().find_first_set_bit(segment + 1, nsegments);
+  return segment_live_bits().find_first_set_bit(segment + 1, NumSegments);
 }
 
 inline BitMap::idx_t ZLiveMap::segment_size() const {
-  return _bitmap.size() / nsegments;
+  return _bitmap.size() / NumSegments;
 }
 
 inline BitMap::idx_t ZLiveMap::index_to_segment(BitMap::idx_t index) const {
@@ -167,7 +167,7 @@ inline void ZLiveMap::iterate(ZGenerationId id, Function function) {
     return true;
   };
 
-  for (BitMap::idx_t segment = first_live_segment(); segment < nsegments; segment = next_live_segment(segment)) {
+  for (BitMap::idx_t segment = first_live_segment(); segment < NumSegments; segment = next_live_segment(segment)) {
     // For each live segment
     iterate_segment(segment, live_only);
   }

View File

@@ -36,8 +36,8 @@ class ZReferenceProcessor : public ReferenceDiscoverer {
   friend class ZReferenceProcessorTask;
 
 private:
-  static const size_t reference_type_count = REF_PHANTOM + 1;
-  typedef size_t Counters[reference_type_count];
+  static const size_t ReferenceTypeCount = REF_PHANTOM + 1;
+  typedef size_t Counters[ReferenceTypeCount];
 
   ZWorkers* const _workers;
   ReferencePolicy* _soft_reference_policy;

View File

@@ -130,7 +130,7 @@ void ZStackWatermark::save_old_watermark() {
   } else {
     // Found none too replace - push it to the top
     _old_watermarks_newest++;
-    assert(_old_watermarks_newest < _old_watermarks_max, "Unexpected amount of old watermarks");
+    assert(_old_watermarks_newest < OldWatermarksMax, "Unexpected amount of old watermarks");
   }
 
   // Install old watermark

View File

@@ -59,8 +59,8 @@ class ZStackWatermark : public StackWatermark {
 private:
   // Stores old watermarks, which describes the
   // colors of the non-processed part of the stack.
-  const static int _old_watermarks_max = 3;
-  ZColorWatermark _old_watermarks[_old_watermarks_max];
+  static const int OldWatermarksMax = 3;
+  ZColorWatermark _old_watermarks[OldWatermarksMax];
   int _old_watermarks_newest;
 
   ThreadLocalAllocStats _stats;

View File

@@ -1019,7 +1019,7 @@ ZStatMutatorAllocRateStats ZStatMutatorAllocRate::stats() {
 // Stat thread
 //
 ZStat::ZStat()
-  : _metronome(sample_hz) {
+  : _metronome(SampleHz) {
   set_name("ZStat");
   create_and_start();
   ZStatMutatorAllocRate::initialize();
@@ -1098,11 +1098,11 @@ void ZStat::terminate() {
 //
 class ZStatTablePrinter {
 private:
-  static const size_t _buffer_size = 256;
+  static const size_t BufferSize = 256;
 
   const size_t _column0_width;
   const size_t _columnN_width;
-  char _buffer[_buffer_size];
+  char _buffer[BufferSize];
 
 public:
   class ZColumn {
@@ -1119,7 +1119,7 @@ public:
     }
 
     size_t print(size_t position, const char* fmt, va_list va) {
-      const int res = jio_vsnprintf(_buffer + position, _buffer_size - position, fmt, va);
+      const int res = jio_vsnprintf(_buffer + position, BufferSize - position, fmt, va);
       if (res < 0) {
         return 0;
       }

View File

@@ -384,7 +384,7 @@ public:
 //
 class ZStat : public ZThread {
 private:
-  static const uint64_t sample_hz = 1;
+  static const uint64_t SampleHz = 1;
 
   ZMetronome _metronome;

View File

@@ -55,7 +55,7 @@ ZStoreBarrierBuffer::ZStoreBarrierBuffer()
     _last_installed_color(),
     _base_pointer_lock(),
     _base_pointers(),
-    _current(ZBufferStoreBarriers ? _buffer_size_bytes : 0) {}
+    _current(ZBufferStoreBarriers ? BufferSizeBytes : 0) {}
 
 void ZStoreBarrierBuffer::initialize() {
   _last_processed_color = ZPointerStoreGoodMask;
@@ -63,11 +63,11 @@ void ZStoreBarrierBuffer::initialize() {
 }
 
 void ZStoreBarrierBuffer::clear() {
-  _current = _buffer_size_bytes;
+  _current = BufferSizeBytes;
 }
 
 bool ZStoreBarrierBuffer::is_empty() const {
-  return _current == _buffer_size_bytes;
+  return _current == BufferSizeBytes;
 }
 
 void ZStoreBarrierBuffer::install_base_pointers_inner() {
@@ -79,7 +79,7 @@ void ZStoreBarrierBuffer::install_base_pointers_inner() {
          (ZPointer::remap_bits(_last_processed_color) & ZPointerRemappedOldMask) == 0,
          "Should not have double bit errors");
 
-  for (size_t i = current(); i < _buffer_length; ++i) {
+  for (size_t i = current(); i < BufferLength; ++i) {
     const ZStoreBarrierEntry& entry = _buffer[i];
     volatile zpointer* const p = entry._p;
     const zaddress_unsafe p_unsafe = to_zaddress_unsafe((uintptr_t)p);
@@ -229,7 +229,7 @@ void ZStoreBarrierBuffer::on_new_phase() {
   // Install all base pointers for relocation
   install_base_pointers();
 
-  for (size_t i = current(); i < _buffer_length; ++i) {
+  for (size_t i = current(); i < BufferLength; ++i) {
     on_new_phase_relocate(i);
     on_new_phase_remember(i);
     on_new_phase_mark(i);
@@ -259,7 +259,7 @@ void ZStoreBarrierBuffer::on_error(outputStream* st) {
   st->print_cr(" _last_processed_color: " PTR_FORMAT, _last_processed_color);
   st->print_cr(" _last_installed_color: " PTR_FORMAT, _last_installed_color);
 
-  for (size_t i = current(); i < _buffer_length; ++i) {
+  for (size_t i = current(); i < BufferLength; ++i) {
     st->print_cr(" [%2zu]: base: " PTR_FORMAT " p: " PTR_FORMAT " prev: " PTR_FORMAT,
                  i,
                  untype(_base_pointers[i]),
@@ -276,7 +276,7 @@ void ZStoreBarrierBuffer::flush() {
   OnError on_error(this);
   VMErrorCallbackMark mark(&on_error);
 
-  for (size_t i = current(); i < _buffer_length; ++i) {
+  for (size_t i = current(); i < BufferLength; ++i) {
     const ZStoreBarrierEntry& entry = _buffer[i];
     const zaddress addr = ZBarrier::make_load_good(entry._prev);
     ZBarrier::mark_and_remember(entry._p, addr);
@@ -296,7 +296,7 @@ bool ZStoreBarrierBuffer::is_in(volatile zpointer* p) {
     const uintptr_t last_remap_bits = ZPointer::remap_bits(buffer->_last_processed_color) & ZPointerRemappedMask;
     const bool needs_remap = last_remap_bits != ZPointerRemapped;
 
-    for (size_t i = buffer->current(); i < _buffer_length; ++i) {
+    for (size_t i = buffer->current(); i < BufferLength; ++i) {
      const ZStoreBarrierEntry& entry = buffer->_buffer[i];
      volatile zpointer* entry_p = entry._p;

View File

@@ -42,10 +42,10 @@ class ZStoreBarrierBuffer : public CHeapObj<mtGC> {
   friend class ZVerify;
 
 private:
-  static const size_t _buffer_length = 32;
-  static const size_t _buffer_size_bytes = _buffer_length * sizeof(ZStoreBarrierEntry);
+  static const size_t BufferLength = 32;
+  static const size_t BufferSizeBytes = BufferLength * sizeof(ZStoreBarrierEntry);
 
-  ZStoreBarrierEntry _buffer[_buffer_length];
+  ZStoreBarrierEntry _buffer[BufferLength];
 
   // Color from previous phase this buffer was processed
   uintptr_t _last_processed_color;
@@ -54,7 +54,7 @@ private:
   uintptr_t _last_installed_color;
 
   ZLock _base_pointer_lock;
-  zaddress_unsafe _base_pointers[_buffer_length];
+  zaddress_unsafe _base_pointers[BufferLength];
 
   // sizeof(ZStoreBarrierEntry) scaled index growing downwards
   size_t _current;

View File

@@ -39,7 +39,7 @@ private:
   static uintptr_t _end;
 
 public:
-  static const size_t offset = 4 * K;
+  static const size_t Offset = 4 * K;
 
   static uintptr_t alloc(size_t size);
 };

View File

@@ -44,7 +44,7 @@ template <typename T> uintptr_t ZValueStorage<T>::_top = 0;
 
 template <typename S>
 uintptr_t ZValueStorage<S>::alloc(size_t size) {
-  assert(size <= offset, "Allocation too large");
+  assert(size <= Offset, "Allocation too large");
 
   // Allocate entry in existing memory block
   const uintptr_t addr = align_up(_top, S::alignment());
@@ -56,10 +56,10 @@ uintptr_t ZValueStorage<S>::alloc(size_t size) {
   }
 
   // Allocate new block of memory
-  const size_t block_alignment = offset;
-  const size_t block_size = offset * S::count();
+  const size_t block_alignment = Offset;
+  const size_t block_size = Offset * S::count();
   _top = ZUtils::alloc_aligned_unfreeable(block_alignment, block_size);
-  _end = _top + offset;
+  _end = _top + Offset;
 
   // Retry allocation
   return alloc(size);
@@ -119,7 +119,7 @@ inline uint32_t ZPerWorkerStorage::id() {
 template <typename S, typename T>
 inline uintptr_t ZValue<S, T>::value_addr(uint32_t value_id) const {
-  return _addr + (value_id * S::offset);
+  return _addr + (value_id * S::Offset);
 }
 
 template <typename S, typename T>

View File

@@ -589,7 +589,7 @@ void ZVerify::on_color_flip() {
 
   for (JavaThreadIteratorWithHandle jtiwh; JavaThread* const jt = jtiwh.next(); ) {
     const ZStoreBarrierBuffer* const buffer = ZThreadLocalData::store_barrier_buffer(jt);
-    for (size_t i = buffer->current(); i < ZStoreBarrierBuffer::_buffer_length; ++i) {
+    for (size_t i = buffer->current(); i < ZStoreBarrierBuffer::BufferLength; ++i) {
      volatile zpointer* const p = buffer->_buffer[i]._p;
      bool created = false;
      z_verify_store_barrier_buffer_table->put_if_absent(p, true, &created);