8277372: Add getters for BOT and card table members

Reviewed-by: tschatzl, sjohanss, ayang
Authored by Vishal Chand on 2021-12-06 15:28:20 +00:00, committed by Thomas Schatzl
parent 7c6f57fcb1
commit adf39522c1
40 changed files with 205 additions and 176 deletions
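The change itself is mechanical: the public static members of CardTable, BOTConstants and ObjectStartArray (card_shift, card_size, card_size_in_words and their BOT equivalents) become private, underscore-prefixed fields exposed through static getters, so every call site gains a pair of parentheses. A minimal standalone sketch of that pattern follows; it is not the real HotSpot class (cardTable.hpp carries many more members), and HeapWord here is a hypothetical stand-in for the VM's heap-word type.

#include <cassert>

typedef void* HeapWord; // hypothetical stand-in: one machine word

class CardTable {
  // Before this commit these were public "static uint card_shift;" etc.
  // Now they are private, underscore-prefixed statics...
  static unsigned _card_shift;
  static unsigned _card_size;
  static unsigned _card_size_in_words;

 public:
  // ...read through static getters, so callers change from
  // "CardTable::card_shift" to "CardTable::card_shift()".
  static unsigned card_shift()         { return _card_shift; }
  static unsigned card_size()          { return _card_size; }
  static unsigned card_size_in_words() { return _card_size_in_words; }

  // Rough shape of CardTable::initialize_card_size(): everything is derived
  // from the runtime-selected card size in bytes (GCCardSizeInBytes).
  static void initialize_card_size(unsigned size_in_bytes) {
    _card_size = size_in_bytes;
    _card_shift = 0;
    while ((1u << _card_shift) < size_in_bytes) {
      _card_shift++;                     // log2i_exact(_card_size) in the VM
    }
    _card_size_in_words = _card_size / sizeof(HeapWord);
  }
};

unsigned CardTable::_card_shift = 0;
unsigned CardTable::_card_size = 0;
unsigned CardTable::_card_size_in_words = 0;

int main() {
  CardTable::initialize_card_size(512);          // default GCCardSizeInBytes
  assert(CardTable::card_shift() == 9);          // 512 == 1 << 9
  assert(CardTable::card_size_in_words() == 64); // 8-byte HeapWords (64-bit)
  return 0;
}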

@ -222,7 +222,7 @@ void G1BarrierSetAssembler::g1_write_barrier_post(MacroAssembler* masm,
const Register card_addr = tmp;
__ lsr(card_addr, store_addr, CardTable::card_shift);
__ lsr(card_addr, store_addr, CardTable::card_shift());
// get the address of the card
__ load_byte_map_base(tmp2);
@ -444,7 +444,7 @@ void G1BarrierSetAssembler::generate_c1_post_barrier_runtime_stub(StubAssembler*
assert_different_registers(card_offset, byte_map_base, rscratch1);
__ load_parameter(0, card_offset);
__ lsr(card_offset, card_offset, CardTable::card_shift);
__ lsr(card_offset, card_offset, CardTable::card_shift());
__ load_byte_map_base(byte_map_base);
__ ldrb(rscratch1, Address(byte_map_base, card_offset));
__ cmpw(rscratch1, (int)G1CardTable::g1_young_card_val());

@ -38,7 +38,7 @@ void CardTableBarrierSetAssembler::store_check(MacroAssembler* masm, Register ob
BarrierSet* bs = BarrierSet::barrier_set();
assert(bs->kind() == BarrierSet::CardTableBarrierSet, "Wrong barrier set kind");
__ lsr(obj, obj, CardTable::card_shift);
__ lsr(obj, obj, CardTable::card_shift());
assert(CardTable::dirty_card_val() == 0, "must be");
@ -64,8 +64,8 @@ void CardTableBarrierSetAssembler::gen_write_ref_array_post_barrier(MacroAssembl
__ lea(end, Address(start, count, Address::lsl(LogBytesPerHeapOop))); // end = start + count << LogBytesPerHeapOop
__ sub(end, end, BytesPerHeapOop); // last element address to make inclusive
__ lsr(start, start, CardTable::card_shift);
__ lsr(end, end, CardTable::card_shift);
__ lsr(start, start, CardTable::card_shift());
__ lsr(end, end, CardTable::card_shift());
__ sub(count, end, start); // number of bytes to copy
__ load_byte_map_base(scratch);

@ -377,7 +377,7 @@ void LIRGenerator::CardTableBarrierSet_post_barrier_helper(LIR_Opr addr, LIR_Con
// Use unsigned type T_BOOLEAN here rather than (signed) T_BYTE since signed load
// byte instruction does not support the addressing mode we need.
LIR_Address* card_addr = new LIR_Address(tmp, addr, (LIR_Address::Scale) -CardTable::card_shift, 0, T_BOOLEAN);
LIR_Address* card_addr = new LIR_Address(tmp, addr, (LIR_Address::Scale) -CardTable::card_shift(), 0, T_BOOLEAN);
if (UseCondCardMark) {
LIR_Opr cur_value = new_register(T_INT);
__ move(card_addr, cur_value);

@ -218,7 +218,7 @@ void G1BarrierSetAssembler::g1_write_barrier_post(MacroAssembler* masm,
const Register card_addr = tmp1;
__ mov_address(tmp2, (address)ct->byte_map_base());
__ add(card_addr, tmp2, AsmOperand(store_addr, lsr, CardTable::card_shift));
__ add(card_addr, tmp2, AsmOperand(store_addr, lsr, CardTable::card_shift()));
__ ldrb(tmp2, Address(card_addr));
__ cmp(tmp2, (int)G1CardTable::g1_young_card_val());
@ -452,7 +452,7 @@ void G1BarrierSetAssembler::generate_c1_post_barrier_runtime_stub(StubAssembler*
// explicitly specify that 'cardtable' has a relocInfo::none
// type.
__ lea(r_card_base_1, cardtable);
__ add(r_card_addr_0, r_card_base_1, AsmOperand(r_obj_0, lsr, CardTable::card_shift));
__ add(r_card_addr_0, r_card_base_1, AsmOperand(r_obj_0, lsr, CardTable::card_shift()));
// first quick check without barrier
__ ldrb(r_tmp2, Address(r_card_addr_0));

@ -55,8 +55,8 @@ void CardTableBarrierSetAssembler::gen_write_ref_array_post_barrier(MacroAssembl
__ add_ptr_scaled_int32(count, addr, count, LogBytesPerHeapOop);
__ sub(count, count, BytesPerHeapOop); // last addr
__ logical_shift_right(addr, addr, CardTable::card_shift);
__ logical_shift_right(count, count, CardTable::card_shift);
__ logical_shift_right(addr, addr, CardTable::card_shift());
__ logical_shift_right(count, count, CardTable::card_shift());
__ sub(count, count, addr); // nb of cards
// warning: Rthread has not been preserved
@ -129,7 +129,7 @@ void CardTableBarrierSetAssembler::store_check_part2(MacroAssembler* masm, Regis
"Wrong barrier set kind");
assert(CardTable::dirty_card_val() == 0, "Dirty card value must be 0 due to optimizations.");
Address card_table_addr(card_table_base, obj, lsr, CardTable::card_shift);
Address card_table_addr(card_table_base, obj, lsr, CardTable::card_shift());
if (UseCondCardMark) {
Label already_dirty;

@ -245,7 +245,7 @@ void G1BarrierSetAssembler::g1_write_barrier_post(MacroAssembler* masm, Decorato
Register Rbase = tmp2;
__ load_const_optimized(Rbase, (address)(ct->card_table()->byte_map_base()), /*temp*/ tmp3);
__ srdi(Rcard_addr, store_addr, CardTable::card_shift);
__ srdi(Rcard_addr, store_addr, CardTable::card_shift());
// Get the address of the card.
__ lbzx(/*card value*/ tmp3, Rbase, Rcard_addr);
@ -516,7 +516,7 @@ void G1BarrierSetAssembler::generate_c1_post_barrier_runtime_stub(StubAssembler*
__ std(addr, -8, R1_SP);
__ std(tmp2, -16, R1_SP);
__ srdi(addr, R0, CardTable::card_shift); // Addr is passed in R0.
__ srdi(addr, R0, CardTable::card_shift()); // Addr is passed in R0.
__ load_const_optimized(/*cardtable*/ tmp2, byte_map_base, tmp);
__ add(addr, tmp2, addr);
__ lbz(tmp, 0, addr); // tmp := [addr + cardtable]

@ -54,8 +54,8 @@ void CardTableBarrierSetAssembler::gen_write_ref_array_post_barrier(MacroAssembl
__ addi(count, count, -BytesPerHeapOop);
__ add(count, addr, count);
// Use two shifts to clear out those low order two bits! (Cannot opt. into 1.)
__ srdi(addr, addr, CardTable::card_shift);
__ srdi(count, count, CardTable::card_shift);
__ srdi(addr, addr, CardTable::card_shift());
__ srdi(count, count, CardTable::card_shift());
__ subf(count, addr, count);
__ add_const_optimized(addr, addr, (address)ct->byte_map_base(), R0);
__ addi(count, count, 1);
@ -74,7 +74,7 @@ void CardTableBarrierSetAssembler::card_table_write(MacroAssembler* masm,
Register tmp, Register obj) {
assert_different_registers(obj, tmp, R0);
__ load_const_optimized(tmp, (address)byte_map_base, R0);
__ srdi(obj, obj, CardTable::card_shift);
__ srdi(obj, obj, CardTable::card_shift());
__ li(R0, CardTable::dirty_card_val());
__ stbx(R0, tmp, obj);
}

@ -305,7 +305,7 @@ void G1BarrierSetAssembler::g1_write_barrier_post(MacroAssembler* masm, Decorato
// calculate address of card
__ load_const_optimized(Rbase, (address)ct->card_table()->byte_map_base()); // Card table base.
__ z_srlg(Rcard_addr, Rstore_addr, CardTable::card_shift); // Index into card table.
__ z_srlg(Rcard_addr, Rstore_addr, CardTable::card_shift()); // Index into card table.
__ z_algr(Rcard_addr, Rbase); // Explicit calculation needed for cli.
Rbase = noreg; // end of lifetime
@ -548,7 +548,7 @@ void G1BarrierSetAssembler::generate_c1_post_barrier_runtime_stub(StubAssembler*
// Calculate address of card corresponding to the updated oop slot.
AddressLiteral rs(byte_map_base);
__ z_srlg(addr_card, addr_oop, CardTable::card_shift);
__ z_srlg(addr_card, addr_oop, CardTable::card_shift());
addr_oop = noreg; // dead now
__ load_const_optimized(cardtable, rs); // cardtable := <card table base>
__ z_agr(addr_card, cardtable); // addr_card := addr_oop>>card_shift + cardtable

@ -70,8 +70,8 @@ void CardTableBarrierSetAssembler::gen_write_ref_array_post_barrier(MacroAssembl
__ load_const_optimized(Z_R1, (address)ct->byte_map_base());
// count = (count>>shift) - (addr>>shift)
__ z_srlg(addr, addr, CardTable::card_shift);
__ z_srlg(count, count, CardTable::card_shift);
__ z_srlg(addr, addr, CardTable::card_shift());
__ z_srlg(count, count, CardTable::card_shift());
// Prefetch first elements of card table for update.
if (VM_Version::has_Prefetch()) {
@ -146,7 +146,7 @@ void CardTableBarrierSetAssembler::store_check(MacroAssembler* masm, Register st
assert_different_registers(store_addr, tmp);
__ z_srlg(store_addr, store_addr, CardTable::card_shift);
__ z_srlg(store_addr, store_addr, CardTable::card_shift());
__ load_absolute_address(tmp, (address)ct->byte_map_base());
__ z_agr(store_addr, tmp);
__ z_mvi(0, store_addr, CardTable::dirty_card_val());

@ -298,7 +298,7 @@ void G1BarrierSetAssembler::g1_write_barrier_post(MacroAssembler* masm,
const Register cardtable = tmp2;
__ movptr(card_addr, store_addr);
__ shrptr(card_addr, CardTable::card_shift);
__ shrptr(card_addr, CardTable::card_shift());
// Do not use ExternalAddress to load 'byte_map_base', since 'byte_map_base' is NOT
// a valid address and therefore is not properly handled by the relocation code.
__ movptr(cardtable, (intptr_t)ct->card_table()->byte_map_base());
@ -540,7 +540,7 @@ void G1BarrierSetAssembler::generate_c1_post_barrier_runtime_stub(StubAssembler*
const Register card_addr = rcx;
__ load_parameter(0, card_addr);
__ shrptr(card_addr, CardTable::card_shift);
__ shrptr(card_addr, CardTable::card_shift());
// Do not use ExternalAddress to load 'byte_map_base', since 'byte_map_base' is NOT
// a valid address and therefore is not properly handled by the relocation code.
__ movptr(cardtable, (intptr_t)ct->card_table()->byte_map_base());

@ -60,8 +60,8 @@ void CardTableBarrierSetAssembler::gen_write_ref_array_post_barrier(MacroAssembl
#ifdef _LP64
__ leaq(end, Address(addr, count, TIMES_OOP, 0)); // end == addr+count*oop_size
__ subptr(end, BytesPerHeapOop); // end - 1 to make inclusive
__ shrptr(addr, CardTable::card_shift);
__ shrptr(end, CardTable::card_shift);
__ shrptr(addr, CardTable::card_shift());
__ shrptr(end, CardTable::card_shift());
__ subptr(end, addr); // end --> cards count
__ mov64(tmp, disp);
@ -72,8 +72,8 @@ __ BIND(L_loop);
__ jcc(Assembler::greaterEqual, L_loop);
#else
__ lea(end, Address(addr, count, Address::times_ptr, -wordSize));
__ shrptr(addr, CardTable::card_shift);
__ shrptr(end, CardTable::card_shift);
__ shrptr(addr, CardTable::card_shift());
__ shrptr(end, CardTable::card_shift());
__ subptr(end, addr); // end --> count
__ BIND(L_loop);
Address cardtable(addr, count, Address::times_1, disp);
@ -93,7 +93,7 @@ void CardTableBarrierSetAssembler::store_check(MacroAssembler* masm, Register ob
CardTableBarrierSet* ctbs = barrier_set_cast<CardTableBarrierSet>(bs);
CardTable* ct = ctbs->card_table();
__ shrptr(obj, CardTable::card_shift);
__ shrptr(obj, CardTable::card_shift());
Address card_addr;

@ -442,7 +442,7 @@ void G1BarrierSetC2::post_barrier(GraphKit* kit,
Node* cast = __ CastPX(__ ctrl(), adr);
// Divide pointer by card size
Node* card_offset = __ URShiftX( cast, __ ConI(CardTable::card_shift) );
Node* card_offset = __ URShiftX( cast, __ ConI(CardTable::card_shift()) );
// Combine card table base and card offset
Node* card_adr = __ AddP(no_base, byte_map_base_node(kit), card_offset );

@ -371,7 +371,7 @@ HeapRegion* OldGCAllocRegion::release() {
// Determine how far we are from the next card boundary. If it is smaller than
// the minimum object size we can allocate into, expand into the next card.
HeapWord* top = cur->top();
HeapWord* aligned_top = align_up(top, BOTConstants::N_bytes);
HeapWord* aligned_top = align_up(top, BOTConstants::card_size());
size_t to_allocate_words = pointer_delta(aligned_top, top, HeapWordSize);

@ -135,7 +135,7 @@ void G1Arguments::initialize_card_set_configuration() {
uint region_size_log_mb = (uint)MAX2(HeapRegion::LogOfHRGrainBytes - LOG_M, 0);
if (FLAG_IS_DEFAULT(G1RemSetArrayOfCardsEntries)) {
uint max_cards_in_inline_ptr = G1CardSetConfiguration::max_cards_in_inline_ptr(HeapRegion::LogOfHRGrainBytes - CardTable::card_shift);
uint max_cards_in_inline_ptr = G1CardSetConfiguration::max_cards_in_inline_ptr(HeapRegion::LogOfHRGrainBytes - CardTable::card_shift());
FLAG_SET_ERGO(G1RemSetArrayOfCardsEntries, MAX2(max_cards_in_inline_ptr * 2,
G1RemSetArrayOfCardsEntriesBase * (1u << (region_size_log_mb + 1))));
}

@ -52,14 +52,14 @@ G1BlockOffsetTable::G1BlockOffsetTable(MemRegion heap, G1RegionToSpaceMapper* st
bool G1BlockOffsetTable::is_card_boundary(HeapWord* p) const {
assert(p >= _reserved.start(), "just checking");
size_t delta = pointer_delta(p, _reserved.start());
return (delta & right_n_bits((int)BOTConstants::LogN_words)) == (size_t)NoBits;
return (delta & right_n_bits((int)BOTConstants::log_card_size_in_words())) == (size_t)NoBits;
}
#ifdef ASSERT
void G1BlockOffsetTable::check_index(size_t index, const char* msg) const {
assert((index) < (_reserved.word_size() >> BOTConstants::LogN_words),
assert((index) < (_reserved.word_size() >> BOTConstants::log_card_size_in_words()),
"%s - index: " SIZE_FORMAT ", _vs.committed_size: " SIZE_FORMAT,
msg, (index), (_reserved.word_size() >> BOTConstants::LogN_words));
msg, (index), (_reserved.word_size() >> BOTConstants::log_card_size_in_words()));
assert(G1CollectedHeap::heap()->is_in(address_for_index_raw(index)),
"Index " SIZE_FORMAT " corresponding to " PTR_FORMAT
" (%u) is not in committed area.",
@ -134,7 +134,7 @@ void G1BlockOffsetTablePart:: set_remainder_to_point_to_start(HeapWord* start, H
size_t start_card = _bot->index_for(start);
size_t end_card = _bot->index_for(end-1);
assert(start ==_bot->address_for_index(start_card), "Precondition");
assert(end ==_bot->address_for_index(end_card)+BOTConstants::N_words, "Precondition");
assert(end ==_bot->address_for_index(end_card)+BOTConstants::card_size_in_words(), "Precondition");
set_remainder_to_point_to_start_incl(start_card, end_card); // closed interval
}
@ -144,7 +144,7 @@ void G1BlockOffsetTablePart:: set_remainder_to_point_to_start(HeapWord* start, H
void G1BlockOffsetTablePart::set_remainder_to_point_to_start_incl(size_t start_card, size_t end_card) {
assert(start_card <= end_card, "precondition");
assert(start_card > _bot->index_for(_hr->bottom()), "Cannot be first card");
assert(_bot->offset_array(start_card-1) <= BOTConstants::N_words,
assert(_bot->offset_array(start_card-1) <= BOTConstants::card_size_in_words(),
"Offset card has an unexpected value");
size_t start_card_for_region = start_card;
u_char offset = max_jubyte;
@ -153,7 +153,7 @@ void G1BlockOffsetTablePart::set_remainder_to_point_to_start_incl(size_t start_c
// so that the reach ends in this region and not at the start
// of the next.
size_t reach = start_card - 1 + (BOTConstants::power_to_cards_back(i+1) - 1);
offset = BOTConstants::N_words + i;
offset = BOTConstants::card_size_in_words() + i;
if (reach >= end_card) {
_bot->set_offset_array(start_card_for_region, end_card, offset);
start_card_for_region = reach + 1;
@ -174,16 +174,16 @@ void G1BlockOffsetTablePart::check_all_cards(size_t start_card, size_t end_card)
if (end_card < start_card) {
return;
}
guarantee(_bot->offset_array(start_card) == BOTConstants::N_words, "Wrong value in second card");
guarantee(_bot->offset_array(start_card) == BOTConstants::card_size_in_words(), "Wrong value in second card");
for (size_t c = start_card + 1; c <= end_card; c++ /* yeah! */) {
u_char entry = _bot->offset_array(c);
if (c - start_card > BOTConstants::power_to_cards_back(1)) {
guarantee(entry > BOTConstants::N_words,
guarantee(entry > BOTConstants::card_size_in_words(),
"Should be in logarithmic region - "
"entry: %u, "
"_array->offset_array(c): %u, "
"N_words: %u",
(uint)entry, (uint)_bot->offset_array(c), BOTConstants::N_words);
(uint)entry, (uint)_bot->offset_array(c), BOTConstants::card_size_in_words());
}
size_t backskip = BOTConstants::entry_to_cards_back(entry);
size_t landing_card = c - backskip;
@ -196,10 +196,10 @@ void G1BlockOffsetTablePart::check_all_cards(size_t start_card, size_t end_card)
} else {
guarantee(landing_card == start_card - 1, "Tautology");
// Note that N_words is the maximum offset value
guarantee(_bot->offset_array(landing_card) <= BOTConstants::N_words,
guarantee(_bot->offset_array(landing_card) <= BOTConstants::card_size_in_words(),
"landing card offset: %u, "
"N_words: %u",
(uint)_bot->offset_array(landing_card), (uint)BOTConstants::N_words);
(uint)_bot->offset_array(landing_card), (uint)BOTConstants::card_size_in_words());
}
}
}
@ -224,13 +224,13 @@ void G1BlockOffsetTablePart::alloc_block_work(HeapWord** threshold_, HeapWord* b
"phantom block");
assert(blk_end > threshold, "should be past threshold");
assert(blk_start <= threshold, "blk_start should be at or before threshold");
assert(pointer_delta(threshold, blk_start) <= BOTConstants::N_words,
assert(pointer_delta(threshold, blk_start) <= BOTConstants::card_size_in_words(),
"offset should be <= BlockOffsetSharedArray::N");
assert(G1CollectedHeap::heap()->is_in_reserved(blk_start),
"reference must be into the heap");
assert(G1CollectedHeap::heap()->is_in_reserved(blk_end-1),
"limit must be within the heap");
assert(threshold == _bot->_reserved.start() + index*BOTConstants::N_words,
assert(threshold == _bot->_reserved.start() + index*BOTConstants::card_size_in_words(),
"index must agree with threshold");
DEBUG_ONLY(size_t orig_index = index;)
@ -250,14 +250,14 @@ void G1BlockOffsetTablePart::alloc_block_work(HeapWord** threshold_, HeapWord* b
HeapWord* rem_st = _bot->address_for_index(index + 1);
// Calculate rem_end this way because end_index
// may be the last valid index in the covered region.
HeapWord* rem_end = _bot->address_for_index(end_index) + BOTConstants::N_words;
HeapWord* rem_end = _bot->address_for_index(end_index) + BOTConstants::card_size_in_words();
set_remainder_to_point_to_start(rem_st, rem_end);
}
index = end_index + 1;
// Calculate threshold_ this way because end_index
// may be the last valid index in the covered region.
threshold = _bot->address_for_index(end_index) + BOTConstants::N_words;
threshold = _bot->address_for_index(end_index) + BOTConstants::card_size_in_words();
assert(threshold >= blk_end, "Incorrect offset threshold");
*threshold_ = threshold;
@ -268,7 +268,7 @@ void G1BlockOffsetTablePart::alloc_block_work(HeapWord** threshold_, HeapWord* b
size_t start_index = _bot->index_for(blk_start);
HeapWord* boundary = _bot->address_for_index(start_index);
assert((_bot->offset_array(orig_index) == 0 && blk_start == boundary) ||
(_bot->offset_array(orig_index) > 0 && _bot->offset_array(orig_index) <= BOTConstants::N_words),
(_bot->offset_array(orig_index) > 0 && _bot->offset_array(orig_index) <= BOTConstants::card_size_in_words()),
"offset array should have been set - "
"orig_index offset: %u, "
"blk_start: " PTR_FORMAT ", "
@ -278,12 +278,12 @@ void G1BlockOffsetTablePart::alloc_block_work(HeapWord** threshold_, HeapWord* b
for (size_t j = orig_index + 1; j <= end_index; j++) {
assert(_bot->offset_array(j) > 0 &&
_bot->offset_array(j) <=
(u_char) (BOTConstants::N_words+BOTConstants::N_powers-1),
(u_char) (BOTConstants::card_size_in_words()+BOTConstants::N_powers-1),
"offset array should have been set - "
"%u not > 0 OR %u not <= %u",
(uint) _bot->offset_array(j),
(uint) _bot->offset_array(j),
(uint) (BOTConstants::N_words+BOTConstants::N_powers-1));
(uint) (BOTConstants::card_size_in_words() + BOTConstants::N_powers - 1));
}
#endif
}
@ -297,7 +297,7 @@ void G1BlockOffsetTablePart::verify() const {
for (size_t current_card = start_card; current_card < end_card; current_card++) {
u_char entry = _bot->offset_array(current_card);
if (entry < BOTConstants::N_words) {
if (entry < BOTConstants::card_size_in_words()) {
// The entry should point to an object before the current card. Verify that
// it is possible to walk from that object in to the current card by just
// iterating over the objects following it.
@ -361,7 +361,7 @@ void G1BlockOffsetTablePart::zero_bottom_entry_raw() {
}
void G1BlockOffsetTablePart::initialize_threshold() {
_next_offset_threshold = _hr->bottom() + BOTConstants::N_words;
_next_offset_threshold = _hr->bottom() + BOTConstants::card_size_in_words();
}
void G1BlockOffsetTablePart::set_for_starts_humongous(HeapWord* obj_top, size_t fill_size) {

@ -55,9 +55,9 @@ private:
volatile u_char* _offset_array; // byte array keeping backwards offsets
void check_offset(size_t offset, const char* msg) const {
assert(offset <= BOTConstants::N_words,
assert(offset <= BOTConstants::card_size_in_words(),
"%s - offset: " SIZE_FORMAT ", N_words: %u",
msg, offset, BOTConstants::N_words);
msg, offset, BOTConstants::card_size_in_words());
}
// Bounds checking accessors:
@ -80,13 +80,13 @@ public:
// Return the number of slots needed for an offset array
// that covers mem_region_words words.
static size_t compute_size(size_t mem_region_words) {
size_t number_of_slots = (mem_region_words / BOTConstants::N_words);
size_t number_of_slots = (mem_region_words / BOTConstants::card_size_in_words());
return ReservedSpace::allocation_align_size_up(number_of_slots);
}
// Returns how many bytes of the heap a single byte of the BOT corresponds to.
static size_t heap_map_factor() {
return BOTConstants::N_bytes;
return BOTConstants::card_size();
}
// Initialize the Block Offset Table to cover the memory region passed
@ -102,7 +102,7 @@ public:
inline HeapWord* address_for_index(size_t index) const;
// Variant of address_for_index that does not check the index for validity.
inline HeapWord* address_for_index_raw(size_t index) const {
return _reserved.start() + (index << BOTConstants::LogN_words);
return _reserved.start() + (index << BOTConstants::log_card_size_in_words());
}
};

@ -41,7 +41,7 @@ inline HeapWord* G1BlockOffsetTablePart::threshold_for_addr(const void* addr) {
}
// Calculate next threshold.
HeapWord* threshold = card_boundary + BOTConstants::N_words;
HeapWord* threshold = card_boundary + BOTConstants::card_size_in_words();
return threshold;
}
@ -84,7 +84,7 @@ void G1BlockOffsetTable::set_offset_array(size_t left, size_t right, u_char offs
// Variant of index_for that does not check the index for validity.
inline size_t G1BlockOffsetTable::index_for_raw(const void* p) const {
return pointer_delta((char*)p, _reserved.start(), sizeof(char)) >> BOTConstants::LogN;
return pointer_delta((char*)p, _reserved.start(), sizeof(char)) >> BOTConstants::log_card_size();
}
inline size_t G1BlockOffsetTable::index_for(const void* p) const {
@ -120,14 +120,14 @@ inline HeapWord* G1BlockOffsetTablePart::block_at_or_preceding(const void* addr)
size_t index = _bot->index_for(addr);
uint offset = _bot->offset_array(index); // Extend u_char to uint.
while (offset >= BOTConstants::N_words) {
while (offset >= BOTConstants::card_size_in_words()) {
// The excess of the offset from N_words indicates a power of Base
// to go back by.
size_t n_cards_back = BOTConstants::entry_to_cards_back(offset);
index -= n_cards_back;
offset = _bot->offset_array(index);
}
assert(offset < BOTConstants::N_words, "offset too large");
assert(offset < BOTConstants::card_size_in_words(), "offset too large");
HeapWord* q = _bot->address_for_index(index);
return q - offset;

@ -126,7 +126,7 @@ void G1CardCounts::clear_range(MemRegion mr) {
HeapWord* start_addr = _ct->addr_for(from_card_ptr);
assert(start_addr == mr.start(), "MemRegion start must be aligned to a card.");
HeapWord* last_addr = _ct->addr_for(last_card_ptr);
assert((last_addr + G1CardTable::card_size_in_words) == mr.end(), "MemRegion end must be aligned to a card.");
assert((last_addr + G1CardTable::card_size_in_words()) == mr.end(), "MemRegion end must be aligned to a card.");
#endif // ASSERT
// Clear the counts for the (exclusive) card range.

@ -62,7 +62,7 @@ void G1CardTable::initialize(G1RegionToSpaceMapper* mapper) {
_covered[0] = _whole_heap;
_byte_map = (CardValue*) mapper->reserved().start();
_byte_map_base = _byte_map - (uintptr_t(low_bound) >> card_shift);
_byte_map_base = _byte_map - (uintptr_t(low_bound) >> _card_shift);
assert(byte_for(low_bound) == &_byte_map[0], "Checking start of map");
assert(byte_for(high_bound-1) <= &_byte_map[_last_valid_index], "Checking end of map");

@ -110,12 +110,12 @@ public:
inline uint region_idx_for(CardValue* p);
static size_t compute_size(size_t mem_region_size_in_words) {
size_t number_of_slots = (mem_region_size_in_words / card_size_in_words);
size_t number_of_slots = (mem_region_size_in_words / _card_size_in_words);
return ReservedSpace::allocation_align_size_up(number_of_slots);
}
// Returns how many bytes of the heap a single byte of the Card Table corresponds to.
static size_t heap_map_factor() { return card_size; }
static size_t heap_map_factor() { return _card_size; }
void initialize() {}
void initialize(G1RegionToSpaceMapper* mapper);

@ -31,7 +31,7 @@
inline uint G1CardTable::region_idx_for(CardValue* p) {
size_t const card_idx = pointer_delta(p, _byte_map, sizeof(CardValue));
return (uint)(card_idx >> (HeapRegion::LogOfHRGrainBytes - card_shift));
return (uint)(card_idx >> (HeapRegion::LogOfHRGrainBytes - _card_shift));
}
inline bool G1CardTable::mark_clean_as_dirty(CardValue* card) {

@ -1660,7 +1660,7 @@ jint G1CollectedHeap::initialize() {
// The G1FromCardCache reserves card with value 0 as "invalid", so the heap must not
// start within the first card.
guarantee((uintptr_t)(heap_rs.base()) >= G1CardTable::card_size, "Java heap must not start within the first card.");
guarantee((uintptr_t)(heap_rs.base()) >= G1CardTable::card_size(), "Java heap must not start within the first card.");
G1FromCardCache::initialize(max_reserved_regions());
// Also create a G1 rem set.
_rem_set = new G1RemSet(this, _card_table, _hot_card_cache);

@ -808,7 +808,7 @@ class G1ScanHRForRegionClosure : public HeapRegionClosure {
return;
}
HeapWord* scan_end = MIN2(card_start + (num_cards << BOTConstants::LogN_words), top);
HeapWord* scan_end = MIN2(card_start + (num_cards << BOTConstants::log_card_size_in_words()), top);
if (_scanned_to >= scan_end) {
return;
}
@ -1674,7 +1674,7 @@ void G1RemSet::refine_card_concurrently(CardValue* const card_ptr,
// Don't use addr_for(card_ptr + 1) which can ask for
// a card beyond the heap.
HeapWord* end = start + G1CardTable::card_size_in_words;
HeapWord* end = start + G1CardTable::card_size_in_words();
MemRegion dirty_region(start, MIN2(scan_limit, end));
assert(!dirty_region.is_empty(), "sanity");

@ -91,7 +91,7 @@ void HeapRegion::setup_heap_region_size(size_t max_heap_size) {
GrainWords = GrainBytes >> LogHeapWordSize;
guarantee(CardsPerRegion == 0, "we should only set it once");
CardsPerRegion = GrainBytes >> G1CardTable::card_shift;
CardsPerRegion = GrainBytes >> G1CardTable::card_shift();
LogCardsPerRegion = log2i(CardsPerRegion);
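(A worked example for the CardsPerRegion computation above: assuming a 1 MiB region, so LogOfHRGrainBytes == 20, and the default 512-byte card, so card_shift() == 9, this gives CardsPerRegion = 2^20 >> 9 = 2048 and LogCardsPerRegion = 11.)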

@ -60,7 +60,7 @@ void HeapRegionRemSet::initialize(MemRegion reserved) {
vm_exit_during_initialization("Can not represent all cards in a card region within uint.");
}
_split_card_shift = CardBitsWithinCardRegion + CardTable::card_shift;
_split_card_shift = CardBitsWithinCardRegion + CardTable::card_shift();
_split_card_mask = ((size_t)1 << _split_card_shift) - 1;
// Check if the card region/region within cards combination can cover the heap.

@ -118,7 +118,7 @@ inline void HeapRegionRemSet::iterate_for_merge(CardOrRangeVisitor& cl) {
void HeapRegionRemSet::split_card(OopOrNarrowOopStar from, uint& card_region, uint& card_within_region) const {
size_t offset = pointer_delta(from, _heap_base_address, 1);
card_region = (uint)(offset >> _split_card_shift);
card_within_region = (uint)((offset & _split_card_mask) >> CardTable::card_shift);
card_within_region = (uint)((offset & _split_card_mask) >> CardTable::card_shift());
assert(card_within_region < ((uint)1 << G1CardSetContainer::LogCardsPerRegionLimit), "must be");
}
@ -129,7 +129,7 @@ void HeapRegionRemSet::add_reference(OopOrNarrowOopStar from, uint tid) {
}
uint cur_idx = _hr->hrm_index();
uintptr_t from_card = uintptr_t(from) >> CardTable::card_shift;
uintptr_t from_card = uintptr_t(from) >> CardTable::card_shift();
if (G1FromCardCache::contains_or_replace(tid, cur_idx, from_card)) {
// We can't check whether the card is in the remembered set - the card container

@ -31,26 +31,26 @@
#include "services/memTracker.hpp"
#include "utilities/align.hpp"
uint ObjectStartArray::block_shift = 0;
uint ObjectStartArray::block_size = 0;
uint ObjectStartArray::block_size_in_words = 0;
uint ObjectStartArray::_card_shift = 0;
uint ObjectStartArray::_card_size = 0;
uint ObjectStartArray::_card_size_in_words = 0;
void ObjectStartArray::initialize_block_size(uint card_shift) {
block_shift = card_shift;
block_size = 1 << block_shift;
block_size_in_words = block_size / sizeof(HeapWord);
_card_shift = card_shift;
_card_size = 1 << _card_shift;
_card_size_in_words = _card_size / sizeof(HeapWord);
}
void ObjectStartArray::initialize(MemRegion reserved_region) {
// We're based on the assumption that we use the same
// size blocks as the card table.
assert((int)block_size == (int)CardTable::card_size, "Sanity");
assert(block_size <= MaxBlockSize, "block_size must be less than or equal to " UINT32_FORMAT, MaxBlockSize);
assert(_card_size == CardTable::card_size(), "Sanity");
assert(_card_size <= MaxBlockSize, "block_size must be less than or equal to " UINT32_FORMAT, MaxBlockSize);
// Calculate how much space must be reserved
_reserved_region = reserved_region;
size_t bytes_to_reserve = reserved_region.word_size() / block_size_in_words;
size_t bytes_to_reserve = reserved_region.word_size() / _card_size_in_words;
assert(bytes_to_reserve > 0, "Sanity");
bytes_to_reserve =
@ -76,7 +76,7 @@ void ObjectStartArray::initialize(MemRegion reserved_region) {
MemTracker::record_virtual_memory_type((address)_raw_base, mtGC);
_offset_base = _raw_base - (size_t(reserved_region.start()) >> block_shift);
_offset_base = _raw_base - (size_t(reserved_region.start()) >> _card_shift);
_covered_region.set_start(reserved_region.start());
_covered_region.set_word_size(0);
@ -91,10 +91,10 @@ void ObjectStartArray::set_covered_region(MemRegion mr) {
HeapWord* low_bound = mr.start();
HeapWord* high_bound = mr.end();
assert((uintptr_t(low_bound) & (block_size - 1)) == 0, "heap must start at block boundary");
assert((uintptr_t(high_bound) & (block_size - 1)) == 0, "heap must end at block boundary");
assert((uintptr_t(low_bound) & (_card_size - 1)) == 0, "heap must start at block boundary");
assert((uintptr_t(high_bound) & (_card_size - 1)) == 0, "heap must end at block boundary");
size_t requested_blocks_size_in_bytes = mr.word_size() / block_size_in_words;
size_t requested_blocks_size_in_bytes = mr.word_size() / _card_size_in_words;
// Only commit memory in page sized chunks
requested_blocks_size_in_bytes =

@ -46,16 +46,16 @@ class ObjectStartArray : public CHeapObj<mtGC> {
jbyte* _raw_base;
jbyte* _offset_base;
static uint _card_shift;
static uint _card_size;
static uint _card_size_in_words;
public:
enum BlockValueConstants {
clean_block = -1
};
static uint block_shift;
static uint block_size;
static uint block_size_in_words;
// Maximum size an offset table entry can cover. This maximum is derived from that
// we need an extra bit for possible offsets in the byte for backskip values, leaving 2^7 possible offsets.
// Minimum object alignment is 8 bytes (2^3), so we can at most represent 2^10 offsets within a BOT value.
@ -64,13 +64,24 @@ class ObjectStartArray : public CHeapObj<mtGC> {
// Initialize block size based on card size
static void initialize_block_size(uint card_shift);
static uint card_shift() {
return _card_shift;
}
static uint card_size() {
return _card_size;
}
static uint card_size_in_words() {
return _card_size_in_words;
}
protected:
// Mapping from address to object start array entry
jbyte* block_for_addr(void* p) const {
assert(_covered_region.contains(p),
"out of bounds access to object start array");
jbyte* result = &_offset_base[uintptr_t(p) >> block_shift];
jbyte* result = &_offset_base[uintptr_t(p) >> _card_shift];
assert(_blocks_region.contains(result),
"out of bounds result in byte_for");
return result;
@ -81,7 +92,7 @@ class ObjectStartArray : public CHeapObj<mtGC> {
assert(_blocks_region.contains(p),
"out of bounds access to object start array");
size_t delta = pointer_delta(p, _offset_base, sizeof(jbyte));
HeapWord* result = (HeapWord*) (delta << block_shift);
HeapWord* result = (HeapWord*) (delta << _card_shift);
assert(_covered_region.contains(result),
"out of bounds accessor from card marking array");
return result;
@ -104,7 +115,7 @@ class ObjectStartArray : public CHeapObj<mtGC> {
}
size_t delta = pointer_delta(p, _offset_base, sizeof(jbyte));
HeapWord* result = (HeapWord*) (delta << block_shift);
HeapWord* result = (HeapWord*) (delta << _card_shift);
result += *p;
assert(_covered_region.contains(result),

@ -152,7 +152,7 @@ size_t PSOldGen::num_iterable_blocks() const {
void PSOldGen::object_iterate_block(ObjectClosure* cl, size_t block_index) {
size_t block_word_size = IterateBlockSize / HeapWordSize;
assert((block_word_size % (ObjectStartArray::block_size)) == 0,
assert((block_word_size % (ObjectStartArray::card_size())) == 0,
"Block size not a multiple of start_array block");
MutableSpace *space = object_space();

@ -33,16 +33,16 @@
#include "runtime/java.hpp"
#include "services/memTracker.hpp"
uint BOTConstants::LogN = 0;
uint BOTConstants::LogN_words = 0;
uint BOTConstants::N_bytes = 0;
uint BOTConstants::N_words = 0;
uint BOTConstants::_log_card_size = 0;
uint BOTConstants::_log_card_size_in_words = 0;
uint BOTConstants::_card_size = 0;
uint BOTConstants::_card_size_in_words = 0;
void BOTConstants::initialize_bot_size(uint card_shift) {
LogN = card_shift;
LogN_words = LogN - LogHeapWordSize;
N_bytes = 1 << LogN;
N_words = 1 << LogN_words;
_log_card_size = card_shift;
_log_card_size_in_words = _log_card_size - LogHeapWordSize;
_card_size = 1 << _log_card_size;
_card_size_in_words = 1 << _log_card_size_in_words;
}
//////////////////////////////////////////////////////////////////////
@ -99,7 +99,7 @@ void BlockOffsetSharedArray::resize(size_t new_word_size) {
bool BlockOffsetSharedArray::is_card_boundary(HeapWord* p) const {
assert(p >= _reserved.start(), "just checking");
size_t delta = pointer_delta(p, _reserved.start());
return (delta & right_n_bits((int)BOTConstants::LogN_words)) == (size_t)NoBits;
return (delta & right_n_bits((int)BOTConstants::log_card_size_in_words())) == (size_t)NoBits;
}
@ -116,7 +116,7 @@ BlockOffsetArray::BlockOffsetArray(BlockOffsetSharedArray* array,
set_init_to_zero(init_to_zero_);
if (!init_to_zero_) {
// initialize cards to point back to mr.start()
set_remainder_to_point_to_start(mr.start() + BOTConstants::N_words, mr.end());
set_remainder_to_point_to_start(mr.start() + BOTConstants::card_size_in_words(), mr.end());
_array->set_offset_array(0, 0); // set first card to 0
}
}
@ -172,7 +172,7 @@ set_remainder_to_point_to_start(HeapWord* start, HeapWord* end, bool reducing) {
size_t start_card = _array->index_for(start);
size_t end_card = _array->index_for(end-1);
assert(start ==_array->address_for_index(start_card), "Precondition");
assert(end ==_array->address_for_index(end_card)+BOTConstants::N_words, "Precondition");
assert(end ==_array->address_for_index(end_card)+BOTConstants::card_size_in_words(), "Precondition");
set_remainder_to_point_to_start_incl(start_card, end_card, reducing); // closed interval
}
@ -188,7 +188,7 @@ BlockOffsetArray::set_remainder_to_point_to_start_incl(size_t start_card, size_t
return;
}
assert(start_card > _array->index_for(_bottom), "Cannot be first card");
assert(_array->offset_array(start_card-1) <= BOTConstants::N_words,
assert(_array->offset_array(start_card-1) <= BOTConstants::card_size_in_words(),
"Offset card has an unexpected value");
size_t start_card_for_region = start_card;
u_char offset = max_jubyte;
@ -197,7 +197,7 @@ BlockOffsetArray::set_remainder_to_point_to_start_incl(size_t start_card, size_t
// so that the reach ends in this region and not at the start
// of the next.
size_t reach = start_card - 1 + (BOTConstants::power_to_cards_back(i+1) - 1);
offset = BOTConstants::N_words + i;
offset = BOTConstants::card_size_in_words() + i;
if (reach >= end_card) {
_array->set_offset_array(start_card_for_region, end_card, offset, reducing);
start_card_for_region = reach + 1;
@ -218,13 +218,13 @@ void BlockOffsetArray::check_all_cards(size_t start_card, size_t end_card) const
if (end_card < start_card) {
return;
}
guarantee(_array->offset_array(start_card) == BOTConstants::N_words, "Wrong value in second card");
u_char last_entry = BOTConstants::N_words;
guarantee(_array->offset_array(start_card) == BOTConstants::card_size_in_words(), "Wrong value in second card");
u_char last_entry = BOTConstants::card_size_in_words();
for (size_t c = start_card + 1; c <= end_card; c++ /* yeah! */) {
u_char entry = _array->offset_array(c);
guarantee(entry >= last_entry, "Monotonicity");
if (c - start_card > BOTConstants::power_to_cards_back(1)) {
guarantee(entry > BOTConstants::N_words, "Should be in logarithmic region");
guarantee(entry > BOTConstants::card_size_in_words(), "Should be in logarithmic region");
}
size_t backskip = BOTConstants::entry_to_cards_back(entry);
size_t landing_card = c - backskip;
@ -234,7 +234,7 @@ void BlockOffsetArray::check_all_cards(size_t start_card, size_t end_card) const
} else {
guarantee(landing_card == (start_card - 1), "Tautology");
// Note that N_words is the maximum offset value
guarantee(_array->offset_array(landing_card) <= BOTConstants::N_words, "Offset value");
guarantee(_array->offset_array(landing_card) <= BOTConstants::card_size_in_words(), "Offset value");
}
last_entry = entry; // remember for monotonicity test
}
@ -266,7 +266,7 @@ BlockOffsetArray::do_block_internal(HeapWord* blk_start,
uintptr_t start_ui = (uintptr_t)blk_start;
// Calculate the last card boundary preceding end of blk
intptr_t boundary_before_end = (intptr_t)end_ui;
clear_bits(boundary_before_end, right_n_bits((int)BOTConstants::LogN));
clear_bits(boundary_before_end, right_n_bits((int)BOTConstants::log_card_size()));
if (start_ui <= (uintptr_t)boundary_before_end) {
// blk starts at or crosses a boundary
// Calculate index of card on which blk begins
@ -279,7 +279,7 @@ BlockOffsetArray::do_block_internal(HeapWord* blk_start,
if (blk_start != boundary) {
// blk starts strictly after boundary
// adjust card boundary and start_index forward to next card
boundary += BOTConstants::N_words;
boundary += BOTConstants::card_size_in_words();
start_index++;
}
assert(start_index <= end_index, "monotonicity of index_for()");
@ -296,8 +296,8 @@ BlockOffsetArray::do_block_internal(HeapWord* blk_start,
// We have finished marking the "offset card". We need to now
// mark the subsequent cards that this blk spans.
if (start_index < end_index) {
HeapWord* rem_st = _array->address_for_index(start_index) + BOTConstants::N_words;
HeapWord* rem_end = _array->address_for_index(end_index) + BOTConstants::N_words;
HeapWord* rem_st = _array->address_for_index(start_index) + BOTConstants::card_size_in_words();
HeapWord* rem_end = _array->address_for_index(end_index) + BOTConstants::card_size_in_words();
set_remainder_to_point_to_start(rem_st, rem_end, reducing);
}
break;
@ -380,22 +380,22 @@ HeapWord* BlockOffsetArrayContigSpace::block_start_unsafe(const void* addr) cons
HeapWord* q = _array->address_for_index(index);
uint offset = _array->offset_array(index); // Extend u_char to uint.
while (offset > BOTConstants::N_words) {
while (offset > BOTConstants::card_size_in_words()) {
// The excess of the offset from N_words indicates a power of Base
// to go back by.
size_t n_cards_back = BOTConstants::entry_to_cards_back(offset);
q -= (BOTConstants::N_words * n_cards_back);
q -= (BOTConstants::card_size_in_words() * n_cards_back);
assert(q >= _sp->bottom(), "Went below bottom!");
index -= n_cards_back;
offset = _array->offset_array(index);
}
while (offset == BOTConstants::N_words) {
while (offset == BOTConstants::card_size_in_words()) {
assert(q >= _sp->bottom(), "Went below bottom!");
q -= BOTConstants::N_words;
q -= BOTConstants::card_size_in_words();
index--;
offset = _array->offset_array(index);
}
assert(offset < BOTConstants::N_words, "offset too large");
assert(offset < BOTConstants::card_size_in_words(), "offset too large");
q -= offset;
HeapWord* n = q;
@ -428,14 +428,14 @@ void BlockOffsetArrayContigSpace::alloc_block_work(HeapWord* blk_start,
"should be past threshold");
assert(blk_start <= _next_offset_threshold,
"blk_start should be at or before threshold");
assert(pointer_delta(_next_offset_threshold, blk_start) <= BOTConstants::N_words,
assert(pointer_delta(_next_offset_threshold, blk_start) <= BOTConstants::card_size_in_words(),
"offset should be <= BlockOffsetSharedArray::N");
assert(_sp->is_in_reserved(blk_start),
"reference must be into the space");
assert(_sp->is_in_reserved(blk_end-1),
"limit must be within the space");
assert(_next_offset_threshold ==
_array->_reserved.start() + _next_offset_index*BOTConstants::N_words,
_array->_reserved.start() + _next_offset_index*BOTConstants::card_size_in_words(),
"index must agree with threshold");
debug_only(size_t orig_next_offset_index = _next_offset_index;)
@ -457,7 +457,7 @@ void BlockOffsetArrayContigSpace::alloc_block_work(HeapWord* blk_start,
HeapWord* rem_st = _array->address_for_index(_next_offset_index + 1);
// Calculate rem_end this way because end_index
// may be the last valid index in the covered region.
HeapWord* rem_end = _array->address_for_index(end_index) + BOTConstants::N_words;
HeapWord* rem_end = _array->address_for_index(end_index) + BOTConstants::card_size_in_words();
set_remainder_to_point_to_start(rem_st, rem_end);
}
@ -465,7 +465,7 @@ void BlockOffsetArrayContigSpace::alloc_block_work(HeapWord* blk_start,
_next_offset_index = end_index + 1;
// Calculate _next_offset_threshold this way because end_index
// may be the last valid index in the covered region.
_next_offset_threshold = _array->address_for_index(end_index) + BOTConstants::N_words;
_next_offset_threshold = _array->address_for_index(end_index) + BOTConstants::card_size_in_words();
assert(_next_offset_threshold >= blk_end, "Incorrect offset threshold");
#ifdef ASSERT
@ -476,11 +476,11 @@ void BlockOffsetArrayContigSpace::alloc_block_work(HeapWord* blk_start,
assert((_array->offset_array(orig_next_offset_index) == 0 &&
blk_start == boundary) ||
(_array->offset_array(orig_next_offset_index) > 0 &&
_array->offset_array(orig_next_offset_index) <= BOTConstants::N_words),
_array->offset_array(orig_next_offset_index) <= BOTConstants::card_size_in_words()),
"offset array should have been set");
for (size_t j = orig_next_offset_index + 1; j <= end_index; j++) {
assert(_array->offset_array(j) > 0 &&
_array->offset_array(j) <= (u_char) (BOTConstants::N_words+BOTConstants::N_powers-1),
_array->offset_array(j) <= (u_char) (BOTConstants::card_size_in_words()+BOTConstants::N_powers-1),
"offset array should have been set");
}
#endif

@ -49,12 +49,12 @@
class ContiguousSpace;
class BOTConstants : public AllStatic {
public:
static uint LogN;
static uint LogN_words;
static uint N_bytes;
static uint N_words;
static uint _log_card_size;
static uint _log_card_size_in_words;
static uint _card_size;
static uint _card_size_in_words;
public:
// entries "e" of at least N_words mean "go back by Base^(e-N_words)."
// All entries are less than "N_words + N_powers".
static const uint LogBase = 4;
@ -67,9 +67,22 @@ public:
static size_t power_to_cards_back(uint i) {
return (size_t)1 << (LogBase * i);
}
static size_t entry_to_cards_back(u_char entry) {
assert(entry >= N_words, "Precondition");
return power_to_cards_back(entry - N_words);
assert(entry >= _card_size_in_words, "Precondition");
return power_to_cards_back(entry - _card_size_in_words);
}
static uint log_card_size() {
return _log_card_size;
}
static uint log_card_size_in_words() {
return _log_card_size_in_words;
}
static uint card_size() {
return _card_size;
}
static uint card_size_in_words() {
return _card_size_in_words;
}
};
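For reference, the encoding these renamed constants describe: an offset-array entry up to card_size_in_words() is a plain word offset back to the block start, while larger entries encode an exponential backskip of Base^(entry - card_size_in_words()) cards (LogBase == 4 above, i.e. a base of 16). A small decoding sketch, assuming the default 512-byte card and 8-byte HeapWords so that card_size_in_words() == 64:

#include <cassert>
#include <cstddef>

// Assumed values: 512-byte cards, 8-byte HeapWords, LogBase == 4 as above.
const unsigned N_words = 64;   // BOTConstants::card_size_in_words()
const unsigned LogBase = 4;    // BOTConstants::LogBase

// Mirrors power_to_cards_back() / entry_to_cards_back() shown above.
size_t power_to_cards_back(unsigned i)  { return (size_t)1 << (LogBase * i); }

size_t entry_to_cards_back(unsigned char entry) {
  assert(entry >= N_words);    // entries below N_words are direct offsets
  return power_to_cards_back(entry - N_words);
}

int main() {
  assert(entry_to_cards_back(64) == 1);   // N_words + 0 -> 16^0 = 1 card back
  assert(entry_to_cards_back(65) == 16);  // N_words + 1 -> 16^1 cards back
  assert(entry_to_cards_back(66) == 256); // N_words + 2 -> 16^2 cards back
  return 0;
}
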
@ -91,7 +104,7 @@ public:
BlockOffsetTable(HeapWord* bottom, HeapWord* end):
_bottom(bottom), _end(end) {
assert(_bottom <= _end, "arguments out of order");
assert(BOTConstants::N_bytes == CardTable::card_size, "sanity");
assert(BOTConstants::card_size() == CardTable::card_size(), "sanity");
}
// Note that the committed size of the covered space may have changed,
@ -178,7 +191,7 @@ class BlockOffsetSharedArray: public CHeapObj<mtGC> {
check_reducing_assertion(reducing);
assert(index < _vs.committed_size(), "index out of range");
assert(high >= low, "addresses out of order");
assert(pointer_delta(high, low) <= BOTConstants::N_words, "offset too large");
assert(pointer_delta(high, low) <= BOTConstants::card_size_in_words(), "offset too large");
assert(!reducing || _offset_array[index] >= (u_char)pointer_delta(high, low),
"Not reducing");
_offset_array[index] = (u_char)pointer_delta(high, low);
@ -189,7 +202,7 @@ class BlockOffsetSharedArray: public CHeapObj<mtGC> {
assert(index_for(right - 1) < _vs.committed_size(),
"right address out of range");
assert(left < right, "Heap addresses out of order");
size_t num_cards = pointer_delta(right, left) >> BOTConstants::LogN_words;
size_t num_cards = pointer_delta(right, left) >> BOTConstants::log_card_size_in_words();
fill_range(index_for(left), num_cards, offset);
}
@ -206,7 +219,7 @@ class BlockOffsetSharedArray: public CHeapObj<mtGC> {
void check_offset_array(size_t index, HeapWord* high, HeapWord* low) const {
assert(index < _vs.committed_size(), "index out of range");
assert(high >= low, "addresses out of order");
assert(pointer_delta(high, low) <= BOTConstants::N_words, "offset too large");
assert(pointer_delta(high, low) <= BOTConstants::card_size_in_words(), "offset too large");
assert(_offset_array[index] == pointer_delta(high, low),
"Wrong offset");
}
@ -221,7 +234,7 @@ class BlockOffsetSharedArray: public CHeapObj<mtGC> {
// to be reserved.
size_t compute_size(size_t mem_region_words) {
size_t number_of_slots = (mem_region_words / BOTConstants::N_words) + 1;
size_t number_of_slots = (mem_region_words / BOTConstants::card_size_in_words()) + 1;
return ReservedSpace::allocation_align_size_up(number_of_slots);
}
@ -335,7 +348,7 @@ class BlockOffsetArray: public BlockOffsetTable {
assert(_array->is_card_boundary(new_end),
"new _end would not be a card boundary");
// set all the newly added cards
_array->set_offset_array(_end, new_end, BOTConstants::N_words);
_array->set_offset_array(_end, new_end, BOTConstants::card_size_in_words());
}
_end = new_end; // update _end
}

@ -50,14 +50,14 @@ inline size_t BlockOffsetSharedArray::index_for(const void* p) const {
pc < (char*)_reserved.end(),
"p not in range.");
size_t delta = pointer_delta(pc, _reserved.start(), sizeof(char));
size_t result = delta >> BOTConstants::LogN;
size_t result = delta >> BOTConstants::log_card_size();
assert(result < _vs.committed_size(), "bad index from address");
return result;
}
inline HeapWord* BlockOffsetSharedArray::address_for_index(size_t index) const {
assert(index < _vs.committed_size(), "bad index");
HeapWord* result = _reserved.start() + (index << BOTConstants::LogN_words);
HeapWord* result = _reserved.start() + (index << BOTConstants::log_card_size_in_words());
assert(result >= _reserved.start() && result < _reserved.end(),
"bad address from index");
return result;

@ -69,9 +69,9 @@ void CardTableBarrierSetC1::post_barrier(LIRAccess& access, LIR_Opr addr, LIR_Op
if (TwoOperandLIRForm) {
LIR_Opr addr_opr = LIR_OprFact::address(new LIR_Address(addr, addr->type()));
__ leal(addr_opr, tmp);
__ unsigned_shift_right(tmp, CardTable::card_shift, tmp);
__ unsigned_shift_right(tmp, CardTable::card_shift(), tmp);
} else {
__ unsigned_shift_right(addr, CardTable::card_shift, tmp);
__ unsigned_shift_right(addr, CardTable::card_shift(), tmp);
}
LIR_Address* card_addr;

@ -91,7 +91,7 @@ void CardTableBarrierSetC2::post_barrier(GraphKit* kit,
Node* cast = __ CastPX(__ ctrl(), adr);
// Divide by card size
Node* card_offset = __ URShiftX(cast, __ ConI(CardTable::card_shift));
Node* card_offset = __ URShiftX(cast, __ ConI(CardTable::card_shift()));
// Combine card table base and card offset
Node* card_adr = __ AddP(__ top(), byte_map_base_node(kit), card_offset);

@ -38,27 +38,27 @@
#include "gc/parallel/objectStartArray.hpp"
#endif
uint CardTable::card_shift = 0;
uint CardTable::card_size = 0;
uint CardTable::card_size_in_words = 0;
uint CardTable::_card_shift = 0;
uint CardTable::_card_size = 0;
uint CardTable::_card_size_in_words = 0;
void CardTable::initialize_card_size() {
assert(UseG1GC || UseParallelGC || UseSerialGC,
"Initialize card size should only be called by card based collectors.");
card_size = GCCardSizeInBytes;
card_shift = log2i_exact(card_size);
card_size_in_words = card_size / sizeof(HeapWord);
_card_size = GCCardSizeInBytes;
_card_shift = log2i_exact(_card_size);
_card_size_in_words = _card_size / sizeof(HeapWord);
// Set blockOffsetTable size based on card table entry size
BOTConstants::initialize_bot_size(card_shift);
BOTConstants::initialize_bot_size(_card_shift);
#if INCLUDE_PARALLELGC
// Set ObjectStartArray block size based on card table entry size
ObjectStartArray::initialize_block_size(card_shift);
ObjectStartArray::initialize_block_size(_card_shift);
#endif
log_info_p(gc, init)("CardTable entry size: " UINT32_FORMAT, card_size);
log_info_p(gc, init)("CardTable entry size: " UINT32_FORMAT, _card_size);
}
size_t CardTable::compute_byte_map_size() {
@ -82,8 +82,8 @@ CardTable::CardTable(MemRegion whole_heap) :
_committed(MemRegion::create_array(_max_covered_regions, mtGC)),
_guard_region()
{
assert((uintptr_t(_whole_heap.start()) & (card_size - 1)) == 0, "heap must start at card boundary");
assert((uintptr_t(_whole_heap.end()) & (card_size - 1)) == 0, "heap must end at card boundary");
assert((uintptr_t(_whole_heap.start()) & (_card_size - 1)) == 0, "heap must start at card boundary");
assert((uintptr_t(_whole_heap.end()) & (_card_size - 1)) == 0, "heap must end at card boundary");
}
CardTable::~CardTable() {
@ -120,7 +120,7 @@ void CardTable::initialize() {
//
// _byte_map = _byte_map_base + (uintptr_t(low_bound) >> card_shift)
_byte_map = (CardValue*) heap_rs.base();
_byte_map_base = _byte_map - (uintptr_t(low_bound) >> card_shift);
_byte_map_base = _byte_map - (uintptr_t(low_bound) >> _card_shift);
assert(byte_for(low_bound) == &_byte_map[0], "Checking start of map");
assert(byte_for(high_bound-1) <= &_byte_map[_last_valid_index], "Checking end of map");
@ -413,7 +413,7 @@ void CardTable::dirty_card_iterate(MemRegion mr, MemRegionClosure* cl) {
next_entry <= limit && *next_entry == dirty_card;
dirty_cards++, next_entry++);
MemRegion cur_cards(addr_for(cur_entry),
dirty_cards*card_size_in_words);
dirty_cards*_card_size_in_words);
cl->do_MemRegion(cur_cards);
}
}
@ -439,7 +439,7 @@ MemRegion CardTable::dirty_card_range_after_reset(MemRegion mr,
next_entry <= limit && *next_entry == dirty_card;
dirty_cards++, next_entry++);
MemRegion cur_cards(addr_for(cur_entry),
dirty_cards*card_size_in_words);
dirty_cards * _card_size_in_words);
if (reset) {
for (size_t i = 0; i < dirty_cards; i++) {
cur_entry[i] = reset_val;
@ -493,7 +493,7 @@ void CardTable::verify_region(MemRegion mr, CardValue val, bool val_equals) {
}
log_error(gc, verify)("== card " PTR_FORMAT " [" PTR_FORMAT "," PTR_FORMAT "], val: %d",
p2i(curr), p2i(addr_for(curr)),
p2i((HeapWord*) (((size_t) addr_for(curr)) + card_size)),
p2i((HeapWord*) (((size_t) addr_for(curr)) + _card_size)),
(int) curr_val);
}
}

@ -111,6 +111,11 @@ protected:
// a word's worth (row) of clean card values
static const intptr_t clean_card_row = (intptr_t)(-1);
// CardTable entry size
static uint _card_shift;
static uint _card_size;
static uint _card_size_in_words;
public:
CardTable(MemRegion whole_heap);
virtual ~CardTable();
@ -133,8 +138,8 @@ public:
// in, um, words.
inline size_t cards_required(size_t covered_words) {
// Add one for a guard card, used to detect errors.
const size_t words = align_up(covered_words, card_size_in_words);
return words / card_size_in_words + 1;
const size_t words = align_up(covered_words, _card_size_in_words);
return words / _card_size_in_words + 1;
}
// Dirty the bytes corresponding to "mr" (not all of which must be
@ -157,7 +162,7 @@ public:
"Attempt to access p = " PTR_FORMAT " out of bounds of "
" card marking array's _whole_heap = [" PTR_FORMAT "," PTR_FORMAT ")",
p2i(p), p2i(_whole_heap.start()), p2i(_whole_heap.end()));
CardValue* result = &_byte_map_base[uintptr_t(p) >> card_shift];
CardValue* result = &_byte_map_base[uintptr_t(p) >> _card_shift];
assert(result >= _byte_map && result < _byte_map + _byte_map_size,
"out of bounds accessor for card marking array");
return result;
@ -189,7 +194,7 @@ public:
" _byte_map: " PTR_FORMAT " _byte_map + _byte_map_size: " PTR_FORMAT,
p2i(p), p2i(_byte_map), p2i(_byte_map + _byte_map_size));
size_t delta = pointer_delta(p, _byte_map_base, sizeof(CardValue));
HeapWord* result = (HeapWord*) (delta << card_shift);
HeapWord* result = (HeapWord*) (delta << _card_shift);
assert(_whole_heap.contains(result),
"Returning result = " PTR_FORMAT " out of bounds of "
" card marking array's _whole_heap = [" PTR_FORMAT "," PTR_FORMAT ")",
@ -228,10 +233,17 @@ public:
MemRegion dirty_card_range_after_reset(MemRegion mr, bool reset,
int reset_val);
// CardTable entry size
static uint card_shift;
static uint card_size;
static uint card_size_in_words;
static uint card_shift() {
return _card_shift;
}
static uint card_size() {
return _card_size;
}
static uint card_size_in_words() {
return _card_size_in_words;
}
static constexpr CardValue clean_card_val() { return clean_card; }
static constexpr CardValue dirty_card_val() { return dirty_card; }
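These getters feed the same address-to-card mapping that the assembler stubs and the C1/C2 barriers earlier in the diff open-code: a heap address shifted right by card_shift() indexes a byte map whose base is pre-biased so that the heap start maps to element zero (which is why byte_map_base by itself is not a valid address, as the x86 comments above note). A minimal sketch of the byte_for()/addr_for() arithmetic under an assumed 512-byte card; the toy heap and its size are made up for illustration:

#include <cassert>
#include <cstdint>

typedef unsigned char CardValue;

const unsigned card_shift = 9;                     // assumed: 512-byte cards

int main() {
  // Toy 1 MiB "heap", card-aligned as the CardTable constructor asserts.
  alignas(512) static unsigned char heap[1u << 20];
  static CardValue byte_map[(1u << 20) >> 9];      // one byte per card

  uintptr_t low_bound = (uintptr_t)heap;
  // Pre-biased base, mirroring
  // "_byte_map_base = _byte_map - (uintptr_t(low_bound) >> _card_shift)";
  // like the real one, it is not a dereferenceable pointer on its own.
  CardValue* byte_map_base = byte_map - (low_bound >> card_shift);

  unsigned char* p = heap + 3 * 512 + 17;          // somewhere inside card 3
  CardValue* card = &byte_map_base[(uintptr_t)p >> card_shift]; // byte_for(p)
  assert(card == &byte_map[3]);

  // addr_for(card): invert the mapping back to the first byte of the card.
  uintptr_t delta = (uintptr_t)(card - byte_map_base);
  assert((unsigned char*)(delta << card_shift) == heap + 3 * 512);
  return 0;
}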

@ -261,10 +261,6 @@
declare_constant(BarrierSet::ModRef) \
declare_constant(BarrierSet::CardTableBarrierSet) \
\
declare_constant(BOTConstants::LogN) \
declare_constant(BOTConstants::LogN_words) \
declare_constant(BOTConstants::N_bytes) \
declare_constant(BOTConstants::N_words) \
declare_constant(BOTConstants::LogBase) \
declare_constant(BOTConstants::Base) \
declare_constant(BOTConstants::N_powers) \
@ -274,9 +270,6 @@
declare_constant(CardTable::dirty_card) \
declare_constant(CardTable::Precise) \
declare_constant(CardTable::ObjHeadPreciseArray) \
declare_constant(CardTable::card_shift) \
declare_constant(CardTable::card_size) \
declare_constant(CardTable::card_size_in_words) \
\
declare_constant(CollectedHeap::Serial) \
declare_constant(CollectedHeap::Parallel) \

@ -138,7 +138,7 @@ void CompilerToVM::Data::initialize(JVMCI_TRAPS) {
CardTable::CardValue* base = ci_card_table_address();
assert(base != NULL, "unexpected byte_map_base");
cardtable_start_address = base;
cardtable_shift = CardTable::card_shift;
cardtable_shift = CardTable::card_shift();
} else {
// No card mark barriers
cardtable_start_address = 0;

@ -57,7 +57,7 @@ TEST_VM(FreeRegionList, length) {
bot_rs.size(),
os::vm_page_size(),
HeapRegion::GrainBytes,
BOTConstants::N_bytes,
BOTConstants::card_size(),
mtGC);
G1BlockOffsetTable bot(heap, bot_storage);
bot_storage->commit_regions(0, num_regions_in_test);

@ -240,7 +240,7 @@ TEST_VM_F(G1CardSetContainersTest, basic_cardset_inptr_test) {
uint const max = (uint)log2i(HeapRegionBounds::max_size());
for (uint i = min; i <= max; i++) {
G1CardSetContainersTest::cardset_inlineptr_test(i - CardTable::card_shift);
G1CardSetContainersTest::cardset_inlineptr_test(i - CardTable::card_shift());
}
}