8283257: x86: Clean up invocation/branch counter updates code

Reviewed-by: redestad, kvn
This commit is contained in:
Aleksey Shipilev 2022-03-23 06:30:19 +00:00
parent b035fda459
commit 82e1a1cf8b
4 changed files with 16 additions and 22 deletions

@@ -1972,19 +1972,18 @@ void InterpreterMacroAssembler::verify_FPU(int stack_depth, TosState state) {
#endif
}
// Jump if ((*counter_addr += increment) & mask) satisfies the condition.
void InterpreterMacroAssembler::increment_mask_and_jump(Address counter_addr,
int increment, Address mask,
Register scratch, bool preloaded,
Condition cond, Label* where) {
if (!preloaded) {
movl(scratch, counter_addr);
}
incrementl(scratch, increment);
// Jump if ((*counter_addr += increment) & mask) == 0
void InterpreterMacroAssembler::increment_mask_and_jump(Address counter_addr, Address mask,
Register scratch, Label* where) {
// This update is actually not atomic and can lose a number of updates
// under heavy contention, but the alternative of using the (contended)
// atomic update here penalizes profiling paths too much.
movl(scratch, counter_addr);
incrementl(scratch, InvocationCounter::count_increment);
movl(counter_addr, scratch);
andl(scratch, mask);
if (where != NULL) {
jcc(cond, *where);
jcc(Assembler::zero, *where);
}
}

@@ -248,10 +248,8 @@ class InterpreterMacroAssembler: public MacroAssembler {
bool decrement = false);
void increment_mdp_data_at(Register mdp_in, Register reg, int constant,
bool decrement = false);
void increment_mask_and_jump(Address counter_addr,
int increment, Address mask,
Register scratch, bool preloaded,
Condition cond, Label* where);
void increment_mask_and_jump(Address counter_addr, Address mask,
Register scratch, Label* where);
void set_mdp_flag_at(Register mdp_in, int flag_constant);
void test_mdp_data_at(Register mdp_in, int offset, Register value,
Register test_value_out,

@@ -388,7 +388,6 @@ address TemplateInterpreterGenerator::generate_safept_entry_for(
void TemplateInterpreterGenerator::generate_counter_incr(Label* overflow) {
Label done;
// Note: In tiered we increment either counters in Method* or in MDO depending if we're profiling or not.
int increment = InvocationCounter::count_increment;
Label no_mdo;
if (ProfileInterpreter) {
// Are we profiling?
@@ -399,7 +398,7 @@ void TemplateInterpreterGenerator::generate_counter_incr(Label* overflow) {
const Address mdo_invocation_counter(rax, in_bytes(MethodData::invocation_counter_offset()) +
in_bytes(InvocationCounter::counter_offset()));
const Address mask(rax, in_bytes(MethodData::invoke_mask_offset()));
__ increment_mask_and_jump(mdo_invocation_counter, increment, mask, rcx, false, Assembler::zero, overflow);
__ increment_mask_and_jump(mdo_invocation_counter, mask, rcx, overflow);
__ jmp(done);
}
__ bind(no_mdo);
@@ -409,8 +408,7 @@ void TemplateInterpreterGenerator::generate_counter_incr(Label* overflow) {
InvocationCounter::counter_offset());
__ get_method_counters(rbx, rax, done);
const Address mask(rax, in_bytes(MethodCounters::invoke_mask_offset()));
__ increment_mask_and_jump(invocation_counter, increment, mask, rcx,
false, Assembler::zero, overflow);
__ increment_mask_and_jump(invocation_counter, mask, rcx, overflow);
__ bind(done);
}

@@ -2197,7 +2197,6 @@ void TemplateTable::branch(bool is_jsr, bool is_wide) {
__ bind(has_counters);
Label no_mdo;
int increment = InvocationCounter::count_increment;
if (ProfileInterpreter) {
// Are we profiling?
__ movptr(rbx, Address(rcx, in_bytes(Method::method_data_offset())));
@@ -2207,7 +2206,7 @@ void TemplateTable::branch(bool is_jsr, bool is_wide) {
const Address mdo_backedge_counter(rbx, in_bytes(MethodData::backedge_counter_offset()) +
in_bytes(InvocationCounter::counter_offset()));
const Address mask(rbx, in_bytes(MethodData::backedge_mask_offset()));
__ increment_mask_and_jump(mdo_backedge_counter, increment, mask, rax, false, Assembler::zero,
__ increment_mask_and_jump(mdo_backedge_counter, mask, rax,
UseOnStackReplacement ? &backedge_counter_overflow : NULL);
__ jmp(dispatch);
}
@@ -2215,8 +2214,8 @@ void TemplateTable::branch(bool is_jsr, bool is_wide) {
// Increment backedge counter in MethodCounters*
__ movptr(rcx, Address(rcx, Method::method_counters_offset()));
const Address mask(rcx, in_bytes(MethodCounters::backedge_mask_offset()));
__ increment_mask_and_jump(Address(rcx, be_offset), increment, mask,
rax, false, Assembler::zero, UseOnStackReplacement ? &backedge_counter_overflow : NULL);
__ increment_mask_and_jump(Address(rcx, be_offset), mask, rax,
UseOnStackReplacement ? &backedge_counter_overflow : NULL);
__ bind(dispatch);
}