8338379: Accesses to class init state should be properly synchronized
Reviewed-by: mdoerr, dholmes, coleenp, fyang, amitkumar
parent 20f36c666c
commit 6600161ad4
Changed under src/hotspot: cpu/aarch64, cpu/arm, cpu/ppc, cpu/riscv, cpu/s390, cpu/x86, share
@@ -1168,8 +1168,8 @@ void LIR_Assembler::emit_opConvert(LIR_OpConvert* op) {
 
 void LIR_Assembler::emit_alloc_obj(LIR_OpAllocObj* op) {
   if (op->init_check()) {
-    __ ldrb(rscratch1, Address(op->klass()->as_register(),
-                               InstanceKlass::init_state_offset()));
+    __ lea(rscratch1, Address(op->klass()->as_register(), InstanceKlass::init_state_offset()));
+    __ ldarb(rscratch1, rscratch1);
     __ cmpw(rscratch1, InstanceKlass::fully_initialized);
     add_debug_info_for_null_check_here(op->stub()->info());
     __ br(Assembler::NE, *op->stub()->entry());
@@ -1838,7 +1838,8 @@ void MacroAssembler::clinit_barrier(Register klass, Register scratch, Label* L_f
     L_slow_path = &L_fallthrough;
   }
   // Fast path check: class is fully initialized
-  ldrb(scratch, Address(klass, InstanceKlass::init_state_offset()));
+  lea(scratch, Address(klass, InstanceKlass::init_state_offset()));
+  ldarb(scratch, scratch);
   subs(zr, scratch, InstanceKlass::fully_initialized);
   br(Assembler::EQ, *L_fast_path);
 
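Note on the two AArch64 hunks above: the plain ldrb of the init-state byte becomes lea + ldarb, i.e. a load-acquire, so no later load or store can be reordered before it and a thread that observes fully_initialized also observes the class's initialized statics. A minimal portable-C++ sketch of the same acquire load (KlassLike and its fields are illustrative, not HotSpot types):

#include <atomic>
#include <cstdint>

enum InitState : uint8_t { allocated, loaded, linked, being_initialized,
                           fully_initialized, initialization_error };

struct KlassLike {
  std::atomic<uint8_t> init_state{allocated};  // stands in for InstanceKlass::_init_state
};

// Analogue of the lea + ldarb fast path: an acquire load of the byte.
// On AArch64 an acquire byte load compiles to ldarb, which is what the
// patch emits in place of the old plain ldrb.
bool is_fully_initialized(const KlassLike& k) {
  return k.init_state.load(std::memory_order_acquire) == fully_initialized;
}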
@@ -948,6 +948,7 @@ void LIR_Assembler::emit_alloc_obj(LIR_OpAllocObj* op) {
   if (op->init_check()) {
     Register tmp = op->tmp1()->as_register();
     __ ldrb(tmp, Address(op->klass()->as_register(), InstanceKlass::init_state_offset()));
+    __ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
     add_debug_info_for_null_check_here(op->stub()->info());
     __ cmp(tmp, InstanceKlass::fully_initialized);
     __ b(*op->stub()->entry(), ne);
@@ -3974,6 +3974,7 @@ void TemplateTable::_new() {
-  // make sure klass is initialized
+  // make sure klass is fully initialized
   __ ldrb(Rtemp, Address(Rklass, InstanceKlass::init_state_offset()));
+  __ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
   __ cmp(Rtemp, InstanceKlass::fully_initialized);
   __ b(slow_case, ne);
 
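The Arm port (and the RISC-V port further down) keeps the plain byte load and instead issues membar(LoadLoad | LoadStore) right after it, the fence flavor of acquire: later loads and stores cannot move above the init_state read. A sketch of the idiom in portable C++, under the same illustrative naming:

#include <atomic>
#include <cstdint>

// Analogue of ldrb/lbu followed by membar(LoadLoad | LoadStore):
// a relaxed load plus an acquire fence. The fence orders the earlier
// load against everything that follows, matching the membar's mask.
bool is_fully_initialized(const std::atomic<uint8_t>& init_state,
                          uint8_t fully_initialized) {
  uint8_t s = init_state.load(std::memory_order_relaxed);
  std::atomic_thread_fence(std::memory_order_acquire);
  return s == fully_initialized;
}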
@@ -2274,6 +2274,7 @@ void LIR_Assembler::emit_alloc_obj(LIR_OpAllocObj* op) {
   }
   __ lbz(op->tmp1()->as_register(),
          in_bytes(InstanceKlass::init_state_offset()), op->klass()->as_register());
+  // acquire barrier included in membar_storestore() which follows the allocation immediately.
   __ cmpwi(CCR0, op->tmp1()->as_register(), InstanceKlass::fully_initialized);
   __ bc_far_optimized(Assembler::bcondCRbiIs0, __ bi0(CCR0, Assembler::equal), *op->stub()->entry());
 }
@@ -2410,7 +2410,7 @@ void MacroAssembler::verify_secondary_supers_table(Register r_sub_klass,
 void MacroAssembler::clinit_barrier(Register klass, Register thread, Label* L_fast_path, Label* L_slow_path) {
   assert(L_fast_path != nullptr || L_slow_path != nullptr, "at least one is required");
 
-  Label L_fallthrough;
+  Label L_check_thread, L_fallthrough;
   if (L_fast_path == nullptr) {
     L_fast_path = &L_fallthrough;
   } else if (L_slow_path == nullptr) {
@@ -2419,10 +2419,14 @@ void MacroAssembler::clinit_barrier(Register klass, Register thread, Label* L_fa
 
   // Fast path check: class is fully initialized
   lbz(R0, in_bytes(InstanceKlass::init_state_offset()), klass);
+  // acquire by cmp-branch-isync if fully_initialized
   cmpwi(CCR0, R0, InstanceKlass::fully_initialized);
-  beq(CCR0, *L_fast_path);
+  bne(CCR0, L_check_thread);
+  isync();
+  b(*L_fast_path);
 
   // Fast path check: current thread is initializer thread
+  bind(L_check_thread);
   ld(R0, in_bytes(InstanceKlass::init_thread_offset()), klass);
   cmpd(CCR0, thread, R0);
   if (L_slow_path == &L_fallthrough) {
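PPC has no single-instruction acquire load, so the patched clinit_barrier uses the cmp-branch-isync idiom: the isync sits after the taken branch, so the acquire cost is paid only on the fully-initialized fast path, and the reentrant-initializer check needs no barrier at all. The rewritten control flow, rendered as a portable C++ sketch (the names and the fence standing in for isync are illustrative):

#include <atomic>
#include <cstdint>

enum InitState : uint8_t { being_initialized = 3, fully_initialized = 4 };

struct KlassLike {
  std::atomic<uint8_t> init_state{0};
  std::atomic<void*>   init_thread{nullptr};
};

// Control-flow analogue of the patched PPC fast paths:
// 1) fully initialized? acquire ordering is taken on this path only
//    (cmpwi + bne + isync in the real code);
// 2) otherwise, is the current thread the initializer (reentrant case)?
bool clinit_fast_path(const KlassLike& k, void* current_thread) {
  uint8_t s = k.init_state.load(std::memory_order_relaxed);
  if (s == fully_initialized) {
    std::atomic_thread_fence(std::memory_order_acquire);  // plays the role of isync
    return true;                                          // b(*L_fast_path)
  }
  // L_check_thread: the initializing thread may recurse without a barrier
  return k.init_thread.load(std::memory_order_relaxed) == current_thread;
}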
@@ -980,6 +980,7 @@ void LIR_Assembler::emit_alloc_obj(LIR_OpAllocObj* op) {
   if (op->init_check()) {
     __ lbu(t0, Address(op->klass()->as_register(),
                        InstanceKlass::init_state_offset()));
+    __ membar(MacroAssembler::LoadLoad | MacroAssembler::LoadStore);
     __ mv(t1, (u1)InstanceKlass::fully_initialized);
     add_debug_info_for_null_check_here(op->stub()->info());
     __ bne(t0, t1, *op->stub()->entry(), /* is_far */ true);
@@ -493,6 +493,7 @@ void MacroAssembler::clinit_barrier(Register klass, Register tmp, Label* L_fast_
 
   // Fast path check: class is fully initialized
   lbu(tmp, Address(klass, InstanceKlass::init_state_offset()));
+  membar(MacroAssembler::LoadLoad | MacroAssembler::LoadStore);
   sub(tmp, tmp, InstanceKlass::fully_initialized);
   beqz(tmp, *L_fast_path);
 
@@ -2350,6 +2350,7 @@ void LIR_Assembler::shift_op(LIR_Code code, LIR_Opr left, jint count, LIR_Opr de
 void LIR_Assembler::emit_alloc_obj(LIR_OpAllocObj* op) {
   if (op->init_check()) {
     // Make sure klass is initialized & doesn't have finalizer.
+    // init_state needs acquire, but S390 is TSO, and so we are already good.
     const int state_offset = in_bytes(InstanceKlass::init_state_offset());
     Register iklass = op->klass()->as_register();
     add_debug_info_for_null_check_here(op->stub()->info());
@@ -3459,7 +3459,8 @@ void MacroAssembler::clinit_barrier(Register klass, Register thread, Label* L_fa
     L_slow_path = &L_fallthrough;
   }
 
-  // Fast path check: class is fully initialized
+  // Fast path check: class is fully initialized.
+  // init_state needs acquire, but S390 is TSO, and so we are already good.
   z_cli(Address(klass, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
   z_bre(*L_fast_path);
 
@@ -1578,6 +1578,7 @@ void LIR_Assembler::emit_opConvert(LIR_OpConvert* op) {
 void LIR_Assembler::emit_alloc_obj(LIR_OpAllocObj* op) {
   if (op->init_check()) {
     add_debug_info_for_null_check_here(op->stub()->info());
+    // init_state needs acquire, but x86 is TSO, and so we are already good.
     __ cmpb(Address(op->klass()->as_register(),
                     InstanceKlass::init_state_offset()),
             InstanceKlass::fully_initialized);
@@ -5084,7 +5084,8 @@ void MacroAssembler::clinit_barrier(Register klass, Register thread, Label* L_fa
     L_slow_path = &L_fallthrough;
  }
 
-  // Fast path check: class is fully initialized
+  // Fast path check: class is fully initialized.
+  // init_state needs acquire, but x86 is TSO, and so we are already good.
   cmpb(Address(klass, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
   jcc(Assembler::equal, *L_fast_path);
 
@@ -4048,6 +4048,7 @@ void TemplateTable::_new() {
   __ push(rcx); // save the contexts of klass for initializing the header
 
   // make sure klass is initialized
+  // init_state needs acquire, but x86 is TSO, and so we are already good.
 #ifdef _LP64
   assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
   __ clinit_barrier(rcx, r15_thread, nullptr /*L_fast_path*/, &slow_case);
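The s390 and x86 hunks above only add comments: both targets are TSO, where hardware never reorders a load with a later load or store, so the existing plain loads already behave as acquire loads. The portable-C++ way to say the same thing (a sketch; on these targets the acquire load is expected to assemble to an ordinary byte load with no fence):

#include <atomic>
#include <cstdint>

// On x86-64 and s390, TSO already forbids load-load and load-store
// reordering, so this acquire load costs nothing over a plain load —
// which is why the HotSpot patch only adds comments on those ports.
uint8_t load_init_state_acquire(const std::atomic<uint8_t>& init_state) {
  return init_state.load(std::memory_order_acquire);
}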
@@ -4103,7 +4103,7 @@ void InstanceKlass::set_init_state(ClassState state) {
   assert(good_state || state == allocated, "illegal state transition");
 #endif
   assert(_init_thread == nullptr, "should be cleared before state change");
-  _init_state = state;
+  Atomic::release_store(&_init_state, state);
 }
 
 #if INCLUDE_JVMTI
@@ -507,14 +507,14 @@ public:
 
 public:
   // initialization state
-  bool is_loaded() const { return _init_state >= loaded; }
-  bool is_linked() const { return _init_state >= linked; }
-  bool is_initialized() const { return _init_state == fully_initialized; }
-  bool is_not_initialized() const { return _init_state < being_initialized; }
-  bool is_being_initialized() const { return _init_state == being_initialized; }
-  bool is_in_error_state() const { return _init_state == initialization_error; }
+  bool is_loaded() const { return init_state() >= loaded; }
+  bool is_linked() const { return init_state() >= linked; }
+  bool is_initialized() const { return init_state() == fully_initialized; }
+  bool is_not_initialized() const { return init_state() < being_initialized; }
+  bool is_being_initialized() const { return init_state() == being_initialized; }
+  bool is_in_error_state() const { return init_state() == initialization_error; }
   bool is_reentrant_initialization(Thread *thread) { return thread == _init_thread; }
-  ClassState init_state() const { return _init_state; }
+  ClassState init_state() const { return Atomic::load_acquire(&_init_state); }
   const char* init_state_name() const;
   bool is_rewritten() const { return _misc_flags.rewritten(); }
 
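The shared-code half of the fix pairs the two sides: set_init_state() in instanceKlass.cpp now publishes with Atomic::release_store, and every query in instanceKlass.hpp funnels through init_state(), which reads with Atomic::load_acquire. A self-contained sketch of that pairing (an illustrative class, not the HotSpot one):

#include <atomic>

class KlassLike {
 public:
  enum ClassState { allocated, loaded, linked, being_initialized,
                    fully_initialized, initialization_error };

  // Writer side: publish the new state after all initialization effects,
  // mirroring Atomic::release_store(&_init_state, state).
  void set_init_state(ClassState state) {
    _init_state.store(state, std::memory_order_release);
  }

  // Reader side: the single acquire point all queries go through,
  // mirroring Atomic::load_acquire(&_init_state).
  ClassState init_state() const {
    return _init_state.load(std::memory_order_acquire);
  }

  bool is_initialized() const { return init_state() == fully_initialized; }

 private:
  std::atomic<ClassState> _init_state{allocated};
};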
@@ -3008,7 +3008,7 @@ void GraphKit::guard_klass_being_initialized(Node* klass) {
   Node* adr = basic_plus_adr(top(), klass, init_state_off);
   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
-                                    T_BYTE, MemNode::unordered);
+                                    T_BYTE, MemNode::acquire);
   init_state = _gvn.transform(init_state);
 
   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
@@ -2904,7 +2904,7 @@ bool LibraryCallKit::inline_unsafe_allocate() {
   Node* insp = basic_plus_adr(kls, in_bytes(InstanceKlass::init_state_offset()));
   // Use T_BOOLEAN for InstanceKlass::_init_state so the compiler
   // can generate code to load it as unsigned byte.
-  Node* inst = make_load(nullptr, insp, TypeInt::UBYTE, T_BOOLEAN, MemNode::unordered);
+  Node* inst = make_load(nullptr, insp, TypeInt::UBYTE, T_BOOLEAN, MemNode::acquire);
   Node* bits = intcon(InstanceKlass::fully_initialized);
   test = _gvn.transform(new SubINode(inst, bits));
   // The 'test' is non-zero if we need to take a slow path.
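C2 expresses the same contract in the ideal graph: the init-state loads in guard_klass_being_initialized and inline_unsafe_allocate change from MemNode::unordered to MemNode::acquire, which both emits whatever barrier the target needs and keeps the compiler from hoisting dependent memory operations above the load. The SubI of inst against fully_initialized is zero exactly on the fast path; a plain-C++ sketch of the generated check (hypothetical names):

#include <atomic>
#include <cstdint>

// Shape of the check C2 builds in inline_unsafe_allocate(): an acquire
// load of the state byte, then a subtract against fully_initialized —
// the result is non-zero iff the slow path (class init) must be taken.
bool needs_slow_path(const std::atomic<uint8_t>& init_state,
                     uint8_t fully_initialized) {
  int inst = init_state.load(std::memory_order_acquire);  // MemNode::acquire
  int test = inst - fully_initialized;                    // SubINode
  return test != 0;
}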