diff --git a/src/hotspot/cpu/aarch64/interp_masm_aarch64.cpp b/src/hotspot/cpu/aarch64/interp_masm_aarch64.cpp
index da38f1d12e7..5fea0b8d925 100644
--- a/src/hotspot/cpu/aarch64/interp_masm_aarch64.cpp
+++ b/src/hotspot/cpu/aarch64/interp_masm_aarch64.cpp
@@ -1778,6 +1778,8 @@ void InterpreterMacroAssembler::load_field_entry(Register cache, Register index,
   ldr(cache, Address(rcpool, ConstantPoolCache::field_entries_offset()));
   add(cache, cache, Array<ResolvedFieldEntry>::base_offset_in_bytes());
   lea(cache, Address(cache, index));
+  // Prevents stale data from being read after the bytecode is patched to the fast bytecode
+  membar(MacroAssembler::LoadLoad);
 }
 
 void InterpreterMacroAssembler::load_method_entry(Register cache, Register index, int bcp_offset) {
diff --git a/src/hotspot/cpu/aarch64/templateTable_aarch64.cpp b/src/hotspot/cpu/aarch64/templateTable_aarch64.cpp
index eaaefb55f36..f32a4ee7372 100644
--- a/src/hotspot/cpu/aarch64/templateTable_aarch64.cpp
+++ b/src/hotspot/cpu/aarch64/templateTable_aarch64.cpp
@@ -2355,7 +2355,9 @@ void TemplateTable::load_resolved_field_entry(Register obj,
   __ load_unsigned_byte(flags, Address(cache, in_bytes(ResolvedFieldEntry::flags_offset())));
 
   // TOS state
-  __ load_unsigned_byte(tos_state, Address(cache, in_bytes(ResolvedFieldEntry::type_offset())));
+  if (tos_state != noreg) {
+    __ load_unsigned_byte(tos_state, Address(cache, in_bytes(ResolvedFieldEntry::type_offset())));
+  }
 
   // Klass overwrite register
   if (is_static) {
@@ -3069,13 +3071,9 @@ void TemplateTable::fast_storefield(TosState state)
 
   // access constant pool cache
   __ load_field_entry(r2, r1);
-  __ push(r0);
-  // R1: field offset, R2: TOS, R3: flags
-  load_resolved_field_entry(r2, r2, r0, r1, r3);
-  __ pop(r0);
 
-  // Must prevent reordering of the following cp cache loads with bytecode load
-  __ membar(MacroAssembler::LoadLoad);
+  // R1: field offset, R2: field holder, R3: flags
+  load_resolved_field_entry(r2, r2, noreg, r1, r3);
 
   {
     Label notVolatile;
@@ -3163,9 +3161,6 @@ void TemplateTable::fast_accessfield(TosState state)
   // access constant pool cache
   __ load_field_entry(r2, r1);
 
-  // Must prevent reordering of the following cp cache loads with bytecode load
-  __ membar(MacroAssembler::LoadLoad);
-
   __ load_sized_value(r1, Address(r2, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
   __ load_unsigned_byte(r3, Address(r2, in_bytes(ResolvedFieldEntry::flags_offset())));
 
diff --git a/src/hotspot/cpu/riscv/interp_masm_riscv.cpp b/src/hotspot/cpu/riscv/interp_masm_riscv.cpp
index a770482b121..c5f97b60c42 100644
--- a/src/hotspot/cpu/riscv/interp_masm_riscv.cpp
+++ b/src/hotspot/cpu/riscv/interp_masm_riscv.cpp
@@ -1881,6 +1881,8 @@ void InterpreterMacroAssembler::load_field_entry(Register cache, Register index,
   ld(cache, Address(xcpool, ConstantPoolCache::field_entries_offset()));
   add(cache, cache, Array<ResolvedFieldEntry>::base_offset_in_bytes());
   add(cache, cache, index);
+  // Prevents stale data from being read after the bytecode is patched to the fast bytecode
+  membar(MacroAssembler::LoadLoad);
 }
 
 void InterpreterMacroAssembler::get_method_counters(Register method,
diff --git a/src/hotspot/cpu/riscv/templateTable_riscv.cpp b/src/hotspot/cpu/riscv/templateTable_riscv.cpp
index 6240e4b4b50..5c72ddc9f81 100644
--- a/src/hotspot/cpu/riscv/templateTable_riscv.cpp
+++ b/src/hotspot/cpu/riscv/templateTable_riscv.cpp
@@ -2272,7 +2272,9 @@ void TemplateTable::load_resolved_field_entry(Register obj,
   __ load_unsigned_byte(flags, Address(cache, in_bytes(ResolvedFieldEntry::flags_offset())));
 
   // TOS state
-  __ load_unsigned_byte(tos_state, Address(cache, in_bytes(ResolvedFieldEntry::type_offset())));
+  if (tos_state != noreg) {
+    __ load_unsigned_byte(tos_state, Address(cache, in_bytes(ResolvedFieldEntry::type_offset())));
+  }
 
   // Klass overwrite register
   if (is_static) {
@@ -3036,13 +3038,9 @@ void TemplateTable::fast_storefield(TosState state) {
 
   // access constant pool cache
   __ load_field_entry(x12, x11);
-  __ push_reg(x10);
-  // X11: field offset, X12: TOS, X13: flags
-  load_resolved_field_entry(x12, x12, x10, x11, x13);
-  __ pop_reg(x10);
 
-  // Must prevent reordering of the following cp cache loads with bytecode load
-  __ membar(MacroAssembler::LoadLoad);
+  // X11: field offset, X12: field holder, X13: flags
+  load_resolved_field_entry(x12, x12, noreg, x11, x13);
 
   {
     Label notVolatile;
@@ -3133,9 +3131,6 @@ void TemplateTable::fast_accessfield(TosState state) {
   // access constant pool cache
   __ load_field_entry(x12, x11);
 
-  // Must prevent reordering of the following cp cache loads with bytecode load
-  __ membar(MacroAssembler::LoadLoad);
-
   __ load_sized_value(x11, Address(x12, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
   __ load_unsigned_byte(x13, Address(x12, in_bytes(ResolvedFieldEntry::flags_offset())));
 
diff --git a/src/hotspot/cpu/x86/templateTable_x86.cpp b/src/hotspot/cpu/x86/templateTable_x86.cpp
index 664cd86d486..248055337d8 100644
--- a/src/hotspot/cpu/x86/templateTable_x86.cpp
+++ b/src/hotspot/cpu/x86/templateTable_x86.cpp
@@ -3495,7 +3495,7 @@ void TemplateTable::fast_storefield(TosState state) {
   __ push(rax);
   __ load_field_entry(rcx, rax);
   load_resolved_field_entry(noreg, cache, rax, rbx, rdx);
-  // RBX: field offset, RCX: RAX: TOS, RDX: flags
+  // RBX: field offset, RAX: TOS, RDX: flags
   __ andl(rdx, (1 << ResolvedFieldEntry::is_volatile_shift));
   __ pop(rax);
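Note on the barrier placement (a sketch for review context, not part of the patch): the LoadLoad membar added to load_field_entry orders the earlier bytecode load against the subsequent ResolvedFieldEntry loads. Resolution fills in the entry and only then patches the bytecode to the fast variant, so a thread that has observed the fast bytecode must not observe pre-resolution entry fields. A minimal stand-alone C++ model of that ordering, using release/acquire as stand-ins for the bytecode patch and for the bytecode-load-plus-LoadLoad pair (FieldEntry, resolver_thread, etc. are illustrative names, not HotSpot code):

#include <atomic>
#include <cassert>
#include <thread>

// Illustrative stand-in for a ResolvedFieldEntry slot in the cp cache.
struct FieldEntry {
  int offset = 0;   // written during field resolution
};

FieldEntry entry;                // plain (non-atomic) cache data
std::atomic<int> bytecode{0};    // 0 = slow bytecode, 1 = patched fast bytecode

void resolver_thread() {
  entry.offset = 42;                             // resolve the entry first...
  bytecode.store(1, std::memory_order_release);  // ...then patch the bytecode
}

void interpreter_thread() {
  // The acquire load models the bytecode load followed by the LoadLoad
  // membar in load_field_entry: entry loads issued after it cannot return
  // values older than those published by the bytecode patch.
  if (bytecode.load(std::memory_order_acquire) == 1) {
    assert(entry.offset == 42);  // allowed to fail if both loads were relaxed
  }
}

int main() {
  std::thread a(resolver_thread), b(interpreter_thread);
  a.join();
  b.join();
  return 0;
}

Centralizing the barrier in load_field_entry is what lets fast_storefield and fast_accessfield drop their per-site membars: every fast-bytecode path that reads the field entry goes through load_field_entry, so the ordering is enforced once for all callers.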