8315940: ARM32: Move field resolution information out of the cpCache

Reviewed-by: dsamersoff
Aleksei Voitylov 2023-10-13 11:33:34 +00:00 committed by Dmitry Samersoff
parent e490cf9c65
commit 266636deab
3 changed files with 209 additions and 163 deletions

@@ -37,6 +37,7 @@
#include "oops/markWord.hpp"
#include "oops/method.hpp"
#include "oops/methodData.hpp"
#include "oops/resolvedFieldEntry.hpp"
#include "oops/resolvedIndyEntry.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/jvmtiThreadState.hpp"
@@ -312,6 +313,36 @@ void InterpreterMacroAssembler::load_resolved_indy_entry(Register cache, Registe
add(cache, cache, index);
}
void InterpreterMacroAssembler::load_field_entry(Register cache, Register index, int bcp_offset) {
// Get index out of bytecode pointer
assert_different_registers(cache, index);
get_index_at_bcp(index, bcp_offset, cache /*as tmp*/, sizeof(u2));
// Scale the index to be the entry index * sizeof(ResolvedFieldEntry)
// sizeof(ResolvedFieldEntry) is 16 on Arm, so we can use a shift instead of a multiply
if (is_power_of_2(sizeof(ResolvedFieldEntry))) {
// load constant pool cache pointer
ldr(cache, Address(FP, frame::interpreter_frame_cache_offset * wordSize));
// Get address of field entries array
ldr(cache, Address(cache, in_bytes(ConstantPoolCache::field_entries_offset())));
add(cache, cache, Array<ResolvedFieldEntry>::base_offset_in_bytes());
add(cache, cache, AsmOperand(index, lsl, log2i_exact(sizeof(ResolvedFieldEntry))));
} else {
mov(cache, sizeof(ResolvedFieldEntry));
mul(index, index, cache);
// load constant pool cache pointer
ldr(cache, Address(FP, frame::interpreter_frame_cache_offset * wordSize));
// Get address of field entries array
ldr(cache, Address(cache, in_bytes(ConstantPoolCache::field_entries_offset())));
add(cache, cache, Array<ResolvedFieldEntry>::base_offset_in_bytes());
add(cache, cache, index);
}
}
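Both paths above compute the same address, entries_base + base_offset + index * sizeof(ResolvedFieldEntry); the shift branch is just the power-of-two strength reduction the comment mentions. A minimal C++ sketch of that arithmetic (illustrative names, not VM types):

#include <cstddef>
#include <cstdint>

// Stand-in for the 16-byte ResolvedFieldEntry on 32-bit Arm.
struct FieldEntrySketch16 { unsigned char payload[16]; };

uintptr_t field_entry_address(uintptr_t entries_base, // Array<ResolvedFieldEntry>* from the cpCache
                              size_t base_offset,     // base_offset_in_bytes(): skip the array header
                              uint16_t index) {       // u2 operand read from the bytecode stream
  // index * 16 becomes index << 4, i.e. log2i_exact(sizeof(ResolvedFieldEntry)).
  return entries_base + base_offset + ((uintptr_t)index << 4);
}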
// Generate a subtype check: branch to not_subtype if sub_klass is
// not a subtype of super_klass.
// Profiling code for the subtype check failure (profile_typecheck_failed)

@@ -102,6 +102,7 @@ class InterpreterMacroAssembler: public MacroAssembler {
void load_resolved_klass_at_offset(Register Rcpool, Register Rindex, Register Rklass);
void load_resolved_indy_entry(Register cache, Register index);
void load_field_entry(Register cache, Register index, int bcp_offset = 1);
void pop_ptr(Register r);
void pop_i(Register r = R0_tos);

@@ -39,6 +39,7 @@
#include "oops/objArrayKlass.hpp"
#include "oops/oop.inline.hpp"
#include "oops/resolvedIndyEntry.hpp"
#include "oops/resolvedFieldEntry.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/frame.inline.hpp"
@@ -227,7 +228,16 @@ void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
// additional, required work.
assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
assert(load_bc_into_bc_reg, "we use bc_reg as temp");
__ get_cache_and_index_and_bytecode_at_bcp(bc_reg, temp_reg, temp_reg, byte_no, 1, sizeof(u2));
__ load_field_entry(temp_reg, bc_reg);
if (byte_no == f1_byte) {
__ add(temp_reg, temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset()));
} else {
__ add(temp_reg, temp_reg, in_bytes(ResolvedFieldEntry::put_code_offset()));
}
// Load-acquire the bytecode to match store-release in ResolvedFieldEntry::fill_in()
__ ldrb(temp_reg, temp_reg);
__ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), noreg, true);
__ mov(bc_reg, bc);
__ cbz(temp_reg, L_patch_done); // test if bytecode is zero
}
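The load-acquire comment is the correctness hinge of this rewrite: ResolvedFieldEntry::fill_in() writes the entry's payload and then store-releases the resolved bytecode, so a reader that acquires a non-zero code byte is guaranteed to see the payload. A std::atomic analogy of the pairing (a sketch, not the VM's types):

#include <atomic>
#include <cstdint>

struct EntrySketch {
  int field_offset;               // plain payload, written before publication
  std::atomic<uint8_t> code{0};   // 0 == not resolved yet

  void fill_in(int offset, uint8_t bc) {
    field_offset = offset;
    code.store(bc, std::memory_order_release);  // store-release publishes the payload
  }

  bool try_fast_path(int& offset_out) {
    if (code.load(std::memory_order_acquire) == 0)  // load-acquire matches the release
      return false;                                 // unresolved: take the slow path
    offset_out = field_offset;  // ordered after the payload writes by the acquire
    return true;
  }
};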
@@ -2567,12 +2577,6 @@ void TemplateTable::resolve_cache_and_index(int byte_no,
Label resolved;
Bytecodes::Code code = bytecode();
switch (code) {
case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
default: break;
}
assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
__ get_cache_and_index_and_bytecode_at_bcp(Rcache, Rindex, Rtemp, byte_no, 1, index_size);
__ cmp(Rtemp, code); // have we resolved this bytecode?
@@ -2587,6 +2591,69 @@ void TemplateTable::resolve_cache_and_index(int byte_no,
__ bind(resolved);
}
void TemplateTable::resolve_cache_and_index_for_field(int byte_no,
Register Rcache,
Register Rindex) {
assert_different_registers(Rcache, Rindex, Rtemp);
Label resolved;
Bytecodes::Code code = bytecode();
switch (code) {
case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
default: break;
}
assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
__ load_field_entry(Rcache, Rindex);
if (byte_no == f1_byte) {
__ add(Rtemp, Rcache, in_bytes(ResolvedFieldEntry::get_code_offset()));
} else {
__ add(Rtemp, Rcache, in_bytes(ResolvedFieldEntry::put_code_offset()));
}
// Load-acquire the bytecode to match store-release in ResolvedFieldEntry::fill_in()
__ ldrb(Rtemp, Rtemp);
__ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), noreg, true);
__ cmp(Rtemp, code); // have we resolved this bytecode?
__ b(resolved, eq);
// resolve first time through
address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
__ mov(R1, code);
__ call_VM(noreg, entry, R1);
// Update registers with resolved info
__ load_field_entry(Rcache, Rindex);
__ bind(resolved);
}
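In pseudo-C++, the control flow generated above is roughly the following (a sketch with stand-in types; only the nofast remapping and the reload after the VM call mirror the real code directly):

#include <cstdint>

enum class Bc : uint8_t { getfield, putfield, nofast_getfield, nofast_putfield };

struct FieldEntrySketch { uint8_t get_code, put_code; };

FieldEntrySketch* load_field_entry_sketch(); // stand-in: derive entry from the bcp operand
void resolve_from_cache_sketch(Bc code);     // stand-in: InterpreterRuntime::resolve_from_cache

void resolve_for_field_sketch(Bc code, bool is_get) {
  // nofast variants resolve exactly like their plain counterparts
  if (code == Bc::nofast_getfield) code = Bc::getfield;
  if (code == Bc::nofast_putfield) code = Bc::putfield;

  FieldEntrySketch* e = load_field_entry_sketch();
  uint8_t cached = is_get ? e->get_code : e->put_code;  // load-acquire in the real code
  if (cached == static_cast<uint8_t>(code)) return;     // already resolved: fast path

  resolve_from_cache_sketch(code);  // slow path into the VM
  e = load_field_entry_sketch();    // re-derive: call_VM clobbers the registers
  (void)e;
}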
void TemplateTable::load_resolved_field_entry(Register obj,
Register cache,
Register tos_state,
Register offset,
Register flags,
bool is_static = false) {
assert_different_registers(cache, tos_state, flags, offset);
// Field offset
__ ldr(offset, Address(cache, in_bytes(ResolvedFieldEntry::field_offset_offset())));
// Flags
__ ldrb(flags, Address(cache, in_bytes(ResolvedFieldEntry::flags_offset())));
// TOS state
__ ldrb(tos_state, Address(cache, in_bytes(ResolvedFieldEntry::type_offset())));
// Klass overwrite register
if (is_static) {
__ ldr(obj, Address(cache, ResolvedFieldEntry::field_holder_offset()));
const int mirror_offset = in_bytes(Klass::java_mirror_offset());
__ ldr(obj, Address(obj, mirror_offset));
__ resolve_oop_handle(obj);
}
}
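The word and byte loads above imply a compact entry layout. One plausible 32-bit arrangement, shown here as a labelled assumption rather than the authoritative ResolvedFieldEntry definition, also accounts for the 16-byte size that load_field_entry's shift relies on:

#include <cstdint>

struct FieldEntrySketch32 {
  uint32_t field_holder;  // InstanceKlass* (32-bit pointer on Arm32); statics only
  int32_t  field_offset;  // ldr  offset,    [cache, #field_offset_offset]
  uint16_t field_index;
  uint16_t cpool_index;
  uint8_t  tos_state;     // ldrb tos_state, [cache, #type_offset]
  uint8_t  flags;         // ldrb flags,     [cache, #flags_offset]; tbz tests its bits
  uint8_t  get_code;      // resolved bytecode for getfield/getstatic
  uint8_t  put_code;      // resolved bytecode for putfield/putstatic
};
static_assert(sizeof(FieldEntrySketch32) == 16,
              "matches the lsl #4 scaling in load_field_entry");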
// The Rcache and Rindex registers must be set before call
void TemplateTable::load_field_cp_cache_entry(Register Rcache,
@@ -2742,8 +2809,8 @@ void TemplateTable::jvmti_post_field_access(Register Rcache,
__ cbz(Rtemp, Lcontinue);
// cache entry pointer
__ add(R2, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
__ add(R2, R2, in_bytes(ConstantPoolCache::base_offset()));
__ load_field_entry(R2, Rindex);
if (is_static) {
__ mov(R1, 0); // null object reference
} else {
@@ -2756,7 +2823,7 @@ void TemplateTable::jvmti_post_field_access(Register Rcache,
// R2: cache entry pointer
__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
R1, R2);
__ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
__ load_field_entry(Rcache, Rindex);
__ bind(Lcontinue);
}
@@ -2773,29 +2840,25 @@ void TemplateTable::pop_and_check_object(Register r) {
void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
transition(vtos, vtos);
const Register Roffset = R2_tmp;
const Register Robj = R3_tmp;
const Register Rcache = R4_tmp;
const Register Rflagsav = Rtmp_save0; // R4/R19
const Register Rindex = R5_tmp;
const Register Rflags = R5_tmp;
const Register Rcache = R4_tmp;
const Register Rindex = R3_tmp;
resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
const Register Roffset = R2_tmp;
const Register Rtos_state = R3_tmp;
const Register Robj = R4_tmp; // Rcache is free at the time of loading Robj
const Register Rflags = R5_tmp;
resolve_cache_and_index_for_field(byte_no, Rcache, Rindex);
jvmti_post_field_access(Rcache, Rindex, is_static, false);
load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
load_resolved_field_entry(Robj, Rcache, Rtos_state, Roffset, Rflags, is_static);
__ mov(Rflagsav, Rflags);
if (!is_static) pop_and_check_object(Robj);
if (!is_static) {
pop_and_check_object(Robj);
}
Label Done, Lint, Ltable, shouldNotReachHere;
Label Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
// compute type
__ logical_shift_right(Rflags, Rflags, ConstantPoolCacheEntry::tos_state_shift);
// Make sure we don't need to mask flags after the above shift
ConstantPoolCacheEntry::verify_tos_state_shift();
// There are actually two versions of implementation of getfield/getstatic:
//
// 1) Table switch using add(PC,...) instruction (fast_version)
@@ -2818,16 +2881,16 @@ void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteContr
assert(number_of_states == 10, "number of tos states should be equal to 10");
__ cmp(Rflags, itos);
__ cmp(Rtos_state, itos);
if(atos_merged_with_itos) {
__ cmp(Rflags, atos, ne);
__ cmp(Rtos_state, atos, ne);
}
// table switch by type
if(fast_version) {
__ add(PC, PC, AsmOperand(Rflags, lsl, log_max_block_size + Assembler::LogInstructionSize), ne);
__ add(PC, PC, AsmOperand(Rtos_state, lsl, log_max_block_size + Assembler::LogInstructionSize), ne);
} else {
__ ldr(PC, Address(PC, Rflags, lsl, LogBytesPerWord), ne);
__ ldr(PC, Address(PC, Rtos_state, lsl, LogBytesPerWord), ne);
}
// jump to itos/atos case
@@ -2868,7 +2931,7 @@ void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteContr
// ztos (same as btos for getfield)
{
assert(ztos == seq++, "btos has unexpected value");
assert(ztos == seq++, "ztos has unexpected value");
FixedSizeCodeBlock ztos_block(_masm, max_block_size, fast_version);
__ bind(Lztos);
__ access_load_at(T_BOOLEAN, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
@@ -2992,13 +3055,13 @@ void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteContr
__ bind(Done);
// Check for volatile field
Label notVolatile;
__ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
{
// Check for volatile field
Label notVolatile;
__ tbz(Rflags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
__ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
}
}
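The table switch earlier in this function is untouched structurally; only its input changed from the shifted Rflags to the directly loaded Rtos_state. As a C++ analogy (illustrative only), the non-fast variant is a plain jump table, while the fast variant exploits fixed-size handler blocks so dispatch needs no memory access:

typedef void (*Handler)();  // one handler per tos state: btos, ztos, ..., vtos

// Non-fast variant: ldr PC, [PC, Rtos_state, lsl #LogBytesPerWord]
void dispatch_via_table(Handler const* table, unsigned tos_state) {
  table[tos_state]();
}

// Fast variant: every handler is padded to 1 << log_max_block_size bytes, so
//   add PC, PC, Rtos_state, lsl #(log_max_block_size + LogInstructionSize)
// lands directly on the right block. itos (and atos for getfield) is compared
// first because it is the most frequent case.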
void TemplateTable::getfield(int byte_no) {
@@ -3029,6 +3092,8 @@ void TemplateTable::jvmti_post_field_mod(Register Rcache, Register Rindex, bool
__ ldr_global_s32(Rtemp, (address)JvmtiExport::get_field_modification_count_addr());
__ cbz(Rtemp, Lcontinue);
__ mov(R2, Rcache);
if (is_static) {
// Life is simple. Null out the object pointer.
__ mov(R1, 0);
@@ -3037,16 +3102,10 @@ void TemplateTable::jvmti_post_field_mod(Register Rcache, Register Rindex, bool
// We don't know the size of the value, though; it could be one or two words
// depending on its type. As a result, we must find the type to determine where
// the object is.
__ ldrb(R3, Address(Rcache, in_bytes(ResolvedFieldEntry::type_offset())));
__ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
__ ldr_u32(Rtemp, Address(Rtemp, cp_base_offset + ConstantPoolCacheEntry::flags_offset()));
__ logical_shift_right(Rtemp, Rtemp, ConstantPoolCacheEntry::tos_state_shift);
// Make sure we don't need to mask Rtemp after the above shift
ConstantPoolCacheEntry::verify_tos_state_shift();
__ cmp(Rtemp, ltos);
__ cond_cmp(Rtemp, dtos, ne);
__ cmp(R3, ltos);
__ cond_cmp(R3, dtos, ne);
// two word value (ltos/dtos)
__ ldr(R1, Address(SP, Interpreter::expr_offset_in_bytes(2)), eq);
@@ -3054,10 +3113,6 @@ void TemplateTable::jvmti_post_field_mod(Register Rcache, Register Rindex, bool
__ ldr(R1, Address(SP, Interpreter::expr_offset_in_bytes(1)), ne);
}
// cache entry pointer
__ add(R2, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
__ add(R2, R2, in_bytes(cp_base_offset));
// object (tos)
__ mov(R3, Rstack_top);
@@ -3066,7 +3121,7 @@ void TemplateTable::jvmti_post_field_mod(Register Rcache, Register Rindex, bool
// R3: value object on the stack
__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification),
R1, R2, R3);
__ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
__ load_field_entry(Rcache, Rindex);
__ bind(Lcontinue);
}
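The conditional loads above locate the object reference sitting beneath the value being stored; only the value's width varies by type. A sketch of the slot arithmetic, assuming one machine word per expression-stack slot as Interpreter::expr_offset_in_bytes implies:

#include <cstdint>

enum class Tos : uint8_t { btos, ztos, ctos, stos, itos, ltos, ftos, dtos, atos };

// ltos/dtos values occupy two stack slots, everything else one, so the target
// object reference is either two slots or one slot below the stack top.
const int32_t* object_being_modified(const int32_t* sp, Tos tos_state) {
  int value_slots = (tos_state == Tos::ltos || tos_state == Tos::dtos) ? 2 : 1;
  return sp + value_slots;  // expr_offset_in_bytes(n) == n * wordSize
}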
@@ -3076,34 +3131,29 @@ void TemplateTable::jvmti_post_field_mod(Register Rcache, Register Rindex, bool
void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
transition(vtos, vtos);
const Register Roffset = R2_tmp;
const Register Robj = R3_tmp;
const Register Rcache = R4_tmp;
const Register Rflagsav = Rtmp_save0; // R4/R19
const Register Rindex = R5_tmp;
const Register Rflags = R5_tmp;
const Register Rcache = R4_tmp;
const Register Rindex = R3_tmp;
resolve_cache_and_index(byte_no, Rcache, Rindex, sizeof(u2));
const Register Roffset = R2_tmp;
const Register Rtos_state = R3_tmp;
const Register Robj = R4_tmp; // Rcache is free at the time of loading Robj
const Register Rflags = R5_tmp;
resolve_cache_and_index_for_field(byte_no, Rcache, Rindex);
jvmti_post_field_mod(Rcache, Rindex, is_static);
load_field_cp_cache_entry(Rcache, Rindex, Roffset, Rflags, Robj, is_static);
load_resolved_field_entry(Robj, Rcache, Rtos_state, Roffset, Rflags, is_static);
// Check for volatile field
Label notVolatile;
__ mov(Rflagsav, Rflags);
__ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
{
Label notVolatile;
__ tbz(Rflags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
__ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
}
Label Done, Lint, shouldNotReachHere;
Label Ltable, Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;
// compute type
__ logical_shift_right(Rflags, Rflags, ConstantPoolCacheEntry::tos_state_shift);
// Make sure we don't need to mask flags after the above shift
ConstantPoolCacheEntry::verify_tos_state_shift();
// There are actually two versions of implementation of putfield/putstatic:
//
// 32-bit ARM:
@@ -3124,13 +3174,13 @@ void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteContr
assert(number_of_states == 10, "number of tos states should be equal to 10");
// itos case is frequent and is moved outside table switch
__ cmp(Rflags, itos);
__ cmp(Rtos_state, itos);
// table switch by type
if (fast_version) {
__ add(PC, PC, AsmOperand(Rflags, lsl, log_max_block_size + Assembler::LogInstructionSize), ne);
__ add(PC, PC, AsmOperand(Rtos_state, lsl, log_max_block_size + Assembler::LogInstructionSize), ne);
} else {
__ ldr(PC, Address(PC, Rflags, lsl, LogBytesPerWord), ne);
__ ldr(PC, Address(PC, Rtos_state, lsl, LogBytesPerWord), ne);
}
// jump to itos case
@@ -3267,7 +3317,7 @@ void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteContr
// atos
{
assert(atos == seq++, "dtos has unexpected value");
assert(atos == seq++, "atos has unexpected value");
__ bind(Latos);
__ pop(atos);
if (!is_static) pop_and_check_object(Robj);
@@ -3293,32 +3343,11 @@ void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteContr
__ bind(Done);
Label notVolatile2;
if (is_static) {
// Just check for volatile. Memory barrier for static final field
// is handled by class initialization.
__ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile2);
volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
__ bind(notVolatile2);
} else {
// Check for volatile field and final field
Label skipMembar;
__ tst(Rflagsav, 1 << ConstantPoolCacheEntry::is_volatile_shift |
1 << ConstantPoolCacheEntry::is_final_shift);
__ b(skipMembar, eq);
__ tbz(Rflagsav, ConstantPoolCacheEntry::is_volatile_shift, notVolatile2);
// StoreLoad barrier after volatile field write
volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
__ b(skipMembar);
// StoreStore barrier after final field write
__ bind(notVolatile2);
volatile_barrier(MacroAssembler::StoreStore, Rtemp);
__ bind(skipMembar);
{
Label notVolatile;
__ tbz(Rflags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
__ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
}
}
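Note how the old special-casing of final fields collapses here into a single is_volatile test. The barrier masks used throughout the new code follow the usual acquire/release recipe for Java volatiles: LoadLoad|LoadStore after a volatile load, StoreStore|LoadStore before a volatile store. In std::atomic terms (an analogy, not the generated code):

#include <atomic>

std::atomic<int> field;

int volatile_read() {
  // ldr/ldrb ...; membar(LoadLoad|LoadStore)   ~ acquire
  return field.load(std::memory_order_acquire);
}

void volatile_write(int v) {
  // membar(StoreStore|LoadStore); str ...      ~ release
  field.store(v, std::memory_order_release);
}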
@@ -3358,7 +3387,7 @@ void TemplateTable::jvmti_post_fast_field_mod(TosState state) {
__ push(state); // save value on the stack
// access constant pool cache entry
__ get_cache_entry_pointer_at_bcp(R2, R1, 1);
__ load_field_entry(R2, R1);
__ mov(R1, R3);
assert(Interpreter::expr_offset_in_bytes(0) == 0, "adjust this code");
@@ -3383,30 +3412,26 @@ void TemplateTable::fast_storefield(TosState state) {
jvmti_post_fast_field_mod(state);
const Register Rcache = R2_tmp;
const Register Rindex = R3_tmp;
const Register Roffset = R3_tmp;
const Register Rflags = Rtmp_save0; // R4/R19
const Register Robj = R5_tmp;
const Register Rcache = R4_tmp;
const Register Rindex = R3_tmp;
const Register Roffset = R2_tmp;
const Register Rtos_state = R3_tmp;
const Register Robj = R4_tmp; // Rcache is free at the time of loading Robj
const Register Rflags = R5_tmp;
// access constant pool cache
__ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
__ add(Rcache, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
__ load_field_entry(Rcache, Rindex);
load_resolved_field_entry(Robj, Rcache, Rtos_state, Roffset, Rflags);
// load flags to test volatile
__ ldr_u32(Rflags, Address(Rcache, base + ConstantPoolCacheEntry::flags_offset()));
// replace index with field offset from cache entry
__ ldr(Roffset, Address(Rcache, base + ConstantPoolCacheEntry::f2_offset()));
// Check for volatile store
Label notVolatile;
__ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
{
// Check for volatile store
Label notVolatile;
__ tbz(Rflags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
__ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
}
// Get object from stack
pop_and_check_object(Robj);
@@ -3446,23 +3471,14 @@ void TemplateTable::fast_storefield(TosState state) {
ShouldNotReachHere();
}
Label notVolatile2;
Label skipMembar;
__ tst(Rflags, 1 << ConstantPoolCacheEntry::is_volatile_shift |
1 << ConstantPoolCacheEntry::is_final_shift);
__ b(skipMembar, eq);
{
// Check for volatile store
Label notVolatile;
__ tbz(Rflags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
__ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
}
__ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile2);
// StoreLoad barrier after volatile field write
volatile_barrier(MacroAssembler::StoreLoad, Rtemp);
__ b(skipMembar);
// StoreStore barrier after final field write
__ bind(notVolatile2);
volatile_barrier(MacroAssembler::StoreStore, Rtemp);
__ bind(skipMembar);
}
void TemplateTable::fast_accessfield(TosState state) {
@@ -3476,7 +3492,7 @@ void TemplateTable::fast_accessfield(TosState state) {
__ ldr_global_s32(R2, (address) JvmtiExport::get_field_access_count_addr());
__ cbz(R2, done);
// access constant pool cache entry
__ get_cache_entry_pointer_at_bcp(R2, R1, 1);
__ load_field_entry(R2, R1);
__ push_ptr(R0_tos); // save object pointer before call_VM() clobbers it
__ verify_oop(R0_tos);
__ mov(R1, R0_tos);
@@ -3495,16 +3511,15 @@ void TemplateTable::fast_accessfield(TosState state) {
const Register Roffset = R3_tmp;
// access constant pool cache
__ get_cache_and_index_at_bcp(Rcache, Rindex, 1);
__ load_field_entry(Rcache, Rindex);
// replace index with field offset from cache entry
__ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
__ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
__ ldr(Roffset, Address(Rcache, ResolvedFieldEntry::field_offset_offset()));
// load flags to test volatile
__ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
__ ldrb(Rflags, Address(Rcache, ResolvedFieldEntry::flags_offset()));
__ verify_oop(Robj);
__ null_check(Robj, Rtemp);
__ null_check(Robj);
Address addr = Address(Robj, Roffset);
// access field
@@ -3538,13 +3553,13 @@ void TemplateTable::fast_accessfield(TosState state) {
ShouldNotReachHere();
}
// Check for volatile load
Label notVolatile;
__ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
{
// Check for volatile load
Label notVolatile;
__ tbz(Rflags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
__ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
}
}
@@ -3562,12 +3577,11 @@ void TemplateTable::fast_xaccess(TosState state) {
__ ldr(Robj, aaddress(0));
// access constant pool cache
__ get_cache_and_index_at_bcp(Rcache, Rindex, 2);
__ add(Rtemp, Rcache, AsmOperand(Rindex, lsl, LogBytesPerWord));
__ ldr(Roffset, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset()));
__ load_field_entry(Rcache, Rindex, 2);
__ ldr(Roffset, Address(Rcache, ResolvedFieldEntry::field_offset_offset()));
// load flags to test volatile
__ ldr_u32(Rflags, Address(Rtemp, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
__ ldrb(Rflags, Address(Rcache, ResolvedFieldEntry::flags_offset()));
// make sure exception is reported in correct bcp range (getfield is next instruction)
__ add(Rbcp, Rbcp, 1);
@@ -3590,13 +3604,13 @@ void TemplateTable::fast_xaccess(TosState state) {
ShouldNotReachHere();
}
// Check for volatile load
Label notVolatile;
__ tbz(Rflags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
{
// Check for volatile load
Label notVolatile;
__ tbz(Rflags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
__ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
__ bind(notVolatile);
}
__ bind(done);
}
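A closing note on fast_xaccess: it implements the fused aload_0 + getfield templates, and the bcp increment before the memory access ensures an implicit null-pointer fault is attributed to the getfield's bci rather than the aload_0. A sketch of the idea, with an explicit pointer standing in for the trapping access:

#include <cstdint>

struct FrameSketch { const uint8_t* bcp; };

int32_t fast_xaccess_sketch(FrameSketch& f, const int32_t* obj, int32_t offset) {
  f.bcp += 1;  // point at the getfield: a fault now maps to the right bci
  int32_t v = *(const int32_t*)((const char*)obj + offset);  // may trap on null
  f.bcp -= 1;  // restore; normal dispatch advances past both bytecodes at once
  return v;
}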