8345047: RISC-V: Remove explicit use of AvoidUnalignedAccesses in interpreter

Reviewed-by: mli, fjiang
Fei Yang 2024-11-27 10:26:09 +00:00
parent b3986bdbdb
commit 82137db24d
2 changed files with 13 additions and 36 deletions


@@ -179,15 +179,10 @@ void InterpreterMacroAssembler::check_and_handle_earlyret(Register java_thread)
 void InterpreterMacroAssembler::get_unsigned_2_byte_index_at_bcp(Register reg, int bcp_offset) {
   assert(bcp_offset >= 0, "bcp is still pointing to start of bytecode");
-  if (AvoidUnalignedAccesses && (bcp_offset % 2)) {
-    lbu(t1, Address(xbcp, bcp_offset));
-    lbu(reg, Address(xbcp, bcp_offset + 1));
-    slli(t1, t1, 8);
-    add(reg, reg, t1);
-  } else {
-    lhu(reg, Address(xbcp, bcp_offset));
-    revb_h_h_u(reg, reg);
-  }
 }

 void InterpreterMacroAssembler::get_dispatch() {
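For context: the removed branch assembled the big-endian u2 operand at bcp_offset from two single-byte loads when unaligned accesses had to be avoided and the offset was odd, and otherwise used one half-word load followed by a byte reverse. The replacement lines are not captured in this view. Below is a minimal plain-C++ sketch, not HotSpot code, of what the two removed strategies compute; the function names are made up for illustration.

#include <cstdint>
#include <cstring>

// Byte-by-byte variant (old AvoidUnalignedAccesses path): safe at any alignment.
static uint16_t read_be_u2_bytewise(const uint8_t* bcp, int offset) {
  uint16_t hi = bcp[offset];          // lbu t1, offset(xbcp)
  uint16_t lo = bcp[offset + 1];      // lbu reg, offset+1(xbcp)
  return (uint16_t)((hi << 8) | lo);  // slli t1, t1, 8; add reg, reg, t1
}

// Half-word variant (old fast path): one 16-bit load, then reverse the bytes.
static uint16_t read_be_u2_halfword(const uint8_t* bcp, int offset) {
  uint16_t v;
  std::memcpy(&v, bcp + offset, sizeof(v));  // stands in for lhu (little-endian load)
  return (uint16_t)((v >> 8) | (v << 8));    // revb_h_h_u: byte-reverse, zero-extend
}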
@@ -200,15 +195,7 @@ void InterpreterMacroAssembler::get_cache_index_at_bcp(Register index,
                                                        size_t index_size) {
   assert(bcp_offset > 0, "bcp is still pointing to start of bytecode");
   if (index_size == sizeof(u2)) {
-    if (AvoidUnalignedAccesses) {
-      assert_different_registers(index, tmp);
-      load_unsigned_byte(index, Address(xbcp, bcp_offset));
-      load_unsigned_byte(tmp, Address(xbcp, bcp_offset + 1));
-      slli(tmp, tmp, 8);
-      add(index, index, tmp);
-    } else {
-      load_unsigned_short(index, Address(xbcp, bcp_offset));
-    }
+    load_short_misaligned(index, Address(xbcp, bcp_offset), tmp, false);
   } else if (index_size == sizeof(u4)) {
     load_int_misaligned(index, Address(xbcp, bcp_offset), tmp, false);
   } else if (index_size == sizeof(u1)) {
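The surviving calls delegate the alignment decision to the load_short_misaligned and load_int_misaligned MacroAssembler helpers; the trailing false is assumed here to request an unsigned (zero-extended) load. As a rough mental model only, assuming such a helper checks AvoidUnalignedAccesses internally, a plain-C++ equivalent of a misaligned-tolerant, native-endian 16-bit load could look like the sketch below; the name and flags are hypothetical, not the actual HotSpot implementation.

#include <cstdint>
#include <cstring>

// Hypothetical model of a misaligned-tolerant, native-endian 16-bit load.
// 'avoid_unaligned' mirrors the AvoidUnalignedAccesses flag; 'is_signed'
// mirrors the assumed meaning of the trailing bool in the calls above.
static int64_t load_short_misaligned_model(const uint8_t* addr,
                                            bool is_signed,
                                            bool avoid_unaligned) {
  uint16_t v;
  if (avoid_unaligned) {
    // Two byte loads; the second byte is the high byte (little-endian order).
    v = (uint16_t)(addr[0] | (addr[1] << 8));
  } else {
    // One half-word load; memcpy stands in for lhu/lh on hardware that
    // handles unaligned accesses efficiently.
    std::memcpy(&v, addr, sizeof(v));
  }
  return is_signed ? (int64_t)(int16_t)v : (int64_t)v;
}

Unlike the raw bytecode operand in the previous hunk, the value here is not byte-reversed, which is consistent with these cache indices being stored in native byte order after bytecode rewriting.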


@@ -292,15 +292,10 @@ void TemplateTable::bipush() {
 void TemplateTable::sipush() {
   transition(vtos, itos);
-  if (AvoidUnalignedAccesses) {
-    __ load_signed_byte(x10, at_bcp(1));
-    __ load_unsigned_byte(t1, at_bcp(2));
-    __ slli(x10, x10, 8);
-    __ add(x10, x10, t1);
-  } else {
-    __ load_unsigned_short(x10, at_bcp(1));
-    __ revb_h_h(x10, x10); // reverse bytes in half-word and sign-extend
-  }
 }

 void TemplateTable::ldc(LdcType type) {
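For reference, sipush pushes a signed 16-bit immediate stored big-endian at bcp + 1. The removed code either sign-extended the high byte and merged in the unsigned low byte, or loaded a half-word and used revb_h_h, which reverses the bytes and sign-extends; the replacement lines are not shown above. A plain-C++ model of what the two removed paths compute, for illustration only:

#include <cstdint>
#include <cstring>

// Byte-by-byte path: sign-extend the high byte, then merge the low byte.
static int32_t sipush_operand_bytewise(const uint8_t* bcp) {
  int32_t hi = (int8_t)bcp[1];  // load_signed_byte at_bcp(1)
  int32_t lo = bcp[2];          // load_unsigned_byte at_bcp(2)
  return hi * 256 + lo;         // slli x10, x10, 8; add x10, x10, t1
}

// Half-word path: little-endian load, byte reverse, then sign-extend.
static int32_t sipush_operand_halfword(const uint8_t* bcp) {
  uint16_t v;
  std::memcpy(&v, bcp + 1, sizeof(v));                 // load_unsigned_short
  uint16_t swapped = (uint16_t)((v >> 8) | (v << 8));
  return (int16_t)swapped;                             // revb_h_h: reverse and sign-extend
}

Both functions return the same value; for example, bytes 0xFF 0xFE at bcp + 1 yield -2.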
@@ -1626,15 +1621,10 @@ void TemplateTable::branch(bool is_jsr, bool is_wide) {
   // load branch displacement
   if (!is_wide) {
-    if (AvoidUnalignedAccesses) {
-      __ lb(x12, at_bcp(1));
-      __ lbu(t1, at_bcp(2));
-      __ slli(x12, x12, 8);
-      __ add(x12, x12, t1);
-    } else {
-      __ lhu(x12, at_bcp(1));
-      __ revb_h_h(x12, x12); // reverse bytes in half-word and sign-extend
-    }
   } else {
     __ lwu(x12, at_bcp(1));
     __ revb_w_w(x12, x12); // reverse bytes in word and sign-extend
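The wide case above reads a signed, big-endian 32-bit displacement with lwu followed by revb_w_w. A plain-C++ model of that sequence, for illustration only:

#include <cstdint>
#include <cstring>

// Model of the wide-branch displacement read: lwu, then revb_w_w
// (reverse the four bytes and sign-extend to 64 bits).
static int64_t wide_branch_displacement(const uint8_t* bcp) {
  uint32_t v;
  std::memcpy(&v, bcp + 1, sizeof(v));        // lwu: little-endian load, zero-extended
  v = (v >> 24) | ((v >> 8) & 0x0000FF00u) |
      ((v << 8) & 0x00FF0000u) | (v << 24);   // byte reverse, as revb_w_w does
  return (int64_t)(int32_t)v;                 // then sign-extend the reversed word
}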