diff --git a/src/hotspot/cpu/riscv/interp_masm_riscv.cpp b/src/hotspot/cpu/riscv/interp_masm_riscv.cpp
index 897222ef995..ece872ac034 100644
--- a/src/hotspot/cpu/riscv/interp_masm_riscv.cpp
+++ b/src/hotspot/cpu/riscv/interp_masm_riscv.cpp
@@ -179,15 +179,10 @@ void InterpreterMacroAssembler::check_and_handle_earlyret(Register java_thread)
 
 void InterpreterMacroAssembler::get_unsigned_2_byte_index_at_bcp(Register reg, int bcp_offset) {
   assert(bcp_offset >= 0, "bcp is still pointing to start of bytecode");
-  if (AvoidUnalignedAccesses && (bcp_offset % 2)) {
-    lbu(t1, Address(xbcp, bcp_offset));
-    lbu(reg, Address(xbcp, bcp_offset + 1));
-    slli(t1, t1, 8);
-    add(reg, reg, t1);
-  } else {
-    lhu(reg, Address(xbcp, bcp_offset));
-    revb_h_h_u(reg, reg);
-  }
+  lbu(t1, Address(xbcp, bcp_offset));
+  lbu(reg, Address(xbcp, bcp_offset + 1));
+  slli(t1, t1, 8);
+  add(reg, reg, t1);
 }
 
 void InterpreterMacroAssembler::get_dispatch() {
@@ -200,15 +195,7 @@ void InterpreterMacroAssembler::get_cache_index_at_bcp(Register index,
                                                        size_t index_size) {
   assert(bcp_offset > 0, "bcp is still pointing to start of bytecode");
   if (index_size == sizeof(u2)) {
-    if (AvoidUnalignedAccesses) {
-      assert_different_registers(index, tmp);
-      load_unsigned_byte(index, Address(xbcp, bcp_offset));
-      load_unsigned_byte(tmp, Address(xbcp, bcp_offset + 1));
-      slli(tmp, tmp, 8);
-      add(index, index, tmp);
-    } else {
-      load_unsigned_short(index, Address(xbcp, bcp_offset));
-    }
+    load_short_misaligned(index, Address(xbcp, bcp_offset), tmp, false);
   } else if (index_size == sizeof(u4)) {
     load_int_misaligned(index, Address(xbcp, bcp_offset), tmp, false);
   } else if (index_size == sizeof(u1)) {
diff --git a/src/hotspot/cpu/riscv/templateTable_riscv.cpp b/src/hotspot/cpu/riscv/templateTable_riscv.cpp
index 52b33c62616..62dc952bde0 100644
--- a/src/hotspot/cpu/riscv/templateTable_riscv.cpp
+++ b/src/hotspot/cpu/riscv/templateTable_riscv.cpp
@@ -292,15 +292,10 @@ void TemplateTable::bipush() {
 
 void TemplateTable::sipush() {
   transition(vtos, itos);
-  if (AvoidUnalignedAccesses) {
-    __ load_signed_byte(x10, at_bcp(1));
-    __ load_unsigned_byte(t1, at_bcp(2));
-    __ slli(x10, x10, 8);
-    __ add(x10, x10, t1);
-  } else {
-    __ load_unsigned_short(x10, at_bcp(1));
-    __ revb_h_h(x10, x10); // reverse bytes in half-word and sign-extend
-  }
+  __ load_signed_byte(x10, at_bcp(1));
+  __ load_unsigned_byte(t1, at_bcp(2));
+  __ slli(x10, x10, 8);
+  __ add(x10, x10, t1);
 }
 
 void TemplateTable::ldc(LdcType type) {
@@ -1626,15 +1621,10 @@ void TemplateTable::branch(bool is_jsr, bool is_wide) {
 
   // load branch displacement
   if (!is_wide) {
-    if (AvoidUnalignedAccesses) {
-      __ lb(x12, at_bcp(1));
-      __ lbu(t1, at_bcp(2));
-      __ slli(x12, x12, 8);
-      __ add(x12, x12, t1);
-    } else {
-      __ lhu(x12, at_bcp(1));
-      __ revb_h_h(x12, x12); // reverse bytes in half-word and sign-extend
-    }
+    __ lb(x12, at_bcp(1));
+    __ lbu(t1, at_bcp(2));
+    __ slli(x12, x12, 8);
+    __ add(x12, x12, t1);
   } else {
     __ lwu(x12, at_bcp(1));
     __ revb_w_w(x12, x12); // reverse bytes in word and sign-extend
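
Note on the byte-wise sequence (explanatory sketch, not part of the patch): each changed site now reads the operand from the bytecode stream one byte at a time and reassembles the big-endian u2 with a shift and an add, so the interpreter never issues a 16-bit load that could be misaligned. A minimal C++ sketch of the same computation, using hypothetical names (read_u2_be, read_s2_be, and bcp standing in for the bytecode pointer xbcp):

  #include <cstdint>

  // Mirrors the lbu/lbu/slli/add sequence used for unsigned operands
  // (get_unsigned_2_byte_index_at_bcp, load_short_misaligned with sign=false).
  static inline uint16_t read_u2_be(const uint8_t* bcp, int offset) {
    uint32_t hi = bcp[offset];          // lbu t1, offset(xbcp)
    uint32_t lo = bcp[offset + 1];      // lbu reg, offset+1(xbcp)
    return (uint16_t)((hi << 8) + lo);  // slli t1, t1, 8; add reg, reg, t1
  }

  // Mirrors the lb/lbu variant used by sipush and branch, where the high
  // byte is sign-extended so the result is a signed 16-bit value.
  static inline int16_t read_s2_be(const uint8_t* bcp, int offset) {
    int32_t hi = (int8_t)bcp[offset];   // lb x10/x12, offset(xbcp)
    int32_t lo = bcp[offset + 1];       // lbu t1, offset+1(xbcp)
    return (int16_t)(hi * 256 + lo);    // slli; add
  }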