8334430: Clean up nativeInst_x86.*

Reviewed-by: jwaters, jiefu
This commit is contained in:
Vladimir Kozlov 2024-06-18 14:48:46 +00:00
parent 8bc2fbe578
commit 6f860f8f6f
2 changed files with 2 additions and 331 deletions

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2024, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -41,143 +41,6 @@ void NativeInstruction::wrote(int offset) {
ICache::invalidate_word(addr_at(offset));
}
#ifdef ASSERT
void NativeLoadGot::report_and_fail() const {
  // Dump the raw instruction bytes before aborting so the mismatch is debuggable.
  const int prefix = has_rex ? ubyte_at(0) : 0;
  const int opcode = ubyte_at(rex_size);
  const int modrm  = ubyte_at(rex_size + 1);
  tty->print_cr("Addr: " INTPTR_FORMAT " Code: %x %x %x", p2i(instruction_address()),
                prefix, opcode, modrm);
  fatal("not a indirect rip mov to rbx");
}
void NativeLoadGot::verify() const {
if (has_rex) {
int rex = ubyte_at(0);
if (rex != rex_prefix && rex != rex_b_prefix) {
report_and_fail();
}
}
int inst = ubyte_at(rex_size);
if (inst != instruction_code) {
report_and_fail();
}
int modrm = ubyte_at(rex_size + 1);
if (modrm != modrm_rbx_code && modrm != modrm_rax_code) {
report_and_fail();
}
}
#endif
intptr_t NativeLoadGot::data() const {
  // Read the value currently stored in the GOT slot this load references.
  intptr_t* slot = (intptr_t *) got_address();
  return *slot;
}
address NativePltCall::destination() const {
  // The PLT call lands on a GOT jump; that jump's target is the real destination.
  return nativeGotJump_at(plt_jump())->destination();
}
address NativePltCall::plt_entry() const {
  // The rel32 displacement is relative to the call's return address.
  address base = return_address();
  return base + displacement();
}
address NativePltCall::plt_jump() const {
  address entry = plt_entry();
  // Virtual PLT code has a move instruction first; step over it to reach the jump.
  if (!((NativeGotJump*)entry)->is_GotJump()) {
    return nativeLoadGot_at(entry)->next_instruction_address();
  }
  return entry;
}
address NativePltCall::plt_load_got() const {
  address entry = plt_entry();
  if (((NativeGotJump*)entry)->is_GotJump()) {
    // Static PLT code has the move instruction second (from the c2i stub).
    return nativeGotJump_at(entry)->next_instruction_address();
  }
  // Virtual PLT code has the move instruction first.
  return entry;
}
// Returns the address of the c2i stub's load instruction.
address NativePltCall::plt_c2i_stub() const {
address entry = plt_load_got();
// This method should be called only for static calls which has C2I stub.
// NOTE(review): 'load' is otherwise unused — nativeLoadGot_at() runs
// NativeLoadGot::verify() in debug builds, so the call is kept purely as a
// sanity check that 'entry' really points at a GOT load.
NativeLoadGot* load = nativeLoadGot_at(entry);
return entry;
}
address NativePltCall::plt_resolve_call() const {
  // Skip past the first GOT jump; what follows is either the resolver's
  // GOT jump itself, or a two-instruction c2i stub (load + jump) before it.
  NativeGotJump* first_jump = nativeGotJump_at(plt_jump());
  address entry = first_jump->next_instruction_address();
  if (((NativeGotJump*)entry)->is_GotJump()) {
    return entry;
  }
  // c2i stub 2 instructions
  address after_load = nativeLoadGot_at(entry)->next_instruction_address();
  return nativeGotJump_at(after_load)->next_instruction_address();
}
// Points this PLT call back at the resolver stub (by rewriting the GOT slot),
// undoing any previously-resolved destination.
void NativePltCall::reset_to_plt_resolve_call() {
set_destination_mt_safe(plt_resolve_call());
}
void NativePltCall::set_destination_mt_safe(address dest) {
  // We only rewrite the value in the GOT slot, which should always be aligned.
  NativeGotJump* got_jump = nativeGotJump_at(plt_jump());
  address* slot = (address *) got_jump->got_address();
  *slot = dest;
}
void NativePltCall::set_stub_to_clean() {
NativeLoadGot* method_loader = nativeLoadGot_at(plt_c2i_stub());
NativeGotJump* jump = nativeGotJump_at(method_loader->next_instruction_address());
method_loader->set_data(0);
jump->set_jump_destination((address)-1);
}
void NativePltCall::verify() const {
  // Make sure code pattern is actually a call rip+off32 instruction.
  const int opcode = ubyte_at(0);
  if (opcode == instruction_code) {
    return;
  }
  tty->print_cr("Addr: " INTPTR_FORMAT " Code: 0x%x", p2i(instruction_address()),
                opcode);
  fatal("not a call rip+off32");
}
address NativeGotJump::destination() const {
  // The jump target lives in the GOT slot this instruction references.
  address* got_entry = (address *) got_address();
  address target = *got_entry;
  return target;
}
#ifdef ASSERT
void NativeGotJump::report_and_fail() const {
  // Dump the raw instruction bytes before aborting so the mismatch is debuggable.
  const int prefix = has_rex() ? ubyte_at(0) : 0;
  const int opcode = ubyte_at(rex_size());
  const int modrm  = ubyte_at(rex_size() + 1);
  tty->print_cr("Addr: " INTPTR_FORMAT " Code: %x %x %x", p2i(instruction_address()),
                prefix, opcode, modrm);
  fatal("not a indirect rip jump");
}
void NativeGotJump::verify() const {
  // Validate prefix (when present), opcode, and ModRM byte in turn;
  // any mismatch reports the bytes and aborts.
  if (has_rex() && ubyte_at(0) != rex_prefix) {
    report_and_fail();
  }
  if (ubyte_at(rex_size()) != instruction_code) {
    report_and_fail();
  }
  if (ubyte_at(rex_size() + 1) != modrm_code) {
    report_and_fail();
  }
}
#endif
void NativeCall::verify() {
// Make sure code pattern is actually a call imm32 instruction.
int inst = ubyte_at(0);
@ -565,28 +428,6 @@ void NativeJump::patch_verified_entry(address entry, address verified_entry, add
}
address NativeFarJump::jump_destination() const {
  // A far jump begins with a mov of the 64-bit target into a register;
  // that immediate is the destination.
  NativeMovConstReg* target_mov = nativeMovConstReg_at(addr_at(0));
  return (address) target_mov->data();
}
void NativeFarJump::verify() {
if (is_far_jump()) {
NativeMovConstReg* mov = nativeMovConstReg_at(addr_at(0));
NativeInstruction* jmp = nativeInstruction_at(mov->next_instruction_address());
if (jmp->is_jump_reg()) return;
}
fatal("not a jump instruction");
}
void NativePopReg::insert(address code_pos, Register reg) {
  assert(reg->encoding() < 8, "no space for REX");
  assert(NativePopReg::instruction_size == sizeof(char), "right address unit for update");
  // Encode 'pop reg' by folding the register number into the opcode byte,
  // then flush the instruction cache for the patched byte.
  u_char opcode = (u_char)(instruction_code | reg->encoding());
  *code_pos = opcode;
  ICache::invalidate_range(code_pos, instruction_size);
}
void NativeIllegalInstruction::insert(address code_pos) {
assert(NativeIllegalInstruction::instruction_size == sizeof(short), "right address unit for update");
*(short *)code_pos = instruction_code;

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2024, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -37,7 +37,6 @@
// - - NativeMovRegMem
// - - NativeMovRegMemPatching
// - - NativeJump
// - - NativeFarJump
// - - NativeIllegalOpCode
// - - NativeGeneralJump
// - - NativeReturn
@ -64,7 +63,6 @@ class NativeInstruction {
inline bool is_return();
inline bool is_jump();
inline bool is_jump_reg();
inline bool is_far_jump();
inline bool is_cond_jump();
inline bool is_safepoint_poll();
inline bool is_mov_literal64();
@ -104,47 +102,6 @@ inline NativeInstruction* nativeInstruction_at(address address) {
return inst;
}
// A call through the Procedure Linkage Table: a 'call rel32' (0xE8) whose
// eventual target is resolved via the PLT/GOT machinery. Accessors below walk
// from the call site to the PLT entry, its GOT jump, and (for static calls)
// the c2i stub.
class NativePltCall: public NativeInstruction {
public:
enum Intel_specific_constants {
instruction_code = 0xE8,
instruction_size = 5,
instruction_offset = 0,
displacement_offset = 1,
return_address_offset = 5
};
address instruction_address() const { return addr_at(instruction_offset); }
address next_instruction_address() const { return addr_at(return_address_offset); }
address displacement_address() const { return addr_at(displacement_offset); }
int displacement() const { return (jint) int_at(displacement_offset); }
address return_address() const { return addr_at(return_address_offset); }
// Final jump target, read through the GOT slot.
address destination() const;
// PLT navigation helpers (see nativeInst_x86.cpp for the layout they assume).
address plt_entry() const;
address plt_jump() const;
address plt_load_got() const;
address plt_resolve_call() const;
address plt_c2i_stub() const;
// Mutators: clear the c2i stub / point the call back at the resolver /
// retarget the call by rewriting its GOT slot.
void set_stub_to_clean();
void reset_to_plt_resolve_call();
void set_destination_mt_safe(address dest);
void verify() const;
};
// Views the instruction at 'address' as a NativePltCall (verified in debug builds).
inline NativePltCall* nativePltCall_at(address address) {
  NativePltCall* result = (NativePltCall*) address;
#ifdef ASSERT
  result->verify();
#endif
  return result;
}
// Views the PLT call that ends at 'addr' (i.e. whose return address is 'addr').
inline NativePltCall* nativePltCall_before(address addr) {
  return nativePltCall_at(addr - NativePltCall::instruction_size);
}
class NativeCall;
inline NativeCall* nativeCall_at(address address);
// The NativeCall is an abstraction for accessing/manipulating native call imm32/rel32off
@ -426,57 +383,6 @@ class NativeLoadAddress: public NativeMovRegMem {
}
};
// destination is rbx or rax
// mov rbx, [rip + offset]
// Wraps a RIP-relative 'mov' that loads a value from a GOT slot.
class NativeLoadGot: public NativeInstruction {
#ifdef AMD64
// On 64-bit the instruction carries a mandatory one-byte REX prefix.
static const bool has_rex = true;
static const int rex_size = 1;
#else
static const bool has_rex = false;
static const int rex_size = 0;
#endif
enum Intel_specific_constants {
rex_prefix = 0x48,
rex_b_prefix = 0x49,
instruction_code = 0x8b,
modrm_rbx_code = 0x1d,
modrm_rax_code = 0x05,
instruction_length = 6 + rex_size,
offset_offset = 2 + rex_size
};
// Signed 32-bit RIP-relative displacement encoded in the instruction.
int rip_offset() const { return int_at(offset_offset); }
address return_address() const { return addr_at(instruction_length); }
// Absolute address of the GOT slot: RIP (next instruction) + displacement.
address got_address() const { return return_address() + rip_offset(); }
#ifdef ASSERT
void report_and_fail() const;
address instruction_address() const { return addr_at(0); }
#endif
public:
address next_instruction_address() const { return return_address(); }
// Reads the value currently stored in the referenced GOT slot.
intptr_t data() const;
// Overwrites the referenced GOT slot with 'data'.
void set_data(intptr_t data) {
intptr_t *addr = (intptr_t *) got_address();
*addr = data;
}
DEBUG_ONLY( void verify() const );
};
// Views the instruction at 'addr' as a NativeLoadGot (verified in debug builds).
inline NativeLoadGot* nativeLoadGot_at(address addr) {
  NativeLoadGot* result = (NativeLoadGot*) addr;
#ifdef ASSERT
  result->verify();
#endif
  return result;
}
// jump rel32off
class NativeJump: public NativeInstruction {
public:
enum Intel_specific_constants {
@ -532,26 +438,6 @@ inline NativeJump* nativeJump_at(address address) {
return jump;
}
// far jump reg
// Wraps the two-instruction sequence 'mov reg, imm64; jmp reg' used for
// jumps whose target does not fit a rel32 displacement.
class NativeFarJump: public NativeInstruction {
public:
// Target address: the imm64 loaded by the leading mov.
address jump_destination() const;
// Creation
inline friend NativeFarJump* nativeFarJump_at(address address);
void verify();
};
// Views the instruction at 'address' as a NativeFarJump (verified in debug builds).
inline NativeFarJump* nativeFarJump_at(address address) {
  NativeFarJump* result = (NativeFarJump*) address;
#ifdef ASSERT
  result->verify();
#endif
  return result;
}
// Handles all kinds of jump on Intel. Long/far, conditional/unconditional
class NativeGeneralJump: public NativeInstruction {
public:
@ -585,61 +471,6 @@ inline NativeGeneralJump* nativeGeneralJump_at(address address) {
return jump;
}
// Wraps an indirect RIP-relative jump through a GOT slot:
// 'jmp [rip + offset]' (0xFF /4), optionally preceded by a REX prefix.
class NativeGotJump: public NativeInstruction {
enum Intel_specific_constants {
rex_prefix = 0x41,
instruction_code = 0xff,
modrm_code = 0x25,
instruction_size = 6,
rip_offset = 2
};
// The REX prefix is optional, so offsets below shift by one when present.
bool has_rex() const { return ubyte_at(0) == rex_prefix; }
int rex_size() const { return has_rex() ? 1 : 0; }
address return_address() const { return addr_at(instruction_size + rex_size()); }
int got_offset() const { return (jint) int_at(rip_offset + rex_size()); }
#ifdef ASSERT
void report_and_fail() const;
address instruction_address() const { return addr_at(0); }
#endif
public:
// Absolute address of the GOT slot: RIP (next instruction) + displacement.
address got_address() const { return return_address() + got_offset(); }
address next_instruction_address() const { return return_address(); }
// True if the byte after any REX prefix is the 0xFF jump opcode.
bool is_GotJump() const { return ubyte_at(rex_size()) == instruction_code; }
// Current jump target, read from the GOT slot.
address destination() const;
// Retargets the jump by overwriting the GOT slot.
void set_jump_destination(address dest) {
address *got_entry = (address *) got_address();
*got_entry = dest;
}
DEBUG_ONLY( void verify() const; )
};
// Views the instruction at 'addr' as a NativeGotJump (verified in debug builds).
inline NativeGotJump* nativeGotJump_at(address addr) {
  NativeGotJump* result = (NativeGotJump*)(addr);
#ifdef ASSERT
  result->verify();
#endif
  return result;
}
// Wraps a one-byte 'pop reg' instruction (opcode 0x58 + register number).
class NativePopReg : public NativeInstruction {
public:
enum Intel_specific_constants {
instruction_code = 0x58,
instruction_size = 1,
instruction_offset = 0,
data_offset = 1,
next_instruction_offset = 1
};
// Insert a pop instruction
static void insert(address code_pos, Register reg);
};
class NativeIllegalInstruction: public NativeInstruction {
public:
enum Intel_specific_constants {
@ -702,7 +533,6 @@ inline bool NativeInstruction::is_jump_reg() {
if (ubyte_at(0) == Assembler::REX_B) pos = 1;
return ubyte_at(pos) == 0xFF && (ubyte_at(pos + 1) & 0xF0) == 0xE0;
}
// A far jump starts with 'mov reg, imm64' carrying the target, so that mov is
// the pattern recognized here; the trailing 'jmp reg' is checked separately
// (see NativeFarJump::verify).
inline bool NativeInstruction::is_far_jump() { return is_mov_literal64(); }
// Conditional jump: either the two-byte 0x0F 0x8x (Jcc rel32) form or the
// one-byte 0x7x (Jcc rel8) form.
inline bool NativeInstruction::is_cond_jump() { return (int_at(0) & 0xF0FF) == 0x800F /* long jump */ ||
(ubyte_at(0) & 0xF0) == 0x70; /* short jump */ }
inline bool NativeInstruction::is_safepoint_poll() {