8299229: [JVMCI] add support for UseZGC
Reviewed-by: eosterlund, kvn
This commit is contained in:
parent
14df5c130e
commit
5e1fe43080
@ -228,7 +228,7 @@ public:
|
||||
static void spatch(address a, int msb, int lsb, int64_t val) {
|
||||
int nbits = msb - lsb + 1;
|
||||
int64_t chk = val >> (nbits - 1);
|
||||
guarantee (chk == -1 || chk == 0, "Field too big for insn");
|
||||
guarantee (chk == -1 || chk == 0, "Field too big for insn at " INTPTR_FORMAT, p2i(a));
|
||||
unsigned uval = val;
|
||||
unsigned mask = checked_cast<unsigned>(right_n_bits(nbits));
|
||||
uval &= mask;
|
||||
|
@ -32,17 +32,21 @@
|
||||
#include "memory/resourceArea.hpp"
|
||||
#include "runtime/frame.inline.hpp"
|
||||
#include "runtime/javaThread.hpp"
|
||||
#include "runtime/sharedRuntime.hpp"
|
||||
#include "runtime/registerMap.hpp"
|
||||
#include "runtime/sharedRuntime.hpp"
|
||||
#include "utilities/align.hpp"
|
||||
#include "utilities/debug.hpp"
|
||||
#include "utilities/formatBuffer.hpp"
|
||||
#if INCLUDE_JVMCI
|
||||
#include "jvmci/jvmciRuntime.hpp"
|
||||
#endif
|
||||
|
||||
static int slow_path_size(nmethod* nm) {
|
||||
// The slow path code is out of line with C2
|
||||
return nm->is_compiled_by_c2() ? 0 : 6;
|
||||
}
|
||||
|
||||
// This is the offset of the entry barrier from where the frame is completed.
|
||||
// This is the offset of the entry barrier relative to where the frame is completed.
|
||||
// If any code changes between the end of the verified entry where the entry
|
||||
// barrier resides, and the completion of the frame, then
|
||||
// NativeNMethodCmpBarrier::verify() will immediately complain when it does
|
||||
@ -62,40 +66,67 @@ static int entry_barrier_offset(nmethod* nm) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
class NativeNMethodBarrier: public NativeInstruction {
|
||||
address instruction_address() const { return addr_at(0); }
|
||||
class NativeNMethodBarrier {
|
||||
address _instruction_address;
|
||||
int* _guard_addr;
|
||||
nmethod* _nm;
|
||||
|
||||
address instruction_address() const { return _instruction_address; }
|
||||
|
||||
int *guard_addr() {
|
||||
return _guard_addr;
|
||||
}
|
||||
|
||||
int local_guard_offset(nmethod* nm) {
|
||||
// It's the last instruction
|
||||
return (-entry_barrier_offset(nm)) - 4;
|
||||
}
|
||||
|
||||
int *guard_addr(nmethod* nm) {
|
||||
if (nm->is_compiled_by_c2()) {
|
||||
// With c2 compiled code, the guard is out-of-line in a stub
|
||||
// We find it using the RelocIterator.
|
||||
RelocIterator iter(nm);
|
||||
while (iter.next()) {
|
||||
if (iter.type() == relocInfo::entry_guard_type) {
|
||||
entry_guard_Relocation* const reloc = iter.entry_guard_reloc();
|
||||
return reinterpret_cast<int*>(reloc->addr());
|
||||
}
|
||||
}
|
||||
ShouldNotReachHere();
|
||||
}
|
||||
return reinterpret_cast<int*>(instruction_address() + local_guard_offset(nm));
|
||||
}
|
||||
|
||||
public:
|
||||
int get_value(nmethod* nm) {
|
||||
return Atomic::load_acquire(guard_addr(nm));
|
||||
NativeNMethodBarrier(nmethod* nm): _nm(nm) {
|
||||
#if INCLUDE_JVMCI
|
||||
if (nm->is_compiled_by_jvmci()) {
|
||||
address pc = nm->code_begin() + nm->jvmci_nmethod_data()->nmethod_entry_patch_offset();
|
||||
RelocIterator iter(nm, pc, pc + 4);
|
||||
guarantee(iter.next(), "missing relocs");
|
||||
guarantee(iter.type() == relocInfo::section_word_type, "unexpected reloc");
|
||||
|
||||
_guard_addr = (int*) iter.section_word_reloc()->target();
|
||||
_instruction_address = pc;
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
_instruction_address = nm->code_begin() + nm->frame_complete_offset() + entry_barrier_offset(nm);
|
||||
if (nm->is_compiled_by_c2()) {
|
||||
// With c2 compiled code, the guard is out-of-line in a stub
|
||||
// We find it using the RelocIterator.
|
||||
RelocIterator iter(nm);
|
||||
while (iter.next()) {
|
||||
if (iter.type() == relocInfo::entry_guard_type) {
|
||||
entry_guard_Relocation* const reloc = iter.entry_guard_reloc();
|
||||
_guard_addr = reinterpret_cast<int*>(reloc->addr());
|
||||
return;
|
||||
}
|
||||
}
|
||||
ShouldNotReachHere();
|
||||
}
|
||||
_guard_addr = reinterpret_cast<int*>(instruction_address() + local_guard_offset(nm));
|
||||
}
|
||||
}
|
||||
|
||||
void set_value(nmethod* nm, int value) {
|
||||
Atomic::release_store(guard_addr(nm), value);
|
||||
int get_value() {
|
||||
return Atomic::load_acquire(guard_addr());
|
||||
}
|
||||
|
||||
void verify() const;
|
||||
void set_value(int value) {
|
||||
Atomic::release_store(guard_addr(), value);
|
||||
}
|
||||
|
||||
bool check_barrier(err_msg& msg) const;
|
||||
void verify() const {
|
||||
err_msg msg("%s", "");
|
||||
assert(check_barrier(msg), "%s", msg.buffer());
|
||||
}
|
||||
};
|
||||
|
||||
// Store the instruction bitmask, bits and name for checking the barrier.
|
||||
@ -107,13 +138,14 @@ struct CheckInsn {
|
||||
|
||||
// The first instruction of the nmethod entry barrier is an ldr (literal)
|
||||
// instruction. Verify that it's really there, so the offsets are not skewed.
|
||||
void NativeNMethodBarrier::verify() const {
|
||||
bool NativeNMethodBarrier::check_barrier(err_msg& msg) const {
|
||||
uint32_t* addr = (uint32_t*) instruction_address();
|
||||
uint32_t inst = *addr;
|
||||
if ((inst & 0xff000000) != 0x18000000) {
|
||||
tty->print_cr("Addr: " INTPTR_FORMAT " Code: 0x%x", (intptr_t)addr, inst);
|
||||
fatal("not an ldr (literal) instruction.");
|
||||
msg.print("Addr: " INTPTR_FORMAT " Code: 0x%x not an ldr", p2i(addr), inst);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
@ -156,13 +188,6 @@ void BarrierSetNMethod::deoptimize(nmethod* nm, address* return_address_ptr) {
|
||||
new_frame->pc = SharedRuntime::get_handle_wrong_method_stub();
|
||||
}
|
||||
|
||||
static NativeNMethodBarrier* native_nmethod_barrier(nmethod* nm) {
|
||||
address barrier_address = nm->code_begin() + nm->frame_complete_offset() + entry_barrier_offset(nm);
|
||||
NativeNMethodBarrier* barrier = reinterpret_cast<NativeNMethodBarrier*>(barrier_address);
|
||||
debug_only(barrier->verify());
|
||||
return barrier;
|
||||
}
|
||||
|
||||
void BarrierSetNMethod::set_guard_value(nmethod* nm, int value) {
|
||||
if (!supports_entry_barrier(nm)) {
|
||||
return;
|
||||
@ -179,8 +204,8 @@ void BarrierSetNMethod::set_guard_value(nmethod* nm, int value) {
|
||||
bs_asm->increment_patching_epoch();
|
||||
}
|
||||
|
||||
NativeNMethodBarrier* barrier = native_nmethod_barrier(nm);
|
||||
barrier->set_value(nm, value);
|
||||
NativeNMethodBarrier barrier(nm);
|
||||
barrier.set_value(value);
|
||||
}
|
||||
|
||||
int BarrierSetNMethod::guard_value(nmethod* nm) {
|
||||
@ -188,6 +213,13 @@ int BarrierSetNMethod::guard_value(nmethod* nm) {
|
||||
return disarmed_guard_value();
|
||||
}
|
||||
|
||||
NativeNMethodBarrier* barrier = native_nmethod_barrier(nm);
|
||||
return barrier->get_value(nm);
|
||||
NativeNMethodBarrier barrier(nm);
|
||||
return barrier.get_value();
|
||||
}
|
||||
|
||||
#if INCLUDE_JVMCI
|
||||
bool BarrierSetNMethod::verify_barrier(nmethod* nm, err_msg& msg) {
|
||||
NativeNMethodBarrier barrier(nm);
|
||||
return barrier.check_barrier(msg);
|
||||
}
|
||||
#endif
|
||||
|
@ -58,8 +58,6 @@ const bool CCallingConventionRequiresIntsAsLongs = false;
|
||||
|
||||
#define SUPPORT_RESERVED_STACK_AREA
|
||||
|
||||
#define COMPRESSED_CLASS_POINTERS_DEPENDS_ON_COMPRESSED_OOPS false
|
||||
|
||||
#if defined(__APPLE__) || defined(_WIN64)
|
||||
#define R18_RESERVED
|
||||
#define R18_RESERVED_ONLY(code) code
|
||||
|
@ -122,27 +122,28 @@ void CodeInstaller::pd_relocate_ForeignCall(NativeInstruction* inst, jlong forei
|
||||
}
|
||||
|
||||
void CodeInstaller::pd_relocate_JavaMethod(CodeBuffer &cbuf, methodHandle& method, jint pc_offset, JVMCI_TRAPS) {
|
||||
NativeCall* call = NULL;
|
||||
switch (_next_call_type) {
|
||||
case INLINE_INVOKE:
|
||||
break;
|
||||
return;
|
||||
case INVOKEVIRTUAL:
|
||||
case INVOKEINTERFACE: {
|
||||
assert(!method->is_static(), "cannot call static method with invokeinterface");
|
||||
NativeCall* call = nativeCall_at(_instructions->start() + pc_offset);
|
||||
call = nativeCall_at(_instructions->start() + pc_offset);
|
||||
_instructions->relocate(call->instruction_address(), virtual_call_Relocation::spec(_invoke_mark_pc));
|
||||
call->trampoline_jump(cbuf, SharedRuntime::get_resolve_virtual_call_stub(), JVMCI_CHECK);
|
||||
break;
|
||||
}
|
||||
case INVOKESTATIC: {
|
||||
assert(method->is_static(), "cannot call non-static method with invokestatic");
|
||||
NativeCall* call = nativeCall_at(_instructions->start() + pc_offset);
|
||||
call = nativeCall_at(_instructions->start() + pc_offset);
|
||||
_instructions->relocate(call->instruction_address(), relocInfo::static_call_type);
|
||||
call->trampoline_jump(cbuf, SharedRuntime::get_resolve_static_call_stub(), JVMCI_CHECK);
|
||||
break;
|
||||
}
|
||||
case INVOKESPECIAL: {
|
||||
assert(!method->is_static(), "cannot call static method with invokespecial");
|
||||
NativeCall* call = nativeCall_at(_instructions->start() + pc_offset);
|
||||
call = nativeCall_at(_instructions->start() + pc_offset);
|
||||
_instructions->relocate(call->instruction_address(), relocInfo::opt_virtual_call_type);
|
||||
call->trampoline_jump(cbuf, SharedRuntime::get_resolve_opt_virtual_call_stub(), JVMCI_CHECK);
|
||||
break;
|
||||
@ -151,6 +152,15 @@ void CodeInstaller::pd_relocate_JavaMethod(CodeBuffer &cbuf, methodHandle& metho
|
||||
JVMCI_ERROR("invalid _next_call_type value");
|
||||
break;
|
||||
}
|
||||
if (Continuations::enabled()) {
|
||||
// Check for proper post_call_nop
|
||||
NativePostCallNop* nop = nativePostCallNop_at(call->next_instruction_address());
|
||||
if (nop == NULL) {
|
||||
JVMCI_ERROR("missing post call nop at offset %d", pc_offset);
|
||||
} else {
|
||||
_instructions->relocate(call->next_instruction_address(), relocInfo::post_call_nop_type);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void CodeInstaller::pd_relocate_poll(address pc, jint mark, JVMCI_TRAPS) {
|
||||
|
@ -59,6 +59,4 @@ const bool CCallingConventionRequiresIntsAsLongs = true;
|
||||
// Define the condition to use this -XX flag.
|
||||
#define USE_POLL_BIT_ONLY UseSIGTRAP
|
||||
|
||||
#define COMPRESSED_CLASS_POINTERS_DEPENDS_ON_COMPRESSED_OOPS false
|
||||
|
||||
#endif // CPU_PPC_GLOBALDEFINITIONS_PPC_HPP
|
||||
|
@ -36,6 +36,9 @@
|
||||
#include "runtime/registerMap.hpp"
|
||||
#include "utilities/align.hpp"
|
||||
#include "utilities/debug.hpp"
|
||||
#if INCLUDE_JVMCI
|
||||
#include "jvmci/jvmciRuntime.hpp"
|
||||
#endif
|
||||
|
||||
static int slow_path_size(nmethod* nm) {
|
||||
// The slow path code is out of line with C2.
|
||||
@ -57,40 +60,67 @@ static int entry_barrier_offset(nmethod* nm) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
class NativeNMethodBarrier: public NativeInstruction {
|
||||
address instruction_address() const { return addr_at(0); }
|
||||
class NativeNMethodBarrier {
|
||||
address _instruction_address;
|
||||
int* _guard_addr;
|
||||
nmethod* _nm;
|
||||
|
||||
address instruction_address() const { return _instruction_address; }
|
||||
|
||||
int *guard_addr() {
|
||||
return _guard_addr;
|
||||
}
|
||||
|
||||
int local_guard_offset(nmethod* nm) {
|
||||
// It's the last instruction
|
||||
return (-entry_barrier_offset(nm)) - 4;
|
||||
}
|
||||
|
||||
int *guard_addr(nmethod* nm) {
|
||||
if (nm->is_compiled_by_c2()) {
|
||||
// With c2 compiled code, the guard is out-of-line in a stub
|
||||
// We find it using the RelocIterator.
|
||||
RelocIterator iter(nm);
|
||||
while (iter.next()) {
|
||||
if (iter.type() == relocInfo::entry_guard_type) {
|
||||
entry_guard_Relocation* const reloc = iter.entry_guard_reloc();
|
||||
return reinterpret_cast<int*>(reloc->addr());
|
||||
}
|
||||
}
|
||||
ShouldNotReachHere();
|
||||
}
|
||||
return reinterpret_cast<int*>(instruction_address() + local_guard_offset(nm));
|
||||
}
|
||||
|
||||
public:
|
||||
int get_value(nmethod* nm) {
|
||||
return Atomic::load_acquire(guard_addr(nm));
|
||||
NativeNMethodBarrier(nmethod* nm): _nm(nm) {
|
||||
#if INCLUDE_JVMCI
|
||||
if (nm->is_compiled_by_jvmci()) {
|
||||
address pc = nm->code_begin() + nm->jvmci_nmethod_data()->nmethod_entry_patch_offset();
|
||||
RelocIterator iter(nm, pc, pc + 4);
|
||||
guarantee(iter.next(), "missing relocs");
|
||||
guarantee(iter.type() == relocInfo::section_word_type, "unexpected reloc");
|
||||
|
||||
_guard_addr = (int*) iter.section_word_reloc()->target();
|
||||
_instruction_address = pc;
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
_instruction_address = nm->code_begin() + nm->frame_complete_offset() + entry_barrier_offset(nm);
|
||||
if (nm->is_compiled_by_c2()) {
|
||||
// With c2 compiled code, the guard is out-of-line in a stub
|
||||
// We find it using the RelocIterator.
|
||||
RelocIterator iter(nm);
|
||||
while (iter.next()) {
|
||||
if (iter.type() == relocInfo::entry_guard_type) {
|
||||
entry_guard_Relocation* const reloc = iter.entry_guard_reloc();
|
||||
_guard_addr = reinterpret_cast<int*>(reloc->addr());
|
||||
return;
|
||||
}
|
||||
}
|
||||
ShouldNotReachHere();
|
||||
}
|
||||
_guard_addr = reinterpret_cast<int*>(instruction_address() + local_guard_offset(nm));
|
||||
}
|
||||
}
|
||||
|
||||
void set_value(nmethod* nm, int value) {
|
||||
Atomic::release_store(guard_addr(nm), value);
|
||||
int get_value() {
|
||||
return Atomic::load_acquire(guard_addr());
|
||||
}
|
||||
|
||||
void verify() const;
|
||||
void set_value(int value) {
|
||||
Atomic::release_store(guard_addr(), value);
|
||||
}
|
||||
|
||||
bool check_barrier(err_msg& msg) const;
|
||||
void verify() const {
|
||||
err_msg msg("%s", "");
|
||||
assert(check_barrier(msg), "%s", msg.buffer());
|
||||
}
|
||||
};
|
||||
|
||||
// Store the instruction bitmask, bits and name for checking the barrier.
|
||||
@ -112,16 +142,17 @@ static const struct CheckInsn barrierInsn[] = {
|
||||
// The encodings must match the instructions emitted by
|
||||
// BarrierSetAssembler::nmethod_entry_barrier. The matching ignores the specific
|
||||
// register numbers and immediate values in the encoding.
|
||||
void NativeNMethodBarrier::verify() const {
|
||||
bool NativeNMethodBarrier::check_barrier(err_msg& msg) const {
|
||||
intptr_t addr = (intptr_t) instruction_address();
|
||||
for(unsigned int i = 0; i < sizeof(barrierInsn)/sizeof(struct CheckInsn); i++ ) {
|
||||
uint32_t inst = *((uint32_t*) addr);
|
||||
if ((inst & barrierInsn[i].mask) != barrierInsn[i].bits) {
|
||||
tty->print_cr("Addr: " INTPTR_FORMAT " Code: 0x%x", addr, inst);
|
||||
fatal("not an %s instruction.", barrierInsn[i].name);
|
||||
msg.print("Addr: " INTPTR_FORMAT " Code: 0x%x not an %s instruction", addr, inst, barrierInsn[i].name);
|
||||
return false;
|
||||
}
|
||||
addr += 4;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
@ -164,13 +195,6 @@ void BarrierSetNMethod::deoptimize(nmethod* nm, address* return_address_ptr) {
|
||||
new_frame->pc = SharedRuntime::get_handle_wrong_method_stub();
|
||||
}
|
||||
|
||||
static NativeNMethodBarrier* native_nmethod_barrier(nmethod* nm) {
|
||||
address barrier_address = nm->code_begin() + nm->frame_complete_offset() + entry_barrier_offset(nm);
|
||||
NativeNMethodBarrier* barrier = reinterpret_cast<NativeNMethodBarrier*>(barrier_address);
|
||||
debug_only(barrier->verify());
|
||||
return barrier;
|
||||
}
|
||||
|
||||
void BarrierSetNMethod::set_guard_value(nmethod* nm, int value) {
|
||||
if (!supports_entry_barrier(nm)) {
|
||||
return;
|
||||
@ -187,8 +211,8 @@ void BarrierSetNMethod::set_guard_value(nmethod* nm, int value) {
|
||||
bs_asm->increment_patching_epoch();
|
||||
}
|
||||
|
||||
NativeNMethodBarrier* barrier = native_nmethod_barrier(nm);
|
||||
barrier->set_value(nm, value);
|
||||
NativeNMethodBarrier barrier(nm);
|
||||
barrier.set_value(value);
|
||||
}
|
||||
|
||||
int BarrierSetNMethod::guard_value(nmethod* nm) {
|
||||
@ -196,6 +220,13 @@ int BarrierSetNMethod::guard_value(nmethod* nm) {
|
||||
return disarmed_guard_value();
|
||||
}
|
||||
|
||||
NativeNMethodBarrier* barrier = native_nmethod_barrier(nm);
|
||||
return barrier->get_value(nm);
|
||||
NativeNMethodBarrier barrier(nm);
|
||||
return barrier.get_value();
|
||||
}
|
||||
|
||||
#if INCLUDE_JVMCI
|
||||
bool BarrierSetNMethod::verify_barrier(nmethod* nm, err_msg& msg) {
|
||||
NativeNMethodBarrier barrier(nm);
|
||||
return barrier.check_barrier(msg);
|
||||
}
|
||||
#endif
|
||||
|
@ -47,8 +47,6 @@ const bool CCallingConventionRequiresIntsAsLongs = false;
|
||||
|
||||
#define SUPPORT_RESERVED_STACK_AREA
|
||||
|
||||
#define COMPRESSED_CLASS_POINTERS_DEPENDS_ON_COMPRESSED_OOPS false
|
||||
|
||||
#define USE_POINTERS_TO_REGISTER_IMPL_ARRAY
|
||||
|
||||
#define DEFAULT_CACHE_LINE_SIZE 64
|
||||
|
@ -46,6 +46,4 @@ const bool CCallingConventionRequiresIntsAsLongs = true;
|
||||
|
||||
#define SUPPORT_RESERVED_STACK_AREA
|
||||
|
||||
#define COMPRESSED_CLASS_POINTERS_DEPENDS_ON_COMPRESSED_OOPS false
|
||||
|
||||
#endif // CPU_S390_GLOBALDEFINITIONS_S390_HPP
|
||||
|
@ -32,7 +32,11 @@
|
||||
#include "runtime/sharedRuntime.hpp"
|
||||
#include "utilities/align.hpp"
|
||||
#include "utilities/debug.hpp"
|
||||
#include "utilities/formatBuffer.hpp"
|
||||
#include "utilities/macros.hpp"
|
||||
#if INCLUDE_JVMCI
|
||||
#include "jvmci/jvmciRuntime.hpp"
|
||||
#endif
|
||||
|
||||
class NativeNMethodCmpBarrier: public NativeInstruction {
|
||||
public:
|
||||
@ -58,55 +62,63 @@ public:
|
||||
|
||||
jint get_immediate() const { return int_at(imm_offset); }
|
||||
void set_immediate(jint imm) { set_int_at(imm_offset, imm); }
|
||||
void verify() const;
|
||||
bool check_barrier(err_msg& msg) const;
|
||||
void verify() const {
|
||||
#ifdef ASSERT
|
||||
err_msg msg("%s", "");
|
||||
assert(check_barrier(msg), "%s", msg.buffer());
|
||||
#endif
|
||||
}
|
||||
};
|
||||
|
||||
#ifdef _LP64
|
||||
void NativeNMethodCmpBarrier::verify() const {
|
||||
bool NativeNMethodCmpBarrier::check_barrier(err_msg& msg) const {
|
||||
// Only require 4 byte alignment
|
||||
if (((uintptr_t) instruction_address()) & 0x3) {
|
||||
fatal("Not properly aligned");
|
||||
msg.print("Addr: " INTPTR_FORMAT " not properly aligned", p2i(instruction_address()));
|
||||
return false;
|
||||
}
|
||||
|
||||
int prefix = ubyte_at(0);
|
||||
if (prefix != instruction_rex_prefix) {
|
||||
tty->print_cr("Addr: " INTPTR_FORMAT " Prefix: 0x%x", p2i(instruction_address()),
|
||||
prefix);
|
||||
fatal("not a cmp barrier");
|
||||
msg.print("Addr: " INTPTR_FORMAT " Code: 0x%x expected 0x%x", p2i(instruction_address()), prefix, instruction_rex_prefix);
|
||||
return false;
|
||||
}
|
||||
|
||||
int inst = ubyte_at(1);
|
||||
if (inst != instruction_code) {
|
||||
tty->print_cr("Addr: " INTPTR_FORMAT " Code: 0x%x", p2i(instruction_address()),
|
||||
inst);
|
||||
fatal("not a cmp barrier");
|
||||
msg.print("Addr: " INTPTR_FORMAT " Code: 0x%x expected 0x%x", p2i(instruction_address()), inst, instruction_code);
|
||||
return false;
|
||||
}
|
||||
|
||||
int modrm = ubyte_at(2);
|
||||
if (modrm != instruction_modrm) {
|
||||
tty->print_cr("Addr: " INTPTR_FORMAT " mod/rm: 0x%x", p2i(instruction_address()),
|
||||
modrm);
|
||||
fatal("not a cmp barrier");
|
||||
msg.print("Addr: " INTPTR_FORMAT " Code: 0x%x expected mod/rm 0x%x", p2i(instruction_address()), modrm, instruction_modrm);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
#else
|
||||
void NativeNMethodCmpBarrier::verify() const {
|
||||
bool NativeNMethodCmpBarrier::check_barrier(err_msg& msg) const {
|
||||
if (((uintptr_t) instruction_address()) & 0x3) {
|
||||
fatal("Not properly aligned");
|
||||
msg.print("Addr: " INTPTR_FORMAT " not properly aligned", p2i(instruction_address()));
|
||||
return false;
|
||||
}
|
||||
|
||||
int inst = ubyte_at(0);
|
||||
if (inst != instruction_code) {
|
||||
tty->print_cr("Addr: " INTPTR_FORMAT " Code: 0x%x", p2i(instruction_address()),
|
||||
msg.print("Addr: " INTPTR_FORMAT " Code: 0x%x", p2i(instruction_address()),
|
||||
inst);
|
||||
fatal("not a cmp barrier");
|
||||
return false;
|
||||
}
|
||||
|
||||
int modrm = ubyte_at(1);
|
||||
if (modrm != instruction_modrm) {
|
||||
tty->print_cr("Addr: " INTPTR_FORMAT " mod/rm: 0x%x", p2i(instruction_address()),
|
||||
msg.print("Addr: " INTPTR_FORMAT " mod/rm: 0x%x", p2i(instruction_address()),
|
||||
modrm);
|
||||
fatal("not a cmp barrier");
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
#endif // _LP64
|
||||
|
||||
@ -170,9 +182,18 @@ static const int entry_barrier_offset(nmethod* nm) {
|
||||
}
|
||||
|
||||
static NativeNMethodCmpBarrier* native_nmethod_barrier(nmethod* nm) {
|
||||
address barrier_address = nm->code_begin() + nm->frame_complete_offset() + entry_barrier_offset(nm);
|
||||
address barrier_address;
|
||||
#if INCLUDE_JVMCI
|
||||
if (nm->is_compiled_by_jvmci()) {
|
||||
barrier_address = nm->code_begin() + nm->jvmci_nmethod_data()->nmethod_entry_patch_offset();
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
barrier_address = nm->code_begin() + nm->frame_complete_offset() + entry_barrier_offset(nm);
|
||||
}
|
||||
|
||||
NativeNMethodCmpBarrier* barrier = reinterpret_cast<NativeNMethodCmpBarrier*>(barrier_address);
|
||||
debug_only(barrier->verify());
|
||||
barrier->verify();
|
||||
return barrier;
|
||||
}
|
||||
|
||||
@ -193,3 +214,11 @@ int BarrierSetNMethod::guard_value(nmethod* nm) {
|
||||
NativeNMethodCmpBarrier* cmp = native_nmethod_barrier(nm);
|
||||
return cmp->get_immediate();
|
||||
}
|
||||
|
||||
|
||||
#if INCLUDE_JVMCI
|
||||
bool BarrierSetNMethod::verify_barrier(nmethod* nm, err_msg& msg) {
|
||||
NativeNMethodCmpBarrier* barrier = native_nmethod_barrier(nm);
|
||||
return barrier->check_barrier(msg);
|
||||
}
|
||||
#endif
|
||||
|
@ -71,12 +71,6 @@ const bool CCallingConventionRequiresIntsAsLongs = false;
|
||||
#define SUPPORT_RESERVED_STACK_AREA
|
||||
#endif
|
||||
|
||||
#if INCLUDE_JVMCI
|
||||
#define COMPRESSED_CLASS_POINTERS_DEPENDS_ON_COMPRESSED_OOPS EnableJVMCI
|
||||
#else
|
||||
#define COMPRESSED_CLASS_POINTERS_DEPENDS_ON_COMPRESSED_OOPS false
|
||||
#endif
|
||||
|
||||
#define USE_POINTERS_TO_REGISTER_IMPL_ARRAY
|
||||
|
||||
#endif // CPU_X86_GLOBALDEFINITIONS_X86_HPP
|
||||
|
@ -185,6 +185,15 @@ void CodeInstaller::pd_relocate_JavaMethod(CodeBuffer &, methodHandle& method, j
|
||||
if (!call->is_displacement_aligned()) {
|
||||
JVMCI_ERROR("unaligned displacement for call at offset %d", pc_offset);
|
||||
}
|
||||
if (Continuations::enabled()) {
|
||||
// Check for proper post_call_nop
|
||||
NativePostCallNop* nop = nativePostCallNop_at(call->next_instruction_address());
|
||||
if (nop == NULL) {
|
||||
JVMCI_ERROR("missing post call nop at offset %d", pc_offset);
|
||||
} else {
|
||||
_instructions->relocate(call->next_instruction_address(), relocInfo::post_call_nop_type);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void CodeInstaller::pd_relocate_poll(address pc, jint mark, JVMCI_TRAPS) {
|
||||
|
@ -42,6 +42,4 @@
|
||||
// 32-bit integer argument values are extended to 64 bits.
|
||||
const bool CCallingConventionRequiresIntsAsLongs = false;
|
||||
|
||||
#define COMPRESSED_CLASS_POINTERS_DEPENDS_ON_COMPRESSED_OOPS false
|
||||
|
||||
#endif // CPU_ZERO_GLOBALDEFINITIONS_ZERO_HPP
|
||||
|
@ -550,9 +550,7 @@ nmethod* nmethod::new_nmethod(const methodHandle& method,
|
||||
#if INCLUDE_JVMCI
|
||||
, char* speculations,
|
||||
int speculations_len,
|
||||
int nmethod_mirror_index,
|
||||
const char* nmethod_mirror_name,
|
||||
FailedSpeculation** failed_speculations
|
||||
JVMCINMethodData* jvmci_data
|
||||
#endif
|
||||
)
|
||||
{
|
||||
@ -561,7 +559,7 @@ nmethod* nmethod::new_nmethod(const methodHandle& method,
|
||||
// create nmethod
|
||||
nmethod* nm = nullptr;
|
||||
#if INCLUDE_JVMCI
|
||||
int jvmci_data_size = !compiler->is_jvmci() ? 0 : JVMCINMethodData::compute_size(nmethod_mirror_name);
|
||||
int jvmci_data_size = compiler->is_jvmci() ? jvmci_data->size() : 0;
|
||||
#endif
|
||||
int nmethod_size =
|
||||
CodeBlob::allocation_size(code_buffer, sizeof(nmethod))
|
||||
@ -588,17 +586,11 @@ nmethod* nmethod::new_nmethod(const methodHandle& method,
|
||||
#if INCLUDE_JVMCI
|
||||
, speculations,
|
||||
speculations_len,
|
||||
jvmci_data_size
|
||||
jvmci_data
|
||||
#endif
|
||||
);
|
||||
|
||||
if (nm != nullptr) {
|
||||
#if INCLUDE_JVMCI
|
||||
if (compiler->is_jvmci()) {
|
||||
// Initialize the JVMCINMethodData object inlined into nm
|
||||
nm->jvmci_nmethod_data()->initialize(nmethod_mirror_index, nmethod_mirror_name, failed_speculations);
|
||||
}
|
||||
#endif
|
||||
// To make dependency checking during class loading fast, record
|
||||
// the nmethod dependencies in the classes it is dependent on.
|
||||
// This allows the dependency checking code to simply walk the
|
||||
@ -786,7 +778,7 @@ nmethod::nmethod(
|
||||
#if INCLUDE_JVMCI
|
||||
, char* speculations,
|
||||
int speculations_len,
|
||||
int jvmci_data_size
|
||||
JVMCINMethodData* jvmci_data
|
||||
#endif
|
||||
)
|
||||
: CompiledMethod(method, "nmethod", type, nmethod_size, sizeof(nmethod), code_buffer, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, true),
|
||||
@ -866,6 +858,7 @@ nmethod::nmethod(
|
||||
#if INCLUDE_JVMCI
|
||||
_speculations_offset = _nul_chk_table_offset + align_up(nul_chk_table->size_in_bytes(), oopSize);
|
||||
_jvmci_data_offset = _speculations_offset + align_up(speculations_len, oopSize);
|
||||
int jvmci_data_size = compiler->is_jvmci() ? jvmci_data->size() : 0;
|
||||
_nmethod_end_offset = _jvmci_data_offset + align_up(jvmci_data_size, oopSize);
|
||||
#else
|
||||
_nmethod_end_offset = _nul_chk_table_offset + align_up(nul_chk_table->size_in_bytes(), oopSize);
|
||||
@ -885,6 +878,13 @@ nmethod::nmethod(
|
||||
dependencies->copy_to(this);
|
||||
clear_unloading_state();
|
||||
|
||||
#if INCLUDE_JVMCI
|
||||
if (compiler->is_jvmci()) {
|
||||
// Initialize the JVMCINMethodData object inlined into nm
|
||||
jvmci_nmethod_data()->copy(jvmci_data);
|
||||
}
|
||||
#endif
|
||||
|
||||
Universe::heap()->register_nmethod(this);
|
||||
debug_only(Universe::heap()->verify_nmethod(this));
|
||||
|
||||
|
@ -292,9 +292,9 @@ class nmethod : public CompiledMethod {
|
||||
AbstractCompiler* compiler,
|
||||
CompLevel comp_level
|
||||
#if INCLUDE_JVMCI
|
||||
, char* speculations,
|
||||
int speculations_len,
|
||||
int jvmci_data_size
|
||||
, char* speculations = nullptr,
|
||||
int speculations_len = 0,
|
||||
JVMCINMethodData* jvmci_data = nullptr
|
||||
#endif
|
||||
);
|
||||
|
||||
@ -345,9 +345,7 @@ class nmethod : public CompiledMethod {
|
||||
#if INCLUDE_JVMCI
|
||||
, char* speculations = nullptr,
|
||||
int speculations_len = 0,
|
||||
int nmethod_mirror_index = -1,
|
||||
const char* nmethod_mirror_name = nullptr,
|
||||
FailedSpeculation** failed_speculations = nullptr
|
||||
JVMCINMethodData* jvmci_data = nullptr
|
||||
#endif
|
||||
);
|
||||
|
||||
|
@ -39,6 +39,9 @@
|
||||
#include "runtime/threadWXSetters.inline.hpp"
|
||||
#include "runtime/threads.hpp"
|
||||
#include "utilities/debug.hpp"
|
||||
#if INCLUDE_JVMCI
|
||||
#include "jvmci/jvmciRuntime.hpp"
|
||||
#endif
|
||||
|
||||
int BarrierSetNMethod::disarmed_guard_value() const {
|
||||
return *disarmed_guard_value_address();
|
||||
@ -62,11 +65,17 @@ bool BarrierSetNMethod::supports_entry_barrier(nmethod* nm) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!nm->is_native_method() && !nm->is_compiled_by_c2() && !nm->is_compiled_by_c1()) {
|
||||
return false;
|
||||
if (nm->is_native_method() || nm->is_compiled_by_c2() || nm->is_compiled_by_c1()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return true;
|
||||
#if INCLUDE_JVMCI
|
||||
if (nm->is_compiled_by_jvmci() && nm->jvmci_nmethod_data()->has_entry_barrier()) {
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
void BarrierSetNMethod::disarm(nmethod* nm) {
|
||||
|
@ -26,6 +26,7 @@
|
||||
#define SHARE_GC_SHARED_BARRIERSETNMETHOD_HPP
|
||||
|
||||
#include "memory/allocation.hpp"
|
||||
#include "utilities/formatBuffer.hpp"
|
||||
#include "utilities/globalDefinitions.hpp"
|
||||
#include "utilities/sizes.hpp"
|
||||
|
||||
@ -55,6 +56,10 @@ public:
|
||||
void set_guard_value(nmethod* nm, int value);
|
||||
|
||||
void arm_all_nmethods();
|
||||
|
||||
#if INCLUDE_JVMCI
|
||||
bool verify_barrier(nmethod* nm, FormatBuffer<>& msg);
|
||||
#endif
|
||||
};
|
||||
|
||||
|
||||
|
@ -27,6 +27,7 @@
|
||||
#include "compiler/compileBroker.hpp"
|
||||
#include "compiler/compilerThread.hpp"
|
||||
#include "compiler/oopMap.hpp"
|
||||
#include "gc/shared/barrierSetNMethod.hpp"
|
||||
#include "jvmci/jvmciCodeInstaller.hpp"
|
||||
#include "jvmci/jvmciCompilerToVM.hpp"
|
||||
#include "jvmci/jvmciRuntime.hpp"
|
||||
@ -379,7 +380,7 @@ Handle CodeInstaller::read_oop(HotSpotCompiledCodeStream* stream, u1 tag, JVMCI_
|
||||
if (obj == nullptr) {
|
||||
JVMCI_THROW_MSG_(InternalError, "Constant was unexpectedly null", Handle());
|
||||
} else {
|
||||
oopDesc::verify(obj);
|
||||
guarantee(oopDesc::is_oop_or_null(obj), "invalid oop: " INTPTR_FORMAT, p2i((oopDesc*) obj));
|
||||
}
|
||||
return Handle(stream->thread(), obj);
|
||||
}
|
||||
@ -727,6 +728,14 @@ JVMCI::CodeInstallResult CodeInstaller::install(JVMCICompiler* compiler,
|
||||
JVMCI_THROW_MSG_(IllegalArgumentException, "InstalledCode object must be a HotSpotNmethod when installing a HotSpotCompiledNmethod", JVMCI::ok);
|
||||
}
|
||||
|
||||
// We would like to be strict about the nmethod entry barrier but there are various test
|
||||
// configurations which generate assembly without being a full compiler. So for now we enforce
|
||||
// that JIT compiled methods must have an nmethod barrier.
|
||||
bool install_default = JVMCIENV->get_HotSpotNmethod_isDefault(installed_code) != 0;
|
||||
if (_nmethod_entry_patch_offset == -1 && install_default) {
|
||||
JVMCI_THROW_MSG_(IllegalArgumentException, "nmethod entry barrier is missing", JVMCI::ok);
|
||||
}
|
||||
|
||||
JVMCIObject mirror = installed_code;
|
||||
nmethod* nm = nullptr; // nm is an out parameter of register_method
|
||||
result = runtime()->register_method(jvmci_env(),
|
||||
@ -751,7 +760,8 @@ JVMCI::CodeInstallResult CodeInstaller::install(JVMCICompiler* compiler,
|
||||
mirror,
|
||||
failed_speculations,
|
||||
speculations,
|
||||
speculations_len);
|
||||
speculations_len,
|
||||
_nmethod_entry_patch_offset);
|
||||
if (result == JVMCI::ok) {
|
||||
cb = nm;
|
||||
if (compile_state == nullptr) {
|
||||
@ -760,6 +770,17 @@ JVMCI::CodeInstallResult CodeInstaller::install(JVMCICompiler* compiler,
|
||||
nm->maybe_print_nmethod(directive);
|
||||
DirectivesStack::release(directive);
|
||||
}
|
||||
|
||||
if (nm != nullptr) {
|
||||
if (_nmethod_entry_patch_offset != -1) {
|
||||
err_msg msg("");
|
||||
BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
|
||||
|
||||
if (!bs_nm->verify_barrier(nm, msg)) {
|
||||
JVMCI_THROW_MSG_(IllegalArgumentException, err_msg("nmethod entry barrier is malformed: %s", msg.buffer()), JVMCI::ok);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -804,7 +825,9 @@ void CodeInstaller::initialize_fields(HotSpotCompiledCodeStream* stream, u1 code
|
||||
}
|
||||
_constants_size = data_section_size;
|
||||
_next_call_type = INVOKE_INVALID;
|
||||
_has_monitors = false;
|
||||
_has_wide_vector = false;
|
||||
_nmethod_entry_patch_offset = -1;
|
||||
}
|
||||
|
||||
u1 CodeInstaller::as_read_oop_tag(HotSpotCompiledCodeStream* stream, u1 patch_object_tag, JVMCI_TRAPS) {
|
||||
@ -1256,6 +1279,9 @@ void CodeInstaller::site_Mark(CodeBuffer& buffer, jint pc_offset, HotSpotCompile
|
||||
case FRAME_COMPLETE:
|
||||
_offsets.set_value(CodeOffsets::Frame_Complete, pc_offset);
|
||||
break;
|
||||
case ENTRY_BARRIER_PATCH:
|
||||
_nmethod_entry_patch_offset = pc_offset;
|
||||
break;
|
||||
case INVOKEVIRTUAL:
|
||||
case INVOKEINTERFACE:
|
||||
case INLINE_INVOKE:
|
||||
|
@ -152,6 +152,7 @@ private:
|
||||
EXCEPTION_HANDLER_ENTRY,
|
||||
DEOPT_HANDLER_ENTRY,
|
||||
FRAME_COMPLETE,
|
||||
ENTRY_BARRIER_PATCH,
|
||||
INVOKEINTERFACE,
|
||||
INVOKEVIRTUAL,
|
||||
INVOKESTATIC,
|
||||
@ -271,6 +272,7 @@ private:
|
||||
jint _sites_count;
|
||||
|
||||
CodeOffsets _offsets;
|
||||
int _nmethod_entry_patch_offset;
|
||||
|
||||
jint _code_size;
|
||||
jint _total_frame_size;
|
||||
|
@ -33,20 +33,21 @@
|
||||
#include "compiler/compilerEvent.hpp"
|
||||
#include "compiler/disassembler.hpp"
|
||||
#include "compiler/oopMap.hpp"
|
||||
#include "interpreter/linkResolver.hpp"
|
||||
#include "interpreter/bytecodeStream.hpp"
|
||||
#include "interpreter/linkResolver.hpp"
|
||||
#include "jfr/jfrEvents.hpp"
|
||||
#include "jvmci/jvmciCompilerToVM.hpp"
|
||||
#include "jvmci/jvmciCodeInstaller.hpp"
|
||||
#include "jvmci/jvmciCompilerToVM.hpp"
|
||||
#include "jvmci/jvmciRuntime.hpp"
|
||||
#include "logging/log.hpp"
|
||||
#include "logging/logTag.hpp"
|
||||
#include "memory/oopFactory.hpp"
|
||||
#include "memory/universe.hpp"
|
||||
#include "oops/constantPool.inline.hpp"
|
||||
#include "oops/instanceMirrorKlass.hpp"
|
||||
#include "oops/instanceKlass.inline.hpp"
|
||||
#include "oops/instanceMirrorKlass.hpp"
|
||||
#include "oops/method.inline.hpp"
|
||||
#include "oops/objArrayKlass.inline.hpp"
|
||||
#include "oops/typeArrayOop.inline.hpp"
|
||||
#include "prims/jvmtiExport.hpp"
|
||||
#include "prims/methodHandles.hpp"
|
||||
@ -61,8 +62,8 @@
|
||||
#include "runtime/reflectionUtils.hpp"
|
||||
#include "runtime/stackFrameStream.inline.hpp"
|
||||
#include "runtime/timerTrace.hpp"
|
||||
#include "runtime/vframe_hp.hpp"
|
||||
#include "runtime/vframe.inline.hpp"
|
||||
#include "runtime/vframe_hp.hpp"
|
||||
#if INCLUDE_JFR
|
||||
#include "jfr/jfr.hpp"
|
||||
#endif
|
||||
@ -101,6 +102,54 @@ class JVMCITraceMark : public StackObj {
|
||||
}
|
||||
};
|
||||
|
||||
class JavaArgumentUnboxer : public SignatureIterator {
|
||||
protected:
|
||||
JavaCallArguments* _jca;
|
||||
arrayOop _args;
|
||||
int _index;
|
||||
|
||||
Handle next_arg(BasicType expectedType);
|
||||
|
||||
public:
|
||||
JavaArgumentUnboxer(Symbol* signature,
|
||||
JavaCallArguments* jca,
|
||||
arrayOop args,
|
||||
bool is_static)
|
||||
: SignatureIterator(signature)
|
||||
{
|
||||
this->_return_type = T_ILLEGAL;
|
||||
_jca = jca;
|
||||
_index = 0;
|
||||
_args = args;
|
||||
if (!is_static) {
|
||||
_jca->push_oop(next_arg(T_OBJECT));
|
||||
}
|
||||
do_parameters_on(this);
|
||||
assert(_index == args->length(), "arg count mismatch with signature");
|
||||
}
|
||||
|
||||
private:
|
||||
friend class SignatureIterator; // so do_parameters_on can call do_type
|
||||
void do_type(BasicType type) {
|
||||
if (is_reference_type(type)) {
|
||||
_jca->push_oop(next_arg(T_OBJECT));
|
||||
return;
|
||||
}
|
||||
Handle arg = next_arg(type);
|
||||
int box_offset = java_lang_boxing_object::value_offset(type);
|
||||
switch (type) {
|
||||
case T_BOOLEAN: _jca->push_int(arg->bool_field(box_offset)); break;
|
||||
case T_CHAR: _jca->push_int(arg->char_field(box_offset)); break;
|
||||
case T_SHORT: _jca->push_int(arg->short_field(box_offset)); break;
|
||||
case T_BYTE: _jca->push_int(arg->byte_field(box_offset)); break;
|
||||
case T_INT: _jca->push_int(arg->int_field(box_offset)); break;
|
||||
case T_LONG: _jca->push_long(arg->long_field(box_offset)); break;
|
||||
case T_FLOAT: _jca->push_float(arg->float_field(box_offset)); break;
|
||||
case T_DOUBLE: _jca->push_double(arg->double_field(box_offset)); break;
|
||||
default: ShouldNotReachHere();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Handle JavaArgumentUnboxer::next_arg(BasicType expectedType) {
|
||||
assert(_index < _args->length(), "out of bounds");
|
||||
@ -383,45 +432,93 @@ C2V_VMENTRY_NULL(jobject, getConstantPool, (JNIEnv* env, jobject, ARGUMENT_PAIR(
|
||||
}
|
||||
|
||||
C2V_VMENTRY_NULL(jobject, getResolvedJavaType0, (JNIEnv* env, jobject, jobject base, jlong offset, jboolean compressed))
|
||||
JVMCIKlassHandle klass(THREAD);
|
||||
JVMCIObject base_object = JVMCIENV->wrap(base);
|
||||
jlong base_address = 0;
|
||||
if (base_object.is_non_null() && offset == oopDesc::klass_offset_in_bytes()) {
|
||||
if (base_object.is_null()) {
|
||||
JVMCI_THROW_MSG_NULL(NullPointerException, "base object is null");
|
||||
}
|
||||
|
||||
const char* base_desc = nullptr;
|
||||
JVMCIKlassHandle klass(THREAD);
|
||||
if (offset == oopDesc::klass_offset_in_bytes()) {
|
||||
if (JVMCIENV->isa_HotSpotObjectConstantImpl(base_object)) {
|
||||
Handle base_oop = JVMCIENV->asConstant(base_object, JVMCI_CHECK_NULL);
|
||||
klass = base_oop->klass();
|
||||
} else {
|
||||
assert(false, "What types are we actually expecting here?");
|
||||
goto unexpected;
|
||||
}
|
||||
} else if (!compressed) {
|
||||
if (base_object.is_non_null()) {
|
||||
if (JVMCIENV->isa_HotSpotResolvedJavaMethodImpl(base_object)) {
|
||||
base_address = (intptr_t) JVMCIENV->asMethod(base_object);
|
||||
} else if (JVMCIENV->isa_HotSpotConstantPool(base_object)) {
|
||||
base_address = (intptr_t) JVMCIENV->asConstantPool(base_object);
|
||||
} else if (JVMCIENV->isa_HotSpotResolvedObjectTypeImpl(base_object)) {
|
||||
base_address = (intptr_t) JVMCIENV->asKlass(base_object);
|
||||
} else if (JVMCIENV->isa_HotSpotObjectConstantImpl(base_object)) {
|
||||
Handle base_oop = JVMCIENV->asConstant(base_object, JVMCI_CHECK_NULL);
|
||||
if (base_oop->is_a(vmClasses::Class_klass())) {
|
||||
base_address = cast_from_oop<jlong>(base_oop());
|
||||
if (JVMCIENV->isa_HotSpotConstantPool(base_object)) {
|
||||
ConstantPool* cp = JVMCIENV->asConstantPool(base_object);
|
||||
if (offset == ConstantPool::pool_holder_offset_in_bytes()) {
|
||||
klass = cp->pool_holder();
|
||||
} else {
|
||||
base_desc = FormatBufferResource("[constant pool for %s]", cp->pool_holder()->signature_name());
|
||||
goto unexpected;
|
||||
}
|
||||
} else if (JVMCIENV->isa_HotSpotResolvedObjectTypeImpl(base_object)) {
|
||||
Klass* base_klass = JVMCIENV->asKlass(base_object);
|
||||
if (offset == in_bytes(Klass::subklass_offset())) {
|
||||
klass = base_klass->subklass();
|
||||
} else if (offset == in_bytes(Klass::super_offset())) {
|
||||
klass = base_klass->super();
|
||||
} else if (offset == in_bytes(Klass::next_sibling_offset())) {
|
||||
klass = base_klass->next_sibling();
|
||||
} else if (offset == in_bytes(ObjArrayKlass::element_klass_offset()) && base_klass->is_objArray_klass()) {
|
||||
klass = ObjArrayKlass::cast(base_klass)->element_klass();
|
||||
} else if (offset >= in_bytes(Klass::primary_supers_offset()) &&
|
||||
offset < in_bytes(Klass::primary_supers_offset()) + (int) (sizeof(Klass*) * Klass::primary_super_limit()) &&
|
||||
offset % sizeof(Klass*) == 0) {
|
||||
// Offset is within the primary supers array
|
||||
int index = (int) ((offset - in_bytes(Klass::primary_supers_offset())) / sizeof(Klass*));
|
||||
klass = base_klass->primary_super_of_depth(index);
|
||||
} else {
|
||||
base_desc = FormatBufferResource("[%s]", base_klass->signature_name());
|
||||
goto unexpected;
|
||||
}
|
||||
} else if (JVMCIENV->isa_HotSpotObjectConstantImpl(base_object)) {
|
||||
Handle base_oop = JVMCIENV->asConstant(base_object, JVMCI_CHECK_NULL);
|
||||
if (base_oop->is_a(vmClasses::Class_klass())) {
|
||||
if (offset == java_lang_Class::klass_offset()) {
|
||||
klass = java_lang_Class::as_Klass(base_oop());
|
||||
} else if (offset == java_lang_Class::array_klass_offset()) {
|
||||
klass = java_lang_Class::array_klass_acquire(base_oop());
|
||||
} else {
|
||||
base_desc = FormatBufferResource("[Class=%s]", java_lang_Class::as_Klass(base_oop())->signature_name());
|
||||
goto unexpected;
|
||||
}
|
||||
} else {
|
||||
if (!base_oop.is_null()) {
|
||||
base_desc = FormatBufferResource("[%s]", base_oop()->klass()->signature_name());
|
||||
}
|
||||
goto unexpected;
|
||||
}
|
||||
if (base_address == 0) {
|
||||
JVMCI_THROW_MSG_NULL(IllegalArgumentException,
|
||||
err_msg("Unexpected arguments: %s " JLONG_FORMAT " %s", JVMCIENV->klass_name(base_object), offset, compressed ? "true" : "false"));
|
||||
} else if (JVMCIENV->isa_HotSpotMethodData(base_object)) {
|
||||
jlong base_address = (intptr_t) JVMCIENV->asMethodData(base_object);
|
||||
klass = *((Klass**) (intptr_t) (base_address + offset));
|
||||
if (klass == nullptr || !klass->is_loader_alive()) {
|
||||
// Klasses in methodData might be concurrently unloading so return null in that case.
|
||||
return nullptr;
|
||||
}
|
||||
} else {
|
||||
goto unexpected;
|
||||
}
|
||||
klass = *((Klass**) (intptr_t) (base_address + offset));
|
||||
} else {
|
||||
JVMCI_THROW_MSG_NULL(IllegalArgumentException,
|
||||
err_msg("Unexpected arguments: %s " JLONG_FORMAT " %s",
|
||||
base_object.is_non_null() ? JVMCIENV->klass_name(base_object) : "null",
|
||||
offset, compressed ? "true" : "false"));
|
||||
goto unexpected;
|
||||
}
|
||||
assert (klass == nullptr || klass->is_klass(), "invalid read");
|
||||
JVMCIObject result = JVMCIENV->get_jvmci_type(klass, JVMCI_CHECK_NULL);
|
||||
return JVMCIENV->get_jobject(result);
|
||||
|
||||
{
|
||||
if (klass == nullptr) {
|
||||
return nullptr;
|
||||
}
|
||||
JVMCIObject result = JVMCIENV->get_jvmci_type(klass, JVMCI_CHECK_NULL);
|
||||
return JVMCIENV->get_jobject(result);
|
||||
}
|
||||
|
||||
unexpected:
|
||||
JVMCI_THROW_MSG_NULL(IllegalArgumentException,
|
||||
err_msg("Unexpected arguments: %s%s " JLONG_FORMAT " %s",
|
||||
JVMCIENV->klass_name(base_object), base_desc == nullptr ? "" : base_desc,
|
||||
offset, compressed ? "true" : "false"));
|
||||
}
|
||||
|
||||
C2V_VMENTRY_NULL(jobject, findUniqueConcreteMethod, (JNIEnv* env, jobject, ARGUMENT_PAIR(klass), ARGUMENT_PAIR(method)))
|
||||
@ -1712,16 +1809,38 @@ C2V_VMENTRY_0(jint, methodDataProfileDataSize, (JNIEnv* env, jobject, jlong meth
|
||||
if (mdo->is_valid(profile_data)) {
|
||||
return profile_data->size_in_bytes();
|
||||
}
|
||||
// Java code should never directly access the extra data section
|
||||
JVMCI_THROW_MSG_0(IllegalArgumentException, err_msg("Invalid profile data position %d", position));
|
||||
C2V_END
|
||||
|
||||
C2V_VMENTRY_0(jint, methodDataExceptionSeen, (JNIEnv* env, jobject, jlong method_data_pointer, jint bci))
|
||||
MethodData* mdo = (MethodData*) method_data_pointer;
|
||||
MutexLocker mu(mdo->extra_data_lock());
|
||||
DataLayout* data = mdo->extra_data_base();
|
||||
DataLayout* end = mdo->extra_data_limit();
|
||||
DataLayout* end = mdo->args_data_limit();
|
||||
for (;; data = mdo->next_extra(data)) {
|
||||
assert(data < end, "moved past end of extra data");
|
||||
profile_data = data->data_in();
|
||||
if (mdo->dp_to_di(profile_data->dp()) == position) {
|
||||
return profile_data->size_in_bytes();
|
||||
int tag = data->tag();
|
||||
switch(tag) {
|
||||
case DataLayout::bit_data_tag: {
|
||||
BitData* bit_data = (BitData*) data->data_in();
|
||||
if (bit_data->bci() == bci) {
|
||||
return bit_data->exception_seen() ? 1 : 0;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataLayout::no_tag:
|
||||
// There is a free slot so return false since a BitData would have been allocated to record
|
||||
// true if it had been seen.
|
||||
return 0;
|
||||
case DataLayout::arg_info_data_tag:
|
||||
// The bci wasn't found and there are no free slots to record a trap for this location, so always
|
||||
// return unknown.
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
JVMCI_THROW_MSG_0(IllegalArgumentException, err_msg("Invalid profile data position %d", position));
|
||||
ShouldNotReachHere();
|
||||
return -1;
|
||||
C2V_END
|
||||
|
||||
C2V_VMENTRY_NULL(jobject, getInterfaces, (JNIEnv* env, jobject, ARGUMENT_PAIR(klass)))
|
||||
@ -3017,6 +3136,7 @@ JNINativeMethod CompilerToVM::methods[] = {
|
||||
{CC "writeDebugOutput", CC "(JIZ)V", FN_PTR(writeDebugOutput)},
|
||||
{CC "flushDebugOutput", CC "()V", FN_PTR(flushDebugOutput)},
|
||||
{CC "methodDataProfileDataSize", CC "(JI)I", FN_PTR(methodDataProfileDataSize)},
|
||||
{CC "methodDataExceptionSeen", CC "(JI)I", FN_PTR(methodDataExceptionSeen)},
|
||||
{CC "interpreterFrameSize", CC "(" BYTECODE_FRAME ")I", FN_PTR(interpreterFrameSize)},
|
||||
{CC "compileToBytecode", CC "(" OBJECTCONSTANT ")V", FN_PTR(compileToBytecode)},
|
||||
{CC "getFlagValue", CC "(" STRING ")" OBJECT, FN_PTR(getFlagValue)},
|
||||
|
@ -24,6 +24,7 @@
|
||||
#ifndef SHARE_JVMCI_JVMCICOMPILERTOVM_HPP
|
||||
#define SHARE_JVMCI_JVMCICOMPILERTOVM_HPP
|
||||
|
||||
#include "gc/shared/barrierSetAssembler.hpp"
|
||||
#include "gc/shared/cardTable.hpp"
|
||||
#include "jvmci/jvmciExceptions.hpp"
|
||||
#include "runtime/javaCalls.hpp"
|
||||
@ -48,6 +49,26 @@ class CompilerToVM {
|
||||
static address SharedRuntime_deopt_blob_unpack;
|
||||
static address SharedRuntime_deopt_blob_unpack_with_exception_in_tls;
|
||||
static address SharedRuntime_deopt_blob_uncommon_trap;
|
||||
static address SharedRuntime_polling_page_return_handler;
|
||||
|
||||
static address nmethod_entry_barrier;
|
||||
static int thread_disarmed_guard_value_offset;
|
||||
static int thread_address_bad_mask_offset;
|
||||
#ifdef AARCH64
|
||||
static int BarrierSetAssembler_nmethod_patching_type;
|
||||
static address BarrierSetAssembler_patching_epoch_addr;
|
||||
#endif
|
||||
|
||||
static address ZBarrierSetRuntime_load_barrier_on_oop_field_preloaded;
|
||||
static address ZBarrierSetRuntime_load_barrier_on_weak_oop_field_preloaded;
|
||||
static address ZBarrierSetRuntime_load_barrier_on_phantom_oop_field_preloaded;
|
||||
static address ZBarrierSetRuntime_weak_load_barrier_on_oop_field_preloaded;
|
||||
static address ZBarrierSetRuntime_weak_load_barrier_on_weak_oop_field_preloaded;
|
||||
static address ZBarrierSetRuntime_weak_load_barrier_on_phantom_oop_field_preloaded;
|
||||
static address ZBarrierSetRuntime_load_barrier_on_oop_array;
|
||||
static address ZBarrierSetRuntime_clone;
|
||||
|
||||
static bool continuations_enabled;
|
||||
|
||||
static size_t ThreadLocalAllocBuffer_alignment_reserve;
|
||||
|
||||
@ -129,55 +150,4 @@ class CompilerToVM {
|
||||
static int methods_count();
|
||||
|
||||
};
|
||||
|
||||
|
||||
class JavaArgumentUnboxer : public SignatureIterator {
|
||||
protected:
|
||||
JavaCallArguments* _jca;
|
||||
arrayOop _args;
|
||||
int _index;
|
||||
|
||||
Handle next_arg(BasicType expectedType);
|
||||
|
||||
public:
|
||||
JavaArgumentUnboxer(Symbol* signature,
|
||||
JavaCallArguments* jca,
|
||||
arrayOop args,
|
||||
bool is_static)
|
||||
: SignatureIterator(signature)
|
||||
{
|
||||
this->_return_type = T_ILLEGAL;
|
||||
_jca = jca;
|
||||
_index = 0;
|
||||
_args = args;
|
||||
if (!is_static) {
|
||||
_jca->push_oop(next_arg(T_OBJECT));
|
||||
}
|
||||
do_parameters_on(this);
|
||||
assert(_index == args->length(), "arg count mismatch with signature");
|
||||
}
|
||||
|
||||
private:
|
||||
friend class SignatureIterator; // so do_parameters_on can call do_type
|
||||
void do_type(BasicType type) {
|
||||
if (is_reference_type(type)) {
|
||||
_jca->push_oop(next_arg(T_OBJECT));
|
||||
return;
|
||||
}
|
||||
Handle arg = next_arg(type);
|
||||
int box_offset = java_lang_boxing_object::value_offset(type);
|
||||
switch (type) {
|
||||
case T_BOOLEAN: _jca->push_int(arg->bool_field(box_offset)); break;
|
||||
case T_CHAR: _jca->push_int(arg->char_field(box_offset)); break;
|
||||
case T_SHORT: _jca->push_int(arg->short_field(box_offset)); break;
|
||||
case T_BYTE: _jca->push_int(arg->byte_field(box_offset)); break;
|
||||
case T_INT: _jca->push_int(arg->int_field(box_offset)); break;
|
||||
case T_LONG: _jca->push_long(arg->long_field(box_offset)); break;
|
||||
case T_FLOAT: _jca->push_float(arg->float_field(box_offset)); break;
|
||||
case T_DOUBLE: _jca->push_double(arg->double_field(box_offset)); break;
|
||||
default: ShouldNotReachHere();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
#endif // SHARE_JVMCI_JVMCICOMPILERTOVM_HPP
|
||||
|
@ -26,12 +26,18 @@
|
||||
#include "compiler/compiler_globals.hpp"
|
||||
#include "compiler/oopMap.hpp"
|
||||
#include "gc/shared/barrierSet.hpp"
|
||||
#include "gc/shared/barrierSetAssembler.hpp"
|
||||
#include "gc/shared/barrierSetNMethod.hpp"
|
||||
#include "gc/shared/cardTable.hpp"
|
||||
#include "gc/shared/collectedHeap.hpp"
|
||||
#include "gc/shared/gc_globals.hpp"
|
||||
#include "gc/shared/tlab_globals.hpp"
|
||||
#include "jvmci/jvmciEnv.hpp"
|
||||
#if INCLUDE_ZGC
|
||||
#include "gc/z/zBarrierSetRuntime.hpp"
|
||||
#include "gc/z/zThreadLocalData.hpp"
|
||||
#endif
|
||||
#include "jvmci/jvmciCompilerToVM.hpp"
|
||||
#include "jvmci/jvmciEnv.hpp"
|
||||
#include "jvmci/vmStructs_jvmci.hpp"
|
||||
#include "memory/universe.hpp"
|
||||
#include "oops/compressedOops.hpp"
|
||||
@ -52,6 +58,27 @@ address CompilerToVM::Data::SharedRuntime_handle_wrong_method_stub;
|
||||
address CompilerToVM::Data::SharedRuntime_deopt_blob_unpack;
|
||||
address CompilerToVM::Data::SharedRuntime_deopt_blob_unpack_with_exception_in_tls;
|
||||
address CompilerToVM::Data::SharedRuntime_deopt_blob_uncommon_trap;
|
||||
address CompilerToVM::Data::SharedRuntime_polling_page_return_handler;
|
||||
|
||||
address CompilerToVM::Data::nmethod_entry_barrier;
|
||||
int CompilerToVM::Data::thread_disarmed_guard_value_offset;
|
||||
int CompilerToVM::Data::thread_address_bad_mask_offset;
|
||||
|
||||
address CompilerToVM::Data::ZBarrierSetRuntime_load_barrier_on_oop_field_preloaded;
|
||||
address CompilerToVM::Data::ZBarrierSetRuntime_load_barrier_on_weak_oop_field_preloaded;
|
||||
address CompilerToVM::Data::ZBarrierSetRuntime_load_barrier_on_phantom_oop_field_preloaded;
|
||||
address CompilerToVM::Data::ZBarrierSetRuntime_weak_load_barrier_on_oop_field_preloaded;
|
||||
address CompilerToVM::Data::ZBarrierSetRuntime_weak_load_barrier_on_weak_oop_field_preloaded;
|
||||
address CompilerToVM::Data::ZBarrierSetRuntime_weak_load_barrier_on_phantom_oop_field_preloaded;
|
||||
address CompilerToVM::Data::ZBarrierSetRuntime_load_barrier_on_oop_array;
|
||||
address CompilerToVM::Data::ZBarrierSetRuntime_clone;
|
||||
|
||||
bool CompilerToVM::Data::continuations_enabled;
|
||||
|
||||
#ifdef AARCH64
|
||||
int CompilerToVM::Data::BarrierSetAssembler_nmethod_patching_type;
|
||||
address CompilerToVM::Data::BarrierSetAssembler_patching_epoch_addr;
|
||||
#endif
|
||||
|
||||
size_t CompilerToVM::Data::ThreadLocalAllocBuffer_alignment_reserve;
|
||||
|
||||
@ -108,6 +135,33 @@ void CompilerToVM::Data::initialize(JVMCI_TRAPS) {
|
||||
SharedRuntime_deopt_blob_unpack = SharedRuntime::deopt_blob()->unpack();
|
||||
SharedRuntime_deopt_blob_unpack_with_exception_in_tls = SharedRuntime::deopt_blob()->unpack_with_exception_in_tls();
|
||||
SharedRuntime_deopt_blob_uncommon_trap = SharedRuntime::deopt_blob()->uncommon_trap();
|
||||
SharedRuntime_polling_page_return_handler = SharedRuntime::polling_page_return_handler_blob()->entry_point();
|
||||
|
||||
BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
|
||||
if (bs_nm != nullptr) {
|
||||
thread_disarmed_guard_value_offset = in_bytes(bs_nm->thread_disarmed_guard_value_offset());
|
||||
AMD64_ONLY(nmethod_entry_barrier = StubRoutines::x86::method_entry_barrier());
|
||||
AARCH64_ONLY(nmethod_entry_barrier = StubRoutines::aarch64::method_entry_barrier());
|
||||
BarrierSetAssembler* bs_asm = BarrierSet::barrier_set()->barrier_set_assembler();
|
||||
AARCH64_ONLY(BarrierSetAssembler_nmethod_patching_type = (int) bs_asm->nmethod_patching_type());
|
||||
AARCH64_ONLY(BarrierSetAssembler_patching_epoch_addr = bs_asm->patching_epoch_addr());
|
||||
}
|
||||
|
||||
#if INCLUDE_ZGC
|
||||
if (UseZGC) {
|
||||
thread_address_bad_mask_offset = in_bytes(ZThreadLocalData::address_bad_mask_offset());
|
||||
ZBarrierSetRuntime_load_barrier_on_oop_field_preloaded = ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr();
|
||||
ZBarrierSetRuntime_load_barrier_on_weak_oop_field_preloaded = ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded_addr();
|
||||
ZBarrierSetRuntime_load_barrier_on_phantom_oop_field_preloaded = ZBarrierSetRuntime::load_barrier_on_phantom_oop_field_preloaded_addr();
|
||||
ZBarrierSetRuntime_weak_load_barrier_on_oop_field_preloaded = ZBarrierSetRuntime::weak_load_barrier_on_oop_field_preloaded_addr();
|
||||
ZBarrierSetRuntime_weak_load_barrier_on_weak_oop_field_preloaded = ZBarrierSetRuntime::weak_load_barrier_on_weak_oop_field_preloaded_addr();
|
||||
ZBarrierSetRuntime_weak_load_barrier_on_phantom_oop_field_preloaded = ZBarrierSetRuntime::weak_load_barrier_on_phantom_oop_field_preloaded_addr();
|
||||
ZBarrierSetRuntime_load_barrier_on_oop_array = ZBarrierSetRuntime::load_barrier_on_oop_array_addr();
|
||||
ZBarrierSetRuntime_clone = ZBarrierSetRuntime::clone_addr();
|
||||
}
|
||||
#endif
|
||||
|
||||
continuations_enabled = Continuations::enabled();
|
||||
|
||||
ThreadLocalAllocBuffer_alignment_reserve = ThreadLocalAllocBuffer::alignment_reserve();
|
||||
|
||||
|
@ -1163,16 +1163,19 @@ JVMCIObject JVMCIEnv::get_jvmci_method(const methodHandle& method, JVMCI_TRAPS)
|
||||
if (method() == nullptr) {
|
||||
return method_object;
|
||||
}
|
||||
JavaThread* THREAD = JVMCI::compilation_tick(JavaThread::current()); // For exception macros.
|
||||
JVMCIKlassHandle holder_klass(THREAD, method->method_holder());
|
||||
JVMCIObject holder = get_jvmci_type(holder_klass, JVMCI_CHECK_(JVMCIObject()));
|
||||
|
||||
CompilerOracle::tag_blackhole_if_possible(method);
|
||||
|
||||
JavaThread* THREAD = JVMCI::compilation_tick(JavaThread::current()); // For exception macros.
|
||||
jmetadata handle = _runtime->allocate_handle(method);
|
||||
jboolean exception = false;
|
||||
if (is_hotspot()) {
|
||||
JavaValue result(T_OBJECT);
|
||||
JavaCallArguments args;
|
||||
args.push_long((jlong) handle);
|
||||
args.push_oop(Handle(THREAD, HotSpotJVMCI::resolve(holder)));
|
||||
JavaCalls::call_static(&result, HotSpotJVMCI::HotSpotResolvedJavaMethodImpl::klass(),
|
||||
vmSymbols::fromMetaspace_name(),
|
||||
vmSymbols::method_fromMetaspace_signature(), &args, THREAD);
|
||||
@ -1185,7 +1188,7 @@ JVMCIObject JVMCIEnv::get_jvmci_method(const methodHandle& method, JVMCI_TRAPS)
|
||||
JNIAccessMark jni(this, THREAD);
|
||||
method_object = JNIJVMCI::wrap(jni()->CallStaticObjectMethod(JNIJVMCI::HotSpotResolvedJavaMethodImpl::clazz(),
|
||||
JNIJVMCI::HotSpotResolvedJavaMethodImpl_fromMetaspace_method(),
|
||||
(jlong) handle));
|
||||
(jlong) handle, holder.as_jobject()));
|
||||
exception = jni()->ExceptionCheck();
|
||||
}
|
||||
|
||||
@ -1208,6 +1211,9 @@ JVMCIObject JVMCIEnv::get_jvmci_type(const JVMCIKlassHandle& klass, JVMCI_TRAPS)
|
||||
return type;
|
||||
}
|
||||
|
||||
guarantee(klass->is_klass(), "must be valid klass");
|
||||
guarantee(klass->is_loader_alive(), "klass must be alive");
|
||||
|
||||
jlong pointer = (jlong) klass();
|
||||
JavaThread* THREAD = JVMCI::compilation_tick(JavaThread::current()); // For exception macros.
|
||||
jboolean exception = false;
|
||||
@ -1506,9 +1512,9 @@ jlong JVMCIEnv::make_oop_handle(const Handle& obj) {
|
||||
|
||||
oop JVMCIEnv::resolve_oop_handle(jlong oopHandle) {
|
||||
assert(oopHandle != 0, "should be a valid handle");
|
||||
oop obj = *((oopDesc**) oopHandle);
|
||||
oop obj = NativeAccess<>::oop_load(reinterpret_cast<oop*>(oopHandle));
|
||||
if (obj != nullptr) {
|
||||
oopDesc::verify(obj);
|
||||
guarantee(oopDesc::is_oop_or_null(obj), "invalid oop: " INTPTR_FORMAT, p2i((oopDesc*) obj));
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
@ -1644,6 +1650,9 @@ ConstantPool* JVMCIEnv::asConstantPool(JVMCIObject obj) {
|
||||
return *constantPoolHandle;
|
||||
}
|
||||
|
||||
MethodData* JVMCIEnv::asMethodData(JVMCIObject obj) {
|
||||
return (MethodData*) get_HotSpotMethodData_methodDataPointer(obj);
|
||||
}
|
||||
|
||||
// Lookup an nmethod with a matching base and compile id
|
||||
nmethod* JVMCIEnv::lookup_nmethod(address code, jlong compile_id_snapshot) {
|
||||
|
@ -27,6 +27,7 @@
|
||||
|
||||
#include "classfile/javaClasses.hpp"
|
||||
#include "jvmci/jvmciJavaClasses.hpp"
|
||||
#include "oops/klass.hpp"
|
||||
#include "runtime/javaThread.hpp"
|
||||
#include "runtime/jniHandles.hpp"
|
||||
|
||||
@ -375,6 +376,9 @@ public:
|
||||
// Unpack an instance of HotSpotResolvedObjectTypeImpl into the original Klass*
|
||||
Klass* asKlass(JVMCIObject jvmci_type);
|
||||
|
||||
// Unpack an instance of HotSpotMethodData into the original MethodData*
|
||||
MethodData* asMethodData(JVMCIObject jvmci_method_data);
|
||||
|
||||
JVMCIObject get_jvmci_method(const methodHandle& method, JVMCI_TRAPS);
|
||||
|
||||
JVMCIObject get_jvmci_type(const JVMCIKlassHandle& klass, JVMCI_TRAPS);
|
||||
|
@ -87,6 +87,9 @@
|
||||
start_class(HotSpotResolvedJavaMethodImpl, jdk_vm_ci_hotspot_HotSpotResolvedJavaMethodImpl) \
|
||||
long_field(HotSpotResolvedJavaMethodImpl, methodHandle) \
|
||||
end_class \
|
||||
start_class(HotSpotMethodData, jdk_vm_ci_hotspot_HotSpotMethodData) \
|
||||
long_field(HotSpotMethodData, methodDataPointer) \
|
||||
end_class \
|
||||
start_class(InstalledCode, jdk_vm_ci_code_InstalledCode) \
|
||||
long_field(InstalledCode, address) \
|
||||
long_field(InstalledCode, entryPoint) \
|
||||
|
@ -258,6 +258,12 @@ JRT_ENTRY_NO_ASYNC(static address, exception_handler_for_pc_helper(JavaThread* c
|
||||
current->set_is_method_handle_return(false);
|
||||
|
||||
Handle exception(current, ex);
|
||||
|
||||
// The frame we rethrow the exception to might not have been processed by the GC yet.
|
||||
// The stack watermark barrier takes care of detecting that and ensuring the frame
|
||||
// has updated oops.
|
||||
StackWatermarkSet::after_unwind(current);
|
||||
|
||||
cm = CodeCache::find_compiled(pc);
|
||||
assert(cm != nullptr, "this is not a compiled method");
|
||||
// Adjust the pc as needed/
|
||||
@ -753,22 +759,27 @@ void JVMCIRuntime::call_getCompiler(TRAPS) {
|
||||
JVMCIENV->call_HotSpotJVMCIRuntime_getCompiler(jvmciRuntime, JVMCI_CHECK);
|
||||
}
|
||||
|
||||
void JVMCINMethodData::initialize(
|
||||
int nmethod_mirror_index,
|
||||
const char* name,
|
||||
FailedSpeculation** failed_speculations)
|
||||
void JVMCINMethodData::initialize(int nmethod_mirror_index,
|
||||
int nmethod_entry_patch_offset,
|
||||
const char* nmethod_mirror_name,
|
||||
FailedSpeculation** failed_speculations)
|
||||
{
|
||||
_failed_speculations = failed_speculations;
|
||||
_nmethod_mirror_index = nmethod_mirror_index;
|
||||
if (name != nullptr) {
|
||||
_nmethod_entry_patch_offset = nmethod_entry_patch_offset;
|
||||
if (nmethod_mirror_name != nullptr) {
|
||||
_has_name = true;
|
||||
char* dest = (char*) this->name();
|
||||
strcpy(dest, name);
|
||||
char* dest = (char*) name();
|
||||
strcpy(dest, nmethod_mirror_name);
|
||||
} else {
|
||||
_has_name = false;
|
||||
}
|
||||
}
|
||||
|
||||
void JVMCINMethodData::copy(JVMCINMethodData* data) {
|
||||
initialize(data->_nmethod_mirror_index, data->_nmethod_entry_patch_offset, data->name(), data->_failed_speculations);
|
||||
}
|
||||
|
||||
void JVMCINMethodData::add_failed_speculation(nmethod* nm, jlong speculation) {
|
||||
jlong index = speculation >> JVMCINMethodData::SPECULATION_LENGTH_BITS;
|
||||
guarantee(index >= 0 && index <= max_jint, "Encoded JVMCI speculation index is not a positive Java int: " INTPTR_FORMAT, index);
|
||||
@ -852,7 +863,7 @@ jlong JVMCIRuntime::make_oop_handle(const Handle& obj) {
|
||||
oop* ptr = OopHandle(object_handles(), obj()).ptr_raw();
|
||||
MutexLocker ml(_lock);
|
||||
_oop_handles.append(ptr);
|
||||
return (jlong) ptr;
|
||||
return reinterpret_cast<jlong>(ptr);
|
||||
}
|
||||
|
||||
int JVMCIRuntime::release_and_clear_oop_handles() {
|
||||
@ -933,22 +944,22 @@ int JVMCIRuntime::release_cleared_oop_handles() {
|
||||
next++;
|
||||
}
|
||||
}
|
||||
int to_release = next - num_alive;
|
||||
if (next != num_alive) {
|
||||
int to_release = next - num_alive;
|
||||
|
||||
// `next` is now the index of the first null handle
|
||||
// Example: to_release: 2
|
||||
// `next` is now the index of the first null handle
|
||||
// Example: to_release: 2
|
||||
|
||||
// Bulk release the handles with a null referent
|
||||
if (to_release != 0) {
|
||||
// Bulk release the handles with a null referent
|
||||
object_handles()->release(_oop_handles.adr_at(num_alive), to_release);
|
||||
|
||||
// Truncate oop handles to only those with a non-null referent
|
||||
JVMCI_event_1("compacted oop handles in JVMCI runtime %d from %d to %d", _id, _oop_handles.length(), num_alive);
|
||||
_oop_handles.trunc_to(num_alive);
|
||||
// Example: HHH
|
||||
|
||||
return to_release;
|
||||
}
|
||||
|
||||
// Truncate oop handles to only those with a non-null referent
|
||||
JVMCI_event_1("compacted oop handles in JVMCI runtime %d from %d to %d", _id, _oop_handles.length(), num_alive);
|
||||
_oop_handles.trunc_to(num_alive);
|
||||
// Example: HHH
|
||||
|
||||
return to_release;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
@ -2077,7 +2088,8 @@ JVMCI::CodeInstallResult JVMCIRuntime::register_method(JVMCIEnv* JVMCIENV,
|
||||
JVMCIObject nmethod_mirror,
|
||||
FailedSpeculation** failed_speculations,
|
||||
char* speculations,
|
||||
int speculations_len) {
|
||||
int speculations_len,
|
||||
int nmethod_entry_patch_offset) {
|
||||
JVMCI_EXCEPTION_CONTEXT;
|
||||
CompLevel comp_level = CompLevel_full_optimization;
|
||||
char* failure_detail = nullptr;
|
||||
@ -2145,6 +2157,10 @@ JVMCI::CodeInstallResult JVMCIRuntime::register_method(JVMCIEnv* JVMCIENV,
|
||||
// as in C2, then it must be freed.
|
||||
//code_buffer->free_blob();
|
||||
} else {
|
||||
JVMCINMethodData* data = JVMCINMethodData::create(nmethod_mirror_index,
|
||||
nmethod_entry_patch_offset,
|
||||
nmethod_mirror_name,
|
||||
failed_speculations);
|
||||
nm = nmethod::new_nmethod(method,
|
||||
compile_id,
|
||||
entry_bci,
|
||||
@ -2154,8 +2170,7 @@ JVMCI::CodeInstallResult JVMCIRuntime::register_method(JVMCIEnv* JVMCIENV,
|
||||
frame_words, oop_map_set,
|
||||
handler_table, implicit_exception_table,
|
||||
compiler, comp_level,
|
||||
speculations, speculations_len,
|
||||
nmethod_mirror_index, nmethod_mirror_name, failed_speculations);
|
||||
speculations, speculations_len, data);
|
||||
|
||||
|
||||
// Free codeBlobs
|
||||
|
@ -40,19 +40,26 @@ class JVMCICompiler;
|
||||
class JVMCICompileState;
|
||||
class MetadataHandles;
|
||||
|
||||
// Encapsulates the JVMCI metadata for an nmethod.
|
||||
// JVMCINMethodData objects are inlined into nmethods
|
||||
// at nmethod::_jvmci_data_offset.
|
||||
class JVMCINMethodData {
|
||||
// Encapsulates the JVMCI metadata for an nmethod. JVMCINMethodData objects are normally inlined
|
||||
// into nmethods at nmethod::_jvmci_data_offset but during construction of the nmethod they are
|
||||
// resource allocated so they can be passed into the nmethod constructor.
|
||||
class JVMCINMethodData : public ResourceObj {
|
||||
friend class JVMCIVMStructs;
|
||||
// Index for the HotSpotNmethod mirror in the nmethod's oops table.
|
||||
// This is -1 if there is no mirror in the oops table.
|
||||
int _nmethod_mirror_index;
|
||||
|
||||
// Is HotSpotNmethod.name non-null? If so, the value is
|
||||
// embedded in the end of this object.
|
||||
bool _has_name;
|
||||
|
||||
// Index for the HotSpotNmethod mirror in the nmethod's oops table.
|
||||
// This is -1 if there is no mirror in the oops table.
|
||||
int _nmethod_mirror_index;
|
||||
|
||||
// This is the offset of the patchable part of the nmethod entry barrier sequence. The meaning is
|
||||
// somewhat platform dependent as the way patching is done varies by architecture. Older JVMCI
|
||||
// based compilers didn't emit the entry barrier so having a positive value for this offset
|
||||
// confirms that the installed code supports the entry barrier.
|
||||
int _nmethod_entry_patch_offset;
|
||||
|
||||
// Address of the failed speculations list to which a speculation
|
||||
// is appended when it causes a deoptimization.
|
||||
FailedSpeculation** _failed_speculations;
|
||||
@ -65,7 +72,31 @@ class JVMCINMethodData {
|
||||
SPECULATION_LENGTH_MASK = (1 << SPECULATION_LENGTH_BITS) - 1
|
||||
};
|
||||
|
||||
// Allocate a temporary data object for use during installation
|
||||
void initialize(int nmethod_mirror_index,
|
||||
int nmethod_entry_patch_offset,
|
||||
const char* nmethod_mirror_name,
|
||||
FailedSpeculation** failed_speculations);
|
||||
|
||||
void* operator new(size_t size, const char* nmethod_mirror_name) {
|
||||
assert(size == sizeof(JVMCINMethodData), "must agree");
|
||||
size_t total_size = compute_size(nmethod_mirror_name);
|
||||
return (address)resource_allocate_bytes(total_size);
|
||||
}
|
||||
|
||||
public:
|
||||
static JVMCINMethodData* create(int nmethod_mirror_index,
|
||||
int nmethod_entry_patch_offset,
|
||||
const char* nmethod_mirror_name,
|
||||
FailedSpeculation** failed_speculations) {
|
||||
JVMCINMethodData* result = new (nmethod_mirror_name) JVMCINMethodData();
|
||||
result->initialize(nmethod_mirror_index,
|
||||
nmethod_entry_patch_offset,
|
||||
nmethod_mirror_name,
|
||||
failed_speculations);
|
||||
return result;
|
||||
}
|
||||
|
||||
// Computes the size of a JVMCINMethodData object
|
||||
static int compute_size(const char* nmethod_mirror_name) {
|
||||
int size = sizeof(JVMCINMethodData);
|
||||
@ -75,9 +106,12 @@ public:
|
||||
return size;
|
||||
}
|
||||
|
||||
void initialize(int nmethod_mirror_index,
|
||||
const char* name,
|
||||
FailedSpeculation** failed_speculations);
|
||||
int size() {
|
||||
return compute_size(name());
|
||||
}
|
||||
|
||||
// Copy the contents of this object into data which is normally the storage allocated in the nmethod.
|
||||
void copy(JVMCINMethodData* data);
|
||||
|
||||
// Adds `speculation` to the failed speculations list.
|
||||
void add_failed_speculation(nmethod* nm, jlong speculation);
|
||||
@ -94,6 +128,15 @@ public:
|
||||
|
||||
// Sets the mirror in nm's oops table.
|
||||
void set_nmethod_mirror(nmethod* nm, oop mirror);
|
||||
|
||||
bool has_entry_barrier() {
|
||||
return _nmethod_entry_patch_offset != -1;
|
||||
}
|
||||
|
||||
int nmethod_entry_patch_offset() {
|
||||
guarantee(_nmethod_entry_patch_offset != -1, "missing entry barrier");
|
||||
return _nmethod_entry_patch_offset;
|
||||
}
|
||||
};
|
||||
|
||||
// A top level class that represents an initialized JVMCI runtime.
|
||||
@ -414,7 +457,8 @@ class JVMCIRuntime: public CHeapObj<mtJVMCI> {
|
||||
JVMCIObject nmethod_mirror,
|
||||
FailedSpeculation** failed_speculations,
|
||||
char* speculations,
|
||||
int speculations_len);
|
||||
int speculations_len,
|
||||
int nmethod_entry_patch_offset);
|
||||
|
||||
// Detach `thread` from this runtime and destroy this runtime's JavaVM
|
||||
// if using one JavaVM per JVMCI compilation .
|
||||
|
@ -211,7 +211,7 @@ bool JVMCIGlobals::enable_jvmci_product_mode(JVMFlagOrigin origin) {
|
||||
}
|
||||
|
||||
bool JVMCIGlobals::gc_supports_jvmci() {
|
||||
return UseSerialGC || UseParallelGC || UseG1GC;
|
||||
return UseSerialGC || UseParallelGC || UseG1GC || UseZGC;
|
||||
}
|
||||
|
||||
void JVMCIGlobals::check_jvmci_supported_gc() {
|
||||
|
@ -56,6 +56,24 @@
|
||||
static_field(CompilerToVM::Data, SharedRuntime_deopt_blob_unpack_with_exception_in_tls, \
|
||||
address) \
|
||||
static_field(CompilerToVM::Data, SharedRuntime_deopt_blob_uncommon_trap, address) \
|
||||
static_field(CompilerToVM::Data, SharedRuntime_polling_page_return_handler, \
|
||||
address) \
|
||||
\
|
||||
static_field(CompilerToVM::Data, nmethod_entry_barrier, address) \
|
||||
static_field(CompilerToVM::Data, thread_disarmed_guard_value_offset, int) \
|
||||
static_field(CompilerToVM::Data, thread_address_bad_mask_offset, int) \
|
||||
AARCH64_ONLY(static_field(CompilerToVM::Data, BarrierSetAssembler_nmethod_patching_type, int)) \
|
||||
\
|
||||
static_field(CompilerToVM::Data, ZBarrierSetRuntime_load_barrier_on_oop_field_preloaded, address) \
|
||||
static_field(CompilerToVM::Data, ZBarrierSetRuntime_load_barrier_on_weak_oop_field_preloaded, address) \
|
||||
static_field(CompilerToVM::Data, ZBarrierSetRuntime_load_barrier_on_phantom_oop_field_preloaded, address) \
|
||||
static_field(CompilerToVM::Data, ZBarrierSetRuntime_weak_load_barrier_on_oop_field_preloaded, address) \
|
||||
static_field(CompilerToVM::Data, ZBarrierSetRuntime_weak_load_barrier_on_weak_oop_field_preloaded, address) \
|
||||
static_field(CompilerToVM::Data, ZBarrierSetRuntime_weak_load_barrier_on_phantom_oop_field_preloaded, address) \
|
||||
static_field(CompilerToVM::Data, ZBarrierSetRuntime_load_barrier_on_oop_array, address) \
|
||||
static_field(CompilerToVM::Data, ZBarrierSetRuntime_clone, address) \
|
||||
\
|
||||
static_field(CompilerToVM::Data, continuations_enabled, bool) \
|
||||
\
|
||||
static_field(CompilerToVM::Data, ThreadLocalAllocBuffer_alignment_reserve, size_t) \
|
||||
\
|
||||
@ -184,6 +202,7 @@
|
||||
volatile_nonstatic_field(JavaThread, _is_method_handle_return, int) \
|
||||
volatile_nonstatic_field(JavaThread, _doing_unsafe_access, bool) \
|
||||
nonstatic_field(JavaThread, _osthread, OSThread*) \
|
||||
nonstatic_field(JavaThread, _saved_exception_pc, address) \
|
||||
nonstatic_field(JavaThread, _pending_deoptimization, int) \
|
||||
nonstatic_field(JavaThread, _pending_failed_speculation, jlong) \
|
||||
nonstatic_field(JavaThread, _pending_transfer_to_interpreter, bool) \
|
||||
@ -475,6 +494,7 @@
|
||||
declare_constant(CodeInstaller::EXCEPTION_HANDLER_ENTRY) \
|
||||
declare_constant(CodeInstaller::DEOPT_HANDLER_ENTRY) \
|
||||
declare_constant(CodeInstaller::FRAME_COMPLETE) \
|
||||
declare_constant(CodeInstaller::ENTRY_BARRIER_PATCH) \
|
||||
declare_constant(CodeInstaller::INVOKEINTERFACE) \
|
||||
declare_constant(CodeInstaller::INVOKEVIRTUAL) \
|
||||
declare_constant(CodeInstaller::INVOKESTATIC) \
|
||||
@ -694,6 +714,10 @@
|
||||
\
|
||||
declare_constant(MultiBranchData::per_case_cell_count) \
|
||||
\
|
||||
AARCH64_ONLY(declare_constant(NMethodPatchingType::stw_instruction_and_data_patch)) \
|
||||
AARCH64_ONLY(declare_constant(NMethodPatchingType::conc_instruction_and_data_patch)) \
|
||||
AARCH64_ONLY(declare_constant(NMethodPatchingType::conc_data_patch)) \
|
||||
\
|
||||
declare_constant(ReceiverTypeData::nonprofiled_count_off_set) \
|
||||
declare_constant(ReceiverTypeData::receiver_type_row_cell_count) \
|
||||
declare_constant(ReceiverTypeData::receiver0_offset) \
|
||||
|
@ -43,6 +43,7 @@
|
||||
template(jdk_vm_ci_hotspot_HotSpotResolvedJavaFieldImpl, "jdk/vm/ci/hotspot/HotSpotResolvedJavaFieldImpl") \
|
||||
template(jdk_vm_ci_hotspot_HotSpotCompressedNullConstant, "jdk/vm/ci/hotspot/HotSpotCompressedNullConstant") \
|
||||
template(jdk_vm_ci_hotspot_HotSpotObjectConstantImpl, "jdk/vm/ci/hotspot/HotSpotObjectConstantImpl") \
|
||||
template(jdk_vm_ci_hotspot_HotSpotMethodData, "jdk/vm/ci/hotspot/HotSpotMethodData") \
|
||||
template(jdk_vm_ci_hotspot_DirectHotSpotObjectConstantImpl, "jdk/vm/ci/hotspot/DirectHotSpotObjectConstantImpl") \
|
||||
template(jdk_vm_ci_hotspot_IndirectHotSpotObjectConstantImpl, "jdk/vm/ci/hotspot/IndirectHotSpotObjectConstantImpl") \
|
||||
template(jdk_vm_ci_hotspot_HotSpotStackFrameReference, "jdk/vm/ci/hotspot/HotSpotStackFrameReference") \
|
||||
@ -81,7 +82,7 @@
|
||||
template(compileMethod_signature, "(Ljdk/vm/ci/hotspot/HotSpotResolvedJavaMethod;IJI)Ljdk/vm/ci/hotspot/HotSpotCompilationRequestResult;") \
|
||||
template(isGCSupported_name, "isGCSupported") \
|
||||
template(fromMetaspace_name, "fromMetaspace") \
|
||||
template(method_fromMetaspace_signature, "(J)Ljdk/vm/ci/hotspot/HotSpotResolvedJavaMethod;") \
|
||||
template(method_fromMetaspace_signature, "(JLjdk/vm/ci/hotspot/HotSpotResolvedObjectTypeImpl;)Ljdk/vm/ci/hotspot/HotSpotResolvedJavaMethod;") \
|
||||
template(constantPool_fromMetaspace_signature, "(J)Ljdk/vm/ci/hotspot/HotSpotConstantPool;") \
|
||||
template(klass_fromMetaspace_signature, "(J)Ljdk/vm/ci/hotspot/HotSpotResolvedObjectTypeImpl;") \
|
||||
template(primitive_fromMetaspace_signature, "(Ljdk/vm/ci/hotspot/HotSpotObjectConstantImpl;C)Ljdk/vm/ci/hotspot/HotSpotResolvedPrimitiveType;") \
|
||||
|
@ -394,6 +394,10 @@ protected:
|
||||
static ByteSize modifier_flags_offset() { return in_ByteSize(offset_of(Klass, _modifier_flags)); }
|
||||
static ByteSize layout_helper_offset() { return in_ByteSize(offset_of(Klass, _layout_helper)); }
|
||||
static ByteSize access_flags_offset() { return in_ByteSize(offset_of(Klass, _access_flags)); }
|
||||
#if INCLUDE_JVMCI
|
||||
static ByteSize subklass_offset() { return in_ByteSize(offset_of(Klass, _subklass)); }
|
||||
static ByteSize next_sibling_offset() { return in_ByteSize(offset_of(Klass, _next_sibling)); }
|
||||
#endif
|
||||
|
||||
// Unpacking layout_helper:
|
||||
static const int _lh_neutral_value = 0; // neutral non-array non-instance value
|
||||
|
@ -1477,9 +1477,6 @@ void Arguments::set_use_compressed_oops() {
|
||||
if (UseCompressedOops && !FLAG_IS_DEFAULT(UseCompressedOops)) {
|
||||
warning("Max heap size too large for Compressed Oops");
|
||||
FLAG_SET_DEFAULT(UseCompressedOops, false);
|
||||
if (COMPRESSED_CLASS_POINTERS_DEPENDS_ON_COMPRESSED_OOPS) {
|
||||
FLAG_SET_DEFAULT(UseCompressedClassPointers, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif // _LP64
|
||||
@ -1497,22 +1494,15 @@ void Arguments::set_use_compressed_klass_ptrs() {
|
||||
// be completely avoided instead. So it is better not to perform this trick. And by
|
||||
// not having that reliance, large heaps, or heaps not supporting compressed oops,
|
||||
// can still use compressed class pointers.
|
||||
if (COMPRESSED_CLASS_POINTERS_DEPENDS_ON_COMPRESSED_OOPS && !UseCompressedOops) {
|
||||
if (UseCompressedClassPointers) {
|
||||
warning("UseCompressedClassPointers requires UseCompressedOops");
|
||||
}
|
||||
FLAG_SET_DEFAULT(UseCompressedClassPointers, false);
|
||||
} else {
|
||||
// Turn on UseCompressedClassPointers too
|
||||
if (FLAG_IS_DEFAULT(UseCompressedClassPointers)) {
|
||||
FLAG_SET_ERGO(UseCompressedClassPointers, true);
|
||||
}
|
||||
// Check the CompressedClassSpaceSize to make sure we use compressed klass ptrs.
|
||||
if (UseCompressedClassPointers) {
|
||||
if (CompressedClassSpaceSize > KlassEncodingMetaspaceMax) {
|
||||
warning("CompressedClassSpaceSize is too large for UseCompressedClassPointers");
|
||||
FLAG_SET_DEFAULT(UseCompressedClassPointers, false);
|
||||
}
|
||||
// Turn on UseCompressedClassPointers too
|
||||
if (FLAG_IS_DEFAULT(UseCompressedClassPointers)) {
|
||||
FLAG_SET_ERGO(UseCompressedClassPointers, true);
|
||||
}
|
||||
// Check the CompressedClassSpaceSize to make sure we use compressed klass ptrs.
|
||||
if (UseCompressedClassPointers) {
|
||||
if (CompressedClassSpaceSize > KlassEncodingMetaspaceMax) {
|
||||
warning("CompressedClassSpaceSize is too large for UseCompressedClassPointers");
|
||||
FLAG_SET_DEFAULT(UseCompressedClassPointers, false);
|
||||
}
|
||||
}
|
||||
#endif // _LP64
|
||||
@ -1677,9 +1667,6 @@ void Arguments::set_heap_size() {
|
||||
"Please check the setting of MaxRAMPercentage %5.2f."
|
||||
,(size_t)reasonable_max, (size_t)max_coop_heap, MaxRAMPercentage);
|
||||
FLAG_SET_ERGO(UseCompressedOops, false);
|
||||
if (COMPRESSED_CLASS_POINTERS_DEPENDS_ON_COMPRESSED_OOPS) {
|
||||
FLAG_SET_ERGO(UseCompressedClassPointers, false);
|
||||
}
|
||||
} else {
|
||||
reasonable_max = MIN2(reasonable_max, max_coop_heap);
|
||||
}
|
||||
|
@ -884,23 +884,33 @@ final class CompilerToVM {
|
||||
|
||||
/**
|
||||
* Read a {@code Klass*} value from the memory location described by {@code base} plus
|
||||
* {@code displacement} and return the {@link HotSpotResolvedObjectTypeImpl} wrapping it. This
|
||||
* method does no checking that the memory location actually contains a valid pointer and may
|
||||
* crash the VM if an invalid location is provided. If the {@code base} is null then
|
||||
* {@code displacement} is used by itself. If {@code base} is a
|
||||
* {@link HotSpotResolvedJavaMethodImpl}, {@link HotSpotConstantPool} or
|
||||
* {@link HotSpotResolvedObjectTypeImpl} then the metaspace pointer is fetched from that object
|
||||
* and added to {@code displacement}. Any other non-null object type causes an
|
||||
* {@link IllegalArgumentException} to be thrown.
|
||||
* {@code displacement} and return the {@link HotSpotResolvedObjectTypeImpl} wrapping it. This method
|
||||
* only performs the read if the memory location is known to contain a valid Klass*. If
|
||||
* {@code base} is a {@link HotSpotConstantPool}, {@link HotSpotMethodData}, {@link HotSpotObjectConstantImpl},
|
||||
* or {@link HotSpotResolvedObjectTypeImpl} then the field
|
||||
* corresopnding to {@code displacement} is fetched using the appropriate HotSpot accessor. Any
|
||||
* other object type or an unexpected displacement causes an {@link IllegalArgumentException} to
|
||||
* be thrown. The set of fields which can be read in this fashion corresponds to the {@link VMField}
|
||||
* with type {@code Klass*} that are described in the {@link HotSpotVMConfigStore#getFields()}.
|
||||
* Additionally several injected fields in {@link Class} are also handled.
|
||||
*
|
||||
* @param base an object to read from or null
|
||||
* @param base an object to read from
|
||||
* @param displacement
|
||||
* @param compressed true if the location contains a compressed Klass*
|
||||
* @return null or the resolved method for this location
|
||||
* @throws NullPointerException if {@code base == null}
|
||||
*/
|
||||
private native HotSpotResolvedObjectTypeImpl getResolvedJavaType0(Object base, long displacement, boolean compressed);
|
||||
|
||||
HotSpotResolvedObjectTypeImpl getResolvedJavaType(MetaspaceObject base, long displacement, boolean compressed) {
|
||||
HotSpotResolvedObjectTypeImpl getResolvedJavaType(HotSpotConstantPool base, long displacement) {
|
||||
return getResolvedJavaType0(base, displacement, false);
|
||||
}
|
||||
|
||||
HotSpotResolvedObjectTypeImpl getResolvedJavaType(HotSpotMethodData base, long displacement) {
|
||||
return getResolvedJavaType0(base, displacement, false);
|
||||
}
|
||||
|
||||
HotSpotResolvedObjectTypeImpl getResolvedJavaType(HotSpotResolvedObjectTypeImpl base, long displacement, boolean compressed) {
|
||||
return getResolvedJavaType0(base, displacement, compressed);
|
||||
}
|
||||
|
||||
@ -932,6 +942,9 @@ final class CompilerToVM {
|
||||
*/
|
||||
native int methodDataProfileDataSize(long metaspaceMethodData, int position);
|
||||
|
||||
|
||||
native int methodDataExceptionSeen(long metaspaceMethodData, int bci);
|
||||
|
||||
/**
|
||||
* Return the amount of native stack required for the interpreter frames represented by
|
||||
* {@code frame}. This is used when emitting the stack banging code to ensure that there is
|
||||
|
@ -238,7 +238,7 @@ public final class HotSpotConstantPool implements ConstantPool, MetaspaceHandleO
|
||||
*/
|
||||
private HotSpotResolvedObjectType getHolder() {
|
||||
if (holder == null) {
|
||||
holder = compilerToVM().getResolvedJavaType(this, config().constantPoolHolderOffset, false);
|
||||
holder = compilerToVM().getResolvedJavaType(this, config().constantPoolHolderOffset);
|
||||
}
|
||||
return holder;
|
||||
}
|
||||
|
@ -44,7 +44,7 @@ import jdk.vm.ci.meta.TriState;
|
||||
/**
|
||||
* Access to a HotSpot {@code MethodData} structure (defined in methodData.hpp).
|
||||
*/
|
||||
final class HotSpotMethodData {
|
||||
final class HotSpotMethodData implements MetaspaceObject {
|
||||
|
||||
/**
|
||||
* VM state that can be reset when building an AOT image.
|
||||
@ -174,6 +174,11 @@ final class HotSpotMethodData {
|
||||
this.state = VMState.instance();
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getMetaspacePointer() {
|
||||
return methodDataPointer;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return value of the MethodData::_data_size field
|
||||
*/
|
||||
@ -197,16 +202,19 @@ final class HotSpotMethodData {
|
||||
return normalDataSize() > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return true if there is an extra data section and the first tag is non-zero.
|
||||
*/
|
||||
public boolean hasExtraData() {
|
||||
return extraDataSize() > 0;
|
||||
return extraDataSize() > 0 && HotSpotMethodDataAccessor.readTag(state.config, this, getExtraDataBeginOffset()) != 0;
|
||||
}
|
||||
|
||||
public int getExtraDataBeginOffset() {
|
||||
private int getExtraDataBeginOffset() {
|
||||
return normalDataSize();
|
||||
}
|
||||
|
||||
public boolean isWithin(int position) {
|
||||
return position >= 0 && position < normalDataSize() + extraDataSize();
|
||||
return position >= 0 && position < normalDataSize();
|
||||
}
|
||||
|
||||
public int getDeoptimizationCount(DeoptimizationReason reason) {
|
||||
@ -241,17 +249,6 @@ final class HotSpotMethodData {
|
||||
return getData(position);
|
||||
}
|
||||
|
||||
public HotSpotMethodDataAccessor getExtraData(int position) {
|
||||
if (position >= normalDataSize() + extraDataSize()) {
|
||||
return null;
|
||||
}
|
||||
HotSpotMethodDataAccessor data = getData(position);
|
||||
if (data != null) {
|
||||
return data;
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
public static HotSpotMethodDataAccessor getNoDataAccessor(boolean exceptionPossiblyNotRecorded) {
|
||||
if (exceptionPossiblyNotRecorded) {
|
||||
return VMState.instance().noDataExceptionPossiblyNotRecordedAccessor;
|
||||
@ -308,7 +305,7 @@ final class HotSpotMethodData {
|
||||
|
||||
private HotSpotResolvedObjectTypeImpl readKlass(int position, int offsetInBytes) {
|
||||
long fullOffsetInBytes = state.computeFullOffset(position, offsetInBytes);
|
||||
return compilerToVM().getResolvedJavaType(methodDataPointer + fullOffsetInBytes);
|
||||
return compilerToVM().getResolvedJavaType(this, fullOffsetInBytes);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -344,20 +341,6 @@ final class HotSpotMethodData {
|
||||
}
|
||||
}
|
||||
|
||||
if (hasExtraData()) {
|
||||
int pos = getExtraDataBeginOffset();
|
||||
HotSpotMethodDataAccessor data;
|
||||
while ((data = getExtraData(pos)) != null) {
|
||||
if (pos == getExtraDataBeginOffset()) {
|
||||
sb.append(nl).append("--- Extra data:");
|
||||
}
|
||||
int bci = data.getBCI(this, pos);
|
||||
sb.append(String.format("%n%-6d bci: %-6d%-20s", pos, bci, data.getClass().getSimpleName()));
|
||||
sb.append(data.appendTo(new StringBuilder(), this, pos).toString().replace(nl, nlIndent));
|
||||
pos = pos + data.getSize(this, pos);
|
||||
}
|
||||
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
|
@ -22,6 +22,8 @@
|
||||
*/
|
||||
package jdk.vm.ci.hotspot;
|
||||
|
||||
import static jdk.vm.ci.hotspot.CompilerToVM.compilerToVM;
|
||||
|
||||
import jdk.vm.ci.meta.DeoptimizationReason;
|
||||
import jdk.vm.ci.meta.JavaMethodProfile;
|
||||
import jdk.vm.ci.meta.JavaTypeProfile;
|
||||
@ -77,7 +79,7 @@ final class HotSpotProfilingInfo implements ProfilingInfo {
|
||||
if (!isMature) {
|
||||
return null;
|
||||
}
|
||||
findBCI(bci, false);
|
||||
findBCI(bci);
|
||||
return dataAccessor.getTypeProfile(methodData, position);
|
||||
}
|
||||
|
||||
@ -86,7 +88,7 @@ final class HotSpotProfilingInfo implements ProfilingInfo {
|
||||
if (!isMature) {
|
||||
return null;
|
||||
}
|
||||
findBCI(bci, false);
|
||||
findBCI(bci);
|
||||
return dataAccessor.getMethodProfile(methodData, position);
|
||||
}
|
||||
|
||||
@ -95,7 +97,7 @@ final class HotSpotProfilingInfo implements ProfilingInfo {
|
||||
if (!isMature) {
|
||||
return -1;
|
||||
}
|
||||
findBCI(bci, false);
|
||||
findBCI(bci);
|
||||
return dataAccessor.getBranchTakenProbability(methodData, position);
|
||||
}
|
||||
|
||||
@ -104,19 +106,27 @@ final class HotSpotProfilingInfo implements ProfilingInfo {
|
||||
if (!isMature) {
|
||||
return null;
|
||||
}
|
||||
findBCI(bci, false);
|
||||
findBCI(bci);
|
||||
return dataAccessor.getSwitchProbabilities(methodData, position);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TriState getExceptionSeen(int bci) {
|
||||
findBCI(bci, true);
|
||||
if (!findBCI(bci)) {
|
||||
// There might data in the extra data section but all accesses to that memory must be
|
||||
// under a lock so go into VM to get the data.
|
||||
int exceptionSeen = compilerToVM().methodDataExceptionSeen(methodData.methodDataPointer, bci);
|
||||
if (exceptionSeen == -1) {
|
||||
return TriState.UNKNOWN;
|
||||
}
|
||||
return TriState.get(exceptionSeen != 0);
|
||||
}
|
||||
return dataAccessor.getExceptionSeen(methodData, position);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TriState getNullSeen(int bci) {
|
||||
findBCI(bci, false);
|
||||
findBCI(bci);
|
||||
return dataAccessor.getNullSeen(methodData, position);
|
||||
}
|
||||
|
||||
@ -125,7 +135,7 @@ final class HotSpotProfilingInfo implements ProfilingInfo {
|
||||
if (!isMature) {
|
||||
return -1;
|
||||
}
|
||||
findBCI(bci, false);
|
||||
findBCI(bci);
|
||||
return dataAccessor.getExecutionCount(methodData, position);
|
||||
}
|
||||
|
||||
@ -141,7 +151,7 @@ final class HotSpotProfilingInfo implements ProfilingInfo {
|
||||
return count;
|
||||
}
|
||||
|
||||
private void findBCI(int targetBCI, boolean searchExtraData) {
|
||||
private boolean findBCI(int targetBCI) {
|
||||
assert targetBCI >= 0 : "invalid BCI";
|
||||
|
||||
if (methodData.hasNormalData()) {
|
||||
@ -151,33 +161,15 @@ final class HotSpotProfilingInfo implements ProfilingInfo {
|
||||
int currentBCI = currentAccessor.getBCI(methodData, currentPosition);
|
||||
if (currentBCI == targetBCI) {
|
||||
normalDataFound(currentAccessor, currentPosition, currentBCI);
|
||||
return;
|
||||
return true;
|
||||
} else if (currentBCI > targetBCI) {
|
||||
break;
|
||||
}
|
||||
currentPosition = currentPosition + currentAccessor.getSize(methodData, currentPosition);
|
||||
}
|
||||
}
|
||||
|
||||
boolean exceptionPossiblyNotRecorded = false;
|
||||
if (searchExtraData && methodData.hasExtraData()) {
|
||||
int currentPosition = methodData.getExtraDataBeginOffset();
|
||||
HotSpotMethodDataAccessor currentAccessor;
|
||||
while ((currentAccessor = methodData.getExtraData(currentPosition)) != null) {
|
||||
int currentBCI = currentAccessor.getBCI(methodData, currentPosition);
|
||||
if (currentBCI == targetBCI) {
|
||||
extraDataFound(currentAccessor, currentPosition);
|
||||
return;
|
||||
}
|
||||
currentPosition = currentPosition + currentAccessor.getSize(methodData, currentPosition);
|
||||
}
|
||||
|
||||
if (!methodData.isWithin(currentPosition)) {
|
||||
exceptionPossiblyNotRecorded = true;
|
||||
}
|
||||
}
|
||||
|
||||
noDataFound(exceptionPossiblyNotRecorded);
|
||||
noDataFound(false);
|
||||
return false;
|
||||
}
|
||||
|
||||
private void normalDataFound(HotSpotMethodDataAccessor data, int pos, int bci) {
|
||||
@ -186,10 +178,6 @@ final class HotSpotProfilingInfo implements ProfilingInfo {
|
||||
this.hintBCI = bci;
|
||||
}
|
||||
|
||||
private void extraDataFound(HotSpotMethodDataAccessor data, int pos) {
|
||||
setCurrentData(data, pos);
|
||||
}
|
||||
|
||||
private void noDataFound(boolean exceptionPossiblyNotRecorded) {
|
||||
HotSpotMethodDataAccessor accessor = HotSpotMethodData.getNoDataAccessor(exceptionPossiblyNotRecorded);
|
||||
setCurrentData(accessor, -1);
|
||||
|
@ -85,22 +85,6 @@ final class HotSpotResolvedJavaMethodImpl extends HotSpotMethod implements HotSp
|
||||
*/
|
||||
private String nameCache;
|
||||
|
||||
/**
|
||||
* Gets the holder of a HotSpot metaspace method native object.
|
||||
*
|
||||
* @param metaspaceHandle a handle to a metaspace Method object
|
||||
* @return the {@link ResolvedJavaType} corresponding to the holder of the
|
||||
* {@code metaspaceMethod}
|
||||
*/
|
||||
private static HotSpotResolvedObjectTypeImpl getHolder(long metaspaceHandle) {
|
||||
HotSpotVMConfig config = config();
|
||||
long methodPointer = UNSAFE.getLong(metaspaceHandle);
|
||||
assert methodPointer != 0 : metaspaceHandle;
|
||||
final long constMethodPointer = UNSAFE.getAddress(methodPointer + config.methodConstMethodOffset);
|
||||
final long constantPoolPointer = UNSAFE.getAddress(constMethodPointer + config.constMethodConstantsOffset);
|
||||
return Objects.requireNonNull(compilerToVM().getResolvedJavaType(constantPoolPointer + config.constantPoolHolderOffset));
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the JVMCI mirror from a HotSpot method. The VM is responsible for ensuring that the
|
||||
* Method* is kept alive for the duration of this call and the {@link HotSpotJVMCIRuntime} keeps
|
||||
@ -113,8 +97,7 @@ final class HotSpotResolvedJavaMethodImpl extends HotSpotMethod implements HotSp
|
||||
*/
|
||||
@SuppressWarnings("unused")
|
||||
@VMEntryPoint
|
||||
private static HotSpotResolvedJavaMethod fromMetaspace(long metaspaceHandle) {
|
||||
HotSpotResolvedObjectTypeImpl holder = getHolder(metaspaceHandle);
|
||||
private static HotSpotResolvedJavaMethod fromMetaspace(long metaspaceHandle, HotSpotResolvedObjectTypeImpl holder) {
|
||||
return holder.createMethod(metaspaceHandle);
|
||||
}
|
||||
|
||||
|
@ -230,7 +230,10 @@ final class HotSpotResolvedObjectTypeImpl extends HotSpotResolvedJavaType implem
|
||||
HotSpotResolvedObjectTypeImpl type = this;
|
||||
while (type.isAbstract()) {
|
||||
HotSpotResolvedObjectTypeImpl subklass = type.getSubklass();
|
||||
if (subklass == null || UNSAFE.getAddress(subklass.getKlassPointer() + config.nextSiblingOffset) != 0) {
|
||||
if (subklass == null) {
|
||||
return null;
|
||||
}
|
||||
if (compilerToVM().getResolvedJavaType(subklass, config.nextSiblingOffset, false) != null) {
|
||||
return null;
|
||||
}
|
||||
type = subklass;
|
||||
@ -262,7 +265,7 @@ final class HotSpotResolvedObjectTypeImpl extends HotSpotResolvedJavaType implem
|
||||
* @return true if the type is a leaf class
|
||||
*/
|
||||
private boolean isLeafClass() {
|
||||
return UNSAFE.getLong(this.getKlassPointer() + config().subklassOffset) == 0;
|
||||
return compilerToVM().getResolvedJavaType(this, config().subklassOffset, false) == null;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -304,11 +304,6 @@ public class CompilerToVMHelper {
|
||||
return CTVM.getConstantPool(object);
|
||||
}
|
||||
|
||||
public static HotSpotResolvedObjectType getResolvedJavaType(MetaspaceObject base,
|
||||
long displacement, boolean compressed) {
|
||||
return CTVM.getResolvedJavaType(base, displacement, compressed);
|
||||
}
|
||||
|
||||
public static long getMetaspacePointer(Object o) {
|
||||
return ((MetaspaceObject) o).getMetaspacePointer();
|
||||
}
|
||||
|
@ -318,7 +318,7 @@ public abstract class TestAssembler {
|
||||
DataPatch[] finishedDataPatches = dataPatches.toArray(new DataPatch[0]);
|
||||
int dataSectionAlignment = 8; // CodeBuffer::SECT_CONSTS code section alignment
|
||||
return new HotSpotCompiledNmethod(method.getName(), finishedCode, finishedCode.length, finishedSites, new Assumption[0], new ResolvedJavaMethod[]{method}, new Comment[0], finishedData, dataSectionAlignment,
|
||||
finishedDataPatches, false, frameSize, deoptRescue, method, 0, id, 0L, false);
|
||||
finishedDataPatches, false, frameSize, deoptRescue, method, -1, id, 0L, false);
|
||||
}
|
||||
|
||||
protected static class Buffer {
|
||||
|
374
test/hotspot/jtreg/compiler/jvmci/meta/ProfilingInfoTest.java
Normal file
374
test/hotspot/jtreg/compiler/jvmci/meta/ProfilingInfoTest.java
Normal file
@ -0,0 +1,374 @@
|
||||
/*
|
||||
* Copyright (c) 2013, 2018, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
/**
|
||||
* These tests are explicitly testing the profiling behavior of the
|
||||
* interpreter. C1-based profiling differs slightly and when -Xcomp
|
||||
* is present, profiles will be created by C1 compiled code, not the
|
||||
* interpreter.
|
||||
*
|
||||
* @test
|
||||
* @requires vm.jvmci
|
||||
* @requires vm.compMode != "Xcomp"
|
||||
* @modules jdk.internal.vm.ci/jdk.vm.ci.meta
|
||||
* jdk.internal.vm.ci/jdk.vm.ci.runtime
|
||||
* @run junit/othervm -XX:+UnlockExperimentalVMOptions -XX:+EnableJVMCI -XX:-UseJVMCICompiler -Xbootclasspath/a:. compiler.jvmci.meta.ProfilingInfoTest
|
||||
*/
|
||||
package compiler.jvmci.meta;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.Executable;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Assume;
|
||||
import org.junit.Test;
|
||||
|
||||
import jdk.vm.ci.meta.JavaTypeProfile;
|
||||
import jdk.vm.ci.meta.MetaAccessProvider;
|
||||
import jdk.vm.ci.meta.ProfilingInfo;
|
||||
import jdk.vm.ci.meta.ResolvedJavaMethod;
|
||||
import jdk.vm.ci.meta.ResolvedJavaType;
|
||||
import jdk.vm.ci.meta.TriState;
|
||||
import jdk.vm.ci.runtime.JVMCI;
|
||||
|
||||
/**
|
||||
* Tests profiling information provided by the runtime.
|
||||
* <p>
|
||||
* NOTE: These tests are actually not very robust. The problem is that only partial profiling
|
||||
* information may be gathered for any given method. For example, HotSpot's advanced compilation
|
||||
* policy can decide to only gather partial profiles in a first level compilation (see
|
||||
* AdvancedThresholdPolicy::common(...) in advancedThresholdPolicy.cpp). Because of this,
|
||||
* occasionally tests for {@link ProfilingInfo#getNullSeen(int)} can fail since HotSpot only sets
|
||||
* the null_seen bit when doing full profiling.
|
||||
*/
|
||||
public class ProfilingInfoTest {
|
||||
|
||||
private static final int N = 10;
|
||||
private static final double DELTA = 1d / Integer.MAX_VALUE;
|
||||
|
||||
@Test
|
||||
public void testBranchTakenProbability() {
|
||||
ProfilingInfo info = profile("branchProbabilitySnippet", 0);
|
||||
Assert.assertEquals(0.0, info.getBranchTakenProbability(1), DELTA);
|
||||
Assert.assertEquals(N, info.getExecutionCount(1));
|
||||
Assert.assertEquals(-1.0, info.getBranchTakenProbability(8), DELTA);
|
||||
Assert.assertEquals(0, info.getExecutionCount(8));
|
||||
|
||||
info = profile("branchProbabilitySnippet", 1);
|
||||
Assert.assertEquals(1.0, info.getBranchTakenProbability(1), DELTA);
|
||||
Assert.assertEquals(N, info.getExecutionCount(1));
|
||||
Assert.assertEquals(0.0, info.getBranchTakenProbability(8), DELTA);
|
||||
Assert.assertEquals(N, info.getExecutionCount(8));
|
||||
|
||||
info = profile("branchProbabilitySnippet", 2);
|
||||
Assert.assertEquals(1.0, info.getBranchTakenProbability(1), DELTA);
|
||||
Assert.assertEquals(N, info.getExecutionCount(1));
|
||||
Assert.assertEquals(1.0, info.getBranchTakenProbability(8), DELTA);
|
||||
Assert.assertEquals(N, info.getExecutionCount(8));
|
||||
|
||||
continueProfiling(3 * N, "branchProbabilitySnippet", 0);
|
||||
Assert.assertEquals(0.25, info.getBranchTakenProbability(1), DELTA);
|
||||
Assert.assertEquals(4 * N, info.getExecutionCount(1));
|
||||
Assert.assertEquals(1.0, info.getBranchTakenProbability(8), DELTA);
|
||||
Assert.assertEquals(N, info.getExecutionCount(8));
|
||||
|
||||
resetProfile("branchProbabilitySnippet");
|
||||
Assert.assertEquals(-1.0, info.getBranchTakenProbability(1), DELTA);
|
||||
Assert.assertEquals(0, info.getExecutionCount(1));
|
||||
Assert.assertEquals(-1.0, info.getBranchTakenProbability(8), DELTA);
|
||||
Assert.assertEquals(0, info.getExecutionCount(8));
|
||||
}
|
||||
|
||||
/**
 * Snippet with a two-level if/else chain.
 * NOTE: {@link #testBranchTakenProbability} references fixed bcis (1 and 8)
 * in this method's bytecode — do not change its shape.
 */
public static int branchProbabilitySnippet(int value) {
    if (value == 0) {
        return -1;
    } else if (value == 1) {
        return -2;
    } else {
        return -3;
    }
}
/**
 * Checks the per-target probabilities recorded for the switch at bci 1 of
 * {@link #switchProbabilitySnippet}: three targets (case 0, case 1, default),
 * each profiled run hitting exactly one of them.
 */
@Test
public void testSwitchProbabilities() {
    ProfilingInfo info = profile("switchProbabilitySnippet", 0);
    Assert.assertArrayEquals(new double[]{1.0, 0.0, 0.0}, info.getSwitchProbabilities(1), DELTA);

    info = profile("switchProbabilitySnippet", 1);
    Assert.assertArrayEquals(new double[]{0.0, 1.0, 0.0}, info.getSwitchProbabilities(1), DELTA);

    info = profile("switchProbabilitySnippet", 2);
    Assert.assertArrayEquals(new double[]{0.0, 0.0, 1.0}, info.getSwitchProbabilities(1), DELTA);

    // After reprofiling there is no switch profile at all.
    resetProfile("switchProbabilitySnippet");
    Assert.assertNull(info.getSwitchProbabilities(1));
}
/**
 * Snippet with a three-way switch.
 * NOTE: {@link #testSwitchProbabilities} references bci 1 of this method's
 * bytecode — do not change its shape.
 */
public static int switchProbabilitySnippet(int value) {
    switch (value) {
        case 0:
            return -1;
        case 1:
            return -2;
        default:
            return -3;
    }
}
/** Receiver-type profile of a virtual call: see {@link #testTypeProfile}. */
@Test
public void testProfileInvokeVirtual() {
    testTypeProfile("invokeVirtualSnippet", 1);
}
/**
 * Snippet whose invokevirtual (at bci 1, referenced by the test) records a
 * receiver-type profile.
 */
public static int invokeVirtualSnippet(Object obj) {
    return obj.hashCode();
}
/** Receiver-type profile of an interface call: see {@link #testTypeProfile}. */
@Test
public void testTypeProfileInvokeInterface() {
    testTypeProfile("invokeInterfaceSnippet", 1);
}
/**
 * Snippet whose invokeinterface (at bci 1, referenced by the test) records a
 * receiver-type profile.
 */
public static int invokeInterfaceSnippet(CharSequence a) {
    return a.length();
}
/** Type profile of a checkcast: see {@link #testTypeProfile}. */
@Test
public void testTypeProfileCheckCast() {
    testTypeProfile("checkCastSnippet", 1);
}
/**
 * Snippet whose checkcast (at bci 1, referenced by the test) records a type
 * profile; the catch keeps a failing cast from propagating.
 */
public static Serializable checkCastSnippet(Object obj) {
    try {
        return (Serializable) obj;
    } catch (ClassCastException e) {
        return null;
    }
}
/** Type profile of an instanceof: see {@link #testTypeProfile}. */
@Test
public void testTypeProfileInstanceOf() {
    testTypeProfile("instanceOfSnippet", 1);
}
/**
 * Snippet whose instanceof (at bci 1, referenced by the tests) records a
 * type profile; also used by testNullSeen for the null_seen bit.
 */
public static boolean instanceOfSnippet(Object obj) {
    return obj instanceof Serializable;
}
/**
 * Profiles {@code testSnippet} with a String, then additionally with a
 * StringBuilder, and checks the type profile recorded at {@code bci}:
 * first a single entry (String, p=1.0), then two entries split 0.5/0.5
 * in first-seen order, and finally no profile after a reset.
 *
 * @param testSnippet name of the static snippet method to profile
 * @param bci bytecode index of the profiled instruction in the snippet
 */
private void testTypeProfile(String testSnippet, int bci) {
    MetaAccessProvider metaAccess = JVMCI.getRuntime().getHostJVMCIBackend().getMetaAccess();
    ResolvedJavaType stringType = metaAccess.lookupJavaType(String.class);
    ResolvedJavaType stringBuilderType = metaAccess.lookupJavaType(StringBuilder.class);

    // N executions with a String receiver: exactly one recorded type.
    ProfilingInfo info = profile(testSnippet, "ABC");
    JavaTypeProfile typeProfile = info.getTypeProfile(bci);
    Assert.assertEquals(0.0, typeProfile.getNotRecordedProbability(), DELTA);
    Assert.assertEquals(1, typeProfile.getTypes().length);
    Assert.assertEquals(stringType, typeProfile.getTypes()[0].getType());
    Assert.assertEquals(1.0, typeProfile.getTypes()[0].getProbability(), DELTA);

    // N more executions with a StringBuilder: two types, 50/50, in first-seen order.
    continueProfiling(testSnippet, new StringBuilder());
    typeProfile = info.getTypeProfile(bci);
    Assert.assertEquals(0.0, typeProfile.getNotRecordedProbability(), DELTA);
    Assert.assertEquals(2, typeProfile.getTypes().length);
    Assert.assertEquals(stringType, typeProfile.getTypes()[0].getType());
    Assert.assertEquals(stringBuilderType, typeProfile.getTypes()[1].getType());
    Assert.assertEquals(0.5, typeProfile.getTypes()[0].getProbability(), DELTA);
    Assert.assertEquals(0.5, typeProfile.getTypes()[1].getProbability(), DELTA);

    // Reprofiling discards the recorded types entirely.
    resetProfile(testSnippet);
    typeProfile = info.getTypeProfile(bci);
    Assert.assertNull(typeProfile);
}
// Explicit no-arg constructor (presumably required by the test harness — confirm).
public ProfilingInfoTest() {
}
/**
 * Checks {@link ProfilingInfo#getExceptionSeen} for four exception kinds
 * (NPE, AIOOBE, CCE, and an exception thrown out of a callee). Each section
 * profiles a non-throwing input (FALSE expected), then a throwing input
 * (TRUE expected), then verifies a reset clears the bit. The bci passed to
 * getExceptionSeen is the index of the potentially-throwing instruction in
 * the respective snippet.
 */
@Test
public void testExceptionSeen() {
    // NullPointerException
    ProfilingInfo info = profile("nullPointerExceptionSnippet", 5);
    Assert.assertEquals(TriState.FALSE, info.getExceptionSeen(1));

    // (Object) cast ensures null is the single vararg element, not a null array.
    info = profile("nullPointerExceptionSnippet", (Object) null);
    Assert.assertEquals(TriState.TRUE, info.getExceptionSeen(1));

    resetProfile("nullPointerExceptionSnippet");
    Assert.assertEquals(TriState.FALSE, info.getExceptionSeen(1));

    // ArrayOutOfBoundsException
    info = profile("arrayIndexOutOfBoundsExceptionSnippet", new int[1]);
    Assert.assertEquals(TriState.FALSE, info.getExceptionSeen(2));

    info = profile("arrayIndexOutOfBoundsExceptionSnippet", new int[0]);
    Assert.assertEquals(TriState.TRUE, info.getExceptionSeen(2));

    resetProfile("arrayIndexOutOfBoundsExceptionSnippet");
    Assert.assertEquals(TriState.FALSE, info.getExceptionSeen(2));

    // CheckCastException
    info = profile("checkCastExceptionSnippet", "ABC");
    Assert.assertEquals(TriState.FALSE, info.getExceptionSeen(1));

    info = profile("checkCastExceptionSnippet", 5);
    Assert.assertEquals(TriState.TRUE, info.getExceptionSeen(1));

    resetProfile("checkCastExceptionSnippet");
    Assert.assertEquals(TriState.FALSE, info.getExceptionSeen(1));

    // Invoke with exception
    info = profile("invokeWithExceptionSnippet", false);
    Assert.assertEquals(TriState.FALSE, info.getExceptionSeen(1));

    info = profile("invokeWithExceptionSnippet", true);
    Assert.assertEquals(TriState.TRUE, info.getExceptionSeen(1));

    resetProfile("invokeWithExceptionSnippet");
    Assert.assertEquals(TriState.FALSE, info.getExceptionSeen(1));
}
/**
 * Snippet that throws (and swallows) an NPE when passed null; the test
 * checks getExceptionSeen at bci 1 of this method — do not change its shape.
 */
public static int nullPointerExceptionSnippet(Object obj) {
    try {
        return obj.hashCode();
    } catch (NullPointerException e) {
        return 1;
    }
}
/**
 * Snippet that throws (and swallows) an AIOOBE for an empty array; the test
 * checks getExceptionSeen at bci 2 of this method — do not change its shape.
 */
public static int arrayIndexOutOfBoundsExceptionSnippet(int[] array) {
    try {
        return array[0];
    } catch (ArrayIndexOutOfBoundsException e) {
        return 1;
    }
}
/**
 * Snippet that throws (and swallows) a CCE for non-String input; the test
 * checks getExceptionSeen at bci 1 of this method — do not change its shape.
 */
public static int checkCastExceptionSnippet(Object obj) {
    try {
        return ((String) obj).length();
    } catch (ClassCastException e) {
        return 1;
    }
}
/**
 * Snippet whose callee may throw; the test checks getExceptionSeen at bci 1
 * (the invoke of {@link #throwException}) — do not change this method's shape.
 */
public static int invokeWithExceptionSnippet(boolean doThrow) {
    try {
        return throwException(doThrow);
    } catch (IllegalArgumentException e) {
        return 1;
    }
}
private static int throwException(boolean doThrow) {
|
||||
if (doThrow) {
|
||||
throw new IllegalArgumentException();
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
/** Checks the null_seen bit for both profiled-null-check snippets. */
@Test
public void testNullSeen() {
    testNullSeen("instanceOfSnippet");
    testNullSeen("checkCastSnippet");
}
private void testNullSeen(String snippet) {
|
||||
ProfilingInfo info = profile(snippet, 1);
|
||||
Assert.assertEquals(TriState.FALSE, info.getNullSeen(1));
|
||||
|
||||
continueProfiling(snippet, "ABC");
|
||||
Assert.assertEquals(TriState.FALSE, info.getNullSeen(1));
|
||||
|
||||
continueProfiling(snippet, new Object());
|
||||
Assert.assertEquals(TriState.FALSE, info.getNullSeen(1));
|
||||
|
||||
if (TriState.TRUE == info.getNullSeen(1)) {
|
||||
// See the javadoc comment for ProfilingInfoTest.
|
||||
continueProfiling(snippet, (Object) null);
|
||||
Assert.assertEquals(TriState.TRUE, info.getNullSeen(1));
|
||||
|
||||
continueProfiling(snippet, 0.0);
|
||||
Assert.assertEquals(TriState.TRUE, info.getNullSeen(1));
|
||||
|
||||
continueProfiling(snippet, new Object());
|
||||
Assert.assertEquals(TriState.TRUE, info.getNullSeen(1));
|
||||
}
|
||||
|
||||
resetProfile(snippet);
|
||||
Assert.assertEquals(TriState.FALSE, info.getNullSeen(1));
|
||||
}
|
||||
|
||||
/** Resets the method's profile, runs it N times, and returns its (forced-mature) profile. */
private ProfilingInfo profile(String methodName, Object... args) {
    return profile(true, N, methodName, args);
}
/** Runs the method N more times without resetting its existing profile. */
private void continueProfiling(String methodName, Object... args) {
    profile(false, N, methodName, args);
}
/** Runs the method {@code executions} more times without resetting its existing profile. */
private void continueProfiling(int executions, String methodName, Object... args) {
    profile(false, executions, methodName, args);
}
private ProfilingInfo profile(boolean resetProfile, int executions, String methodName, Object... args) {
|
||||
MetaAccessProvider metaAccess = JVMCI.getRuntime().getHostJVMCIBackend().getMetaAccess();
|
||||
Method method = getMethod(methodName);
|
||||
ResolvedJavaMethod javaMethod = metaAccess.lookupJavaMethod(method);
|
||||
Assert.assertTrue(javaMethod.isStatic());
|
||||
if (resetProfile) {
|
||||
javaMethod.reprofile();
|
||||
}
|
||||
|
||||
for (int i = 0; i < executions; ++i) {
|
||||
try {
|
||||
method.invoke(null, args);
|
||||
} catch (Throwable e) {
|
||||
Assert.fail("method should not throw an exception: " + e.toString());
|
||||
}
|
||||
}
|
||||
|
||||
ProfilingInfo info = javaMethod.getProfilingInfo();
|
||||
// The execution counts are low so force maturity
|
||||
info.setMature();
|
||||
return info;
|
||||
}
|
||||
|
||||
static Method getMethod(String methodName) {
|
||||
for (Method method : ProfilingInfoTest.class.getDeclaredMethods()) {
|
||||
if (method.getName().equals(methodName)) {
|
||||
return method;
|
||||
}
|
||||
}
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
|
||||
private void resetProfile(String methodName) {
|
||||
MetaAccessProvider metaAccess = JVMCI.getRuntime().getHostJVMCIBackend().getMetaAccess();
|
||||
ResolvedJavaMethod javaMethod = metaAccess.lookupJavaMethod(getMethod(methodName));
|
||||
javaMethod.reprofile();
|
||||
}
|
||||
}