8048721: -XX:+PrintCompilation prints negative bci for non entrant OSR methods
Removed 'InvalidOSREntryBci'; nmethod::_state is now checked instead to determine whether an OSR method is non-entrant.

Reviewed-by: kvn, vlivanov, drchase
parent f082ff287e
commit dd6285d984
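The change in a nutshell: an OSR nmethod used to be marked invalid by overwriting its _entry_bci with the InvalidOSREntryBci sentinel (-2), which is exactly the negative bci that -XX:+PrintCompilation then printed; after this patch, validity is read from the nmethod's _state field and the real entry bci is preserved. A minimal standalone sketch of that idea (illustration only, not HotSpot source; the struct and helper names below are made up):

```cpp
#include <cstdio>

// Values mirror the nmethod state enum shown in this change.
enum { in_use = 0, not_entrant = 1, zombie = 2, unloaded = 3 };

struct ToyNmethod {
  int  _entry_bci;   // bci of the OSR entry point
  char _state;       // one of the enum values above
};

// Old scheme: invalidation clobbered the bci with the sentinel -2.
static void invalidate_old(ToyNmethod* nm) { nm->_entry_bci = -2; /* InvalidOSREntryBci */ }
static bool is_valid_old(const ToyNmethod* nm) { return nm->_entry_bci != -2; }

// New scheme: invalidation only changes the state; the bci stays intact.
static void invalidate_new(ToyNmethod* nm) { nm->_state = not_entrant; }
static bool is_valid_new(const ToyNmethod* nm) { return nm->_state == in_use; }

int main() {
  ToyNmethod a = {17, in_use};
  ToyNmethod b = {17, in_use};
  invalidate_old(&a);
  invalidate_new(&b);
  // Old: a log line would show bci -2 (the reported bug); new: it shows the real bci 17.
  std::printf("old scheme: bci=%d valid=%d\n", a._entry_bci, (int)is_valid_old(&a));
  std::printf("new scheme: bci=%d valid=%d\n", b._entry_bci, (int)is_valid_new(&b));
  return 0;
}
```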
@@ -107,7 +106,6 @@ public class VM {
   private Runtime1 runtime1;
   /** These constants come from globalDefinitions.hpp */
   private int invocationEntryBCI;
-  private int invalidOSREntryBCI;
   private ReversePtrs revPtrs;
   private VMRegImpl vmregImpl;
   private int reserveForAllocationPrefetch;
@@ -295,7 +294,6 @@ public class VM {
 
     stackBias = db.lookupIntConstant("STACK_BIAS").intValue();
     invocationEntryBCI = db.lookupIntConstant("InvocationEntryBci").intValue();
-    invalidOSREntryBCI = db.lookupIntConstant("InvalidOSREntryBci").intValue();
 
     // We infer the presence of C1 or C2 from a couple of fields we
     // already have present in the type database
@@ -733,11 +731,6 @@ public class VM {
     return invocationEntryBCI;
   }
 
-  /** FIXME: figure out where to stick this */
-  public int getInvalidOSREntryBCI() {
-    return invalidOSREntryBCI;
-  }
-
   // FIXME: figure out where to stick this
   public boolean wizardMode() {
     return true;
@@ -1166,9 +1166,9 @@ void InterpreterMacroAssembler::test_backedge_count_for_osr(Register backedge_co
   beq(CCR0, overflow_with_error);
 
   // Has the nmethod been invalidated already?
-  lwz(Rtmp, nmethod::entry_bci_offset(), R3_RET);
-  cmpwi(CCR0, Rtmp, InvalidOSREntryBci);
-  beq(CCR0, overflow_with_error);
+  lbz(Rtmp, nmethod::state_offset(), R3_RET);
+  cmpwi(CCR0, Rtmp, nmethod::in_use);
+  bne(CCR0, overflow_with_error);
 
   // Migrate the interpreter frame off of the stack.
   // We can use all registers because we will not return to interpreter from this point.
@@ -1674,9 +1674,9 @@ void TemplateTable::branch(bool is_jsr, bool is_wide) {
     __ beq(CCR0, Lforward);
 
     // Has the nmethod been invalidated already?
-    __ lwz(R0, nmethod::entry_bci_offset(), R3_RET);
-    __ cmpwi(CCR0, R0, InvalidOSREntryBci);
-    __ beq(CCR0, Lforward);
+    __ lbz(R0, nmethod::state_offset(), R3_RET);
+    __ cmpwi(CCR0, R0, nmethod::in_use);
+    __ bne(CCR0, Lforward);
 
     // Migrate the interpreter frame off of the stack.
     // We can use all registers because we will not return to interpreter from this point.
@@ -2407,8 +2407,8 @@ void InterpreterMacroAssembler::test_backedge_count_for_osr( Register backedge_c
   br_null_short(O0, Assembler::pn, overflow_with_error);
 
   // Has the nmethod been invalidated already?
-  ld(O0, nmethod::entry_bci_offset(), O2);
-  cmp_and_br_short(O2, InvalidOSREntryBci, Assembler::equal, Assembler::pn, overflow_with_error);
+  ldub(O0, nmethod::state_offset(), O2);
+  cmp_and_br_short(O2, nmethod::in_use, Assembler::notEqual, Assembler::pn, overflow_with_error);
 
   // migrate the interpreter frame off of the stack
 
@@ -1636,8 +1636,8 @@ void TemplateTable::branch(bool is_jsr, bool is_wide) {
     __ br_null_short(O0, Assembler::pn, Lforward);
 
     // Has the nmethod been invalidated already?
-    __ ld(O0, nmethod::entry_bci_offset(), O2);
-    __ cmp_and_br_short(O2, InvalidOSREntryBci, Assembler::equal, Assembler::pn, Lforward);
+    __ ldub(O0, nmethod::state_offset(), O2);
+    __ cmp_and_br_short(O2, nmethod::in_use, Assembler::notEqual, Assembler::pn, Lforward);
 
     // migrate the interpreter frame off of the stack
 
@@ -1724,9 +1724,8 @@ void TemplateTable::branch(bool is_jsr, bool is_wide) {
     __ testptr(rax, rax); // test result
     __ jcc(Assembler::zero, dispatch); // no osr if null
     // nmethod may have been invalidated (VM may block upon call_VM return)
-    __ movl(rcx, Address(rax, nmethod::entry_bci_offset()));
-    __ cmpl(rcx, InvalidOSREntryBci);
-    __ jcc(Assembler::equal, dispatch);
+    __ cmpb(Address(rax, nmethod::state_offset()), nmethod::in_use);
+    __ jcc(Assembler::notEqual, dispatch);
 
     // We have the address of an on stack replacement routine in rax,
     // We need to prepare to execute the OSR method. First we must
@@ -1734,8 +1733,7 @@ void TemplateTable::branch(bool is_jsr, bool is_wide) {
 
     __ mov(rbx, rax); // save the nmethod
 
-    const Register thread = rcx;
-    __ get_thread(thread);
+    __ get_thread(rcx);
     call_VM(noreg, CAST_FROM_FN_PTR(address, SharedRuntime::OSR_migration_begin));
     // rax, is OSR buffer, move it to expected parameter location
     __ mov(rcx, rax);
@@ -1751,9 +1751,8 @@ void TemplateTable::branch(bool is_jsr, bool is_wide) {
     __ testptr(rax, rax); // test result
     __ jcc(Assembler::zero, dispatch); // no osr if null
     // nmethod may have been invalidated (VM may block upon call_VM return)
-    __ movl(rcx, Address(rax, nmethod::entry_bci_offset()));
-    __ cmpl(rcx, InvalidOSREntryBci);
-    __ jcc(Assembler::equal, dispatch);
+    __ cmpb(Address(rax, nmethod::state_offset()), nmethod::in_use);
+    __ jcc(Assembler::notEqual, dispatch);
 
     // We have the address of an on stack replacement routine in eax
     // We need to prepare to execute the OSR method. First we must
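Across the PPC, SPARC, and x86 hunks above, the emitted guard is the same: load the one-byte _state field at nmethod::state_offset() and skip the OSR migration unless the nmethod is still in_use. Expressed as plain C++ (an assumed, simplified equivalent of what the generated code computes, not the actual stubs):

```cpp
#include <cstdint>

enum { in_use = 0, not_entrant = 1, zombie = 2, unloaded = 3 };

struct ToyNmethod {
  int    entry_bci;
  int8_t state;   // a single byte, hence the lbz / ldub / cmpb instructions above
};

// Returns true if it is still safe to migrate to the OSR method; otherwise the
// generated code branches back to the interpreter (Lforward / dispatch /
// overflow_with_error in the hunks above).
static bool osr_guard(const ToyNmethod* osr_nm) {
  if (osr_nm == nullptr) return false;   // "no osr if null"
  return osr_nm->state == in_use;        // old code compared entry_bci against -2 instead
}

int main() {
  ToyNmethod nm = {42, in_use};
  bool before = osr_guard(&nm);   // true: perform OSR migration
  nm.state = not_entrant;
  bool after = osr_guard(&nm);    // false: stay in the interpreter
  return (before && !after) ? 0 : 1;
}
```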
@@ -1364,8 +1364,6 @@ void nmethod::invalidate_osr_method() {
   // Remove from list of active nmethods
   if (method() != NULL)
     method()->method_holder()->remove_osr_nmethod(this);
-  // Set entry as invalid
-  _entry_bci = InvalidOSREntryBci;
 }
 
 void nmethod::log_state_change() const {
@@ -202,13 +202,6 @@ class nmethod : public CodeBlob {
   bool _oops_are_stale; // indicates that it's no longer safe to access oops section
 #endif
 
-  enum { in_use = 0,       // executable nmethod
-         not_entrant = 1,  // marked for deoptimization but activations may still exist,
-                           // will be transformed to zombie when all activations are gone
-         zombie = 2,       // no activations exist, nmethod is ready for purge
-         unloaded = 3 };   // there should be no activations, should not be called,
-                           // will be transformed to zombie immediately
-
   jbyte _scavenge_root_state;
 
 #if INCLUDE_RTM_OPT
@@ -431,6 +424,13 @@ class nmethod : public CodeBlob {
   address entry_point() const { return _entry_point; } // normal entry point
   address verified_entry_point() const { return _verified_entry_point; } // if klass is correct
 
+  enum { in_use = 0,       // executable nmethod
+         not_entrant = 1,  // marked for deoptimization but activations may still exist,
+                           // will be transformed to zombie when all activations are gone
+         zombie = 2,       // no activations exist, nmethod is ready for purge
+         unloaded = 3 };   // there should be no activations, should not be called,
+                           // will be transformed to zombie immediately
+
   // flag accessing and manipulation
   bool is_in_use() const { return _state == in_use; }
   bool is_alive() const { return _state == in_use || _state == not_entrant; }
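The enum is only moved from the private field section to sit next to the accessors that use it; the semantics are unchanged. For readers unfamiliar with the lifecycle it encodes, here is a small self-contained sketch (an illustration under the assumption that states only advance forward, which the comments above describe; not HotSpot code):

```cpp
#include <cassert>

enum State { in_use = 0, not_entrant = 1, zombie = 2, unloaded = 3 };

struct ToyLifecycle {
  State _state = in_use;

  // Mirrors the predicates shown in the hunk above.
  bool is_in_use() const { return _state == in_use; }
  bool is_alive()  const { return _state == in_use || _state == not_entrant; }

  // Hypothetical helper for the sketch: the state only ever moves forward.
  void advance(State s) { assert(s >= _state); _state = s; }
};

int main() {
  ToyLifecycle nm;
  assert(nm.is_in_use() && nm.is_alive());
  nm.advance(not_entrant);  // marked for deoptimization; activations may still exist
  assert(!nm.is_in_use() && nm.is_alive());
  nm.advance(zombie);       // no activations left; ready for purge
  assert(!nm.is_in_use() && !nm.is_alive());
  return 0;
}
```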
@@ -759,7 +759,7 @@ public:
   // support for code generation
   static int verified_entry_point_offset() { return offset_of(nmethod, _verified_entry_point); }
   static int osr_entry_point_offset() { return offset_of(nmethod, _osr_entry_point); }
-  static int entry_bci_offset() { return offset_of(nmethod, _entry_bci); }
+  static int state_offset() { return offset_of(nmethod, _state); }
 
   // RedefineClasses support. Mark metadata in nmethods as on_stack so that
   // redefine classes doesn't purge it.
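state_offset() is what lets the interpreter stubs reach _state with a single byte load (lbz, ldub, cmpb). A standalone analogue using offsetof (an assumed simplification; the real nmethod layout and the offset_of macro are HotSpot-specific):

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>

enum { in_use = 0, not_entrant = 1 };

struct ToyNmethod {
  int    _entry_bci;
  int8_t _state;
};

// Analogue of nmethod::state_offset(): the byte offset of _state from the base.
static std::size_t state_offset() { return offsetof(ToyNmethod, _state); }

int main() {
  ToyNmethod nm = {100, in_use};
  // What the generated code effectively does: read one byte at base + offset
  // and compare it against in_use.
  const unsigned char* base = reinterpret_cast<const unsigned char*>(&nm);
  unsigned char state = *(base + state_offset());
  std::printf("_state lives at offset %zu and currently holds %d (in_use == %d)\n",
              state_offset(), (int)state, in_use);
  return state == in_use ? 0 : 1;
}
```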
@@ -330,7 +330,7 @@
         if (do_OSR) {                                                          \
           nmethod* osr_nmethod;                                                \
           OSR_REQUEST(osr_nmethod, branch_pc);                                 \
-          if (osr_nmethod != NULL && osr_nmethod->osr_entry_bci() != InvalidOSREntryBci) { \
+          if (osr_nmethod != NULL && osr_nmethod->is_in_use()) {               \
             intptr_t* buf;                                                     \
             /* Call OSR migration with last java frame only, no checks. */     \
             CALL_VM_NAKED_LJF(buf=SharedRuntime::OSR_migration_begin(THREAD)); \
@@ -2518,7 +2518,6 @@ typedef TwoOopHashtable<Symbol*, mtClass> SymbolTwoOopHashtable;
   /*********************************************/              \
                                                                 \
   declare_constant(InvocationEntryBci)                          \
-  declare_constant(InvalidOSREntryBci)                          \
                                                                 \
   /***************/                                             \
   /* OopMapValue */                                             \
@@ -882,8 +882,7 @@ enum JavaThreadState {
 
 // Handy constants for deciding which compiler mode to use.
 enum MethodCompilation {
-  InvocationEntryBci = -1,  // i.e., not a on-stack replacement compilation
-  InvalidOSREntryBci = -2
+  InvocationEntryBci = -1   // i.e., not a on-stack replacement compilation
 };
 
 // Enumeration to distinguish tiers of compilation
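With InvalidOSREntryBci gone, the only special bci left is InvocationEntryBci (-1), which marks a standard (non-OSR) compile; an OSR nmethod keeps its real, non-negative entry bci for its whole lifetime. A sketch of how a PrintCompilation-style line might format the bci under that convention (illustrative only; this is not the actual logging code):

```cpp
#include <cstdio>

// Mirrors the remaining constant from the hunk above.
enum MethodCompilation { InvocationEntryBci = -1 };

// Hypothetical formatter: standard compiles print no bci, OSR compiles print
// "@ <bci>". Since invalidation no longer rewrites the bci to -2, a
// non-entrant OSR method still reports its real entry bci here.
static void print_compile_line(const char* method, int bci) {
  if (bci == InvocationEntryBci) {
    std::printf("%s\n", method);                  // standard compilation
  } else {
    std::printf("%s @ %d (osr)\n", method, bci);  // on-stack replacement
  }
}

int main() {
  print_compile_line("Foo::bar", InvocationEntryBci);  // "Foo::bar"
  print_compile_line("Foo::loop", 17);                 // "Foo::loop @ 17 (osr)"
  return 0;
}
```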