6986046: C1 valuestack cleanup
Fixes an historical oddity in C1 with inlining where all of the expression stacks are kept in the topmost ValueStack instead of being in their respective ValueStacks. Reviewed-by: never
This commit is contained in:
parent
bce771e0fe
commit
9b131fbbb8
@ -32,6 +32,7 @@ RangeCheckStub::RangeCheckStub(CodeEmitInfo* info, LIR_Opr index,
|
||||
: _throw_index_out_of_bounds_exception(throw_index_out_of_bounds_exception)
|
||||
, _index(index)
|
||||
{
|
||||
assert(info != NULL, "must have info");
|
||||
_info = new CodeEmitInfo(info);
|
||||
}
|
||||
|
||||
|
@ -311,7 +311,7 @@ void LIRGenerator::store_stack_parameter (LIR_Opr item, ByteSize offset_from_sp)
|
||||
|
||||
|
||||
void LIRGenerator::do_StoreIndexed(StoreIndexed* x) {
|
||||
assert(x->is_root(),"");
|
||||
assert(x->is_pinned(),"");
|
||||
bool needs_range_check = true;
|
||||
bool use_length = x->length() != NULL;
|
||||
bool obj_store = x->elt_type() == T_ARRAY || x->elt_type() == T_OBJECT;
|
||||
@ -386,7 +386,7 @@ void LIRGenerator::do_StoreIndexed(StoreIndexed* x) {
|
||||
|
||||
|
||||
void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
|
||||
assert(x->is_root(),"");
|
||||
assert(x->is_pinned(),"");
|
||||
LIRItem obj(x->obj(), this);
|
||||
obj.load_item();
|
||||
|
||||
@ -398,7 +398,7 @@ void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
|
||||
|
||||
CodeEmitInfo* info_for_exception = NULL;
|
||||
if (x->needs_null_check()) {
|
||||
info_for_exception = state_for(x, x->lock_stack_before());
|
||||
info_for_exception = state_for(x);
|
||||
}
|
||||
|
||||
// this CodeEmitInfo must not have the xhandlers because here the
|
||||
@ -409,7 +409,7 @@ void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
|
||||
|
||||
|
||||
void LIRGenerator::do_MonitorExit(MonitorExit* x) {
|
||||
assert(x->is_root(),"");
|
||||
assert(x->is_pinned(),"");
|
||||
LIRItem obj(x->obj(), this);
|
||||
obj.dont_load_item();
|
||||
|
||||
@ -871,10 +871,11 @@ void LIRGenerator::do_NewInstance(NewInstance* x) {
|
||||
// This instruction can be deoptimized in the slow path : use
|
||||
// O0 as result register.
|
||||
const LIR_Opr reg = result_register_for(x->type());
|
||||
|
||||
#ifndef PRODUCT
|
||||
if (PrintNotLoaded && !x->klass()->is_loaded()) {
|
||||
tty->print_cr(" ###class not loaded at new bci %d", x->bci());
|
||||
tty->print_cr(" ###class not loaded at new bci %d", x->printable_bci());
|
||||
}
|
||||
#endif
|
||||
CodeEmitInfo* info = state_for(x, x->state());
|
||||
LIR_Opr tmp1 = FrameMap::G1_oop_opr;
|
||||
LIR_Opr tmp2 = FrameMap::G3_oop_opr;
|
||||
@ -1018,7 +1019,7 @@ void LIRGenerator::do_CheckCast(CheckCast* x) {
|
||||
obj.load_item();
|
||||
LIR_Opr out_reg = rlock_result(x);
|
||||
CodeStub* stub;
|
||||
CodeEmitInfo* info_for_exception = state_for(x, x->state()->copy_locks());
|
||||
CodeEmitInfo* info_for_exception = state_for(x);
|
||||
|
||||
if (x->is_incompatible_class_change_check()) {
|
||||
assert(patching_info == NULL, "can't patch this");
|
||||
|
@ -83,7 +83,8 @@ RangeCheckStub::RangeCheckStub(CodeEmitInfo* info, LIR_Opr index,
|
||||
: _throw_index_out_of_bounds_exception(throw_index_out_of_bounds_exception)
|
||||
, _index(index)
|
||||
{
|
||||
_info = info == NULL ? NULL : new CodeEmitInfo(info);
|
||||
assert(info != NULL, "must have info");
|
||||
_info = new CodeEmitInfo(info);
|
||||
}
|
||||
|
||||
|
||||
|
@ -107,7 +107,7 @@ bool LIRGenerator::can_store_as_constant(Value v, BasicType type) const {
|
||||
return false;
|
||||
}
|
||||
Constant* c = v->as_Constant();
|
||||
if (c && c->state() == NULL) {
|
||||
if (c && c->state_before() == NULL) {
|
||||
// constants of any type can be stored directly, except for
|
||||
// unloaded object constants.
|
||||
return true;
|
||||
@ -250,7 +250,7 @@ void LIRGenerator::store_stack_parameter (LIR_Opr item, ByteSize offset_from_sp)
|
||||
|
||||
|
||||
void LIRGenerator::do_StoreIndexed(StoreIndexed* x) {
|
||||
assert(x->is_root(),"");
|
||||
assert(x->is_pinned(),"");
|
||||
bool needs_range_check = true;
|
||||
bool use_length = x->length() != NULL;
|
||||
bool obj_store = x->elt_type() == T_ARRAY || x->elt_type() == T_OBJECT;
|
||||
@ -325,7 +325,7 @@ void LIRGenerator::do_StoreIndexed(StoreIndexed* x) {
|
||||
|
||||
|
||||
void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
|
||||
assert(x->is_root(),"");
|
||||
assert(x->is_pinned(),"");
|
||||
LIRItem obj(x->obj(), this);
|
||||
obj.load_item();
|
||||
|
||||
@ -341,7 +341,7 @@ void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
|
||||
|
||||
CodeEmitInfo* info_for_exception = NULL;
|
||||
if (x->needs_null_check()) {
|
||||
info_for_exception = state_for(x, x->lock_stack_before());
|
||||
info_for_exception = state_for(x);
|
||||
}
|
||||
// this CodeEmitInfo must not have the xhandlers because here the
|
||||
// object is already locked (xhandlers expect object to be unlocked)
|
||||
@ -352,7 +352,7 @@ void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
|
||||
|
||||
|
||||
void LIRGenerator::do_MonitorExit(MonitorExit* x) {
|
||||
assert(x->is_root(),"");
|
||||
assert(x->is_pinned(),"");
|
||||
|
||||
LIRItem obj(x->obj(), this);
|
||||
obj.dont_load_item();
|
||||
@ -984,9 +984,11 @@ void LIRGenerator::do_Convert(Convert* x) {
|
||||
|
||||
|
||||
void LIRGenerator::do_NewInstance(NewInstance* x) {
|
||||
#ifndef PRODUCT
|
||||
if (PrintNotLoaded && !x->klass()->is_loaded()) {
|
||||
tty->print_cr(" ###class not loaded at new bci %d", x->bci());
|
||||
tty->print_cr(" ###class not loaded at new bci %d", x->printable_bci());
|
||||
}
|
||||
#endif
|
||||
CodeEmitInfo* info = state_for(x, x->state());
|
||||
LIR_Opr reg = result_register_for(x->type());
|
||||
LIR_Opr klass_reg = new_register(objectType);
|
||||
@ -1127,7 +1129,7 @@ void LIRGenerator::do_CheckCast(CheckCast* x) {
|
||||
obj.load_item();
|
||||
|
||||
// info for exceptions
|
||||
CodeEmitInfo* info_for_exception = state_for(x, x->state()->copy_locks());
|
||||
CodeEmitInfo* info_for_exception = state_for(x);
|
||||
|
||||
CodeStub* stub;
|
||||
if (x->is_incompatible_class_change_check()) {
|
||||
|
@ -174,31 +174,6 @@ void CFGPrinterOutput::print_state(BlockBegin* block) {
|
||||
int index;
|
||||
Value value;
|
||||
|
||||
if (state->stack_size() > 0) {
|
||||
print_begin("stack");
|
||||
print("size %d", state->stack_size());
|
||||
|
||||
for_each_stack_value(state, index, value) {
|
||||
ip.print_phi(index, value, block);
|
||||
print_operand(value);
|
||||
output()->cr();
|
||||
}
|
||||
|
||||
print_end("stack");
|
||||
}
|
||||
|
||||
if (state->locks_size() > 0) {
|
||||
print_begin("locks");
|
||||
print("size %d", state->locks_size());
|
||||
|
||||
for_each_lock_value(state, index, value) {
|
||||
ip.print_phi(index, value, block);
|
||||
print_operand(value);
|
||||
output()->cr();
|
||||
}
|
||||
print_end("locks");
|
||||
}
|
||||
|
||||
for_each_state(state) {
|
||||
print_begin("locals");
|
||||
print("size %d", state->locals_size());
|
||||
@ -210,6 +185,33 @@ void CFGPrinterOutput::print_state(BlockBegin* block) {
|
||||
output()->cr();
|
||||
}
|
||||
print_end("locals");
|
||||
|
||||
if (state->stack_size() > 0) {
|
||||
print_begin("stack");
|
||||
print("size %d", state->stack_size());
|
||||
print("method \"%s\"", method_name(state->scope()->method()));
|
||||
|
||||
for_each_stack_value(state, index, value) {
|
||||
ip.print_phi(index, value, block);
|
||||
print_operand(value);
|
||||
output()->cr();
|
||||
}
|
||||
|
||||
print_end("stack");
|
||||
}
|
||||
|
||||
if (state->locks_size() > 0) {
|
||||
print_begin("locks");
|
||||
print("size %d", state->locks_size());
|
||||
print("method \"%s\"", method_name(state->scope()->method()));
|
||||
|
||||
for_each_lock_value(state, index, value) {
|
||||
ip.print_phi(index, value, block);
|
||||
print_operand(value);
|
||||
output()->cr();
|
||||
}
|
||||
print_end("locks");
|
||||
}
|
||||
}
|
||||
|
||||
print_end("states");
|
||||
@ -230,7 +232,8 @@ void CFGPrinterOutput::print_HIR(Value instr) {
|
||||
if (instr->is_pinned()) {
|
||||
output()->put('.');
|
||||
}
|
||||
output()->print("%d %d ", instr->bci(), instr->use_count());
|
||||
|
||||
output()->print("%d %d ", instr->printable_bci(), instr->use_count());
|
||||
|
||||
print_operand(instr);
|
||||
|
||||
@ -271,7 +274,7 @@ void CFGPrinterOutput::print_block(BlockBegin* block) {
|
||||
print("name \"B%d\"", block->block_id());
|
||||
|
||||
print("from_bci %d", block->bci());
|
||||
print("to_bci %d", (block->end() == NULL ? -1 : block->end()->bci()));
|
||||
print("to_bci %d", (block->end() == NULL ? -1 : block->end()->printable_bci()));
|
||||
|
||||
output()->indent();
|
||||
output()->print("predecessors ");
|
||||
|
@ -205,7 +205,7 @@ void Canonicalizer::do_StoreField (StoreField* x) {
|
||||
// limit this optimization to current block
|
||||
if (value != NULL && in_current_block(conv)) {
|
||||
set_canonical(new StoreField(x->obj(), x->offset(), x->field(), value, x->is_static(),
|
||||
x->lock_stack(), x->state_before(), x->is_loaded(), x->is_initialized()));
|
||||
x->state_before(), x->is_loaded(), x->is_initialized()));
|
||||
return;
|
||||
}
|
||||
}
|
||||
@ -256,7 +256,7 @@ void Canonicalizer::do_StoreIndexed (StoreIndexed* x) {
|
||||
// limit this optimization to current block
|
||||
if (value != NULL && in_current_block(conv)) {
|
||||
set_canonical(new StoreIndexed(x->array(), x->index(), x->length(),
|
||||
x->elt_type(), value, x->lock_stack()));
|
||||
x->elt_type(), value, x->state_before()));
|
||||
return;
|
||||
}
|
||||
}
|
||||
@ -667,7 +667,7 @@ void Canonicalizer::do_If(If* x) {
|
||||
}
|
||||
}
|
||||
set_canonical(canon);
|
||||
set_bci(cmp->bci());
|
||||
set_bci(cmp->state_before()->bci());
|
||||
}
|
||||
}
|
||||
} else if (l->as_InstanceOf() != NULL) {
|
||||
@ -685,7 +685,7 @@ void Canonicalizer::do_If(If* x) {
|
||||
set_canonical(new Goto(is_inst_sux, x->state_before(), x->is_safepoint()));
|
||||
} else {
|
||||
// successors differ => simplify to: IfInstanceOf
|
||||
set_canonical(new IfInstanceOf(inst->klass(), inst->obj(), true, inst->bci(), is_inst_sux, no_inst_sux));
|
||||
set_canonical(new IfInstanceOf(inst->klass(), inst->obj(), true, inst->state_before()->bci(), is_inst_sux, no_inst_sux));
|
||||
}
|
||||
}
|
||||
} else if (rt == objectNull && (l->as_NewInstance() || l->as_NewArray())) {
|
||||
|
@ -22,7 +22,6 @@
|
||||
*
|
||||
*/
|
||||
|
||||
class BlockBegin;
|
||||
class CompilationResourceObj;
|
||||
class XHandlers;
|
||||
class ExceptionInfo;
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -58,9 +58,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
|
||||
// BlockEnds.
|
||||
BlockBegin* _continuation;
|
||||
|
||||
// Without return value of inlined method on stack
|
||||
ValueStack* _continuation_state;
|
||||
|
||||
// Was this ScopeData created only for the parsing and inlining of
|
||||
// a jsr?
|
||||
bool _parsing_jsr;
|
||||
@ -125,14 +122,10 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
|
||||
void set_stream(ciBytecodeStream* stream) { _stream = stream; }
|
||||
|
||||
intx max_inline_size() const { return _max_inline_size; }
|
||||
int caller_stack_size() const;
|
||||
|
||||
BlockBegin* continuation() const { return _continuation; }
|
||||
void set_continuation(BlockBegin* cont) { _continuation = cont; }
|
||||
|
||||
ValueStack* continuation_state() const { return _continuation_state; }
|
||||
void set_continuation_state(ValueStack* s) { _continuation_state = s; }
|
||||
|
||||
// Indicates whether this ScopeData was pushed only for the
|
||||
// parsing and inlining of a jsr
|
||||
bool parsing_jsr() const { return _parsing_jsr; }
|
||||
@ -163,7 +156,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
|
||||
|
||||
// for all GraphBuilders
|
||||
static bool _can_trap[Bytecodes::number_of_java_codes];
|
||||
static bool _is_async[Bytecodes::number_of_java_codes];
|
||||
|
||||
// for each instance of GraphBuilder
|
||||
ScopeData* _scope_data; // Per-scope data; used for inlining
|
||||
@ -179,7 +171,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
|
||||
// for each call to connect_to_end; can also be set by inliner
|
||||
BlockBegin* _block; // the current block
|
||||
ValueStack* _state; // the current execution state
|
||||
ValueStack* _exception_state; // state that will be used by handle_exception
|
||||
Instruction* _last; // the last instruction added
|
||||
bool _skip_block; // skip processing of the rest of this block
|
||||
|
||||
@ -194,8 +185,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
|
||||
ValueStack* state() const { return _state; }
|
||||
void set_state(ValueStack* state) { _state = state; }
|
||||
IRScope* scope() const { return scope_data()->scope(); }
|
||||
ValueStack* exception_state() const { return _exception_state; }
|
||||
void set_exception_state(ValueStack* s) { _exception_state = s; }
|
||||
ciMethod* method() const { return scope()->method(); }
|
||||
ciBytecodeStream* stream() const { return scope_data()->stream(); }
|
||||
Instruction* last() const { return _last; }
|
||||
@ -230,7 +219,7 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
|
||||
void load_indexed (BasicType type);
|
||||
void store_indexed(BasicType type);
|
||||
void stack_op(Bytecodes::Code code);
|
||||
void arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* lock_stack = NULL);
|
||||
void arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* state_before = NULL);
|
||||
void negate_op(ValueType* type);
|
||||
void shift_op(ValueType* type, Bytecodes::Code code);
|
||||
void logic_op(ValueType* type, Bytecodes::Code code);
|
||||
@ -267,12 +256,8 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
|
||||
Instruction* append_split(StateSplit* instr);
|
||||
|
||||
// other helpers
|
||||
static bool is_async(Bytecodes::Code code) {
|
||||
assert(0 <= code && code < Bytecodes::number_of_java_codes, "illegal bytecode");
|
||||
return _is_async[code];
|
||||
}
|
||||
BlockBegin* block_at(int bci) { return scope_data()->block_at(bci); }
|
||||
XHandlers* handle_exception(int bci);
|
||||
XHandlers* handle_exception(Instruction* instruction);
|
||||
void connect_to_end(BlockBegin* beg);
|
||||
void null_check(Value value);
|
||||
void eliminate_redundant_phis(BlockBegin* start);
|
||||
@ -283,7 +268,28 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
|
||||
|
||||
void kill_all();
|
||||
|
||||
ValueStack* lock_stack();
|
||||
// use of state copy routines (try to minimize unnecessary state
|
||||
// object allocations):
|
||||
|
||||
// - if the instruction unconditionally needs a full copy of the
|
||||
// state (for patching for example), then use copy_state_before*
|
||||
|
||||
// - if the instruction needs a full copy of the state only for
|
||||
// handler generation (Instruction::needs_exception_state() returns
|
||||
// false) then use copy_state_exhandling*
|
||||
|
||||
// - if the instruction needs either a full copy of the state for
|
||||
// handler generation and a least a minimal copy of the state (as
|
||||
// returned by Instruction::exception_state()) for debug info
|
||||
// generation (that is when Instruction::needs_exception_state()
|
||||
// returns true) then use copy_state_for_exception*
|
||||
|
||||
ValueStack* copy_state_before_with_bci(int bci);
|
||||
ValueStack* copy_state_before();
|
||||
ValueStack* copy_state_exhandling_with_bci(int bci);
|
||||
ValueStack* copy_state_exhandling();
|
||||
ValueStack* copy_state_for_exception_with_bci(int bci);
|
||||
ValueStack* copy_state_for_exception();
|
||||
|
||||
//
|
||||
// Inlining support
|
||||
@ -292,9 +298,7 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
|
||||
// accessors
|
||||
bool parsing_jsr() const { return scope_data()->parsing_jsr(); }
|
||||
BlockBegin* continuation() const { return scope_data()->continuation(); }
|
||||
ValueStack* continuation_state() const { return scope_data()->continuation_state(); }
|
||||
BlockBegin* jsr_continuation() const { return scope_data()->jsr_continuation(); }
|
||||
int caller_stack_size() const { return scope_data()->caller_stack_size(); }
|
||||
void set_continuation(BlockBegin* continuation) { scope_data()->set_continuation(continuation); }
|
||||
void set_inline_cleanup_info(BlockBegin* block,
|
||||
Instruction* return_prev,
|
||||
|
@ -116,24 +116,6 @@ bool XHandler::equals(XHandler* other) const {
|
||||
|
||||
|
||||
// Implementation of IRScope
|
||||
|
||||
BlockBegin* IRScope::header_block(BlockBegin* entry, BlockBegin::Flag f, ValueStack* state) {
|
||||
if (entry == NULL) return NULL;
|
||||
assert(entry->is_set(f), "entry/flag mismatch");
|
||||
// create header block
|
||||
BlockBegin* h = new BlockBegin(entry->bci());
|
||||
BlockEnd* g = new Goto(entry, false);
|
||||
h->set_next(g, entry->bci());
|
||||
h->set_end(g);
|
||||
h->set(f);
|
||||
// setup header block end state
|
||||
ValueStack* s = state->copy(); // can use copy since stack is empty (=> no phis)
|
||||
assert(s->stack_is_empty(), "must have empty stack at entry point");
|
||||
g->set_state(s);
|
||||
return h;
|
||||
}
|
||||
|
||||
|
||||
BlockBegin* IRScope::build_graph(Compilation* compilation, int osr_bci) {
|
||||
GraphBuilder gm(compilation, this);
|
||||
NOT_PRODUCT(if (PrintValueNumbering && Verbose) gm.print_stats());
|
||||
@ -145,12 +127,9 @@ BlockBegin* IRScope::build_graph(Compilation* compilation, int osr_bci) {
|
||||
IRScope::IRScope(Compilation* compilation, IRScope* caller, int caller_bci, ciMethod* method, int osr_bci, bool create_graph)
|
||||
: _callees(2)
|
||||
, _compilation(compilation)
|
||||
, _lock_stack_size(-1)
|
||||
, _requires_phi_function(method->max_locals())
|
||||
{
|
||||
_caller = caller;
|
||||
_caller_bci = caller == NULL ? -1 : caller_bci;
|
||||
_caller_state = NULL; // Must be set later if needed
|
||||
_level = caller == NULL ? 0 : caller->level() + 1;
|
||||
_method = method;
|
||||
_xhandlers = new XHandlers(method);
|
||||
@ -182,32 +161,6 @@ int IRScope::max_stack() const {
|
||||
}
|
||||
|
||||
|
||||
void IRScope::compute_lock_stack_size() {
|
||||
if (!InlineMethodsWithExceptionHandlers) {
|
||||
_lock_stack_size = 0;
|
||||
return;
|
||||
}
|
||||
|
||||
// Figure out whether we have to preserve expression stack elements
|
||||
// for parent scopes, and if so, how many
|
||||
IRScope* cur_scope = this;
|
||||
while (cur_scope != NULL && !cur_scope->xhandlers()->has_handlers()) {
|
||||
cur_scope = cur_scope->caller();
|
||||
}
|
||||
_lock_stack_size = (cur_scope == NULL ? 0 :
|
||||
(cur_scope->caller_state() == NULL ? 0 :
|
||||
cur_scope->caller_state()->stack_size()));
|
||||
}
|
||||
|
||||
int IRScope::top_scope_bci() const {
|
||||
assert(!is_top_scope(), "no correct answer for top scope possible");
|
||||
const IRScope* scope = this;
|
||||
while (!scope->caller()->is_top_scope()) {
|
||||
scope = scope->caller();
|
||||
}
|
||||
return scope->caller_bci();
|
||||
}
|
||||
|
||||
bool IRScopeDebugInfo::should_reexecute() {
|
||||
ciMethod* cur_method = scope()->method();
|
||||
int cur_bci = bci();
|
||||
@ -222,37 +175,24 @@ bool IRScopeDebugInfo::should_reexecute() {
|
||||
// Implementation of CodeEmitInfo
|
||||
|
||||
// Stack must be NON-null
|
||||
CodeEmitInfo::CodeEmitInfo(int bci, ValueStack* stack, XHandlers* exception_handlers)
|
||||
CodeEmitInfo::CodeEmitInfo(ValueStack* stack, XHandlers* exception_handlers)
|
||||
: _scope(stack->scope())
|
||||
, _bci(bci)
|
||||
, _scope_debug_info(NULL)
|
||||
, _oop_map(NULL)
|
||||
, _stack(stack)
|
||||
, _exception_handlers(exception_handlers)
|
||||
, _next(NULL)
|
||||
, _id(-1)
|
||||
, _is_method_handle_invoke(false) {
|
||||
assert(_stack != NULL, "must be non null");
|
||||
assert(_bci == SynchronizationEntryBCI || Bytecodes::is_defined(scope()->method()->java_code_at_bci(_bci)), "make sure bci points at a real bytecode");
|
||||
}
|
||||
|
||||
|
||||
CodeEmitInfo::CodeEmitInfo(CodeEmitInfo* info, bool lock_stack_only)
|
||||
CodeEmitInfo::CodeEmitInfo(CodeEmitInfo* info, ValueStack* stack)
|
||||
: _scope(info->_scope)
|
||||
, _exception_handlers(NULL)
|
||||
, _bci(info->_bci)
|
||||
, _scope_debug_info(NULL)
|
||||
, _oop_map(NULL)
|
||||
, _stack(stack == NULL ? info->_stack : stack)
|
||||
, _is_method_handle_invoke(info->_is_method_handle_invoke) {
|
||||
if (lock_stack_only) {
|
||||
if (info->_stack != NULL) {
|
||||
_stack = info->_stack->copy_locks();
|
||||
} else {
|
||||
_stack = NULL;
|
||||
}
|
||||
} else {
|
||||
_stack = info->_stack;
|
||||
}
|
||||
|
||||
// deep copy of exception handlers
|
||||
if (info->_exception_handlers != NULL) {
|
||||
@ -273,8 +213,6 @@ void CodeEmitInfo::add_register_oop(LIR_Opr opr) {
|
||||
assert(_oop_map != NULL, "oop map must already exist");
|
||||
assert(opr->is_single_cpu(), "should not call otherwise");
|
||||
|
||||
int frame_size = frame_map()->framesize();
|
||||
int arg_count = frame_map()->oop_map_arg_count();
|
||||
VMReg name = frame_map()->regname(opr);
|
||||
_oop_map->set_oop(name);
|
||||
}
|
||||
@ -383,8 +321,7 @@ class UseCountComputer: public ValueVisitor, BlockClosure {
|
||||
void visit(Value* n) {
|
||||
// Local instructions and Phis for expression stack values at the
|
||||
// start of basic blocks are not added to the instruction list
|
||||
if ((*n)->bci() == -99 && (*n)->as_Local() == NULL &&
|
||||
(*n)->as_Phi() == NULL) {
|
||||
if (!(*n)->is_linked()&& (*n)->can_be_linked()) {
|
||||
assert(false, "a node was not appended to the graph");
|
||||
Compilation::current()->bailout("a node was not appended to the graph");
|
||||
}
|
||||
@ -1338,7 +1275,7 @@ void SubstitutionResolver::block_do(BlockBegin* block) {
|
||||
// need to remove this instruction from the instruction stream
|
||||
if (n->subst() != n) {
|
||||
assert(last != NULL, "must have last");
|
||||
last->set_next(n->next(), n->next()->bci());
|
||||
last->set_next(n->next());
|
||||
} else {
|
||||
last = n;
|
||||
}
|
||||
|
@ -132,8 +132,6 @@ class IRScope: public CompilationResourceObj {
|
||||
// hierarchy
|
||||
Compilation* _compilation; // the current compilation
|
||||
IRScope* _caller; // the caller scope, or NULL
|
||||
int _caller_bci; // the caller bci of the corresponding (inlined) invoke, or < 0
|
||||
ValueStack* _caller_state; // the caller state, or NULL
|
||||
int _level; // the inlining level
|
||||
ciMethod* _method; // the corresponding method
|
||||
IRScopeList _callees; // the inlined method scopes
|
||||
@ -144,15 +142,9 @@ class IRScope: public CompilationResourceObj {
|
||||
bool _monitor_pairing_ok; // the monitor pairing info
|
||||
BlockBegin* _start; // the start block, successsors are method entries
|
||||
|
||||
// lock stack management
|
||||
int _lock_stack_size; // number of expression stack elements which, if present,
|
||||
// must be spilled to the stack because of exception
|
||||
// handling inside inlined methods
|
||||
|
||||
BitMap _requires_phi_function; // bit is set if phi functions at loop headers are necessary for a local variable
|
||||
|
||||
// helper functions
|
||||
BlockBegin* header_block(BlockBegin* entry, BlockBegin::Flag f, ValueStack* state);
|
||||
BlockBegin* build_graph(Compilation* compilation, int osr_bci);
|
||||
|
||||
public:
|
||||
@ -162,33 +154,16 @@ class IRScope: public CompilationResourceObj {
|
||||
// accessors
|
||||
Compilation* compilation() const { return _compilation; }
|
||||
IRScope* caller() const { return _caller; }
|
||||
int caller_bci() const { return _caller_bci; }
|
||||
ValueStack* caller_state() const { return _caller_state; }
|
||||
int level() const { return _level; }
|
||||
ciMethod* method() const { return _method; }
|
||||
int max_stack() const; // NOTE: expensive
|
||||
int lock_stack_size() const {
|
||||
assert(_lock_stack_size != -1, "uninitialized");
|
||||
return _lock_stack_size;
|
||||
}
|
||||
BitMap& requires_phi_function() { return _requires_phi_function; }
|
||||
|
||||
// mutators
|
||||
// Needed because caller state is not ready at time of IRScope construction
|
||||
void set_caller_state(ValueStack* state) { _caller_state = state; }
|
||||
// Needed because caller state changes after IRScope construction.
|
||||
// Computes number of expression stack elements whose state must be
|
||||
// preserved in the case of an exception; these may be seen by
|
||||
// caller scopes. Zero when inlining of methods containing exception
|
||||
// handlers is disabled, otherwise a conservative approximation.
|
||||
void compute_lock_stack_size();
|
||||
|
||||
// hierarchy
|
||||
bool is_top_scope() const { return _caller == NULL; }
|
||||
void add_callee(IRScope* callee) { _callees.append(callee); }
|
||||
int number_of_callees() const { return _callees.length(); }
|
||||
IRScope* callee_no(int i) const { return _callees.at(i); }
|
||||
int top_scope_bci() const;
|
||||
|
||||
// accessors, graph
|
||||
bool is_valid() const { return start() != NULL; }
|
||||
@ -266,9 +241,6 @@ class CodeEmitInfo: public CompilationResourceObj {
|
||||
XHandlers* _exception_handlers;
|
||||
OopMap* _oop_map;
|
||||
ValueStack* _stack; // used by deoptimization (contains also monitors
|
||||
int _bci;
|
||||
CodeEmitInfo* _next;
|
||||
int _id;
|
||||
bool _is_method_handle_invoke; // true if the associated call site is a MethodHandle call site.
|
||||
|
||||
FrameMap* frame_map() const { return scope()->compilation()->frame_map(); }
|
||||
@ -277,23 +249,10 @@ class CodeEmitInfo: public CompilationResourceObj {
|
||||
public:
|
||||
|
||||
// use scope from ValueStack
|
||||
CodeEmitInfo(int bci, ValueStack* stack, XHandlers* exception_handlers);
|
||||
|
||||
// used by natives
|
||||
CodeEmitInfo(IRScope* scope, int bci)
|
||||
: _scope(scope)
|
||||
, _bci(bci)
|
||||
, _oop_map(NULL)
|
||||
, _scope_debug_info(NULL)
|
||||
, _stack(NULL)
|
||||
, _exception_handlers(NULL)
|
||||
, _next(NULL)
|
||||
, _id(-1)
|
||||
, _is_method_handle_invoke(false) {
|
||||
}
|
||||
CodeEmitInfo(ValueStack* stack, XHandlers* exception_handlers);
|
||||
|
||||
// make a copy
|
||||
CodeEmitInfo(CodeEmitInfo* info, bool lock_stack_only = false);
|
||||
CodeEmitInfo(CodeEmitInfo* info, ValueStack* stack = NULL);
|
||||
|
||||
// accessors
|
||||
OopMap* oop_map() { return _oop_map; }
|
||||
@ -301,17 +260,10 @@ class CodeEmitInfo: public CompilationResourceObj {
|
||||
IRScope* scope() const { return _scope; }
|
||||
XHandlers* exception_handlers() const { return _exception_handlers; }
|
||||
ValueStack* stack() const { return _stack; }
|
||||
int bci() const { return _bci; }
|
||||
|
||||
void add_register_oop(LIR_Opr opr);
|
||||
void record_debug_info(DebugInformationRecorder* recorder, int pc_offset);
|
||||
|
||||
CodeEmitInfo* next() const { return _next; }
|
||||
void set_next(CodeEmitInfo* next) { _next = next; }
|
||||
|
||||
int id() const { return _id; }
|
||||
void set_id(int id) { _id = id; }
|
||||
|
||||
bool is_method_handle_invoke() const { return _is_method_handle_invoke; }
|
||||
void set_is_method_handle_invoke(bool x) { _is_method_handle_invoke = x; }
|
||||
};
|
||||
|
@ -29,13 +29,6 @@
|
||||
// Implementation of Instruction
|
||||
|
||||
|
||||
#ifdef ASSERT
|
||||
void Instruction::create_hi_word() {
|
||||
assert(type()->is_double_word() && _hi_word == NULL, "only double word has high word");
|
||||
_hi_word = new HiWord(this);
|
||||
}
|
||||
#endif
|
||||
|
||||
Instruction::Condition Instruction::mirror(Condition cond) {
|
||||
switch (cond) {
|
||||
case eql: return eql;
|
||||
@ -63,6 +56,15 @@ Instruction::Condition Instruction::negate(Condition cond) {
|
||||
return eql;
|
||||
}
|
||||
|
||||
void Instruction::update_exception_state(ValueStack* state) {
|
||||
if (state != NULL && (state->kind() == ValueStack::EmptyExceptionState || state->kind() == ValueStack::ExceptionState)) {
|
||||
assert(state->kind() == ValueStack::EmptyExceptionState || Compilation::current()->env()->jvmti_can_access_local_variables(), "unexpected state kind");
|
||||
_exception_state = state;
|
||||
} else {
|
||||
_exception_state = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Instruction* Instruction::prev(BlockBegin* block) {
|
||||
Instruction* p = NULL;
|
||||
@ -75,7 +77,24 @@ Instruction* Instruction::prev(BlockBegin* block) {
|
||||
}
|
||||
|
||||
|
||||
void Instruction::state_values_do(ValueVisitor* f) {
|
||||
if (state_before() != NULL) {
|
||||
state_before()->values_do(f);
|
||||
}
|
||||
if (exception_state() != NULL){
|
||||
exception_state()->values_do(f);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#ifndef PRODUCT
|
||||
void Instruction::check_state(ValueStack* state) {
|
||||
if (state != NULL) {
|
||||
state->verify();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void Instruction::print() {
|
||||
InstructionPrinter ip;
|
||||
print(ip);
|
||||
@ -190,35 +209,6 @@ ciType* CheckCast::exact_type() const {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
||||
void ArithmeticOp::other_values_do(ValueVisitor* f) {
|
||||
if (lock_stack() != NULL) lock_stack()->values_do(f);
|
||||
}
|
||||
|
||||
void NullCheck::other_values_do(ValueVisitor* f) {
|
||||
lock_stack()->values_do(f);
|
||||
}
|
||||
|
||||
void AccessArray::other_values_do(ValueVisitor* f) {
|
||||
if (lock_stack() != NULL) lock_stack()->values_do(f);
|
||||
}
|
||||
|
||||
|
||||
// Implementation of AccessField
|
||||
|
||||
void AccessField::other_values_do(ValueVisitor* f) {
|
||||
if (state_before() != NULL) state_before()->values_do(f);
|
||||
if (lock_stack() != NULL) lock_stack()->values_do(f);
|
||||
}
|
||||
|
||||
|
||||
// Implementation of StoreIndexed
|
||||
|
||||
IRScope* StoreIndexed::scope() const {
|
||||
return lock_stack()->scope();
|
||||
}
|
||||
|
||||
|
||||
// Implementation of ArithmeticOp
|
||||
|
||||
bool ArithmeticOp::is_commutative() const {
|
||||
@ -266,13 +256,6 @@ bool LogicOp::is_commutative() const {
|
||||
}
|
||||
|
||||
|
||||
// Implementation of CompareOp
|
||||
|
||||
void CompareOp::other_values_do(ValueVisitor* f) {
|
||||
if (state_before() != NULL) state_before()->values_do(f);
|
||||
}
|
||||
|
||||
|
||||
// Implementation of IfOp
|
||||
|
||||
bool IfOp::is_commutative() const {
|
||||
@ -301,6 +284,7 @@ IRScope* StateSplit::scope() const {
|
||||
|
||||
|
||||
void StateSplit::state_values_do(ValueVisitor* f) {
|
||||
Instruction::state_values_do(f);
|
||||
if (state() != NULL) state()->values_do(f);
|
||||
}
|
||||
|
||||
@ -316,30 +300,17 @@ void BlockBegin::state_values_do(ValueVisitor* f) {
|
||||
}
|
||||
|
||||
|
||||
void MonitorEnter::state_values_do(ValueVisitor* f) {
|
||||
StateSplit::state_values_do(f);
|
||||
_lock_stack_before->values_do(f);
|
||||
}
|
||||
|
||||
|
||||
void Intrinsic::state_values_do(ValueVisitor* f) {
|
||||
StateSplit::state_values_do(f);
|
||||
if (lock_stack() != NULL) lock_stack()->values_do(f);
|
||||
}
|
||||
|
||||
|
||||
// Implementation of Invoke
|
||||
|
||||
|
||||
Invoke::Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
|
||||
int vtable_index, ciMethod* target, ValueStack* state_before)
|
||||
: StateSplit(result_type)
|
||||
: StateSplit(result_type, state_before)
|
||||
, _code(code)
|
||||
, _recv(recv)
|
||||
, _args(args)
|
||||
, _vtable_index(vtable_index)
|
||||
, _target(target)
|
||||
, _state_before(state_before)
|
||||
{
|
||||
set_flag(TargetIsLoadedFlag, target->is_loaded());
|
||||
set_flag(TargetIsFinalFlag, target_is_loaded() && target->is_final_method());
|
||||
@ -376,7 +347,7 @@ void Invoke::state_values_do(ValueVisitor* f) {
|
||||
|
||||
// Implementation of Contant
|
||||
intx Constant::hash() const {
|
||||
if (_state == NULL) {
|
||||
if (state_before() == NULL) {
|
||||
switch (type()->tag()) {
|
||||
case intTag:
|
||||
return HASH2(name(), type()->as_IntConstant()->value());
|
||||
@ -499,25 +470,6 @@ BlockBegin* Constant::compare(Instruction::Condition cond, Value right,
|
||||
}
|
||||
|
||||
|
||||
void Constant::other_values_do(ValueVisitor* f) {
|
||||
if (state() != NULL) state()->values_do(f);
|
||||
}
|
||||
|
||||
|
||||
// Implementation of NewArray
|
||||
|
||||
void NewArray::other_values_do(ValueVisitor* f) {
|
||||
if (state_before() != NULL) state_before()->values_do(f);
|
||||
}
|
||||
|
||||
|
||||
// Implementation of TypeCheck
|
||||
|
||||
void TypeCheck::other_values_do(ValueVisitor* f) {
|
||||
if (state_before() != NULL) state_before()->values_do(f);
|
||||
}
|
||||
|
||||
|
||||
// Implementation of BlockBegin
|
||||
|
||||
void BlockBegin::set_end(BlockEnd* end) {
|
||||
@ -604,23 +556,14 @@ void BlockBegin::substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux) {
|
||||
// of the inserted block, without recomputing the values of the other blocks
|
||||
// in the CFG. Therefore the value of "depth_first_number" in BlockBegin becomes meaningless.
|
||||
BlockBegin* BlockBegin::insert_block_between(BlockBegin* sux) {
|
||||
// Try to make the bci close to a block with a single pred or sux,
|
||||
// since this make the block layout algorithm work better.
|
||||
int bci = -1;
|
||||
if (sux->number_of_preds() == 1) {
|
||||
bci = sux->bci();
|
||||
} else {
|
||||
bci = end()->bci();
|
||||
}
|
||||
|
||||
BlockBegin* new_sux = new BlockBegin(bci);
|
||||
BlockBegin* new_sux = new BlockBegin(-99);
|
||||
|
||||
// mark this block (special treatment when block order is computed)
|
||||
new_sux->set(critical_edge_split_flag);
|
||||
|
||||
// This goto is not a safepoint.
|
||||
Goto* e = new Goto(sux, false);
|
||||
new_sux->set_next(e, bci);
|
||||
new_sux->set_next(e, end()->state()->bci());
|
||||
new_sux->set_end(e);
|
||||
// setup states
|
||||
ValueStack* s = end()->state();
|
||||
@ -763,7 +706,7 @@ bool BlockBegin::try_merge(ValueStack* new_state) {
|
||||
}
|
||||
|
||||
// copy state because it is altered
|
||||
new_state = new_state->copy();
|
||||
new_state = new_state->copy(ValueStack::BlockBeginState, bci());
|
||||
|
||||
// Use method liveness to invalidate dead locals
|
||||
MethodLivenessResult liveness = new_state->scope()->method()->liveness_at_bci(bci());
|
||||
@ -800,19 +743,9 @@ bool BlockBegin::try_merge(ValueStack* new_state) {
|
||||
// initialize state of block
|
||||
set_state(new_state);
|
||||
|
||||
} else if (existing_state->is_same_across_scopes(new_state)) {
|
||||
} else if (existing_state->is_same(new_state)) {
|
||||
TRACE_PHI(tty->print_cr("exisiting state found"));
|
||||
|
||||
// Inlining may cause the local state not to match up, so walk up
|
||||
// the new state until we get to the same scope as the
|
||||
// existing and then start processing from there.
|
||||
while (existing_state->scope() != new_state->scope()) {
|
||||
new_state = new_state->caller_state();
|
||||
assert(new_state != NULL, "could not match up scopes");
|
||||
|
||||
assert(false, "check if this is necessary");
|
||||
}
|
||||
|
||||
assert(existing_state->scope() == new_state->scope(), "not matching");
|
||||
assert(existing_state->locals_size() == new_state->locals_size(), "not matching");
|
||||
assert(existing_state->stack_size() == new_state->stack_size(), "not matching");
|
||||
@ -969,11 +902,6 @@ void BlockEnd::substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux) {
|
||||
}
|
||||
|
||||
|
||||
void BlockEnd::other_values_do(ValueVisitor* f) {
|
||||
if (state_before() != NULL) state_before()->values_do(f);
|
||||
}
|
||||
|
||||
|
||||
// Implementation of Phi
|
||||
|
||||
// Normal phi functions take their operands from the last instruction of the
|
||||
@ -1006,11 +934,6 @@ int Phi::operand_count() const {
|
||||
}
|
||||
|
||||
|
||||
// Implementation of Throw
|
||||
|
||||
void Throw::state_values_do(ValueVisitor* f) {
|
||||
BlockEnd::state_values_do(f);
|
||||
}
|
||||
|
||||
void ProfileInvoke::state_values_do(ValueVisitor* f) {
|
||||
if (state() != NULL) state()->values_do(f);
|
||||
|
@ -38,7 +38,6 @@ typedef LIR_OprDesc* LIR_Opr;
|
||||
// serve factoring.
|
||||
|
||||
class Instruction;
|
||||
class HiWord;
|
||||
class Phi;
|
||||
class Local;
|
||||
class Constant;
|
||||
@ -149,7 +148,6 @@ class BlockList: public _BlockList {
|
||||
|
||||
class InstructionVisitor: public StackObj {
|
||||
public:
|
||||
void do_HiWord (HiWord* x) { ShouldNotReachHere(); }
|
||||
virtual void do_Phi (Phi* x) = 0;
|
||||
virtual void do_Local (Local* x) = 0;
|
||||
virtual void do_Constant (Constant* x) = 0;
|
||||
@ -272,7 +270,9 @@ class InstructionVisitor: public StackObj {
|
||||
class Instruction: public CompilationResourceObj {
|
||||
private:
|
||||
int _id; // the unique instruction id
|
||||
int _bci; // the instruction bci
|
||||
#ifndef PRODUCT
|
||||
int _printable_bci; // the bci of the instruction for printing
|
||||
#endif
|
||||
int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1
|
||||
int _pin_state; // set of PinReason describing the reason for pinning
|
||||
ValueType* _type; // the instruction value type
|
||||
@ -281,17 +281,18 @@ class Instruction: public CompilationResourceObj {
|
||||
LIR_Opr _operand; // LIR specific information
|
||||
unsigned int _flags; // Flag bits
|
||||
|
||||
ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL)
|
||||
ValueStack* _exception_state; // Copy of state for exception handling
|
||||
XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction
|
||||
|
||||
#ifdef ASSERT
|
||||
HiWord* _hi_word;
|
||||
#endif
|
||||
|
||||
friend class UseCountComputer;
|
||||
friend class BlockBegin;
|
||||
|
||||
void update_exception_state(ValueStack* state);
|
||||
|
||||
bool has_printable_bci() const { return NOT_PRODUCT(_printable_bci != -99) PRODUCT_ONLY(false); }
|
||||
|
||||
protected:
|
||||
void set_bci(int bci) { assert(bci == SynchronizationEntryBCI || bci >= 0, "illegal bci"); _bci = bci; }
|
||||
void set_type(ValueType* type) {
|
||||
assert(type != NULL, "type must exist");
|
||||
_type = type;
|
||||
@ -325,6 +326,7 @@ class Instruction: public CompilationResourceObj {
|
||||
NeedsPatchingFlag,
|
||||
ThrowIncompatibleClassChangeErrorFlag,
|
||||
ProfileMDOFlag,
|
||||
IsLinkedInBlockFlag,
|
||||
InstructionLastFlag
|
||||
};
|
||||
|
||||
@ -356,31 +358,31 @@ class Instruction: public CompilationResourceObj {
|
||||
}
|
||||
|
||||
// creation
|
||||
Instruction(ValueType* type, bool type_is_constant = false, bool create_hi = true)
|
||||
: _bci(-99)
|
||||
, _use_count(0)
|
||||
Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false, bool create_hi = true)
|
||||
: _use_count(0)
|
||||
#ifndef PRODUCT
|
||||
, _printable_bci(-99)
|
||||
#endif
|
||||
, _pin_state(0)
|
||||
, _type(type)
|
||||
, _next(NULL)
|
||||
, _subst(NULL)
|
||||
, _flags(0)
|
||||
, _operand(LIR_OprFact::illegalOpr)
|
||||
, _state_before(state_before)
|
||||
, _exception_handlers(NULL)
|
||||
#ifdef ASSERT
|
||||
, _hi_word(NULL)
|
||||
#endif
|
||||
{
|
||||
check_state(state_before);
|
||||
assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist");
|
||||
#ifdef ASSERT
|
||||
if (create_hi && type->is_double_word()) {
|
||||
create_hi_word();
|
||||
}
|
||||
#endif
|
||||
update_exception_state(_state_before);
|
||||
}
|
||||
|
||||
// accessors
|
||||
int id() const { return _id; }
|
||||
int bci() const { return _bci; }
|
||||
#ifndef PRODUCT
|
||||
int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; }
|
||||
void set_printable_bci(int bci) { NOT_PRODUCT(_printable_bci = bci;) }
|
||||
#endif
|
||||
int use_count() const { return _use_count; }
|
||||
int pin_state() const { return _pin_state; }
|
||||
bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; }
|
||||
@ -393,9 +395,13 @@ class Instruction: public CompilationResourceObj {
|
||||
|
||||
void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); }
|
||||
bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); }
|
||||
bool is_linked() const { return check_flag(IsLinkedInBlockFlag); }
|
||||
bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; }
|
||||
|
||||
bool has_uses() const { return use_count() > 0; }
|
||||
bool is_root() const { return is_pinned() || use_count() > 1; }
|
||||
ValueStack* state_before() const { return _state_before; }
|
||||
ValueStack* exception_state() const { return _exception_state; }
|
||||
virtual bool needs_exception_state() const { return true; }
|
||||
XHandlers* exception_handlers() const { return _exception_handlers; }
|
||||
|
||||
// manipulation
|
||||
@ -403,19 +409,25 @@ class Instruction: public CompilationResourceObj {
|
||||
void pin() { _pin_state |= PinUnknown; }
|
||||
// DANGEROUS: only used by EliminateStores
|
||||
void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; }
|
||||
virtual void set_lock_stack(ValueStack* l) { /* do nothing*/ }
|
||||
virtual ValueStack* lock_stack() const { return NULL; }
|
||||
|
||||
Instruction* set_next(Instruction* next, int bci) {
|
||||
if (next != NULL) {
|
||||
assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next");
|
||||
assert(next->as_Phi() == NULL && next->as_Local() == NULL, "shouldn't link these instructions into list");
|
||||
next->set_bci(bci);
|
||||
}
|
||||
Instruction* set_next(Instruction* next) {
|
||||
assert(next->has_printable_bci(), "_printable_bci should have been set");
|
||||
assert(next != NULL, "must not be NULL");
|
||||
assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next");
|
||||
assert(next->can_be_linked(), "shouldn't link these instructions into list");
|
||||
|
||||
next->set_flag(Instruction::IsLinkedInBlockFlag, true);
|
||||
_next = next;
|
||||
return next;
|
||||
}
|
||||
|
||||
Instruction* set_next(Instruction* next, int bci) {
|
||||
#ifndef PRODUCT
|
||||
next->set_printable_bci(bci);
|
||||
#endif
|
||||
return set_next(next);
|
||||
}
|
||||
|
||||
void set_subst(Instruction* subst) {
|
||||
assert(subst == NULL ||
|
||||
type()->base() == subst->type()->base() ||
|
||||
@ -423,14 +435,7 @@ class Instruction: public CompilationResourceObj {
|
||||
_subst = subst;
|
||||
}
|
||||
void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; }
|
||||
|
||||
#ifdef ASSERT
|
||||
// HiWord is used for debugging and is allocated early to avoid
|
||||
// allocation at inconvenient points
|
||||
HiWord* hi_word() { return _hi_word; }
|
||||
void create_hi_word();
|
||||
#endif
|
||||
|
||||
void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; }
|
||||
|
||||
// machine-specifics
|
||||
void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; }
|
||||
@ -438,7 +443,6 @@ class Instruction: public CompilationResourceObj {
|
||||
|
||||
// generic
|
||||
virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro
|
||||
virtual HiWord* as_HiWord() { return NULL; }
|
||||
virtual Phi* as_Phi() { return NULL; }
|
||||
virtual Local* as_Local() { return NULL; }
|
||||
virtual Constant* as_Constant() { return NULL; }
|
||||
@ -493,7 +497,7 @@ class Instruction: public CompilationResourceObj {
|
||||
virtual bool can_trap() const { return false; }
|
||||
|
||||
virtual void input_values_do(ValueVisitor* f) = 0;
|
||||
virtual void state_values_do(ValueVisitor* f) { /* usually no state - override on demand */ }
|
||||
virtual void state_values_do(ValueVisitor* f);
|
||||
virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ }
|
||||
void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); }
|
||||
|
||||
@ -505,6 +509,7 @@ class Instruction: public CompilationResourceObj {
|
||||
HASHING1(Instruction, false, id()) // hashing disabled by default
|
||||
|
||||
// debugging
|
||||
static void check_state(ValueStack* state) PRODUCT_RETURN;
|
||||
void print() PRODUCT_RETURN;
|
||||
void print_line() PRODUCT_RETURN;
|
||||
void print(InstructionPrinter& ip) PRODUCT_RETURN;
|
||||
@ -541,40 +546,6 @@ class AssertValues: public ValueVisitor {
|
||||
#endif // ASSERT
|
||||
|
||||
|
||||
// A HiWord occupies the 'high word' of a 2-word
|
||||
// expression stack entry. Hi & lo words must be
|
||||
// paired on the expression stack (otherwise the
|
||||
// bytecode sequence is illegal). Note that 'hi'
|
||||
// refers to the IR expression stack format and
|
||||
// does *not* imply a machine word ordering. No
|
||||
// HiWords are used in optimized mode for speed,
|
||||
// but NULL pointers are used instead.
|
||||
|
||||
LEAF(HiWord, Instruction)
|
||||
private:
|
||||
Value _lo_word;
|
||||
|
||||
public:
|
||||
// creation
|
||||
HiWord(Value lo_word)
|
||||
: Instruction(illegalType, false, false),
|
||||
_lo_word(lo_word) {
|
||||
// hi-words are also allowed for illegal lo-words
|
||||
assert(lo_word->type()->is_double_word() || lo_word->type()->is_illegal(),
|
||||
"HiWord must be used for 2-word values only");
|
||||
}
|
||||
|
||||
// accessors
|
||||
Value lo_word() const { return _lo_word->subst(); }
|
||||
|
||||
// for invalidating of HiWords
|
||||
void make_illegal() { set_type(illegalType); }
|
||||
|
||||
// generic
|
||||
virtual void input_values_do(ValueVisitor* f) { ShouldNotReachHere(); }
|
||||
};
|
||||
|
||||
|
||||
// A Phi is a phi function in the sense of SSA form. It stands for
|
||||
// the value of a local variable at the beginning of a join block.
|
||||
// A Phi consists of n operands, one for every incoming branch.
|
||||
@ -656,31 +627,25 @@ LEAF(Local, Instruction)
|
||||
|
||||
|
||||
LEAF(Constant, Instruction)
|
||||
ValueStack* _state;
|
||||
|
||||
public:
|
||||
// creation
|
||||
Constant(ValueType* type):
|
||||
Instruction(type, true)
|
||||
, _state(NULL) {
|
||||
Instruction(type, NULL, true)
|
||||
{
|
||||
assert(type->is_constant(), "must be a constant");
|
||||
}
|
||||
|
||||
Constant(ValueType* type, ValueStack* state):
|
||||
Instruction(type, true)
|
||||
, _state(state) {
|
||||
assert(state != NULL, "only used for constants which need patching");
|
||||
Constant(ValueType* type, ValueStack* state_before):
|
||||
Instruction(type, state_before, true)
|
||||
{
|
||||
assert(state_before != NULL, "only used for constants which need patching");
|
||||
assert(type->is_constant(), "must be a constant");
|
||||
// since it's patching it needs to be pinned
|
||||
pin();
|
||||
}
|
||||
|
||||
ValueStack* state() const { return _state; }
|
||||
|
||||
// generic
|
||||
virtual bool can_trap() const { return state() != NULL; }
|
||||
virtual bool can_trap() const { return state_before() != NULL; }
|
||||
virtual void input_values_do(ValueVisitor* f) { /* no values */ }
|
||||
virtual void other_values_do(ValueVisitor* f);
|
||||
|
||||
virtual intx hash() const;
|
||||
virtual bool is_equal(Value v) const;
|
||||
@ -695,20 +660,16 @@ BASE(AccessField, Instruction)
|
||||
Value _obj;
|
||||
int _offset;
|
||||
ciField* _field;
|
||||
ValueStack* _state_before; // state is set only for unloaded or uninitialized fields
|
||||
ValueStack* _lock_stack; // contains lock and scope information
|
||||
NullCheck* _explicit_null_check; // For explicit null check elimination
|
||||
|
||||
public:
|
||||
// creation
|
||||
AccessField(Value obj, int offset, ciField* field, bool is_static, ValueStack* lock_stack,
|
||||
AccessField(Value obj, int offset, ciField* field, bool is_static,
|
||||
ValueStack* state_before, bool is_loaded, bool is_initialized)
|
||||
: Instruction(as_ValueType(field->type()->basic_type()))
|
||||
: Instruction(as_ValueType(field->type()->basic_type()), state_before)
|
||||
, _obj(obj)
|
||||
, _offset(offset)
|
||||
, _field(field)
|
||||
, _lock_stack(lock_stack)
|
||||
, _state_before(state_before)
|
||||
, _explicit_null_check(NULL)
|
||||
{
|
||||
set_needs_null_check(!is_static);
|
||||
@ -734,13 +695,11 @@ BASE(AccessField, Instruction)
|
||||
bool is_static() const { return check_flag(IsStaticFlag); }
|
||||
bool is_loaded() const { return check_flag(IsLoadedFlag); }
|
||||
bool is_initialized() const { return check_flag(IsInitializedFlag); }
|
||||
ValueStack* state_before() const { return _state_before; }
|
||||
ValueStack* lock_stack() const { return _lock_stack; }
|
||||
NullCheck* explicit_null_check() const { return _explicit_null_check; }
|
||||
bool needs_patching() const { return check_flag(NeedsPatchingFlag); }
|
||||
|
||||
// manipulation
|
||||
void set_lock_stack(ValueStack* l) { _lock_stack = l; }
|
||||
|
||||
// Under certain circumstances, if a previous NullCheck instruction
|
||||
// proved the target object non-null, we can eliminate the explicit
|
||||
// null check and do an implicit one, simply specifying the debug
|
||||
@ -751,16 +710,15 @@ BASE(AccessField, Instruction)
|
||||
// generic
|
||||
virtual bool can_trap() const { return needs_null_check() || needs_patching(); }
|
||||
virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
|
||||
virtual void other_values_do(ValueVisitor* f);
|
||||
};
|
||||
|
||||
|
||||
LEAF(LoadField, AccessField)
|
||||
public:
|
||||
// creation
|
||||
LoadField(Value obj, int offset, ciField* field, bool is_static, ValueStack* lock_stack,
|
||||
LoadField(Value obj, int offset, ciField* field, bool is_static,
|
||||
ValueStack* state_before, bool is_loaded, bool is_initialized)
|
||||
: AccessField(obj, offset, field, is_static, lock_stack, state_before, is_loaded, is_initialized)
|
||||
: AccessField(obj, offset, field, is_static, state_before, is_loaded, is_initialized)
|
||||
{}
|
||||
|
||||
ciType* declared_type() const;
|
||||
@ -777,9 +735,9 @@ LEAF(StoreField, AccessField)
|
||||
|
||||
public:
|
||||
// creation
|
||||
StoreField(Value obj, int offset, ciField* field, Value value, bool is_static, ValueStack* lock_stack,
|
||||
StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
|
||||
ValueStack* state_before, bool is_loaded, bool is_initialized)
|
||||
: AccessField(obj, offset, field, is_static, lock_stack, state_before, is_loaded, is_initialized)
|
||||
: AccessField(obj, offset, field, is_static, state_before, is_loaded, is_initialized)
|
||||
, _value(value)
|
||||
{
|
||||
set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
|
||||
@ -799,29 +757,23 @@ LEAF(StoreField, AccessField)
|
||||
BASE(AccessArray, Instruction)
|
||||
private:
|
||||
Value _array;
|
||||
ValueStack* _lock_stack;
|
||||
|
||||
public:
|
||||
// creation
|
||||
AccessArray(ValueType* type, Value array, ValueStack* lock_stack)
|
||||
: Instruction(type)
|
||||
AccessArray(ValueType* type, Value array, ValueStack* state_before)
|
||||
: Instruction(type, state_before)
|
||||
, _array(array)
|
||||
, _lock_stack(lock_stack) {
|
||||
{
|
||||
set_needs_null_check(true);
|
||||
ASSERT_VALUES
|
||||
pin(); // instruction with side effect (null exception or range check throwing)
|
||||
}
|
||||
|
||||
Value array() const { return _array; }
|
||||
ValueStack* lock_stack() const { return _lock_stack; }
|
||||
|
||||
// setters
|
||||
void set_lock_stack(ValueStack* l) { _lock_stack = l; }
|
||||
|
||||
// generic
|
||||
virtual bool can_trap() const { return needs_null_check(); }
|
||||
virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); }
|
||||
virtual void other_values_do(ValueVisitor* f);
|
||||
};
|
||||
|
||||
|
||||
@ -831,8 +783,8 @@ LEAF(ArrayLength, AccessArray)
|
||||
|
||||
public:
|
||||
// creation
|
||||
ArrayLength(Value array, ValueStack* lock_stack)
|
||||
: AccessArray(intType, array, lock_stack)
|
||||
ArrayLength(Value array, ValueStack* state_before)
|
||||
: AccessArray(intType, array, state_before)
|
||||
, _explicit_null_check(NULL) {}
|
||||
|
||||
// accessors
|
||||
@ -855,8 +807,8 @@ BASE(AccessIndexed, AccessArray)
|
||||
|
||||
public:
|
||||
// creation
|
||||
AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* lock_stack)
|
||||
: AccessArray(as_ValueType(elt_type), array, lock_stack)
|
||||
AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
|
||||
: AccessArray(as_ValueType(elt_type), array, state_before)
|
||||
, _index(index)
|
||||
, _length(length)
|
||||
, _elt_type(elt_type)
|
||||
@ -883,8 +835,8 @@ LEAF(LoadIndexed, AccessIndexed)
|
||||
|
||||
public:
|
||||
// creation
|
||||
LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* lock_stack)
|
||||
: AccessIndexed(array, index, length, elt_type, lock_stack)
|
||||
LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
|
||||
: AccessIndexed(array, index, length, elt_type, state_before)
|
||||
, _explicit_null_check(NULL) {}
|
||||
|
||||
// accessors
|
||||
@ -910,8 +862,8 @@ LEAF(StoreIndexed, AccessIndexed)
|
||||
int _profiled_bci;
|
||||
public:
|
||||
// creation
|
||||
StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* lock_stack)
|
||||
: AccessIndexed(array, index, length, elt_type, lock_stack)
|
||||
StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before)
|
||||
: AccessIndexed(array, index, length, elt_type, state_before)
|
||||
, _value(value), _profiled_method(NULL), _profiled_bci(0)
|
||||
{
|
||||
set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
|
||||
@ -922,7 +874,6 @@ LEAF(StoreIndexed, AccessIndexed)
|
||||
|
||||
// accessors
|
||||
Value value() const { return _value; }
|
||||
IRScope* scope() const; // the state's scope
|
||||
bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); }
|
||||
bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); }
|
||||
// Helpers for methodDataOop profiling
|
||||
@ -963,7 +914,12 @@ BASE(Op2, Instruction)
|
||||
|
||||
public:
|
||||
// creation
|
||||
Op2(ValueType* type, Bytecodes::Code op, Value x, Value y) : Instruction(type), _op(op), _x(x), _y(y) {
|
||||
Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
|
||||
: Instruction(type, state_before)
|
||||
, _op(op)
|
||||
, _x(x)
|
||||
, _y(y)
|
||||
{
|
||||
ASSERT_VALUES
|
||||
}
|
||||
|
||||
@ -985,28 +941,21 @@ BASE(Op2, Instruction)
|
||||
|
||||
|
||||
LEAF(ArithmeticOp, Op2)
|
||||
private:
|
||||
ValueStack* _lock_stack; // used only for division operations
|
||||
public:
|
||||
// creation
|
||||
ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* lock_stack)
|
||||
: Op2(x->type()->meet(y->type()), op, x, y)
|
||||
, _lock_stack(lock_stack) {
|
||||
ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
|
||||
: Op2(x->type()->meet(y->type()), op, x, y, state_before)
|
||||
{
|
||||
set_flag(IsStrictfpFlag, is_strictfp);
|
||||
if (can_trap()) pin();
|
||||
}
|
||||
|
||||
// accessors
|
||||
ValueStack* lock_stack() const { return _lock_stack; }
|
||||
bool is_strictfp() const { return check_flag(IsStrictfpFlag); }
|
||||
|
||||
// setters
|
||||
void set_lock_stack(ValueStack* l) { _lock_stack = l; }
|
||||
|
||||
// generic
|
||||
virtual bool is_commutative() const;
|
||||
virtual bool can_trap() const;
|
||||
virtual void other_values_do(ValueVisitor* f);
|
||||
HASHING3(Op2, true, op(), x()->subst(), y()->subst())
|
||||
};
|
||||
|
||||
@ -1033,21 +982,14 @@ LEAF(LogicOp, Op2)
|
||||
|
||||
|
||||
LEAF(CompareOp, Op2)
|
||||
private:
|
||||
ValueStack* _state_before; // for deoptimization, when canonicalizing
|
||||
public:
|
||||
// creation
|
||||
CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
|
||||
: Op2(intType, op, x, y)
|
||||
, _state_before(state_before)
|
||||
: Op2(intType, op, x, y, state_before)
|
||||
{}
|
||||
|
||||
// accessors
|
||||
ValueStack* state_before() const { return _state_before; }
|
||||
|
||||
// generic
|
||||
HASHING3(Op2, true, op(), x()->subst(), y()->subst())
|
||||
virtual void other_values_do(ValueVisitor* f);
|
||||
};
|
||||
|
||||
|
||||
@ -1103,11 +1045,13 @@ LEAF(Convert, Instruction)
LEAF(NullCheck, Instruction)
private:
Value _obj;
ValueStack* _lock_stack;

public:
// creation
NullCheck(Value obj, ValueStack* lock_stack) : Instruction(obj->type()->base()), _obj(obj), _lock_stack(lock_stack) {
NullCheck(Value obj, ValueStack* state_before)
: Instruction(obj->type()->base(), state_before)
, _obj(obj)
{
ASSERT_VALUES
set_can_trap(true);
assert(_obj->type()->is_object(), "null check must be applied to objects only");
@ -1116,16 +1060,13 @@ LEAF(NullCheck, Instruction)

// accessors
Value obj() const { return _obj; }
ValueStack* lock_stack() const { return _lock_stack; }

// setters
void set_lock_stack(ValueStack* l) { _lock_stack = l; }
void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); }

// generic
virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
virtual void other_values_do(ValueVisitor* f);
HASHING1(NullCheck, true, obj()->subst())
};

@ -1139,7 +1080,10 @@ BASE(StateSplit, Instruction)

public:
// creation
StateSplit(ValueType* type) : Instruction(type), _state(NULL) {
StateSplit(ValueType* type, ValueStack* state_before = NULL)
: Instruction(type, state_before)
, _state(NULL)
{
pin(PinStateSplitConstructor);
}

@ -1148,7 +1092,7 @@ BASE(StateSplit, Instruction)
IRScope* scope() const; // the state's scope

// manipulation
void set_state(ValueStack* state) { _state = state; }
void set_state(ValueStack* state) { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }

// generic
virtual void input_values_do(ValueVisitor* f) { /* no values */ }
@ -1164,7 +1108,6 @@ LEAF(Invoke, StateSplit)
BasicTypeList* _signature;
int _vtable_index;
ciMethod* _target;
ValueStack* _state_before; // Required for deoptimization.

public:
// creation
@ -1180,7 +1123,6 @@ LEAF(Invoke, StateSplit)
int vtable_index() const { return _vtable_index; }
BasicTypeList* signature() const { return _signature; }
ciMethod* target() const { return _target; }
ValueStack* state_before() const { return _state_before; }

// Returns false if target is not loaded
bool target_is_final() const { return check_flag(TargetIsFinalFlag); }
@ -1191,6 +1133,8 @@ LEAF(Invoke, StateSplit)
// JSR 292 support
bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; }

virtual bool needs_exception_state() const { return false; }

// generic
virtual bool can_trap() const { return true; }
virtual void input_values_do(ValueVisitor* f) {
@ -1208,11 +1152,16 @@ LEAF(NewInstance, StateSplit)

public:
// creation
NewInstance(ciInstanceKlass* klass) : StateSplit(instanceType), _klass(klass) {}
NewInstance(ciInstanceKlass* klass, ValueStack* state_before)
: StateSplit(instanceType, state_before)
, _klass(klass)
{}

// accessors
ciInstanceKlass* klass() const { return _klass; }

virtual bool needs_exception_state() const { return false; }

// generic
virtual bool can_trap() const { return true; }
ciType* exact_type() const;
@ -1222,22 +1171,24 @@ LEAF(NewInstance, StateSplit)
BASE(NewArray, StateSplit)
private:
Value _length;
ValueStack* _state_before;

public:
// creation
NewArray(Value length, ValueStack* state_before) : StateSplit(objectType), _length(length), _state_before(state_before) {
NewArray(Value length, ValueStack* state_before)
: StateSplit(objectType, state_before)
, _length(length)
{
// Do not ASSERT_VALUES since length is NULL for NewMultiArray
}

// accessors
ValueStack* state_before() const { return _state_before; }
Value length() const { return _length; }

virtual bool needs_exception_state() const { return false; }

// generic
virtual bool can_trap() const { return true; }
virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); }
virtual void other_values_do(ValueVisitor* f);
};


@ -1247,7 +1198,10 @@ LEAF(NewTypeArray, NewArray)

public:
// creation
NewTypeArray(Value length, BasicType elt_type) : NewArray(length, NULL), _elt_type(elt_type) {}
NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
: NewArray(length, state_before)
, _elt_type(elt_type)
{}

// accessors
BasicType elt_type() const { return _elt_type; }
@ -1303,7 +1257,6 @@ BASE(TypeCheck, StateSplit)
private:
ciKlass* _klass;
Value _obj;
ValueStack* _state_before;

ciMethod* _profiled_method;
int _profiled_bci;
@ -1311,14 +1264,13 @@ BASE(TypeCheck, StateSplit)
public:
// creation
TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
: StateSplit(type), _klass(klass), _obj(obj), _state_before(state_before),
: StateSplit(type, state_before), _klass(klass), _obj(obj),
_profiled_method(NULL), _profiled_bci(0) {
ASSERT_VALUES
set_direct_compare(false);
}

// accessors
ValueStack* state_before() const { return _state_before; }
ciKlass* klass() const { return _klass; }
Value obj() const { return _obj; }
bool is_loaded() const { return klass() != NULL; }
@ -1330,7 +1282,6 @@ BASE(TypeCheck, StateSplit)
// generic
virtual bool can_trap() const { return true; }
virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }
virtual void other_values_do(ValueVisitor* f);

// Helpers for methodDataOop profiling
void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
@ -1364,6 +1315,8 @@ LEAF(InstanceOf, TypeCheck)
public:
// creation
InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

virtual bool needs_exception_state() const { return false; }
};


@ -1374,8 +1327,8 @@ BASE(AccessMonitor, StateSplit)

public:
// creation
AccessMonitor(Value obj, int monitor_no)
: StateSplit(illegalType)
AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
: StateSplit(illegalType, state_before)
, _obj(obj)
, _monitor_no(monitor_no)
{
@ -1393,22 +1346,14 @@ BASE(AccessMonitor, StateSplit)


LEAF(MonitorEnter, AccessMonitor)
private:
ValueStack* _lock_stack_before;

public:
// creation
MonitorEnter(Value obj, int monitor_no, ValueStack* lock_stack_before)
: AccessMonitor(obj, monitor_no)
, _lock_stack_before(lock_stack_before)
MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
: AccessMonitor(obj, monitor_no, state_before)
{
ASSERT_VALUES
}

// accessors
ValueStack* lock_stack_before() const { return _lock_stack_before; }
virtual void state_values_do(ValueVisitor* f);

// generic
virtual bool can_trap() const { return true; }
};
@ -1417,7 +1362,11 @@ LEAF(MonitorEnter, AccessMonitor)
LEAF(MonitorExit, AccessMonitor)
public:
// creation
MonitorExit(Value obj, int monitor_no) : AccessMonitor(obj, monitor_no) {}
MonitorExit(Value obj, int monitor_no)
: AccessMonitor(obj, monitor_no, NULL)
{
ASSERT_VALUES
}
};


@ -1425,7 +1374,6 @@ LEAF(Intrinsic, StateSplit)
private:
vmIntrinsics::ID _id;
Values* _args;
ValueStack* _lock_stack;
Value _recv;

public:
@ -1440,13 +1388,12 @@ LEAF(Intrinsic, StateSplit)
vmIntrinsics::ID id,
Values* args,
bool has_receiver,
ValueStack* lock_stack,
ValueStack* state_before,
bool preserves_state,
bool cantrap = true)
: StateSplit(type)
: StateSplit(type, state_before)
, _id(id)
, _args(args)
, _lock_stack(lock_stack)
, _recv(NULL)
{
assert(args != NULL, "args must exist");
@ -1468,7 +1415,6 @@ LEAF(Intrinsic, StateSplit)
vmIntrinsics::ID id() const { return _id; }
int number_of_arguments() const { return _args->length(); }
Value argument_at(int i) const { return _args->at(i); }
ValueStack* lock_stack() const { return _lock_stack; }

bool has_receiver() const { return (_recv != NULL); }
Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; }
@ -1480,8 +1426,6 @@ LEAF(Intrinsic, StateSplit)
StateSplit::input_values_do(f);
for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
}
virtual void state_values_do(ValueVisitor* f);

};


@ -1490,6 +1434,7 @@ class LIR_List;
LEAF(BlockBegin, StateSplit)
private:
int _block_id; // the unique block id
int _bci; // start-bci of block
int _depth_first_number; // number of this block in a depth-first ordering
int _linear_scan_number; // number of this block in linear-scan ordering
int _loop_depth; // the loop nesting level of this block
@ -1546,6 +1491,7 @@ LEAF(BlockBegin, StateSplit)
// creation
BlockBegin(int bci)
: StateSplit(illegalType)
, _bci(bci)
, _depth_first_number(-1)
, _linear_scan_number(-1)
, _loop_depth(0)
@ -1570,11 +1516,14 @@ LEAF(BlockBegin, StateSplit)
, _total_preds(0)
, _stores_to_locals()
{
set_bci(bci);
#ifndef PRODUCT
set_printable_bci(bci);
#endif
}

// accessors
int block_id() const { return _block_id; }
int bci() const { return _bci; }
BlockList* successors() { return &_successors; }
BlockBegin* dominator() const { return _dominator; }
int loop_depth() const { return _loop_depth; }
@ -1596,7 +1545,6 @@ LEAF(BlockBegin, StateSplit)
BitMap& stores_to_locals() { return _stores_to_locals; }

// manipulation
void set_bci(int bci) { Instruction::set_bci(bci); }
void set_dominator(BlockBegin* dom) { _dominator = dom; }
void set_loop_depth(int d) { _loop_depth = d; }
void set_depth_first_number(int dfn) { _depth_first_number = dfn; }
@ -1694,7 +1642,6 @@ BASE(BlockEnd, StateSplit)
private:
BlockBegin* _begin;
BlockList* _sux;
ValueStack* _state_before;

protected:
BlockList* sux() const { return _sux; }
@ -1710,24 +1657,20 @@ BASE(BlockEnd, StateSplit)
public:
// creation
BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
: StateSplit(type)
: StateSplit(type, state_before)
, _begin(NULL)
, _sux(NULL)
, _state_before(state_before) {
{
set_flag(IsSafepointFlag, is_safepoint);
}

// accessors
ValueStack* state_before() const { return _state_before; }
bool is_safepoint() const { return check_flag(IsSafepointFlag); }
BlockBegin* begin() const { return _begin; }

// manipulation
void set_begin(BlockBegin* begin);

// generic
virtual void other_values_do(ValueVisitor* f);

// successors
int number_of_sux() const { return _sux != NULL ? _sux->length() : 0; }
BlockBegin* sux_at(int i) const { return _sux->at(i); }
@ -1919,6 +1862,8 @@ BASE(Switch, BlockEnd)
Value tag() const { return _tag; }
int length() const { return number_of_sux() - 1; }

virtual bool needs_exception_state() const { return false; }

// generic
virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); }
};
@ -1996,7 +1941,6 @@ LEAF(Throw, BlockEnd)
// generic
virtual bool can_trap() const { return true; }
virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); }
virtual void state_values_do(ValueVisitor* f);
};


@ -2091,7 +2035,6 @@ BASE(UnsafeOp, Instruction)

// generic
virtual void input_values_do(ValueVisitor* f) { }
virtual void other_values_do(ValueVisitor* f) { }
};


|
@ -316,7 +316,7 @@ void InstructionPrinter::print_head() {
void InstructionPrinter::print_line(Instruction* instr) {
// print instruction data on one line
if (instr->is_pinned()) output()->put('.');
fill_to(bci_pos ); output()->print("%d", instr->bci());
fill_to(bci_pos ); output()->print("%d", instr->printable_bci());
fill_to(use_pos ); output()->print("%d", instr->use_count());
fill_to(temp_pos ); print_temp(instr);
fill_to(instr_pos); print_instr(instr);
@ -569,7 +569,7 @@ void InstructionPrinter::do_BlockBegin(BlockBegin* x) {
if (printed_flag) output()->print(") ");

// print block bci range
output()->print("[%d, %d]", x->bci(), (end == NULL ? -1 : end->bci()));
output()->print("[%d, %d]", x->bci(), (end == NULL ? -1 : end->printable_bci()));

// print block successors
if (end != NULL && end->number_of_sux() > 0) {
|
@ -1520,7 +1520,7 @@ static void print_block(BlockBegin* x) {
if (x->is_set(BlockBegin::linear_scan_loop_end_flag)) tty->print("le ");

// print block bci range
tty->print("[%d, %d] ", x->bci(), (end == NULL ? -1 : end->bci()));
tty->print("[%d, %d] ", x->bci(), (end == NULL ? -1 : end->printable_bci()));

// print predecessors and successors
if (x->number_of_preds() > 0) {
@ -1576,7 +1576,7 @@ void LIR_Op::print_on(outputStream* out) const {
}
out->print(name()); out->print(" ");
print_instr(out);
if (info() != NULL) out->print(" [bci:%d]", info()->bci());
if (info() != NULL) out->print(" [bci:%d]", info()->stack()->bci());
#ifdef ASSERT
if (Verbose && _file != NULL) {
out->print(" (%s:%d)", _file, _line);
@ -1781,7 +1781,7 @@ void LIR_OpBranch::print_instr(outputStream* out) const {
out->print("[");
stub()->print_name(out);
out->print(": 0x%x]", stub());
if (stub()->info() != NULL) out->print(" [bci:%d]", stub()->info()->bci());
if (stub()->info() != NULL) out->print(" [bci:%d]", stub()->info()->stack()->bci());
} else {
out->print("[label:0x%x] ", label());
}
@ -1896,7 +1896,7 @@ void LIR_OpTypeCheck::print_instr(outputStream* out) const {
tmp2()->print(out); out->print(" ");
tmp3()->print(out); out->print(" ");
result_opr()->print(out); out->print(" ");
if (info_for_exception() != NULL) out->print(" [bci:%d]", info_for_exception()->bci());
if (info_for_exception() != NULL) out->print(" [bci:%d]", info_for_exception()->stack()->bci());
}


|
@ -35,7 +35,7 @@ void LIR_Assembler::patching_epilog(PatchingStub* patch, LIR_PatchCode patch_cod
append_patching_stub(patch);

#ifdef ASSERT
Bytecodes::Code code = info->scope()->method()->java_code_at_bci(info->bci());
Bytecodes::Code code = info->scope()->method()->java_code_at_bci(info->stack()->bci());
if (patch->id() == PatchingStub::access_field_id) {
switch (code) {
case Bytecodes::_putstatic:
@ -221,7 +221,7 @@ void LIR_Assembler::emit_block(BlockBegin* block) {
#ifndef PRODUCT
if (CommentedAssembly) {
stringStream st;
st.print_cr(" block B%d [%d, %d]", block->block_id(), block->bci(), block->end()->bci());
st.print_cr(" block B%d [%d, %d]", block->block_id(), block->bci(), block->end()->printable_bci());
_masm->block_comment(st.as_string());
}
#endif
@ -312,7 +312,7 @@ void LIR_Assembler::add_call_info(int pc_offset, CodeEmitInfo* cinfo) {
static ValueStack* debug_info(Instruction* ins) {
StateSplit* ss = ins->as_StateSplit();
if (ss != NULL) return ss->state();
return ins->lock_stack();
return ins->state_before();
}

void LIR_Assembler::process_debug_info(LIR_Op* op) {
@ -327,8 +327,7 @@ void LIR_Assembler::process_debug_info(LIR_Op* op) {
if (vstack == NULL) return;
if (_pending_non_safepoint != NULL) {
// Got some old debug info. Get rid of it.
if (_pending_non_safepoint->bci() == src->bci() &&
debug_info(_pending_non_safepoint) == vstack) {
if (debug_info(_pending_non_safepoint) == vstack) {
_pending_non_safepoint_offset = pc_offset;
return;
}
@ -358,7 +357,7 @@ static ValueStack* nth_oldest(ValueStack* s, int n, int& bci_result) {
ValueStack* tc = t->caller_state();
if (tc == NULL) return s;
t = tc;
bci_result = s->scope()->caller_bci();
bci_result = tc->bci();
s = s->caller_state();
}
}
@ -366,7 +365,7 @@ static ValueStack* nth_oldest(ValueStack* s, int n, int& bci_result) {
void LIR_Assembler::record_non_safepoint_debug_info() {
int pc_offset = _pending_non_safepoint_offset;
ValueStack* vstack = debug_info(_pending_non_safepoint);
int bci = _pending_non_safepoint->bci();
int bci = vstack->bci();

DebugInformationRecorder* debug_info = compilation()->debug_info_recorder();
assert(debug_info->recording_non_safepoints(), "sanity");
@ -380,7 +379,7 @@ void LIR_Assembler::record_non_safepoint_debug_info() {
if (s == NULL) break;
IRScope* scope = s->scope();
//Always pass false for reexecute since these ScopeDescs are never used for deopt
debug_info->describe_scope(pc_offset, scope->method(), s_bci, false/*reexecute*/);
debug_info->describe_scope(pc_offset, scope->method(), s->bci(), false/*reexecute*/);
}

debug_info->end_non_safepoint(pc_offset);
|
@ -386,18 +386,26 @@ void LIRGenerator::walk(Value instr) {


CodeEmitInfo* LIRGenerator::state_for(Instruction* x, ValueStack* state, bool ignore_xhandler) {
int index;
Value value;
for_each_stack_value(state, index, value) {
assert(value->subst() == value, "missed substition");
if (!value->is_pinned() && value->as_Constant() == NULL && value->as_Local() == NULL) {
walk(value);
assert(value->operand()->is_valid(), "must be evaluated now");
}
}
assert(state != NULL, "state must be defined");

ValueStack* s = state;
int bci = x->bci();
for_each_state(s) {
if (s->kind() == ValueStack::EmptyExceptionState) {
assert(s->stack_size() == 0 && s->locals_size() == 0 && (s->locks_size() == 0 || s->locks_size() == 1), "state must be empty");
continue;
}

int index;
Value value;
for_each_stack_value(s, index, value) {
assert(value->subst() == value, "missed substitution");
if (!value->is_pinned() && value->as_Constant() == NULL && value->as_Local() == NULL) {
walk(value);
assert(value->operand()->is_valid(), "must be evaluated now");
}
}

int bci = s->bci();
IRScope* scope = s->scope();
ciMethod* method = scope->method();

@ -428,15 +436,14 @@ CodeEmitInfo* LIRGenerator::state_for(Instruction* x, ValueStack* state, bool ig
}
}
}
bci = scope->caller_bci();
}

return new CodeEmitInfo(x->bci(), state, ignore_xhandler ? NULL : x->exception_handlers());
return new CodeEmitInfo(state, ignore_xhandler ? NULL : x->exception_handlers());
}


CodeEmitInfo* LIRGenerator::state_for(Instruction* x) {
return state_for(x, x->lock_stack());
return state_for(x, x->exception_state());
}


@ -900,18 +907,14 @@ void LIRGenerator::move_to_phi(ValueStack* cur_state) {
Value sux_value;
int index;

assert(cur_state->scope() == sux_state->scope(), "not matching");
assert(cur_state->locals_size() == sux_state->locals_size(), "not matching");
assert(cur_state->stack_size() == sux_state->stack_size(), "not matching");

for_each_stack_value(sux_state, index, sux_value) {
move_to_phi(&resolver, cur_state->stack_at(index), sux_value);
}

// Inlining may cause the local state not to match up, so walk up
// the caller state until we get to the same scope as the
// successor and then start processing from there.
while (cur_state->scope() != sux_state->scope()) {
cur_state = cur_state->caller_state();
assert(cur_state != NULL, "scopes don't match up");
}

for_each_local_value(sux_state, index, sux_value) {
move_to_phi(&resolver, cur_state->local_at(index), sux_value);
}
@ -1023,10 +1026,10 @@ void LIRGenerator::do_Phi(Phi* x) {

// Code for a constant is generated lazily unless the constant is frequently used and can't be inlined.
void LIRGenerator::do_Constant(Constant* x) {
if (x->state() != NULL) {
if (x->state_before() != NULL) {
// Any constant with a ValueStack requires patching so emit the patch here
LIR_Opr reg = rlock_result(x);
CodeEmitInfo* info = state_for(x, x->state());
CodeEmitInfo* info = state_for(x, x->state_before());
__ oop2reg_patch(NULL, reg, info);
} else if (x->use_count() > 1 && !can_inline_as_constant(x)) {
if (!x->is_pinned()) {
@ -1102,7 +1105,7 @@ void LIRGenerator::do_getClass(Intrinsic* x) {
// need to perform the null check on the rcvr
CodeEmitInfo* info = NULL;
if (x->needs_null_check()) {
info = state_for(x, x->state()->copy_locks());
info = state_for(x);
}
__ move(new LIR_Address(rcvr.result(), oopDesc::klass_offset_in_bytes(), T_OBJECT), result, info);
__ move(new LIR_Address(result, Klass::java_mirror_offset_in_bytes() +
@ -1481,7 +1484,7 @@ void LIRGenerator::do_StoreField(StoreField* x) {
} else if (x->needs_null_check()) {
NullCheck* nc = x->explicit_null_check();
if (nc == NULL) {
info = state_for(x, x->lock_stack());
info = state_for(x);
} else {
info = state_for(nc);
}
@ -1509,10 +1512,12 @@ void LIRGenerator::do_StoreField(StoreField* x) {

set_no_result(x);

#ifndef PRODUCT
if (PrintNotLoaded && needs_patching) {
tty->print_cr(" ###class not loaded at store_%s bci %d",
x->is_static() ? "static" : "field", x->bci());
x->is_static() ? "static" : "field", x->printable_bci());
}
#endif

if (x->needs_null_check() &&
(needs_patching ||
@ -1575,7 +1580,7 @@ void LIRGenerator::do_LoadField(LoadField* x) {
} else if (x->needs_null_check()) {
NullCheck* nc = x->explicit_null_check();
if (nc == NULL) {
info = state_for(x, x->lock_stack());
info = state_for(x);
} else {
info = state_for(nc);
}
@ -1585,10 +1590,12 @@ void LIRGenerator::do_LoadField(LoadField* x) {

object.load_item();

#ifndef PRODUCT
if (PrintNotLoaded && needs_patching) {
tty->print_cr(" ###class not loaded at load_%s bci %d",
x->is_static() ? "static" : "field", x->bci());
x->is_static() ? "static" : "field", x->printable_bci());
}
#endif

if (x->needs_null_check() &&
(needs_patching ||
@ -1781,7 +1788,7 @@ void LIRGenerator::do_Throw(Throw* x) {
if (GenerateCompilerNullChecks &&
(x->exception()->as_NewInstance() == NULL && x->exception()->as_ExceptionObject() == NULL)) {
// if the exception object wasn't created using new then it might be null.
__ null_check(exception_opr, new CodeEmitInfo(info, true));
__ null_check(exception_opr, new CodeEmitInfo(info, x->state()->copy(ValueStack::ExceptionState, x->state()->bci())));
}

if (compilation()->env()->jvmti_can_post_on_exceptions()) {
@ -2127,7 +2134,6 @@ void LIRGenerator::do_TableSwitch(TableSwitch* x) {
int lo_key = x->lo_key();
int hi_key = x->hi_key();
int len = x->length();
CodeEmitInfo* info = state_for(x, x->state());
LIR_Opr value = tag.result();
if (UseTableRanges) {
do_SwitchRanges(create_lookup_ranges(x), value, x->default_sux());
@ -2186,7 +2192,7 @@ void LIRGenerator::do_Goto(Goto* x) {

// increment backedge counter if needed
CodeEmitInfo* info = state_for(x, state);
increment_backedge_counter(info, info->bci());
increment_backedge_counter(info, info->stack()->bci());
CodeEmitInfo* safepoint_info = state_for(x, state);
__ safepoint(safepoint_poll_register(), safepoint_info);
}
@ -2293,7 +2299,7 @@ void LIRGenerator::do_Base(Base* x) {
LIR_Opr lock = new_register(T_INT);
__ load_stack_address_monitor(0, lock);

CodeEmitInfo* info = new CodeEmitInfo(SynchronizationEntryBCI, scope()->start()->state(), NULL);
CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), NULL);
CodeStub* slow_path = new MonitorEnterStub(obj, lock, info);

// receiver is guaranteed non-NULL so don't need CodeEmitInfo
@ -2303,7 +2309,7 @@ void LIRGenerator::do_Base(Base* x) {

// increment invocation counters if needed
if (!method()->is_accessor()) { // Accessors do not have MDOs, so no counting.
CodeEmitInfo* info = new CodeEmitInfo(InvocationEntryBci, scope()->start()->state(), NULL);
CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state(), NULL);
increment_invocation_counter(info);
}

@ -2463,7 +2469,7 @@ void LIRGenerator::do_Invoke(Invoke* x) {
break;
case Bytecodes::_invokedynamic: {
ciBytecodeStream bcs(x->scope()->method());
bcs.force_bci(x->bci());
bcs.force_bci(x->state()->bci());
assert(bcs.cur_bc() == Bytecodes::_invokedynamic, "wrong stream");
ciCPCache* cpcache = bcs.get_cpcache();

|
@ -2274,8 +2274,8 @@ void assert_equal(IRScopeDebugInfo* d1, IRScopeDebugInfo* d2) {
}

void check_stack_depth(CodeEmitInfo* info, int stack_end) {
if (info->bci() != SynchronizationEntryBCI && !info->scope()->method()->is_native()) {
Bytecodes::Code code = info->scope()->method()->java_code_at_bci(info->bci());
if (info->stack()->bci() != SynchronizationEntryBCI && !info->scope()->method()->is_native()) {
Bytecodes::Code code = info->scope()->method()->java_code_at_bci(info->stack()->bci());
switch (code) {
case Bytecodes::_ifnull : // fall through
case Bytecodes::_ifnonnull : // fall through
@ -2379,7 +2379,7 @@ OopMap* LinearScan::compute_oop_map(IntervalWalker* iw, LIR_Op* op, CodeEmitInfo

// add oops from lock stack
assert(info->stack() != NULL, "CodeEmitInfo must always have a stack");
int locks_count = info->stack()->locks_size();
int locks_count = info->stack()->total_locks_size();
for (int i = 0; i < locks_count; i++) {
map->set_oop(frame_map()->monitor_object_regname(i));
}
@ -2762,19 +2762,13 @@ int LinearScan::append_scope_value(int op_id, Value value, GrowableArray<ScopeVa
}


IRScopeDebugInfo* LinearScan::compute_debug_info_for_scope(int op_id, IRScope* cur_scope, ValueStack* cur_state, ValueStack* innermost_state, int cur_bci, int stack_end, int locks_end) {
IRScopeDebugInfo* LinearScan::compute_debug_info_for_scope(int op_id, IRScope* cur_scope, ValueStack* cur_state, ValueStack* innermost_state) {
IRScopeDebugInfo* caller_debug_info = NULL;
int stack_begin, locks_begin;

ValueStack* caller_state = cur_scope->caller_state();
ValueStack* caller_state = cur_state->caller_state();
if (caller_state != NULL) {
// process recursively to compute outermost scope first
stack_begin = caller_state->stack_size();
locks_begin = caller_state->locks_size();
caller_debug_info = compute_debug_info_for_scope(op_id, cur_scope->caller(), caller_state, innermost_state, cur_scope->caller_bci(), stack_begin, locks_begin);
} else {
stack_begin = 0;
locks_begin = 0;
caller_debug_info = compute_debug_info_for_scope(op_id, cur_scope->caller(), caller_state, innermost_state);
}

// initialize these to null.
@ -2785,7 +2779,7 @@ IRScopeDebugInfo* LinearScan::compute_debug_info_for_scope(int op_id, IRScope* c
GrowableArray<MonitorValue*>* monitors = NULL;

// describe local variable values
int nof_locals = cur_scope->method()->max_locals();
int nof_locals = cur_state->locals_size();
if (nof_locals > 0) {
locals = new GrowableArray<ScopeValue*>(nof_locals);

@ -2800,45 +2794,41 @@ IRScopeDebugInfo* LinearScan::compute_debug_info_for_scope(int op_id, IRScope* c
}
assert(locals->length() == cur_scope->method()->max_locals(), "wrong number of locals");
assert(locals->length() == cur_state->locals_size(), "wrong number of locals");
} else if (cur_scope->method()->max_locals() > 0) {
assert(cur_state->kind() == ValueStack::EmptyExceptionState, "should be");
nof_locals = cur_scope->method()->max_locals();
locals = new GrowableArray<ScopeValue*>(nof_locals);
for(int i = 0; i < nof_locals; i++) {
locals->append(&_illegal_value);
}
}


// describe expression stack
//
// When we inline methods containing exception handlers, the
// "lock_stacks" are changed to preserve expression stack values
// in caller scopes when exception handlers are present. This
// can cause callee stacks to be smaller than caller stacks.
if (stack_end > innermost_state->stack_size()) {
stack_end = innermost_state->stack_size();
}



int nof_stack = stack_end - stack_begin;
int nof_stack = cur_state->stack_size();
if (nof_stack > 0) {
expressions = new GrowableArray<ScopeValue*>(nof_stack);

int pos = stack_begin;
while (pos < stack_end) {
Value expression = innermost_state->stack_at_inc(pos);
int pos = 0;
while (pos < nof_stack) {
Value expression = cur_state->stack_at_inc(pos);
append_scope_value(op_id, expression, expressions);

assert(expressions->length() + stack_begin == pos, "must match");
assert(expressions->length() == pos, "must match");
}
assert(expressions->length() == cur_state->stack_size(), "wrong number of stack entries");
}

// describe monitors
assert(locks_begin <= locks_end, "error in scope iteration");
int nof_locks = locks_end - locks_begin;
int nof_locks = cur_state->locks_size();
if (nof_locks > 0) {
int lock_offset = cur_state->caller_state() != NULL ? cur_state->caller_state()->total_locks_size() : 0;
monitors = new GrowableArray<MonitorValue*>(nof_locks);
for (int i = locks_begin; i < locks_end; i++) {
monitors->append(location_for_monitor_index(i));
for (int i = 0; i < nof_locks; i++) {
monitors->append(location_for_monitor_index(lock_offset + i));
}
}

return new IRScopeDebugInfo(cur_scope, cur_bci, locals, expressions, monitors, caller_debug_info);
return new IRScopeDebugInfo(cur_scope, cur_state->bci(), locals, expressions, monitors, caller_debug_info);
}


@ -2850,17 +2840,14 @@ void LinearScan::compute_debug_info(CodeEmitInfo* info, int op_id) {

assert(innermost_scope != NULL && innermost_state != NULL, "why is it missing?");

int stack_end = innermost_state->stack_size();
int locks_end = innermost_state->locks_size();

DEBUG_ONLY(check_stack_depth(info, stack_end));
DEBUG_ONLY(check_stack_depth(info, innermost_state->stack_size()));

if (info->_scope_debug_info == NULL) {
// compute debug information
info->_scope_debug_info = compute_debug_info_for_scope(op_id, innermost_scope, innermost_state, innermost_state, info->bci(), stack_end, locks_end);
info->_scope_debug_info = compute_debug_info_for_scope(op_id, innermost_scope, innermost_state, innermost_state);
} else {
// debug information already set. Check that it is correct from the current point of view
DEBUG_ONLY(assert_equal(info->_scope_debug_info, compute_debug_info_for_scope(op_id, innermost_scope, innermost_state, innermost_state, info->bci(), stack_end, locks_end)));
DEBUG_ONLY(assert_equal(info->_scope_debug_info, compute_debug_info_for_scope(op_id, innermost_scope, innermost_state, innermost_state)));
}
}

|
@ -346,7 +346,7 @@ class LinearScan : public CompilationResourceObj {
int append_scope_value_for_operand(LIR_Opr opr, GrowableArray<ScopeValue*>* scope_values);
int append_scope_value(int op_id, Value value, GrowableArray<ScopeValue*>* scope_values);

IRScopeDebugInfo* compute_debug_info_for_scope(int op_id, IRScope* cur_scope, ValueStack* cur_state, ValueStack* innermost_state, int cur_bci, int stack_end, int locks_end);
IRScopeDebugInfo* compute_debug_info_for_scope(int op_id, IRScope* cur_scope, ValueStack* cur_state, ValueStack* innermost_state);
void compute_debug_info(CodeEmitInfo* info, int op_id);

void assign_reg_num(LIR_OpList* instructions, IntervalWalker* iw);
|
@ -140,25 +140,27 @@ class CE_Eliminator: public BlockClosure {
// with an IfOp followed by a Goto
// cut if_ away and get node before
Instruction* cur_end = if_->prev(block);
int bci = if_->bci();

// append constants of true- and false-block if necessary
// clone constants because original block must not be destroyed
assert((t_value != f_const && f_value != t_const) || t_const == f_const, "mismatch");
if (t_value == t_const) {
t_value = new Constant(t_const->type());
cur_end = cur_end->set_next(t_value, bci);
NOT_PRODUCT(t_value->set_printable_bci(if_->printable_bci()));
cur_end = cur_end->set_next(t_value);
}
if (f_value == f_const) {
f_value = new Constant(f_const->type());
cur_end = cur_end->set_next(f_value, bci);
NOT_PRODUCT(f_value->set_printable_bci(if_->printable_bci()));
cur_end = cur_end->set_next(f_value);
}

// it is very unlikely that the condition can be statically decided
// (this was checked previously by the Canonicalizer), so always
// append IfOp
Value result = new IfOp(if_->x(), if_->cond(), if_->y(), t_value, f_value);
cur_end = cur_end->set_next(result, bci);
NOT_PRODUCT(result->set_printable_bci(if_->printable_bci()));
cur_end = cur_end->set_next(result);

// append Goto to successor
ValueStack* state_before = if_->is_safepoint() ? if_->state_before() : NULL;
@ -167,16 +169,15 @@ class CE_Eliminator: public BlockClosure {
// prepare state for Goto
ValueStack* goto_state = if_->state();
while (sux_state->scope() != goto_state->scope()) {
goto_state = goto_state->pop_scope();
goto_state = goto_state->caller_state();
assert(goto_state != NULL, "states do not match up");
}
goto_state = goto_state->copy();
goto_state = goto_state->copy(ValueStack::StateAfter, goto_state->bci());
goto_state->push(result->type(), result);
assert(goto_state->is_same_across_scopes(sux_state), "states must match now");
assert(goto_state->is_same(sux_state), "states must match now");
goto_->set_state(goto_state);

// Steal the bci for the goto from the sux
cur_end = cur_end->set_next(goto_, sux->bci());
cur_end = cur_end->set_next(goto_, goto_state->bci());

// Adjust control flow graph
BlockBegin::disconnect_edge(block, t_block);
@ -251,10 +252,8 @@ class BlockMerger: public BlockClosure {
// no phi functions must be present at beginning of sux
ValueStack* sux_state = sux->state();
ValueStack* end_state = end->state();
while (end_state->scope() != sux_state->scope()) {
// match up inlining level
end_state = end_state->pop_scope();
}

assert(end_state->scope() == sux_state->scope(), "scopes must match");
assert(end_state->stack_size() == sux_state->stack_size(), "stack not equal");
assert(end_state->locals_size() == sux_state->locals_size(), "locals not equal");

@ -273,7 +272,7 @@ class BlockMerger: public BlockClosure {
Instruction* prev = end->prev(block);
Instruction* next = sux->next();
assert(prev->as_BlockEnd() == NULL, "must not be a BlockEnd");
prev->set_next(next, next->bci());
prev->set_next(next);
sux->disconnect_from_graph();
block->set_end(sux->end());
// add exception handlers of deleted block, if any
@ -337,7 +336,8 @@ class BlockMerger: public BlockClosure {
newif->set_state(if_->state()->copy());

assert(prev->next() == if_, "must be guaranteed by above search");
prev->set_next(newif, if_->bci());
NOT_PRODUCT(newif->set_printable_bci(if_->printable_bci()));
prev->set_next(newif);
block->set_end(newif);

_merge_count++;
@ -705,7 +705,7 @@ void NullCheckEliminator::iterate_one(BlockBegin* block) {
// visiting instructions which are references in other blocks or
// visiting instructions more than once.
mark_visitable(instr);
if (instr->is_root() || instr->can_trap() || (instr->as_NullCheck() != NULL)) {
if (instr->is_pinned() || instr->can_trap() || (instr->as_NullCheck() != NULL)) {
mark_visited(instr);
instr->input_values_do(this);
instr->visit(&_visitor);
|
@ -28,55 +28,60 @@

// Implementation of ValueStack

ValueStack::ValueStack(IRScope* scope, int locals_size, int max_stack_size)
ValueStack::ValueStack(IRScope* scope, ValueStack* caller_state)
: _scope(scope)
, _locals(locals_size, NULL)
, _stack(max_stack_size)
, _lock_stack(false)
, _locks(1)
, _caller_state(caller_state)
, _bci(-99)
, _kind(Parsing)
, _locals(scope->method()->max_locals(), NULL)
, _stack(scope->method()->max_stack())
, _locks()
{
assert(scope != NULL, "scope must exist");
}

ValueStack* ValueStack::copy() {
ValueStack* s = new ValueStack(scope(), locals_size(), max_stack_size());
s->_stack.appendAll(&_stack);
s->_locks.appendAll(&_locks);
s->replace_locals(this);
return s;
verify();
}


ValueStack* ValueStack::copy_locks() {
int sz = scope()->lock_stack_size();
if (stack_size() == 0) {
sz = 0;
ValueStack::ValueStack(ValueStack* copy_from, Kind kind, int bci)
: _scope(copy_from->scope())
, _caller_state(copy_from->caller_state())
, _bci(bci)
, _kind(kind)
, _locals()
, _stack()
, _locks(copy_from->locks_size())
{
assert(kind != EmptyExceptionState || !Compilation::current()->env()->jvmti_can_access_local_variables(), "need locals");
if (kind != EmptyExceptionState) {
// only allocate space if we need to copy the locals-array
_locals = Values(copy_from->locals_size());
_locals.appendAll(&copy_from->_locals);
}
ValueStack* s = new ValueStack(scope(), locals_size(), sz);
s->_lock_stack = true;
s->_locks.appendAll(&_locks);
s->replace_locals(this);
if (sz > 0) {
assert(sz <= stack_size(), "lock stack underflow");
for (int i = 0; i < sz; i++) {
s->_stack.append(_stack[i]);

if (kind != ExceptionState && kind != EmptyExceptionState) {
if (kind == Parsing) {
// stack will be modified, so reserve enough space to avoid resizing
_stack = Values(scope()->method()->max_stack());
} else {
// stack will not be modified, so do not waste space
_stack = Values(copy_from->stack_size());
}
_stack.appendAll(&copy_from->_stack);
}
return s;

_locks.appendAll(&copy_from->_locks);

verify();
}


bool ValueStack::is_same(ValueStack* s) {
assert(s != NULL, "state must exist");
assert(scope () == s->scope (), "scopes must correspond");
assert(locals_size() == s->locals_size(), "locals sizes must correspond");
return is_same_across_scopes(s);
}
if (scope() != s->scope()) return false;
if (caller_state() != s->caller_state()) return false;

if (locals_size() != s->locals_size()) return false;
if (stack_size() != s->stack_size()) return false;
if (locks_size() != s->locks_size()) return false;

bool ValueStack::is_same_across_scopes(ValueStack* s) {
assert(s != NULL, "state must exist");
assert(stack_size () == s->stack_size (), "stack sizes must correspond");
assert(locks_size () == s->locks_size (), "locks sizes must correspond");
// compare each stack element with the corresponding stack element of s
int index;
Value value;
@ -89,12 +94,6 @@ bool ValueStack::is_same_across_scopes(ValueStack* s) {
return true;
}


ValueStack* ValueStack::caller_state() const {
return scope()->caller_state();
}


void ValueStack::clear_locals() {
for (int i = _locals.length() - 1; i >= 0; i--) {
_locals.at_put(i, NULL);
@ -102,13 +101,6 @@ void ValueStack::clear_locals() {
}


void ValueStack::replace_locals(ValueStack* with) {
assert(locals_size() == with->locals_size(), "number of locals must match");
for (int i = locals_size() - 1; i >= 0; i--) {
_locals.at_put(i, with->_locals.at(i));
}
}

void ValueStack::pin_stack_for_linear_scan() {
for_each_state_value(this, v,
if (v->as_Constant() == NULL && v->as_Local() == NULL) {
@ -123,33 +115,25 @@ void ValueStack::apply(Values list, ValueVisitor* f) {
for (int i = 0; i < list.length(); i++) {
Value* va = list.adr_at(i);
Value v0 = *va;
if (v0 != NULL) {
if (!v0->type()->is_illegal()) {
assert(v0->as_HiWord() == NULL, "should never see HiWord during traversal");
f->visit(va);
if (v0 != NULL && !v0->type()->is_illegal()) {
f->visit(va);
#ifdef ASSERT
Value v1 = *va;
if (v0 != v1) {
assert(v1->type()->is_illegal() || v0->type()->tag() == v1->type()->tag(), "types must match");
if (v0->type()->is_double_word()) {
list.at_put(i + 1, v0->hi_word());
}
}
Value v1 = *va;
assert(v1->type()->is_illegal() || v0->type()->tag() == v1->type()->tag(), "types must match");
assert(!v1->type()->is_double_word() || list.at(i + 1) == NULL, "hi-word of doubleword value must be NULL");
#endif
if (v0->type()->is_double_word()) i++;
}
if (v0->type()->is_double_word()) i++;
}
}
}


void ValueStack::values_do(ValueVisitor* f) {
apply(_stack, f);
apply(_locks, f);

ValueStack* state = this;
for_each_state(state) {
apply(state->_locals, f);
apply(state->_stack, f);
apply(state->_locks, f);
}
}

@ -164,52 +148,26 @@ Values* ValueStack::pop_arguments(int argument_size) {
}


int ValueStack::lock(IRScope* scope, Value obj) {
int ValueStack::total_locks_size() const {
int num_locks = 0;
const ValueStack* state = this;
for_each_state(state) {
num_locks += state->locks_size();
}
return num_locks;
}

int ValueStack::lock(Value obj) {
_locks.push(obj);
scope->set_min_number_of_locks(locks_size());
return locks_size() - 1;
int num_locks = total_locks_size();
scope()->set_min_number_of_locks(num_locks);
return num_locks - 1;
}


int ValueStack::unlock() {
_locks.pop();
return locks_size();
}


ValueStack* ValueStack::push_scope(IRScope* scope) {
assert(scope->caller() == _scope, "scopes must have caller/callee relationship");
ValueStack* res = new ValueStack(scope,
scope->method()->max_locals(),
max_stack_size() + scope->method()->max_stack());
// Preserves stack and monitors.
res->_stack.appendAll(&_stack);
res->_locks.appendAll(&_locks);
assert(res->_stack.size() <= res->max_stack_size(), "stack overflow");
return res;
}


ValueStack* ValueStack::pop_scope() {
assert(_scope->caller() != NULL, "scope must have caller");
IRScope* scope = _scope->caller();
int max_stack = max_stack_size() - _scope->method()->max_stack();
assert(max_stack >= 0, "stack underflow");
ValueStack* res = new ValueStack(scope,
scope->method()->max_locals(),
max_stack);
// Preserves stack and monitors. Restores local and store state from caller scope.
res->_stack.appendAll(&_stack);
res->_locks.appendAll(&_locks);
ValueStack* caller = caller_state();
if (caller != NULL) {
for (int i = 0; i < caller->_locals.length(); i++) {
res->_locals.at_put(i, caller->_locals.at(i));
}
assert(res->_locals.length() == res->scope()->method()->max_locals(), "just checking");
}
assert(res->_stack.size() <= res->max_stack_size(), "stack overflow");
return res;
return total_locks_size();
}


@ -220,11 +178,7 @@ void ValueStack::setup_phi_for_stack(BlockBegin* b, int index) {
Value phi = new Phi(t, b, -index - 1);
_stack[index] = phi;

#ifdef ASSERT
if (t->is_double_word()) {
_stack[index + 1] = phi->hi_word();
}
#endif
assert(!t->is_double_word() || _stack.at(index + 1) == NULL, "hi-word of doubleword value must be NULL");
}

void ValueStack::setup_phi_for_local(BlockBegin* b, int index) {
@ -236,7 +190,9 @@ void ValueStack::setup_phi_for_local(BlockBegin* b, int index) {
}

#ifndef PRODUCT

void ValueStack::print() {
scope()->method()->print_name();
if (stack_is_empty()) {
tty->print_cr("empty stack");
} else {
@ -244,18 +200,20 @@ void ValueStack::print() {
for (int i = 0; i < stack_size();) {
Value t = stack_at_inc(i);
tty->print("%2d ", i);
tty->print("%c%d ", t->type()->tchar(), t->id());
ip.print_instr(t);
tty->cr();
}
}
if (!no_active_locks()) {
InstructionPrinter ip;
for (int i = 0; i < locks_size(); i--) {
for (int i = 0; i < locks_size(); i++) {
Value t = lock_at(i);
tty->print("lock %2d ", i);
if (t == NULL) {
tty->print("this");
} else {
tty->print("%c%d ", t->type()->tchar(), t->id());
ip.print_instr(t);
}
tty->cr();
@ -270,16 +228,55 @@ void ValueStack::print() {
tty->print("null");
i ++;
} else {
tty->print("%c%d ", l->type()->tchar(), l->id());
ip.print_instr(l);
if (l->type()->is_illegal() || l->type()->is_single_word()) i ++; else i += 2;
}
tty->cr();
}
}

if (caller_state() != NULL) {
caller_state()->print();
}
}


void ValueStack::verify() {
Unimplemented();
assert(scope() != NULL, "scope must exist");
if (caller_state() != NULL) {
assert(caller_state()->scope() == scope()->caller(), "invalid caller scope");
caller_state()->verify();
}

if (kind() == Parsing) {
assert(bci() == -99, "bci not defined during parsing");
} else {
assert(bci() >= -1, "bci out of range");
assert(bci() < scope()->method()->code_size(), "bci out of range");
assert(bci() == SynchronizationEntryBCI || Bytecodes::is_defined(scope()->method()->java_code_at_bci(bci())), "make sure bci points at a real bytecode");
assert(scope()->method()->liveness_at_bci(bci()).is_valid(), "liveness at bci must be valid");
}

int i;
for (i = 0; i < stack_size(); i++) {
Value v = _stack.at(i);
if (v == NULL) {
assert(_stack.at(i - 1)->type()->is_double_word(), "only hi-words are NULL on stack");
} else if (v->type()->is_double_word()) {
assert(_stack.at(i + 1) == NULL, "hi-word must be NULL");
}
}

for (i = 0; i < locals_size(); i++) {
Value v = _locals.at(i);
if (v != NULL && v->type()->is_double_word()) {
assert(_locals.at(i + 1) == NULL, "hi-word must be NULL");
}
}

for_each_state_value(this, v,
assert(v != NULL, "just test if state-iteration succeeds");
);
}
#endif // PRODUCT
|
@ -23,9 +23,23 @@
*/

class ValueStack: public CompilationResourceObj {
public:
enum Kind {
Parsing, // During abstract interpretation in GraphBuilder
CallerState, // Caller state when inlining
StateBefore, // Before before execution of instruction
StateAfter, // After execution of instruction
ExceptionState, // Exception handling of instruction
EmptyExceptionState, // Exception handling of instructions not covered by an xhandler
BlockBeginState // State of BlockBegin instruction with phi functions of this block
};

private:
IRScope* _scope; // the enclosing scope
bool _lock_stack; // indicates that this ValueStack is for an exception site
ValueStack* _caller_state;
int _bci;
Kind _kind;

Values _locals; // the locals
Values _stack; // the expression stack
Values _locks; // the monitor stack (holding the locked values)
@ -36,100 +50,79 @@ class ValueStack: public CompilationResourceObj {
}

Value check(ValueTag tag, Value t, Value h) {
assert(h->as_HiWord()->lo_word() == t, "incorrect stack pair");
assert(h == NULL, "hi-word of doubleword value must be NULL");
return check(tag, t);
}

// helper routine
static void apply(Values list, ValueVisitor* f);

// for simplified copying
ValueStack(ValueStack* copy_from, Kind kind, int bci);

public:
// creation
ValueStack(IRScope* scope, int locals_size, int max_stack_size);
ValueStack(IRScope* scope, ValueStack* caller_state);

ValueStack* copy() { return new ValueStack(this, _kind, _bci); }
ValueStack* copy(Kind new_kind, int new_bci) { return new ValueStack(this, new_kind, new_bci); }
ValueStack* copy_for_parsing() { return new ValueStack(this, Parsing, -99); }

void set_caller_state(ValueStack* s) { assert(kind() == EmptyExceptionState, "only EmptyExceptionStates can be modified"); _caller_state = s; }

// merging
ValueStack* copy(); // returns a copy of this w/ cleared locals
ValueStack* copy_locks(); // returns a copy of this w/ cleared locals and stack
// Note that when inlining of methods with exception
// handlers is enabled, this stack may have a
// non-empty expression stack (size defined by
// scope()->lock_stack_size())
bool is_same(ValueStack* s); // returns true if this & s's types match (w/o checking locals)
bool is_same_across_scopes(ValueStack* s); // same as is_same but returns true even if stacks are in different scopes (used for block merging w/inlining)

// accessors
IRScope* scope() const { return _scope; }
bool is_lock_stack() const { return _lock_stack; }
ValueStack* caller_state() const { return _caller_state; }
int bci() const { return _bci; }
Kind kind() const { return _kind; }

int locals_size() const { return _locals.length(); }
int stack_size() const { return _stack.length(); }
int locks_size() const { return _locks.length(); }
int max_stack_size() const { return _stack.capacity(); }
bool stack_is_empty() const { return _stack.is_empty(); }
bool no_active_locks() const { return _locks.is_empty(); }
ValueStack* caller_state() const;
int total_locks_size() const;

// locals access
void clear_locals(); // sets all locals to NULL;

// Kill local i. Also kill local i+1 if i was a long or double.
void invalidate_local(int i) {
Value x = _locals.at(i);
if (x != NULL && x->type()->is_double_word()) {
assert(_locals.at(i + 1)->as_HiWord()->lo_word() == x, "locals inconsistent");
_locals.at_put(i + 1, NULL);
}
assert(_locals.at(i)->type()->is_single_word() ||
_locals.at(i + 1) == NULL, "hi-word of doubleword value must be NULL");
_locals.at_put(i, NULL);
}


Value load_local(int i) const {
Value local_at(int i) const {
Value x = _locals.at(i);
if (x != NULL && x->type()->is_illegal()) return NULL;
assert(x == NULL || x->as_HiWord() == NULL, "index points to hi word");
assert(x == NULL || x->type()->is_illegal() || x->type()->is_single_word() || x == _locals.at(i+1)->as_HiWord()->lo_word(), "locals inconsistent");
assert(x == NULL || x->type()->is_single_word() ||
_locals.at(i + 1) == NULL, "hi-word of doubleword value must be NULL");
return x;
}

Value local_at(int i) const { return _locals.at(i); }

// Store x into local i.
void store_local(int i, Value x) {
// Kill the old value
invalidate_local(i);
_locals.at_put(i, x);

// Writing a double word can kill other locals
if (x != NULL && x->type()->is_double_word()) {
// If x + i was the start of a double word local then kill i + 2.
Value x2 = _locals.at(i + 1);
if (x2 != NULL && x2->type()->is_double_word()) {
_locals.at_put(i + 2, NULL);
}

// If x is a double word local, also update i + 1.
#ifdef ASSERT
_locals.at_put(i + 1, x->hi_word());
#else
_locals.at_put(i + 1, NULL);
#endif
}
// If x - 1 was the start of a double word local then kill i - 1.
// When overwriting local i, check if i - 1 was the start of a
// double word local and kill it.
if (i > 0) {
Value prev = _locals.at(i - 1);
if (prev != NULL && prev->type()->is_double_word()) {
_locals.at_put(i - 1, NULL);
}
}
}

void replace_locals(ValueStack* with);
_locals.at_put(i, x);
if (x->type()->is_double_word()) {
// hi-word of doubleword value is always NULL
_locals.at_put(i + 1, NULL);
}
}

// stack access
Value stack_at(int i) const {
Value x = _stack.at(i);
assert(x->as_HiWord() == NULL, "index points to hi word");
assert(x->type()->is_single_word() ||
x->subst() == _stack.at(i+1)->as_HiWord()->lo_word(), "stack inconsistent");
_stack.at(i + 1) == NULL, "hi-word of doubleword value must be NULL");
return x;
}

@ -146,7 +139,6 @@ class ValueStack: public CompilationResourceObj {
|
||||
void values_do(ValueVisitor* f);
|
||||
|
||||
// untyped manipulation (for dup_x1, etc.)
|
||||
void clear_stack() { _stack.clear(); }
|
||||
void truncate_stack(int size) { _stack.trunc_to(size); }
|
||||
void raw_push(Value t) { _stack.push(t); }
|
||||
Value raw_pop() { return _stack.pop(); }
|
||||
@ -156,15 +148,8 @@ class ValueStack: public CompilationResourceObj {
|
||||
void fpush(Value t) { _stack.push(check(floatTag , t)); }
|
||||
void apush(Value t) { _stack.push(check(objectTag , t)); }
|
||||
void rpush(Value t) { _stack.push(check(addressTag, t)); }
|
||||
#ifdef ASSERT
|
||||
// in debug mode, use HiWord for 2-word values
|
||||
void lpush(Value t) { _stack.push(check(longTag , t)); _stack.push(new HiWord(t)); }
|
||||
void dpush(Value t) { _stack.push(check(doubleTag , t)); _stack.push(new HiWord(t)); }
|
||||
#else
|
||||
// in optimized mode, use NULL for 2-word values
|
||||
void lpush(Value t) { _stack.push(check(longTag , t)); _stack.push(NULL); }
|
||||
void dpush(Value t) { _stack.push(check(doubleTag , t)); _stack.push(NULL); }
|
||||
#endif // ASSERT
|
||||
|
||||
void push(ValueType* type, Value t) {
|
||||
switch (type->tag()) {
|
||||
@ -182,15 +167,8 @@ class ValueStack: public CompilationResourceObj {
|
||||
Value fpop() { return check(floatTag , _stack.pop()); }
|
||||
Value apop() { return check(objectTag , _stack.pop()); }
|
||||
Value rpop() { return check(addressTag, _stack.pop()); }
|
||||
#ifdef ASSERT
|
||||
// in debug mode, check for HiWord consistency
|
||||
Value lpop() { Value h = _stack.pop(); return check(longTag , _stack.pop(), h); }
|
||||
Value dpop() { Value h = _stack.pop(); return check(doubleTag, _stack.pop(), h); }
|
||||
#else
|
||||
// in optimized mode, ignore HiWord since it is NULL
|
||||
Value lpop() { _stack.pop(); return check(longTag , _stack.pop()); }
|
||||
Value dpop() { _stack.pop(); return check(doubleTag, _stack.pop()); }
|
||||
#endif // ASSERT
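
A self-contained sketch of the slot discipline the lpush/dpush and lpop/dpop helpers enforce: a long or double takes two stack slots, the pusher adds a placeholder for the hi word and the popper discards it first. OperandStackModel and SlotValue are made-up names for illustration, not the HotSpot stack.

// Illustrative model only. Two-word values keep their hi-word placeholder
// directly above the value, so slot indices match the JVM's two-slot layout.
#include <cassert>
#include <vector>

struct SlotValue { const char* name; };

struct OperandStackModel {
  std::vector<SlotValue*> stack;

  void push_one(SlotValue* v) { stack.push_back(v); }
  void push_two(SlotValue* v) { stack.push_back(v); stack.push_back(nullptr); }  // hi-word placeholder

  SlotValue* pop_one() { SlotValue* v = stack.back(); stack.pop_back(); return v; }
  SlotValue* pop_two() { stack.pop_back(); return pop_one(); }   // drop placeholder, then the value
};

int main() {
  OperandStackModel s;
  SlotValue l = { "a long" };
  SlotValue o = { "an oop" };
  s.push_two(&l);                 // long: slots 0 and 1
  s.push_one(&o);                 // oop: slot 2
  assert(s.stack.size() == 3);
  assert(s.pop_one() == &o);
  assert(s.pop_two() == &l);
  assert(s.stack.empty());
  return 0;
}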

Value pop(ValueType* type) {
switch (type->tag()) {
@ -208,16 +186,10 @@ class ValueStack: public CompilationResourceObj {
Values* pop_arguments(int argument_size);

// locks access
int lock (IRScope* scope, Value obj);
int lock (Value obj);
int unlock();
Value lock_at(int i) const { return _locks.at(i); }

// Inlining support
ValueStack* push_scope(IRScope* scope); // "Push" new scope, returning new resulting stack
// Preserves stack and locks, destroys locals
ValueStack* pop_scope(); // "Pop" topmost scope, returning new resulting stack
// Preserves stack and locks, destroys locals
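
The push_scope/pop_scope pair is what the inliner uses to enter and leave a callee frame. Below is a rough standalone sketch of the "preserves stack and locks, destroys locals" contract stated in the comments; ScopeState and its members are invented for illustration and are not the real ValueStack API.

// Illustrative model only. Entering an inlined scope keeps the expression
// stack and the held locks but starts with fresh locals; leaving the scope
// drops the callee locals and returns to the caller's frame.
#include <cassert>
#include <string>
#include <vector>

struct ScopeState {
  std::vector<std::string> stack;    // expression stack, carried across scopes here
  std::vector<std::string> locks;    // monitors held so far
  std::vector<std::string> locals;   // per-scope locals
  ScopeState*              caller;   // enclosing scope, null for the top

  ScopeState* push_scope(int callee_max_locals) {
    ScopeState* callee = new ScopeState{stack, locks, {}, this};
    callee->locals.resize(callee_max_locals);
    return callee;
  }
  ScopeState* pop_scope() {
    ScopeState* c = caller;
    c->stack = stack;                // stack and locks survive the scope change
    c->locks = locks;
    delete this;
    return c;
  }
};

int main() {
  ScopeState* root = new ScopeState{{"arg0"}, {}, {"this"}, nullptr};
  ScopeState* inlined = root->push_scope(2);
  assert(inlined->stack.size() == 1 && inlined->locals.size() == 2);
  ScopeState* back = inlined->pop_scope();
  assert(back == root && back->stack.size() == 1);
  delete root;
  return 0;
}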

// SSA form IR support
void setup_phi_for_stack(BlockBegin* b, int index);
void setup_phi_for_local(BlockBegin* b, int index);
@ -298,16 +270,18 @@ class ValueStack: public CompilationResourceObj {
{ \
int cur_index; \
ValueStack* cur_state = v_state; \
Value v_value; \
{ \
for_each_stack_value(cur_state, cur_index, v_value) { \
v_code; \
} \
} \
Value v_value; \
for_each_state(cur_state) { \
for_each_local_value(cur_state, cur_index, v_value) { \
v_code; \
{ \
for_each_local_value(cur_state, cur_index, v_value) { \
v_code; \
} \
} \
{ \
for_each_stack_value(cur_state, cur_index, v_value) { \
v_code; \
} \
} \
} \
}
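
For readers not used to these C1-style iteration macros, here is a self-contained toy version of the pattern (made-up names, far simpler than the macro in the diff): the walker expands the user-supplied statement once per value, for every state in the caller chain.

// Illustrative pattern only -- a toy "for each value in every state" macro.
// v_code is expanded in a scope where `value` is bound to each element.
#include <cstdio>
#include <vector>

struct ToyState {
  std::vector<int> stack;
  std::vector<int> locals;
  ToyState*        caller;
};

#define toy_for_each_state(state)                                   \
  for (; state != nullptr; state = state->caller)

#define toy_for_each_state_value(v_state, value, v_code)            \
  {                                                                 \
    ToyState* cur_state = v_state;                                  \
    toy_for_each_state(cur_state) {                                 \
      for (int value : cur_state->locals) { v_code; }               \
      for (int value : cur_state->stack)  { v_code; }               \
    }                                                               \
  }

int main() {
  ToyState caller = { {1, 2}, {3},    nullptr };
  ToyState callee = { {4},    {5, 6}, &caller };
  int sum = 0;
  toy_for_each_state_value(&callee, v, sum += v);
  std::printf("sum over both states: %d\n", sum);   // prints 21
  return 0;
}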

@ -216,9 +216,6 @@
develop(bool, DeoptC1, true, \
"Use deoptimization in C1") \
\
develop(bool, DeoptOnAsyncException, true, \
"Deoptimize upon Thread.stop(); improves precision of IR") \
\
develop(bool, PrintBailouts, false, \
"Print bailout and its reason") \
\

@ -448,3 +448,7 @@ thread.cpp c1_Compiler.hpp
top.hpp c1_globals.hpp

vmStructs.hpp c1_Runtime1.hpp

c1_Canonicalizer.cpp c1_ValueStack.hpp

c1_LIR.cpp c1_ValueStack.hpp