7128352: assert(obj_node == obj) failed
Compare uncasted object nodes. Reviewed-by: never
commit 791afc42da
parent 94927c382b
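The core of the fix: two ideal-graph nodes can refer to the same object yet not be pointer-equal, because one of them may be wrapped in a cast node (for example a CheckCastPP inserted by C2). The new Node::eqv_uncast() introduced below compares nodes after stripping such casts, and the lock-elimination code and its asserts switch from raw == comparison to that helper. What follows is a minimal, self-contained C++ sketch of that idea only; Node, CastNode and their members here are simplified stand-ins for illustration, not the real C2 types.

#include <cassert>
#include <cstddef>

struct Node {
  virtual ~Node() {}
  virtual bool is_cast() const { return false; }
  virtual Node* input() const { return NULL; }

  // Walk through any chain of cast nodes and return the underlying node.
  Node* uncast() {
    Node* p = this;
    while (p->is_cast() && p->input() != NULL) {
      p = p->input();
    }
    return p;
  }

  // Equivalence after stripping casts -- the comparison this commit switches to.
  bool eqv_uncast(Node* n) { return this->uncast() == n->uncast(); }
};

// Stand-in for a cast node such as CheckCastPP: it forwards a single input.
struct CastNode : Node {
  Node* in;
  explicit CastNode(Node* n) : in(n) {}
  virtual bool is_cast() const { return true; }
  virtual Node* input() const { return in; }
};

int main() {
  Node obj;                        // the object node, e.g. an allocation
  CastNode cast(&obj);             // the same object seen through a cast

  assert(&cast != &obj);           // raw pointer comparison fails here ...
  assert(cast.eqv_uncast(&obj));   // ... but the uncasted comparison holds
  return 0;
}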
@@ -1386,7 +1386,7 @@ bool AbstractLockNode::find_matching_unlock(const Node* ctrl, LockNode* lock,
       Node *n = ctrl_proj->in(0);
       if (n != NULL && n->is_Unlock()) {
         UnlockNode *unlock = n->as_Unlock();
-        if ((lock->obj_node() == unlock->obj_node()) &&
+        if (lock->obj_node()->eqv_uncast(unlock->obj_node()) &&
             BoxLockNode::same_slot(lock->box_node(), unlock->box_node()) &&
             !unlock->is_eliminated()) {
           lock_ops.append(unlock);
@@ -1431,7 +1431,7 @@ LockNode *AbstractLockNode::find_matching_lock(UnlockNode* unlock) {
     }
     if (ctrl->is_Lock()) {
       LockNode *lock = ctrl->as_Lock();
-      if ((lock->obj_node() == unlock->obj_node()) &&
+      if (lock->obj_node()->eqv_uncast(unlock->obj_node()) &&
           BoxLockNode::same_slot(lock->box_node(), unlock->box_node())) {
         lock_result = lock;
       }
@@ -1462,7 +1462,7 @@ bool AbstractLockNode::find_lock_and_unlock_through_if(Node* node, LockNode* loc
       }
       if (lock1_node != NULL && lock1_node->is_Lock()) {
         LockNode *lock1 = lock1_node->as_Lock();
-        if ((lock->obj_node() == lock1->obj_node()) &&
+        if (lock->obj_node()->eqv_uncast(lock1->obj_node()) &&
             BoxLockNode::same_slot(lock->box_node(), lock1->box_node()) &&
             !lock1->is_eliminated()) {
           lock_ops.append(lock1);
@@ -1650,7 +1650,7 @@ bool LockNode::is_nested_lock_region() {
     for (int idx = 0; idx < num_mon; idx++) {
       Node* obj_node = sfn->monitor_obj(jvms, idx);
       BoxLockNode* box_node = BoxLockNode::box_node(sfn->monitor_box(jvms, idx));
-      if ((obj_node == obj) && (box_node->stack_slot() < stk_slot)) {
+      if ((box_node->stack_slot() < stk_slot) && obj_node->eqv_uncast(obj)) {
        return true;
      }
    }
@@ -1597,7 +1597,7 @@ Node *PhiNode::Ideal(PhaseGVN *phase, bool can_reshape) {
     bool is_loop = (r->is_Loop() && r->req() == 3);
     // Then, check if there is a data loop when phi references itself directly
     // or through other data nodes.
-    if (is_loop && !phase->eqv_uncast(uin, in(LoopNode::EntryControl)) ||
+    if (is_loop && !uin->eqv_uncast(in(LoopNode::EntryControl)) ||
         !is_loop && is_unsafe_data_reference(uin)) {
       // Break this data loop to avoid creation of a dead loop.
       if (can_reshape) {
@@ -819,7 +819,7 @@ inline Node* LibraryCallKit::generate_limit_guard(Node* offset,
   if (stopped())
     return NULL;                // already stopped
   bool zero_offset = _gvn.type(offset) == TypeInt::ZERO;
-  if (zero_offset && _gvn.eqv_uncast(subseq_length, array_length))
+  if (zero_offset && subseq_length->eqv_uncast(array_length))
     return NULL;                // common case of whole-array copy
   Node* last = subseq_length;
   if (!zero_offset)             // last += offset
@@ -4667,7 +4667,7 @@ LibraryCallKit::generate_arraycopy(const TypePtr* adr_type,
   if (ReduceBulkZeroing
       && !ZeroTLAB              // pointless if already zeroed
       && basic_elem_type != T_CONFLICT // avoid corner case
-      && !_gvn.eqv_uncast(src, dest)
+      && !src->eqv_uncast(dest)
       && ((alloc = tightly_coupled_allocation(dest, slow_region))
           != NULL)
       && _gvn.find_int_con(alloc->in(AllocateNode::ALength), 1) > 0
@@ -4745,7 +4745,7 @@ LibraryCallKit::generate_arraycopy(const TypePtr* adr_type,
     // copy_length is 0.
     if (!stopped() && dest_uninitialized) {
       Node* dest_length = alloc->in(AllocateNode::ALength);
-      if (_gvn.eqv_uncast(copy_length, dest_length)
+      if (copy_length->eqv_uncast(dest_length)
           || _gvn.find_int_con(dest_length, 1) <= 0) {
         // There is no zeroing to do. No need for a secondary raw memory barrier.
       } else {
@@ -4791,7 +4791,7 @@ LibraryCallKit::generate_arraycopy(const TypePtr* adr_type,
   // with its attendant messy index arithmetic, and upgrade
   // the copy to a more hardware-friendly word size of 64 bits.
   Node* tail_ctl = NULL;
-  if (!stopped() && !_gvn.eqv_uncast(dest_tail, dest_length)) {
+  if (!stopped() && !dest_tail->eqv_uncast(dest_length)) {
     Node* cmp_lt = _gvn.transform( new(C,3) CmpINode(dest_tail, dest_length) );
     Node* bol_lt = _gvn.transform( new(C,2) BoolNode(cmp_lt, BoolTest::lt) );
     tail_ctl = generate_slow_guard(bol_lt, NULL);
@@ -100,7 +100,7 @@ bool BoxLockNode::is_simple_lock_region(LockNode** unique_lock, Node* obj) {
       AbstractLockNode* alock = n->as_AbstractLock();
       // Check lock's box since box could be referenced by Lock's debug info.
       if (alock->box_node() == this) {
-        if (alock->obj_node() == obj) {
+        if (alock->obj_node()->eqv_uncast(obj)) {
          if ((unique_lock != NULL) && alock->is_Lock()) {
            if (lock == NULL) {
              lock = alock->as_Lock();
@@ -121,7 +121,7 @@ bool BoxLockNode::is_simple_lock_region(LockNode** unique_lock, Node* obj) {
     Node* n = this->raw_out(i);
     if (n->is_FastLock()) {
       FastLockNode* flock = n->as_FastLock();
-      assert((flock->box_node() == this) && (flock->obj_node() == obj),"");
+      assert((flock->box_node() == this) && flock->obj_node()->eqv_uncast(obj),"");
     }
     if (n->is_SafePoint() && n->as_SafePoint()->jvms()) {
       SafePointNode* sfn = n->as_SafePoint();
@@ -135,7 +135,7 @@ bool BoxLockNode::is_simple_lock_region(LockNode** unique_lock, Node* obj) {
         Node* obj_node = sfn->monitor_obj(jvms, idx);
         Node* box_node = sfn->monitor_box(jvms, idx);
         if (box_node == this) {
-          assert(obj_node == obj,"");
+          assert(obj_node->eqv_uncast(obj),"");
         }
       }
     }
@@ -1818,7 +1818,7 @@ void PhaseMacroExpand::mark_eliminated_box(Node* oldbox, Node* obj) {
       AbstractLockNode* alock = u->as_AbstractLock();
       // Check lock's box since box could be referenced by Lock's debug info.
       if (alock->box_node() == oldbox) {
-        assert(alock->obj_node() == obj, "");
+        assert(alock->obj_node()->eqv_uncast(obj), "");
         // Mark eliminated all related locks and unlocks.
         alock->set_non_esc_obj();
       }
@@ -1845,7 +1845,7 @@ void PhaseMacroExpand::mark_eliminated_box(Node* oldbox, Node* obj) {
     Node* u = oldbox->raw_out(i);
     if (u->is_AbstractLock()) {
       AbstractLockNode* alock = u->as_AbstractLock();
-      if (alock->obj_node() == obj && alock->box_node() == oldbox) {
+      if (alock->box_node() == oldbox && alock->obj_node()->eqv_uncast(obj)) {
        // Replace Box and mark eliminated all related locks and unlocks.
        alock->set_non_esc_obj();
        _igvn.hash_delete(alock);
@@ -1854,7 +1854,7 @@ void PhaseMacroExpand::mark_eliminated_box(Node* oldbox, Node* obj) {
         next_edge = false;
       }
     }
-    if (u->is_FastLock() && u->as_FastLock()->obj_node() == obj) {
+    if (u->is_FastLock() && u->as_FastLock()->obj_node()->eqv_uncast(obj)) {
      FastLockNode* flock = u->as_FastLock();
      assert(flock->box_node() == oldbox, "sanity");
      _igvn.hash_delete(flock);
@@ -1875,7 +1875,7 @@ void PhaseMacroExpand::mark_eliminated_box(Node* oldbox, Node* obj) {
       for (int idx = 0; idx < num_mon; idx++) {
         Node* obj_node = sfn->monitor_obj(jvms, idx);
         Node* box_node = sfn->monitor_box(jvms, idx);
-        if (box_node == oldbox && obj_node == obj) {
+        if (box_node == oldbox && obj_node->eqv_uncast(obj)) {
          int j = jvms->monitor_box_offset(idx);
          _igvn.hash_delete(u);
          u->set_req(j, newbox);
@@ -1912,7 +1912,7 @@ void PhaseMacroExpand::mark_eliminated_locking_nodes(AbstractLockNode *alock) {
         alock = u->as_AbstractLock();
         if (alock->box_node() == box_node) {
           // Verify that this Box is referenced only by related locks.
-          assert(alock->obj_node() == obj, "");
+          assert(alock->obj_node()->eqv_uncast(obj), "");
          // Mark all related locks and unlocks.
          alock->set_nested();
        }
@@ -1931,7 +1931,8 @@ void PhaseMacroExpand::mark_eliminated_locking_nodes(AbstractLockNode *alock) {
     Node* obj = alock->obj_node();
     for (uint j = 0; j < obj->outcnt(); j++) {
       Node* o = obj->raw_out(j);
-      if (o->is_AbstractLock() && o->as_AbstractLock()->obj_node() == obj) {
+      if (o->is_AbstractLock() &&
+          o->as_AbstractLock()->obj_node()->eqv_uncast(obj)) {
         alock = o->as_AbstractLock();
         Node* box = alock->box_node();
         // Replace old box node with new eliminated box for all users
@@ -2201,7 +2201,7 @@ Node *StoreNode::Ideal(PhaseGVN *phase, bool can_reshape) {
   // unsafe if I have intervening uses... Also disallowed for StoreCM
   // since they must follow each StoreP operation. Redundant StoreCMs
   // are eliminated just before matching in final_graph_reshape.
-  if (mem->is_Store() && phase->eqv_uncast(mem->in(MemNode::Address), address) &&
+  if (mem->is_Store() && mem->in(MemNode::Address)->eqv_uncast(address) &&
       mem->Opcode() != Op_StoreCM) {
     // Looking at a dead closed cycle of memory?
     assert(mem != mem->in(MemNode::Memory), "dead loop in StoreNode::Ideal");
@@ -2274,16 +2274,16 @@ Node *StoreNode::Identity( PhaseTransform *phase ) {
 
   // Load then Store? Then the Store is useless
   if (val->is_Load() &&
-      phase->eqv_uncast( val->in(MemNode::Address), adr ) &&
-      phase->eqv_uncast( val->in(MemNode::Memory ), mem ) &&
+      val->in(MemNode::Address)->eqv_uncast(adr) &&
+      val->in(MemNode::Memory )->eqv_uncast(mem) &&
       val->as_Load()->store_Opcode() == Opcode()) {
     return mem;
   }
 
   // Two stores in a row of the same value?
   if (mem->is_Store() &&
-      phase->eqv_uncast( mem->in(MemNode::Address), adr ) &&
-      phase->eqv_uncast( mem->in(MemNode::ValueIn), val ) &&
+      mem->in(MemNode::Address)->eqv_uncast(adr) &&
+      mem->in(MemNode::ValueIn)->eqv_uncast(val) &&
       mem->Opcode() == Opcode()) {
     return mem;
   }
@@ -833,8 +833,20 @@ Node* Node::uncast() const {
 
 //---------------------------uncast_helper-------------------------------------
 Node* Node::uncast_helper(const Node* p) {
-  uint max_depth = 3;
-  for (uint i = 0; i < max_depth; i++) {
+#ifdef ASSERT
+  uint depth_count = 0;
+  const Node* orig_p = p;
+#endif
+
+  while (true) {
+#ifdef ASSERT
+    if (depth_count >= K) {
+      orig_p->dump(4);
+      if (p != orig_p)
+        p->dump(1);
+    }
+    assert(depth_count++ < K, "infinite loop in Node::uncast_helper");
+#endif
     if (p == NULL || p->req() != 2) {
       break;
     } else if (p->is_ConstraintCast()) {
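A side note on the uncast_helper change above: the old helper gave up after three levels of casts, so a node buried under a longer cast chain was never fully stripped, while the new loop runs until it reaches a non-cast node and, in debug builds only, asserts if it iterates K times (which would mean a cycle of casts). Below is a self-contained sketch of that guard pattern, assuming K plays the same role as HotSpot's 1024 constant; CastLink and strip_casts are made-up names for illustration, not HotSpot code.

#include <cassert>
#include <cstddef>

const unsigned K = 1024;     // assumed stand-in for HotSpot's K (1024)

struct CastLink {            // stand-in for a chain of cast nodes
  CastLink* next;            // NULL marks the underlying, non-cast node
  explicit CastLink(CastLink* n) : next(n) {}
};

CastLink* strip_casts(CastLink* p) {
#ifdef ASSERT
  unsigned depth_count = 0;  // debug-only guard against a cycle of casts
#endif
  while (p != NULL && p->next != NULL) {
#ifdef ASSERT
    assert(depth_count++ < K && "infinite loop while stripping casts");
#endif
    p = p->next;             // follow the cast to its input
  }
  return p;
}

int main() {
  CastLink base(NULL);
  CastLink c1(&base), c2(&c1), c3(&c2), c4(&c3);  // four nested casts
  assert(strip_casts(&c4) == &base);              // no depth-3 cutoff any more
  return 0;
}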
@@ -429,6 +429,10 @@ protected:
 
   // Strip away casting. (It is depth-limited.)
   Node* uncast() const;
+  // Return whether two Nodes are equivalent, after stripping casting.
+  bool eqv_uncast(const Node* n) const {
+    return (this->uncast() == n->uncast());
+  }
 
 private:
   static Node* uncast_helper(const Node* n);
@@ -256,11 +256,6 @@ public:
   // For pessimistic optimizations this is simply pointer equivalence.
   bool eqv(const Node* n1, const Node* n2) const { return n1 == n2; }
 
-  // Return whether two Nodes are equivalent, after stripping casting.
-  bool eqv_uncast(const Node* n1, const Node* n2) const {
-    return eqv(n1->uncast(), n2->uncast());
-  }
-
   // For pessimistic passes, the return type must monotonically narrow.
   // For optimistic passes, the return type must monotonically widen.
   // It is possible to get into a "death march" in either type of pass,
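With eqv_uncast now a member of Node (the node.hpp hunk above), the PhaseTransform overload removed here is redundant, and call sites no longer need a phase pointer just to compare two nodes. The SubNode::Value hunk below is that mechanical migration; an illustrative restatement of its one-line change:

if (phase->eqv_uncast(in1, in2)) return add_id();   // old form, needs a PhaseTransform
if (in1->eqv_uncast(in2))        return add_id();   // new form, a plain Node member call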
@@ -91,7 +91,7 @@ const Type *SubNode::Value( PhaseTransform *phase ) const {
 
   // Not correct for SubFnode and AddFNode (must check for infinity)
   // Equal? Subtract is zero
-  if (phase->eqv_uncast(in1, in2)) return add_id();
+  if (in1->eqv_uncast(in2)) return add_id();
 
   // Either input is BOTTOM ==> the result is the local BOTTOM
   if( t1 == Type::BOTTOM || t2 == Type::BOTTOM )
@@ -30,7 +30,7 @@
  * @run main/othervm -Xcomp -Xbatch StackOverflow
  */
 
-class StackOverflow {
+public class StackOverflow {
   static String stackOverflow_largeFrame_liveOopForGC;
 
   public static int stackOverflow_largeFrame(int call_count, String liveOopForGC) {