6747051: Improve code and implicit null check generation for compressed oops
Push DecodeN node below the Null check to the non-null path to use the mach node without 0 test.

Reviewed-by: rasbold, never
commit b15796424e
parent 4b1e242299
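
The point of the change, in one line of arithmetic: with a non-zero heap base, a general DecodeN must map the narrow value 0 to NULL rather than to heap_base, so it carries a == 0 test. Once the decode sits below the null check on the non-null path, the matcher can select the variant without that test. A toy model of the two forms (plain C++, not HotSpot code; the base and shift values are assumptions for illustration):

    #include <cstdint>

    static const uintptr_t kHeapBase = 0x800000000ULL; // assumed example base
    static const int       kShift    = 3;              // assumed 8-byte alignment

    // General decode: the narrow value 0 must become NULL, not kHeapBase,
    // so it needs a test.
    static inline void* decode(uint32_t narrow) {
      return narrow == 0 ? nullptr
                         : (void*)(kHeapBase + ((uintptr_t)narrow << kShift));
    }

    // decode_not_null: legal only where narrow != 0 is already established,
    // e.g. on the non-null projection of a null check. No test needed.
    static inline void* decode_not_null(uint32_t narrow) {
      return (void*)(kHeapBase + ((uintptr_t)narrow << kShift));
    }

    int main() {
      uint32_t n = 42;
      return (decode(n) == decode_not_null(n) && decode(0) == nullptr) ? 0 : 1;
    }
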
@@ -249,8 +249,6 @@ void AbstractAssembler::block_comment(const char* comment) {
 bool MacroAssembler::needs_explicit_null_check(intptr_t offset) {
   // Exception handler checks the nmethod's implicit null checks table
   // only when this method returns false.
-#ifndef SPARC
-  // Sparc does not have based addressing
   if (UseCompressedOops) {
     // The first page after heap_base is unmapped and
     // the 'offset' is equal to [heap_base + offset] for
@@ -261,7 +259,6 @@ bool MacroAssembler::needs_explicit_null_check(intptr_t offset) {
       offset = (intptr_t)(pointer_delta((void*)offset, (void*)heap_base, 1));
     }
   }
-#endif // SPARC
   return offset < 0 || os::vm_page_size() <= offset;
 }
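The function above decides whether a memory access may rely on the hardware trap instead of an explicit test. With compressed oops, a load through a NULL narrow oop computes heap_base + offset, so the fault address is rebased against heap_base before the page-range test. A standalone model (assumed page size and heap base; the real check reads them from Universe and os):

    #include <cstdint>
    #include <cstddef>

    // Simplified model of MacroAssembler::needs_explicit_null_check().
    // Returns true when the access would NOT fault in a protected page,
    // so an explicit NULL test must be emitted.
    static bool model_needs_explicit_null_check(intptr_t offset,
                                                uintptr_t heap_base,
                                                size_t page_size) {
      if (heap_base != 0 && (uintptr_t)offset >= heap_base) {
        // Narrow-oop fault address: rebase to a small offset from heap_base.
        offset = (intptr_t)((uintptr_t)offset - heap_base);
      }
      return offset < 0 || (intptr_t)page_size <= offset;
    }

    int main() {
      const uintptr_t base = 0x800000000ULL; // assumed example heap base
      bool small  = model_needs_explicit_null_check(16, base, 4096);      // false: trap page catches it
      bool huge   = model_needs_explicit_null_check(1 << 20, base, 4096); // true: misses the trap page
      bool narrow = model_needs_explicit_null_check((intptr_t)(base + 16),
                                                    base, 4096);          // false: page after heap_base
      return (!small && huge && !narrow) ? 0 : 1;
    }
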
@@ -1769,6 +1769,51 @@ Node *PhiNode::Ideal(PhaseGVN *phase, bool can_reshape) {
     }
   }
 
+#ifdef _LP64
+  // Push DecodeN down through phi.
+  // The rest of phi graph will transform by split EncodeP node through phis up.
+  if (UseCompressedOops && can_reshape && progress == NULL) {
+    bool may_push = true;
+    bool has_decodeN = false;
+    Node* in_decodeN = NULL;
+    for (uint i=1; i<req(); ++i) {// For all paths in
+      Node *ii = in(i);
+      if (ii->is_DecodeN() && ii->bottom_type() == bottom_type()) {
+        has_decodeN = true;
+        in_decodeN = ii->in(1);
+      } else if (!ii->is_Phi()) {
+        may_push = false;
+      }
+    }
+
+    if (has_decodeN && may_push) {
+      PhaseIterGVN *igvn = phase->is_IterGVN();
+      // Note: in_decodeN is used only to define the type of new phi here.
+      PhiNode *new_phi = PhiNode::make_blank(in(0), in_decodeN);
+      uint orig_cnt = req();
+      for (uint i=1; i<req(); ++i) {// For all paths in
+        Node *ii = in(i);
+        Node* new_ii = NULL;
+        if (ii->is_DecodeN()) {
+          assert(ii->bottom_type() == bottom_type(), "sanity");
+          new_ii = ii->in(1);
+        } else {
+          assert(ii->is_Phi(), "sanity");
+          if (ii->as_Phi() == this) {
+            new_ii = new_phi;
+          } else {
+            new_ii = new (phase->C, 2) EncodePNode(ii, in_decodeN->bottom_type());
+            igvn->register_new_node_with_optimizer(new_ii);
+          }
+        }
+        new_phi->set_req(i, new_ii);
+      }
+      igvn->register_new_node_with_optimizer(new_phi, this);
+      progress = new (phase->C, 2) DecodeNNode(new_phi, bottom_type());
+    }
+  }
+#endif
+
   return progress;              // Return any progress
 }
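The rewrite above relies on DecodeN distributing over Phi: merging decoded oops is the same as decoding the merged narrow oops, and a phi input that is not itself a DecodeN can be fed through a new EncodeP since DecodeN(EncodeP(x)) == x. A self-contained toy check of both identities (assumed base and shift; plain C++, not HotSpot types):

    #include <cassert>
    #include <cstdint>

    static const uintptr_t kBase = 0x800000000ULL; // assumed example heap base

    static void* decode(uint32_t n) {
      return n == 0 ? nullptr : (void*)(kBase + ((uintptr_t)n << 3));
    }
    static uint32_t encode(void* p) {
      return p == nullptr ? 0 : (uint32_t)(((uintptr_t)p - kBase) >> 3);
    }

    int main() {
      uint32_t a = 42, b = 7;
      for (int take_a = 0; take_a < 2; take_a++) {
        // Phi(DecodeN(a), DecodeN(b)) ...
        void* merged_wide = take_a ? decode(a) : decode(b);
        // ... equals DecodeN(Phi(a, b)) on either incoming path.
        uint32_t merged_narrow = take_a ? a : b;
        assert(decode(merged_narrow) == merged_wide);
        // Non-DecodeN phi inputs are wrapped: DecodeN(EncodeP(x)) == x.
        assert(decode(encode(merged_wide)) == merged_wide);
      }
      return 0;
    }
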
@@ -2075,6 +2075,44 @@ static void final_graph_reshaping_impl( Node *n, Final_Reshape_Counts &fpu ) {
     }
   }
 
+#ifdef _LP64
+  case Op_CastPP:
+    if (n->in(1)->is_DecodeN() && UseImplicitNullCheckForNarrowOop) {
+      Compile* C = Compile::current();
+      Node* in1 = n->in(1);
+      const Type* t = n->bottom_type();
+      Node* new_in1 = in1->clone();
+      new_in1->as_DecodeN()->set_type(t);
+
+      if (!Matcher::clone_shift_expressions) {
+        //
+        // x86, ARM and friends can handle 2 adds in addressing mode
+        // and Matcher can fold a DecodeN node into address by using
+        // a narrow oop directly and do implicit NULL check in address:
+        //
+        //   [R12 + narrow_oop_reg<<3 + offset]
+        //   NullCheck narrow_oop_reg
+        //
+        // On other platforms (Sparc) we have to keep new DecodeN node and
+        // use it to do implicit NULL check in address:
+        //
+        //   decode_not_null narrow_oop_reg, base_reg
+        //   [base_reg + offset]
+        //   NullCheck base_reg
+        //
+        // Pin the new DecodeN node to non-null path on these platforms (Sparc)
+        // to keep the information to which NULL check the new DecodeN node
+        // corresponds to use it as value in implicit_null_check().
+        //
+        new_in1->set_req(0, n->in(0));
+      }
+
+      n->subsume_by(new_in1);
+      if (in1->outcnt() == 0) {
+        in1->disconnect_inputs(NULL);
+      }
+    }
+    break;
+
   case Op_CmpP:
     // Do this transformation here to preserve CmpPNode::sub() and
     // other TypePtr related Ideal optimizations (for example, ptr nullness).
@@ -2094,24 +2132,44 @@ static void final_graph_reshaping_impl( Node *n, Final_Reshape_Counts &fpu ) {
       } else if (in2->Opcode() == Op_ConP) {
         const Type* t = in2->bottom_type();
         if (t == TypePtr::NULL_PTR && UseImplicitNullCheckForNarrowOop) {
-          if (Matcher::clone_shift_expressions) {
-            // x86, ARM and friends can handle 2 adds in addressing mode.
-            // Decode a narrow oop and do implicit NULL check in address
-            //   [R12 + narrow_oop_reg<<3 + offset]
-            new_in2 = ConNode::make(C, TypeNarrowOop::NULL_PTR);
-          } else {
-            // Don't replace CmpP(o ,null) if 'o' is used in AddP
-            // to generate implicit NULL check on Sparc where
-            // narrow oops can't be used in address.
-            uint i = 0;
-            for (; i < in1->outcnt(); i++) {
-              if (in1->raw_out(i)->is_AddP())
-                break;
-            }
-            if (i >= in1->outcnt()) {
-              new_in2 = ConNode::make(C, TypeNarrowOop::NULL_PTR);
-            }
-          }
+          new_in2 = ConNode::make(C, TypeNarrowOop::NULL_PTR);
+          //
+          // This transformation together with CastPP transformation above
+          // will generate code for implicit NULL checks for compressed oops.
+          //
+          // The original code after Optimize()
+          //
+          //    LoadN memory, narrow_oop_reg
+          //    decode narrow_oop_reg, base_reg
+          //    CmpP base_reg, NULL
+          //    CastPP base_reg // NotNull
+          //    Load [base_reg + offset], val_reg
+          //
+          // after these transformations will be
+          //
+          //    LoadN memory, narrow_oop_reg
+          //    CmpN narrow_oop_reg, NULL
+          //    decode_not_null narrow_oop_reg, base_reg
+          //    Load [base_reg + offset], val_reg
+          //
+          // and the uncommon path (== NULL) will use narrow_oop_reg directly
+          // since narrow oops can be used in debug info now (see the code in
+          // final_graph_reshaping_walk()).
+          //
+          // At the end the code will be matched to
+          // on x86:
+          //
+          //    Load_narrow_oop memory, narrow_oop_reg
+          //    Load [R12 + narrow_oop_reg<<3 + offset], val_reg
+          //    NullCheck narrow_oop_reg
+          //
+          // and on sparc:
+          //
+          //    Load_narrow_oop memory, narrow_oop_reg
+          //    decode_not_null narrow_oop_reg, base_reg
+          //    Load [base_reg + offset], val_reg
+          //    NullCheck base_reg
+          //
         } else if (t->isa_oopptr()) {
          new_in2 = ConNode::make(C, t->make_narrowoop());
         }
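How the matched x86 form gets its null check for free: after CmpP becomes CmpN, the decoded oop's only remaining use is the address [R12 + narrow_oop_reg<<3 + offset], with R12 holding the heap base. If narrow_oop_reg is 0 the effective address is heap_base + offset, which lands in the unmapped page right after the heap base, so the load itself traps and needs_explicit_null_check() (first hunk above) classifies the fault as an implicit null check. A sketch of that address arithmetic (assumed base, shift, and page size; not HotSpot code):

    #include <cstdint>

    static const uintptr_t kHeapBase = 0x800000000ULL; // assumed, kept in R12
    static const uintptr_t kPageSize = 4096;           // assumed page size

    // Effective address of the matched form [R12 + narrow_oop_reg<<3 + offset].
    static uintptr_t effective_address(uint32_t narrow, uintptr_t offset) {
      return kHeapBase + ((uintptr_t)narrow << 3) + offset;
    }

    int main() {
      // A NULL narrow oop faults in the guard page right after the heap base,
      // so the hardware fault doubles as the null check.
      uintptr_t fault = effective_address(0, 16);
      bool in_guard_page = (fault - kHeapBase) < kPageSize;
      return in_guard_page ? 0 : 1;
    }
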
@@ -2128,6 +2186,49 @@ static void final_graph_reshaping_impl( Node *n, Final_Reshape_Counts &fpu ) {
       }
     }
     break;
 
+  case Op_DecodeN:
+    assert(!n->in(1)->is_EncodeP(), "should be optimized out");
+    break;
+
+  case Op_EncodeP: {
+    Node* in1 = n->in(1);
+    if (in1->is_DecodeN()) {
+      n->subsume_by(in1->in(1));
+    } else if (in1->Opcode() == Op_ConP) {
+      Compile* C = Compile::current();
+      const Type* t = in1->bottom_type();
+      if (t == TypePtr::NULL_PTR) {
+        n->subsume_by(ConNode::make(C, TypeNarrowOop::NULL_PTR));
+      } else if (t->isa_oopptr()) {
+        n->subsume_by(ConNode::make(C, t->make_narrowoop()));
+      }
+    }
+    if (in1->outcnt() == 0) {
+      in1->disconnect_inputs(NULL);
+    }
+    break;
+  }
+
+  case Op_Phi:
+    if (n->as_Phi()->bottom_type()->isa_narrowoop()) {
+      // The EncodeP optimization may create Phi with the same edges
+      // for all paths. It is not handled well by Register Allocator.
+      Node* unique_in = n->in(1);
+      assert(unique_in != NULL, "");
+      uint cnt = n->req();
+      for (uint i = 2; i < cnt; i++) {
+        Node* m = n->in(i);
+        assert(m != NULL, "");
+        if (unique_in != m)
+          unique_in = NULL;
+      }
+      if (unique_in != NULL) {
+        n->subsume_by(unique_in);
+      }
+    }
+    break;
+
+#endif
 
   case Op_ModI:
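The new Op_Phi case above exists because the EncodeP rewrite can leave a phi whose inputs are all the same node, which the register allocator handles poorly. The scan starts at input 1 because slot 0 of a PhiNode is the control edge. A minimal sketch of the same scan over plain pointers (illustrative only, not HotSpot types):

    #include <cassert>

    // Returns the unique input if inputs 1..req-1 are all the same,
    // otherwise nullptr (slot 0 plays the role of the control edge).
    template <typename T>
    T* unique_input(T* const* in, unsigned req) {
      T* unique = in[1];
      for (unsigned i = 2; i < req; i++) {
        if (in[i] != unique) unique = nullptr;
      }
      return unique;
    }

    int main() {
      int x = 0, y = 1;
      int* same[3] = {nullptr, &x, &x};
      int* diff[3] = {nullptr, &x, &y};
      assert(unique_input(same, 3) == &x);        // would subsume_by(&x)
      assert(unique_input(diff, 3) == nullptr);   // phi stays
      return 0;
    }
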
@@ -433,8 +433,8 @@ Node *ConstraintCastNode::Ideal_DU_postCCP( PhaseCCP *ccp ) {
 // If not converting int->oop, throw away cast after constant propagation
 Node *CastPPNode::Ideal_DU_postCCP( PhaseCCP *ccp ) {
   const Type *t = ccp->type(in(1));
-  if (!t->isa_oop_ptr()) {
-    return NULL; // do not transform raw pointers
+  if (!t->isa_oop_ptr() || in(1)->is_DecodeN()) {
+    return NULL; // do not transform raw pointers or narrow oops
   }
   return ConstraintCastNode::Ideal_DU_postCCP(ccp);
 }

@@ -840,7 +840,7 @@ Node *Matcher::xform( Node *n, int max_stack ) {
       _new2old_map.map(m->_idx, n);
 #endif
       if (m->in(0) != NULL) // m might be top
-        collect_null_checks(m);
+        collect_null_checks(m, n);
     } else {                  // Else just a regular 'ol guy
       m = n->clone();       // So just clone into new-space
 #ifdef ASSERT
@@ -1478,12 +1478,19 @@ MachNode *Matcher::ReduceInst( State *s, int rule, Node *&mem ) {
       m = _mem_node;
       assert(m != NULL && m->is_Mem(), "expecting memory node");
     }
-    if (m->adr_type() != mach->adr_type()) {
+    const Type* mach_at = mach->adr_type();
+    // DecodeN node consumed by an address may have different type
+    // than its input. Don't compare types for such case.
+    if (m->adr_type() != mach_at && m->in(MemNode::Address)->is_AddP() &&
+        m->in(MemNode::Address)->in(AddPNode::Address)->is_DecodeN()) {
+      mach_at = m->adr_type();
+    }
+    if (m->adr_type() != mach_at) {
       m->dump();
       tty->print_cr("mach:");
       mach->dump(1);
     }
-    assert(m->adr_type() == mach->adr_type(), "matcher should not change adr type");
+    assert(m->adr_type() == mach_at, "matcher should not change adr type");
   }
 #endif
 }
@@ -1995,7 +2002,7 @@ void Matcher::dump_old2new_map() {
 // it.  Used by later implicit-null-check handling.  Actually collects
 // either an IfTrue or IfFalse for the common NOT-null path, AND the ideal
 // value being tested.
-void Matcher::collect_null_checks( Node *proj ) {
+void Matcher::collect_null_checks( Node *proj, Node *orig_proj ) {
   Node *iff = proj->in(0);
   if( iff->Opcode() == Op_If ) {
     // During matching If's have Bool & Cmp side-by-side
@@ -2008,20 +2015,47 @@ void Matcher::collect_null_checks( Node *proj ) {
     if (ct == TypePtr::NULL_PTR ||
         (opc == Op_CmpN && ct == TypeNarrowOop::NULL_PTR)) {
 
+      bool push_it = false;
       if( proj->Opcode() == Op_IfTrue ) {
         extern int all_null_checks_found;
         all_null_checks_found++;
         if( b->_test._test == BoolTest::ne ) {
-          _null_check_tests.push(proj);
-          _null_check_tests.push(cmp->in(1));
+          push_it = true;
         }
       } else {
         assert( proj->Opcode() == Op_IfFalse, "" );
         if( b->_test._test == BoolTest::eq ) {
-          _null_check_tests.push(proj);
-          _null_check_tests.push(cmp->in(1));
+          push_it = true;
         }
       }
+      if( push_it ) {
+        _null_check_tests.push(proj);
+        Node* val = cmp->in(1);
+#ifdef _LP64
+        if (UseCompressedOops && !Matcher::clone_shift_expressions &&
+            val->bottom_type()->isa_narrowoop()) {
+          //
+          // Look for DecodeN node which should be pinned to orig_proj.
+          // On platforms (Sparc) which can not handle 2 adds
+          // in addressing mode we have to keep a DecodeN node and
+          // use it to do implicit NULL check in address.
+          //
+          // DecodeN node was pinned to non-null path (orig_proj) during
+          // CastPP transformation in final_graph_reshaping_impl().
+          //
+          uint cnt = orig_proj->outcnt();
+          for (uint i = 0; i < orig_proj->outcnt(); i++) {
+            Node* d = orig_proj->raw_out(i);
+            if (d->is_DecodeN() && d->in(1) == val) {
+              val = d;
+              val->set_req(0, NULL); // Unpin now.
+              break;
+            }
+          }
+        }
+#endif
+        _null_check_tests.push(val);
+      }
     }
   }
 }

|
@ -166,7 +166,7 @@ public:
|
||||
// List of IfFalse or IfTrue Nodes that indicate a taken null test.
|
||||
// List is valid in the post-matching space.
|
||||
Node_List _null_check_tests;
|
||||
void collect_null_checks( Node *proj );
|
||||
void collect_null_checks( Node *proj, Node *orig_proj );
|
||||
void validate_null_checks( );
|
||||
|
||||
Matcher( Node_List &proj_list );
|
||||
|