8144993: Elide redundant memory barrier after AllocationNode

Elide the memory barrier for an AllocationNode when the allocated object does not escape its initializer and a MemBarRelease node is present at the exit of the initializer method.
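
For illustration only (not part of the change; Point and make are hypothetical names): this is the shape of code the optimization targets. The constructor writes final fields, so the parser already emits a MemBarRelease at the exit of <init>; because `this` never escapes the constructor, the MemBarStoreStore that normally trails the allocation adds nothing.

class Point {
    final int x;
    final int y;

    Point(int x, int y) {
        this.x = x;  // final-field stores, covered by the MemBarRelease at <init> exit
        this.y = y;  // `this` is never published from inside the constructor
    }

    static Point make() {
        return new Point(1, 2);  // the barrier trailing this allocation is now elided
    }
}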

Reviewed-by: aph, mdoerr, goetz, kvn, asiebenborn
Hui Shi 2015-12-27 05:15:14 -08:00
parent c4a81b327d
commit 3767315e75
4 changed files with 48 additions and 4 deletions


@@ -1333,6 +1333,7 @@ AllocateNode::AllocateNode(Compile* C, const TypeFunc *atype,
   init_flags(Flag_is_macro);
   _is_scalar_replaceable = false;
   _is_non_escaping = false;
+  _is_allocation_MemBar_redundant = false;
   Node *topnode = C->top();

   init_req( TypeFunc::Control , ctrl );
@@ -1347,6 +1348,23 @@ AllocateNode::AllocateNode(Compile* C, const TypeFunc *atype,
   C->add_macro_node(this);
 }

+void AllocateNode::compute_MemBar_redundancy(ciMethod* initializer)
+{
+  assert(initializer != NULL &&
+         initializer->is_initializer() &&
+         !initializer->is_static(),
+         "unexpected initializer method");
+  BCEscapeAnalyzer* analyzer = initializer->get_bcea();
+  if (analyzer == NULL) {
+    return;
+  }
+
+  // Allocation node is first parameter in its initializer
+  if (analyzer->is_arg_stack(0) || analyzer->is_arg_local(0)) {
+    _is_allocation_MemBar_redundant = true;
+  }
+}
+
 //=============================================================================
 Node* AllocateArrayNode::Ideal(PhaseGVN *phase, bool can_reshape) {
   if (remove_dead_region(phase, can_reshape)) return this;
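
A note on the arg-0 check above: argument 0 of a non-static <init> is the receiver, so is_arg_stack(0)/is_arg_local(0) hold when the bytecode escape analysis proves the receiver does not escape globally from the initializer. A sketch of the two cases (class names hypothetical):

class EscapeExamples {
    static Object sink;  // a field another thread could read

    static final class Local {
        final int f;
        Local(int f) { this.f = f; }  // `this` never leaves <init>: the arg-0 check
    }                                 // succeeds, the allocation barrier is redundant

    static final class Escaping {
        final int f;
        Escaping(int f) {
            this.f = f;
            sink = this;  // `this` escapes inside <init>: the flag stays false
        }                 // and the allocation keeps its barrier
    }
}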


@@ -858,6 +858,8 @@ public:
   // Result of Escape Analysis
   bool _is_scalar_replaceable;
   bool _is_non_escaping;
+  // True when MemBar for new is redundant with MemBar at initializer exit
+  bool _is_allocation_MemBar_redundant;

   virtual uint size_of() const; // Size is bigger
   AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
@@ -923,6 +925,13 @@ public:
     InitializeNode* init = NULL;
     return _is_non_escaping || (((init = initialization()) != NULL) && init->does_not_escape());
   }
+
+  // If the object does not escape in its <init> method and a memory barrier
+  // is inserted at the exit of its <init>, the memory barrier for the
+  // allocation is not necessary. Invoke this method when the MemBar at the
+  // exit of the initializer post-dominates the allocation node.
+  void compute_MemBar_redundancy(ciMethod* initializer);
+  bool is_allocation_MemBar_redundant() { return _is_allocation_MemBar_redundant; }
 };
 //------------------------------AllocateArray---------------------------------
//------------------------------AllocateArray--------------------------------- //------------------------------AllocateArray---------------------------------


@@ -1522,11 +1522,20 @@ void PhaseMacroExpand::expand_allocate_common(

     // If initialization is performed by an array copy, any required
     // MemBarStoreStore was already added. If the object does not
-    // escape no need for a MemBarStoreStore. Otherwise we need a
-    // MemBarStoreStore so that stores that initialize this object
-    // can't be reordered with a subsequent store that makes this
-    // object accessible by other threads.
+    // escape, no need for a MemBarStoreStore. If the object does not
+    // escape in its initializer and a memory barrier (MemBarStoreStore
+    // or stronger) is already added at the exit of the initializer,
+    // there is also no need for a MemBarStoreStore. Otherwise we need
+    // a MemBarStoreStore so that stores that initialize this object
+    // can't be reordered with a subsequent store that makes this
+    // object accessible by other threads.
+    // Other threads include Java threads and JVM-internal threads
+    // (for example, concurrent GC threads). The current concurrent GC
+    // implementations, CMS and G1, do not scan newly created objects,
+    // so it is safe to skip the storestore barrier when the allocation
+    // does not escape.
     if (!alloc->does_not_escape_thread() &&
+        !alloc->is_allocation_MemBar_redundant() &&
         (init == NULL || !init->is_complete_with_arraycopy())) {
       if (init == NULL || init->req() < InitializeNode::RawStores) {
         // No InitializeNode or no stores captured by zeroing
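
For context (an illustration, not from the patch; names hypothetical): the barrier being decided on here guards racy publication. Without a StoreStore barrier between the initializing stores and the publishing store, a thread that reads the shared reference may observe default field values:

class RacyPublication {
    static final class Box {
        int value;
        Box(int v) { value = v; }
    }

    static Box shared;  // deliberately non-volatile: publication is a data race

    static void publisher() {
        shared = new Box(42);  // absent a StoreStore barrier, the store to `value`
    }                          // may be reordered after the store to `shared`

    static void reader() {
        Box b = shared;
        if (b != null) {
            System.out.println(b.value);  // may print 0 without the barrier
        }
    }
}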


@@ -962,6 +962,14 @@ void Parse::do_exits() {
       PPC64_ONLY(wrote_volatile() ||)
       (AlwaysSafeConstructors && wrote_fields()))) {
     _exits.insert_mem_bar(Op_MemBarRelease, alloc_with_final());
+
+    // If a memory barrier is created for final field writes and the
+    // allocation node does not escape the initializer method, then the
+    // barrier introduced by the allocation node can be removed.
+    if (DoEscapeAnalysis && alloc_with_final()) {
+      AllocateNode *alloc = AllocateNode::Ideal_allocation(alloc_with_final(), &_gvn);
+      alloc->compute_MemBar_redundancy(method());
+    }
     if (PrintOpto && (Verbose || WizardMode)) {
       method()->print_name();
       tty->print_cr(" writes finals and needs a memory barrier");
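
A possible way to observe the effect (a suggestion, not part of the commit; BarrierTest is a hypothetical name, while DoEscapeAnalysis and PrintOptoAssembly are existing HotSpot flags, the latter requiring -XX:+UnlockDiagnosticVMOptions): compile a small final-field constructor with and without escape analysis and compare the barriers emitted after the allocation.

// Run, for example, with:
//   java -XX:+UnlockDiagnosticVMOptions -XX:+PrintOptoAssembly -XX:+DoEscapeAnalysis BarrierTest
//   java -XX:+UnlockDiagnosticVMOptions -XX:+PrintOptoAssembly -XX:-DoEscapeAnalysis BarrierTest
// and compare the membar/storestore following the allocation in make().
public class BarrierTest {
    static final class Holder {
        final long v;
        Holder(long v) { this.v = v; }  // writes a final, never leaks `this`
    }

    static Holder h;

    static void make(long v) {
        h = new Holder(v);  // candidate allocation for barrier elision
    }

    public static void main(String[] args) {
        for (long i = 0; i < 1_000_000; i++) {
            make(i);  // warm up so C2 compiles make()
        }
        System.out.println(h.v);
    }
}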