8243339: AArch64: Obsolete UseBarriersForVolatile option

Reviewed-by: adinn, aph, drwhite
This commit is contained in:
Xiaohong Gong 2020-05-27 15:17:09 +08:00
parent 1357c01e57
commit bc669dfc82
17 changed files with 319 additions and 758 deletions

View File

@ -1361,17 +1361,12 @@ source %{
// traverse when searching from a card mark membar for the merge mem
// feeding a trailing membar or vice versa
// predicates controlling emit of ldr<x>/ldar<x> and associated dmb
// predicates controlling emit of ldr<x>/ldar<x>
bool unnecessary_acquire(const Node *barrier)
{
assert(barrier->is_MemBar(), "expecting a membar");
if (UseBarriersForVolatile) {
// we need to plant a dmb
return false;
}
MemBarNode* mb = barrier->as_MemBar();
if (mb->trailing_load()) {
@ -1390,26 +1385,15 @@ bool unnecessary_acquire(const Node *barrier)
// Predicate controlling translation of a volatile load: returns true
// iff the load must be emitted as ldar<x> (an acquiring load) rather
// than a plain ldr<x>.
//
// The old UseBarriersForVolatile escape hatch (plain ldr + dmb) has
// been obsoleted and the flag removed, so the predicate now depends
// only on whether C2 flagged the load node as an acquire.
bool needs_acquiring_load(const Node *n)
{
  assert(n->is_Load(), "expecting a load");
  LoadNode *ld = n->as_Load();
  return ld->is_acquire();
}
bool unnecessary_release(const Node *n)
{
assert((n->is_MemBar() &&
n->Opcode() == Op_MemBarRelease),
"expecting a release membar");
if (UseBarriersForVolatile) {
// we need to plant a dmb
return false;
}
n->Opcode() == Op_MemBarRelease),
"expecting a release membar");
MemBarNode *barrier = n->as_MemBar();
if (!barrier->leading()) {
@ -1437,11 +1421,6 @@ bool unnecessary_release(const Node *n)
bool unnecessary_volatile(const Node *n)
{
// assert n->is_MemBar();
if (UseBarriersForVolatile) {
// we need to plant a dmb
return false;
}
MemBarNode *mbvol = n->as_MemBar();
bool release = mbvol->trailing_store();
@ -1458,18 +1437,12 @@ bool unnecessary_volatile(const Node *n)
return release;
}
// predicates controlling emit of str<x>/stlr<x> and associated dmbs
// predicates controlling emit of str<x>/stlr<x>
// Predicate controlling translation of a volatile store: returns true
// iff the store must be emitted as stlr<x> (a releasing store) rather
// than a plain str<x>.
//
// The old UseBarriersForVolatile escape hatch (plain str + dmb) has
// been obsoleted and the flag removed, so the decision now rests
// solely on whether the store carries a trailing membar.
bool needs_releasing_store(const Node *n)
{
  // assert n->is_Store();
  StoreNode *st = n->as_Store();
  return st->trailing_membar() != NULL;
}
@ -1480,10 +1453,6 @@ bool needs_releasing_store(const Node *n)
bool needs_acquiring_load_exclusive(const Node *n)
{
assert(is_CAS(n->Opcode(), true), "expecting a compare and swap");
if (UseBarriersForVolatile) {
return false;
}
LoadStoreNode* ldst = n->as_LoadStore();
if (is_CAS(n->Opcode(), false)) {
assert(ldst->trailing_membar() != NULL, "expected trailing membar");

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2005, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2005, 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -1411,9 +1411,8 @@ void LIRGenerator::volatile_field_load(LIR_Address* address, LIR_Opr result,
// membar it's possible for a simple Dekker test to fail if loads
// use LD;DMB but stores use STLR. This can happen if C2 compiles
// the stores in one method and C1 compiles the loads in another.
if (! UseBarriersForVolatile) {
if (!is_c1_or_interpreter_only()) {
__ membar();
}
__ volatile_load_mem_reg(address, result, info);
}

View File

@ -85,9 +85,6 @@ define_pd_global(intx, InlineSmallCode, 1000);
\
product(bool, NearCpool, true, \
"constant pool is close to instructions") \
\
product(bool, UseBarriersForVolatile, false, \
"Use memory barriers to implement volatile accesses") \
product(bool, UseNeon, false, \
"Use Neon for CRC32 computation") \
product(bool, UseCRC32, false, \

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2004, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2004, 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -80,23 +80,12 @@ address JNI_FastGetField::generate_fast_get_int_field0(BasicType type) {
__ ldrw(rcounter, safepoint_counter_addr);
__ tbnz(rcounter, 0, slow);
if (!UseBarriersForVolatile) {
// Field may be volatile. See other usages of this flag.
__ membar(MacroAssembler::AnyAny);
__ mov(robj, c_rarg1);
} else if (JvmtiExport::can_post_field_access()) {
// It doesn't need to issue a full barrier here even if the field
// is volatile, since it has already used "ldar" for it.
if (JvmtiExport::can_post_field_access()) {
// Using barrier to order wrt. JVMTI check and load of result.
__ membar(Assembler::LoadLoad);
__ mov(robj, c_rarg1);
} else {
// Using address dependency to order wrt. load of result.
__ eor(robj, c_rarg1, rcounter);
__ eor(robj, robj, rcounter); // obj, since
// robj ^ rcounter ^ rcounter == robj
// robj is address dependent on rcounter.
}
if (JvmtiExport::can_post_field_access()) {
// Check to see if a field access watch has been set before we
// take the fast path.
unsigned long offset2;
@ -105,6 +94,14 @@ address JNI_FastGetField::generate_fast_get_int_field0(BasicType type) {
offset2);
__ ldrw(result, Address(result, offset2));
__ cbnzw(result, slow);
__ mov(robj, c_rarg1);
} else {
// Using address dependency to order wrt. load of result.
__ eor(robj, c_rarg1, rcounter);
__ eor(robj, robj, rcounter); // obj, since
// robj ^ rcounter ^ rcounter == robj
// robj is address dependent on rcounter.
}
// Both robj and rscratch1 are clobbered by try_resolve_jobject_in_native.

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -2487,7 +2487,7 @@ void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteContr
// membar it's possible for a simple Dekker test to fail if loads
// use LDR;DMB but stores use STLR. This can happen if C2 compiles
// the stores in one method and we interpret the loads in another.
if (! UseBarriersForVolatile) {
if (!is_c1_or_interpreter_only()){
Label notVolatile;
__ tbz(raw_flags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
__ membar(MacroAssembler::AnyAny);
@ -3083,7 +3083,7 @@ void TemplateTable::fast_accessfield(TosState state)
// membar it's possible for a simple Dekker test to fail if loads
// use LDR;DMB but stores use STLR. This can happen if C2 compiles
// the stores in one method and we interpret the loads in another.
if (! UseBarriersForVolatile) {
if (!is_c1_or_interpreter_only()) {
Label notVolatile;
__ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
__ membar(MacroAssembler::AnyAny);
@ -3145,7 +3145,7 @@ void TemplateTable::fast_xaccess(TosState state)
// membar it's possible for a simple Dekker test to fail if loads
// use LDR;DMB but stores use STLR. This can happen if C2 compiles
// the stores in one method and we interpret the loads in another.
if (! UseBarriersForVolatile) {
if (!is_c1_or_interpreter_only()) {
Label notVolatile;
__ ldrw(r3, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
ConstantPoolCacheEntry::flags_offset())));

View File

@ -220,7 +220,7 @@ void VM_Version::get_processor_features() {
// ThunderX
if (_cpu == CPU_CAVIUM && (_model == 0xA1)) {
if (_variant == 0) _features |= CPU_DMB_ATOMICS;
guarantee(_variant != 0, "Pre-release hardware no longer supported.");
if (FLAG_IS_DEFAULT(AvoidUnalignedAccesses)) {
FLAG_SET_DEFAULT(AvoidUnalignedAccesses, true);
}
@ -420,10 +420,6 @@ void VM_Version::get_processor_features() {
FLAG_SET_DEFAULT(UseUnalignedAccesses, true);
}
if (FLAG_IS_DEFAULT(UseBarriersForVolatile)) {
UseBarriersForVolatile = (_features & CPU_DMB_ATOMICS) != 0;
}
if (FLAG_IS_DEFAULT(UsePopCountInstruction)) {
FLAG_SET_DEFAULT(UsePopCountInstruction, true);
}

View File

@ -1,6 +1,6 @@
/*
* Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2019, Red Hat Inc. All rights reserved.
* Copyright (c) 1997, 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2020, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -98,7 +98,6 @@ public:
CPU_LSE = (1<<8),
CPU_STXR_PREFETCH= (1 << 29),
CPU_A53MAC = (1 << 30),
CPU_DMB_ATOMICS = (1 << 31),
};
static int cpu_family() { return _cpu; }

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -517,3 +517,42 @@ void CompilerConfig::ergo_initialize() {
}
#endif // COMPILER2
}
// Highest compilation level that can actually be reached in this run:
// with tiered compilation the ceiling is TieredStopAtLevel (clamped to
// the highest tier); otherwise the top tier is always available.
static CompLevel highest_compile_level() {
  if (!TieredCompilation) {
    return CompLevel_highest_tier;
  }
  return MIN2((CompLevel) TieredStopAtLevel, CompLevel_highest_tier);
}
// Returns true when this JVM configuration will never run C2- (or
// JVMCI-) compiled top-tier code, i.e. execution is limited to the
// interpreter and/or C1. Callers (e.g. AArch64 volatile-access code
// generation) use this to decide whether full-barrier sequences are
// needed for compatibility with top-tier compiled code.
bool is_c1_or_interpreter_only() {
// -Xint: interpreter only, trivially true.
if (Arguments::is_interpreter_only()) {
return true;
}
#if INCLUDE_AOT
// AOT code is compiled at the top tier, so its presence rules out
// a c1/interpreter-only configuration.
if (UseAOT) {
return false;
}
#endif
// Tier ceiling below C2's level means no top-tier compiles...
if (highest_compile_level() < CompLevel_full_optimization) {
#if INCLUDE_JVMCI
if (TieredCompilation) {
return true;
}
// This happens on jvm variant with C2 disabled and JVMCI
// enabled.
return !UseJVMCICompiler;
#else
return true;
#endif
}
#ifdef TIERED
// The quick-only compilation mode is c1 only. However,
// CompilationModeFlag only takes effect with TieredCompilation
// enabled.
if (TieredCompilation && CompilationModeFlag::quick_only()) {
return true;
}
#endif
// Otherwise a top-tier compiler (C2 or JVMCI) may be launched.
return false;
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -116,6 +116,8 @@ inline bool is_compile(int comp_level) {
return is_c1_compile(comp_level) || is_c2_compile(comp_level);
}
bool is_c1_or_interpreter_only();
// States of Restricted Transactional Memory usage.
enum RTMState {
NoRTM = 0x2, // Don't use RTM

View File

@ -42,6 +42,7 @@ JVMCICompiler::JVMCICompiler() : AbstractCompiler(compiler_jvmci) {
// Initialization
void JVMCICompiler::initialize() {
assert(!is_c1_or_interpreter_only(), "JVMCI is launched, it's not c1/interpreter only mode");
if (!UseCompiler || !EnableJVMCI || !UseJVMCICompiler || !should_perform_init()) {
return;
}

View File

@ -725,8 +725,7 @@
declare_constant(VM_Version::CPU_CRC32) \
declare_constant(VM_Version::CPU_LSE) \
declare_constant(VM_Version::CPU_STXR_PREFETCH) \
declare_constant(VM_Version::CPU_A53MAC) \
declare_constant(VM_Version::CPU_DMB_ATOMICS)
declare_constant(VM_Version::CPU_A53MAC)
#endif

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -84,6 +84,7 @@ bool C2Compiler::init_c2_runtime() {
}
void C2Compiler::initialize() {
assert(!is_c1_or_interpreter_only(), "C2 compiler is launched, it's not c1/interpreter only mode");
// The first compiler thread that gets here will initialize the
// small amount of global state (and runtime stubs) that C2 needs.

View File

@ -560,6 +560,9 @@ static SpecialFlag const special_jvm_flags[] = {
#endif // !X86
{ "UseAdaptiveGCBoundary", JDK_Version::undefined(), JDK_Version::jdk(15), JDK_Version::jdk(16) },
{ "MonitorBound", JDK_Version::jdk(14), JDK_Version::jdk(15), JDK_Version::jdk(16) },
#ifdef AARCH64
{ "UseBarriersForVolatile", JDK_Version::undefined(), JDK_Version::jdk(15), JDK_Version::jdk(16) },
#endif
#ifdef TEST_VERIFY_SPECIAL_JVM_FLAGS
// These entries will generate build errors. Their purpose is to test the macros.

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2015, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -173,8 +173,7 @@ public class AArch64 extends Architecture {
CRC32,
LSE,
STXR_PREFETCH,
A53MAC,
DMB_ATOMICS
A53MAC
}
private final EnumSet<CPUFeature> features;
@ -183,7 +182,6 @@ public class AArch64 extends Architecture {
* Set of flags to control code emission.
*/
public enum Flag {
UseBarriersForVolatile,
UseCRC32,
UseNeon,
UseSIMDForMemoryOps,

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2015, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2015, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -80,9 +80,6 @@ public class AArch64HotSpotJVMCIBackendFactory implements HotSpotJVMCIBackendFac
if ((config.vmVersionFeatures & config.aarch64A53MAC) != 0) {
features.add(AArch64.CPUFeature.A53MAC);
}
if ((config.vmVersionFeatures & config.aarch64DMB_ATOMICS) != 0) {
features.add(AArch64.CPUFeature.DMB_ATOMICS);
}
return features;
}
@ -90,9 +87,6 @@ public class AArch64HotSpotJVMCIBackendFactory implements HotSpotJVMCIBackendFac
private static EnumSet<AArch64.Flag> computeFlags(@SuppressWarnings("unused") AArch64HotSpotVMConfig config) {
EnumSet<AArch64.Flag> flags = EnumSet.noneOf(AArch64.Flag.class);
if (config.useBarriersForVolatile) {
flags.add(AArch64.Flag.UseBarriersForVolatile);
}
if (config.useCRC32) {
flags.add(AArch64.Flag.UseCRC32);
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -46,7 +46,6 @@ class AArch64HotSpotVMConfig extends HotSpotVMConfigAccess {
/*
* These flags are set based on the corresponding command line flags.
*/
final boolean useBarriersForVolatile = getFlag("UseBarriersForVolatile", Boolean.class);
final boolean useCRC32 = getFlag("UseCRC32", Boolean.class);
final boolean useNeon = getFlag("UseNeon", Boolean.class);
final boolean useSIMDForMemoryOps = getFlag("UseSIMDForMemoryOps", Boolean.class);
@ -71,6 +70,5 @@ class AArch64HotSpotVMConfig extends HotSpotVMConfigAccess {
final long aarch64LSE = getConstant("VM_Version::CPU_LSE", Long.class);
final long aarch64STXR_PREFETCH = getConstant("VM_Version::CPU_STXR_PREFETCH", Long.class);
final long aarch64A53MAC = getConstant("VM_Version::CPU_A53MAC", Long.class);
final long aarch64DMB_ATOMICS = getConstant("VM_Version::CPU_DMB_ATOMICS", Long.class);
// Checkstyle: resume
}

File diff suppressed because it is too large Load Diff