8284990: AArch64: Remove STXR_PREFETCH from CPU features

Reviewed-by: aph, ngasson, njian
This commit is contained in:
Eric Liu 2022-07-27 09:38:04 +00:00 committed by Nick Gasson
parent 2a1d9cfead
commit 2bd90c2149
5 changed files with 7 additions and 18 deletions

View File

@@ -1892,8 +1892,7 @@ void MacroAssembler::atomic_incw(Register counter_addr, Register tmp, Register t
     return;
   }
   Label retry_load;
-  if (VM_Version::supports_stxr_prefetch())
-    prfm(Address(counter_addr), PSTL1STRM);
+  prfm(Address(counter_addr), PSTL1STRM);
   bind(retry_load);
   // flush and load exclusive from the memory location
   ldxrw(tmp, counter_addr);
@@ -2627,8 +2626,7 @@ void MacroAssembler::cmpxchgptr(Register oldv, Register newv, Register addr, Reg
     membar(AnyAny);
   } else {
     Label retry_load, nope;
-    if (VM_Version::supports_stxr_prefetch())
-      prfm(Address(addr), PSTL1STRM);
+    prfm(Address(addr), PSTL1STRM);
     bind(retry_load);
     // flush and load exclusive from the memory location
     // and fail if it is not what we expect
@@ -2670,8 +2668,7 @@ void MacroAssembler::cmpxchgw(Register oldv, Register newv, Register addr, Regis
     membar(AnyAny);
   } else {
     Label retry_load, nope;
-    if (VM_Version::supports_stxr_prefetch())
-      prfm(Address(addr), PSTL1STRM);
+    prfm(Address(addr), PSTL1STRM);
     bind(retry_load);
     // flush and load exclusive from the memory location
     // and fail if it is not what we expect
@@ -2712,8 +2709,7 @@ void MacroAssembler::cmpxchg(Register addr, Register expected,
     compare_eq(result, expected, size);
  } else {
    Label retry_load, done;
-   if (VM_Version::supports_stxr_prefetch())
-     prfm(Address(addr), PSTL1STRM);
+   prfm(Address(addr), PSTL1STRM);
    bind(retry_load);
    load_exclusive(result, addr, size, acquire);
    compare_eq(result, expected, size);
@@ -2771,8 +2767,7 @@ void MacroAssembler::atomic_##NAME(Register prev, RegisterOrConstant incr, Regis
   result = different(prev, incr, addr) ? prev : rscratch2;              \
                                                                         \
   Label retry_load;                                                     \
-  if (VM_Version::supports_stxr_prefetch())                             \
-    prfm(Address(addr), PSTL1STRM);                                     \
+  prfm(Address(addr), PSTL1STRM);                                       \
   bind(retry_load);                                                     \
   LDXR(result, addr);                                                   \
   OP(rscratch1, result, incr);                                          \
@@ -2802,8 +2797,7 @@ void MacroAssembler::atomic_##OP(Register prev, Register newv, Register addr) {
   result = different(prev, newv, addr) ? prev : rscratch2;              \
                                                                         \
   Label retry_load;                                                     \
-  if (VM_Version::supports_stxr_prefetch())                             \
-    prfm(Address(addr), PSTL1STRM);                                     \
+  prfm(Address(addr), PSTL1STRM);                                       \
   bind(retry_load);                                                     \
   LDXR(result, addr);                                                   \
   STXR(rscratch1, newv, addr);                                          \

View File

@@ -223,8 +223,6 @@ void VM_Version::initialize() {
     }
   }
 
-  if (_cpu == CPU_ARM && (_model == 0xd07 || _model2 == 0xd07)) _features |= CPU_STXR_PREFETCH;
-
   char buf[512];
   sprintf(buf, "0x%02x:0x%x:0x%03x:%d", _cpu, _variant, _model, _revision);
   if (_model2) sprintf(buf+strlen(buf), "(0x%03x)", _model2);

View File

@@ -120,7 +120,6 @@ public:
     /* flags above must follow Linux HWCAP */ \
     decl(SVEBITPERM,      svebitperm,      27) \
     decl(SVE2,            sve2,            28) \
-    decl(STXR_PREFETCH,   stxr_prefetch,   29) \
     decl(A53MAC,          a53mac,          31)
 
   enum Feature_Flag {

View File

@@ -180,7 +180,6 @@ public class AArch64 extends Architecture {
         PACA,
         SVEBITPERM,
         SVE2,
-        STXR_PREFETCH,
         A53MAC,
     }
 

View File

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -68,7 +68,6 @@ class AArch64HotSpotVMConfig extends HotSpotVMConfigAccess {
     final long aarch64SHA2 = getConstant("VM_Version::CPU_SHA2", Long.class);
     final long aarch64CRC32 = getConstant("VM_Version::CPU_CRC32", Long.class);
     final long aarch64LSE = getConstant("VM_Version::CPU_LSE", Long.class);
-    final long aarch64STXR_PREFETCH = getConstant("VM_Version::CPU_STXR_PREFETCH", Long.class);
     final long aarch64A53MAC = getConstant("VM_Version::CPU_A53MAC", Long.class);
     // Checkstyle: resume
 }