8215547: ZGC: Fix incorrect match rule for loadBarrierWeakSlowRegNoVec

Reviewed-by: eosterlund, neliasso
Per Lidén 2018-12-20 11:43:04 +01:00
parent 83adde9866
commit 2778c86488
4 changed files with 180 additions and 125 deletions

@@ -4265,132 +4265,196 @@ operand cmpOpUCF2() %{
 // Operands for bound floating pointer register arguments
 operand rxmm0() %{
-  constraint(ALLOC_IN_RC(xmm0_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm0_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm1() %{
-  constraint(ALLOC_IN_RC(xmm1_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm1_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm2() %{
-  constraint(ALLOC_IN_RC(xmm2_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm2_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm3() %{
-  constraint(ALLOC_IN_RC(xmm3_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm3_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm4() %{
-  constraint(ALLOC_IN_RC(xmm4_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm4_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm5() %{
-  constraint(ALLOC_IN_RC(xmm5_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm5_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm6() %{
-  constraint(ALLOC_IN_RC(xmm6_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm6_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm7() %{
-  constraint(ALLOC_IN_RC(xmm7_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm7_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm8() %{
-  constraint(ALLOC_IN_RC(xmm8_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm8_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm9() %{
-  constraint(ALLOC_IN_RC(xmm9_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm9_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm10() %{
-  constraint(ALLOC_IN_RC(xmm10_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm10_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm11() %{
-  constraint(ALLOC_IN_RC(xmm11_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm11_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm12() %{
-  constraint(ALLOC_IN_RC(xmm12_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm12_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm13() %{
-  constraint(ALLOC_IN_RC(xmm13_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm13_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm14() %{
-  constraint(ALLOC_IN_RC(xmm14_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm14_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm15() %{
-  constraint(ALLOC_IN_RC(xmm15_reg)); match(VecX);
-  predicate((UseSSE > 0) && (UseAVX <= 2)); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm15_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm16() %{
-  constraint(ALLOC_IN_RC(xmm16_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm16_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm17() %{
-  constraint(ALLOC_IN_RC(xmm17_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm17_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm18() %{
-  constraint(ALLOC_IN_RC(xmm18_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm18_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm19() %{
-  constraint(ALLOC_IN_RC(xmm19_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm19_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm20() %{
-  constraint(ALLOC_IN_RC(xmm20_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm20_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm21() %{
-  constraint(ALLOC_IN_RC(xmm21_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm21_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm22() %{
-  constraint(ALLOC_IN_RC(xmm22_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm22_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm23() %{
-  constraint(ALLOC_IN_RC(xmm23_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm23_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm24() %{
-  constraint(ALLOC_IN_RC(xmm24_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm24_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm25() %{
-  constraint(ALLOC_IN_RC(xmm25_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm25_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm26() %{
-  constraint(ALLOC_IN_RC(xmm26_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm26_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm27() %{
-  constraint(ALLOC_IN_RC(xmm27_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm27_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm28() %{
-  constraint(ALLOC_IN_RC(xmm28_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm28_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm29() %{
-  constraint(ALLOC_IN_RC(xmm29_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm29_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm30() %{
-  constraint(ALLOC_IN_RC(xmm30_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm30_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 operand rxmm31() %{
-  constraint(ALLOC_IN_RC(xmm31_reg)); match(VecX);
-  predicate(UseAVX == 3); format%{%} interface(REG_INTER);
+  constraint(ALLOC_IN_RC(xmm31_reg));
+  match(VecX);
+  format%{%}
+  interface(REG_INTER);
 %}
 //----------OPERAND CLASSES----------------------------------------------------
@@ -12651,33 +12715,6 @@ instruct RethrowException()
 // Execute ZGC load barrier (strong) slow path
 //
-// When running without XMM regs
-instruct loadBarrierSlowRegNoVec(rRegP dst, memory mem, rFlagsReg cr) %{
-  match(Set dst (LoadBarrierSlowReg mem));
-  predicate(MaxVectorSize < 16);
-  effect(DEF dst, KILL cr);
-  format %{"LoadBarrierSlowRegNoVec $dst, $mem" %}
-  ins_encode %{
-#if INCLUDE_ZGC
-    Register d = $dst$$Register;
-    ZBarrierSetAssembler* bs = (ZBarrierSetAssembler*)BarrierSet::barrier_set()->barrier_set_assembler();
-    assert(d != r12, "Can't be R12!");
-    assert(d != r15, "Can't be R15!");
-    assert(d != rsp, "Can't be RSP!");
-    __ lea(d, $mem$$Address);
-    __ call(RuntimeAddress(bs->load_barrier_slow_stub(d)));
-#else
-    ShouldNotReachHere();
-#endif
-  %}
-  ins_pipe(pipe_slow);
-%}
-
 // For XMM and YMM enabled processors
 instruct loadBarrierSlowRegXmmAndYmm(rRegP dst, memory mem, rFlagsReg cr,
                                      rxmm0 x0, rxmm1 x1, rxmm2 x2,rxmm3 x3,
@@ -12686,7 +12723,7 @@ instruct loadBarrierSlowRegXmmAndYmm(rRegP dst, memory mem, rFlagsReg cr,
                                      rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15) %{
   match(Set dst (LoadBarrierSlowReg mem));
-  predicate((UseSSE > 0) && (UseAVX <= 2) && (MaxVectorSize >= 16));
+  predicate(UseAVX <= 2);
   effect(DEF dst, KILL cr,
          KILL x0, KILL x1, KILL x2, KILL x3,
@@ -12694,7 +12731,7 @@ instruct loadBarrierSlowRegXmmAndYmm(rRegP dst, memory mem, rFlagsReg cr,
          KILL x8, KILL x9, KILL x10, KILL x11,
          KILL x12, KILL x13, KILL x14, KILL x15);
-  format %{"LoadBarrierSlowRegXmm $dst, $mem" %}
+  format %{"LoadBarrierSlowRegXmmAndYmm $dst, $mem" %}
   ins_encode %{
 #if INCLUDE_ZGC
     Register d = $dst$$Register;
@@ -12725,7 +12762,7 @@ instruct loadBarrierSlowRegZmm(rRegP dst, memory mem, rFlagsReg cr,
                                rxmm28 x28, rxmm29 x29, rxmm30 x30, rxmm31 x31) %{
   match(Set dst (LoadBarrierSlowReg mem));
-  predicate((UseAVX == 3) && (MaxVectorSize >= 16));
+  predicate(UseAVX == 3);
   effect(DEF dst, KILL cr,
          KILL x0, KILL x1, KILL x2, KILL x3,
@@ -12760,33 +12797,6 @@ instruct loadBarrierSlowRegZmm(rRegP dst, memory mem, rFlagsReg cr,
 // Execute ZGC load barrier (weak) slow path
 //
-// When running without XMM regs
-instruct loadBarrierWeakSlowRegNoVec(rRegP dst, memory mem, rFlagsReg cr) %{
-  match(Set dst (LoadBarrierSlowReg mem));
-  predicate(MaxVectorSize < 16);
-  effect(DEF dst, KILL cr);
-  format %{"LoadBarrierSlowRegNoVec $dst, $mem" %}
-  ins_encode %{
-#if INCLUDE_ZGC
-    Register d = $dst$$Register;
-    ZBarrierSetAssembler* bs = (ZBarrierSetAssembler*)BarrierSet::barrier_set()->barrier_set_assembler();
-    assert(d != r12, "Can't be R12!");
-    assert(d != r15, "Can't be R15!");
-    assert(d != rsp, "Can't be RSP!");
-    __ lea(d, $mem$$Address);
-    __ call(RuntimeAddress(bs->load_barrier_weak_slow_stub(d)));
-#else
-    ShouldNotReachHere();
-#endif
-  %}
-  ins_pipe(pipe_slow);
-%}
-
 // For XMM and YMM enabled processors
 instruct loadBarrierWeakSlowRegXmmAndYmm(rRegP dst, memory mem, rFlagsReg cr,
                                          rxmm0 x0, rxmm1 x1, rxmm2 x2,rxmm3 x3,
@@ -12795,7 +12805,7 @@ instruct loadBarrierWeakSlowRegXmmAndYmm(rRegP dst, memory mem, rFlagsReg cr,
                                          rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15) %{
   match(Set dst (LoadBarrierWeakSlowReg mem));
-  predicate((UseSSE > 0) && (UseAVX <= 2) && (MaxVectorSize >= 16));
+  predicate(UseAVX <= 2);
   effect(DEF dst, KILL cr,
          KILL x0, KILL x1, KILL x2, KILL x3,
@@ -12803,7 +12813,7 @@ instruct loadBarrierWeakSlowRegXmmAndYmm(rRegP dst, memory mem, rFlagsReg cr,
          KILL x8, KILL x9, KILL x10, KILL x11,
          KILL x12, KILL x13, KILL x14, KILL x15);
-  format %{"LoadBarrierWeakSlowRegXmm $dst, $mem" %}
+  format %{"LoadBarrierWeakSlowRegXmmAndYmm $dst, $mem" %}
   ins_encode %{
 #if INCLUDE_ZGC
     Register d = $dst$$Register;
@@ -12834,7 +12844,7 @@ instruct loadBarrierWeakSlowRegZmm(rRegP dst, memory mem, rFlagsReg cr,
                                    rxmm28 x28, rxmm29 x29, rxmm30 x30, rxmm31 x31) %{
   match(Set dst (LoadBarrierWeakSlowReg mem));
-  predicate((UseAVX == 3) && (MaxVectorSize >= 16));
+  predicate(UseAVX == 3);
   effect(DEF dst, KILL cr,
          KILL x0, KILL x1, KILL x2, KILL x3,

@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+#include "precompiled.hpp"
+#include "gc/z/zArguments.hpp"
+#include "runtime/globals.hpp"
+#include "runtime/globals_extension.hpp"
+#include "utilities/debug.hpp"
+
+void ZArguments::initialize_platform() {
+  // The C2 barrier slow path expects vector registers to be at
+  // least 16 bytes wide, which is the minimum width available on
+  // all x86-64 systems. However, the user could have specified a
+  // lower number on the command-line, in which case we print a
+  // warning and raise it to 16.
+  if (MaxVectorSize < 16) {
+    warning("ZGC requires MaxVectorSize to be at least 16");
+    FLAG_SET_DEFAULT(MaxVectorSize, 16);
+  }
+}
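
With MaxVectorSize pinned to at least 16, the NoVec rules above (which were the only ones whose predicate allowed MaxVectorSize < 16, and whose weak variant carried the incorrect "match(Set dst (LoadBarrierSlowReg mem))" rule named in the commit title) become dead and are removed, and the MaxVectorSize terms drop out of the remaining predicates. As a rough illustration of the guard's effect, here is a minimal standalone C++ sketch; it is not HotSpot code, and the helper name clamp_max_vector_size is invented for this example, while the real code operates on the MaxVectorSize flag via warning() and FLAG_SET_DEFAULT as shown above:

// Standalone sketch only: mirrors the clamping behaviour of
// ZArguments::initialize_platform() above. The function name is
// hypothetical and exists purely for illustration.
#include <cstdio>

static int clamp_max_vector_size(int requested) {
  if (requested < 16) {
    // Same message the real code emits via warning().
    std::fprintf(stderr, "ZGC requires MaxVectorSize to be at least 16\n");
    return 16;
  }
  return requested;
}

int main() {
  std::printf("%d\n", clamp_max_vector_size(8));   // prints 16: raised after the warning
  std::printf("%d\n", clamp_max_vector_size(32));  // prints 32: left untouched
  return 0;
}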

@ -19,7 +19,6 @@
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any * or visit www.oracle.com if you need additional information or have any
* questions. * questions.
*
*/ */
#include "precompiled.hpp" #include "precompiled.hpp"
@@ -91,6 +90,9 @@ void ZArguments::initialize() {
   // Verification of stacks not (yet) supported, for the same reason
   // we need fixup_partial_loads
   DEBUG_ONLY(FLAG_SET_DEFAULT(VerifyStack, false));
+
+  // Initialize platform specific arguments
+  initialize_platform();
 }
 
 CollectedHeap* ZArguments::create_heap() {

@@ -29,6 +29,9 @@
 class CollectedHeap;
 
 class ZArguments : public GCArguments {
+private:
+  void initialize_platform();
+
 public:
   virtual void initialize();
   virtual size_t conservative_max_heap_alignment();