8302328: [s390x] Simplify asm_assert definition

Reviewed-by: lucy, mdoerr
Amit Kumar, 2023-04-26 16:06:55 +00:00; committed by Martin Doerr
parent 9bc6a212f7
commit a18191fee8
9 changed files with 48 additions and 84 deletions
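
At a glance, the patch folds the old asm_assert_eq / asm_assert_ne / asm_assert_low / asm_assert_high / asm_assert_static family into a single asm_assert(branch_condition, msg, id, is_static) and rewrites all call sites to pass the condition explicitly. The sketch below is a standalone model of that shape, not HotSpot code: the enum values, the printf "emission", and the main() driver are stand-ins; only the helper names, the is_static default, and the two example call sites are taken from the diff that follows.

#define ASSERT 1   // stands in for a HotSpot debug build; not part of the patch
#include <cstdio>

// Names mirror the patch; values and the printf "emission" are invented for the demo.
enum branch_condition { bcondEqual, bcondNotEqual, bcondZero, bcondNotZero, bcondNotLow, bcondNotHigh };

static void stop(const char* msg, int id)        { std::printf("  stop        id=0x%x: %s\n", id, msg); }
static void stop_static(const char* msg, int id) { std::printf("  stop_static id=0x%x: %s\n", id, msg); }

// New shape: one helper that "emits" a branch around the trap for the condition
// under which execution may continue, then a relocatable stop() or a
// non-relocatable stop_static() depending on is_static (default: static).
static void asm_assert(branch_condition cond, const char* msg, int id, bool is_static = true) {
#ifdef ASSERT
  std::printf("  branch-on-condition %d past the trap\n", static_cast<int>(cond)); // models z_brc(cond, ok)
  is_static ? stop_static(msg, id) : stop(msg, id);
#endif
}

int main() {
  // Call-site migration, as in the hunks below:
  //   __ asm_assert_ne("null oop not allowed (G1 pre)", 0x321);
  //     becomes
  //   __ asm_assert(Assembler::bcondNotZero, "null oop not allowed (G1 pre)", 0x321);
  asm_assert(bcondNotZero, "null oop not allowed (G1 pre)", 0x321);
  asm_assert(bcondEqual, "[old_sp]!=[Z_SP]", 0x211);
  return 0;
}
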

@@ -2984,7 +2984,7 @@ void LIR_Assembler::emit_profile_type(LIR_OpProfileType* op) {
__ z_bru(next);
}
} else {
__ asm_assert_ne("unexpected null obj", __LINE__);
__ asm_assert(Assembler::bcondNotZero, "unexpected null obj", __LINE__);
}
__ bind(update);
@@ -2995,7 +2995,7 @@ void LIR_Assembler::emit_profile_type(LIR_OpProfileType* op) {
__ load_klass(tmp1, tmp1);
metadata2reg(exact_klass->constant_encoding(), tmp2);
__ z_cgr(tmp1, tmp2);
__ asm_assert_eq("exact klass and actual klass differ", __LINE__);
__ asm_assert(Assembler::bcondEqual, "exact klass and actual klass differ", __LINE__);
}
#endif

@@ -1,6 +1,6 @@
/*
* Copyright (c) 2019, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2018, 2019 SAP SE. All rights reserved.
* Copyright (c) 2018, 2023 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -177,7 +177,7 @@ void G1BarrierSetAssembler::g1_write_barrier_pre(MacroAssembler* masm, Decorator
if (preloaded && not_null) {
#ifdef ASSERT
__ z_ltgr(Rpre_val, Rpre_val);
__ asm_assert_ne("null oop not allowed (G1 pre)", 0x321); // Checked by caller.
__ asm_assert(Assembler::bcondNotZero, "null oop not allowed (G1 pre)", 0x321); // Checked by caller.
#endif
} else {
__ z_ltgr(Rpre_val, Rpre_val);
@@ -289,7 +289,7 @@ void G1BarrierSetAssembler::g1_write_barrier_post(MacroAssembler* masm, Decorato
if (not_null) {
#ifdef ASSERT
__ z_ltgr(Rnew_val, Rnew_val);
__ asm_assert_ne("null oop not allowed (G1 post)", 0x322); // Checked by caller.
__ asm_assert(Assembler::bcondNotZero, "null oop not allowed (G1 post)", 0x322); // Checked by caller.
#endif
} else {
__ z_ltgr(Rnew_val, Rnew_val);

@@ -2074,7 +2074,7 @@ void MacroAssembler::push_frame(Register bytes, Register old_sp, bool copy_sp, b
assert_different_registers(bytes, old_sp, Z_SP);
if (!copy_sp) {
z_cgr(old_sp, Z_SP);
asm_assert_eq("[old_sp]!=[Z_SP]", 0x211);
asm_assert(bcondEqual, "[old_sp]!=[Z_SP]", 0x211);
}
#endif
if (copy_sp) { z_lgr(old_sp, Z_SP); }
@@ -5326,47 +5326,25 @@ void MacroAssembler::multiply_to_len(Register x, Register xlen,
z_lmg(Z_R7, Z_R13, _z_abi(gpr7), Z_SP);
}
#ifndef PRODUCT
void MacroAssembler::asm_assert(branch_condition cond, const char* msg, int id, bool is_static) {
#ifdef ASSERT
Label ok;
z_brc(cond, ok);
is_static ? stop_static(msg, id) : stop(msg, id);
bind(ok);
#endif // ASSERT
}
// Assert if CC indicates "not equal" (check_equal==true) or "equal" (check_equal==false).
void MacroAssembler::asm_assert(bool check_equal, const char *msg, int id) {
Label ok;
if (check_equal) {
z_bre(ok);
} else {
z_brne(ok);
}
stop(msg, id);
bind(ok);
}
// Assert if CC indicates "low".
void MacroAssembler::asm_assert_low(const char *msg, int id) {
Label ok;
z_brnl(ok);
stop(msg, id);
bind(ok);
}
// Assert if CC indicates "high".
void MacroAssembler::asm_assert_high(const char *msg, int id) {
Label ok;
z_brnh(ok);
stop(msg, id);
bind(ok);
}
// Assert if CC indicates "not equal" (check_equal==true) or "equal" (check_equal==false)
// generate non-relocatable code.
void MacroAssembler::asm_assert_static(bool check_equal, const char *msg, int id) {
Label ok;
if (check_equal) { z_bre(ok); }
else { z_brne(ok); }
stop_static(msg, id);
bind(ok);
#ifdef ASSERT
asm_assert(check_equal ? bcondEqual : bcondNotEqual, msg, id);
#endif // ASSERT
}
void MacroAssembler::asm_assert_mems_zero(bool check_equal, bool allow_relocation, int size, int64_t mem_offset,
Register mem_base, const char* msg, int id) {
#ifdef ASSERT
switch (size) {
case 4:
load_and_test_int(Z_R0, Address(mem_base, mem_offset));
@@ -5377,8 +5355,9 @@ void MacroAssembler::asm_assert_mems_zero(bool check_equal, bool allow_relocatio
default:
ShouldNotReachHere();
}
if (allow_relocation) { asm_assert(check_equal, msg, id); }
else { asm_assert_static(check_equal, msg, id); }
// if relocation is not allowed then stop_static() will be called otherwise call stop()
asm_assert(check_equal ? bcondEqual : bcondNotEqual, msg, id, !allow_relocation);
#endif // ASSERT
}
// Check the condition
@@ -5387,18 +5366,13 @@ void MacroAssembler::asm_assert_mems_zero(bool check_equal, bool allow_relocatio
// expected_size - FP + SP == 0
// Destroys Register expected_size if no tmp register is passed.
void MacroAssembler::asm_assert_frame_size(Register expected_size, Register tmp, const char* msg, int id) {
if (tmp == noreg) {
tmp = expected_size;
} else {
if (tmp != expected_size) {
z_lgr(tmp, expected_size);
}
z_algr(tmp, Z_SP);
z_slg(tmp, 0, Z_R0, Z_SP);
asm_assert_eq(msg, id);
}
#ifdef ASSERT
lgr_if_needed(tmp, expected_size);
z_algr(tmp, Z_SP);
z_slg(tmp, 0, Z_R0, Z_SP);
asm_assert(bcondEqual, msg, id);
#endif // ASSERT
}
#endif // !PRODUCT
// Save and restore functions: Exclude Z_R0.
void MacroAssembler::save_volatile_regs(Register dst, int offset, bool include_fp, bool include_flags) {
@@ -5519,8 +5493,8 @@ void MacroAssembler::stop(int type, const char* msg, int id) {
// The plain disassembler does not recognize illtrap. It instead displays
// a 32-bit value. Issuing two illtraps assures the disassembler finds
// the proper beginning of the next instruction.
z_illtrap(); // Illegal instruction.
z_illtrap(); // Illegal instruction.
z_illtrap(id); // Illegal instruction.
z_illtrap(id); // Illegal instruction.
BLOCK_COMMENT(" } stop");
}
@@ -5559,7 +5533,7 @@ address MacroAssembler::stop_chain(address reentry, int type, const char* msg, i
} else {
call_VM_leaf_static(CAST_FROM_FN_PTR(address, stop_on_request), Z_ARG1, Z_ARG2);
}
z_illtrap(); // Illegal instruction as emergency stop, should the above call return.
z_illtrap(id); // Illegal instruction as emergency stop, should the above call return.
}
BLOCK_COMMENT(" } stop_chain");
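
For readability, this is how the consolidated MacroAssembler implementation reads once the hunks above are applied; it is assembled from the added lines only, so the surrounding blank lines and file context are assumed rather than quoted:

#ifndef PRODUCT
void MacroAssembler::asm_assert(branch_condition cond, const char* msg, int id, bool is_static) {
#ifdef ASSERT
  Label ok;
  z_brc(cond, ok);                                 // skip the stop when cond holds
  is_static ? stop_static(msg, id) : stop(msg, id); // non-relocatable vs. relocatable stop code
  bind(ok);
#endif // ASSERT
}

void MacroAssembler::asm_assert(bool check_equal, const char *msg, int id) {
#ifdef ASSERT
  asm_assert(check_equal ? bcondEqual : bcondNotEqual, msg, id);
#endif // ASSERT
}
#endif // !PRODUCT

Callers that previously used asm_assert_static get the same effect by leaving is_static at its default of true, while asm_assert_mems_zero passes !allow_relocation, as seen in the hunks above.
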

@@ -1,6 +1,6 @@
/*
* Copyright (c) 2016, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2022 SAP SE. All rights reserved.
* Copyright (c) 2016, 2023 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -863,18 +863,13 @@ class MacroAssembler: public Assembler {
//
// Assert on CC (condition code in CPU state).
void asm_assert(bool check_equal, const char* msg, int id) PRODUCT_RETURN;
void asm_assert_low(const char *msg, int id) PRODUCT_RETURN;
void asm_assert_high(const char *msg, int id) PRODUCT_RETURN;
void asm_assert_eq(const char* msg, int id) { asm_assert(true, msg, id); }
void asm_assert_ne(const char* msg, int id) { asm_assert(false, msg, id); }
void asm_assert_static(bool check_equal, const char* msg, int id) PRODUCT_RETURN;
void asm_assert(branch_condition cond, const char* msg, int id, bool is_static=true);
void asm_assert(bool check_equal, const char* msg, int id);
private:
// Emit assertions.
void asm_assert_mems_zero(bool check_equal, bool allow_relocation, int size, int64_t mem_offset,
Register mem_base, const char* msg, int id) PRODUCT_RETURN;
Register mem_base, const char* msg, int id);
public:
inline void asm_assert_mem4_is_zero(int64_t mem_offset, Register mem_base, const char* msg, int id) {
@@ -889,7 +884,6 @@ class MacroAssembler: public Assembler {
inline void asm_assert_mem8_isnot_zero(int64_t mem_offset, Register mem_base, const char* msg, int id) {
asm_assert_mems_zero(false, true, 8, mem_offset, mem_base, msg, id);
}
inline void asm_assert_mem4_is_zero_static(int64_t mem_offset, Register mem_base, const char* msg, int id) {
asm_assert_mems_zero(true, false, 4, mem_offset, mem_base, msg, id);
}
@@ -902,7 +896,7 @@ class MacroAssembler: public Assembler {
inline void asm_assert_mem8_isnot_zero_static(int64_t mem_offset, Register mem_base, const char* msg, int id) {
asm_assert_mems_zero(false, false, 8, mem_offset, mem_base, msg, id);
}
void asm_assert_frame_size(Register expected_size, Register tmp, const char* msg, int id) PRODUCT_RETURN;
void asm_assert_frame_size(Register expected_size, Register tmp, const char* msg, int id);
// Save and restore functions: Exclude Z_R0.
void save_volatile_regs( Register dst, int offset, bool include_fp, bool include_flags);

@@ -119,7 +119,7 @@ void OptoRuntime::generate_exception_blob() {
// (unwind_initial_activation_pending_exception).
#ifdef ASSERT
__ z_ltgr(handle_exception, handle_exception);
__ asm_assert_ne("handler must not be null", 0x852);
__ asm_assert(Assembler::bcondNotZero, "handler must not be null", 0x852);
#endif
// Handle_exception contains the handler address. If the associated frame

@@ -2478,7 +2478,7 @@ static void push_skeleton_frames(MacroAssembler* masm, bool deopt,
// Make sure that there is at least one entry in the array.
DEBUG_ONLY(__ z_ltgr(number_of_frames_reg, number_of_frames_reg));
__ asm_assert_ne("array_size must be > 0", 0x205);
__ asm_assert(Assembler::bcondNotZero, "array_size must be > 0", 0x205);
__ z_bru(loop_entry);
@@ -2788,7 +2788,7 @@ void SharedRuntime::generate_uncommon_trap_blob() {
} else {
__ z_cliy(unpack_kind_byte_offset, unroll_block_reg, Deoptimization::Unpack_uncommon_trap);
}
__ asm_assert_eq("SharedRuntime::generate_deopt_blob: expected Unpack_uncommon_trap", 0);
__ asm_assert(Assembler::bcondEqual, "SharedRuntime::generate_deopt_blob: expected Unpack_uncommon_trap", 0);
#endif
__ zap_from_to(Z_SP, Z_SP, Z_R0_scratch, Z_R1, 500, -1);

@@ -372,9 +372,9 @@ class StubGenerator: public StubCodeGenerator {
#ifdef ASSERT
char assertMsg[] = "check BasicType definition in globalDefinitions.hpp";
__ z_chi(r_arg_result_type, T_BOOLEAN);
__ asm_assert_low(assertMsg, 0x0234);
__ asm_assert(Assembler::bcondNotLow, assertMsg, 0x0234);
__ z_chi(r_arg_result_type, T_NARROWOOP);
__ asm_assert_high(assertMsg, 0x0235);
__ asm_assert(Assembler::bcondNotHigh, assertMsg, 0x0235);
#endif
__ add2reg(r_arg_result_type, -T_BOOLEAN); // Remove offset.
__ z_larl(Z_R1, firstHandler); // location of first handler
@@ -740,7 +740,7 @@ class StubGenerator: public StubCodeGenerator {
void assert_positive_int(Register count) {
#ifdef ASSERT
__ z_srag(Z_R0, count, 31); // Just leave the sign (must be zero) in Z_R0.
__ asm_assert_eq("missing zero extend", 0xAFFE);
__ asm_assert(Assembler::bcondZero, "missing zero extend", 0xAFFE);
#endif
}

@@ -1,6 +1,6 @@
/*
* Copyright (c) 2016, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2017 SAP SE. All rights reserved.
* Copyright (c) 2016, 2023 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -55,8 +55,7 @@ void StubRoutines::zarch::generate_load_absolute_address(MacroAssembler* masm, R
__ load_const_optimized(Z_R0, table_addr);
__ z_cgr(table, Z_R0); // safety net
__ z_bre(L);
__ z_illtrap();
__ asm_assert_eq("crc_table: external word relocation required for load_absolute_address", 0x33);
__ stop("crc_table: external word relocation required for load_absolute_address", 0x33);
__ bind(L);
}
{
@@ -65,8 +64,7 @@ void StubRoutines::zarch::generate_load_absolute_address(MacroAssembler* masm, R
__ z_cl(Z_R0, Address(table, 4)); // safety net
__ z_bre(L);
__ z_l(Z_R0, Address(table, 4)); // Load data from memory, we know the constant we compared against.
__ z_illtrap();
__ asm_assert_eq("crc_table: address or contents seems to be messed up", 0x22);
__ stop("crc_table: address or contents seems to be messed up", 0x22);
__ bind(L);
}
#endif
@@ -100,8 +98,7 @@ void StubRoutines::zarch::generate_load_trot_table_addr(MacroAssembler* masm, Re
__ load_const_optimized(Z_R0, StubRoutines::zarch::_trot_table_addr);
__ z_cgr(table, Z_R0); // safety net
__ z_bre(L);
__ z_illtrap();
__ asm_assert_eq("crc_table: external word relocation does not work for load_absolute_address", 0x33);
__ stop("crc_table: external word relocation does not work for load_absolute_address", 0x33);
__ bind(L);
}
{
@@ -110,8 +107,7 @@ void StubRoutines::zarch::generate_load_trot_table_addr(MacroAssembler* masm, Re
__ z_clg(Z_R0, Address(table, 8)); // safety net
__ z_bre(L);
__ z_lg(Z_R0, Address(table, 8)); // Load data from memory, we know the constant we compared against.
__ z_illtrap();
__ asm_assert_eq("trot_table: address or contents seems to be messed up", 0x22);
__ stop("trot_table: address or contents seems to be messed up", 0x22);
__ bind(L);
}
#endif

@@ -1088,7 +1088,7 @@ void TemplateInterpreterGenerator::generate_fixed_frame(bool native_call) {
// asm_assert* is a nop in product builds
NOT_PRODUCT(__ z_cg(Z_R14, _z_common_abi(return_pc), Z_SP));
NOT_PRODUCT(__ asm_assert_eq("killed Z_R14", 0));
NOT_PRODUCT(__ asm_assert(Assembler::bcondEqual, "killed Z_R14", 0));
__ resize_frame_absolute(sp_after_resize, fp, true);
__ save_return_pc(Z_R14);