8217874: Shenandoah: Clobbered register in ShenandoahBarrierSetAssembler::cmpxchg_oop()

Reviewed-by: adinn
Roman Kennke 2019-02-11 16:49:08 +01:00
parent 9bc10ee9bf
commit 7e1347391a

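What went wrong, in register terms: the old code parked the loaded memory value in the caller-supplied `result` register for the whole retry loop, and merely asserted that `result` was distinct from `addr`, `expected`, and `new_val`. The fix keeps all intermediates in the scratch registers rscratch1/rscratch2 and writes `result` exactly once, on exit; the updated assert_different_registers accordingly drops `result`, so it may now alias an input. Below is a minimal single-threaded C++ sketch of that aliasing hazard. It is illustrative only, not HotSpot code; the helper names and plain-memory stand-ins are invented for the example.

#include <cassert>
#include <cstdint>

// Old scheme: the loaded memory value lives in 'result' while the loop
// still needs 'new_val'. If the caller passes the same register for
// 'result' and 'new_val', the load silently overwrites the value we
// were about to store.
static void cas_old_scheme(uintptr_t* addr, uintptr_t expected,
                           uintptr_t& new_val, uintptr_t& result) {
  result = *addr;              // old code: __ load_exclusive(result, ...)
  if (result == expected) {
    *addr = new_val;           // with result aliasing new_val, this
  }                            // re-stores the old value, not the new one
}

// Fixed scheme: the intermediate lives in a private temp (rscratch1 in
// the patch); 'result' is written only after the loop is done.
static void cas_new_scheme(uintptr_t* addr, uintptr_t expected,
                           uintptr_t& new_val, uintptr_t& result) {
  uintptr_t tmp1 = *addr;      // new code: __ load_exclusive(tmp1, ...)
  if (tmp1 == expected) {
    *addr = new_val;
  }
  result = tmp1;
}

int main() {
  uintptr_t mem = 42, x = 99;  // x plays both 'new_val' and 'result'
  cas_old_scheme(&mem, 42, x, x);
  assert(mem == 42);           // clobbered: the CAS stored 42, not 99

  mem = 42; x = 99;
  cas_new_scheme(&mem, 42, x, x);
  assert(mem == 99);           // correct: the CAS stored 99
  return 0;
}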

@@ -425,54 +425,56 @@ void ShenandoahBarrierSetAssembler::resolve(MacroAssembler* masm, DecoratorSet d
 void ShenandoahBarrierSetAssembler::cmpxchg_oop(MacroAssembler* masm, Register addr, Register expected, Register new_val,
                                                 bool acquire, bool release, bool weak, bool is_cae,
                                                 Register result) {
-  Register tmp = rscratch2;
+  Register tmp1 = rscratch1;
+  Register tmp2 = rscratch2;
   bool is_narrow = UseCompressedOops;
   Assembler::operand_size size = is_narrow ? Assembler::word : Assembler::xword;
-  assert_different_registers(addr, expected, new_val, result, tmp);
+  assert_different_registers(addr, expected, new_val, tmp1, tmp2);
   Label retry, done, fail;
   // CAS, using LL/SC pair.
   __ bind(retry);
-  __ load_exclusive(result, addr, size, acquire);
+  __ load_exclusive(tmp1, addr, size, acquire);
   if (is_narrow) {
-    __ cmpw(result, expected);
+    __ cmpw(tmp1, expected);
   } else {
-    __ cmp(result, expected);
+    __ cmp(tmp1, expected);
   }
   __ br(Assembler::NE, fail);
-  __ store_exclusive(tmp, new_val, addr, size, release);
+  __ store_exclusive(tmp2, new_val, addr, size, release);
   if (weak) {
-    __ cmpw(tmp, 0u); // If the store fails, return NE to our caller
+    __ cmpw(tmp2, 0u); // If the store fails, return NE to our caller
   } else {
-    __ cbnzw(tmp, retry);
+    __ cbnzw(tmp2, retry);
   }
   __ b(done);
   __ bind(fail);
-  // Check if rb(expected)==rb(result)
+  // Check if rb(expected)==rb(tmp1)
   // Shuffle registers so that we have memory value ready for next expected.
-  __ mov(tmp, expected);
-  __ mov(expected, result);
+  __ mov(tmp2, expected);
+  __ mov(expected, tmp1);
   if (is_narrow) {
-    __ decode_heap_oop(result, result);
-    __ decode_heap_oop(tmp, tmp);
+    __ decode_heap_oop(tmp1, tmp1);
+    __ decode_heap_oop(tmp2, tmp2);
   }
-  read_barrier_impl(masm, result);
-  read_barrier_impl(masm, tmp);
-  __ cmp(result, tmp);
+  read_barrier_impl(masm, tmp1);
+  read_barrier_impl(masm, tmp2);
+  __ cmp(tmp1, tmp2);
   // Retry with expected now being the value we just loaded from addr.
   __ br(Assembler::EQ, retry);
   if (is_cae && is_narrow) {
     // For cmp-and-exchange and narrow oops, we need to restore
     // the compressed old-value. We moved it to 'expected' a few lines up.
-    __ mov(result, expected);
+    __ mov(tmp1, expected);
   }
   __ bind(done);
-  if (!is_cae) {
+  if (is_cae) {
+    __ mov(result, tmp1);
+  } else {
     __ cset(result, Assembler::EQ);
   }
 }
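For readers outside HotSpot, here is the control flow the fixed code emits, as a single-threaded C++ sketch: an LL/SC CAS loop whose failure path re-compares both values through the read barrier, because under Shenandoah the from-space and to-space copies of the same object must be treated as equal. load_exclusive/store_exclusive/read_barrier below stand in for the LDXR/STXR pair and read_barrier_impl(); the compressed-oops decode step is elided, and the identity read barrier makes the false-failure branch unreachable here. Illustrative only, not the VM's API.

#include <cstdint>

// Single-threaded stand-ins for LDXR/STXR and the Shenandoah read
// barrier. In the VM, read_barrier resolves a ref to its to-space copy.
static uintptr_t load_exclusive(uintptr_t* addr)          { return *addr; }
static bool      store_exclusive(uintptr_t* addr,
                                 uintptr_t v)             { *addr = v; return true; }
static uintptr_t read_barrier(uintptr_t ref)              { return ref; }

// Mirrors the fixed cmpxchg_oop(): returns the witnessed old value when
// is_cae, otherwise 1 on success (EQ) and 0 on failure (NE).
static uintptr_t cmpxchg_oop_sketch(uintptr_t* addr, uintptr_t expected,
                                    uintptr_t new_val, bool weak, bool is_cae) {
  for (;;) {
    uintptr_t tmp1 = load_exclusive(addr);        // tmp1 <- rscratch1
    if (tmp1 == expected) {
      if (store_exclusive(addr, new_val)) {       // status <- rscratch2
        return is_cae ? tmp1 : 1;                 // success: EQ
      }
      if (weak) {
        return is_cae ? tmp1 : 0;                 // weak: report NE to caller
      }
      continue;                                   // strong: retry the LL/SC
    }
    // Possible false failure: compare both sides through the barrier;
    // from-space and to-space copies of one object count as equal.
    if (read_barrier(tmp1) == read_barrier(expected)) {
      expected = tmp1;  // retry with the value just loaded from addr
      continue;
    }
    return is_cae ? tmp1 : 0;                     // genuine failure: NE
  }
}

int main() {
  uintptr_t cell = 1;
  uintptr_t old = cmpxchg_oop_sketch(&cell, 1, 2, /*weak=*/false, /*is_cae=*/true);
  return (old == 1 && cell == 2) ? 0 : 1;
}

Note how result is assigned only on exit from the loop; that single late write is what makes it safe for result to alias the inputs, and hence why the assert could be relaxed.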