8256999: Add C2 intrinsic for Reference.refersTo and PhantomReference::refersTo
Reviewed-by: pliden, vlivanov, rkennke, eosterlund, shade
parent c5d95071df
commit 816e8f83b8
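Before the per-file hunks, a quick orientation: Reference.refersTo(obj) asks whether a reference's referent is identical to obj without creating a strong reference to the referent (which get() would do, and which PhantomReference.get() cannot do at all, since it always returns null). A minimal, illustrative usage sketch, not part of this change:

import java.lang.ref.PhantomReference;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;

public class RefersToExample {
    public static void main(String[] args) {
        Object referent = new Object();
        var weak = new WeakReference<>(referent);
        // PhantomReference.get() always returns null, so refersTo is the only
        // way to ask what a phantom reference currently points at.
        var phantom = new PhantomReference<>(referent, new ReferenceQueue<>());

        System.out.println(weak.refersTo(referent));    // true
        System.out.println(phantom.refersTo(referent)); // true
        System.out.println(weak.refersTo(null));        // false while the referent is set

        weak.clear();
        System.out.println(weak.refersTo(null));        // true after clear()
    }
}

This no-keepalive semantics is why the intrinsic below loads the referent with a weak/phantom AS_NO_KEEPALIVE barrier instead of reusing the Reference.get path.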
@@ -30,8 +30,11 @@ source_hpp %{

 source %{

-static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, bool weak) {
-  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, weak);
+static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, int barrier_data) {
+  if (barrier_data == ZLoadBarrierElided) {
+    return;
+  }
+  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, barrier_data);
   __ ldr(tmp, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
   __ andr(tmp, tmp, ref);
   __ cbnz(tmp, *stub->entry());
@@ -39,7 +42,7 @@ static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address
 }

 static void z_load_barrier_slow_path(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp) {
-  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, false /* weak */);
+  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, ZLoadBarrierStrong);
   __ b(*stub->entry());
   __ bind(*stub->continuation());
 }
@@ -50,7 +53,7 @@ static void z_load_barrier_slow_path(MacroAssembler& _masm, const MachNode* node
 instruct zLoadP(iRegPNoSp dst, memory8 mem, rFlagsReg cr)
 %{
   match(Set dst (LoadP mem));
-  predicate(UseZGC && !needs_acquiring_load(n) && (n->as_Load()->barrier_data() == ZLoadBarrierStrong));
+  predicate(UseZGC && !needs_acquiring_load(n) && (n->as_Load()->barrier_data() != 0));
   effect(TEMP dst, KILL cr);

   ins_cost(4 * INSN_COST);
@@ -60,29 +63,7 @@ instruct zLoadP(iRegPNoSp dst, memory8 mem, rFlagsReg cr)
   ins_encode %{
     const Address ref_addr = mem2address($mem->opcode(), as_Register($mem$$base), $mem$$index, $mem$$scale, $mem$$disp);
     __ ldr($dst$$Register, ref_addr);
-    if (barrier_data() != ZLoadBarrierElided) {
-      z_load_barrier(_masm, this, ref_addr, $dst$$Register, rscratch2 /* tmp */, false /* weak */);
-    }
-  %}
-
-  ins_pipe(iload_reg_mem);
-%}
-
-// Load Weak Pointer
-instruct zLoadWeakP(iRegPNoSp dst, memory8 mem, rFlagsReg cr)
-%{
-  match(Set dst (LoadP mem));
-  predicate(UseZGC && !needs_acquiring_load(n) && (n->as_Load()->barrier_data() == ZLoadBarrierWeak));
-  effect(TEMP dst, KILL cr);
-
-  ins_cost(4 * INSN_COST);
-
-  format %{ "ldr $dst, $mem" %}
-
-  ins_encode %{
-    const Address ref_addr = mem2address($mem->opcode(), as_Register($mem$$base), $mem$$index, $mem$$scale, $mem$$disp);
-    __ ldr($dst$$Register, ref_addr);
-    z_load_barrier(_masm, this, ref_addr, $dst$$Register, rscratch2 /* tmp */, true /* weak */);
+    z_load_barrier(_masm, this, ref_addr, $dst$$Register, rscratch2 /* tmp */, barrier_data());
   %}

   ins_pipe(iload_reg_mem);
@@ -92,7 +73,7 @@ instruct zLoadWeakP(iRegPNoSp dst, memory8 mem, rFlagsReg cr)
 instruct zLoadPVolatile(iRegPNoSp dst, indirect mem /* sync_memory */, rFlagsReg cr)
 %{
   match(Set dst (LoadP mem));
-  predicate(UseZGC && needs_acquiring_load(n) && n->as_Load()->barrier_data() == ZLoadBarrierStrong);
+  predicate(UseZGC && needs_acquiring_load(n) && n->as_Load()->barrier_data() != 0);
   effect(TEMP dst, KILL cr);

   ins_cost(VOLATILE_REF_COST);
@@ -101,9 +82,7 @@ instruct zLoadPVolatile(iRegPNoSp dst, indirect mem /* sync_memory */, rFlagsReg

   ins_encode %{
     __ ldar($dst$$Register, $mem$$Register);
-    if (barrier_data() != ZLoadBarrierElided) {
-      z_load_barrier(_masm, this, Address($mem$$Register), $dst$$Register, rscratch2 /* tmp */, false /* weak */);
-    }
+    z_load_barrier(_masm, this, Address($mem$$Register), $dst$$Register, rscratch2 /* tmp */, barrier_data());
   %}

   ins_pipe(pipe_serial);
@@ -231,7 +210,7 @@ instruct zCompareAndExchangePAcq(iRegPNoSp res, indirect mem, iRegP oldval, iReg

 instruct zGetAndSetP(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{
   match(Set prev (GetAndSetP mem newv));
-  predicate(UseZGC && !needs_acquiring_load_exclusive(n) && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
+  predicate(UseZGC && !needs_acquiring_load_exclusive(n) && n->as_LoadStore()->barrier_data() != 0);
   effect(TEMP_DEF prev, KILL cr);

   ins_cost(2 * VOLATILE_REF_COST);
@@ -240,9 +219,7 @@ instruct zGetAndSetP(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{

   ins_encode %{
     __ atomic_xchg($prev$$Register, $newv$$Register, $mem$$Register);
-    if (barrier_data() != ZLoadBarrierElided) {
-      z_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, rscratch2 /* tmp */, false /* weak */);
-    }
+    z_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, rscratch2 /* tmp */, barrier_data());
   %}

   ins_pipe(pipe_serial);
@@ -250,7 +227,7 @@ instruct zGetAndSetP(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{

 instruct zGetAndSetPAcq(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{
   match(Set prev (GetAndSetP mem newv));
-  predicate(UseZGC && needs_acquiring_load_exclusive(n) && (n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong));
+  predicate(UseZGC && needs_acquiring_load_exclusive(n) && (n->as_LoadStore()->barrier_data() != 0));
   effect(TEMP_DEF prev, KILL cr);

   ins_cost(VOLATILE_REF_COST);
@@ -259,9 +236,7 @@ instruct zGetAndSetPAcq(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr)

   ins_encode %{
     __ atomic_xchgal($prev$$Register, $newv$$Register, $mem$$Register);
-    if (barrier_data() != ZLoadBarrierElided) {
-      z_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, rscratch2 /* tmp */, false /* weak */);
-    }
+    z_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, rscratch2 /* tmp */, barrier_data());
   %}
   ins_pipe(pipe_serial);
 %}
@@ -32,8 +32,11 @@ source %{

 #include "c2_intelJccErratum_x86.hpp"

-static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, bool weak) {
-  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, weak);
+static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, int barrier_data) {
+  if (barrier_data == ZLoadBarrierElided) {
+    return; // Elided.
+  }
+  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, barrier_data);
   {
     IntelJccErratumAlignment intel_alignment(_masm, 10 /* jcc_size */);
     __ testptr(ref, Address(r15_thread, ZThreadLocalData::address_bad_mask_offset()));
@@ -43,7 +46,7 @@ static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address
 }

 static void z_load_barrier_cmpxchg(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, Label& good) {
-  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, false /* weak */);
+  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, ZLoadBarrierStrong);
   {
     IntelJccErratumAlignment intel_alignment(_masm, 10 /* jcc_size */);
     __ testptr(ref, Address(r15_thread, ZThreadLocalData::address_bad_mask_offset()));
@@ -61,7 +64,7 @@ static void z_load_barrier_cmpxchg(MacroAssembler& _masm, const MachNode* node,
 // Load Pointer
 instruct zLoadP(rRegP dst, memory mem, rFlagsReg cr)
 %{
-  predicate(UseZGC && n->as_Load()->barrier_data() == ZLoadBarrierStrong);
+  predicate(UseZGC && n->as_Load()->barrier_data() != 0);
   match(Set dst (LoadP mem));
   effect(KILL cr, TEMP dst);

@@ -71,28 +74,7 @@ instruct zLoadP(rRegP dst, memory mem, rFlagsReg cr)

   ins_encode %{
     __ movptr($dst$$Register, $mem$$Address);
-    if (barrier_data() != ZLoadBarrierElided) {
-      z_load_barrier(_masm, this, $mem$$Address, $dst$$Register, noreg /* tmp */, false /* weak */);
-    }
-  %}
-
-  ins_pipe(ialu_reg_mem);
-%}
-
-// Load Weak Pointer
-instruct zLoadWeakP(rRegP dst, memory mem, rFlagsReg cr)
-%{
-  predicate(UseZGC && n->as_Load()->barrier_data() == ZLoadBarrierWeak);
-  match(Set dst (LoadP mem));
-  effect(KILL cr, TEMP dst);
-
-  ins_cost(125);
-
-  format %{ "movq $dst, $mem" %}
-
-  ins_encode %{
-    __ movptr($dst$$Register, $mem$$Address);
-    z_load_barrier(_masm, this, $mem$$Address, $dst$$Register, noreg /* tmp */, true /* weak */);
+    z_load_barrier(_masm, this, $mem$$Address, $dst$$Register, noreg /* tmp */, barrier_data());
   %}

   ins_pipe(ialu_reg_mem);
@@ -107,11 +89,12 @@ instruct zCompareAndExchangeP(memory mem, rax_RegP oldval, rRegP newval, rRegP t
            "cmpxchgq $newval, $mem" %}

   ins_encode %{
-    if (barrier_data() != ZLoadBarrierElided) {
+    if (barrier_data() != ZLoadBarrierElided) { // barrier could be elided by ZBarrierSetC2::analyze_dominating_barriers()
       __ movptr($tmp$$Register, $oldval$$Register);
     }
     __ lock();
     __ cmpxchgptr($newval$$Register, $mem$$Address);

     if (barrier_data() != ZLoadBarrierElided) {
       Label good;
       z_load_barrier_cmpxchg(_masm, this, $mem$$Address, $oldval$$Register, $tmp$$Register, good);
@@ -137,11 +120,12 @@ instruct zCompareAndSwapP(rRegI res, memory mem, rRegP newval, rRegP tmp, rFlags
            "movzbl $res, $res" %}

   ins_encode %{
-    if (barrier_data() != ZLoadBarrierElided) {
+    if (barrier_data() != ZLoadBarrierElided) { // barrier could be elided by ZBarrierSetC2::analyze_dominating_barriers()
       __ movptr($tmp$$Register, $oldval$$Register);
     }
     __ lock();
     __ cmpxchgptr($newval$$Register, $mem$$Address);

     if (barrier_data() != ZLoadBarrierElided) {
       Label good;
       z_load_barrier_cmpxchg(_masm, this, $mem$$Address, $oldval$$Register, $tmp$$Register, good);
@@ -160,16 +144,14 @@ instruct zCompareAndSwapP(rRegI res, memory mem, rRegP newval, rRegP tmp, rFlags

 instruct zXChgP(memory mem, rRegP newval, rFlagsReg cr) %{
   match(Set newval (GetAndSetP mem newval));
-  predicate(UseZGC && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
+  predicate(UseZGC && n->as_LoadStore()->barrier_data() != 0);
   effect(KILL cr);

   format %{ "xchgq $newval, $mem" %}

   ins_encode %{
     __ xchgptr($newval$$Register, $mem$$Address);
-    if (barrier_data() != ZLoadBarrierElided) {
-      z_load_barrier(_masm, this, Address(noreg, 0), $newval$$Register, noreg /* tmp */, false /* weak */);
-    }
+    z_load_barrier(_masm, this, Address(noreg, 0), $newval$$Register, noreg /* tmp */, barrier_data());
   %}

   ins_pipe(pipe_cmpxchg);
@@ -390,6 +390,8 @@ class methodHandle;
                                                                             \
   /* java/lang/ref/Reference */                                             \
   do_intrinsic(_Reference_get, java_lang_ref_Reference, get_name, void_object_signature, F_R) \
+  do_intrinsic(_Reference_refersTo0, java_lang_ref_Reference, refersTo0_name, object_boolean_signature, F_R) \
+  do_intrinsic(_PhantomReference_refersTo0, java_lang_ref_PhantomReference, refersTo0_name, object_boolean_signature, F_R) \
                                                                             \
   /* support for com.sun.crypto.provider.AESCrypt and some of its callers */ \
   do_class(com_sun_crypto_provider_aescrypt, "com/sun/crypto/provider/AESCrypt") \
@@ -394,6 +394,7 @@
   template(dispatchUncaughtException_name, "dispatchUncaughtException") \
   template(loadClass_name, "loadClass")                                 \
   template(get_name, "get")                                             \
+  template(refersTo0_name, "refersTo0")                                 \
   template(put_name, "put")                                             \
   template(type_name, "type")                                           \
   template(findNative_name, "findNative")                               \
@@ -604,7 +604,9 @@ Node* G1BarrierSetC2::load_at_resolved(C2Access& access, const Type* val_type) c
   bool in_heap = (decorators & IN_HEAP) != 0;
   bool in_native = (decorators & IN_NATIVE) != 0;
   bool on_weak = (decorators & ON_WEAK_OOP_REF) != 0;
+  bool on_phantom = (decorators & ON_PHANTOM_OOP_REF) != 0;
   bool is_unordered = (decorators & MO_UNORDERED) != 0;
+  bool no_keepalive = (decorators & AS_NO_KEEPALIVE) != 0;
   bool is_mixed = !in_heap && !in_native;
   bool need_cpu_mem_bar = !is_unordered || mismatched || is_mixed;

@@ -618,8 +620,8 @@ Node* G1BarrierSetC2::load_at_resolved(C2Access& access, const Type* val_type) c
   // SATB log buffer using the pre-barrier mechanism.
   // Also we need to add memory barrier to prevent commoning reads
   // from this field across safepoint since GC can change its value.
-  bool need_read_barrier = in_heap && (on_weak ||
-                                       (unknown && offset != top && obj != top));
+  bool need_read_barrier = (((on_weak || on_phantom) && !no_keepalive) ||
+                            (in_heap && unknown && offset != top && obj != top));

   if (!access.is_oop() || !need_read_barrier) {
     return load;
@@ -629,7 +631,7 @@ Node* G1BarrierSetC2::load_at_resolved(C2Access& access, const Type* val_type) c
   C2ParseAccess& parse_access = static_cast<C2ParseAccess&>(access);
   GraphKit* kit = parse_access.kit();

-  if (on_weak) {
+  if (on_weak || on_phantom) {
     // Use the pre-barrier to record the value in the referent field
     pre_barrier(kit, false /* do_load */,
                 kit->control(),
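The G1 change above widens the SATB pre-barrier condition: a weak or phantom load now needs the read barrier unless the access is marked AS_NO_KEEPALIVE (refersTo0's case), while the old in-heap unknown-reference case is unchanged. A hedged Java restatement of the predicate, for illustration only:

public class G1ReadBarrierPredicateExample {
    // Illustrative mirror of the updated need_read_barrier expression; the
    // boolean parameters correspond to the decorator tests in the hunk above.
    static boolean needReadBarrier(boolean onWeak, boolean onPhantom, boolean noKeepalive,
                                   boolean inHeap, boolean unknown,
                                   boolean offsetNotTop, boolean objNotTop) {
        return ((onWeak || onPhantom) && !noKeepalive)
                || (inHeap && unknown && offsetNotTop && objNotTop);
    }

    public static void main(String[] args) {
        // refersTo0: phantom load with AS_NO_KEEPALIVE -> no SATB read barrier.
        System.out.println(needReadBarrier(false, true, true, true, false, true, true));  // false
        // Reference.get: weak load that keeps the referent alive -> barrier needed.
        System.out.println(needReadBarrier(true, false, false, true, false, true, true)); // true
    }
}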
@@ -996,9 +996,13 @@ void ShenandoahBarrierC2Support::call_lrb_stub(Node*& ctrl, Node*& val, Node* lo
     }
   } else {
     assert(is_phantom, "only remaining strength");
-    assert(!is_narrow, "phantom access cannot be narrow");
-    calladdr = CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_phantom);
-    name = "load_reference_barrier_phantom";
+    if (is_narrow) {
+      calladdr = CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_phantom_narrow);
+      name = "load_reference_barrier_phantom_narrow";
+    } else {
+      calladdr = CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_phantom);
+      name = "load_reference_barrier_phantom";
+    }
   }
   Node* call = new CallLeafNode(ShenandoahBarrierSetC2::shenandoah_load_reference_barrier_Type(), calladdr, name, TypeRawPtr::BOTTOM);
@@ -77,3 +77,7 @@ JRT_END
 JRT_LEAF(oopDesc*, ShenandoahRuntime::load_reference_barrier_phantom(oopDesc * src, oop* load_addr))
   return (oopDesc*) ShenandoahBarrierSet::barrier_set()->load_reference_barrier<ON_PHANTOM_OOP_REF, oop>(oop(src), load_addr);
 JRT_END
+
+JRT_LEAF(oopDesc*, ShenandoahRuntime::load_reference_barrier_phantom_narrow(oopDesc * src, narrowOop* load_addr))
+  return (oopDesc*) ShenandoahBarrierSet::barrier_set()->load_reference_barrier<ON_PHANTOM_OOP_REF, narrowOop>(oop(src), load_addr);
+JRT_END
@@ -45,6 +45,7 @@ public:
   static oopDesc* load_reference_barrier_weak_narrow(oopDesc* src, narrowOop* load_addr);

   static oopDesc* load_reference_barrier_phantom(oopDesc* src, oop* load_addr);
+  static oopDesc* load_reference_barrier_phantom_narrow(oopDesc* src, narrowOop* load_addr);

   static void shenandoah_clone_barrier(oopDesc* src);
 };
@@ -64,8 +64,7 @@ public:
   }

   const MachNode* const mach = node->as_Mach();
-  if (mach->barrier_data() != ZLoadBarrierStrong &&
-      mach->barrier_data() != ZLoadBarrierWeak) {
+  if (mach->barrier_data() == ZLoadBarrierElided) {
     // Don't need liveness data for nodes without barriers
     return NULL;
   }
@@ -84,8 +83,8 @@ static ZBarrierSetC2State* barrier_set_state() {
   return reinterpret_cast<ZBarrierSetC2State*>(Compile::current()->barrier_set_state());
 }

-ZLoadBarrierStubC2* ZLoadBarrierStubC2::create(const MachNode* node, Address ref_addr, Register ref, Register tmp, bool weak) {
-  ZLoadBarrierStubC2* const stub = new (Compile::current()->comp_arena()) ZLoadBarrierStubC2(node, ref_addr, ref, tmp, weak);
+ZLoadBarrierStubC2* ZLoadBarrierStubC2::create(const MachNode* node, Address ref_addr, Register ref, Register tmp, int barrier_data) {
+  ZLoadBarrierStubC2* const stub = new (Compile::current()->comp_arena()) ZLoadBarrierStubC2(node, ref_addr, ref, tmp, barrier_data);
   if (!Compile::current()->output()->in_scratch_emit_size()) {
     barrier_set_state()->stubs()->append(stub);
   }
@@ -93,12 +92,12 @@ ZLoadBarrierStubC2* ZLoadBarrierStubC2::create(const MachNode* node, Address ref
   return stub;
 }

-ZLoadBarrierStubC2::ZLoadBarrierStubC2(const MachNode* node, Address ref_addr, Register ref, Register tmp, bool weak) :
+ZLoadBarrierStubC2::ZLoadBarrierStubC2(const MachNode* node, Address ref_addr, Register ref, Register tmp, int barrier_data) :
     _node(node),
     _ref_addr(ref_addr),
     _ref(ref),
     _tmp(tmp),
-    _weak(weak),
+    _barrier_data(barrier_data),
     _entry(),
     _continuation() {
   assert_different_registers(ref, ref_addr.base());
@@ -118,7 +117,19 @@ Register ZLoadBarrierStubC2::tmp() const {
 }

 address ZLoadBarrierStubC2::slow_path() const {
-  const DecoratorSet decorators = _weak ? ON_WEAK_OOP_REF : ON_STRONG_OOP_REF;
+  DecoratorSet decorators = DECORATORS_NONE;
+  if (_barrier_data & ZLoadBarrierStrong) {
+    decorators |= ON_STRONG_OOP_REF;
+  }
+  if (_barrier_data & ZLoadBarrierWeak) {
+    decorators |= ON_WEAK_OOP_REF;
+  }
+  if (_barrier_data & ZLoadBarrierPhantom) {
+    decorators |= ON_PHANTOM_OOP_REF;
+  }
+  if (_barrier_data & ZLoadBarrierNoKeepalive) {
+    decorators |= AS_NO_KEEPALIVE;
+  }
   return ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators);
 }

@@ -318,10 +329,18 @@ void ZBarrierSetC2::analyze_dominating_barriers() const {
     MachNode* const mach = node->as_Mach();
     switch (mach->ideal_Opcode()) {
       case Op_LoadP:
+        if ((mach->barrier_data() & ZLoadBarrierStrong) != 0) {
+          barrier_loads.push(mach);
+        }
+        if ((mach->barrier_data() & (ZLoadBarrierStrong | ZLoadBarrierNoKeepalive)) ==
+            ZLoadBarrierStrong) {
+          mem_ops.push(mach);
+        }
+        break;
       case Op_CompareAndExchangeP:
       case Op_CompareAndSwapP:
       case Op_GetAndSetP:
-        if (mach->barrier_data() == ZLoadBarrierStrong) {
+        if ((mach->barrier_data() & ZLoadBarrierStrong) != 0) {
           barrier_loads.push(mach);
         }
       case Op_StoreP:
@@ -29,9 +29,11 @@
 #include "opto/node.hpp"
 #include "utilities/growableArray.hpp"

-const uint8_t ZLoadBarrierStrong = 1;
-const uint8_t ZLoadBarrierWeak = 2;
-const uint8_t ZLoadBarrierElided = 3;
+const uint8_t ZLoadBarrierElided = 0;
+const uint8_t ZLoadBarrierStrong = 1;
+const uint8_t ZLoadBarrierWeak = 2;
+const uint8_t ZLoadBarrierPhantom = 4;
+const uint8_t ZLoadBarrierNoKeepalive = 8;

 class ZLoadBarrierStubC2 : public ResourceObj {
 private:
@@ -39,14 +41,14 @@ private:
   const Address _ref_addr;
   const Register _ref;
   const Register _tmp;
-  const bool _weak;
+  const int _barrier_data;
   Label _entry;
   Label _continuation;

-  ZLoadBarrierStubC2(const MachNode* node, Address ref_addr, Register ref, Register tmp, bool weak);
+  ZLoadBarrierStubC2(const MachNode* node, Address ref_addr, Register ref, Register tmp, int barrier_data);

 public:
-  static ZLoadBarrierStubC2* create(const MachNode* node, Address ref_addr, Register ref, Register tmp, bool weak);
+  static ZLoadBarrierStubC2* create(const MachNode* node, Address ref_addr, Register ref, Register tmp, int barrier_data);

   Address ref_addr() const;
   Register ref() const;
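Note the encoding change above: barrier_data used to be three mutually exclusive values (with Elided = 3) and is now a bitmask with Elided = 0, so strength bits and the new NoKeepalive bit can be combined, and "any barrier" reduces to `!= 0` in the .ad predicates. A hypothetical Java mirror of the flags, purely for illustration:

public class ZBarrierDataExample {
    // Mirrors the C++ constants in the hunk above (illustrative, not JDK API).
    static final int ELIDED       = 0; // no bits set: barrier removed entirely
    static final int STRONG       = 1;
    static final int WEAK         = 2;
    static final int PHANTOM      = 4;
    static final int NO_KEEPALIVE = 8;

    public static void main(String[] args) {
        // PhantomReference.refersTo0 loads the referent without keeping it alive:
        int barrierData = PHANTOM | NO_KEEPALIVE;
        System.out.println(barrierData != ELIDED);             // true: a barrier is needed
        System.out.println((barrierData & NO_KEEPALIVE) != 0); // true: referent not marked live
        System.out.println((barrierData & STRONG) != 0);       // false: not a strong load
    }
}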
@@ -55,7 +55,6 @@ ZBarrierSetAssembler* ZBarrierSet::assembler() {

 bool ZBarrierSet::barrier_needed(DecoratorSet decorators, BasicType type) {
   assert((decorators & AS_RAW) == 0, "Unexpected decorator");
-  assert((decorators & AS_NO_KEEPALIVE) == 0, "Unexpected decorator");
   //assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Unexpected decorator");

   if (is_reference_type(type)) {
@@ -30,6 +30,18 @@ JRT_LEAF(oopDesc*, ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded(oopDe
   return ZBarrier::load_barrier_on_oop_field_preloaded(p, o);
 JRT_END

+JRT_LEAF(oopDesc*, ZBarrierSetRuntime::weak_load_barrier_on_oop_field_preloaded(oopDesc* o, oop* p))
+  return ZBarrier::weak_load_barrier_on_oop_field_preloaded(p, o);
+JRT_END
+
+JRT_LEAF(oopDesc*, ZBarrierSetRuntime::weak_load_barrier_on_weak_oop_field_preloaded(oopDesc* o, oop* p))
+  return ZBarrier::weak_load_barrier_on_weak_oop_field_preloaded(p, o);
+JRT_END
+
+JRT_LEAF(oopDesc*, ZBarrierSetRuntime::weak_load_barrier_on_phantom_oop_field_preloaded(oopDesc* o, oop* p))
+  return ZBarrier::weak_load_barrier_on_phantom_oop_field_preloaded(p, o);
+JRT_END
+
 JRT_LEAF(oopDesc*, ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded(oopDesc* o, oop* p))
   return ZBarrier::load_barrier_on_weak_oop_field_preloaded(p, o);
 JRT_END
@@ -48,11 +60,23 @@ JRT_END

 address ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(DecoratorSet decorators) {
   if (decorators & ON_PHANTOM_OOP_REF) {
-    return load_barrier_on_phantom_oop_field_preloaded_addr();
+    if (decorators & AS_NO_KEEPALIVE) {
+      return weak_load_barrier_on_phantom_oop_field_preloaded_addr();
+    } else {
+      return load_barrier_on_phantom_oop_field_preloaded_addr();
+    }
   } else if (decorators & ON_WEAK_OOP_REF) {
-    return load_barrier_on_weak_oop_field_preloaded_addr();
+    if (decorators & AS_NO_KEEPALIVE) {
+      return weak_load_barrier_on_weak_oop_field_preloaded_addr();
+    } else {
+      return load_barrier_on_weak_oop_field_preloaded_addr();
+    }
   } else {
-    return load_barrier_on_oop_field_preloaded_addr();
+    if (decorators & AS_NO_KEEPALIVE) {
+      return weak_load_barrier_on_oop_field_preloaded_addr();
+    } else {
+      return load_barrier_on_oop_field_preloaded_addr();
+    }
   }
 }

@@ -68,6 +92,18 @@ address ZBarrierSetRuntime::load_barrier_on_phantom_oop_field_preloaded_addr() {
   return reinterpret_cast<address>(load_barrier_on_phantom_oop_field_preloaded);
 }

+address ZBarrierSetRuntime::weak_load_barrier_on_oop_field_preloaded_addr() {
+  return reinterpret_cast<address>(weak_load_barrier_on_oop_field_preloaded);
+}
+
+address ZBarrierSetRuntime::weak_load_barrier_on_weak_oop_field_preloaded_addr() {
+  return reinterpret_cast<address>(weak_load_barrier_on_weak_oop_field_preloaded);
+}
+
+address ZBarrierSetRuntime::weak_load_barrier_on_phantom_oop_field_preloaded_addr() {
+  return reinterpret_cast<address>(weak_load_barrier_on_phantom_oop_field_preloaded);
+}
+
 address ZBarrierSetRuntime::load_barrier_on_oop_array_addr() {
   return reinterpret_cast<address>(load_barrier_on_oop_array);
 }
@@ -35,6 +35,9 @@ private:
   static oopDesc* load_barrier_on_oop_field_preloaded(oopDesc* o, oop* p);
   static oopDesc* load_barrier_on_weak_oop_field_preloaded(oopDesc* o, oop* p);
   static oopDesc* load_barrier_on_phantom_oop_field_preloaded(oopDesc* o, oop* p);
+  static oopDesc* weak_load_barrier_on_oop_field_preloaded(oopDesc* o, oop* p);
+  static oopDesc* weak_load_barrier_on_weak_oop_field_preloaded(oopDesc* o, oop* p);
+  static oopDesc* weak_load_barrier_on_phantom_oop_field_preloaded(oopDesc* o, oop* p);
   static void load_barrier_on_oop_array(oop* p, size_t length);
   static void clone(oopDesc* src, oopDesc* dst, size_t size);

@@ -43,6 +46,9 @@ public:
   static address load_barrier_on_oop_field_preloaded_addr();
   static address load_barrier_on_weak_oop_field_preloaded_addr();
   static address load_barrier_on_phantom_oop_field_preloaded_addr();
+  static address weak_load_barrier_on_oop_field_preloaded_addr();
+  static address weak_load_barrier_on_weak_oop_field_preloaded_addr();
+  static address weak_load_barrier_on_phantom_oop_field_preloaded_addr();
   static address load_barrier_on_oop_array_addr();
   static address clone_addr();
 };
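The slow-path selection above is now a 3x2 table: strength (strong/weak/phantom) crossed with keep-alive semantics, where the weak_load_barrier_* entry points are the no-keepalive variants. A hedged Java sketch of the same dispatch (names mirror the functions above; not a real API):

public class ZSlowPathDispatchExample {
    enum Strength { STRONG, WEAK, PHANTOM }

    // Returns the name of the runtime stub the decorators select, mirroring
    // ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr above.
    static String slowPath(Strength strength, boolean noKeepalive) {
        String prefix = noKeepalive ? "weak_load_barrier_on_" : "load_barrier_on_";
        String field = switch (strength) {
            case STRONG  -> "oop_field_preloaded";
            case WEAK    -> "weak_oop_field_preloaded";
            case PHANTOM -> "phantom_oop_field_preloaded";
        };
        return prefix + field;
    }

    public static void main(String[] args) {
        // PhantomReference.refersTo0 -> ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE:
        System.out.println(slowPath(Strength.PHANTOM, true));
        // Reference.get -> ON_WEAK_OOP_REF with keep-alive:
        System.out.println(slowPath(Strength.WEAK, false));
    }
}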
@@ -614,6 +614,8 @@ bool C2Compiler::is_intrinsic_supported(const methodHandle& method, bool is_virt
   case vmIntrinsics::_doubleToLongBits:
   case vmIntrinsics::_longBitsToDouble:
   case vmIntrinsics::_Reference_get:
+  case vmIntrinsics::_Reference_refersTo0:
+  case vmIntrinsics::_PhantomReference_refersTo0:
   case vmIntrinsics::_Class_cast:
   case vmIntrinsics::_aescrypt_encryptBlock:
   case vmIntrinsics::_aescrypt_decryptBlock:
@@ -522,6 +522,8 @@ bool LibraryCallKit::try_to_inline(int predicate) {
   case vmIntrinsics::_getCallerClass:           return inline_native_Reflection_getCallerClass();

   case vmIntrinsics::_Reference_get:            return inline_reference_get();
+  case vmIntrinsics::_Reference_refersTo0:      return inline_reference_refersTo0(false);
+  case vmIntrinsics::_PhantomReference_refersTo0: return inline_reference_refersTo0(true);

   case vmIntrinsics::_Class_cast:               return inline_Class_cast();

@@ -5273,7 +5275,7 @@ bool LibraryCallKit::inline_updateByteBufferCRC32() {

 //------------------------------get_table_from_crc32c_class-----------------------
 Node * LibraryCallKit::get_table_from_crc32c_class(ciInstanceKlass *crc32c_class) {
-  Node* table = load_field_from_object(NULL, "byteTable", "[I", /*is_exact*/ false, /*is_static*/ true, crc32c_class);
+  Node* table = load_field_from_object(NULL, "byteTable", "[I", /*decorators*/ IN_HEAP, /*is_static*/ true, crc32c_class);
   assert (table != NULL, "wrong version of java.util.zip.CRC32C");

   return table;
@@ -5464,23 +5466,11 @@ bool LibraryCallKit::inline_reference_get() {
   Node* reference_obj = null_check_receiver();
   if (stopped()) return true;

-  const TypeInstPtr* tinst = _gvn.type(reference_obj)->isa_instptr();
-  assert(tinst != NULL, "obj is null");
-  assert(tinst->klass()->is_loaded(), "obj is not loaded");
-  ciInstanceKlass* referenceKlass = tinst->klass()->as_instance_klass();
-  ciField* field = referenceKlass->get_field_by_name(ciSymbol::make("referent"),
-                                                     ciSymbol::make("Ljava/lang/Object;"),
-                                                     false);
-  assert (field != NULL, "undefined field");
-
-  Node* adr = basic_plus_adr(reference_obj, reference_obj, referent_offset);
-  const TypePtr* adr_type = C->alias_type(field)->adr_type();
-
-  ciInstanceKlass* klass = env()->Object_klass();
-  const TypeOopPtr* object_type = TypeOopPtr::make_from_klass(klass);
-
   DecoratorSet decorators = IN_HEAP | ON_WEAK_OOP_REF;
-  Node* result = access_load_at(reference_obj, adr, adr_type, object_type, T_OBJECT, decorators);
+  Node* result = load_field_from_object(reference_obj, "referent", "Ljava/lang/Object;",
+                                        decorators, /*is_static*/ false, NULL);
+  if (result == NULL) return false;

   // Add memory barrier to prevent commoning reads from this field
   // across safepoint since GC can change its value.
   insert_mem_bar(Op_MemBarCPUOrder);
@@ -5489,15 +5479,54 @@ bool LibraryCallKit::inline_reference_get() {
   return true;
 }

+//----------------------------inline_reference_refersTo0----------------------------
+// bool java.lang.ref.Reference.refersTo0();
+// bool java.lang.ref.PhantomReference.refersTo0();
+bool LibraryCallKit::inline_reference_refersTo0(bool is_phantom) {
+  // Get arguments:
+  Node* reference_obj = null_check_receiver();
+  Node* other_obj = argument(1);
+  if (stopped()) return true;
+
-Node * LibraryCallKit::load_field_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString,
-                                              bool is_exact=true, bool is_static=false,
-                                              ciInstanceKlass * fromKls=NULL) {
+  DecoratorSet decorators = IN_HEAP | AS_NO_KEEPALIVE;
+  decorators |= (is_phantom ? ON_PHANTOM_OOP_REF : ON_WEAK_OOP_REF);
+  Node* referent = load_field_from_object(reference_obj, "referent", "Ljava/lang/Object;",
+                                          decorators, /*is_static*/ false, NULL);
+  if (referent == NULL) return false;
+
+  // Add memory barrier to prevent commoning reads from this field
+  // across safepoint since GC can change its value.
+  insert_mem_bar(Op_MemBarCPUOrder);
+
+  Node* cmp = _gvn.transform(new CmpPNode(referent, other_obj));
+  Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::eq));
+  IfNode* if_node = create_and_map_if(control(), bol, PROB_FAIR, COUNT_UNKNOWN);
+
+  RegionNode* region = new RegionNode(3);
+  PhiNode* phi = new PhiNode(region, TypeInt::BOOL);
+
+  Node* if_true = _gvn.transform(new IfTrueNode(if_node));
+  region->init_req(1, if_true);
+  phi->init_req(1, intcon(1));
+
+  Node* if_false = _gvn.transform(new IfFalseNode(if_node));
+  region->init_req(2, if_false);
+  phi->init_req(2, intcon(0));
+
+  set_control(_gvn.transform(region));
+  record_for_igvn(region);
+  set_result(_gvn.transform(phi));
+  return true;
+}
+
+
+Node* LibraryCallKit::load_field_from_object(Node* fromObj, const char* fieldName, const char* fieldTypeString,
+                                             DecoratorSet decorators = IN_HEAP, bool is_static = false,
+                                             ciInstanceKlass* fromKls = NULL) {
   if (fromKls == NULL) {
     const TypeInstPtr* tinst = _gvn.type(fromObj)->isa_instptr();
     assert(tinst != NULL, "obj is null");
     assert(tinst->klass()->is_loaded(), "obj is not loaded");
-    assert(!is_exact || tinst->klass_is_exact(), "klass not exact");
     fromKls = tinst->klass()->as_instance_klass();
   } else {
     assert(is_static, "only for static field access");
@@ -5533,8 +5562,6 @@ Node * LibraryCallKit::load_field_from_object(Node * fromObj, const char * field
     type = Type::get_const_basic_type(bt);
   }

-  DecoratorSet decorators = IN_HEAP;
-
   if (is_vol) {
     decorators |= MO_SEQ_CST;
   }
@@ -5688,7 +5715,7 @@ bool LibraryCallKit::inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id) {
   // so we cast it here safely.
   // this requires a newer class file that has this array as littleEndian ints, otherwise we revert to java

-  Node* embeddedCipherObj = load_field_from_object(cipherBlockChaining_object, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;", /*is_exact*/ false);
+  Node* embeddedCipherObj = load_field_from_object(cipherBlockChaining_object, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;");
   if (embeddedCipherObj == NULL) return false;

   // cast it to what we know it will be at runtime
@@ -5709,7 +5736,7 @@ bool LibraryCallKit::inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id) {
   if (k_start == NULL) return false;

   // similarly, get the start address of the r vector
-  Node* objRvec = load_field_from_object(cipherBlockChaining_object, "r", "[B", /*is_exact*/ false);
+  Node* objRvec = load_field_from_object(cipherBlockChaining_object, "r", "[B");
   if (objRvec == NULL) return false;
   Node* r_start = array_element_address(objRvec, intcon(0), T_BYTE);

@@ -5776,7 +5803,7 @@ bool LibraryCallKit::inline_electronicCodeBook_AESCrypt(vmIntrinsics::ID id) {
   // so we cast it here safely.
   // this requires a newer class file that has this array as littleEndian ints, otherwise we revert to java

-  Node* embeddedCipherObj = load_field_from_object(electronicCodeBook_object, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;", /*is_exact*/ false);
+  Node* embeddedCipherObj = load_field_from_object(electronicCodeBook_object, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;");
   if (embeddedCipherObj == NULL) return false;

   // cast it to what we know it will be at runtime
@@ -5849,7 +5876,7 @@ bool LibraryCallKit::inline_counterMode_AESCrypt(vmIntrinsics::ID id) {
   // (because of the predicated logic executed earlier).
   // so we cast it here safely.
   // this requires a newer class file that has this array as littleEndian ints, otherwise we revert to java
-  Node* embeddedCipherObj = load_field_from_object(counterMode_object, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;", /*is_exact*/ false);
+  Node* embeddedCipherObj = load_field_from_object(counterMode_object, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;");
   if (embeddedCipherObj == NULL) return false;
   // cast it to what we know it will be at runtime
   const TypeInstPtr* tinst = _gvn.type(counterMode_object)->isa_instptr();
@@ -5866,11 +5893,11 @@ bool LibraryCallKit::inline_counterMode_AESCrypt(vmIntrinsics::ID id) {
   Node* k_start = get_key_start_from_aescrypt_object(aescrypt_object);
   if (k_start == NULL) return false;
   // similarly, get the start address of the r vector
-  Node* obj_counter = load_field_from_object(counterMode_object, "counter", "[B", /*is_exact*/ false);
+  Node* obj_counter = load_field_from_object(counterMode_object, "counter", "[B");
   if (obj_counter == NULL) return false;
   Node* cnt_start = array_element_address(obj_counter, intcon(0), T_BYTE);

-  Node* saved_encCounter = load_field_from_object(counterMode_object, "encryptedCounter", "[B", /*is_exact*/ false);
+  Node* saved_encCounter = load_field_from_object(counterMode_object, "encryptedCounter", "[B");
   if (saved_encCounter == NULL) return false;
   Node* saved_encCounter_start = array_element_address(saved_encCounter, intcon(0), T_BYTE);
   Node* used = field_address_from_object(counterMode_object, "used", "I", /*is_exact*/ false);
@@ -5894,14 +5921,14 @@ Node * LibraryCallKit::get_key_start_from_aescrypt_object(Node *aescrypt_object)
   // Intel's extention is based on this optimization and AESCrypt generates round keys by preprocessing MixColumns.
   // However, ppc64 vncipher processes MixColumns and requires the same round keys with encryption.
   // The ppc64 stubs of encryption and decryption use the same round keys (sessionK[0]).
-  Node* objSessionK = load_field_from_object(aescrypt_object, "sessionK", "[[I", /*is_exact*/ false);
+  Node* objSessionK = load_field_from_object(aescrypt_object, "sessionK", "[[I");
   assert (objSessionK != NULL, "wrong version of com.sun.crypto.provider.AESCrypt");
   if (objSessionK == NULL) {
     return (Node *) NULL;
   }
   Node* objAESCryptKey = load_array_element(control(), objSessionK, intcon(0), TypeAryPtr::OOPS);
 #else
-  Node* objAESCryptKey = load_field_from_object(aescrypt_object, "K", "[I", /*is_exact*/ false);
+  Node* objAESCryptKey = load_field_from_object(aescrypt_object, "K", "[I");
 #endif // PPC64
   assert (objAESCryptKey != NULL, "wrong version of com.sun.crypto.provider.AESCrypt");
   if (objAESCryptKey == NULL) return (Node *) NULL;
@@ -5928,7 +5955,7 @@ Node* LibraryCallKit::inline_cipherBlockChaining_AESCrypt_predicate(bool decrypt
   Node* dest = argument(4);

   // Load embeddedCipher field of CipherBlockChaining object.
-  Node* embeddedCipherObj = load_field_from_object(objCBC, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;", /*is_exact*/ false);
+  Node* embeddedCipherObj = load_field_from_object(objCBC, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;");

   // get AESCrypt klass for instanceOf check
   // AESCrypt might not be loaded yet if some other SymmetricCipher got us to this compile point
@@ -5991,7 +6018,7 @@ Node* LibraryCallKit::inline_electronicCodeBook_AESCrypt_predicate(bool decrypti
   Node* objECB = argument(0);

   // Load embeddedCipher field of ElectronicCodeBook object.
-  Node* embeddedCipherObj = load_field_from_object(objECB, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;", /*is_exact*/ false);
+  Node* embeddedCipherObj = load_field_from_object(objECB, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;");

   // get AESCrypt klass for instanceOf check
   // AESCrypt might not be loaded yet if some other SymmetricCipher got us to this compile point
@@ -6051,7 +6078,7 @@ Node* LibraryCallKit::inline_counterMode_AESCrypt_predicate() {
   Node* objCTR = argument(0);

   // Load embeddedCipher field of CipherBlockChaining object.
-  Node* embeddedCipherObj = load_field_from_object(objCTR, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;", /*is_exact*/ false);
+  Node* embeddedCipherObj = load_field_from_object(objCTR, "embeddedCipher", "Lcom/sun/crypto/provider/SymmetricCipher;");

   // get AESCrypt klass for instanceOf check
   // AESCrypt might not be loaded yet if some other SymmetricCipher got us to this compile point
@@ -6412,7 +6439,7 @@ bool LibraryCallKit::inline_digestBase_implCompressMB(Node* digestBase_obj, ciIn

 //------------------------------get_state_from_digest_object-----------------------
 Node * LibraryCallKit::get_state_from_digest_object(Node *digest_object, const char *state_type) {
-  Node* digest_state = load_field_from_object(digest_object, "state", state_type, /*is_exact*/ false);
+  Node* digest_state = load_field_from_object(digest_object, "state", state_type);
   assert (digest_state != NULL, "wrong version of sun.security.provider.MD5/SHA/SHA2/SHA5/SHA3");
   if (digest_state == NULL) return (Node *) NULL;

@@ -6423,7 +6450,7 @@ Node * LibraryCallKit::get_state_from_digest_object(Node *digest_object, const c

 //------------------------------get_digest_length_from_sha3_object----------------------------------
 Node * LibraryCallKit::get_digest_length_from_digest_object(Node *digest_object) {
-  Node* digest_length = load_field_from_object(digest_object, "digestLength", "I", /*is_exact*/ false);
+  Node* digest_length = load_field_from_object(digest_object, "digestLength", "I");
   assert (digest_length != NULL, "sanity");
   return digest_length;
 }
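Ignoring the GC load barrier and the Op_MemBarCPUOrder fence (which have no source-level analogue), the graph built by inline_reference_refersTo0 reduces to a pointer-identity test merged through a boolean phi. A trivial Java-level equivalent of the value it computes:

public class RefersToGraphExample {
    // The If/Region/Phi diamond built by the intrinsic computes exactly this:
    // intcon(1) on the taken branch of (referent == other), intcon(0) otherwise.
    static boolean refersTo0Equivalent(Object referent, Object other) {
        return referent == other;
    }

    public static void main(String[] args) {
        Object o = new Object();
        System.out.println(refersTo0Equivalent(o, o));            // true
        System.out.println(refersTo0Equivalent(o, new Object())); // false
        System.out.println(refersTo0Equivalent(null, null));      // true (cleared reference)
    }
}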
@@ -181,8 +181,8 @@ class LibraryCallKit : public GraphKit {
   CallJavaNode* generate_method_call_virtual(vmIntrinsics::ID method_id) {
     return generate_method_call(method_id, true, false);
   }
-  Node * load_field_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString, bool is_exact, bool is_static, ciInstanceKlass * fromKls);
-  Node * field_address_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString, bool is_exact, bool is_static, ciInstanceKlass * fromKls);
+  Node* load_field_from_object(Node* fromObj, const char* fieldName, const char* fieldTypeString, DecoratorSet decorators, bool is_static, ciInstanceKlass* fromKls);
+  Node* field_address_from_object(Node* fromObj, const char* fieldName, const char* fieldTypeString, bool is_exact, bool is_static, ciInstanceKlass* fromKls);

   Node* make_string_method_node(int opcode, Node* str1_start, Node* cnt1, Node* str2_start, Node* cnt2, StrIntrinsicNode::ArgEnc ae);
   bool inline_string_compareTo(StrIntrinsicNode::ArgEnc ae);
@@ -264,6 +264,7 @@ class LibraryCallKit : public GraphKit {
   bool inline_fp_conversions(vmIntrinsics::ID id);
   bool inline_number_methods(vmIntrinsics::ID id);
   bool inline_reference_get();
+  bool inline_reference_refersTo0(bool is_phantom);
   bool inline_Class_cast();
   bool inline_aescrypt_Block(vmIntrinsics::ID id);
   bool inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id);
@@ -215,7 +215,7 @@ public:
   virtual uint mach_constant_base_node_input() const { return (uint)-1; }

   uint8_t barrier_data() const { return _barrier; }
-  void set_barrier_data(uint data) { _barrier = data; }
+  void set_barrier_data(uint8_t data) { _barrier = data; }

   // Copy inputs and operands to new node of instruction.
   // Called from cisc_version() and short_branch_version().
@@ -25,6 +25,7 @@

 package java.lang.ref;

+import jdk.internal.vm.annotation.IntrinsicCandidate;

 /**
  * Phantom reference objects, which are enqueued after the collector
@@ -66,6 +67,7 @@ public class PhantomReference<T> extends Reference<T> {
      * do reference processing concurrently.
      */
     @Override
+    @IntrinsicCandidate
     native final boolean refersTo0(Object o);

     /**
@@ -359,6 +359,7 @@ public abstract class Reference<T> {

     /* Implementation of refersTo(), overridden for phantom references.
      */
+    @IntrinsicCandidate
     native boolean refersTo0(Object o);

     /**
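On the Java side the pairing is plain delegation: the public refersTo method calls the package-private refersTo0, which is native and now @IntrinsicCandidate so C2 can replace the call with the graph above. A simplified, hypothetical mirror of that pattern (not the real java.lang.ref.Reference; a field access stands in for the native slot):

class ReferenceSketch<T> {
    private final T referent;

    ReferenceSketch(T referent) { this.referent = referent; }

    public final boolean refersTo(T obj) {
        return refersTo0(obj); // the real method is @IntrinsicCandidate native
    }

    // Stands in for the native refersTo0 slot that C2 intrinsifies.
    boolean refersTo0(Object o) {
        return referent == o;
    }

    public static void main(String[] args) {
        Object o = new Object();
        System.out.println(new ReferenceSketch<>(o).refersTo(o)); // true
    }
}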
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 8256377
+ * @summary Based on test/jdk/java/lang/ref/ReferenceRefersTo.java.
+ */
+
+import java.lang.ref.Reference;
+import java.lang.ref.ReferenceQueue;
+import java.lang.ref.PhantomReference;
+import java.lang.ref.SoftReference;
+import java.lang.ref.WeakReference;
+
+public class TestReferenceRefersTo {
+    private static final void fail(String msg) throws Exception {
+        throw new RuntimeException(msg);
+    }
+
+    // Test java.lang.ref.Reference::refersTo0 intrinsic.
+    private static final void test(Reference ref,
+                                   Object expectedValue,
+                                   Object unexpectedValue,
+                                   String kind) throws Exception {
+        if ((expectedValue != null) && ref.refersTo(null)) {
+            fail(kind + " refers to null");
+        }
+        if (!ref.refersTo(expectedValue)) {
+            fail(kind + " doesn't refer to expected value");
+        }
+        if (ref.refersTo(unexpectedValue)) {
+            fail(kind + " refers to unexpected value");
+        }
+    }
+
+    // Test java.lang.ref.PhantomReference::refersTo0 intrinsic.
+    private static final void test_phantom(PhantomReference ref,
+                                           Object expectedValue,
+                                           Object unexpectedValue) throws Exception {
+        String kind = "phantom";
+        if ((expectedValue != null) && ref.refersTo(null)) {
+            fail(kind + " refers to null");
+        }
+        if (!ref.refersTo(expectedValue)) {
+            fail(kind + " doesn't refer to expected value");
+        }
+        if (ref.refersTo(unexpectedValue)) {
+            fail(kind + " refers to unexpected value");
+        }
+    }
+
+    private static final void test_weak(WeakReference ref,
+                                        Object expectedValue,
+                                        Object unexpectedValue) throws Exception {
+        test(ref, expectedValue, unexpectedValue, "weak");
+    }
+
+    private static final void test_soft(SoftReference ref,
+                                        Object expectedValue,
+                                        Object unexpectedValue) throws Exception {
+        test(ref, expectedValue, unexpectedValue, "soft");
+    }
+
+    public static void main(String[] args) throws Exception {
+        var queue = new ReferenceQueue<Object>();
+
+        var obj0 = new Object();
+        var obj1 = new Object();
+        var obj2 = new Object();
+        var obj3 = new Object();
+
+        var pref = new PhantomReference(obj0, queue);
+        var wref = new WeakReference(obj1);
+        var sref = new SoftReference(obj2);
+
+        System.out.println("Warmup");
+        for (int i = 0; i < 10000; i++) {
+            test_phantom(pref, obj0, obj3);
+            test_weak(wref, obj1, obj3);
+            test_soft(sref, obj2, obj3);
+        }
+
+        System.out.println("Testing starts");
+        test_phantom(pref, obj0, obj3);
+        test_weak(wref, obj1, obj3);
+        test_soft(sref, obj2, obj3);
+
+        System.out.println("Cleaning references");
+        pref.clear();
+        wref.clear();
+        sref.clear();
+
+        System.out.println("Testing after cleaning");
+        test_phantom(pref, null, obj3);
+        test_weak(wref, null, obj3);
+        test_soft(sref, null, obj3);
+    }
+}