8150921: Update Unsafe getters/setters to use double-register variants

Reviewed-by: dholmes, shade, psandoz, jrose
Mikael Vidstedt 2016-05-06 15:59:26 -07:00
parent 0588e8953c
commit fd9f92e18d
7 changed files with 251 additions and 408 deletions
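The "double-register" variants take an Object base plus a long offset; the removed "raw" variants were single-register, taking only a long address. With a NULL base the offset is treated as an absolute off-heap address, so both shapes collapse into one path. A minimal sketch of that addressing rule, assuming it mirrors index_oop_from_field_offset_long in the unsafe.cpp hunks below (effective_address is a hypothetical name, not part of the patch):

#include <cstdint>

// Sketch only: the unified double-register addressing rule.
// HotSpot's actual helper is index_oop_from_field_offset_long (unsafe.cpp).
static inline void* effective_address(void* base, int64_t offset) {
  if (base == nullptr) {
    // Off-heap ("raw") access: the offset is itself the absolute address.
    return reinterpret_cast<void*>(static_cast<uintptr_t>(offset));
  }
  // On-heap access: offset is a byte offset inside the base object.
  return static_cast<char*>(base) + offset;
}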

View File

@@ -198,20 +198,6 @@ bool Compiler::is_intrinsic_supported(const methodHandle& method) {
case vmIntrinsics::_putLongVolatile:
case vmIntrinsics::_putFloatVolatile:
case vmIntrinsics::_putDoubleVolatile:
case vmIntrinsics::_getByte_raw:
case vmIntrinsics::_getShort_raw:
case vmIntrinsics::_getChar_raw:
case vmIntrinsics::_getInt_raw:
case vmIntrinsics::_getLong_raw:
case vmIntrinsics::_getFloat_raw:
case vmIntrinsics::_getDouble_raw:
case vmIntrinsics::_putByte_raw:
case vmIntrinsics::_putShort_raw:
case vmIntrinsics::_putChar_raw:
case vmIntrinsics::_putInt_raw:
case vmIntrinsics::_putLong_raw:
case vmIntrinsics::_putFloat_raw:
case vmIntrinsics::_putDouble_raw:
case vmIntrinsics::_getShortUnaligned:
case vmIntrinsics::_getCharUnaligned:
case vmIntrinsics::_getIntUnaligned:

View File

@@ -3465,20 +3465,6 @@ void GraphBuilder::build_graph_for_intrinsic(ciMethod* callee) {
case vmIntrinsics::_putLongVolatile : append_unsafe_put_obj(callee, T_LONG, true); return;
case vmIntrinsics::_putFloatVolatile : append_unsafe_put_obj(callee, T_FLOAT, true); return;
case vmIntrinsics::_putDoubleVolatile : append_unsafe_put_obj(callee, T_DOUBLE, true); return;
case vmIntrinsics::_getByte_raw : append_unsafe_get_raw(callee, T_BYTE ); return;
case vmIntrinsics::_getShort_raw : append_unsafe_get_raw(callee, T_SHORT ); return;
case vmIntrinsics::_getChar_raw : append_unsafe_get_raw(callee, T_CHAR ); return;
case vmIntrinsics::_getInt_raw : append_unsafe_get_raw(callee, T_INT ); return;
case vmIntrinsics::_getLong_raw : append_unsafe_get_raw(callee, T_LONG ); return;
case vmIntrinsics::_getFloat_raw : append_unsafe_get_raw(callee, T_FLOAT ); return;
case vmIntrinsics::_getDouble_raw : append_unsafe_get_raw(callee, T_DOUBLE); return;
case vmIntrinsics::_putByte_raw : append_unsafe_put_raw(callee, T_BYTE ); return;
case vmIntrinsics::_putShort_raw : append_unsafe_put_raw(callee, T_SHORT ); return;
case vmIntrinsics::_putChar_raw : append_unsafe_put_raw(callee, T_CHAR ); return;
case vmIntrinsics::_putInt_raw : append_unsafe_put_raw(callee, T_INT ); return;
case vmIntrinsics::_putLong_raw : append_unsafe_put_raw(callee, T_LONG ); return;
case vmIntrinsics::_putFloat_raw : append_unsafe_put_raw(callee, T_FLOAT ); return;
case vmIntrinsics::_putDouble_raw : append_unsafe_put_raw(callee, T_DOUBLE); return;
case vmIntrinsics::_compareAndSwapLong:
case vmIntrinsics::_compareAndSwapInt:
case vmIntrinsics::_compareAndSwapObject: append_unsafe_CAS(callee); return;

View File

@@ -578,20 +578,6 @@ bool vmIntrinsics::is_disabled_by_flags(const methodHandle& method) {
case vmIntrinsics::_putLongOpaque:
case vmIntrinsics::_putFloatOpaque:
case vmIntrinsics::_putDoubleOpaque:
case vmIntrinsics::_getByte_raw:
case vmIntrinsics::_getShort_raw:
case vmIntrinsics::_getChar_raw:
case vmIntrinsics::_getInt_raw:
case vmIntrinsics::_getLong_raw:
case vmIntrinsics::_getFloat_raw:
case vmIntrinsics::_getDouble_raw:
case vmIntrinsics::_putByte_raw:
case vmIntrinsics::_putShort_raw:
case vmIntrinsics::_putChar_raw:
case vmIntrinsics::_putInt_raw:
case vmIntrinsics::_putLong_raw:
case vmIntrinsics::_putFloat_raw:
case vmIntrinsics::_putDouble_raw:
case vmIntrinsics::_getAndAddInt:
case vmIntrinsics::_getAndAddLong:
case vmIntrinsics::_getAndSetInt:
@@ -632,8 +618,6 @@ bool vmIntrinsics::is_disabled_by_flags(const methodHandle& method) {
case vmIntrinsics::_putIntUnaligned:
case vmIntrinsics::_putLongUnaligned:
case vmIntrinsics::_allocateInstance:
case vmIntrinsics::_getAddress_raw:
case vmIntrinsics::_putAddress_raw:
if (!InlineUnsafeOps || !UseUnalignedAccesses) return true;
break;
case vmIntrinsics::_hashCode:

View File

@@ -1231,43 +1231,6 @@
do_intrinsic(_putIntUnaligned, jdk_internal_misc_Unsafe, putIntUnaligned_name, putInt_signature, F_R) \
do_intrinsic(_putLongUnaligned, jdk_internal_misc_Unsafe, putLongUnaligned_name, putLong_signature, F_R) \
\
/* %%% these are redundant except perhaps for getAddress, but Unsafe has native methods for them */ \
do_signature(getByte_raw_signature, "(J)B") \
do_signature(putByte_raw_signature, "(JB)V") \
do_signature(getShort_raw_signature, "(J)S") \
do_signature(putShort_raw_signature, "(JS)V") \
do_signature(getChar_raw_signature, "(J)C") \
do_signature(putChar_raw_signature, "(JC)V") \
do_signature(putInt_raw_signature, "(JI)V") \
do_alias(getLong_raw_signature, /*(J)J*/ long_long_signature) \
do_alias(putLong_raw_signature, /*(JJ)V*/ long_long_void_signature) \
do_signature(getFloat_raw_signature, "(J)F") \
do_signature(putFloat_raw_signature, "(JF)V") \
do_alias(getDouble_raw_signature, /*(J)D*/ long_double_signature) \
do_signature(putDouble_raw_signature, "(JD)V") \
do_alias(getAddress_raw_signature, /*(J)J*/ long_long_signature) \
do_alias(putAddress_raw_signature, /*(JJ)V*/ long_long_void_signature) \
\
do_name( getAddress_name, "getAddress") \
do_name( putAddress_name, "putAddress") \
\
do_intrinsic(_getByte_raw, jdk_internal_misc_Unsafe, getByte_name, getByte_raw_signature, F_R) \
do_intrinsic(_getShort_raw, jdk_internal_misc_Unsafe, getShort_name, getShort_raw_signature, F_R) \
do_intrinsic(_getChar_raw, jdk_internal_misc_Unsafe, getChar_name, getChar_raw_signature, F_R) \
do_intrinsic(_getInt_raw, jdk_internal_misc_Unsafe, getInt_name, long_int_signature, F_R) \
do_intrinsic(_getLong_raw, jdk_internal_misc_Unsafe, getLong_name, getLong_raw_signature, F_R) \
do_intrinsic(_getFloat_raw, jdk_internal_misc_Unsafe, getFloat_name, getFloat_raw_signature, F_R) \
do_intrinsic(_getDouble_raw, jdk_internal_misc_Unsafe, getDouble_name, getDouble_raw_signature, F_R) \
do_intrinsic(_getAddress_raw, jdk_internal_misc_Unsafe, getAddress_name, getAddress_raw_signature, F_R) \
do_intrinsic(_putByte_raw, jdk_internal_misc_Unsafe, putByte_name, putByte_raw_signature, F_R) \
do_intrinsic(_putShort_raw, jdk_internal_misc_Unsafe, putShort_name, putShort_raw_signature, F_R) \
do_intrinsic(_putChar_raw, jdk_internal_misc_Unsafe, putChar_name, putChar_raw_signature, F_R) \
do_intrinsic(_putInt_raw, jdk_internal_misc_Unsafe, putInt_name, putInt_raw_signature, F_R) \
do_intrinsic(_putLong_raw, jdk_internal_misc_Unsafe, putLong_name, putLong_raw_signature, F_R) \
do_intrinsic(_putFloat_raw, jdk_internal_misc_Unsafe, putFloat_name, putFloat_raw_signature, F_R) \
do_intrinsic(_putDouble_raw, jdk_internal_misc_Unsafe, putDouble_name, putDouble_raw_signature, F_R) \
do_intrinsic(_putAddress_raw, jdk_internal_misc_Unsafe, putAddress_name, putAddress_raw_signature, F_R) \
\
do_signature(compareAndSwapObject_signature, "(Ljava/lang/Object;JLjava/lang/Object;Ljava/lang/Object;)Z") \
do_signature(compareAndExchangeObject_signature, "(Ljava/lang/Object;JLjava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;") \
do_signature(compareAndSwapLong_signature, "(Ljava/lang/Object;JJJ)Z") \
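A hedged decoding of a few of the removed raw descriptors, alongside the surviving double-register shape (the Java-level signatures are inferred from standard JVM descriptor syntax, not quoted from the patch):

// Removed single-register ("raw") descriptors and their Java-level shape:
//   getByte_raw_signature     "(J)B"   ->  byte getByte(long address)
//   putByte_raw_signature     "(JB)V"  ->  void putByte(long address, byte x)
//   getAddress_raw_signature  "(J)J"   ->  long getAddress(long address)
//   putAddress_raw_signature  "(JJ)V"  ->  void putAddress(long address, long x)
// The surviving double-register entries take an Object base plus a long
// offset, e.g. "(Ljava/lang/Object;J)B" for getByte(Object, long).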

View File

@@ -409,22 +409,6 @@ bool C2Compiler::is_intrinsic_supported(const methodHandle& method, bool is_virt
case vmIntrinsics::_putLong:
case vmIntrinsics::_putFloat:
case vmIntrinsics::_putDouble:
case vmIntrinsics::_getByte_raw:
case vmIntrinsics::_getShort_raw:
case vmIntrinsics::_getChar_raw:
case vmIntrinsics::_getInt_raw:
case vmIntrinsics::_getLong_raw:
case vmIntrinsics::_getFloat_raw:
case vmIntrinsics::_getDouble_raw:
case vmIntrinsics::_getAddress_raw:
case vmIntrinsics::_putByte_raw:
case vmIntrinsics::_putShort_raw:
case vmIntrinsics::_putChar_raw:
case vmIntrinsics::_putInt_raw:
case vmIntrinsics::_putLong_raw:
case vmIntrinsics::_putFloat_raw:
case vmIntrinsics::_putDouble_raw:
case vmIntrinsics::_putAddress_raw:
case vmIntrinsics::_getObjectVolatile:
case vmIntrinsics::_getBooleanVolatile:
case vmIntrinsics::_getByteVolatile:

View File

@@ -93,7 +93,7 @@ class LibraryCallKit : public GraphKit {
Node* _result; // the result node, if any
int _reexecute_sp; // the stack pointer when bytecode needs to be reexecuted
const TypeOopPtr* sharpen_unsafe_type(Compile::AliasType* alias_type, const TypePtr *adr_type, bool is_native_ptr = false);
const TypeOopPtr* sharpen_unsafe_type(Compile::AliasType* alias_type, const TypePtr *adr_type);
public:
LibraryCallKit(JVMState* jvms, LibraryIntrinsic* intrinsic)
@@ -247,7 +247,7 @@ class LibraryCallKit : public GraphKit {
void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
static bool klass_needs_init_guard(Node* kls);
bool inline_unsafe_allocate();
bool inline_unsafe_newArray(bool uninitialized);
@@ -475,7 +475,6 @@ bool LibraryCallKit::try_to_inline(int predicate) {
// Handle symbolic names for otherwise undistinguished boolean switches:
const bool is_store = true;
const bool is_compress = true;
const bool is_native_ptr = true;
const bool is_static = true;
const bool is_volatile = true;
@@ -555,113 +554,95 @@ bool LibraryCallKit::try_to_inline(int predicate) {
case vmIntrinsics::_inflateStringC:
case vmIntrinsics::_inflateStringB: return inline_string_copy(!is_compress);
case vmIntrinsics::_getObject: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, Relaxed, false);
case vmIntrinsics::_getBoolean: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, Relaxed, false);
case vmIntrinsics::_getByte: return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE, Relaxed, false);
case vmIntrinsics::_getShort: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, Relaxed, false);
case vmIntrinsics::_getChar: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, Relaxed, false);
case vmIntrinsics::_getInt: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, Relaxed, false);
case vmIntrinsics::_getLong: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, Relaxed, false);
case vmIntrinsics::_getFloat: return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT, Relaxed, false);
case vmIntrinsics::_getDouble: return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE, Relaxed, false);
case vmIntrinsics::_getObject: return inline_unsafe_access(!is_store, T_OBJECT, Relaxed, false);
case vmIntrinsics::_getBoolean: return inline_unsafe_access(!is_store, T_BOOLEAN, Relaxed, false);
case vmIntrinsics::_getByte: return inline_unsafe_access(!is_store, T_BYTE, Relaxed, false);
case vmIntrinsics::_getShort: return inline_unsafe_access(!is_store, T_SHORT, Relaxed, false);
case vmIntrinsics::_getChar: return inline_unsafe_access(!is_store, T_CHAR, Relaxed, false);
case vmIntrinsics::_getInt: return inline_unsafe_access(!is_store, T_INT, Relaxed, false);
case vmIntrinsics::_getLong: return inline_unsafe_access(!is_store, T_LONG, Relaxed, false);
case vmIntrinsics::_getFloat: return inline_unsafe_access(!is_store, T_FLOAT, Relaxed, false);
case vmIntrinsics::_getDouble: return inline_unsafe_access(!is_store, T_DOUBLE, Relaxed, false);
case vmIntrinsics::_putObject: return inline_unsafe_access(!is_native_ptr, is_store, T_OBJECT, Relaxed, false);
case vmIntrinsics::_putBoolean: return inline_unsafe_access(!is_native_ptr, is_store, T_BOOLEAN, Relaxed, false);
case vmIntrinsics::_putByte: return inline_unsafe_access(!is_native_ptr, is_store, T_BYTE, Relaxed, false);
case vmIntrinsics::_putShort: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, Relaxed, false);
case vmIntrinsics::_putChar: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, Relaxed, false);
case vmIntrinsics::_putInt: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, Relaxed, false);
case vmIntrinsics::_putLong: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, Relaxed, false);
case vmIntrinsics::_putFloat: return inline_unsafe_access(!is_native_ptr, is_store, T_FLOAT, Relaxed, false);
case vmIntrinsics::_putDouble: return inline_unsafe_access(!is_native_ptr, is_store, T_DOUBLE, Relaxed, false);
case vmIntrinsics::_putObject: return inline_unsafe_access( is_store, T_OBJECT, Relaxed, false);
case vmIntrinsics::_putBoolean: return inline_unsafe_access( is_store, T_BOOLEAN, Relaxed, false);
case vmIntrinsics::_putByte: return inline_unsafe_access( is_store, T_BYTE, Relaxed, false);
case vmIntrinsics::_putShort: return inline_unsafe_access( is_store, T_SHORT, Relaxed, false);
case vmIntrinsics::_putChar: return inline_unsafe_access( is_store, T_CHAR, Relaxed, false);
case vmIntrinsics::_putInt: return inline_unsafe_access( is_store, T_INT, Relaxed, false);
case vmIntrinsics::_putLong: return inline_unsafe_access( is_store, T_LONG, Relaxed, false);
case vmIntrinsics::_putFloat: return inline_unsafe_access( is_store, T_FLOAT, Relaxed, false);
case vmIntrinsics::_putDouble: return inline_unsafe_access( is_store, T_DOUBLE, Relaxed, false);
case vmIntrinsics::_getByte_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_BYTE, Relaxed, false);
case vmIntrinsics::_getShort_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_SHORT, Relaxed, false);
case vmIntrinsics::_getChar_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_CHAR, Relaxed, false);
case vmIntrinsics::_getInt_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_INT, Relaxed, false);
case vmIntrinsics::_getLong_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_LONG, Relaxed, false);
case vmIntrinsics::_getFloat_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_FLOAT, Relaxed, false);
case vmIntrinsics::_getDouble_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_DOUBLE, Relaxed, false);
case vmIntrinsics::_getAddress_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_ADDRESS, Relaxed, false);
case vmIntrinsics::_getObjectVolatile: return inline_unsafe_access(!is_store, T_OBJECT, Volatile, false);
case vmIntrinsics::_getBooleanVolatile: return inline_unsafe_access(!is_store, T_BOOLEAN, Volatile, false);
case vmIntrinsics::_getByteVolatile: return inline_unsafe_access(!is_store, T_BYTE, Volatile, false);
case vmIntrinsics::_getShortVolatile: return inline_unsafe_access(!is_store, T_SHORT, Volatile, false);
case vmIntrinsics::_getCharVolatile: return inline_unsafe_access(!is_store, T_CHAR, Volatile, false);
case vmIntrinsics::_getIntVolatile: return inline_unsafe_access(!is_store, T_INT, Volatile, false);
case vmIntrinsics::_getLongVolatile: return inline_unsafe_access(!is_store, T_LONG, Volatile, false);
case vmIntrinsics::_getFloatVolatile: return inline_unsafe_access(!is_store, T_FLOAT, Volatile, false);
case vmIntrinsics::_getDoubleVolatile: return inline_unsafe_access(!is_store, T_DOUBLE, Volatile, false);
case vmIntrinsics::_putByte_raw: return inline_unsafe_access( is_native_ptr, is_store, T_BYTE, Relaxed, false);
case vmIntrinsics::_putShort_raw: return inline_unsafe_access( is_native_ptr, is_store, T_SHORT, Relaxed, false);
case vmIntrinsics::_putChar_raw: return inline_unsafe_access( is_native_ptr, is_store, T_CHAR, Relaxed, false);
case vmIntrinsics::_putInt_raw: return inline_unsafe_access( is_native_ptr, is_store, T_INT, Relaxed, false);
case vmIntrinsics::_putLong_raw: return inline_unsafe_access( is_native_ptr, is_store, T_LONG, Relaxed, false);
case vmIntrinsics::_putFloat_raw: return inline_unsafe_access( is_native_ptr, is_store, T_FLOAT, Relaxed, false);
case vmIntrinsics::_putDouble_raw: return inline_unsafe_access( is_native_ptr, is_store, T_DOUBLE, Relaxed, false);
case vmIntrinsics::_putAddress_raw: return inline_unsafe_access( is_native_ptr, is_store, T_ADDRESS, Relaxed, false);
case vmIntrinsics::_putObjectVolatile: return inline_unsafe_access( is_store, T_OBJECT, Volatile, false);
case vmIntrinsics::_putBooleanVolatile: return inline_unsafe_access( is_store, T_BOOLEAN, Volatile, false);
case vmIntrinsics::_putByteVolatile: return inline_unsafe_access( is_store, T_BYTE, Volatile, false);
case vmIntrinsics::_putShortVolatile: return inline_unsafe_access( is_store, T_SHORT, Volatile, false);
case vmIntrinsics::_putCharVolatile: return inline_unsafe_access( is_store, T_CHAR, Volatile, false);
case vmIntrinsics::_putIntVolatile: return inline_unsafe_access( is_store, T_INT, Volatile, false);
case vmIntrinsics::_putLongVolatile: return inline_unsafe_access( is_store, T_LONG, Volatile, false);
case vmIntrinsics::_putFloatVolatile: return inline_unsafe_access( is_store, T_FLOAT, Volatile, false);
case vmIntrinsics::_putDoubleVolatile: return inline_unsafe_access( is_store, T_DOUBLE, Volatile, false);
case vmIntrinsics::_getObjectVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, Volatile, false);
case vmIntrinsics::_getBooleanVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, Volatile, false);
case vmIntrinsics::_getByteVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE, Volatile, false);
case vmIntrinsics::_getShortVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, Volatile, false);
case vmIntrinsics::_getCharVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, Volatile, false);
case vmIntrinsics::_getIntVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, Volatile, false);
case vmIntrinsics::_getLongVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, Volatile, false);
case vmIntrinsics::_getFloatVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT, Volatile, false);
case vmIntrinsics::_getDoubleVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE, Volatile, false);
case vmIntrinsics::_getShortUnaligned: return inline_unsafe_access(!is_store, T_SHORT, Relaxed, true);
case vmIntrinsics::_getCharUnaligned: return inline_unsafe_access(!is_store, T_CHAR, Relaxed, true);
case vmIntrinsics::_getIntUnaligned: return inline_unsafe_access(!is_store, T_INT, Relaxed, true);
case vmIntrinsics::_getLongUnaligned: return inline_unsafe_access(!is_store, T_LONG, Relaxed, true);
case vmIntrinsics::_putObjectVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_OBJECT, Volatile, false);
case vmIntrinsics::_putBooleanVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_BOOLEAN, Volatile, false);
case vmIntrinsics::_putByteVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_BYTE, Volatile, false);
case vmIntrinsics::_putShortVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, Volatile, false);
case vmIntrinsics::_putCharVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, Volatile, false);
case vmIntrinsics::_putIntVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, Volatile, false);
case vmIntrinsics::_putLongVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, Volatile, false);
case vmIntrinsics::_putFloatVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_FLOAT, Volatile, false);
case vmIntrinsics::_putDoubleVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_DOUBLE, Volatile, false);
case vmIntrinsics::_putShortUnaligned: return inline_unsafe_access( is_store, T_SHORT, Relaxed, true);
case vmIntrinsics::_putCharUnaligned: return inline_unsafe_access( is_store, T_CHAR, Relaxed, true);
case vmIntrinsics::_putIntUnaligned: return inline_unsafe_access( is_store, T_INT, Relaxed, true);
case vmIntrinsics::_putLongUnaligned: return inline_unsafe_access( is_store, T_LONG, Relaxed, true);
case vmIntrinsics::_getShortUnaligned: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, Relaxed, true);
case vmIntrinsics::_getCharUnaligned: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, Relaxed, true);
case vmIntrinsics::_getIntUnaligned: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, Relaxed, true);
case vmIntrinsics::_getLongUnaligned: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, Relaxed, true);
case vmIntrinsics::_getObjectAcquire: return inline_unsafe_access(!is_store, T_OBJECT, Acquire, false);
case vmIntrinsics::_getBooleanAcquire: return inline_unsafe_access(!is_store, T_BOOLEAN, Acquire, false);
case vmIntrinsics::_getByteAcquire: return inline_unsafe_access(!is_store, T_BYTE, Acquire, false);
case vmIntrinsics::_getShortAcquire: return inline_unsafe_access(!is_store, T_SHORT, Acquire, false);
case vmIntrinsics::_getCharAcquire: return inline_unsafe_access(!is_store, T_CHAR, Acquire, false);
case vmIntrinsics::_getIntAcquire: return inline_unsafe_access(!is_store, T_INT, Acquire, false);
case vmIntrinsics::_getLongAcquire: return inline_unsafe_access(!is_store, T_LONG, Acquire, false);
case vmIntrinsics::_getFloatAcquire: return inline_unsafe_access(!is_store, T_FLOAT, Acquire, false);
case vmIntrinsics::_getDoubleAcquire: return inline_unsafe_access(!is_store, T_DOUBLE, Acquire, false);
case vmIntrinsics::_putShortUnaligned: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, Relaxed, true);
case vmIntrinsics::_putCharUnaligned: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, Relaxed, true);
case vmIntrinsics::_putIntUnaligned: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, Relaxed, true);
case vmIntrinsics::_putLongUnaligned: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, Relaxed, true);
case vmIntrinsics::_putObjectRelease: return inline_unsafe_access( is_store, T_OBJECT, Release, false);
case vmIntrinsics::_putBooleanRelease: return inline_unsafe_access( is_store, T_BOOLEAN, Release, false);
case vmIntrinsics::_putByteRelease: return inline_unsafe_access( is_store, T_BYTE, Release, false);
case vmIntrinsics::_putShortRelease: return inline_unsafe_access( is_store, T_SHORT, Release, false);
case vmIntrinsics::_putCharRelease: return inline_unsafe_access( is_store, T_CHAR, Release, false);
case vmIntrinsics::_putIntRelease: return inline_unsafe_access( is_store, T_INT, Release, false);
case vmIntrinsics::_putLongRelease: return inline_unsafe_access( is_store, T_LONG, Release, false);
case vmIntrinsics::_putFloatRelease: return inline_unsafe_access( is_store, T_FLOAT, Release, false);
case vmIntrinsics::_putDoubleRelease: return inline_unsafe_access( is_store, T_DOUBLE, Release, false);
case vmIntrinsics::_getObjectAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, Acquire, false);
case vmIntrinsics::_getBooleanAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, Acquire, false);
case vmIntrinsics::_getByteAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE, Acquire, false);
case vmIntrinsics::_getShortAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, Acquire, false);
case vmIntrinsics::_getCharAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, Acquire, false);
case vmIntrinsics::_getIntAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, Acquire, false);
case vmIntrinsics::_getLongAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, Acquire, false);
case vmIntrinsics::_getFloatAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT, Acquire, false);
case vmIntrinsics::_getDoubleAcquire: return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE, Acquire, false);
case vmIntrinsics::_getObjectOpaque: return inline_unsafe_access(!is_store, T_OBJECT, Opaque, false);
case vmIntrinsics::_getBooleanOpaque: return inline_unsafe_access(!is_store, T_BOOLEAN, Opaque, false);
case vmIntrinsics::_getByteOpaque: return inline_unsafe_access(!is_store, T_BYTE, Opaque, false);
case vmIntrinsics::_getShortOpaque: return inline_unsafe_access(!is_store, T_SHORT, Opaque, false);
case vmIntrinsics::_getCharOpaque: return inline_unsafe_access(!is_store, T_CHAR, Opaque, false);
case vmIntrinsics::_getIntOpaque: return inline_unsafe_access(!is_store, T_INT, Opaque, false);
case vmIntrinsics::_getLongOpaque: return inline_unsafe_access(!is_store, T_LONG, Opaque, false);
case vmIntrinsics::_getFloatOpaque: return inline_unsafe_access(!is_store, T_FLOAT, Opaque, false);
case vmIntrinsics::_getDoubleOpaque: return inline_unsafe_access(!is_store, T_DOUBLE, Opaque, false);
case vmIntrinsics::_putObjectRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_OBJECT, Release, false);
case vmIntrinsics::_putBooleanRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_BOOLEAN, Release, false);
case vmIntrinsics::_putByteRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_BYTE, Release, false);
case vmIntrinsics::_putShortRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, Release, false);
case vmIntrinsics::_putCharRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, Release, false);
case vmIntrinsics::_putIntRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, Release, false);
case vmIntrinsics::_putLongRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, Release, false);
case vmIntrinsics::_putFloatRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_FLOAT, Release, false);
case vmIntrinsics::_putDoubleRelease: return inline_unsafe_access(!is_native_ptr, is_store, T_DOUBLE, Release, false);
case vmIntrinsics::_getObjectOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, Opaque, false);
case vmIntrinsics::_getBooleanOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, Opaque, false);
case vmIntrinsics::_getByteOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE, Opaque, false);
case vmIntrinsics::_getShortOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, Opaque, false);
case vmIntrinsics::_getCharOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, Opaque, false);
case vmIntrinsics::_getIntOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, Opaque, false);
case vmIntrinsics::_getLongOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, Opaque, false);
case vmIntrinsics::_getFloatOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT, Opaque, false);
case vmIntrinsics::_getDoubleOpaque: return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE, Opaque, false);
case vmIntrinsics::_putObjectOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_OBJECT, Opaque, false);
case vmIntrinsics::_putBooleanOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_BOOLEAN, Opaque, false);
case vmIntrinsics::_putByteOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_BYTE, Opaque, false);
case vmIntrinsics::_putShortOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, Opaque, false);
case vmIntrinsics::_putCharOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, Opaque, false);
case vmIntrinsics::_putIntOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, Opaque, false);
case vmIntrinsics::_putLongOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, Opaque, false);
case vmIntrinsics::_putFloatOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_FLOAT, Opaque, false);
case vmIntrinsics::_putDoubleOpaque: return inline_unsafe_access(!is_native_ptr, is_store, T_DOUBLE, Opaque, false);
case vmIntrinsics::_putObjectOpaque: return inline_unsafe_access( is_store, T_OBJECT, Opaque, false);
case vmIntrinsics::_putBooleanOpaque: return inline_unsafe_access( is_store, T_BOOLEAN, Opaque, false);
case vmIntrinsics::_putByteOpaque: return inline_unsafe_access( is_store, T_BYTE, Opaque, false);
case vmIntrinsics::_putShortOpaque: return inline_unsafe_access( is_store, T_SHORT, Opaque, false);
case vmIntrinsics::_putCharOpaque: return inline_unsafe_access( is_store, T_CHAR, Opaque, false);
case vmIntrinsics::_putIntOpaque: return inline_unsafe_access( is_store, T_INT, Opaque, false);
case vmIntrinsics::_putLongOpaque: return inline_unsafe_access( is_store, T_LONG, Opaque, false);
case vmIntrinsics::_putFloatOpaque: return inline_unsafe_access( is_store, T_FLOAT, Opaque, false);
case vmIntrinsics::_putDoubleOpaque: return inline_unsafe_access( is_store, T_DOUBLE, Opaque, false);
case vmIntrinsics::_compareAndSwapObject: return inline_unsafe_load_store(T_OBJECT, LS_cmp_swap, Volatile);
case vmIntrinsics::_compareAndSwapInt: return inline_unsafe_load_store(T_INT, LS_cmp_swap, Volatile);
@@ -2196,8 +2177,6 @@ bool LibraryCallKit::inline_number_methods(vmIntrinsics::ID id) {
//----------------------------inline_unsafe_access----------------------------
const static BasicType T_ADDRESS_HOLDER = T_LONG;
// Helper that guards and inserts a pre-barrier.
void LibraryCallKit::insert_pre_barrier(Node* base_oop, Node* offset,
Node* pre_val, bool need_mem_bar) {
@@ -2298,13 +2277,12 @@ void LibraryCallKit::insert_pre_barrier(Node* base_oop, Node* offset,
}
const TypeOopPtr* LibraryCallKit::sharpen_unsafe_type(Compile::AliasType* alias_type, const TypePtr *adr_type, bool is_native_ptr) {
const TypeOopPtr* LibraryCallKit::sharpen_unsafe_type(Compile::AliasType* alias_type, const TypePtr *adr_type) {
// Attempt to infer a sharper value type from the offset and base type.
ciKlass* sharpened_klass = NULL;
// See if it is an instance field, with an object type.
if (alias_type->field() != NULL) {
assert(!is_native_ptr, "native pointer op cannot use a java address");
if (alias_type->field()->type()->is_klass()) {
sharpened_klass = alias_type->field()->type()->as_klass();
}
@@ -2337,7 +2315,7 @@ const TypeOopPtr* LibraryCallKit::sharpen_unsafe_type(Compile::AliasType* alias_
return NULL;
}
bool LibraryCallKit::inline_unsafe_access(const bool is_native_ptr, bool is_store, const BasicType type, const AccessKind kind, const bool unaligned) {
bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, const AccessKind kind, const bool unaligned) {
if (callee()->is_static()) return false; // caller must have the capability!
guarantee(!is_store || kind != Acquire, "Acquire accesses can be produced only for loads");
guarantee( is_store || kind != Release, "Release accesses can be produced only for stores");
@@ -2352,31 +2330,17 @@ bool LibraryCallKit::inline_unsafe_access(const bool is_native_ptr, bool is_stor
if (!is_store) {
// Object getObject(Object base, int/long offset), etc.
BasicType rtype = sig->return_type()->basic_type();
if (rtype == T_ADDRESS_HOLDER && callee()->name() == ciSymbol::getAddress_name())
rtype = T_ADDRESS; // it is really a C void*
assert(rtype == type, "getter must return the expected value");
if (!is_native_ptr) {
assert(sig->count() == 2, "oop getter has 2 arguments");
assert(sig->type_at(0)->basic_type() == T_OBJECT, "getter base is object");
assert(sig->type_at(1)->basic_type() == T_LONG, "getter offset is correct");
} else {
assert(sig->count() == 1, "native getter has 1 argument");
assert(sig->type_at(0)->basic_type() == T_LONG, "getter base is long");
}
assert(sig->count() == 2, "oop getter has 2 arguments");
assert(sig->type_at(0)->basic_type() == T_OBJECT, "getter base is object");
assert(sig->type_at(1)->basic_type() == T_LONG, "getter offset is correct");
} else {
// void putObject(Object base, int/long offset, Object x), etc.
assert(sig->return_type()->basic_type() == T_VOID, "putter must not return a value");
if (!is_native_ptr) {
assert(sig->count() == 3, "oop putter has 3 arguments");
assert(sig->type_at(0)->basic_type() == T_OBJECT, "putter base is object");
assert(sig->type_at(1)->basic_type() == T_LONG, "putter offset is correct");
} else {
assert(sig->count() == 2, "native putter has 2 arguments");
assert(sig->type_at(0)->basic_type() == T_LONG, "putter base is long");
}
assert(sig->count() == 3, "oop putter has 3 arguments");
assert(sig->type_at(0)->basic_type() == T_OBJECT, "putter base is object");
assert(sig->type_at(1)->basic_type() == T_LONG, "putter offset is correct");
BasicType vtype = sig->type_at(sig->count()-1)->basic_type();
if (vtype == T_ADDRESS_HOLDER && callee()->name() == ciSymbol::putAddress_name())
vtype = T_ADDRESS; // it is really a C void*
assert(vtype == type, "putter must accept the expected value");
}
#endif // ASSERT
@@ -2393,27 +2357,22 @@ bool LibraryCallKit::inline_unsafe_access(const bool is_native_ptr, bool is_stor
Node* offset = top();
Node* val;
if (!is_native_ptr) {
// The base is either a Java object or a value produced by Unsafe.staticFieldBase
Node* base = argument(1); // type: oop
// The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
offset = argument(2); // type: long
// We currently rely on the cookies produced by Unsafe.xxxFieldOffset
// to be plain byte offsets, which are also the same as those accepted
// by oopDesc::field_base.
assert(Unsafe_field_offset_to_byte_offset(11) == 11,
"fieldOffset must be byte-scaled");
// 32-bit machines ignore the high half!
offset = ConvL2X(offset);
adr = make_unsafe_address(base, offset);
// The base is either a Java object or a value produced by Unsafe.staticFieldBase
Node* base = argument(1); // type: oop
// The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
offset = argument(2); // type: long
// We currently rely on the cookies produced by Unsafe.xxxFieldOffset
// to be plain byte offsets, which are also the same as those accepted
// by oopDesc::field_base.
assert(Unsafe_field_offset_to_byte_offset(11) == 11,
"fieldOffset must be byte-scaled");
// 32-bit machines ignore the high half!
offset = ConvL2X(offset);
adr = make_unsafe_address(base, offset);
if (_gvn.type(base)->isa_ptr() != TypePtr::NULL_PTR) {
heap_base_oop = base;
val = is_store ? argument(4) : NULL;
} else {
Node* ptr = argument(1); // type: long
ptr = ConvL2X(ptr); // adjust Java long to machine word
adr = make_unsafe_address(NULL, ptr);
val = is_store ? argument(3) : NULL;
}
val = is_store ? argument(4) : NULL;
const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
@@ -2494,11 +2453,11 @@ bool LibraryCallKit::inline_unsafe_access(const bool is_native_ptr, bool is_stor
// SATB log buffer using the pre-barrier mechanism.
// Also we need to add memory barrier to prevent commoning reads
// from this field across safepoint since GC can change its value.
bool need_read_barrier = !is_native_ptr && !is_store &&
bool need_read_barrier = !is_store &&
offset != top() && heap_base_oop != top();
if (!is_store && type == T_OBJECT) {
const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type, is_native_ptr);
const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type);
if (tjp != NULL) {
value_type = tjp;
}

View File

@@ -131,38 +131,137 @@ jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
}
///// Data in the Java heap.
///// Data read/writes on the Java heap and in native (off-heap) memory
#define truncate_jboolean(x) ((x) & 1)
#define truncate_jbyte(x) (x)
#define truncate_jshort(x) (x)
#define truncate_jchar(x) (x)
#define truncate_jint(x) (x)
#define truncate_jlong(x) (x)
#define truncate_jfloat(x) (x)
#define truncate_jdouble(x) (x)
/**
* Helper class for accessing memory.
*
* Normalizes values and wraps accesses in
* JavaThread::doing_unsafe_access() if needed.
*/
class MemoryAccess : StackObj {
JavaThread* _thread;
jobject _obj;
jlong _offset;
#define GET_FIELD(obj, offset, type_name, v) \
oop p = JNIHandles::resolve(obj); \
type_name v = *(type_name*)index_oop_from_field_offset_long(p, offset)
// Resolves and returns the address of the memory access
void* addr() {
return index_oop_from_field_offset_long(JNIHandles::resolve(_obj), _offset);
}
#define SET_FIELD(obj, offset, type_name, x) \
oop p = JNIHandles::resolve(obj); \
*(type_name*)index_oop_from_field_offset_long(p, offset) = truncate_##type_name(x)
template <typename T>
T normalize(T x) {
return x;
}
#define GET_FIELD_VOLATILE(obj, offset, type_name, v) \
oop p = JNIHandles::resolve(obj); \
if (support_IRIW_for_not_multiple_copy_atomic_cpu) { \
OrderAccess::fence(); \
} \
volatile type_name v = OrderAccess::load_acquire((volatile type_name*)index_oop_from_field_offset_long(p, offset));
jboolean normalize(jboolean x) {
return x & 1;
}
#define SET_FIELD_VOLATILE(obj, offset, type_name, x) \
oop p = JNIHandles::resolve(obj); \
OrderAccess::release_store_fence((volatile type_name*)index_oop_from_field_offset_long(p, offset), truncate_##type_name(x));
/**
* Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
*/
class GuardUnsafeAccess {
JavaThread* _thread;
bool _active;
public:
GuardUnsafeAccess(JavaThread* thread, jobject _obj) : _thread(thread) {
if (JNIHandles::resolve(_obj) == NULL) {
// native/off-heap access which may raise SIGBUS if accessing
// memory mapped file data in a region of the file which has
// been truncated and is now invalid
_thread->set_doing_unsafe_access(true);
_active = true;
} else {
_active = false;
}
}
~GuardUnsafeAccess() {
if (_active) {
_thread->set_doing_unsafe_access(false);
}
}
};
public:
MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
: _thread(thread), _obj(obj), _offset(offset) {
}
template <typename T>
T get() {
GuardUnsafeAccess guard(_thread, _obj);
T* p = (T*)addr();
T x = *p;
return x;
}
template <typename T>
void put(T x) {
GuardUnsafeAccess guard(_thread, _obj);
T* p = (T*)addr();
*p = normalize(x);
}
// Get/SetObject must be special-cased, since it works with handles.
template <typename T>
T get_volatile() {
GuardUnsafeAccess guard(_thread, _obj);
T* p = (T*)addr();
if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
OrderAccess::fence();
}
T x = OrderAccess::load_acquire((volatile T*)p);
return x;
}
template <typename T>
void put_volatile(T x) {
GuardUnsafeAccess guard(_thread, _obj);
T* p = (T*)addr();
OrderAccess::release_store_fence((volatile T*)p, normalize(x));
}
#ifndef SUPPORTS_NATIVE_CX8
jlong get_jlong_locked() {
GuardUnsafeAccess guard(_thread, _obj);
MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
jlong* p = (jlong*)addr();
jlong x = Atomic::load(p);
return x;
}
void put_jlong_locked(jlong x) {
GuardUnsafeAccess guard(_thread, _obj);
MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
jlong* p = (jlong*)addr();
Atomic::store(normalize(x), p);
}
#endif
};
// Get/PutObject must be special-cased, since it works with handles.
// These functions allow a null base pointer with an arbitrary address.
// But if the base pointer is non-null, the offset should make some sense.
@@ -208,7 +307,7 @@ UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj,
return ret;
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_SetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
UNSAFE_ENTRY(void, Unsafe_PutObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
oop x = JNIHandles::resolve(x_h);
oop p = JNIHandles::resolve(obj);
@@ -236,7 +335,7 @@ UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobj
return JNIHandles::make_local(env, v);
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_SetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
UNSAFE_ENTRY(void, Unsafe_PutObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
oop x = JNIHandles::resolve(x_h);
oop p = JNIHandles::resolve(obj);
void* addr = index_oop_from_field_offset_long(p, offset);
@@ -301,25 +400,17 @@ UNSAFE_ENTRY(jlong, Unsafe_GetKlassPointer(JNIEnv *env, jobject unsafe, jobject
UNSAFE_ENTRY(jlong, Unsafe_GetLongVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
if (VM_Version::supports_cx8()) {
GET_FIELD_VOLATILE(obj, offset, jlong, v);
return v;
return MemoryAccess(thread, obj, offset).get_volatile<jlong>();
} else {
Handle p (THREAD, JNIHandles::resolve(obj));
jlong* addr = (jlong*)(index_oop_from_field_offset_long(p(), offset));
MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
jlong value = Atomic::load(addr);
return value;
return MemoryAccess(thread, obj, offset).get_jlong_locked();
}
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_SetLongVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong x)) {
UNSAFE_ENTRY(void, Unsafe_PutLongVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong x)) {
if (VM_Version::supports_cx8()) {
SET_FIELD_VOLATILE(obj, offset, jlong, x);
MemoryAccess(thread, obj, offset).put_volatile<jlong>(x);
} else {
Handle p (THREAD, JNIHandles::resolve(obj));
jlong* addr = (jlong*)(index_oop_from_field_offset_long(p(), offset));
MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
Atomic::store(x, addr);
MemoryAccess(thread, obj, offset).put_jlong_locked(x);
}
} UNSAFE_END
@@ -337,15 +428,14 @@ UNSAFE_LEAF(jint, Unsafe_unalignedAccess0(JNIEnv *env, jobject unsafe)) {
return UseUnalignedAccesses;
} UNSAFE_END
#define DEFINE_GETSETOOP(java_type, Type) \
#define DEFINE_GETSETOOP(java_type, Type) \
\
UNSAFE_ENTRY(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
GET_FIELD(obj, offset, java_type, v); \
return v; \
return MemoryAccess(thread, obj, offset).get<java_type>(); \
} UNSAFE_END \
\
UNSAFE_ENTRY(void, Unsafe_Set##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
SET_FIELD(obj, offset, java_type, x); \
UNSAFE_ENTRY(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
MemoryAccess(thread, obj, offset).put<java_type>(x); \
} UNSAFE_END \
\
// END DEFINE_GETSETOOP.
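For reference, a hand expansion of the macro above for one of its instantiations, DEFINE_GETSETOOP(jdouble, Double), assuming UNSAFE_ENTRY makes the current JavaThread available as thread (as the macro body requires):

// Illustrative hand expansion of DEFINE_GETSETOOP(jdouble, Double).
UNSAFE_ENTRY(jdouble, Unsafe_GetDouble(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  // Reads a field of obj, or raw memory at `offset` when obj is NULL.
  return MemoryAccess(thread, obj, offset).get<jdouble>();
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutDouble(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jdouble x)) {
  MemoryAccess(thread, obj, offset).put<jdouble>(x);
} UNSAFE_END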
@@ -364,12 +454,11 @@ DEFINE_GETSETOOP(jdouble, Double);
#define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
\
UNSAFE_ENTRY(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
GET_FIELD_VOLATILE(obj, offset, java_type, v); \
return v; \
return MemoryAccess(thread, obj, offset).get_volatile<java_type>(); \
} UNSAFE_END \
\
UNSAFE_ENTRY(void, Unsafe_Set##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
SET_FIELD_VOLATILE(obj, offset, java_type, x); \
UNSAFE_ENTRY(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
MemoryAccess(thread, obj, offset).put_volatile<java_type>(x); \
} UNSAFE_END \
\
// END DEFINE_GETSETOOP_VOLATILE.
@@ -400,98 +489,6 @@ UNSAFE_LEAF(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe)) {
OrderAccess::fence();
} UNSAFE_END
////// Data in the C heap.
// Note: These do not throw NullPointerException for bad pointers.
// They just crash. Only a oop base pointer can generate a NullPointerException.
//
#define DEFINE_GETSETNATIVE(java_type, Type, native_type) \
\
UNSAFE_ENTRY(java_type, Unsafe_GetNative##Type(JNIEnv *env, jobject unsafe, jlong addr)) { \
void* p = addr_from_java(addr); \
JavaThread* t = JavaThread::current(); \
t->set_doing_unsafe_access(true); \
java_type x = *(volatile native_type*)p; \
t->set_doing_unsafe_access(false); \
return x; \
} UNSAFE_END \
\
UNSAFE_ENTRY(void, Unsafe_SetNative##Type(JNIEnv *env, jobject unsafe, jlong addr, java_type x)) { \
JavaThread* t = JavaThread::current(); \
t->set_doing_unsafe_access(true); \
void* p = addr_from_java(addr); \
*(volatile native_type*)p = x; \
t->set_doing_unsafe_access(false); \
} UNSAFE_END \
\
// END DEFINE_GETSETNATIVE.
DEFINE_GETSETNATIVE(jbyte, Byte, signed char)
DEFINE_GETSETNATIVE(jshort, Short, signed short);
DEFINE_GETSETNATIVE(jchar, Char, unsigned short);
DEFINE_GETSETNATIVE(jint, Int, jint);
// no long -- handled specially
DEFINE_GETSETNATIVE(jfloat, Float, float);
DEFINE_GETSETNATIVE(jdouble, Double, double);
#undef DEFINE_GETSETNATIVE
UNSAFE_ENTRY(jlong, Unsafe_GetNativeLong(JNIEnv *env, jobject unsafe, jlong addr)) {
JavaThread* t = JavaThread::current();
// We do it this way to avoid problems with access to heap using 64
// bit loads, as jlong in heap could be not 64-bit aligned, and on
// some CPUs (SPARC) it leads to SIGBUS.
t->set_doing_unsafe_access(true);
void* p = addr_from_java(addr);
jlong x;
if (is_ptr_aligned(p, sizeof(jlong)) == 0) {
// jlong is aligned, do a volatile access
x = *(volatile jlong*)p;
} else {
jlong_accessor acc;
acc.words[0] = ((volatile jint*)p)[0];
acc.words[1] = ((volatile jint*)p)[1];
x = acc.long_value;
}
t->set_doing_unsafe_access(false);
return x;
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_SetNativeLong(JNIEnv *env, jobject unsafe, jlong addr, jlong x)) {
JavaThread* t = JavaThread::current();
// see comment for Unsafe_GetNativeLong
t->set_doing_unsafe_access(true);
void* p = addr_from_java(addr);
if (is_ptr_aligned(p, sizeof(jlong))) {
// jlong is aligned, do a volatile access
*(volatile jlong*)p = x;
} else {
jlong_accessor acc;
acc.long_value = x;
((volatile jint*)p)[0] = acc.words[0];
((volatile jint*)p)[1] = acc.words[1];
}
t->set_doing_unsafe_access(false);
} UNSAFE_END
UNSAFE_LEAF(jlong, Unsafe_GetNativeAddress(JNIEnv *env, jobject unsafe, jlong addr)) {
void* p = addr_from_java(addr);
return addr_to_java(*(void**)p);
} UNSAFE_END
UNSAFE_LEAF(void, Unsafe_SetNativeAddress(JNIEnv *env, jobject unsafe, jlong addr, jlong x)) {
void* p = addr_from_java(addr);
*(void**)p = addr_from_java(x);
} UNSAFE_END
////// Allocation requests
UNSAFE_ENTRY(jobject, Unsafe_AllocateInstance(JNIEnv *env, jobject unsafe, jclass cls)) {
@@ -980,8 +977,8 @@ UNSAFE_ENTRY(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, job
} UNSAFE_END
UNSAFE_ENTRY(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
Handle p (THREAD, JNIHandles::resolve(obj));
jlong* addr = (jlong*)(index_oop_from_field_offset_long(p(), offset));
Handle p(THREAD, JNIHandles::resolve(obj));
jlong* addr = (jlong*)index_oop_from_field_offset_long(p(), offset);
#ifdef SUPPORTS_NATIVE_CX8
return (jlong)(Atomic::cmpxchg(x, addr, e));
@@ -1017,7 +1014,7 @@ UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapObject(JNIEnv *env, jobject unsafe,
UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSwapInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
oop p = JNIHandles::resolve(obj);
jint* addr = (jint *) index_oop_from_field_offset_long(p, offset);
jint* addr = (jint *)index_oop_from_field_offset_long(p, offset);
return (jint)(Atomic::cmpxchg(x, addr, e)) == e;
} UNSAFE_END
@@ -1143,20 +1140,16 @@ UNSAFE_ENTRY(jint, Unsafe_GetLoadAverage0(JNIEnv *env, jobject unsafe, jdoubleAr
#define DECLARE_GETPUTOOP(Type, Desc) \
{CC "get" #Type, CC "(" OBJ "J)" #Desc, FN_PTR(Unsafe_Get##Type)}, \
{CC "put" #Type, CC "(" OBJ "J" #Desc ")V", FN_PTR(Unsafe_Set##Type)}, \
{CC "put" #Type, CC "(" OBJ "J" #Desc ")V", FN_PTR(Unsafe_Put##Type)}, \
{CC "get" #Type "Volatile", CC "(" OBJ "J)" #Desc, FN_PTR(Unsafe_Get##Type##Volatile)}, \
{CC "put" #Type "Volatile", CC "(" OBJ "J" #Desc ")V", FN_PTR(Unsafe_Set##Type##Volatile)}
{CC "put" #Type "Volatile", CC "(" OBJ "J" #Desc ")V", FN_PTR(Unsafe_Put##Type##Volatile)}
#define DECLARE_GETPUTNATIVE(Byte, B) \
{CC "get" #Byte, CC "(" ADR ")" #B, FN_PTR(Unsafe_GetNative##Byte)}, \
{CC "put" #Byte, CC "(" ADR#B ")V", FN_PTR(Unsafe_SetNative##Byte)}
static JNINativeMethod jdk_internal_misc_Unsafe_methods[] = {
{CC "getObject", CC "(" OBJ "J)" OBJ "", FN_PTR(Unsafe_GetObject)},
{CC "putObject", CC "(" OBJ "J" OBJ ")V", FN_PTR(Unsafe_SetObject)},
{CC "putObject", CC "(" OBJ "J" OBJ ")V", FN_PTR(Unsafe_PutObject)},
{CC "getObjectVolatile",CC "(" OBJ "J)" OBJ "", FN_PTR(Unsafe_GetObjectVolatile)},
{CC "putObjectVolatile",CC "(" OBJ "J" OBJ ")V", FN_PTR(Unsafe_SetObjectVolatile)},
{CC "putObjectVolatile",CC "(" OBJ "J" OBJ ")V", FN_PTR(Unsafe_PutObjectVolatile)},
{CC "getUncompressedObject", CC "(" ADR ")" OBJ, FN_PTR(Unsafe_GetUncompressedObject)},
{CC "getJavaMirror", CC "(" ADR ")" CLS, FN_PTR(Unsafe_GetJavaMirror)},
@@ -1171,17 +1164,6 @@ static JNINativeMethod jdk_internal_misc_Unsafe_methods[] = {
DECLARE_GETPUTOOP(Float, F),
DECLARE_GETPUTOOP(Double, D),
DECLARE_GETPUTNATIVE(Byte, B),
DECLARE_GETPUTNATIVE(Short, S),
DECLARE_GETPUTNATIVE(Char, C),
DECLARE_GETPUTNATIVE(Int, I),
DECLARE_GETPUTNATIVE(Long, J),
DECLARE_GETPUTNATIVE(Float, F),
DECLARE_GETPUTNATIVE(Double, D),
{CC "getAddress", CC "(" ADR ")" ADR, FN_PTR(Unsafe_GetNativeAddress)},
{CC "putAddress", CC "(" ADR "" ADR ")V", FN_PTR(Unsafe_SetNativeAddress)},
{CC "allocateMemory0", CC "(J)" ADR, FN_PTR(Unsafe_AllocateMemory0)},
{CC "reallocateMemory0", CC "(" ADR "J)" ADR, FN_PTR(Unsafe_ReallocateMemory0)},
{CC "freeMemory0", CC "(" ADR ")V", FN_PTR(Unsafe_FreeMemory0)},
@@ -1239,7 +1221,6 @@ static JNINativeMethod jdk_internal_misc_Unsafe_methods[] = {
#undef DAC_Args
#undef DECLARE_GETPUTOOP
#undef DECLARE_GETPUTNATIVE
// This function is exported, used by NativeLookup.