8265129: Add intrinsic support for JVM.getClassId
Reviewed-by: kvn, mgronlun
parent 123cdd1fbd
commit 2e8812df14
@@ -223,11 +223,7 @@ bool Compiler::is_intrinsic_supported(const methodHandle& method) {
#ifdef JFR_HAVE_INTRINSICS
case vmIntrinsics::_counterTime:
case vmIntrinsics::_getEventWriter:
#if defined(_LP64) || !defined(TRACE_ID_SHIFT)
case vmIntrinsics::_getClassId:
#endif
#endif
break;
case vmIntrinsics::_getObjectSize:
break;
case vmIntrinsics::_blackhole:
@@ -3055,33 +3055,6 @@ void LIRGenerator::do_IfOp(IfOp* x) {
}

#ifdef JFR_HAVE_INTRINSICS
void LIRGenerator::do_ClassIDIntrinsic(Intrinsic* x) {
CodeEmitInfo* info = state_for(x);
CodeEmitInfo* info2 = new CodeEmitInfo(info); // Clone for the second null check

assert(info != NULL, "must have info");
LIRItem arg(x->argument_at(0), this);

arg.load_item();
LIR_Opr klass = new_register(T_METADATA);
__ move(new LIR_Address(arg.result(), java_lang_Class::klass_offset(), T_ADDRESS), klass, info);
LIR_Opr id = new_register(T_LONG);
ByteSize offset = KLASS_TRACE_ID_OFFSET;
LIR_Address* trace_id_addr = new LIR_Address(klass, in_bytes(offset), T_LONG);

__ move(trace_id_addr, id);
__ logical_or(id, LIR_OprFact::longConst(0x01l), id);
__ store(id, trace_id_addr);

#ifdef TRACE_ID_META_BITS
__ logical_and(id, LIR_OprFact::longConst(~TRACE_ID_META_BITS), id);
#endif
#ifdef TRACE_ID_SHIFT
__ unsigned_shift_right(id, TRACE_ID_SHIFT, id);
#endif

__ move(id, rlock_result(x));
}

void LIRGenerator::do_getEventWriter(Intrinsic* x) {
LabelObj* L_end = new LabelObj();

@@ -3131,9 +3104,6 @@ void LIRGenerator::do_Intrinsic(Intrinsic* x) {
}

#ifdef JFR_HAVE_INTRINSICS
case vmIntrinsics::_getClassId:
do_ClassIDIntrinsic(x);
break;
case vmIntrinsics::_getEventWriter:
do_getEventWriter(x);
break;
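For reference, the C1 intrinsic removed above computed the id by OR-ing a "used" bit into the klass trace-id word and shifting; it has no notion of the JFR epoch, which is why C1 now falls back to the native call and only C2 gets the reworked intrinsic. A minimal standalone sketch of that old computation (the constants are illustrative stand-ins for TRACE_ID_META_BITS and TRACE_ID_SHIFT, not the real values):

#include <cstdint>
#include <cstdio>

// Illustrative stand-ins; the real values come from the JFR trace-id headers.
static const uint64_t kMetaBits = 0xff00000000000000ULL; // TRACE_ID_META_BITS stand-in (assumption)
static const int      kIdShift  = 16;                    // TRACE_ID_SHIFT stand-in (assumption)

// Mirrors the removed LIR sequence: OR in the "used" bit, store it back,
// then mask the meta bits and shift to recover the numeric class id.
static uint64_t class_id_c1_style(uint64_t* trace_id_word) {
  uint64_t id = *trace_id_word;
  id |= 0x01;             // __ logical_or(id, longConst(0x01), id)
  *trace_id_word = id;    // __ store(id, trace_id_addr)
  id &= ~kMetaBits;       // #ifdef TRACE_ID_META_BITS
  id >>= kIdShift;        // #ifdef TRACE_ID_SHIFT
  return id;              // __ move(id, rlock_result(x))
}

int main() {
  uint64_t word = 42ULL << kIdShift;
  std::printf("%llu\n", (unsigned long long)class_id_c1_style(&word)); // prints 42
}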
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2019, 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2019, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -25,12 +25,14 @@
#include "precompiled.hpp"
#include "jfr/jfr.hpp"
#include "jfr/leakprofiler/leakProfiler.hpp"
#include "jfr/recorder/checkpoint/types/traceid/jfrTraceIdLoadBarrier.inline.hpp"
#include "jfr/recorder/jfrRecorder.hpp"
#include "jfr/recorder/checkpoint/jfrCheckpointManager.hpp"
#include "jfr/recorder/repository/jfrEmergencyDump.hpp"
#include "jfr/recorder/service/jfrOptionSet.hpp"
#include "jfr/recorder/repository/jfrRepository.hpp"
#include "jfr/support/jfrThreadLocal.hpp"
#include "runtime/interfaceSupport.inline.hpp"
#include "runtime/java.hpp"
#include "runtime/thread.hpp"

@@ -109,3 +111,16 @@ bool Jfr::on_flight_recorder_option(const JavaVMOption** option, char* delimiter
bool Jfr::on_start_flight_recording_option(const JavaVMOption** option, char* delimiter) {
return JfrOptionSet::parse_start_flight_recording_option(option, delimiter);
}

JRT_LEAF(void, Jfr::get_class_id_intrinsic(const Klass* klass))
assert(klass != NULL, "sanity");
JfrTraceIdLoadBarrier::load_barrier(klass);
JRT_END

address Jfr::epoch_address() {
return JfrTraceIdEpoch::epoch_address();
}

address Jfr::signal_address() {
return JfrTraceIdEpoch::signal_address();
}
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2018, 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2018, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -30,6 +30,7 @@
class JavaThread;
class Thread;
class Klass;

extern "C" void JNICALL jfr_register_natives(JNIEnv*, jclass);

@@ -54,6 +55,11 @@ class Jfr : AllStatic {
static void exclude_thread(Thread* thread);
static bool is_excluded(Thread* thread);
static void include_thread(Thread* thread);

// intrinsic support
static void get_class_id_intrinsic(const Klass* klass);
static address epoch_address();
static address signal_address();
};

#endif // SHARE_JFR_JFR_HPP
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -44,7 +44,7 @@ JfrJniMethodRegistration::JfrJniMethodRegistration(JNIEnv* env) {
(char*)"destroyJFR", (char*)"()Z", (void*)jfr_destroy_jfr,
(char*)"emitEvent", (char*)"(JJJ)Z", (void*)jfr_emit_event,
(char*)"getAllEventClasses", (char*)"()Ljava/util/List;", (void*)jfr_get_all_event_classes,
(char*)"getClassIdNonIntrinsic", (char*)"(Ljava/lang/Class;)J", (void*)jfr_class_id,
(char*)"getClassId", (char*)"(Ljava/lang/Class;)J", (void*)jfr_class_id,
(char*)"getPid", (char*)"()Ljava/lang/String;", (void*)jfr_get_pid,
(char*)"getStackTraceId", (char*)"(I)J", (void*)jfr_stacktrace_id,
(char*)"getThreadId", (char*)"(Ljava/lang/Thread;)J", (void*)jfr_id_for_thread,
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -67,8 +67,8 @@ class JfrTraceIdEpoch : AllStatic {
return _epoch_state;
}

static jlong epoch_address() {
return (jlong)&_epoch_state;
static address epoch_address() {
return (address)&_epoch_state;
}

static u1 current() {
@@ -114,6 +114,10 @@ class JfrTraceIdEpoch : AllStatic {
static void set_changed_tag_state() {
_tag_state.signal();
}

static address signal_address() {
return _tag_state.signaled_address();
}
};

#endif // SHARE_JFR_RECORDER_CHECKPOINT_TYPES_TRACEID_JFRTRACEIDEPOCH_HPP
@@ -67,12 +67,14 @@ class PackageEntry;
*
*/
class JfrTraceIdLoadBarrier : AllStatic {
friend class Jfr;
friend class JfrCheckpointManager;
private:
static bool initialize();
static void clear();
static void destroy();
static void enqueue(const Klass* klass);
static void load_barrier(const Klass* klass);
public:
static traceid load(const ClassLoaderData* cld);
static traceid load(const Klass* klass);
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -65,12 +65,16 @@ inline traceid set_used_and_get(const T* type) {
return TRACE_ID(type);
}

inline traceid JfrTraceIdLoadBarrier::load(const Klass* klass) {
assert(klass != NULL, "invariant");
if (should_tag(klass)) {
inline void JfrTraceIdLoadBarrier::load_barrier(const Klass* klass) {
SET_USED_THIS_EPOCH(klass);
enqueue(klass);
JfrTraceIdEpoch::set_changed_tag_state();
}

inline traceid JfrTraceIdLoadBarrier::load(const Klass* klass) {
assert(klass != NULL, "invariant");
if (should_tag(klass)) {
load_barrier(klass);
}
assert(USED_THIS_EPOCH(klass), "invariant");
return TRACE_ID(klass);
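The refactoring above splits the unconditional tagging work out of load() into load_barrier(), so the new Jfr::get_class_id_intrinsic leaf call can invoke just the slow path. A simplified standalone model of the two functions (FakeKlass and the globals are stand-ins for the real Klass trace-id bits and checkpoint queue, not JFR code):

#include <cassert>
#include <cstdint>
#include <cstdio>

// Simplified stand-in for a Klass and its JFR trace-id state; the real code
// manipulates per-epoch bits inside the trace-id word itself.
struct FakeKlass {
  uint64_t trace_id;
  bool used_this_epoch = false;
};

static bool tag_state_signaled = false; // stands in for JfrTraceIdEpoch::set_changed_tag_state()

static void enqueue(FakeKlass*) { /* real code enqueues the klass for checkpoint writing */ }

// Slow path, now callable on its own (this is what the new leaf call reaches):
static void load_barrier(FakeKlass* k) {
  k->used_this_epoch = true;   // SET_USED_THIS_EPOCH(klass)
  enqueue(k);                  // enqueue(klass)
  tag_state_signaled = true;   // JfrTraceIdEpoch::set_changed_tag_state()
}

// Full load(): only take the slow path when the klass is not yet tagged this epoch.
static uint64_t load(FakeKlass* k) {
  assert(k != nullptr);
  if (!k->used_this_epoch) {   // should_tag(klass)
    load_barrier(k);
  }
  assert(k->used_this_epoch);  // USED_THIS_EPOCH(klass)
  return k->trace_id;          // TRACE_ID(klass)
}

int main() {
  FakeKlass k{1234};
  std::printf("%llu signaled=%d\n", (unsigned long long)load(&k), tag_state_signaled);
}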
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -46,6 +46,8 @@ class JfrSignal {
}
return false;
}

address signaled_address() { return (address)&_signaled; }
};

#endif // SHARE_JFR_UTILITIES_JFRSIGNAL_HPP
@@ -1106,7 +1106,8 @@ void ConnectionGraph::process_call_arguments(CallNode *call) {
strcmp(call->as_CallLeaf()->_name, "montgomery_square") == 0 ||
strcmp(call->as_CallLeaf()->_name, "bigIntegerRightShiftWorker") == 0 ||
strcmp(call->as_CallLeaf()->_name, "bigIntegerLeftShiftWorker") == 0 ||
strcmp(call->as_CallLeaf()->_name, "vectorizedMismatch") == 0)
strcmp(call->as_CallLeaf()->_name, "vectorizedMismatch") == 0 ||
strcmp(call->as_CallLeaf()->_name, "get_class_id_intrinsic") == 0)
))) {
call->dump();
fatal("EA unexpected CallLeaf %s", call->as_CallLeaf()->_name);
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2005, 2015, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2005, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -351,7 +351,8 @@ Node* IdealKit::load(Node* ctl,
const Type* t,
BasicType bt,
int adr_idx,
bool require_atomic_access) {
bool require_atomic_access,
MemNode::MemOrd mo) {

assert(adr_idx != Compile::AliasIdxTop, "use other make_load factory" );
const TypePtr* adr_type = NULL; // debug-mode-only argument
@@ -359,9 +360,9 @@ Node* IdealKit::load(Node* ctl,
Node* mem = memory(adr_idx);
Node* ld;
if (require_atomic_access && bt == T_LONG) {
ld = LoadLNode::make_atomic(ctl, mem, adr, adr_type, t, MemNode::unordered);
ld = LoadLNode::make_atomic(ctl, mem, adr, adr_type, t, mo);
} else {
ld = LoadNode::make(_gvn, ctl, mem, adr, adr_type, t, bt, MemNode::unordered);
ld = LoadNode::make(_gvn, ctl, mem, adr, adr_type, t, bt, mo);
}
return transform(ld);
}

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2005, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2005, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -220,7 +220,7 @@ class IdealKit: public StackObj {
const Type* t,
BasicType bt,
int adr_idx,
bool require_atomic_access = false);
bool require_atomic_access = false, MemNode::MemOrd mo = MemNode::unordered);

// Return the new StoreXNode
Node* store(Node* ctl,
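IdealKit::load() gains an explicit memory-order argument because the getClassId intrinsic reads the JFR "signaled" flag with acquire semantics and publishes it with a release store (see the library_call.cpp hunk below). A standalone sketch of that acquire/release pairing using std::atomic, not HotSpot code:

#include <atomic>
#include <cstdio>
#include <thread>

// Stand-in for the JfrSignal flag whose address the intrinsic embeds as a constant.
static std::atomic<bool> signaled{false};

int main() {
  std::thread compiled_code([] {
    // Intrinsic side: check-then-set, acquire load paired with a release store,
    // mirroring the MemNode::acquire / MemNode::release uses in the hunk below.
    if (!signaled.load(std::memory_order_acquire)) {
      signaled.store(true, std::memory_order_release);
    }
  });
  compiled_code.join();
  // Recorder side: an acquire load observes the flag and everything published before it.
  std::printf("signaled=%d\n", (int)signaled.load(std::memory_order_acquire));
}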
@@ -57,6 +57,10 @@
#include "utilities/macros.hpp"
#include "utilities/powerOfTwo.hpp"

#if INCLUDE_JFR
#include "jfr/jfr.hpp"
#endif

//---------------------------make_vm_intrinsic----------------------------
CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
vmIntrinsicID id = m->intrinsic_id();
@@ -2802,37 +2806,83 @@ bool LibraryCallKit::inline_native_time_funcs(address funcAddr, const char* func
#ifdef JFR_HAVE_INTRINSICS

/*
* oop -> myklass
* myklass->trace_id |= USED
* return myklass->trace_id & ~0x3
*/
/**
* if oop->klass != null
*   // normal class
*   epoch = _epoch_state ? 2 : 1
*   if oop->klass->trace_id & ((epoch << META_SHIFT) | epoch)) != epoch {
*     ... // enter slow path when the klass is first recorded or the epoch of JFR shifts
*   }
*   id = oop->klass->trace_id >> TRACE_ID_SHIFT // normal class path
* else
*   // primitive class
*   if oop->array_klass != null
*     id = (oop->array_klass->trace_id >> TRACE_ID_SHIFT) + 1 // primitive class path
*   else
*     id = LAST_TYPE_ID + 1 // void class path
*   if (!signaled)
*     signaled = true
*/
bool LibraryCallKit::inline_native_classID() {
Node* cls = null_check(argument(0), T_OBJECT);
Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
kls = null_check(kls, T_OBJECT);
Node* cls = argument(0);

ByteSize offset = KLASS_TRACE_ID_OFFSET;
Node* insp = basic_plus_adr(kls, in_bytes(offset));
Node* tvalue = make_load(NULL, insp, TypeLong::LONG, T_LONG, MemNode::unordered);
IdealKit ideal(this);
#define __ ideal.
IdealVariable result(ideal); __ declarations_done();
Node* kls = _gvn.transform(LoadKlassNode::make(_gvn, NULL, immutable_memory(),
basic_plus_adr(cls, java_lang_Class::klass_offset()),
TypeRawPtr::BOTTOM, TypeKlassPtr::OBJECT_OR_NULL));

Node* clsused = longcon(0x01l); // set the class bit
Node* orl = _gvn.transform(new OrLNode(tvalue, clsused));
const TypePtr *adr_type = _gvn.type(insp)->isa_ptr();
store_to_memory(control(), insp, orl, T_LONG, adr_type, MemNode::unordered);

#ifdef TRACE_ID_META_BITS
Node* mbits = longcon(~TRACE_ID_META_BITS);
tvalue = _gvn.transform(new AndLNode(tvalue, mbits));
#endif
#ifdef TRACE_ID_SHIFT
Node* cbits = intcon(TRACE_ID_SHIFT);
tvalue = _gvn.transform(new URShiftLNode(tvalue, cbits));
#endif
__ if_then(kls, BoolTest::ne, null()); {
Node* kls_trace_id_addr = basic_plus_adr(kls, in_bytes(KLASS_TRACE_ID_OFFSET));
Node* kls_trace_id_raw = ideal.load(ideal.ctrl(), kls_trace_id_addr,TypeLong::LONG, T_LONG, Compile::AliasIdxRaw);

set_result(tvalue);
Node* epoch_address = makecon(TypeRawPtr::make(Jfr::epoch_address()));
Node* epoch = ideal.load(ideal.ctrl(), epoch_address, TypeInt::BOOL, T_BOOLEAN, Compile::AliasIdxRaw);
epoch = _gvn.transform(new LShiftLNode(longcon(1), epoch));
Node* mask = _gvn.transform(new LShiftLNode(epoch, intcon(META_SHIFT)));
mask = _gvn.transform(new OrLNode(mask, epoch));
Node* kls_trace_id_raw_and_mask = _gvn.transform(new AndLNode(kls_trace_id_raw, mask));

float unlikely = PROB_UNLIKELY(0.999);
__ if_then(kls_trace_id_raw_and_mask, BoolTest::ne, epoch, unlikely); {
sync_kit(ideal);
make_runtime_call(RC_LEAF,
OptoRuntime::get_class_id_intrinsic_Type(),
CAST_FROM_FN_PTR(address, Jfr::get_class_id_intrinsic),
"get_class_id_intrinsic",
TypePtr::BOTTOM,
kls);
ideal.sync_kit(this);
} __ end_if();

ideal.set(result, _gvn.transform(new URShiftLNode(kls_trace_id_raw, ideal.ConI(TRACE_ID_SHIFT))));
} __ else_(); {
Node* array_kls = _gvn.transform(LoadKlassNode::make(_gvn, NULL, immutable_memory(),
basic_plus_adr(cls, java_lang_Class::array_klass_offset()),
TypeRawPtr::BOTTOM, TypeKlassPtr::OBJECT_OR_NULL));
__ if_then(array_kls, BoolTest::ne, null()); {
Node* array_kls_trace_id_addr = basic_plus_adr(array_kls, in_bytes(KLASS_TRACE_ID_OFFSET));
Node* array_kls_trace_id_raw = ideal.load(ideal.ctrl(), array_kls_trace_id_addr, TypeLong::LONG, T_LONG, Compile::AliasIdxRaw);
Node* array_kls_trace_id = _gvn.transform(new URShiftLNode(array_kls_trace_id_raw, ideal.ConI(TRACE_ID_SHIFT)));
ideal.set(result, _gvn.transform(new AddLNode(array_kls_trace_id, longcon(1))));
} __ else_(); {
// void class case
ideal.set(result, _gvn.transform(longcon(LAST_TYPE_ID + 1)));
} __ end_if();

Node* signaled_flag_address = makecon(TypeRawPtr::make(Jfr::signal_address()));
Node* signaled = ideal.load(ideal.ctrl(), signaled_flag_address, TypeInt::BOOL, T_BOOLEAN, Compile::AliasIdxRaw, true, MemNode::acquire);
__ if_then(signaled, BoolTest::ne, ideal.ConI(1)); {
ideal.store(ideal.ctrl(), signaled_flag_address, ideal.ConI(1), T_BOOLEAN, Compile::AliasIdxRaw, MemNode::release, true);
} __ end_if();
} __ end_if();

final_sync(ideal);
set_result(ideal.value(result));
#undef __
return true;
}

bool LibraryCallKit::inline_native_getEventWriter() {
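To make the epoch check in the comment above concrete: a standalone sketch of the fast-path test the generated code performs before calling into Jfr::get_class_id_intrinsic (the META_SHIFT and TRACE_ID_SHIFT values here are illustrative stand-ins, not the real JFR bit layout):

#include <cstdint>
#include <cstdio>

// Illustrative constants; the real META_SHIFT/TRACE_ID_SHIFT values and epoch
// encoding live in the JFR trace-id headers.
static const int kMetaShift = 8;  // META_SHIFT stand-in (assumption)
static const int kIdShift   = 16; // TRACE_ID_SHIFT stand-in (assumption)

// epoch_state flips on every JFR epoch rotation; the pseudocode maps it to 2 or 1.
static uint64_t epoch_bits(bool epoch_state) { return epoch_state ? 2 : 1; }

// Slow path is needed unless the klass is already tagged for the current epoch:
// (trace_id & ((epoch << META_SHIFT) | epoch)) == epoch.
static bool needs_slow_path(uint64_t trace_id, bool epoch_state) {
  const uint64_t epoch = epoch_bits(epoch_state);
  const uint64_t mask  = (epoch << kMetaShift) | epoch;
  return (trace_id & mask) != epoch;
}

int main() {
  uint64_t trace_id = 42ULL << kIdShift;                              // untagged klass, id 42
  std::printf("%d\n", needs_slow_path(trace_id, false));              // 1: first sighting, take leaf call
  trace_id |= epoch_bits(false);                                      // slow path tags it for this epoch
  std::printf("%d\n", needs_slow_path(trace_id, false));              // 0: fast path from now on
  std::printf("%llu\n", (unsigned long long)(trace_id >> kIdShift));  // 42
}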
@@ -1498,6 +1498,21 @@ const TypeFunc *OptoRuntime::register_finalizer_Type() {
return TypeFunc::make(domain,range);
}

#if INCLUDE_JFR
const TypeFunc *OptoRuntime::get_class_id_intrinsic_Type() {
// create input type (domain)
const Type **fields = TypeTuple::fields(1);
fields[TypeFunc::Parms+0] = TypeInstPtr::KLASS;
const TypeTuple *domain = TypeTuple::make(TypeFunc::Parms + 1, fields);

// create result type (range)
fields = TypeTuple::fields(0);

const TypeTuple *range = TypeTuple::make(TypeFunc::Parms + 0, fields);

return TypeFunc::make(domain,range);
}
#endif

//-----------------------------------------------------------------------------
// Dtrace support. entry and exit probes have the same signature
@@ -304,6 +304,8 @@ private:
static const TypeFunc* register_finalizer_Type();

JFR_ONLY(static const TypeFunc* get_class_id_intrinsic_Type();)

// Dtrace support
static const TypeFunc* dtrace_method_entry_exit_Type();
static const TypeFunc* dtrace_object_alloc_Type();
@@ -986,7 +986,6 @@ JRT_ENTRY_NO_ASYNC(void, SharedRuntime::register_finalizer(JavaThread* current,
InstanceKlass::register_finalizer(instanceOop(obj), CHECK);
JRT_END

jlong SharedRuntime::get_java_tid(Thread* thread) {
if (thread != NULL) {
if (thread->is_Java_thread()) {
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -166,7 +166,7 @@ public final class EventWriter {
if (aClass == null) {
putLong(0L);
} else {
putLong(JVM.getClassIdNonIntrinsic(aClass));
putLong(JVM.getClassId(aClass));
}
}
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2017, 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2017, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -142,9 +142,6 @@ public final class JVM {
@IntrinsicCandidate
public static native long getClassId(Class<?> clazz);

// temporary workaround until we solve intrinsics supporting epoch shift tagging
public static native long getClassIdNonIntrinsic(Class<?> clazz);

/**
* Return process identifier.
*
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2019, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -68,7 +68,7 @@ public final class TestClearStaleConstants {
Class<?> clz = recordClassDefinition(firstClassLoader);
JVM jvm = JVM.getJVM();
// we will now tag the defined and loaded clz as being in use (no recordings are running here)
jvm.getClassIdNonIntrinsic(clz);
jvm.getClassId(clz);
// null out for unload to occur
firstClassLoader = null;
clz = null;
@@ -59,24 +59,20 @@ public class TestJFRIntrinsic {
public Object eventWriter;

public static void main(String... args) throws Exception {
/*
Temporarily excluded until getClassId is reworked to accommodate epoch shift tagging
JVM.getJVM().createNativeJFR();
TestJFRIntrinsic ti = new TestJFRIntrinsic();
Method classid = TestJFRIntrinsic.class.getDeclaredMethod("getClassIdIntrinsic", Class.class);
ti.runIntrinsicTest(classid);
*/
TestJFRIntrinsic ti = new TestJFRIntrinsic();
Method eventWriterMethod = TestJFRIntrinsic.class.getDeclaredMethod("getEventWriterIntrinsic", Class.class);
ti.runIntrinsicTest(eventWriterMethod);
}

/*
public void getClassIdIntrinsic(Class<?> cls) {
long exp = JVM.getClassId(cls);
if (exp == 0) {
throw new RuntimeException("Class id is zero");
}
}
*/

public void getEventWriterIntrinsic(Class<?> cls) {
Object o = JVM.getEventWriter();