8313552: Fix -Wconversion warnings in JFR code

Reviewed-by: coleenp
Markus Grönlund 2023-08-08 11:01:59 +00:00
parent 7e209528d3
commit 091e65e95b
28 changed files with 238 additions and 228 deletions
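Nearly every hunk below follows the same recipe: -Wconversion flags implicit conversions that can change a value (int64_t or size_t narrowed to u2/u4, int narrowed to u1, signed mixed with unsigned), and the fix is either to widen the declared type so no narrowing happens at all, or to make the narrowing explicit with static_cast at the one place it is intended. A minimal standalone sketch of the warning and both fix styles, using made-up names rather than anything from the HotSpot sources:

// g++ -Wconversion -c sketch.cpp
#include <cstdint>

using u2 = uint16_t;
using u4 = uint32_t;

static int64_t current_offset() { return 12345; }  // stand-in for a writer offset

static u4 attribute_length(int64_t start_offset) {
  // u4 len = current_offset() - start_offset;               // int64_t -> u4 may lose data: warns
  u4 len = static_cast<u4>(current_offset() - start_offset);  // fix style 1: cast where truncation is intended
  return len - 4;                                             // exclude the length field itself
}

static u2 clamp_index(int index) {
  // fix style 2: keep the wider type for the computation and narrow once at the boundary
  if (index < 0 || index > UINT16_MAX) {
    return UINT16_MAX;
  }
  return static_cast<u2>(index);
}

int main() {
  return (attribute_length(0) != 0 && clamp_index(70000) == UINT16_MAX) ? 0 : 1;
}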

View File

@@ -167,25 +167,27 @@ static u1 boolean_method_code_attribute[] = {
   0x0, // attributes_count
 };
-// annotation processing support
+/*
+  Annotation layout.
 enum { // initial annotation layout
   atype_off = 0,      // utf8 such as 'Ljava/lang/annotation/Retention;'
   count_off = 2,      // u2 such as 1 (one value)
   member_off = 4,     // utf8 such as 'value'
   tag_off = 6,        // u1 such as 'c' (type) or 'e' (enum)
   e_tag_val = 'e',
   e_type_off = 7,     // utf8 such as 'Ljava/lang/annotation/RetentionPolicy;'
   e_con_off = 9,      // utf8 payload, such as 'SOURCE', 'CLASS', 'RUNTIME'
   e_size = 11,        // end of 'e' annotation
   c_tag_val = 'c',    // payload is type
   c_con_off = 7,      // utf8 payload, such as 'I'
   c_size = 9,         // end of 'c' annotation
   s_tag_val = 's',    // payload is String
   s_con_off = 7,      // utf8 payload, such as 'Ljava/lang/String;'
   s_size = 9,
   min_size = 6        // smallest possible size (zero members)
 };
+*/
 static int skip_annotation_value(const address, int, int); // fwd decl
@@ -196,7 +198,7 @@ static int next_annotation_index(const address buffer, int limit, int index) {
   if ((index += 2) >= limit) {
     return limit;
   }
-  int nof_members = JfrBigEndian::read<u2>(buffer + index - 2);
+  int nof_members = JfrBigEndian::read<int, u2>(buffer + index - 2);
   while (--nof_members >= 0 && index < limit) {
     index += 2; // skip member
     index = skip_annotation_value(buffer, limit, index);
@@ -240,7 +242,7 @@ static int skip_annotation_value(const address buffer, int limit, int index) {
   if ((index += 2) >= limit) {
     return limit;
   }
-  int nof_values = JfrBigEndian::read<u2>(buffer + index - 2);
+  int nof_values = JfrBigEndian::read<int, u2>(buffer + index - 2);
   while (--nof_values >= 0 && index < limit) {
     index = skip_annotation_value(buffer, limit, index);
   }
@@ -255,11 +257,11 @@ static int skip_annotation_value(const address buffer, int limit, int index) {
   return index;
 }
-static const u2 number_of_elements_offset = (u2)2;
-static const u2 element_name_offset = (u2)(number_of_elements_offset + 2);
-static const u2 element_name_size = (u2)2;
-static const u2 value_type_relative_offset = (u2)2;
-static const u2 value_relative_offset = (u2)(value_type_relative_offset + 1);
+static constexpr const int number_of_elements_offset = 2;
+static constexpr const int element_name_offset = number_of_elements_offset + 2;
+static constexpr const int element_name_size = 2;
+static constexpr const int value_type_relative_offset = 2;
+static constexpr const int value_relative_offset = value_type_relative_offset + 1;
 // see JVMS - 4.7.16. The RuntimeVisibleAnnotations Attribute
@@ -267,19 +269,20 @@ class AnnotationElementIterator : public StackObj {
  private:
   const InstanceKlass* _ik;
   const address _buffer;
-  const u2 _limit; // length of annotation
-  mutable u2 _current; // element
-  mutable u2 _next; // element
-  u2 value_index() const {
-    return JfrBigEndian::read<u2>(_buffer + _current + value_relative_offset);
+  const int _limit; // length of annotation
+  mutable int _current; // element
+  mutable int _next; // element
+  int value_index() const {
+    return JfrBigEndian::read<int, u2>(_buffer + _current + value_relative_offset);
   }
  public:
-  AnnotationElementIterator(const InstanceKlass* ik, address buffer, u2 limit) : _ik(ik),
+  AnnotationElementIterator(const InstanceKlass* ik, address buffer, int limit) : _ik(ik),
       _buffer(buffer),
       _limit(limit),
       _current(element_name_offset),
       _next(element_name_offset) {
     assert(_buffer != nullptr, "invariant");
     assert(_next == element_name_offset, "invariant");
     assert(_current == element_name_offset, "invariant");
@@ -299,17 +302,17 @@ class AnnotationElementIterator : public StackObj {
     assert(_current <= _limit, "invariant");
   }
-  u2 number_of_elements() const {
-    return JfrBigEndian::read<u2>(_buffer + number_of_elements_offset);
+  int number_of_elements() const {
+    return JfrBigEndian::read<int, u2>(_buffer + number_of_elements_offset);
   }
   const Symbol* name() const {
     assert(_current < _next, "invariant");
-    return _ik->constants()->symbol_at(JfrBigEndian::read<u2>(_buffer + _current));
+    return _ik->constants()->symbol_at(JfrBigEndian::read<int, u2>(_buffer + _current));
   }
   char value_type() const {
-    return JfrBigEndian::read<u1>(_buffer + _current + value_type_relative_offset);
+    return JfrBigEndian::read<char, u1>(_buffer + _current + value_type_relative_offset);
   }
   jint read_int() const {
@@ -325,10 +328,10 @@ class AnnotationIterator : public StackObj {
  private:
   const InstanceKlass* _ik;
   // ensure _limit field is declared before _buffer
-  u2 _limit; // length of annotations array
+  int _limit; // length of annotations array
   const address _buffer;
-  mutable u2 _current; // annotation
-  mutable u2 _next; // annotation
+  mutable int _current; // annotation
+  mutable int _next; // annotation
  public:
   AnnotationIterator(const InstanceKlass* ik, AnnotationArray* ar) : _ik(ik),
@@ -353,14 +356,16 @@ class AnnotationIterator : public StackObj {
     assert(_next <= _limit, "invariant");
     assert(_current <= _limit, "invariant");
   }
   const AnnotationElementIterator elements() const {
     assert(_current < _next, "invariant");
     return AnnotationElementIterator(_ik, _buffer + _current, _next - _current);
   }
   const Symbol* type() const {
     assert(_buffer != nullptr, "invariant");
     assert(_current < _limit, "invariant");
-    return _ik->constants()->symbol_at(JfrBigEndian::read<u2>(_buffer + _current));
+    return _ik->constants()->symbol_at(JfrBigEndian::read<int, u2>(_buffer + _current));
   }
 };
@@ -476,13 +481,13 @@ static u2 utf8_info_index(const InstanceKlass* ik, const Symbol* const target, T
   assert(target != nullptr, "invariant");
   const ConstantPool* cp = ik->constants();
   const int cp_len = cp->length();
-  for (u2 index = 1; index < cp_len; ++index) {
+  for (int index = 1; index < cp_len; ++index) {
     const constantTag tag = cp->tag_at(index);
     if (tag.is_utf8()) {
       const Symbol* const utf8_sym = cp->symbol_at(index);
       assert(utf8_sym != nullptr, "invariant");
       if (utf8_sym == target) {
-        return index;
+        return static_cast<u2>(index);
       }
     }
   }
@@ -680,7 +685,7 @@ static u2 position_stream_after_cp(const ClassFileStream* stream) {
         continue;
       }
       case JVM_CONSTANT_Utf8: {
-        u2 utf8_length = stream->get_u2_fast();
+        int utf8_length = static_cast<int>(stream->get_u2_fast());
         stream->skip_u1_fast(utf8_length); // skip 2 + len bytes
         continue;
       }
@@ -725,8 +730,7 @@ static u2 position_stream_after_fields(const ClassFileStream* stream) {
     const u2 attrib_info_len = stream->get_u2_fast();
     for (u2 j = 0; j < attrib_info_len; ++j) {
       stream->skip_u2_fast(1);
-      const u4 attrib_len = stream->get_u4_fast();
-      stream->skip_u1_fast(attrib_len);
+      stream->skip_u1_fast(static_cast<int>(stream->get_u4_fast()));
     }
   }
   return orig_fields_len;
@@ -754,7 +758,7 @@ static u2 position_stream_after_methods(JfrBigEndianWriter& writer,
   const u2 orig_methods_len = stream->get_u2_fast();
   // Move copy position past original method_count
   // in order to not copy the original count
-  orig_method_len_offset += sizeof(u2);
+  orig_method_len_offset += 2;
   for (u2 i = 0; i < orig_methods_len; ++i) {
     const u4 method_offset = stream->current_offset();
     stream->skip_u2_fast(1); // Access Flags
@@ -763,8 +767,7 @@ static u2 position_stream_after_methods(JfrBigEndianWriter& writer,
     const u2 attributes_count = stream->get_u2_fast();
     for (u2 j = 0; j < attributes_count; ++j) {
       stream->skip_u2_fast(1);
-      const u4 attrib_len = stream->get_u4_fast();
-      stream->skip_u1_fast(attrib_len);
+      stream->skip_u1_fast(static_cast<int>(stream->get_u4_fast()));
     }
     if (clinit_method != nullptr && name_index == clinit_method->name_index()) {
       // The method just parsed is an existing <clinit> method.
@@ -853,7 +856,7 @@ static void adjust_exception_table(JfrBigEndianWriter& writer, u2 bci_adjustment
   }
 }
-enum StackMapFrameTypes {
+enum StackMapFrameTypes : u1 {
   SAME_FRAME_BEGIN = 0,
   SAME_FRAME_END = 63,
   SAME_LOCALS_1_STACK_ITEM_FRAME_BEGIN = 64,
@@ -895,7 +898,8 @@ static void adjust_stack_map(JfrBigEndianWriter& writer,
     } else if (frame_type >= SAME_LOCALS_1_STACK_ITEM_FRAME_BEGIN &&
                frame_type <= SAME_LOCALS_1_STACK_ITEM_FRAME_END) {
       writer.write<u1>(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED);
-      writer.write<u2>((frame_type - SAME_LOCALS_1_STACK_ITEM_FRAME_BEGIN) + bci_adjustment_offset);
+      const u2 value = frame_type - SAME_LOCALS_1_STACK_ITEM_FRAME_BEGIN;
+      writer.write<u2>(value + bci_adjustment_offset);
     } else if (frame_type >= SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED) {
       // SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED to FULL_FRAME
       // has a u2 offset_delta field
@@ -909,9 +913,9 @@ static void adjust_stack_map(JfrBigEndianWriter& writer,
     writer.write<u1>(stream.get_u1(THREAD));
   }
-  u4 stack_map_attrib_len = writer.current_offset() - stack_map_attrib_len_offset;
+  u4 stack_map_attrib_len = static_cast<u4>(writer.current_offset() - stack_map_attrib_len_offset);
   // the stack_map_table_attributes_length value is exclusive
-  stack_map_attrib_len -= sizeof(u4);
+  stack_map_attrib_len -= 4;
   writer.write_at_offset(stack_map_attrib_len, stack_map_attrib_len_offset);
 }
@@ -938,9 +942,9 @@ static void adjust_line_number_table(JfrBigEndianWriter& writer,
     writer.write<u2>((u2)lnt_stream.line());
   }
   writer.write_at_offset(line_number_table_entries, lnt_attributes_entries_offset);
-  u4 lnt_table_attributes_len = writer.current_offset() - lnt_attributes_length_offset;
+  u4 lnt_table_attributes_len = static_cast<u4>(writer.current_offset() - lnt_attributes_length_offset);
   // the line_number_table_attributes_length value is exclusive
-  lnt_table_attributes_len -= sizeof(u4);
+  lnt_table_attributes_len -= 4;
   writer.write_at_offset(lnt_table_attributes_len, lnt_attributes_length_offset);
 }
@@ -971,9 +975,9 @@ static u2 adjust_local_variable_table(JfrBigEndianWriter& writer,
       ++num_lvtt_entries;
     }
   }
-  u4 lvt_table_attributes_len = writer.current_offset() - lvt_attributes_length_offset;
+  u4 lvt_table_attributes_len = static_cast<u4>(writer.current_offset() - lvt_attributes_length_offset);
   // the lvt_table_attributes_length value is exclusive
-  lvt_table_attributes_len -= sizeof(u4);
+  lvt_table_attributes_len -= 4;
   writer.write_at_offset(lvt_table_attributes_len, lvt_attributes_length_offset);
   return num_lvtt_entries;
 }
@@ -1001,9 +1005,9 @@ static void adjust_local_variable_type_table(JfrBigEndianWriter& writer,
       writer.write<u2>(table[i].slot);
     }
   }
-  u4 lvtt_table_attributes_len = writer.current_offset() - lvtt_attributes_length_offset;
+  u4 lvtt_table_attributes_len = static_cast<u4>(writer.current_offset() - lvtt_attributes_length_offset);
   // the lvtt_table_attributes_length value is exclusive
-  lvtt_table_attributes_len -= sizeof(u4);
+  lvtt_table_attributes_len -= 4;
   writer.write_at_offset(lvtt_table_attributes_len, lvtt_attributes_length_offset);
 }
@@ -1061,8 +1065,8 @@ static jlong insert_clinit_method(const InstanceKlass* ik,
   const u2 name_index = utf8_indexes[UTF8_OPT_clinit];
   assert(name_index != invalid_cp_index, "invariant");
   const u2 desc_index = utf8_indexes[UTF8_REQ_EMPTY_VOID_METHOD_DESC];
-  const u2 max_stack = MAX2(clinit_method != nullptr ? clinit_method->verifier_max_stack() : 1, 1);
-  const u2 max_locals = MAX2(clinit_method != nullptr ? clinit_method->max_locals() : 0, 0);
+  const u2 max_stack = MAX2<u2>(clinit_method != nullptr ? clinit_method->verifier_max_stack() : 1, 1);
+  const u2 max_locals = MAX2<u2>(clinit_method != nullptr ? clinit_method->max_locals() : 0, 0);
   const u2 orig_bytecodes_length = clinit_method != nullptr ? (u2)clinit_method->code_size() : 0;
   const address orig_bytecodes = clinit_method != nullptr ? clinit_method->code_base() : nullptr;
   const u2 new_code_length = injected_code_length + orig_bytecodes_length;
@@ -1111,9 +1115,9 @@ static jlong insert_clinit_method(const InstanceKlass* ik,
   assert(writer.is_valid(), "invariant");
   adjust_code_attributes(writer, utf8_indexes, injected_code_length, clinit_method, THREAD);
   assert(writer.is_valid(), "invariant");
-  u4 code_attribute_len = writer.current_offset() - code_attribute_length_offset;
+  u4 code_attribute_len = static_cast<u4>(writer.current_offset() - code_attribute_length_offset);
   // the code_attribute_length value is exclusive
-  code_attribute_len -= sizeof(u4);
+  code_attribute_len -= 4;
   writer.write_at_offset(code_attribute_len, code_attribute_length_offset);
   return writer.current_offset();
 }
@@ -1212,7 +1216,7 @@ static u2 find_or_add_utf8_info(JfrBigEndianWriter& writer,
   assert(utf8_constant != nullptr, "invariant");
   TempNewSymbol utf8_sym = SymbolTable::new_symbol(utf8_constant);
   // lookup existing
-  const int utf8_orig_idx = utf8_info_index(ik, utf8_sym, THREAD);
+  const u2 utf8_orig_idx = utf8_info_index(ik, utf8_sym, THREAD);
   if (utf8_orig_idx != invalid_cp_index) {
     // existing constant pool entry found
     return utf8_orig_idx;
@@ -1405,8 +1409,10 @@ static u1* schema_extend_event_subklass_bytes(const InstanceKlass* ik,
   //
   if (register_klass) {
     insert_clinit_method(ik, parser, writer, orig_cp_len, utf8_indexes, flr_register_method_ref_index, clinit_method, THREAD);
+    if (clinit_method == nullptr) {
+      ++number_of_new_methods;
+    }
   }
-  number_of_new_methods += clinit_method != nullptr ? 0 : register_klass ? 1 : 0;
   // Update classfile methods_count
   writer.write_at_offset<u2>(orig_methods_len + number_of_new_methods, new_method_len_offset);
   assert(writer.is_valid(), "invariant");
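Most reads in this file now name both the destination type and the on-disk width, e.g. JfrBigEndian::read<int, u2>(...), so the widening happens once inside the helper instead of as an implicit conversion at every call site. A sketch of that shape, written as a stand-alone helper under my own name and assumptions rather than the real JfrBigEndian implementation:

#include <cstdint>
#include <cstddef>

using u1 = uint8_t;
using u2 = uint16_t;

// Read a big-endian value of width T from 'buf' and return it as the
// (usually wider) type Return, so call sites need no implicit narrowing.
template <typename Return, typename T>
static Return be_read(const u1* buf) {
  T value = 0;
  for (size_t i = 0; i < sizeof(T); ++i) {
    value = static_cast<T>((value << 8) | buf[i]);
  }
  return static_cast<Return>(value);
}

int main() {
  const u1 bytes[] = { 0x01, 0x02 };              // 0x0102 big-endian
  const int nof_members = be_read<int, u2>(bytes);
  return nof_members == 0x0102 ? 0 : 1;
}

The same idea appears later in the series as JfrBigEndian::read<Return, Return>(data), where the read width and the returned type are deliberately identical.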

View File

@@ -51,13 +51,13 @@ class ObjectSample : public JfrCHeapObj {
   JfrBlobHandle _type_set;
   WeakHandle _object;
   Ticks _allocation_time;
-  traceid _stack_trace_id;
   traceid _thread_id;
-  int _index;
+  traceid _stack_trace_id;
+  traceid _stack_trace_hash;
   size_t _span;
   size_t _allocated;
   size_t _heap_used_at_last_gc;
-  unsigned int _stack_trace_hash;
+  int _index;
   bool _virtual_thread;
   void release_references() {
@@ -75,13 +75,13 @@ class ObjectSample : public JfrCHeapObj {
     _thread(),
     _type_set(),
     _allocation_time(),
-    _stack_trace_id(0),
     _thread_id(0),
-    _index(0),
+    _stack_trace_id(0),
+    _stack_trace_hash(0),
     _span(0),
     _allocated(0),
     _heap_used_at_last_gc(0),
-    _stack_trace_hash(0),
+    _index(0),
     _virtual_thread(false) {}
   ObjectSample* next() const {
@@ -170,11 +170,11 @@ class ObjectSample : public JfrCHeapObj {
     _stack_trace_id = id;
   }
-  unsigned int stack_trace_hash() const {
+  traceid stack_trace_hash() const {
     return _stack_trace_hash;
   }
-  void set_stack_trace_hash(unsigned int hash) {
+  void set_stack_trace_hash(traceid hash) {
     _stack_trace_hash = hash;
   }
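The reorder above groups the traceid-width members together and moves int _index down next to the bool. Whatever the primary motivation, keeping same-width members adjacent is what keeps the object compact once _stack_trace_hash grows from unsigned int to the 64-bit traceid; a quick sizeof comparison with illustrative stand-in structs (not the real ObjectSample):

#include <cstdint>
#include <cstdio>

using traceid = uint64_t;

struct Interleaved {            // wide / narrow / wide forces padding around the int
  traceid thread_id;
  int     index;
  traceid stack_trace_hash;
  bool    virtual_thread;
};

struct Grouped {                // wide members first, narrow ones last
  traceid thread_id;
  traceid stack_trace_hash;
  int     index;
  bool    virtual_thread;
};

int main() {
  // Typically prints 32 vs 24 bytes on LP64 platforms.
  std::printf("interleaved: %zu bytes, grouped: %zu bytes\n",
              sizeof(Interleaved), sizeof(Grouped));
  return 0;
}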

View File

@@ -249,7 +249,7 @@ void ObjectSampler::add(HeapWord* obj, size_t allocated, traceid thread_id, bool
   sample->set_thread(bh);
   const JfrThreadLocal* const tl = thread->jfr_thread_local();
-  const unsigned int stacktrace_hash = tl->cached_stack_trace_hash();
+  const traceid stacktrace_hash = tl->cached_stack_trace_hash();
   if (stacktrace_hash != 0) {
     sample->set_stack_trace_id(tl->cached_stack_trace_id());
     sample->set_stack_trace_hash(stacktrace_hash);

View File

@@ -338,7 +338,7 @@ TRACE_REQUEST_FUNC(ThreadContextSwitchRate) {
 #define SEND_FLAGS_OF_TYPE(eventType, flagType) \
   do { \
     JVMFlag *flag = JVMFlag::flags; \
     while (flag->name() != nullptr) { \
       if (flag->is_ ## flagType()) { \
         if (flag->is_unlocked()) { \
           Event ## eventType event; \
@@ -416,7 +416,7 @@ TRACE_REQUEST_FUNC(GCConfiguration) {
   event.set_usesDynamicGCThreads(conf.uses_dynamic_gc_threads());
   event.set_isExplicitGCConcurrent(conf.is_explicit_gc_concurrent());
   event.set_isExplicitGCDisabled(conf.is_explicit_gc_disabled());
-  event.set_gcTimeRatio(conf.gc_time_ratio());
+  event.set_gcTimeRatio(static_cast<unsigned int>(conf.gc_time_ratio()));
   event.set_pauseTarget((s8)pause_target);
   event.commit();
 }
@@ -433,8 +433,8 @@ TRACE_REQUEST_FUNC(GCTLABConfiguration) {
 TRACE_REQUEST_FUNC(GCSurvivorConfiguration) {
   GCSurvivorConfiguration conf;
   EventGCSurvivorConfiguration event;
-  event.set_maxTenuringThreshold(conf.max_tenuring_threshold());
-  event.set_initialTenuringThreshold(conf.initial_tenuring_threshold());
+  event.set_maxTenuringThreshold(static_cast<u1>(conf.max_tenuring_threshold()));
+  event.set_initialTenuringThreshold(static_cast<u1>(conf.initial_tenuring_threshold()));
   event.commit();
 }
@@ -447,7 +447,7 @@ TRACE_REQUEST_FUNC(GCHeapConfiguration) {
   event.set_usesCompressedOops(conf.uses_compressed_oops());
   event.set_compressedOopsMode(conf.narrow_oop_mode());
   event.set_objectAlignment(conf.object_alignment_in_bytes());
-  event.set_heapAddressBits(conf.heap_address_size_in_bits());
+  event.set_heapAddressBits(static_cast<u1>(conf.heap_address_size_in_bits()));
   event.commit();
 }
@@ -457,7 +457,7 @@ TRACE_REQUEST_FUNC(YoungGenerationConfiguration) {
   EventYoungGenerationConfiguration event;
   event.set_maxSize((u8)max_size);
   event.set_minSize(conf.min_size());
-  event.set_newRatio(conf.new_ratio());
+  event.set_newRatio(static_cast<unsigned int>(conf.new_ratio()));
   event.commit();
 }
@@ -660,7 +660,7 @@ TRACE_REQUEST_FUNC(CompilerStatistics) {
 TRACE_REQUEST_FUNC(CompilerConfiguration) {
   EventCompilerConfiguration event;
-  event.set_threadCount(CICompilerCount);
+  event.set_threadCount(static_cast<s4>(CICompilerCount));
   event.set_tieredCompilation(TieredCompilation);
   event.set_dynamicCompilerThreadCount(UseDynamicNumberOfCompilerThreads);
   event.commit();

View File

@@ -81,7 +81,7 @@ bool JfrThreadCPULoadEvent::update_event(EventThreadCPULoad& event, JavaThread*
   jlong user_time = cur_user_time - prev_user_time;
   jlong system_time = cur_system_time - prev_system_time;
   jlong wallclock_time = cur_wallclock_time - prev_wallclock_time;
-  jlong total_available_time = wallclock_time * processor_count;
+  const float total_available_time = static_cast<float>(wallclock_time * processor_count);
   // Avoid reporting percentages above the theoretical max
   if (user_time + system_time > wallclock_time) {
@@ -97,8 +97,8 @@ bool JfrThreadCPULoadEvent::update_event(EventThreadCPULoad& event, JavaThread*
       system_time -= excess;
     }
   }
-  event.set_user(total_available_time > 0 ? (double)user_time / total_available_time : 0);
-  event.set_system(total_available_time > 0 ? (double)system_time / total_available_time : 0);
+  event.set_user(total_available_time > 0 ? static_cast<float>(user_time) / total_available_time : 0);
+  event.set_system(total_available_time > 0 ? static_cast<float>(system_time) / total_available_time : 0);
   tl->set_user_time(cur_user_time);
   tl->set_cpu_time(cur_cpu_time);
   return true;
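The CPU-load math above now keeps the available time as a float and divides float by float, instead of mixing jlong and double conversions inside each setter; the change suggests the generated event setters take float. The same arithmetic as a standalone sketch (names invented here):

#include <cstdint>

using jlong = int64_t;

// Returns the user CPU share in [0, 1] for one sampling interval.
static float user_share(jlong user_time, jlong wallclock_time, int processor_count) {
  const float total_available_time =
      static_cast<float>(wallclock_time * processor_count);
  if (total_available_time <= 0.0f) {
    return 0.0f;
  }
  return static_cast<float>(user_time) / total_available_time;
}

int main() {
  // 2 cores, 50 ms of user time over a 100 ms window -> 0.25
  return user_share(50, 100, 2) == 0.25f ? 0 : 1;
}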

View File

@@ -312,7 +312,7 @@ static const size_t payload_offset = types_offset + sizeof(uint32_t);
 template <typename Return>
 static Return read_data(const u1* data) {
-  return JfrBigEndian::read<Return>(data);
+  return JfrBigEndian::read<Return, Return>(data);
 }
 static size_t total_size(const u1* data) {

View File

@@ -305,7 +305,7 @@ traceid JfrThreadGroup::thread_group_id_internal(JfrThreadGroupsHelper& helper)
   }
   JfrThreadGroupEntry* tge = nullptr;
-  int parent_thread_group_id = 0;
+  traceid parent_thread_group_id = 0;
   while (helper.has_next()) {
     JfrThreadGroupPointers& ptrs = helper.next();
     tge = tg_instance->find_entry(ptrs);

View File

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2019, 2023, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -26,7 +26,6 @@
 #define SHARE_JFR_RECORDER_CHECKPOINT_TYPES_TRACEID_JFRTRACEIDBITS_HPP
 #include "jfr/utilities/jfrTypes.hpp"
-#include "jni.h"
 #include "memory/allStatic.hpp"
 class JfrTraceIdBits : AllStatic {
@@ -35,28 +34,28 @@ class JfrTraceIdBits : AllStatic {
   static traceid load(const T* ptr);
   template <typename T>
-  static void store(jbyte bits, const T* ptr);
+  static void store(uint8_t bits, const T* ptr);
   template <typename T>
-  static void cas(jbyte bits, const T* ptr);
+  static void cas(uint8_t bits, const T* ptr);
   template <typename T>
-  static void meta_store(jbyte bits, const T* ptr);
+  static void meta_store(uint8_t bits, const T* ptr);
   template <typename T>
-  static void mask_store(jbyte mask, const T* ptr);
+  static void mask_store(uint8_t mask, const T* ptr);
   template <typename T>
-  static void meta_mask_store(jbyte mask, const T* ptr);
+  static void meta_mask_store(uint8_t mask, const T* ptr);
   template <typename T>
-  static void clear(jbyte bits, const T* ptr);
+  static void clear(uint8_t bits, const T* ptr);
   template <typename T>
-  static void clear_cas(jbyte bits, const T* ptr);
+  static void clear_cas(uint8_t bits, const T* ptr);
   template <typename T>
-  static void meta_clear(jbyte bits, const T* ptr);
+  static void meta_clear(uint8_t bits, const T* ptr);
 };
 #endif // SHARE_JFR_RECORDER_CHECKPOINT_TYPES_TRACEID_JFRTRACEIDBITS_HPP

View File

@@ -39,73 +39,73 @@ const int low_offset = 7;
 const int meta_offset = low_offset - 1;
 #endif
-inline jbyte* low_addr(jbyte* addr) {
+inline uint8_t* low_addr(uint8_t* addr) {
   assert(addr != nullptr, "invariant");
   return addr + low_offset;
 }
-inline jbyte* low_addr(traceid* addr) {
-  return low_addr((jbyte*)addr);
+inline uint8_t* low_addr(traceid* addr) {
+  return low_addr(reinterpret_cast<uint8_t*>(addr));
 }
-inline jbyte* meta_addr(jbyte* addr) {
+inline uint8_t* meta_addr(uint8_t* addr) {
   assert(addr != nullptr, "invariant");
   return addr + meta_offset;
 }
-inline jbyte* meta_addr(traceid* addr) {
-  return meta_addr((jbyte*)addr);
+inline uint8_t* meta_addr(traceid* addr) {
+  return meta_addr(reinterpret_cast<uint8_t*>(addr));
 }
 template <typename T>
-inline jbyte* traceid_tag_byte(const T* ptr) {
+inline uint8_t* traceid_tag_byte(const T* ptr) {
   assert(ptr != nullptr, "invariant");
   return low_addr(ptr->trace_id_addr());
 }
 template <>
-inline jbyte* traceid_tag_byte<Method>(const Method* ptr) {
+inline uint8_t* traceid_tag_byte<Method>(const Method* ptr) {
   assert(ptr != nullptr, "invariant");
   return ptr->trace_flags_addr();
 }
 template <typename T>
-inline jbyte* traceid_meta_byte(const T* ptr) {
+inline uint8_t* traceid_meta_byte(const T* ptr) {
   assert(ptr != nullptr, "invariant");
   return meta_addr(ptr->trace_id_addr());
 }
 template <>
-inline jbyte* traceid_meta_byte<Method>(const Method* ptr) {
+inline uint8_t* traceid_meta_byte<Method>(const Method* ptr) {
   assert(ptr != nullptr, "invariant");
   return ptr->trace_meta_addr();
 }
-inline jbyte traceid_and(jbyte bits, jbyte current) {
+inline uint8_t traceid_and(uint8_t bits, uint8_t current) {
   return bits & current;
 }
-inline jbyte traceid_or(jbyte bits, jbyte current) {
+inline uint8_t traceid_or(uint8_t bits, uint8_t current) {
   return bits | current;
 }
-inline jbyte traceid_xor(jbyte bits, jbyte current) {
+inline uint8_t traceid_xor(uint8_t bits, uint8_t current) {
   return bits ^ current;
 }
-template <jbyte op(jbyte, jbyte)>
-inline void set_form(jbyte bits, jbyte* dest) {
+template <uint8_t op(uint8_t, uint8_t)>
+inline void set_form(uint8_t bits, uint8_t* dest) {
   assert(dest != nullptr, "invariant");
   *dest = op(bits, *dest);
   OrderAccess::storestore();
 }
-template <jbyte op(jbyte, jbyte)>
-inline void set_cas_form(jbyte bits, jbyte volatile* dest) {
+template <uint8_t op(uint8_t, uint8_t)>
+inline void set_cas_form(uint8_t bits, uint8_t volatile* dest) {
   assert(dest != nullptr, "invariant");
   do {
-    const jbyte current = *dest;
-    const jbyte new_value = op(bits, current);
+    const uint8_t current = *dest;
+    const uint8_t new_value = op(bits, current);
     if (current == new_value || Atomic::cmpxchg(dest, current, new_value) == current) {
       return;
     }
@@ -113,7 +113,7 @@ inline void set_cas_form(jbyte bits, jbyte volatile* dest) {
 }
 template <typename T>
-inline void JfrTraceIdBits::cas(jbyte bits, const T* ptr) {
+inline void JfrTraceIdBits::cas(uint8_t bits, const T* ptr) {
   assert(ptr != nullptr, "invariant");
   set_cas_form<traceid_or>(bits, traceid_tag_byte(ptr));
 }
@@ -124,13 +124,13 @@ inline traceid JfrTraceIdBits::load(const T* ptr) {
   return ptr->trace_id();
 }
-inline void set(jbyte bits, jbyte* dest) {
+inline void set(uint8_t bits, uint8_t* dest) {
   assert(dest != nullptr, "invariant");
   set_form<traceid_or>(bits, dest);
 }
 template <typename T>
-inline void JfrTraceIdBits::store(jbyte bits, const T* ptr) {
+inline void JfrTraceIdBits::store(uint8_t bits, const T* ptr) {
   assert(ptr != nullptr, "invariant");
   // gcc12 warns "writing 1 byte into a region of size 0" when T == Klass.
   // The warning seems to be a false positive. And there is no warning for
@@ -144,49 +144,49 @@ inline void JfrTraceIdBits::store(jbyte bits, const T* ptr) {
 }
 template <typename T>
-inline void JfrTraceIdBits::meta_store(jbyte bits, const T* ptr) {
+inline void JfrTraceIdBits::meta_store(uint8_t bits, const T* ptr) {
   assert(ptr != nullptr, "invariant");
   set(bits, traceid_meta_byte(ptr));
 }
-inline void set_mask(jbyte mask, jbyte* dest) {
+inline void set_mask(uint8_t mask, uint8_t* dest) {
   set_cas_form<traceid_and>(mask, dest);
 }
 template <typename T>
-inline void JfrTraceIdBits::mask_store(jbyte mask, const T* ptr) {
+inline void JfrTraceIdBits::mask_store(uint8_t mask, const T* ptr) {
   assert(ptr != nullptr, "invariant");
   set_mask(mask, traceid_tag_byte(ptr));
 }
 template <typename T>
-inline void JfrTraceIdBits::meta_mask_store(jbyte mask, const T* ptr) {
+inline void JfrTraceIdBits::meta_mask_store(uint8_t mask, const T* ptr) {
   assert(ptr != nullptr, "invariant");
   set_mask(mask, traceid_meta_byte(ptr));
 }
-inline void clear_bits(jbyte bits, jbyte* dest) {
+inline void clear_bits(uint8_t bits, uint8_t* dest) {
   set_form<traceid_xor>(bits, dest);
 }
 template <typename T>
-inline void JfrTraceIdBits::clear(jbyte bits, const T* ptr) {
+inline void JfrTraceIdBits::clear(uint8_t bits, const T* ptr) {
   assert(ptr != nullptr, "invariant");
   clear_bits(bits, traceid_tag_byte(ptr));
 }
-inline void clear_bits_cas(jbyte bits, jbyte* dest) {
+inline void clear_bits_cas(uint8_t bits, uint8_t* dest) {
   set_cas_form<traceid_xor>(bits, dest);
 }
 template <typename T>
-inline void JfrTraceIdBits::clear_cas(jbyte bits, const T* ptr) {
+inline void JfrTraceIdBits::clear_cas(uint8_t bits, const T* ptr) {
   assert(ptr != nullptr, "invariant");
   clear_bits_cas(bits, traceid_tag_byte(ptr));
 }
 template <typename T>
-inline void JfrTraceIdBits::meta_clear(jbyte bits, const T* ptr) {
+inline void JfrTraceIdBits::meta_clear(uint8_t bits, const T* ptr) {
   assert(ptr != nullptr, "invariant");
   clear_bits(bits, traceid_meta_byte(ptr));
 }

View File

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2023, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -96,27 +96,27 @@ class JfrTraceIdEpoch : AllStatic {
     return Atomic::load_acquire(&_synchronizing);
   }
-  static traceid this_epoch_bit() {
+  static uint8_t this_epoch_bit() {
     return _epoch_state ? EPOCH_1_BIT : EPOCH_0_BIT;
   }
-  static traceid previous_epoch_bit() {
+  static uint8_t previous_epoch_bit() {
     return _epoch_state ? EPOCH_0_BIT : EPOCH_1_BIT;
   }
-  static traceid this_epoch_method_bit() {
+  static uint8_t this_epoch_method_bit() {
     return _epoch_state ? EPOCH_1_METHOD_BIT : EPOCH_0_METHOD_BIT;
   }
-  static traceid previous_epoch_method_bit() {
+  static uint8_t previous_epoch_method_bit() {
     return _epoch_state ? EPOCH_0_METHOD_BIT : EPOCH_1_METHOD_BIT;
   }
-  static traceid this_epoch_method_and_class_bits() {
+  static uint8_t this_epoch_method_and_class_bits() {
     return _epoch_state ? EPOCH_1_METHOD_AND_CLASS_BITS : EPOCH_0_METHOD_AND_CLASS_BITS;
   }
-  static traceid previous_epoch_method_and_class_bits() {
+  static uint8_t previous_epoch_method_and_class_bits() {
     return _epoch_state ? EPOCH_0_METHOD_AND_CLASS_BITS : EPOCH_1_METHOD_AND_CLASS_BITS;
   }
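These accessors used to hand back the epoch bit as a 64-bit traceid, which forced the byte-oriented consumers (the METHOD_FLAG macros removed further down) to cast it back down to a byte at every use. Producing the value at the width it is actually used removes those conversions entirely; a compact illustration with invented constants, not the real epoch bits:

// g++ -Wconversion -c epoch_bit.cpp
#include <cstdint>

using traceid = uint64_t;

static bool epoch_state = false;
static constexpr uint8_t EPOCH_0_BIT = 1;  // invented values for illustration
static constexpr uint8_t EPOCH_1_BIT = 2;

// Old shape: the bit travels as a 64-bit traceid, so byte-sized consumers must narrow it.
static traceid epoch_bit_wide() { return epoch_state ? EPOCH_1_BIT : EPOCH_0_BIT; }

// New shape: return the value at the width it is used.
static uint8_t epoch_bit() { return epoch_state ? EPOCH_1_BIT : EPOCH_0_BIT; }

static uint8_t tag_byte = 0;

int main() {
  // tag_byte = epoch_bit_wide();                     // traceid -> uint8_t: warns
  tag_byte = static_cast<uint8_t>(epoch_bit_wide());  // old style: cast at every use site
  tag_byte = epoch_bit();                             // new style: no conversion needed
  return tag_byte == EPOCH_0_BIT ? 0 : 1;
}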

View File

@@ -121,24 +121,31 @@ static traceid read_element(const u1* pos, const Klass** klass, bool compressed)
   return compressed ? read_compressed_element(pos, klass) : read_uncompressed_element(pos, klass);
 }
-template <typename T>
-static inline void store_traceid(T* element, traceid id, bool uncompressed) {
+static inline void store_traceid(JfrEpochQueueKlassElement* element, traceid id) {
 #ifdef VM_LITTLE_ENDIAN
   id <<= METADATA_SHIFT;
 #endif
-  element->id = uncompressed ? id | UNCOMPRESSED : id;
+  element->id = id | UNCOMPRESSED;
+}
+static inline void store_traceid(JfrEpochQueueNarrowKlassElement* element, traceid id) {
+  assert(id < uncompressed_threshold, "invariant");
+#ifdef VM_LITTLE_ENDIAN
+  id <<= METADATA_SHIFT;
+#endif
+  element->id = static_cast<u4>(id);
 }
 static void store_compressed_element(traceid id, const Klass* klass, u1* pos) {
   assert(can_compress_element(id), "invariant");
   JfrEpochQueueNarrowKlassElement* const element = new (pos) JfrEpochQueueNarrowKlassElement();
-  store_traceid(element, id, false);
+  store_traceid(element, id);
   element->compressed_klass = encode(klass);
 }
 static void store_uncompressed_element(traceid id, const Klass* klass, u1* pos) {
   JfrEpochQueueKlassElement* const element = new (pos) JfrEpochQueueKlassElement();
-  store_traceid(element, id, true);
+  store_traceid(element, id);
   element->klass = klass;
 }

View File

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2023, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -70,8 +70,6 @@
 #define PREVIOUS_EPOCH_METHOD_BIT (JfrTraceIdEpoch::previous_epoch_method_bit())
 #define THIS_EPOCH_METHOD_AND_CLASS_BITS (JfrTraceIdEpoch::this_epoch_method_and_class_bits())
 #define PREVIOUS_EPOCH_METHOD_AND_CLASS_BITS (JfrTraceIdEpoch::previous_epoch_method_and_class_bits())
-#define THIS_EPOCH_METHOD_FLAG_BIT ((jbyte)THIS_EPOCH_BIT)
-#define PREVIOUS_EPOCH_METHOD_FLAG_BIT ((jbyte)PREVIOUS_EPOCH_BIT)
 // operators
 #define TRACE_ID_RAW(ptr) (JfrTraceIdBits::load(ptr))
@@ -102,18 +100,18 @@
 #define METHOD_AND_CLASS_USED_THIS_EPOCH(kls) (TRACE_ID_PREDICATE(kls, (THIS_EPOCH_METHOD_AND_CLASS_BITS)))
 #define METHOD_AND_CLASS_USED_PREVIOUS_EPOCH(kls) (TRACE_ID_PREDICATE(kls, (PREVIOUS_EPOCH_METHOD_AND_CLASS_BITS)))
 #define METHOD_AND_CLASS_USED_ANY_EPOCH(kls) (METHOD_USED_ANY_EPOCH(kls) && USED_ANY_EPOCH(kls))
-#define METHOD_FLAG_USED_THIS_EPOCH(method) (METHOD_FLAG_PREDICATE(method, (THIS_EPOCH_METHOD_FLAG_BIT)))
+#define METHOD_FLAG_USED_THIS_EPOCH(method) (METHOD_FLAG_PREDICATE(method, (THIS_EPOCH_BIT)))
 #define METHOD_FLAG_NOT_USED_THIS_EPOCH(method) (!(METHOD_FLAG_USED_THIS_EPOCH(method)))
-#define METHOD_FLAG_USED_PREVIOUS_EPOCH(method) (METHOD_FLAG_PREDICATE(method, (PREVIOUS_EPOCH_METHOD_FLAG_BIT)))
+#define METHOD_FLAG_USED_PREVIOUS_EPOCH(method) (METHOD_FLAG_PREDICATE(method, (PREVIOUS_EPOCH_BIT)))
 #define IS_METHOD_BLESSED(method) (METHOD_FLAG_PREDICATE(method, BLESSED_METHOD_BIT))
 // setters
 #define SET_USED_THIS_EPOCH(ptr) (TRACE_ID_TAG(ptr, THIS_EPOCH_BIT))
 #define SET_METHOD_AND_CLASS_USED_THIS_EPOCH(kls) (TRACE_ID_TAG(kls, THIS_EPOCH_METHOD_AND_CLASS_BITS))
-#define SET_METHOD_FLAG_USED_THIS_EPOCH(method) (METHOD_FLAG_TAG(method, THIS_EPOCH_METHOD_FLAG_BIT))
+#define SET_METHOD_FLAG_USED_THIS_EPOCH(method) (METHOD_FLAG_TAG(method, THIS_EPOCH_BIT))
 #define PREVIOUS_EPOCH_METHOD_AND_CLASS_BIT_MASK (~(PREVIOUS_EPOCH_METHOD_BIT | PREVIOUS_EPOCH_BIT))
 #define CLEAR_PREVIOUS_EPOCH_METHOD_AND_CLASS(kls) (TRACE_ID_MASK_CLEAR(kls, PREVIOUS_EPOCH_METHOD_AND_CLASS_BIT_MASK))
-#define CLEAR_PREVIOUS_EPOCH_METHOD_FLAG(method) (METHOD_FLAG_CLEAR(method, PREVIOUS_EPOCH_METHOD_FLAG_BIT))
+#define CLEAR_PREVIOUS_EPOCH_METHOD_FLAG(method) (METHOD_FLAG_CLEAR(method, PREVIOUS_EPOCH_BIT))
 #define BLESS_METHOD(method) (METHOD_FLAG_TAG(method, BLESSED_METHOD_BIT))
 // types

View File

@@ -204,7 +204,7 @@ int64_t JfrChunkWriter::write_chunk_header_checkpoint(bool flushpoint) {
   head.write_next_generation(!flushpoint);
   head.write_flags();
   assert(current_offset() - header_content_pos == HEADER_SIZE, "invariant");
-  const u4 checkpoint_size = current_offset() - event_size_offset;
+  const u4 checkpoint_size = static_cast<u4>(current_offset() - event_size_offset);
   write_padded_at_offset<u4>(checkpoint_size, event_size_offset);
   set_last_checkpoint_offset(event_size_offset);
   const int64_t sz_written = size_written();

View File

@@ -247,10 +247,10 @@ inline double compute_ewma_alpha_coefficient(size_t lookback_count) {
 static void log(const JfrSamplerWindow* expired, double* sample_size_ewma) {
   assert(sample_size_ewma != nullptr, "invariant");
   if (log_is_enabled(Debug, jfr, system, throttle)) {
-    *sample_size_ewma = exponentially_weighted_moving_average(expired->sample_size(), compute_ewma_alpha_coefficient(expired->params().window_lookback_count), *sample_size_ewma);
+    *sample_size_ewma = exponentially_weighted_moving_average(static_cast<double>(expired->sample_size()), compute_ewma_alpha_coefficient(expired->params().window_lookback_count), *sample_size_ewma);
     log_debug(jfr, system, throttle)("jdk.ObjectAllocationSample: avg.sample size: %0.4f, window set point: %zu, sample size: %zu, population size: %zu, ratio: %.4f, window duration: %zu ms\n",
                                      *sample_size_ewma, expired->params().sample_points_per_window, expired->sample_size(), expired->population_size(),
-                                     expired->population_size() == 0 ? 0 : (double)expired->sample_size() / (double)expired->population_size(),
+                                     expired->population_size() == 0 ? 0 : static_cast<double>(expired->sample_size()) / static_cast<double>(expired->population_size()),
                                      expired->params().window_duration_ms);
   }
 }
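For context, the moving average updated here is a plain EWMA with alpha derived from the window lookback count. A stand-alone re-sketch of the two helpers as they are used above (my own reconstruction for illustration; the real definitions live in the JFR sampler sources):

#include <cstddef>

// alpha = 1 / lookback_count: a longer lookback weighs history more heavily.
static inline double compute_ewma_alpha_coefficient(size_t lookback_count) {
  return lookback_count <= 1 ? 1.0 : 1.0 / static_cast<double>(lookback_count);
}

static inline double exponentially_weighted_moving_average(double current,
                                                           double alpha,
                                                           double prev) {
  return alpha * current + (1.0 - alpha) * prev;
}

int main() {
  double avg = 0.0;
  const double alpha = compute_ewma_alpha_coefficient(25);  // an arbitrary lookback of 25 windows
  for (int sample = 0; sample < 10; ++sample) {
    avg = exponentially_weighted_moving_average(100.0, alpha, avg);
  }
  return avg > 0.0 ? 0 : 1;
}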

View File

@@ -46,10 +46,10 @@ static void copy_frames(JfrStackFrame** lhs_frames, u4 length, const JfrStackFra
   }
 }
-JfrStackFrame::JfrStackFrame(const traceid& id, int bci, int type, const InstanceKlass* ik) :
+JfrStackFrame::JfrStackFrame(const traceid& id, int bci, u1 type, const InstanceKlass* ik) :
   _klass(ik), _methodid(id), _line(0), _bci(bci), _type(type) {}
-JfrStackFrame::JfrStackFrame(const traceid& id, int bci, int type, int lineno, const InstanceKlass* ik) :
+JfrStackFrame::JfrStackFrame(const traceid& id, int bci, u1 type, int lineno, const InstanceKlass* ik) :
   _klass(ik), _methodid(id), _line(lineno), _bci(bci), _type(type) {}
 JfrStackTrace::JfrStackTrace(JfrStackFrame* frames, u4 max_frames) :
@@ -256,7 +256,7 @@ bool JfrStackTrace::record_async(JavaThread* jt, const frame& frame) {
       return false;
     }
     const traceid mid = JfrTraceId::load(method);
-    int type = vfs.is_interpreted_frame() ? JfrStackFrame::FRAME_INTERPRETER : JfrStackFrame::FRAME_JIT;
+    u1 type = vfs.is_interpreted_frame() ? JfrStackFrame::FRAME_INTERPRETER : JfrStackFrame::FRAME_JIT;
     int bci = 0;
     if (method->is_native()) {
       type = JfrStackFrame::FRAME_NATIVE;
@@ -307,7 +307,7 @@ bool JfrStackTrace::record(JavaThread* jt, const frame& frame, int skip) {
     }
     const Method* method = vfs.method();
     const traceid mid = JfrTraceId::load(method);
-    int type = vfs.is_interpreted_frame() ? JfrStackFrame::FRAME_INTERPRETER : JfrStackFrame::FRAME_JIT;
+    u1 type = vfs.is_interpreted_frame() ? JfrStackFrame::FRAME_INTERPRETER : JfrStackFrame::FRAME_JIT;
     int bci = 0;
     if (method->is_native()) {
       type = JfrStackFrame::FRAME_NATIVE;

View File

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -44,15 +44,15 @@ class JfrStackFrame {
   u1 _type;
  public:
-  JfrStackFrame(const traceid& id, int bci, int type, const InstanceKlass* klass);
-  JfrStackFrame(const traceid& id, int bci, int type, int lineno, const InstanceKlass* klass);
+  JfrStackFrame(const traceid& id, int bci, u1 type, const InstanceKlass* klass);
+  JfrStackFrame(const traceid& id, int bci, u1 type, int lineno, const InstanceKlass* klass);
   bool equals(const JfrStackFrame& rhs) const;
   void write(JfrChunkWriter& cw) const;
   void write(JfrCheckpointWriter& cpw) const;
   void resolve_lineno() const;
-  enum {
+  enum : u1 {
     FRAME_INTERPRETER = 0,
     FRAME_JIT,
     FRAME_INLINE,
@@ -72,7 +72,7 @@ class JfrStackTrace : public JfrCHeapObj {
   const JfrStackTrace* _next;
   JfrStackFrame* _frames;
   traceid _id;
-  unsigned int _hash;
+  traceid _hash;
   u4 _nr_of_frames;
   u4 _max_frames;
   bool _frames_ownership;
@@ -105,7 +105,7 @@ class JfrStackTrace : public JfrCHeapObj {
   ~JfrStackTrace();
  public:
-  unsigned int hash() const { return _hash; }
+  traceid hash() const { return _hash; }
   traceid id() const { return _id; }
 };

View File

@@ -192,7 +192,7 @@ void JfrStackTraceRepository::record_for_leak_profiler(JavaThread* current_threa
   assert(!tl->has_cached_stack_trace(), "invariant");
   JfrStackTrace stacktrace(tl->stackframes(), tl->stackdepth());
   stacktrace.record(current_thread, skip);
-  const unsigned int hash = stacktrace.hash();
+  const traceid hash = stacktrace.hash();
   if (hash != 0) {
     tl->set_cached_stack_trace_id(add(leak_profiler_instance(), stacktrace), hash);
   }
@@ -222,7 +222,7 @@ traceid JfrStackTraceRepository::add_trace(const JfrStackTrace& stacktrace) {
 }
 // invariant is that the entry to be resolved actually exists in the table
-const JfrStackTrace* JfrStackTraceRepository::lookup_for_leak_profiler(unsigned int hash, traceid id) {
+const JfrStackTrace* JfrStackTraceRepository::lookup_for_leak_profiler(traceid hash, traceid id) {
   const size_t index = (hash % TABLE_SIZE);
   const JfrStackTrace* trace = leak_profiler_instance()._table[index];
   while (trace != nullptr && trace->id() != id) {

View File

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -60,7 +60,7 @@ class JfrStackTraceRepository : public JfrCHeapObj {
   static size_t clear(JfrStackTraceRepository& repo);
   size_t write(JfrChunkWriter& cw, bool clear);
-  static const JfrStackTrace* lookup_for_leak_profiler(unsigned int hash, traceid id);
+  static const JfrStackTrace* lookup_for_leak_profiler(traceid hash, traceid id);
   static void record_for_leak_profiler(JavaThread* thread, int skip = 0);
   static void clear_leak_profiler();

View File

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2020, 2022, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 2020, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, Datadog, Inc. All rights reserved. * Copyright (c) 2020, Datadog, Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
@ -222,7 +222,7 @@ JfrSamplerWindow* JfrAdaptiveSampler::set_rate(const JfrSamplerParams& params, c
next->_projected_population_size = 0; next->_projected_population_size = 0;
return next; return next;
} }
next->_sampling_interval = derive_sampling_interval(sample_size, expired); next->_sampling_interval = derive_sampling_interval(static_cast<double>(sample_size), expired);
assert(next->_sampling_interval >= 1, "invariant"); assert(next->_sampling_interval >= 1, "invariant");
next->_projected_population_size = sample_size * next->_sampling_interval; next->_projected_population_size = sample_size * next->_sampling_interval;
return next; return next;
@ -310,12 +310,12 @@ inline size_t next_geometric(double p, double u) {
u = 0.99; u = 0.99;
} }
// Inverse CDF for the geometric distribution. // Inverse CDF for the geometric distribution.
return ceil(log(1.0 - u) / log(1.0 - p)); return static_cast<size_t>(ceil(log(1.0 - u) / log(1.0 - p)));
} }
size_t JfrAdaptiveSampler::derive_sampling_interval(double sample_size, const JfrSamplerWindow* expired) { size_t JfrAdaptiveSampler::derive_sampling_interval(double sample_size, const JfrSamplerWindow* expired) {
assert(sample_size > 0, "invariant"); assert(sample_size > 0, "invariant");
const size_t population_size = project_population_size(expired); const double population_size = project_population_size(expired);
if (population_size <= sample_size) { if (population_size <= sample_size) {
return 1; return 1;
} }
@ -325,9 +325,9 @@ size_t JfrAdaptiveSampler::derive_sampling_interval(double sample_size, const Jf
} }
// The projected population size is an exponentially weighted moving average, a function of the window_lookback_count. // The projected population size is an exponentially weighted moving average, a function of the window_lookback_count.
inline size_t JfrAdaptiveSampler::project_population_size(const JfrSamplerWindow* expired) { inline double JfrAdaptiveSampler::project_population_size(const JfrSamplerWindow* expired) {
assert(expired != nullptr, "invariant"); assert(expired != nullptr, "invariant");
_avg_population_size = exponentially_weighted_moving_average(expired->population_size(), _ewma_population_size_alpha, _avg_population_size); _avg_population_size = exponentially_weighted_moving_average(static_cast<double>(expired->population_size()), _ewma_population_size_alpha, _avg_population_size);
return _avg_population_size; return _avg_population_size;
} }
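
project_population_size() now returns double, so the exponentially weighted moving average never round-trips through size_t and back. Below is a minimal sketch of an EWMA kept entirely in double, with an assumed helper and alpha value; integral samples are widened explicitly before the arithmetic:

#include <cstddef>
#include <cstdio>

static double ewma(double sample, double alpha, double previous) {
  return alpha * sample + (1.0 - alpha) * previous;
}

int main() {
  double avg = 0.0;
  const double alpha = 2.0 / (25.0 + 1.0);           // e.g. a 25-window lookback
  const size_t populations[] = { 100, 120, 90, 110 };
  for (size_t p : populations) {
    avg = ewma(static_cast<double>(p), alpha, avg);  // explicit widening, no -Wconversion
  }
  printf("projected population size: %.2f\n", avg);
  return 0;
}
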
@ -362,7 +362,7 @@ bool JfrGTestFixedRateSampler::initialize() {
static void log(const JfrSamplerWindow* expired, double* sample_size_ewma) { static void log(const JfrSamplerWindow* expired, double* sample_size_ewma) {
assert(sample_size_ewma != nullptr, "invariant"); assert(sample_size_ewma != nullptr, "invariant");
if (log_is_enabled(Debug, jfr, system, throttle)) { if (log_is_enabled(Debug, jfr, system, throttle)) {
*sample_size_ewma = exponentially_weighted_moving_average(expired->sample_size(), compute_ewma_alpha_coefficient(expired->params().window_lookback_count), *sample_size_ewma); *sample_size_ewma = exponentially_weighted_moving_average(static_cast<double>(expired->sample_size()), compute_ewma_alpha_coefficient(expired->params().window_lookback_count), *sample_size_ewma);
log_debug(jfr, system, throttle)("JfrGTestFixedRateSampler: avg.sample size: %0.4f, window set point: %zu, sample size: %zu, population size: %zu, ratio: %.4f, window duration: %zu ms\n", log_debug(jfr, system, throttle)("JfrGTestFixedRateSampler: avg.sample size: %0.4f, window set point: %zu, sample size: %zu, population size: %zu, ratio: %.4f, window duration: %zu ms\n",
*sample_size_ewma, expired->params().sample_points_per_window, expired->sample_size(), expired->population_size(), *sample_size_ewma, expired->params().sample_points_per_window, expired->sample_size(), expired->population_size(),
expired->population_size() == 0 ? 0 : (double)expired->sample_size() / (double)expired->population_size(), expired->population_size() == 0 ? 0 : (double)expired->sample_size() / (double)expired->population_size(),

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2020, 2022, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 2020, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, Datadog, Inc. All rights reserved. * Copyright (c) 2020, Datadog, Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
@ -122,7 +122,7 @@ class JfrAdaptiveSampler : public JfrCHeapObj {
size_t amortize_debt(const JfrSamplerWindow* expired); size_t amortize_debt(const JfrSamplerWindow* expired);
size_t derive_sampling_interval(double sample_size, const JfrSamplerWindow* expired); size_t derive_sampling_interval(double sample_size, const JfrSamplerWindow* expired);
size_t project_population_size(const JfrSamplerWindow* expired); double project_population_size(const JfrSamplerWindow* expired);
size_t project_sample_size(const JfrSamplerParams& params, const JfrSamplerWindow* expired); size_t project_sample_size(const JfrSamplerParams& params, const JfrSamplerWindow* expired);
JfrSamplerWindow* set_rate(const JfrSamplerParams& params, const JfrSamplerWindow* expired); JfrSamplerWindow* set_rate(const JfrSamplerParams& params, const JfrSamplerWindow* expired);

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 2016, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
* This code is free software; you can redistribute it and/or modify it * This code is free software; you can redistribute it and/or modify it
@ -35,7 +35,7 @@ class JfrStackTraceMark {
private: private:
Thread* _t; Thread* _t;
traceid _previous_id; traceid _previous_id;
unsigned int _previous_hash; traceid _previous_hash;
public: public:
JfrStackTraceMark(); JfrStackTraceMark();
JfrStackTraceMark(Thread* t); JfrStackTraceMark(Thread* t);

@ -63,12 +63,12 @@ JfrThreadLocal::JfrThreadLocal() :
_thread_id_alias(max_julong), _thread_id_alias(max_julong),
_data_lost(0), _data_lost(0),
_stack_trace_id(max_julong), _stack_trace_id(max_julong),
_stack_trace_hash(0),
_parent_trace_id(0), _parent_trace_id(0),
_last_allocated_bytes(0), _last_allocated_bytes(0),
_user_time(0), _user_time(0),
_cpu_time(0), _cpu_time(0),
_wallclock_time(os::javaTimeNanos()), _wallclock_time(os::javaTimeNanos()),
_stack_trace_hash(0),
_stackdepth(0), _stackdepth(0),
_entering_suspend_flag(0), _entering_suspend_flag(0),
_critical_section(0), _critical_section(0),
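
The _stack_trace_hash initializer moves because the field's declaration moves up next to _stack_trace_id in the header (see the hunks that follow) and changes type to traceid. Members are initialized in declaration order, so keeping the constructor's initializer list in that same order avoids -Wreorder noise. A small illustration with made-up names:

#include <cstdint>

typedef uint64_t traceid;   // assumption: traceid is an unsigned 64-bit id type

// Hypothetical stand-in for the thread-local state: the hash is declared (and
// therefore initialized) directly after the id it belongs to.
class ThreadLocalSketch {
  traceid _stack_trace_id;
  traceid _stack_trace_hash;
  int64_t _wallclock_time;
 public:
  ThreadLocalSketch()
    : _stack_trace_id(0),
      _stack_trace_hash(0),   // listed in declaration order, so no -Wreorder
      _wallclock_time(0) {}
};

int main() {
  ThreadLocalSketch t;
  (void)t;
  return 0;
}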

@ -57,12 +57,12 @@ class JfrThreadLocal {
mutable traceid _thread_id_alias; mutable traceid _thread_id_alias;
u8 _data_lost; u8 _data_lost;
traceid _stack_trace_id; traceid _stack_trace_id;
traceid _stack_trace_hash;
traceid _parent_trace_id; traceid _parent_trace_id;
int64_t _last_allocated_bytes; int64_t _last_allocated_bytes;
jlong _user_time; jlong _user_time;
jlong _cpu_time; jlong _cpu_time;
jlong _wallclock_time; jlong _wallclock_time;
unsigned int _stack_trace_hash;
mutable u4 _stackdepth; mutable u4 _stackdepth;
volatile jint _entering_suspend_flag; volatile jint _entering_suspend_flag;
mutable volatile int _critical_section; mutable volatile int _critical_section;
@ -187,7 +187,7 @@ class JfrThreadLocal {
return _parent_trace_id; return _parent_trace_id;
} }
void set_cached_stack_trace_id(traceid id, unsigned int hash = 0) { void set_cached_stack_trace_id(traceid id, traceid hash = 0) {
_stack_trace_id = id; _stack_trace_id = id;
_stack_trace_hash = hash; _stack_trace_hash = hash;
} }
@ -205,7 +205,7 @@ class JfrThreadLocal {
return _stack_trace_id; return _stack_trace_id;
} }
unsigned int cached_stack_trace_hash() const { traceid cached_stack_trace_hash() const {
return _stack_trace_hash; return _stack_trace_hash;
} }

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2012, 2020, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 2012, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
* This code is free software; you can redistribute it and/or modify it * This code is free software; you can redistribute it and/or modify it
@ -46,34 +46,34 @@
class JfrTraceFlag { class JfrTraceFlag {
private: private:
mutable jshort _flags; mutable uint16_t _flags;
public: public:
JfrTraceFlag() : _flags(0) {} JfrTraceFlag() : _flags(0) {}
bool is_set(jshort flag) const { bool is_set(uint16_t flag) const {
return (_flags & flag) != 0; return (_flags & flag) != 0;
} }
jshort flags() const { uint16_t flags() const {
return _flags; return _flags;
} }
void set_flags(jshort flags) const { void set_flags(uint16_t flags) const {
_flags = flags; _flags = flags;
} }
jbyte* flags_addr() const { uint8_t* flags_addr() const {
#ifdef VM_LITTLE_ENDIAN #ifdef VM_LITTLE_ENDIAN
return (jbyte*)&_flags; return reinterpret_cast<uint8_t*>(&_flags);
#else #else
return ((jbyte*)&_flags) + 1; return reinterpret_cast<uint8_t*>(&_flags) + 1;
#endif #endif
} }
jbyte* meta_addr() const { uint8_t* meta_addr() const {
#ifdef VM_LITTLE_ENDIAN #ifdef VM_LITTLE_ENDIAN
return ((jbyte*)&_flags) + 1; return reinterpret_cast<uint8_t*>(&_flags) + 1;
#else #else
return (jbyte*)&_flags; return reinterpret_cast<uint8_t*>(&_flags);
#endif #endif
} }
}; };
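
JfrTraceFlag drops the JNI typedefs (jshort, jbyte) in favour of fixed-width unsigned types, which keeps the bit operations unsigned and the byte addressing free of sign-conversion casts. The sketch below shows the same low-byte addressing trick on a 16-bit flag word; the endianness check uses a GCC/Clang predefined macro here instead of HotSpot's VM_LITTLE_ENDIAN, and the class name is made up.

#include <cstdint>
#include <cstdio>

class TraceFlagSketch {
 private:
  mutable uint16_t _flags;
 public:
  TraceFlagSketch() : _flags(0) {}
  void set_flags(uint16_t flags) const { _flags = flags; }
  // Address of the byte that carries the flag bits, regardless of endianness.
  uint8_t* flags_addr() const {
#if defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
    return reinterpret_cast<uint8_t*>(&_flags);       // low byte first
#else
    return reinterpret_cast<uint8_t*>(&_flags) + 1;   // low byte last
#endif
  }
};

int main() {
  TraceFlagSketch f;
  f.set_flags(0x0042);
  // Prints 0x42 on either byte order, because flags_addr() compensates.
  printf("flag byte: 0x%02x\n", static_cast<unsigned>(*f.flags_addr()));
  return 0;
}
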
@ -81,19 +81,19 @@ class JfrTraceFlag {
#define DEFINE_TRACE_FLAG mutable JfrTraceFlag _trace_flags #define DEFINE_TRACE_FLAG mutable JfrTraceFlag _trace_flags
#define DEFINE_TRACE_FLAG_ACCESSOR \ #define DEFINE_TRACE_FLAG_ACCESSOR \
bool is_trace_flag_set(jshort flag) const { \ bool is_trace_flag_set(uint16_t flag) const { \
return _trace_flags.is_set(flag); \ return _trace_flags.is_set(flag); \
} \ } \
jshort trace_flags() const { \ uint16_t trace_flags() const { \
return _trace_flags.flags(); \ return _trace_flags.flags(); \
} \ } \
void set_trace_flags(jshort flags) const { \ void set_trace_flags(uint16_t flags) const { \
_trace_flags.set_flags(flags); \ _trace_flags.set_flags(flags); \
} \ } \
jbyte* trace_flags_addr() const { \ uint8_t* trace_flags_addr() const { \
return _trace_flags.flags_addr(); \ return _trace_flags.flags_addr(); \
} \ } \
jbyte* trace_meta_addr() const { \ uint8_t* trace_meta_addr() const { \
return _trace_flags.meta_addr(); \ return _trace_flags.meta_addr(); \
} }

@ -44,13 +44,13 @@ class JfrBigEndian : AllStatic {
private: private:
template <typename T> template <typename T>
static T read_bytes(const address location); static T read_bytes(const address location);
template <typename T> template <typename R, typename T>
static T read_unaligned(const address location); static R read_unaligned(const address location);
public: public:
static bool platform_supports_unaligned_reads(void); static bool platform_supports_unaligned_reads(void);
static bool is_aligned(const void* location, size_t size); static bool is_aligned(const void* location, size_t size);
template <typename T> template <typename R, typename T>
static T read(const void* location); static R read(const void* location);
}; };
inline bool JfrBigEndian::is_aligned(const void* location, size_t size) { inline bool JfrBigEndian::is_aligned(const void* location, size_t size) {
@ -82,18 +82,18 @@ inline u8 JfrBigEndian::read_bytes(const address location) {
return Bytes::get_Java_u8(location); return Bytes::get_Java_u8(location);
} }
template <typename T> template <typename R, typename T>
inline T JfrBigEndian::read_unaligned(const address location) { inline R JfrBigEndian::read_unaligned(const address location) {
assert(location != nullptr, "just checking"); assert(location != nullptr, "just checking");
switch (sizeof(T)) { switch (sizeof(T)) {
case sizeof(u1) : case sizeof(u1) :
return read_bytes<u1>(location); return static_cast<R>(read_bytes<u1>(location));
case sizeof(u2): case sizeof(u2):
return read_bytes<u2>(location); return static_cast<R>(read_bytes<u2>(location));
case sizeof(u4): case sizeof(u4):
return read_bytes<u4>(location); return static_cast<R>(read_bytes<u4>(location));
case sizeof(u8): case sizeof(u8):
return read_bytes<u8>(location); return static_cast<R>(read_bytes<u8>(location));
default: default:
assert(false, "not reach"); assert(false, "not reach");
} }
@ -111,27 +111,27 @@ inline bool JfrBigEndian::platform_supports_unaligned_reads(void) {
#endif #endif
} }
template<typename T> template<typename R, typename T>
inline T JfrBigEndian::read(const void* location) { inline R JfrBigEndian::read(const void* location) {
assert(location != nullptr, "just checking"); assert(location != nullptr, "just checking");
assert(sizeof(T) <= sizeof(u8), "no support for arbitrary sizes"); assert(sizeof(T) <= sizeof(u8), "no support for arbitrary sizes");
if (sizeof(T) == sizeof(u1)) { if (sizeof(T) == sizeof(u1)) {
return *(T*)location; return static_cast<R>(*(u1*)location);
} }
if (is_aligned(location, sizeof(T)) || platform_supports_unaligned_reads()) { if (is_aligned(location, sizeof(T)) || platform_supports_unaligned_reads()) {
// fastest case // fastest case
switch (sizeof(T)) { switch (sizeof(T)) {
case sizeof(u1): case sizeof(u1) :
return *(T*)location; return static_cast<R>(*(u1*)location);
case sizeof(u2): case sizeof(u2):
return bigendian_16(*(T*)(location)); return static_cast<R>(bigendian_16(*(u2*)location));
case sizeof(u4): case sizeof(u4):
return bigendian_32(*(T*)(location)); return static_cast<R>(bigendian_32(*(u4*)location));
case sizeof(u8): case sizeof(u8):
return bigendian_64(*(T*)(location)); return static_cast<R>(bigendian_64(*(u8*)location));
} }
} }
return read_unaligned<T>((const address)location); return read_unaligned<R, T>((const address)location);
} }
#endif // SHARE_JFR_UTILITIES_JFRBIGENDIAN_HPP #endif // SHARE_JFR_UTILITIES_JFRBIGENDIAN_HPP
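
JfrBigEndian::read() gains a second template parameter: T names the encoded width decoded from the buffer, while R is the type handed back to the caller, so any narrowing or widening happens once, explicitly, inside the reader instead of implicitly at every call site (e.g. read<int, u2>). A self-contained sketch of that <R, T> split with assumed names:

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Decode a big-endian value of width T from the buffer, return it as R.
template <typename R, typename T>
static R read_big_endian(const uint8_t* location) {
  T raw = 0;
  for (size_t i = 0; i < sizeof(T); i++) {
    raw = static_cast<T>((raw << 8) | location[i]);   // most significant byte first
  }
  return static_cast<R>(raw);   // the single, explicit conversion point
}

int main() {
  const uint8_t buffer[] = { 0x01, 0x02 };                       // big-endian u2 = 258
  const int nof_members = read_big_endian<int, uint16_t>(buffer);
  printf("members: %d\n", nof_members);                          // prints 258
  return 0;
}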

@ -77,15 +77,15 @@ inline size_t BigEndianEncoderImpl::encode(T value, u1* dest) {
return 0; return 0;
} }
case 2: { case 2: {
Bytes::put_Java_u2(dest, value); Bytes::put_Java_u2(dest, static_cast<u2>(value));
return 2; return 2;
} }
case 4: { case 4: {
Bytes::put_Java_u4(dest, value); Bytes::put_Java_u4(dest, static_cast<u4>(value));
return 4; return 4;
} }
case 8: { case 8: {
Bytes::put_Java_u8(dest, value); Bytes::put_Java_u8(dest, static_cast<u8>(value));
return 8; return 8;
} }
} }
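
The encoder now narrows the value explicitly to the width it is about to store, so the put_Java_u2/u4/u8 calls no longer rely on an implicit conversion. A hand-rolled sketch of the 2-byte case (the helper name is made up; HotSpot's Bytes:: API is not used here):

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Hypothetical big-endian store of the low 16 bits of a wider value.
static size_t encode_u2(uint64_t value, uint8_t* dest) {
  const uint16_t narrowed = static_cast<uint16_t>(value);   // explicit, intentional narrowing
  dest[0] = static_cast<uint8_t>(narrowed >> 8);
  dest[1] = static_cast<uint8_t>(narrowed & 0xff);
  return 2;
}

int main() {
  uint8_t buf[2];
  const size_t written = encode_u2(0x0000beefULL, buf);
  printf("wrote %zu bytes: %02x %02x\n", written,
         static_cast<unsigned>(buf[0]), static_cast<unsigned>(buf[1]));   // be ef
  return 0;
}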

@ -100,7 +100,7 @@ static void post_safepoint_synchronize_event(EventSafepointStateSynchronization&
event.set_safepointId(safepoint_id); event.set_safepointId(safepoint_id);
event.set_initialThreadCount(initial_number_of_threads); event.set_initialThreadCount(initial_number_of_threads);
event.set_runningThreadCount(threads_waiting_to_block); event.set_runningThreadCount(threads_waiting_to_block);
event.set_iterations(iterations); event.set_iterations(checked_cast<u4>(iterations));
event.commit(); event.commit();
} }
} }
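
set_iterations() takes a u4 while the iteration counter is wider, so the value now goes through checked_cast, which narrows explicitly and asserts that nothing is lost. The stand-in below sketches that contract; it is not HotSpot's checked_cast, only an illustration of the idea.

#include <cassert>
#include <cstdint>
#include <cstdio>

// Illustrative checked narrowing: convert, then verify the value round-trips.
template <typename To, typename From>
static To checked_narrow(From value) {
  const To result = static_cast<To>(value);
  assert(static_cast<From>(result) == value && "value does not fit the target type");
  return result;
}

int main() {
  const uint64_t iterations = 12345;
  const uint32_t field = checked_narrow<uint32_t>(iterations);   // fits, so the assert holds
  printf("iterations: %u\n", field);
  return 0;
}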

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 2020, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, Datadog, Inc. All rights reserved. * Copyright (c) 2020, Datadog, Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
@ -58,7 +58,7 @@ namespace {
return c; return c;
} }
static jlong counter_to_millis(jlong c, bool is_os_time = false) { static jlong counter_to_millis(jlong c, bool is_os_time = false) {
return c * NANOS_PER_MILLISEC; return c * (jlong)NANOS_PER_MILLISEC;
} }
static jlong nanos_to_countertime(jlong c, bool as_os_time = false) { static jlong nanos_to_countertime(jlong c, bool as_os_time = false) {
return c; return c;
@ -128,20 +128,20 @@ class JfrGTestAdaptiveSampling : public ::testing::Test {
sample_sum += i * sample[i]; sample_sum += i * sample[i];
} }
double population_mean = population_sum / (double)population_size; double population_mean = (double)population_sum / (double)population_size;
double sample_mean = sample_sum / (double)sample_size; double sample_mean = (double)sample_sum / (double)sample_size;
double population_variance = 0; double population_variance = 0;
double sample_variance = 0; double sample_variance = 0;
for (int i = 0; i < distr_slots; i++) { for (int i = 0; i < distr_slots; i++) {
double population_diff = i - population_mean; double population_diff = i - population_mean;
population_variance = population[i] * population_diff * population_diff; population_variance = (double)population[i] * population_diff * population_diff;
double sample_diff = i - sample_mean; double sample_diff = i - sample_mean;
sample_variance = sample[i] * sample_diff * sample_diff; sample_variance = (double)sample[i] * sample_diff * sample_diff;
} }
population_variance = population_variance / (population_size - 1); population_variance = population_variance / (double)(population_size - 1);
sample_variance = sample_variance / (sample_size - 1); sample_variance = sample_variance / (double)(sample_size - 1);
double population_stdev = sqrt(population_variance); double population_stdev = sqrt(population_variance);
double sample_stdev = sqrt(sample_variance); double sample_stdev = sqrt(sample_variance);
@ -227,7 +227,7 @@ void JfrGTestAdaptiveSampling::test(JfrGTestAdaptiveSampling::incoming inc, size
} }
const size_t target_sample_size = sample_points_per_window * window_count; const size_t target_sample_size = sample_points_per_window * window_count;
EXPECT_NEAR(target_sample_size, sample_size, expected_sample_points * error_factor) << output; EXPECT_NEAR((double)target_sample_size, (double)sample_size, (double)expected_sample_points * error_factor) << output;
strcat(output, ", hit distribution"); strcat(output, ", hit distribution");
assertDistributionProperties(100, population, sample, population_size, sample_size, output); assertDistributionProperties(100, population, sample, population_size, sample_size, output);
} }
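
The test now casts every integral counter to double before the floating-point arithmetic, so the statistics are computed without implicit conversions or integer division. Below is a generic, self-contained sketch of that casting discipline on a made-up histogram; it accumulates the variance terms and is not a re-statement of the test itself.

#include <cmath>
#include <cstdio>

int main() {
  const int counts[] = { 4, 8, 6, 2 };   // made-up histogram, slot index = value
  const int slots = 4;
  long total = 0;
  long weighted = 0;
  for (int i = 0; i < slots; i++) {
    total += counts[i];
    weighted += static_cast<long>(i) * counts[i];
  }
  const double mean = static_cast<double>(weighted) / static_cast<double>(total);
  double variance = 0.0;
  for (int i = 0; i < slots; i++) {
    const double diff = static_cast<double>(i) - mean;
    variance += static_cast<double>(counts[i]) * diff * diff;
  }
  variance /= static_cast<double>(total - 1);
  printf("mean %.3f stdev %.3f\n", mean, sqrt(variance));
  return 0;
}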