8288477: nmethod header size reduction

Reviewed-by: kvn, never
Boris Ulasevich 2022-07-28 19:49:31 +00:00
parent 54a2c5a6d1
commit e052d7f4bc
11 changed files with 80 additions and 73 deletions

@@ -1148,7 +1148,7 @@ void ciEnv::register_method(ciMethod* target,
                       debug_info(), dependencies(), code_buffer,
                       frame_words, oop_map_set,
                       handler_table, inc_table,
-                      compiler, task()->comp_level());
+                      compiler, CompLevel(task()->comp_level()));
   // Free codeBlobs
   code_buffer->free_blob();

@@ -79,12 +79,6 @@ unsigned int CodeBlob::allocation_size(CodeBuffer* cb, int header_size) {
 }
 CodeBlob::CodeBlob(const char* name, CompilerType type, const CodeBlobLayout& layout, int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps, bool caller_must_gc_arguments, bool compiled) :
-  _type(type),
-  _size(layout.size()),
-  _header_size(layout.header_size()),
-  _frame_complete_offset(frame_complete_offset),
-  _data_offset(layout.data_offset()),
-  _frame_size(frame_size),
   _code_begin(layout.code_begin()),
   _code_end(layout.code_end()),
   _content_begin(layout.content_begin()),
@@ -92,9 +86,15 @@ CodeBlob::CodeBlob(const char* name, CompilerType type, const CodeBlobLayout& la
   _relocation_begin(layout.relocation_begin()),
   _relocation_end(layout.relocation_end()),
   _oop_maps(oop_maps),
+  _name(name),
+  _size(layout.size()),
+  _header_size(layout.header_size()),
+  _frame_complete_offset(frame_complete_offset),
+  _data_offset(layout.data_offset()),
+  _frame_size(frame_size),
   _caller_must_gc_arguments(caller_must_gc_arguments),
   _is_compiled(compiled),
-  _name(name)
+  _type(type)
 {
   assert(is_aligned(layout.size(), oopSize), "unaligned size");
   assert(is_aligned(layout.header_size(), oopSize), "unaligned size");
@@ -108,21 +108,21 @@ CodeBlob::CodeBlob(const char* name, CompilerType type, const CodeBlobLayout& la
 }
 CodeBlob::CodeBlob(const char* name, CompilerType type, const CodeBlobLayout& layout, CodeBuffer* cb /*UNUSED*/, int frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments, bool compiled) :
-  _type(type),
-  _size(layout.size()),
-  _header_size(layout.header_size()),
-  _frame_complete_offset(frame_complete_offset),
-  _data_offset(layout.data_offset()),
-  _frame_size(frame_size),
   _code_begin(layout.code_begin()),
   _code_end(layout.code_end()),
   _content_begin(layout.content_begin()),
   _data_end(layout.data_end()),
   _relocation_begin(layout.relocation_begin()),
   _relocation_end(layout.relocation_end()),
+  _name(name),
+  _size(layout.size()),
+  _header_size(layout.header_size()),
+  _frame_complete_offset(frame_complete_offset),
+  _data_offset(layout.data_offset()),
+  _frame_size(frame_size),
   _caller_must_gc_arguments(caller_must_gc_arguments),
   _is_compiled(compiled),
-  _name(name)
+  _type(type)
 {
   assert(is_aligned(_size, oopSize), "unaligned size");
   assert(is_aligned(_header_size, oopSize), "unaligned size");
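
A minimal sketch, using a hypothetical Blob type rather than HotSpot code, of why the initializer lists above are reshuffled together with the field declarations: C++ initializes members in declaration order, not in the order they appear in the initializer list, so keeping the two in sync avoids -Wreorder warnings and accidental reads of a not-yet-initialized member.

// Hypothetical example, not from the patch.
struct Blob {
  const char* _name;   // declared first, so always initialized first
  int         _size;   // declared second, so always initialized second

  // Writing _size(size) before _name(name) here would not change the actual
  // initialization order; GCC/Clang flag the mismatch with -Wreorder.
  Blob(const char* name, int size) : _name(name), _size(size) {}
};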

@@ -90,16 +90,7 @@ class CodeBlob {
 protected:
-  const CompilerType _type; // CompilerType
-  int _size; // total size of CodeBlob in bytes
-  int _header_size; // size of header (depends on subclass)
-  int _frame_complete_offset; // instruction offsets in [0.._frame_complete_offset) have
-                              // not finished setting up their frame. Beware of pc's in
-                              // that range. There is a similar range(s) on returns
-                              // which we don't detect.
-  int _data_offset; // offset to where data region begins
-  int _frame_size; // size of stack frame
+  // order fields from large to small to minimize padding between fields
   address _code_begin;
   address _code_end;
   address _content_begin; // address to where content region begins (this includes consts, insts, stubs)
@@ -109,13 +100,24 @@
   address _relocation_end;
   ImmutableOopMapSet* _oop_maps; // OopMap for this CodeBlob
-  bool _caller_must_gc_arguments;
-  bool _is_compiled;
   const char* _name;
   S390_ONLY(int _ctable_offset;)
+  int _size; // total size of CodeBlob in bytes
+  int _header_size; // size of header (depends on subclass)
+  int _frame_complete_offset; // instruction offsets in [0.._frame_complete_offset) have
+                              // not finished setting up their frame. Beware of pc's in
+                              // that range. There is a similar range(s) on returns
+                              // which we don't detect.
+  int _data_offset; // offset to where data region begins
+  int _frame_size; // size of stack frame
+  bool _caller_must_gc_arguments;
+  bool _is_compiled;
+  const CompilerType _type; // CompilerType
 #ifndef PRODUCT
   AsmRemarks _asm_remarks;
   DbgStrings _dbg_strings;
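
The "order fields from large to small" comment is the core of the size reduction. A minimal sketch of the effect, with hypothetical fields and assuming a typical LP64 ABI (8-byte pointers, 4-byte ints, 1-byte bools):

#include <cstdint>

struct Interleaved {          // small and large fields mixed
  int32_t _size;              // 4 bytes + 4 bytes padding before the pointer
  void*   _code_begin;        // 8 bytes
  bool    _is_compiled;       // 1 byte + 7 bytes padding before the next pointer
  void*   _code_end;          // 8 bytes
};                            // sizeof == 32 on such an ABI

struct Grouped {              // pointers, then ints, then byte-sized fields
  void*   _code_begin;
  void*   _code_end;
  int32_t _size;
  bool    _is_compiled;       // shares the last 8-byte word with _size
};                            // sizeof == 24 on the same ABI

static_assert(sizeof(Grouped) <= sizeof(Interleaved), "grouping never grows the struct");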

@@ -511,7 +511,7 @@ nmethod* nmethod::new_nmethod(const methodHandle& method,
   ExceptionHandlerTable* handler_table,
   ImplicitExceptionTable* nul_chk_table,
   AbstractCompiler* compiler,
-  int comp_level
+  CompLevel comp_level
 #if INCLUDE_JVMCI
   , char* speculations,
   int speculations_len,
@@ -611,9 +611,9 @@ nmethod::nmethod(
   ByteSize basic_lock_sp_offset,
   OopMapSet* oop_maps )
   : CompiledMethod(method, "native nmethod", type, nmethod_size, sizeof(nmethod), code_buffer, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, true),
-  _is_unloading_state(0),
   _native_receiver_sp_offset(basic_lock_owner_sp_offset),
-  _native_basic_lock_sp_offset(basic_lock_sp_offset)
+  _native_basic_lock_sp_offset(basic_lock_sp_offset),
+  _is_unloading_state(0)
 {
   {
     int scopes_data_offset = 0;
@@ -624,6 +624,7 @@ nmethod::nmethod(
     assert_locked_or_safepoint(CodeCache_lock);
     init_defaults();
+    _comp_level = CompLevel_none;
     _entry_bci = InvocationEntryBci;
     // We have no exception handler or deopt handler make the
     // values something that will never match a pc like the nmethod vtable entry
@@ -648,7 +649,6 @@ nmethod::nmethod(
     _nmethod_end_offset = _nul_chk_table_offset;
 #endif
     _compile_id = compile_id;
-    _comp_level = CompLevel_none;
     _entry_point = code_begin() + offsets->value(CodeOffsets::Entry);
     _verified_entry_point = code_begin() + offsets->value(CodeOffsets::Verified_Entry);
     _osr_entry_point = NULL;
@@ -738,7 +738,7 @@ nmethod::nmethod(
   ExceptionHandlerTable* handler_table,
   ImplicitExceptionTable* nul_chk_table,
   AbstractCompiler* compiler,
-  int comp_level
+  CompLevel comp_level
 #if INCLUDE_JVMCI
   , char* speculations,
   int speculations_len,
@@ -746,9 +746,9 @@
 #endif
   )
   : CompiledMethod(method, "nmethod", type, nmethod_size, sizeof(nmethod), code_buffer, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, true),
-  _is_unloading_state(0),
   _native_receiver_sp_offset(in_ByteSize(-1)),
-  _native_basic_lock_sp_offset(in_ByteSize(-1))
+  _native_basic_lock_sp_offset(in_ByteSize(-1)),
+  _is_unloading_state(0)
 {
   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
   {

@@ -71,11 +71,16 @@ class nmethod : public CompiledMethod {
   friend class JVMCINMethodData;
 private:
-  // Shared fields for all nmethod's
-  int _entry_bci; // != InvocationEntryBci if this nmethod is an on-stack replacement method
   uint64_t _gc_epoch;
+  // not_entrant method removal. Each mark_sweep pass will update
+  // this mark to current sweep invocation count if it is seen on the
+  // stack. An not_entrant method can be removed when there are no
+  // more activations, i.e., when the _stack_traversal_mark is less than
+  // current sweep traversal index.
+  volatile int64_t _stack_traversal_mark;
   // To support simple linked-list chaining of nmethods:
   nmethod* _osr_link; // from InstanceKlass::osr_nmethods_head
@@ -198,6 +203,9 @@ class nmethod : public CompiledMethod {
   address _verified_entry_point; // entry point without class check
   address _osr_entry_point; // entry point for on stack replacement
+  // Shared fields for all nmethod's
+  int _entry_bci; // != InvocationEntryBci if this nmethod is an on-stack replacement method
   // Offsets for different nmethod parts
   int _exception_offset;
   // Offset of the unwind handler if it exists
@@ -225,21 +233,6 @@ class nmethod : public CompiledMethod {
   int _orig_pc_offset;
   int _compile_id; // which compilation made this nmethod
-  int _comp_level; // compilation level
-  // protected by CodeCache_lock
-  bool _has_flushed_dependencies; // Used for maintenance of dependencies (CodeCache_lock)
-  // used by jvmti to track if an event has been posted for this nmethod.
-  bool _unload_reported;
-  bool _load_reported;
-  // Protected by CompiledMethod_lock
-  volatile signed char _state; // {not_installed, in_use, not_entrant, zombie, unloaded}
-#ifdef ASSERT
-  bool _oops_are_stale; // indicates that it's no longer safe to access oops section
-#endif
 #if INCLUDE_RTM_OPT
   // RTM state at compile time. Used during deoptimization to decide
@@ -253,13 +246,6 @@ class nmethod : public CompiledMethod {
   // event processing needs to be done.
   volatile jint _lock_count;
-  // not_entrant method removal. Each mark_sweep pass will update
-  // this mark to current sweep invocation count if it is seen on the
-  // stack. An not_entrant method can be removed when there are no
-  // more activations, i.e., when the _stack_traversal_mark is less than
-  // current sweep traversal index.
-  volatile int64_t _stack_traversal_mark;
   // The _hotness_counter indicates the hotness of a method. The higher
   // the value the hotter the method. The hotness counter of a nmethod is
   // set to [(ReservedCodeCacheSize / (1024 * 1024)) * 2] each time the method
@@ -267,9 +253,6 @@ class nmethod : public CompiledMethod {
   // counter is decreased (by 1) while sweeping.
   int _hotness_counter;
-  // Local state used to keep track of whether unloading is happening or not
-  volatile uint8_t _is_unloading_state;
   // These are used for compiled synchronized native methods to
   // locate the owner and stack slot for the BasicLock. They are
   // needed because there is no debug information for compiled native
@@ -281,6 +264,25 @@ class nmethod : public CompiledMethod {
   ByteSize _native_receiver_sp_offset;
   ByteSize _native_basic_lock_sp_offset;
+  CompLevel _comp_level; // compilation level
+  // Local state used to keep track of whether unloading is happening or not
+  volatile uint8_t _is_unloading_state;
+  // protected by CodeCache_lock
+  bool _has_flushed_dependencies; // Used for maintenance of dependencies (CodeCache_lock)
+  // used by jvmti to track if an event has been posted for this nmethod.
+  bool _unload_reported;
+  bool _load_reported;
+  // Protected by CompiledMethod_lock
+  volatile signed char _state; // {not_installed, in_use, not_entrant, zombie, unloaded}
+#ifdef ASSERT
+  bool _oops_are_stale; // indicates that it's no longer safe to access oops section
+#endif
   friend class nmethodLocker;
   // For native wrappers
@@ -311,7 +313,7 @@ class nmethod : public CompiledMethod {
   ExceptionHandlerTable* handler_table,
   ImplicitExceptionTable* nul_chk_table,
   AbstractCompiler* compiler,
-  int comp_level
+  CompLevel comp_level
 #if INCLUDE_JVMCI
   , char* speculations,
   int speculations_len,
@@ -359,7 +361,7 @@ class nmethod : public CompiledMethod {
   ExceptionHandlerTable* handler_table,
   ImplicitExceptionTable* nul_chk_table,
   AbstractCompiler* compiler,
-  int comp_level
+  CompLevel comp_level
 #if INCLUDE_JVMCI
   , char* speculations = NULL,
   int speculations_len = 0,
@@ -372,9 +374,9 @@ class nmethod : public CompiledMethod {
   // Only used for unit tests.
   nmethod()
     : CompiledMethod(),
-      _is_unloading_state(0),
       _native_receiver_sp_offset(in_ByteSize(-1)),
-      _native_basic_lock_sp_offset(in_ByteSize(-1)) {}
+      _native_basic_lock_sp_offset(in_ByteSize(-1)),
+      _is_unloading_state(0) {}
   static nmethod* new_native_nmethod(const methodHandle& method,

@@ -31,7 +31,7 @@
 #include "runtime/globals.hpp"
 // The (closed set) of concrete compiler classes.
-enum CompilerType {
+enum CompilerType : u1 {
   compiler_none,
   compiler_c1,
   compiler_c2,
@@ -54,7 +54,7 @@ enum MethodCompilation {
 };
 // Enumeration to distinguish tiers of compilation
-enum CompLevel {
+enum CompLevel : s1 {
   CompLevel_any = -1, // Used for querying the state
   CompLevel_all = -1, // Used for changing the state
   CompLevel_none = 0, // Interpreter
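
A short sketch, not taken from HotSpot, of what the new ": u1" / ": s1" underlying types buy (u1 and s1 are HotSpot's one-byte typedefs; int8_t stands in for s1 here). Without a fixed underlying type the compiler picks an implementation-defined integral type, typically 4 bytes; pinning the enum to one byte lets a CompLevel-typed field pack next to other byte-sized members, and the signed variant still holds CompLevel_any/CompLevel_all == -1.

#include <cstdint>

enum OldLevel          { kOldNone = 0, kOldFull = 4 };  // size chosen by the compiler, usually 4 bytes
enum NewLevel : int8_t { kNewAny = -1, kNewNone = 0, kNewFull = 4 };

static_assert(sizeof(NewLevel) == 1, "one byte per level");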

@@ -2063,7 +2063,7 @@ JVMCI::CodeInstallResult JVMCIRuntime::register_method(JVMCIEnv* JVMCIENV,
     int speculations_len) {
   JVMCI_EXCEPTION_CONTEXT;
   nmethod* nm = NULL;
-  int comp_level = CompLevel_full_optimization;
+  CompLevel comp_level = CompLevel_full_optimization;
   char* failure_detail = NULL;
   bool install_default = JVMCIENV->get_HotSpotNmethod_isDefault(nmethod_mirror) != 0;

@@ -255,7 +255,7 @@
   nonstatic_field(MethodData, _jvmci_ir_size, int) \
   \
   nonstatic_field(nmethod, _verified_entry_point, address) \
-  nonstatic_field(nmethod, _comp_level, int) \
+  nonstatic_field(nmethod, _comp_level, CompLevel) \
   \
   nonstatic_field(ObjArrayKlass, _element_klass, Klass*) \
   \
@@ -366,6 +366,7 @@
   declare_unsigned_integer_type(size_t) \
   declare_integer_type(intx) \
   declare_unsigned_integer_type(uintx) \
+  declare_integer_type(CompLevel) \
   \
   declare_toplevel_type(BasicLock) \
   declare_toplevel_type(CompilerToVM) \

@@ -665,7 +665,7 @@
   volatile_nonstatic_field(nmethod, _lock_count, jint) \
   volatile_nonstatic_field(nmethod, _stack_traversal_mark, int64_t) \
   nonstatic_field(nmethod, _compile_id, int) \
-  nonstatic_field(nmethod, _comp_level, int) \
+  nonstatic_field(nmethod, _comp_level, CompLevel) \
   \
   unchecked_c2_static_field(Deoptimization, _trap_reason_name, void*) \
   \
@@ -1971,6 +1971,8 @@
   declare_integer_type(AccessFlags) /* FIXME: wrong type (not integer) */ \
   declare_toplevel_type(address) /* FIXME: should this be an integer type? */ \
   declare_integer_type(BasicType) /* FIXME: wrong type (not integer) */ \
+  \
+  declare_integer_type(CompLevel) \
   JVMTI_ONLY(declare_toplevel_type(BreakpointInfo)) \
   JVMTI_ONLY(declare_toplevel_type(BreakpointInfo*)) \
   declare_toplevel_type(CodeBlob*) \

@@ -459,7 +459,7 @@ final class HotSpotResolvedJavaMethodImpl extends HotSpotMethod implements HotSp
     public boolean hasCompiledCodeAtLevel(int level) {
         long compiledCode = getCompiledCode();
         if (compiledCode != 0) {
-            return UNSAFE.getInt(compiledCode + config().nmethodCompLevelOffset) == level;
+            return UNSAFE.getByte(compiledCode + config().nmethodCompLevelOffset) == level;
         }
         return false;
     }
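
A sketch, with hypothetical field names, of why the reader above switches from a 4-byte to a 1-byte load: once _comp_level is a one-byte enum, neighbouring byte-sized state shares the same 32-bit word, so an int-sized read at its offset would fold those bytes into the result.

#include <cstdint>
#include <cstddef>
#include <cstring>

struct NMethodTail {     // hypothetical layout, not the real nmethod
  int8_t  comp_level;    // one-byte compilation level after the change
  uint8_t is_unloading;  // adjacent byte-sized fields occupy the same word
  bool    unload_reported;
  bool    load_reported;
};

int comp_level_of(const NMethodTail* t) {
  int8_t level;
  // A byte-sized load returns only the level; a 4-byte load at this offset
  // would also pick up the three neighbouring flags.
  std::memcpy(&level, reinterpret_cast<const char*>(t) + offsetof(NMethodTail, comp_level), 1);
  return level;
}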

@@ -182,7 +182,7 @@ class HotSpotVMConfig extends HotSpotVMConfigAccess {
     final int methodDataOverflowRecompiles = getFieldOffset("MethodData::_compiler_counters._nof_overflow_recompiles", Integer.class, "uint");
     final int methodDataOverflowTraps = getFieldOffset("MethodData::_compiler_counters._nof_overflow_traps", Integer.class, "uint");
-    final int nmethodCompLevelOffset = getFieldOffset("nmethod::_comp_level", Integer.class, "int");
+    final int nmethodCompLevelOffset = getFieldOffset("nmethod::_comp_level", Integer.class, "CompLevel");
     final int compilationLevelNone = getConstant("CompLevel_none", Integer.class);
     final int compilationLevelSimple = getConstant("CompLevel_simple", Integer.class);