8329433: Reduce nmethod header size
Reviewed-by: dlong, iveresov
parent 8da175d094
commit b704e91241
Changed paths:
  src/hotspot/share/code
  src/hotspot/share/compiler
  src/hotspot/share/gc/shared
  src/hotspot/share/jvmci
  src/hotspot/share/memory
  src/hotspot/share/runtime
  src/jdk.hotspot.agent/share/classes/sun/jvm/hotspot/code
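
The patch shrinks the CodeBlob and nmethod headers mainly in two ways: fields are reordered from large to small so padding shrinks, and offsets whose values always fit in 16 bits are narrowed from int (or from full 8-byte addresses) to uint16_t/int16_t. The following standalone sketch is not HotSpot code; the struct and field names are hypothetical and merely echo the patch. It shows the effect of narrowing on a typical 64-bit ABI:

#include <cstdint>
#include <cstdio>

// Hypothetical "before": every offset stored as a 32-bit int.
struct HeaderBefore {
  void*       oop_maps;
  const char* name;
  int         size;
  int         header_size;            // always small, but stored as 32 bits
  int         relocation_size;
  int         content_offset;
  int         code_offset;
  int         frame_complete_offset;  // small range, but stored as 32 bits
  int         data_offset;
  int         frame_size;
  uint8_t     kind;
  bool        caller_must_gc_arguments;
};

// Hypothetical "after": same data, but the two small offsets are narrowed and
// kept grouped at the tail (the "order fields from large to small" rule in
// codeblob.hpp below) so alignment padding can absorb them.
struct HeaderAfter {
  void*       oop_maps;
  const char* name;
  int         size;
  int         relocation_size;
  int         content_offset;
  int         code_offset;
  int         data_offset;
  int         frame_size;
  uint16_t    header_size;
  int16_t     frame_complete_offset;
  uint8_t     kind;
  bool        caller_must_gc_arguments;
};

int main() {
  // On common LP64 ABIs this prints "before=56 after=48": narrowing just two
  // fields frees a whole 8-byte slot once padding is accounted for.
  std::printf("before=%zu after=%zu\n", sizeof(HeaderBefore), sizeof(HeaderAfter));
  return 0;
}
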
@@ -73,74 +73,48 @@ unsigned int CodeBlob::allocation_size(CodeBuffer* cb, int header_size) {
return size;
}

#ifdef ASSERT
void CodeBlob::verify_parameters() {
assert(is_aligned(_size, oopSize), "unaligned size");
assert(is_aligned(_header_size, oopSize), "unaligned size");
assert(is_aligned(_relocation_size, oopSize), "unaligned size");
assert(_data_offset <= size(), "codeBlob is too small");
assert(code_end() == content_end(), "must be the same - see code_end()");
#ifdef COMPILER1
// probably wrong for tiered
assert(frame_size() >= -1, "must use frame size or -1 for runtime stubs");
#endif // COMPILER1
}
#endif

CodeBlob::CodeBlob(const char* name, CodeBlobKind kind, int size, int header_size, int relocation_size,
int content_offset, int code_offset, int frame_complete_offset, int data_offset,
int frame_size, ImmutableOopMapSet* oop_maps, bool caller_must_gc_arguments) :
_oop_maps(oop_maps),
_name(name),
_size(size),
_header_size(header_size),
_relocation_size(relocation_size),
_content_offset(content_offset),
_code_offset(code_offset),
_frame_complete_offset(frame_complete_offset),
_data_offset(data_offset),
_frame_size(frame_size),
S390_ONLY(_ctable_offset(0) COMMA)
_kind(kind),
_caller_must_gc_arguments(caller_must_gc_arguments)
{
DEBUG_ONLY( verify_parameters(); )
}

CodeBlob::CodeBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, int header_size,
int frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) :
CodeBlob::CodeBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size,
int16_t frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) :
_oop_maps(nullptr), // will be set by set_oop_maps() call
_name(name),
_size(size),
_header_size(header_size),
_relocation_size(align_up(cb->total_relocation_size(), oopSize)),
_content_offset(CodeBlob::align_code_offset(_header_size + _relocation_size)),
_content_offset(CodeBlob::align_code_offset(header_size + _relocation_size)),
_code_offset(_content_offset + cb->total_offset_of(cb->insts())),
_frame_complete_offset(frame_complete_offset),
_data_offset(_content_offset + align_up(cb->total_content_size(), oopSize)),
_frame_size(frame_size),
S390_ONLY(_ctable_offset(0) COMMA)
_header_size(header_size),
_frame_complete_offset(frame_complete_offset),
_kind(kind),
_caller_must_gc_arguments(caller_must_gc_arguments)
{
DEBUG_ONLY( verify_parameters(); )
assert(is_aligned(_size, oopSize), "unaligned size");
assert(is_aligned(header_size, oopSize), "unaligned size");
assert(is_aligned(_relocation_size, oopSize), "unaligned size");
assert(_data_offset <= _size, "codeBlob is too small: %d > %d", _data_offset, _size);
assert(code_end() == content_end(), "must be the same - see code_end()");
#ifdef COMPILER1
// probably wrong for tiered
assert(_frame_size >= -1, "must use frame size or -1 for runtime stubs");
#endif // COMPILER1

set_oop_maps(oop_maps);
}

// Simple CodeBlob used for simple BufferBlob.
CodeBlob::CodeBlob(const char* name, CodeBlobKind kind, int size, int header_size) :
CodeBlob::CodeBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size) :
_oop_maps(nullptr),
_name(name),
_size(size),
_header_size(header_size),
_relocation_size(0),
_content_offset(CodeBlob::align_code_offset(header_size)),
_code_offset(_content_offset),
_frame_complete_offset(CodeOffsets::frame_never_safe),
_data_offset(size),
_frame_size(0),
S390_ONLY(_ctable_offset(0) COMMA)
_header_size(header_size),
_frame_complete_offset(CodeOffsets::frame_never_safe),
_kind(kind),
_caller_must_gc_arguments(false)
{
@@ -148,7 +122,7 @@ CodeBlob::CodeBlob(const char* name, CodeBlobKind kind, int size, int header_siz
assert(is_aligned(header_size, oopSize), "unaligned size");
}

void CodeBlob::purge(bool free_code_cache_data, bool unregister_nmethod) {
void CodeBlob::purge() {
if (_oop_maps != nullptr) {
delete _oop_maps;
_oop_maps = nullptr;
@@ -185,8 +159,8 @@ RuntimeBlob::RuntimeBlob(
CodeBlobKind kind,
CodeBuffer* cb,
int size,
int header_size,
int frame_complete,
uint16_t header_size,
int16_t frame_complete,
int frame_size,
OopMapSet* oop_maps,
bool caller_must_gc_arguments)
@@ -198,7 +172,7 @@ RuntimeBlob::RuntimeBlob(
void RuntimeBlob::free(RuntimeBlob* blob) {
assert(blob != nullptr, "caller must check for nullptr");
ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
blob->purge(true /* free_code_cache_data */, true /* unregister_nmethod */);
blob->purge();
{
MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
CodeCache::free(blob);
@@ -408,7 +382,7 @@ RuntimeStub::RuntimeStub(
const char* name,
CodeBuffer* cb,
int size,
int frame_complete,
int16_t frame_complete,
int frame_size,
OopMapSet* oop_maps,
bool caller_must_gc_arguments
@@ -420,7 +394,7 @@ RuntimeStub::RuntimeStub(

RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
CodeBuffer* cb,
int frame_complete,
int16_t frame_complete,
int frame_size,
OopMapSet* oop_maps,
bool caller_must_gc_arguments,
@@ -668,10 +642,6 @@ void UpcallStub::free(UpcallStub* blob) {
RuntimeBlob::free(blob);
}

void UpcallStub::preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f) {
ShouldNotReachHere(); // caller should never have to gc arguments
}

//----------------------------------------------------------------------------------------------------
// Verification and printing

@@ -90,8 +90,8 @@ enum class CodeBlobKind : u1 {
Number_Of_Kinds
};

class UpcallStub; // for as_upcall_stub()
class RuntimeStub; // for as_runtime_stub()
class UpcallStub; // for as_upcall_stub()
class RuntimeStub; // for as_runtime_stub()
class JavaFrameAnchor; // for UpcallStub::jfa_for_frame

class CodeBlob {
@@ -101,43 +101,39 @@ class CodeBlob {

protected:
// order fields from large to small to minimize padding between fields
ImmutableOopMapSet* _oop_maps; // OopMap for this CodeBlob
ImmutableOopMapSet* _oop_maps; // OopMap for this CodeBlob
const char* _name;

int _size; // total size of CodeBlob in bytes
int _header_size; // size of header (depends on subclass)
int _relocation_size; // size of relocation
int _content_offset; // offset to where content region begins (this includes consts, insts, stubs)
int _code_offset; // offset to where instructions region begins (this includes insts, stubs)
int _frame_complete_offset; // instruction offsets in [0.._frame_complete_offset) have
// not finished setting up their frame. Beware of pc's in
// that range. There is a similar range(s) on returns
// which we don't detect.
int _data_offset; // offset to where data region begins
int _frame_size; // size of stack frame in words (NOT slots. On x64 these are 64bit words)
int _size; // total size of CodeBlob in bytes
int _relocation_size; // size of relocation (could be bigger than 64Kb)
int _content_offset; // offset to where content region begins (this includes consts, insts, stubs)
int _code_offset; // offset to where instructions region begins (this includes insts, stubs)

S390_ONLY(int _ctable_offset;)
int _data_offset; // offset to where data region begins
int _frame_size; // size of stack frame in words (NOT slots. On x64 these are 64bit words)

CodeBlobKind _kind; // Kind of this code blob
S390_ONLY(int _ctable_offset;)

bool _caller_must_gc_arguments;
uint16_t _header_size; // size of header (depends on subclass)
int16_t _frame_complete_offset; // instruction offsets in [0.._frame_complete_offset) have
// not finished setting up their frame. Beware of pc's in
// that range. There is a similar range(s) on returns
// which we don't detect.

CodeBlobKind _kind; // Kind of this code blob

bool _caller_must_gc_arguments;

#ifndef PRODUCT
AsmRemarks _asm_remarks;
DbgStrings _dbg_strings;
#endif // not PRODUCT
#endif

DEBUG_ONLY( void verify_parameters() );

CodeBlob(const char* name, CodeBlobKind kind, int size, int header_size, int relocation_size,
int content_offset, int code_offset, int data_offset, int frame_complete_offset,
int frame_size, ImmutableOopMapSet* oop_maps, bool caller_must_gc_arguments);

CodeBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, int header_size,
int frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments);
CodeBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size,
int16_t frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments);

// Simple CodeBlob used for simple BufferBlob.
CodeBlob(const char* name, CodeBlobKind kind, int size, int header_size);
CodeBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size);

void operator delete(void* p) { }

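Because the constructors above now take uint16_t header_size and int16_t frame_complete_offset, every caller has to pass a value that provably fits in 16 bits; in the nmethod changes further down, the patch does this with checked_cast<uint16_t>(...). A minimal sketch of that kind of guarded narrowing, using a hypothetical helper rather than HotSpot's actual checked_cast:

#include <cassert>
#include <cstdint>
#include <limits>

// Narrow a non-negative int to uint16_t, asserting that no bits are lost.
// Hypothetical helper name; the patch spells this checked_cast<uint16_t>(v).
inline uint16_t narrow_to_u16(int value) {
  assert((value >= 0 && value <= std::numeric_limits<uint16_t>::max()) &&
         "value does not fit in 16 bits");
  return static_cast<uint16_t>(value);
}
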
@ -152,7 +148,7 @@ public:
|
||||
static unsigned int align_code_offset(int offset);
|
||||
|
||||
// Deletion
|
||||
virtual void purge(bool free_code_cache_data, bool unregister_nmethod);
|
||||
void purge();
|
||||
|
||||
// Typing
|
||||
bool is_nmethod() const { return _kind == CodeBlobKind::Nmethod; }
|
||||
@ -225,7 +221,6 @@ public:
|
||||
|
||||
const ImmutableOopMap* oop_map_for_slot(int slot, address return_address) const;
|
||||
const ImmutableOopMap* oop_map_for_return_address(address return_address) const;
|
||||
virtual void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f) = 0;
|
||||
|
||||
// Frame support. Sizes are in word units.
|
||||
int frame_size() const { return _frame_size; }
|
||||
@ -273,7 +268,7 @@ class RuntimeBlob : public CodeBlob {
|
||||
|
||||
// Creation
|
||||
// a) simple CodeBlob
|
||||
RuntimeBlob(const char* name, CodeBlobKind kind, int size, int header_size)
|
||||
RuntimeBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size)
|
||||
: CodeBlob(name, kind, size, header_size)
|
||||
{}
|
||||
|
||||
@ -285,8 +280,8 @@ class RuntimeBlob : public CodeBlob {
|
||||
CodeBlobKind kind,
|
||||
CodeBuffer* cb,
|
||||
int size,
|
||||
int header_size,
|
||||
int frame_complete,
|
||||
uint16_t header_size,
|
||||
int16_t frame_complete,
|
||||
int frame_size,
|
||||
OopMapSet* oop_maps,
|
||||
bool caller_must_gc_arguments = false
|
||||
@ -324,10 +319,9 @@ class BufferBlob: public RuntimeBlob {
|
||||
|
||||
static void free(BufferBlob* buf);
|
||||
|
||||
// GC/Verification support
|
||||
void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f) override { /* nothing to do */ }
|
||||
|
||||
// Verification support
|
||||
void verify() override;
|
||||
|
||||
void print_on(outputStream* st) const override;
|
||||
void print_value_on(outputStream* st) const override;
|
||||
};
|
||||
@ -381,7 +375,7 @@ class RuntimeStub: public RuntimeBlob {
|
||||
const char* name,
|
||||
CodeBuffer* cb,
|
||||
int size,
|
||||
int frame_complete,
|
||||
int16_t frame_complete,
|
||||
int frame_size,
|
||||
OopMapSet* oop_maps,
|
||||
bool caller_must_gc_arguments
|
||||
@ -394,7 +388,7 @@ class RuntimeStub: public RuntimeBlob {
|
||||
static RuntimeStub* new_runtime_stub(
|
||||
const char* stub_name,
|
||||
CodeBuffer* cb,
|
||||
int frame_complete,
|
||||
int16_t frame_complete,
|
||||
int frame_size,
|
||||
OopMapSet* oop_maps,
|
||||
bool caller_must_gc_arguments,
|
||||
@ -405,10 +399,9 @@ class RuntimeStub: public RuntimeBlob {
|
||||
|
||||
address entry_point() const { return code_begin(); }
|
||||
|
||||
// GC/Verification support
|
||||
void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) override { /* nothing to do */ }
|
||||
|
||||
// Verification support
|
||||
void verify() override;
|
||||
|
||||
void print_on(outputStream* st) const override;
|
||||
void print_value_on(outputStream* st) const override;
|
||||
};
|
||||
@ -429,7 +422,7 @@ class SingletonBlob: public RuntimeBlob {
|
||||
CodeBlobKind kind,
|
||||
CodeBuffer* cb,
|
||||
int size,
|
||||
int header_size,
|
||||
uint16_t header_size,
|
||||
int frame_size,
|
||||
OopMapSet* oop_maps
|
||||
)
|
||||
@ -438,9 +431,9 @@ class SingletonBlob: public RuntimeBlob {
|
||||
|
||||
address entry_point() { return code_begin(); }
|
||||
|
||||
// GC/Verification support
|
||||
void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) override { /* nothing to do */ }
|
||||
// Verification support
|
||||
void verify() override; // does nothing
|
||||
|
||||
void print_on(outputStream* st) const override;
|
||||
void print_value_on(outputStream* st) const override;
|
||||
};
|
||||
@ -632,7 +625,6 @@ class UpcallStub: public RuntimeBlob {
|
||||
|
||||
// GC/Verification support
|
||||
void oops_do(OopClosure* f, const frame& frame);
|
||||
void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f) override;
|
||||
void verify() override;
|
||||
|
||||
// Misc.
|
||||
|
@ -244,14 +244,11 @@ static
|
||||
struct dir_stats_struct {
|
||||
int chunks_queried;
|
||||
int chunks_shared;
|
||||
int chunks_reshared;
|
||||
int chunks_elided;
|
||||
|
||||
void print() {
|
||||
tty->print_cr("Debug Data Chunks: %d, shared %d+%d, non-SP's elided %d",
|
||||
chunks_queried,
|
||||
chunks_shared, chunks_reshared,
|
||||
chunks_elided);
|
||||
tty->print_cr("Debug Data Chunks: %d, shared %d, non-SP's elided %d",
|
||||
chunks_queried, chunks_shared, chunks_elided);
|
||||
}
|
||||
} dir_stats;
|
||||
#endif //PRODUCT
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2005, 2023, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2005, 2024, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -388,9 +388,7 @@ void Dependencies::copy_to(nmethod* nm) {
|
||||
address beg = nm->dependencies_begin();
|
||||
address end = nm->dependencies_end();
|
||||
guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
|
||||
Copy::disjoint_words((HeapWord*) content_bytes(),
|
||||
(HeapWord*) beg,
|
||||
size_in_bytes() / sizeof(HeapWord));
|
||||
(void)memcpy(beg, content_bytes(), size_in_bytes());
|
||||
assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
|
||||
}
|
||||
|
||||
|
@ -137,9 +137,6 @@ struct java_nmethod_stats_struct {
|
||||
uint oops_size;
|
||||
uint metadata_size;
|
||||
|
||||
uint size_gt_32k;
|
||||
int size_max;
|
||||
|
||||
void note_nmethod(nmethod* nm) {
|
||||
nmethod_count += 1;
|
||||
total_size += nm->size();
|
||||
@ -158,9 +155,6 @@ struct java_nmethod_stats_struct {
|
||||
speculations_size += nm->speculations_size();
|
||||
jvmci_data_size += nm->jvmci_data_size();
|
||||
#endif
|
||||
int short_pos_max = ((1<<15) - 1);
|
||||
if (nm->size() > short_pos_max) size_gt_32k++;
|
||||
if (nm->size() > size_max) size_max = nm->size();
|
||||
}
|
||||
void print_nmethod_stats(const char* name) {
|
||||
if (nmethod_count == 0) return;
|
||||
@ -183,8 +177,6 @@ struct java_nmethod_stats_struct {
|
||||
if (speculations_size != 0) tty->print_cr(" speculations = %u (%f%%)", speculations_size, (speculations_size * 100.0f)/total_size);
|
||||
if (jvmci_data_size != 0) tty->print_cr(" JVMCI data = %u (%f%%)", jvmci_data_size, (jvmci_data_size * 100.0f)/total_size);
|
||||
#endif
|
||||
if (size_gt_32k != 0) tty->print_cr(" size > 32k = %u", size_gt_32k);
|
||||
if (size_max != 0) tty->print_cr(" max size = %d", size_max);
|
||||
}
|
||||
};
|
||||
|
||||
@ -1004,27 +996,6 @@ const char* nmethod::compiler_name() const {
|
||||
return compilertype2name(_compiler_type);
|
||||
}
|
||||
|
||||
// Fill in default values for various flag fields
|
||||
void nmethod::init_defaults() {
|
||||
// avoid uninitialized fields, even for short time periods
|
||||
_exception_cache = nullptr;
|
||||
|
||||
_has_unsafe_access = 0;
|
||||
_has_method_handle_invokes = 0;
|
||||
_has_wide_vectors = 0;
|
||||
_has_monitors = 0;
|
||||
|
||||
_state = not_installed;
|
||||
_has_flushed_dependencies = 0;
|
||||
_load_reported = false; // jvmti state
|
||||
|
||||
_oops_do_mark_link = nullptr;
|
||||
_osr_link = nullptr;
|
||||
#if INCLUDE_RTM_OPT
|
||||
_rtm_state = NoRTM;
|
||||
#endif
|
||||
}
|
||||
|
||||
#ifdef ASSERT
|
||||
class CheckForOopsClosure : public OopClosure {
|
||||
bool _found_oop = false;
|
||||
@ -1199,6 +1170,53 @@ nmethod* nmethod::new_nmethod(const methodHandle& method,
|
||||
return nm;
|
||||
}
|
||||
|
||||
// Fill in default values for various fields
|
||||
void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
|
||||
// avoid uninitialized fields, even for short time periods
|
||||
_exception_cache = nullptr;
|
||||
_gc_data = nullptr;
|
||||
_oops_do_mark_link = nullptr;
|
||||
_compiled_ic_data = nullptr;
|
||||
|
||||
#if INCLUDE_RTM_OPT
|
||||
_rtm_state = NoRTM;
|
||||
#endif
|
||||
_is_unloading_state = 0;
|
||||
_state = not_installed;
|
||||
|
||||
_has_unsafe_access = 0;
|
||||
_has_method_handle_invokes = 0;
|
||||
_has_wide_vectors = 0;
|
||||
_has_monitors = 0;
|
||||
_has_flushed_dependencies = 0;
|
||||
_is_unlinked = 0;
|
||||
_load_reported = 0; // jvmti state
|
||||
|
||||
_deoptimization_status = not_marked;
|
||||
|
||||
// SECT_CONSTS is first in code buffer so the offset should be 0.
|
||||
int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
|
||||
assert(consts_offset == 0, "const_offset: %d", consts_offset);
|
||||
|
||||
_entry_offset = checked_cast<uint16_t>(offsets->value(CodeOffsets::Entry));
|
||||
_verified_entry_offset = checked_cast<uint16_t>(offsets->value(CodeOffsets::Verified_Entry));
|
||||
_stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
|
||||
|
||||
_skipped_instructions_size = checked_cast<uint16_t>(code_buffer->total_skipped_instructions_size());
|
||||
}
|
||||
|
||||
// Post initialization
|
||||
void nmethod::post_init() {
|
||||
clear_unloading_state();
|
||||
|
||||
finalize_relocations();
|
||||
|
||||
Universe::heap()->register_nmethod(this);
|
||||
debug_only(Universe::heap()->verify_nmethod(this));
|
||||
|
||||
CodeCache::commit(this);
|
||||
}
|
||||
|
||||
// For native wrappers
|
||||
nmethod::nmethod(
|
||||
Method* method,
|
||||
@ -1214,69 +1232,58 @@ nmethod::nmethod(
|
||||
: CodeBlob("native nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
|
||||
offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false),
|
||||
_deoptimization_generation(0),
|
||||
_gc_epoch(CodeCache::gc_epoch()),
|
||||
_method(method),
|
||||
_gc_data(nullptr),
|
||||
_compiled_ic_data(nullptr),
|
||||
_is_unlinked(false),
|
||||
_native_receiver_sp_offset(basic_lock_owner_sp_offset),
|
||||
_native_basic_lock_sp_offset(basic_lock_sp_offset),
|
||||
_is_unloading_state(0),
|
||||
_deoptimization_status(not_marked)
|
||||
_native_basic_lock_sp_offset(basic_lock_sp_offset)
|
||||
{
|
||||
{
|
||||
debug_only(NoSafepointVerifier nsv;)
|
||||
assert_locked_or_safepoint(CodeCache_lock);
|
||||
|
||||
init_defaults();
|
||||
_comp_level = CompLevel_none;
|
||||
init_defaults(code_buffer, offsets);
|
||||
|
||||
_osr_entry_point = nullptr;
|
||||
_entry_bci = InvocationEntryBci;
|
||||
_num_stack_arg_slots = _method->constMethod()->num_stack_arg_slots();
|
||||
// We have no exception handler or deopt handler make the
|
||||
// values something that will never match a pc like the nmethod vtable entry
|
||||
_exception_offset = 0;
|
||||
_compile_id = compile_id;
|
||||
_comp_level = CompLevel_none;
|
||||
_compiler_type = type;
|
||||
_orig_pc_offset = 0;
|
||||
_num_stack_arg_slots = _method->constMethod()->num_stack_arg_slots();
|
||||
|
||||
if (offsets->value(CodeOffsets::Exceptions) != -1) {
|
||||
// Continuation enter intrinsic
|
||||
_exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
|
||||
} else {
|
||||
_exception_offset = 0;
|
||||
}
|
||||
// Native wrappers do not have deopt handlers. Make the values
|
||||
// something that will never match a pc like the nmethod vtable entry
|
||||
_deopt_handler_offset = 0;
|
||||
_deopt_mh_handler_offset = 0;
|
||||
_gc_epoch = CodeCache::gc_epoch();
|
||||
_unwind_handler_offset = 0;
|
||||
|
||||
_consts_offset = content_offset() + code_buffer->total_offset_of(code_buffer->consts());
|
||||
_stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
|
||||
_oops_offset = data_offset();
|
||||
_metadata_offset = _oops_offset + align_up(code_buffer->total_oop_size(), oopSize);
|
||||
_scopes_data_offset = _metadata_offset + align_up(code_buffer->total_metadata_size(), wordSize);
|
||||
_scopes_pcs_offset = _scopes_data_offset;
|
||||
_dependencies_offset = _scopes_pcs_offset;
|
||||
_handler_table_offset = _dependencies_offset;
|
||||
_metadata_offset = checked_cast<uint16_t>(align_up(code_buffer->total_oop_size(), oopSize));
|
||||
_dependencies_offset = checked_cast<uint16_t>(_metadata_offset + align_up(code_buffer->total_metadata_size(), wordSize));
|
||||
_scopes_pcs_offset = _dependencies_offset;
|
||||
_scopes_data_offset = _scopes_pcs_offset;
|
||||
_handler_table_offset = _scopes_data_offset;
|
||||
_nul_chk_table_offset = _handler_table_offset;
|
||||
_skipped_instructions_size = code_buffer->total_skipped_instructions_size();
|
||||
#if INCLUDE_JVMCI
|
||||
_speculations_offset = _nul_chk_table_offset;
|
||||
_jvmci_data_offset = _speculations_offset;
|
||||
_nmethod_end_offset = _jvmci_data_offset;
|
||||
DEBUG_ONLY( int data_end_offset = _jvmci_data_offset; )
|
||||
#else
|
||||
_nmethod_end_offset = _nul_chk_table_offset;
|
||||
DEBUG_ONLY( int data_end_offset = _nul_chk_table_offset; )
|
||||
#endif
|
||||
_compile_id = compile_id;
|
||||
_compiler_type = type;
|
||||
_entry_point = code_begin() + offsets->value(CodeOffsets::Entry);
|
||||
_verified_entry_point = code_begin() + offsets->value(CodeOffsets::Verified_Entry);
|
||||
_osr_entry_point = nullptr;
|
||||
_exception_cache = nullptr;
|
||||
_pc_desc_container.reset_to(nullptr);
|
||||
assert((data_offset() + data_end_offset) <= nmethod_size, "wrong nmethod's size: %d < %d", nmethod_size, (data_offset() + data_end_offset));
|
||||
|
||||
_exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
|
||||
_pc_desc_container.reset_to(nullptr);
|
||||
|
||||
code_buffer->copy_code_and_locs_to(this);
|
||||
code_buffer->copy_values_to(this);
|
||||
|
||||
clear_unloading_state();
|
||||
|
||||
finalize_relocations();
|
||||
|
||||
Universe::heap()->register_nmethod(this);
|
||||
debug_only(Universe::heap()->verify_nmethod(this));
|
||||
|
||||
CodeCache::commit(this);
|
||||
post_init();
|
||||
}
|
||||
|
||||
if (PrintNativeNMethods || PrintDebugInfo || PrintRelocations || PrintDependencies) {
|
||||
@ -1334,6 +1341,7 @@ void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod
|
||||
return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);
|
||||
}
|
||||
|
||||
// For normal JIT compiled code
|
||||
nmethod::nmethod(
|
||||
Method* method,
|
||||
CompilerType type,
|
||||
@ -1360,34 +1368,27 @@ nmethod::nmethod(
|
||||
: CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
|
||||
offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false),
|
||||
_deoptimization_generation(0),
|
||||
_gc_epoch(CodeCache::gc_epoch()),
|
||||
_method(method),
|
||||
_gc_data(nullptr),
|
||||
_compiled_ic_data(nullptr),
|
||||
_is_unlinked(false),
|
||||
_native_receiver_sp_offset(in_ByteSize(-1)),
|
||||
_native_basic_lock_sp_offset(in_ByteSize(-1)),
|
||||
_is_unloading_state(0),
|
||||
_deoptimization_status(not_marked)
|
||||
_osr_link(nullptr)
|
||||
{
|
||||
assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
|
||||
{
|
||||
debug_only(NoSafepointVerifier nsv;)
|
||||
assert_locked_or_safepoint(CodeCache_lock);
|
||||
|
||||
init_defaults();
|
||||
_entry_bci = entry_bci;
|
||||
_num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
|
||||
_compile_id = compile_id;
|
||||
_compiler_type = type;
|
||||
_comp_level = comp_level;
|
||||
_orig_pc_offset = orig_pc_offset;
|
||||
_gc_epoch = CodeCache::gc_epoch();
|
||||
init_defaults(code_buffer, offsets);
|
||||
|
||||
// Section offsets
|
||||
_consts_offset = content_offset() + code_buffer->total_offset_of(code_buffer->consts());
|
||||
_stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
|
||||
set_ctable_begin(header_begin() + _consts_offset);
|
||||
_skipped_instructions_size = code_buffer->total_skipped_instructions_size();
|
||||
_osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
|
||||
_entry_bci = entry_bci;
|
||||
_compile_id = compile_id;
|
||||
_comp_level = comp_level;
|
||||
_compiler_type = type;
|
||||
_orig_pc_offset = orig_pc_offset;
|
||||
|
||||
_num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
|
||||
|
||||
set_ctable_begin(header_begin() + content_offset());
|
||||
|
||||
#if INCLUDE_JVMCI
|
||||
if (compiler->is_jvmci()) {
|
||||
@ -1427,28 +1428,23 @@ nmethod::nmethod(
|
||||
} else {
|
||||
_unwind_handler_offset = -1;
|
||||
}
|
||||
|
||||
_oops_offset = data_offset();
|
||||
_metadata_offset = _oops_offset + align_up(code_buffer->total_oop_size(), oopSize);
|
||||
_scopes_data_offset = _metadata_offset + align_up(code_buffer->total_metadata_size(), wordSize);
|
||||
|
||||
_scopes_pcs_offset = _scopes_data_offset + align_up(debug_info->data_size (), oopSize);
|
||||
_dependencies_offset = _scopes_pcs_offset + adjust_pcs_size(debug_info->pcs_size());
|
||||
_handler_table_offset = _dependencies_offset + align_up((int)dependencies->size_in_bytes(), oopSize);
|
||||
_nul_chk_table_offset = _handler_table_offset + align_up(handler_table->size_in_bytes(), oopSize);
|
||||
_metadata_offset = checked_cast<uint16_t>(align_up(code_buffer->total_oop_size(), oopSize));
|
||||
_dependencies_offset = checked_cast<uint16_t>(_metadata_offset + align_up(code_buffer->total_metadata_size(), wordSize));
|
||||
_scopes_pcs_offset = checked_cast<uint16_t>(_dependencies_offset + align_up((int)dependencies->size_in_bytes(), oopSize));
|
||||
_scopes_data_offset = _scopes_pcs_offset + adjust_pcs_size(debug_info->pcs_size());
|
||||
_handler_table_offset = _scopes_data_offset + align_up(debug_info->data_size (), oopSize);
|
||||
_nul_chk_table_offset = _handler_table_offset + align_up(handler_table->size_in_bytes(), oopSize);
|
||||
#if INCLUDE_JVMCI
|
||||
_speculations_offset = _nul_chk_table_offset + align_up(nul_chk_table->size_in_bytes(), oopSize);
|
||||
_jvmci_data_offset = _speculations_offset + align_up(speculations_len, oopSize);
|
||||
int jvmci_data_size = compiler->is_jvmci() ? jvmci_data->size() : 0;
|
||||
_nmethod_end_offset = _jvmci_data_offset + align_up(jvmci_data_size, oopSize);
|
||||
_speculations_offset = _nul_chk_table_offset + align_up(nul_chk_table->size_in_bytes(), oopSize);
|
||||
_jvmci_data_offset = _speculations_offset + align_up(speculations_len, oopSize);
|
||||
int jvmci_data_size = compiler->is_jvmci() ? jvmci_data->size() : 0;
|
||||
DEBUG_ONLY( int data_end_offset = _jvmci_data_offset + align_up(jvmci_data_size, oopSize); )
|
||||
#else
|
||||
_nmethod_end_offset = _nul_chk_table_offset + align_up(nul_chk_table->size_in_bytes(), oopSize);
|
||||
DEBUG_ONLY( int data_end_offset = _nul_chk_table_offset + align_up(nul_chk_table->size_in_bytes(), oopSize); )
|
||||
#endif
|
||||
_entry_point = code_begin() + offsets->value(CodeOffsets::Entry);
|
||||
_verified_entry_point = code_begin() + offsets->value(CodeOffsets::Verified_Entry);
|
||||
_osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
|
||||
_exception_cache = nullptr;
|
||||
assert((data_offset() + data_end_offset) <= nmethod_size, "wrong nmethod's size: %d < %d", nmethod_size, (data_offset() + data_end_offset));
|
||||
|
||||
// after _scopes_pcs_offset is set
|
||||
_pc_desc_container.reset_to(scopes_pcs_begin());
|
||||
|
||||
code_buffer->copy_code_and_locs_to(this);
|
||||
@ -1456,7 +1452,6 @@ nmethod::nmethod(
|
||||
code_buffer->copy_values_to(this);
|
||||
debug_info->copy_to(this);
|
||||
dependencies->copy_to(this);
|
||||
clear_unloading_state();
|
||||
|
||||
#if INCLUDE_JVMCI
|
||||
if (compiler->is_jvmci()) {
|
||||
@ -1465,13 +1460,6 @@ nmethod::nmethod(
|
||||
}
|
||||
#endif
|
||||
|
||||
finalize_relocations();
|
||||
|
||||
Universe::heap()->register_nmethod(this);
|
||||
debug_only(Universe::heap()->verify_nmethod(this));
|
||||
|
||||
CodeCache::commit(this);
|
||||
|
||||
// Copy contents of ExceptionHandlerTable to nmethod
|
||||
handler_table->copy_to(this);
|
||||
nul_chk_table->copy_to(this);
|
||||
@ -1483,10 +1471,12 @@ nmethod::nmethod(
|
||||
}
|
||||
#endif
|
||||
|
||||
post_init();
|
||||
|
||||
// we use the information of entry points to find out if a method is
|
||||
// static or non static
|
||||
assert(compiler->is_c2() || compiler->is_jvmci() ||
|
||||
_method->is_static() == (entry_point() == _verified_entry_point),
|
||||
_method->is_static() == (entry_point() == verified_entry_point()),
|
||||
" entry points must be same for static methods and vice versa");
|
||||
}
|
||||
}
|
||||
@ -1999,7 +1989,7 @@ bool nmethod::make_not_entrant() {
|
||||
|
||||
// For concurrent GCs, there must be a handshake between unlink and flush
|
||||
void nmethod::unlink() {
|
||||
if (_is_unlinked) {
|
||||
if (is_unlinked()) {
|
||||
// Already unlinked.
|
||||
return;
|
||||
}
|
||||
@ -2033,8 +2023,7 @@ void nmethod::unlink() {
|
||||
ClassUnloadingContext::context()->register_unlinked_nmethod(this);
|
||||
}
|
||||
|
||||
void nmethod::purge(bool free_code_cache_data, bool unregister_nmethod) {
|
||||
assert(!free_code_cache_data, "must only call not freeing code cache data");
|
||||
void nmethod::purge(bool unregister_nmethod) {
|
||||
|
||||
MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);
|
||||
|
||||
@ -2062,7 +2051,7 @@ void nmethod::purge(bool free_code_cache_data, bool unregister_nmethod) {
|
||||
}
|
||||
CodeCache::unregister_old_nmethod(this);
|
||||
|
||||
CodeBlob::purge(free_code_cache_data, unregister_nmethod);
|
||||
CodeBlob::purge();
|
||||
}
|
||||
|
||||
oop nmethod::oop_at(int index) const {
|
||||
|
@@ -188,8 +188,23 @@ class nmethod : public CodeBlob {

Method* _method;

// To support simple linked-list chaining of nmethods:
nmethod* _osr_link; // from InstanceKlass::osr_nmethods_head
// To reduce header size union fields which usages do not overlap.
union {
// To support simple linked-list chaining of nmethods:
nmethod* _osr_link; // from InstanceKlass::osr_nmethods_head
struct {
// These are used for compiled synchronized native methods to
// locate the owner and stack slot for the BasicLock. They are
// needed because there is no debug information for compiled native
// wrappers and the oop maps are insufficient to allow
// frame::retrieve_receiver() to work. Currently they are expected
// to be byte offsets from the Java stack pointer for maximum code
// sharing between platforms. JVMTI's GetLocalInstance() uses these
// offsets to find the receiver for non-static native wrapper frames.
ByteSize _native_receiver_sp_offset;
ByteSize _native_basic_lock_sp_offset;
};
};

PcDescContainer _pc_desc_container;
ExceptionCache* volatile _exception_cache;
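
The union above shrinks the header because the OSR link and the native-wrapper offsets now overlap instead of occupying separate slots; as the new comment says, their usages never overlap: _osr_link only chains OSR nmethods, while the two ByteSize offsets are only filled in for native method wrappers, and a native wrapper is never an OSR method. A small self-contained sketch of the same pattern, using hypothetical types rather than the real nmethod:

#include <cassert>

// Either an OSR-capable JIT method or a native wrapper, never both, so the
// OSR list link and the native-wrapper stack offsets can share storage.
struct FakeMethodBlob {
  bool is_native_wrapper;
  union {
    FakeMethodBlob* osr_link;       // JIT methods: next entry in the OSR list
    struct {
      int receiver_sp_offset;       // native wrappers only
      int basic_lock_sp_offset;     // native wrappers only
    } native;
  };

  FakeMethodBlob* next_osr() const {
    assert(!is_native_wrapper && "osr_link is only valid for JIT methods");
    return osr_link;
  }
  int receiver_offset() const {
    assert(is_native_wrapper && "offsets are only stored for native wrappers");
    return native.receiver_sp_offset;
  }
};
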
@ -200,18 +215,20 @@ class nmethod : public CodeBlob {
|
||||
static nmethod* volatile _oops_do_mark_nmethods;
|
||||
oops_do_mark_link* volatile _oops_do_mark_link;
|
||||
|
||||
// offsets for entry points
|
||||
address _entry_point; // entry point with class check
|
||||
address _verified_entry_point; // entry point without class check
|
||||
address _osr_entry_point; // entry point for on stack replacement
|
||||
|
||||
CompiledICData* _compiled_ic_data;
|
||||
|
||||
// Shared fields for all nmethod's
|
||||
int _entry_bci; // != InvocationEntryBci if this nmethod is an on-stack replacement method
|
||||
// offsets for entry points
|
||||
address _osr_entry_point; // entry point for on stack replacement
|
||||
uint16_t _entry_offset; // entry point with class check
|
||||
uint16_t _verified_entry_offset; // entry point without class check
|
||||
int _entry_bci; // != InvocationEntryBci if this nmethod is an on-stack replacement method
|
||||
|
||||
// Offsets for different nmethod parts
|
||||
int _exception_offset;
|
||||
// _consts_offset == _content_offset because SECT_CONSTS is first in code buffer
|
||||
|
||||
int _stub_offset;
|
||||
|
||||
// Offsets for different stubs section parts
|
||||
int _exception_offset;
|
||||
// All deoptee's will resume execution at this location described by
|
||||
// this offset.
|
||||
int _deopt_handler_offset;
|
||||
@ -221,33 +238,29 @@ class nmethod : public CodeBlob {
|
||||
// Offset of the unwind handler if it exists
|
||||
int _unwind_handler_offset;
|
||||
|
||||
int _consts_offset;
|
||||
int _stub_offset;
|
||||
int _oops_offset; // offset to where embedded oop table begins (inside data)
|
||||
int _metadata_offset; // embedded meta data table
|
||||
int _scopes_data_offset;
|
||||
int _scopes_pcs_offset;
|
||||
int _dependencies_offset;
|
||||
int _handler_table_offset;
|
||||
int _nul_chk_table_offset;
|
||||
uint16_t _skipped_instructions_size;
|
||||
|
||||
// _oops_offset == _data_offset, offset where embedded oop table begins (inside data)
|
||||
uint16_t _metadata_offset; // embedded meta data table
|
||||
uint16_t _dependencies_offset;
|
||||
uint16_t _scopes_pcs_offset;
|
||||
int _scopes_data_offset;
|
||||
int _handler_table_offset;
|
||||
int _nul_chk_table_offset;
|
||||
#if INCLUDE_JVMCI
|
||||
int _speculations_offset;
|
||||
int _jvmci_data_offset;
|
||||
int _speculations_offset;
|
||||
int _jvmci_data_offset;
|
||||
#endif
|
||||
int _nmethod_end_offset;
|
||||
int _skipped_instructions_size;
|
||||
|
||||
// location in frame (offset for sp) that deopt can store the original
|
||||
// pc during a deopt.
|
||||
int _orig_pc_offset;
|
||||
|
||||
int _compile_id; // which compilation made this nmethod
|
||||
int _compile_id; // which compilation made this nmethod
|
||||
CompLevel _comp_level; // compilation level (s1)
|
||||
CompilerType _compiler_type; // which compiler made this nmethod (u1)
|
||||
|
||||
int _num_stack_arg_slots; // Number of arguments passed on the stack
|
||||
|
||||
CompilerType _compiler_type; // which compiler made this nmethod (u1)
|
||||
|
||||
bool _is_unlinked;
|
||||
uint16_t _num_stack_arg_slots; // Number of arguments passed on the stack
|
||||
|
||||
#if INCLUDE_RTM_OPT
|
||||
// RTM state at compile time. Used during deoptimization to decide
|
||||
@ -255,25 +268,9 @@ class nmethod : public CodeBlob {
|
||||
RTMState _rtm_state;
|
||||
#endif
|
||||
|
||||
// These are used for compiled synchronized native methods to
|
||||
// locate the owner and stack slot for the BasicLock. They are
|
||||
// needed because there is no debug information for compiled native
|
||||
// wrappers and the oop maps are insufficient to allow
|
||||
// frame::retrieve_receiver() to work. Currently they are expected
|
||||
// to be byte offsets from the Java stack pointer for maximum code
|
||||
// sharing between platforms. JVMTI's GetLocalInstance() uses these
|
||||
// offsets to find the receiver for non-static native wrapper frames.
|
||||
ByteSize _native_receiver_sp_offset;
|
||||
ByteSize _native_basic_lock_sp_offset;
|
||||
|
||||
CompLevel _comp_level; // compilation level (s1)
|
||||
|
||||
// Local state used to keep track of whether unloading is happening or not
|
||||
volatile uint8_t _is_unloading_state;
|
||||
|
||||
// used by jvmti to track if an event has been posted for this nmethod.
|
||||
bool _load_reported;
|
||||
|
||||
// Protected by NMethodState_lock
|
||||
volatile signed char _state; // {not_installed, in_use, not_entrant}
|
||||
|
||||
@ -282,7 +279,9 @@ class nmethod : public CodeBlob {
|
||||
_has_method_handle_invokes:1,// Has this method MethodHandle invokes?
|
||||
_has_wide_vectors:1, // Preserve wide vectors at safepoints
|
||||
_has_monitors:1, // Fastpath monitor detection for continuations
|
||||
_has_flushed_dependencies:1; // Used for maintenance of dependencies (under CodeCache_lock)
|
||||
_has_flushed_dependencies:1, // Used for maintenance of dependencies (under CodeCache_lock)
|
||||
_is_unlinked:1, // mark during class unloading
|
||||
_load_reported:1; // used by jvmti to track if an event has been posted for this nmethod
|
||||
|
||||
enum DeoptimizationStatus : u1 {
|
||||
not_marked,
|
||||
@ -297,6 +296,12 @@ class nmethod : public CodeBlob {
|
||||
return Atomic::load(&_deoptimization_status);
|
||||
}
|
||||
|
||||
// Initialize fields to their default values
|
||||
void init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets);
|
||||
|
||||
// Post initialization
|
||||
void post_init();
|
||||
|
||||
// For native wrappers
|
||||
nmethod(Method* method,
|
||||
CompilerType type,
|
||||
@ -309,7 +314,7 @@ class nmethod : public CodeBlob {
|
||||
ByteSize basic_lock_sp_offset, /* synchronized natives only */
|
||||
OopMapSet* oop_maps);
|
||||
|
||||
// Creation support
|
||||
// For normal JIT compiled code
|
||||
nmethod(Method* method,
|
||||
CompilerType type,
|
||||
int nmethod_size,
|
||||
@ -353,9 +358,6 @@ class nmethod : public CodeBlob {
|
||||
// Inform external interfaces that a compiled method has been unloaded
|
||||
void post_compiled_method_unload();
|
||||
|
||||
// Initialize fields to their default values
|
||||
void init_defaults();
|
||||
|
||||
PcDesc* find_pc_desc(address pc, bool approximate) {
|
||||
return _pc_desc_container.find_pc_desc(pc, approximate, PcDescSearch(code_begin(), scopes_pcs_begin(), scopes_pcs_end()));
|
||||
}
|
||||
@ -523,40 +525,41 @@ public:
|
||||
const char* compiler_name () const;
|
||||
|
||||
// boundaries for different parts
|
||||
address consts_begin () const { return header_begin() + _consts_offset ; }
|
||||
address consts_end () const { return header_begin() + code_offset() ; }
|
||||
address insts_begin () const { return header_begin() + code_offset() ; }
|
||||
address consts_begin () const { return content_begin(); }
|
||||
address consts_end () const { return code_begin() ; }
|
||||
address insts_begin () const { return code_begin() ; }
|
||||
address insts_end () const { return header_begin() + _stub_offset ; }
|
||||
address stub_begin () const { return header_begin() + _stub_offset ; }
|
||||
address stub_end () const { return header_begin() + _oops_offset ; }
|
||||
address stub_end () const { return data_begin() ; }
|
||||
address exception_begin () const { return header_begin() + _exception_offset ; }
|
||||
address deopt_handler_begin () const { return header_begin() + _deopt_handler_offset ; }
|
||||
address deopt_mh_handler_begin() const { return header_begin() + _deopt_mh_handler_offset ; }
|
||||
address unwind_handler_begin () const { return _unwind_handler_offset != -1 ? (header_begin() + _unwind_handler_offset) : nullptr; }
|
||||
oop* oops_begin () const { return (oop*) (header_begin() + _oops_offset) ; }
|
||||
oop* oops_end () const { return (oop*) (header_begin() + _metadata_offset) ; }
|
||||
|
||||
Metadata** metadata_begin () const { return (Metadata**) (header_begin() + _metadata_offset) ; }
|
||||
Metadata** metadata_end () const { return (Metadata**) (header_begin() + _scopes_data_offset) ; }
|
||||
oop* oops_begin () const { return (oop*) data_begin(); }
|
||||
oop* oops_end () const { return (oop*) (data_begin() + _metadata_offset) ; }
|
||||
|
||||
address scopes_data_begin () const { return header_begin() + _scopes_data_offset ; }
|
||||
address scopes_data_end () const { return header_begin() + _scopes_pcs_offset ; }
|
||||
PcDesc* scopes_pcs_begin () const { return (PcDesc*)(header_begin() + _scopes_pcs_offset) ; }
|
||||
PcDesc* scopes_pcs_end () const { return (PcDesc*)(header_begin() + _dependencies_offset) ; }
|
||||
address dependencies_begin () const { return header_begin() + _dependencies_offset ; }
|
||||
address dependencies_end () const { return header_begin() + _handler_table_offset ; }
|
||||
address handler_table_begin () const { return header_begin() + _handler_table_offset ; }
|
||||
address handler_table_end () const { return header_begin() + _nul_chk_table_offset ; }
|
||||
address nul_chk_table_begin () const { return header_begin() + _nul_chk_table_offset ; }
|
||||
Metadata** metadata_begin () const { return (Metadata**) (data_begin() + _metadata_offset) ; }
|
||||
Metadata** metadata_end () const { return (Metadata**) (data_begin() + _dependencies_offset) ; }
|
||||
|
||||
address dependencies_begin () const { return data_begin() + _dependencies_offset ; }
|
||||
address dependencies_end () const { return data_begin() + _scopes_pcs_offset ; }
|
||||
PcDesc* scopes_pcs_begin () const { return (PcDesc*)(data_begin() + _scopes_pcs_offset) ; }
|
||||
PcDesc* scopes_pcs_end () const { return (PcDesc*)(data_begin() + _scopes_data_offset) ; }
|
||||
address scopes_data_begin () const { return data_begin() + _scopes_data_offset ; }
|
||||
address scopes_data_end () const { return data_begin() + _handler_table_offset ; }
|
||||
address handler_table_begin () const { return data_begin() + _handler_table_offset ; }
|
||||
address handler_table_end () const { return data_begin() + _nul_chk_table_offset ; }
|
||||
address nul_chk_table_begin () const { return data_begin() + _nul_chk_table_offset ; }
|
||||
|
||||
#if INCLUDE_JVMCI
|
||||
address nul_chk_table_end () const { return header_begin() + _speculations_offset ; }
|
||||
address speculations_begin () const { return header_begin() + _speculations_offset ; }
|
||||
address speculations_end () const { return header_begin() + _jvmci_data_offset ; }
|
||||
address jvmci_data_begin () const { return header_begin() + _jvmci_data_offset ; }
|
||||
address jvmci_data_end () const { return header_begin() + _nmethod_end_offset ; }
|
||||
address nul_chk_table_end () const { return data_begin() + _speculations_offset ; }
|
||||
address speculations_begin () const { return data_begin() + _speculations_offset ; }
|
||||
address speculations_end () const { return data_begin() + _jvmci_data_offset ; }
|
||||
address jvmci_data_begin () const { return data_begin() + _jvmci_data_offset ; }
|
||||
address jvmci_data_end () const { return data_end(); }
|
||||
#else
|
||||
address nul_chk_table_end () const { return header_begin() + _nmethod_end_offset ; }
|
||||
address nul_chk_table_end () const { return data_end(); }
|
||||
#endif
|
||||
|
||||
// Sizes
|
||||
@@ -596,8 +599,8 @@ public:
bool nul_chk_table_contains (address addr) const { return nul_chk_table_begin() <= addr && addr < nul_chk_table_end(); }

// entry points
address entry_point() const { return _entry_point; } // normal entry point
address verified_entry_point() const { return _verified_entry_point; } // if klass is correct
address entry_point() const { return code_begin() + _entry_offset; } // normal entry point
address verified_entry_point() const { return code_begin() + _verified_entry_offset; } // if klass is correct

enum : signed char { not_installed = -1, // in construction, only the owner doing the construction is
// allowed to advance state
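
Storing the entry points as 16-bit offsets from code_begin() rather than as absolute addresses is one of the larger per-nmethod savings on 64-bit targets: two 8-byte address fields become two 2-byte offsets, and the full pointers are recomputed on access as the accessors above show. A tiny sketch of the pattern with hypothetical names; the 64 KiB limit is the assumption that makes the narrowing safe, and the patch enforces equivalent casts with checked_cast:

#include <cassert>
#include <cstddef>
#include <cstdint>

typedef unsigned char* address;   // stand-in for HotSpot's address typedef

class EntryOffsetHolder {
  address  _code_begin;
  uint16_t _entry_offset;         // replaces an 8-byte 'address _entry_point'

public:
  EntryOffsetHolder(address code_begin, address entry_point)
    : _code_begin(code_begin), _entry_offset(0) {
    std::ptrdiff_t off = entry_point - code_begin;
    // Only valid while the entry lies in the first 64 KiB of the code section.
    assert(off >= 0 && off <= UINT16_MAX);
    _entry_offset = static_cast<uint16_t>(off);
  }

  // Rebuild the full pointer on demand, much like entry_point() above.
  address entry_point() const { return _code_begin + _entry_offset; }
};
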
@ -617,9 +620,6 @@ public:
|
||||
bool is_unloading();
|
||||
void do_unloading(bool unloading_occurred);
|
||||
|
||||
bool is_unlinked() const { return _is_unlinked; }
|
||||
void set_is_unlinked() { assert(!_is_unlinked, "already unlinked"); _is_unlinked = true; }
|
||||
|
||||
#if INCLUDE_RTM_OPT
|
||||
// rtm state accessing and manipulating
|
||||
RTMState rtm_state() const { return _rtm_state; }
|
||||
@ -679,6 +679,12 @@ public:
|
||||
_has_flushed_dependencies = z;
|
||||
}
|
||||
|
||||
bool is_unlinked() const { return _is_unlinked; }
|
||||
void set_is_unlinked() {
|
||||
assert(!_is_unlinked, "already unlinked");
|
||||
_is_unlinked = true;
|
||||
}
|
||||
|
||||
int comp_level() const { return _comp_level; }
|
||||
|
||||
// Support for oops in scopes and relocs:
|
||||
@ -723,7 +729,6 @@ protected:
|
||||
// Note: _exception_cache may be read and cleaned concurrently.
|
||||
ExceptionCache* exception_cache() const { return _exception_cache; }
|
||||
ExceptionCache* exception_cache_acquire() const;
|
||||
void set_exception_cache(ExceptionCache *ec) { _exception_cache = ec; }
|
||||
|
||||
public:
|
||||
address handler_for_exception_and_pc(Handle exception, address pc);
|
||||
@ -752,7 +757,7 @@ public:
|
||||
return (addr >= code_begin() && addr < verified_entry_point());
|
||||
}
|
||||
|
||||
void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) override;
|
||||
void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f);
|
||||
|
||||
// implicit exceptions support
|
||||
address continuation_for_implicit_div0_exception(address pc) { return continuation_for_implicit_exception(pc, true); }
|
||||
@ -788,11 +793,11 @@ public:
|
||||
void unlink_from_method();
|
||||
|
||||
// On-stack replacement support
|
||||
int osr_entry_bci() const { assert(is_osr_method(), "wrong kind of nmethod"); return _entry_bci; }
|
||||
address osr_entry() const { assert(is_osr_method(), "wrong kind of nmethod"); return _osr_entry_point; }
|
||||
void invalidate_osr_method();
|
||||
nmethod* osr_link() const { return _osr_link; }
|
||||
void set_osr_link(nmethod *n) { _osr_link = n; }
|
||||
int osr_entry_bci() const { assert(is_osr_method(), "wrong kind of nmethod"); return _entry_bci; }
|
||||
address osr_entry() const { assert(is_osr_method(), "wrong kind of nmethod"); return _osr_entry_point; }
|
||||
nmethod* osr_link() const { return _osr_link; }
|
||||
void set_osr_link(nmethod *n) { _osr_link = n; }
|
||||
void invalidate_osr_method();
|
||||
|
||||
int num_stack_arg_slots(bool rounded = true) const {
|
||||
return rounded ? align_up(_num_stack_arg_slots, 2) : _num_stack_arg_slots;
|
||||
@ -805,7 +810,7 @@ public:
|
||||
void unlink();
|
||||
|
||||
// Deallocate this nmethod - called by the GC
|
||||
void purge(bool free_code_cache_data, bool unregister_nmethod) override;
|
||||
void purge(bool unregister_nmethod);
|
||||
|
||||
// See comment at definition of _last_seen_on_stack
|
||||
void mark_as_maybe_on_stack();
|
||||
@ -970,16 +975,17 @@ public:
|
||||
|
||||
// JVMTI's GetLocalInstance() support
|
||||
ByteSize native_receiver_sp_offset() {
|
||||
assert(is_native_method(), "sanity");
|
||||
return _native_receiver_sp_offset;
|
||||
}
|
||||
ByteSize native_basic_lock_sp_offset() {
|
||||
assert(is_native_method(), "sanity");
|
||||
return _native_basic_lock_sp_offset;
|
||||
}
|
||||
|
||||
// support for code generation
|
||||
static ByteSize verified_entry_point_offset() { return byte_offset_of(nmethod, _verified_entry_point); }
|
||||
static ByteSize osr_entry_point_offset() { return byte_offset_of(nmethod, _osr_entry_point); }
|
||||
static ByteSize state_offset() { return byte_offset_of(nmethod, _state); }
|
||||
static ByteSize osr_entry_point_offset() { return byte_offset_of(nmethod, _osr_entry_point); }
|
||||
static ByteSize state_offset() { return byte_offset_of(nmethod, _state); }
|
||||
|
||||
void metadata_do(MetadataClosure* f);
|
||||
|
||||
|
@ -1793,7 +1793,7 @@ bool CompileBroker::init_compiler_runtime() {
|
||||
void CompileBroker::free_buffer_blob_if_allocated(CompilerThread* thread) {
|
||||
BufferBlob* blob = thread->get_buffer_blob();
|
||||
if (blob != nullptr) {
|
||||
blob->purge(true /* free_code_cache_data */, true /* unregister_nmethod */);
|
||||
blob->purge();
|
||||
MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
|
||||
CodeCache::free(blob);
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2016, 2023, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2016, 2024, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -100,7 +100,7 @@ inline bool is_compile(int comp_level) {
|
||||
|
||||
|
||||
// States of Restricted Transactional Memory usage.
|
||||
enum RTMState {
|
||||
enum RTMState: u1 {
|
||||
NoRTM = 0x2, // Don't use RTM
|
||||
UseRTM = 0x1, // Use RTM
|
||||
ProfileRTM = 0x0 // Use RTM with abort ratio calculation
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2023, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2023, 2024, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -116,7 +116,7 @@ void ClassUnloadingContext::purge_nmethods() {
|
||||
NMethodSet* set = _unlinked_nmethods[i];
|
||||
for (nmethod* nm : *set) {
|
||||
freed_memory += nm->size();
|
||||
nm->purge(false /* free_code_cache_data */, _unregister_nmethods_during_purge);
|
||||
nm->purge(_unregister_nmethods_during_purge);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -294,7 +294,7 @@
|
||||
nonstatic_field(MethodData, _backedge_mask, int) \
|
||||
nonstatic_field(MethodData, _jvmci_ir_size, int) \
|
||||
\
|
||||
nonstatic_field(nmethod, _verified_entry_point, address) \
|
||||
nonstatic_field(nmethod, _verified_entry_offset, u2) \
|
||||
nonstatic_field(nmethod, _comp_level, CompLevel) \
|
||||
\
|
||||
nonstatic_field(ObjArrayKlass, _element_klass, Klass*) \
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 1997, 2024, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -38,8 +38,8 @@ class HeapBlock {
|
||||
|
||||
public:
|
||||
struct Header {
|
||||
size_t _length; // the length in segments
|
||||
bool _used; // Used bit
|
||||
uint32_t _length; // the length in segments
|
||||
bool _used; // Used bit
|
||||
};
|
||||
|
||||
protected:
|
||||
@ -51,9 +51,11 @@ class HeapBlock {
|
||||
|
||||
public:
|
||||
// Initialization
|
||||
void initialize(size_t length) { _header._length = length; set_used(); }
|
||||
void initialize(size_t length) { set_length(length); set_used(); }
|
||||
// Merging/splitting
|
||||
void set_length(size_t length) { _header._length = length; }
|
||||
void set_length(size_t length) {
|
||||
_header._length = checked_cast<uint32_t>(length);
|
||||
}
|
||||
|
||||
// Accessors
|
||||
void* allocated_space() const { return (void*)(this + 1); }
|
||||
|
@ -975,8 +975,9 @@ void frame::oops_nmethod_do(OopClosure* f, NMethodClosure* cf, DerivedOopClosure
|
||||
|
||||
// Preserve potential arguments for a callee. We handle this by dispatching
|
||||
// on the codeblob. For c2i, we do
|
||||
if (reg_map->include_argument_oops()) {
|
||||
_cb->preserve_callee_argument_oops(*this, reg_map, f);
|
||||
if (reg_map->include_argument_oops() && _cb->is_nmethod()) {
|
||||
// Only nmethod preserves outgoing arguments at call.
|
||||
_cb->as_nmethod()->preserve_callee_argument_oops(*this, reg_map, f);
|
||||
}
|
||||
}
|
||||
// In cases where perm gen is collected, GC will want to mark
|
||||
|
@ -507,7 +507,7 @@
|
||||
nonstatic_field(CodeHeap, _segmap, VirtualSpace) \
|
||||
nonstatic_field(CodeHeap, _log2_segment_size, int) \
|
||||
nonstatic_field(HeapBlock, _header, HeapBlock::Header) \
|
||||
nonstatic_field(HeapBlock::Header, _length, size_t) \
|
||||
nonstatic_field(HeapBlock::Header, _length, uint32_t) \
|
||||
nonstatic_field(HeapBlock::Header, _used, bool) \
|
||||
\
|
||||
/**********************************/ \
|
||||
@ -550,11 +550,11 @@
|
||||
\
|
||||
nonstatic_field(CodeBlob, _name, const char*) \
|
||||
nonstatic_field(CodeBlob, _size, int) \
|
||||
nonstatic_field(CodeBlob, _header_size, int) \
|
||||
nonstatic_field(CodeBlob, _header_size, u2) \
|
||||
nonstatic_field(CodeBlob, _relocation_size, int) \
|
||||
nonstatic_field(CodeBlob, _content_offset, int) \
|
||||
nonstatic_field(CodeBlob, _code_offset, int) \
|
||||
nonstatic_field(CodeBlob, _frame_complete_offset, int) \
|
||||
nonstatic_field(CodeBlob, _frame_complete_offset, int16_t) \
|
||||
nonstatic_field(CodeBlob, _data_offset, int) \
|
||||
nonstatic_field(CodeBlob, _frame_size, int) \
|
||||
nonstatic_field(CodeBlob, _oop_maps, ImmutableOopMapSet*) \
|
||||
@ -575,17 +575,14 @@
|
||||
nonstatic_field(nmethod, _deopt_mh_handler_offset, int) \
|
||||
nonstatic_field(nmethod, _orig_pc_offset, int) \
|
||||
nonstatic_field(nmethod, _stub_offset, int) \
|
||||
nonstatic_field(nmethod, _consts_offset, int) \
|
||||
nonstatic_field(nmethod, _oops_offset, int) \
|
||||
nonstatic_field(nmethod, _metadata_offset, int) \
|
||||
nonstatic_field(nmethod, _metadata_offset, u2) \
|
||||
nonstatic_field(nmethod, _scopes_pcs_offset, u2) \
|
||||
nonstatic_field(nmethod, _scopes_data_offset, int) \
|
||||
nonstatic_field(nmethod, _scopes_pcs_offset, int) \
|
||||
nonstatic_field(nmethod, _dependencies_offset, int) \
|
||||
nonstatic_field(nmethod, _dependencies_offset, u2) \
|
||||
nonstatic_field(nmethod, _handler_table_offset, int) \
|
||||
nonstatic_field(nmethod, _nul_chk_table_offset, int) \
|
||||
nonstatic_field(nmethod, _nmethod_end_offset, int) \
|
||||
nonstatic_field(nmethod, _entry_point, address) \
|
||||
nonstatic_field(nmethod, _verified_entry_point, address) \
|
||||
nonstatic_field(nmethod, _entry_offset, u2) \
|
||||
nonstatic_field(nmethod, _verified_entry_offset, u2) \
|
||||
nonstatic_field(nmethod, _osr_entry_point, address) \
|
||||
nonstatic_field(nmethod, _compile_id, int) \
|
||||
nonstatic_field(nmethod, _comp_level, CompLevel) \
|
||||
@ -1131,6 +1128,7 @@
|
||||
declare_integer_type(ssize_t) \
|
||||
declare_integer_type(intx) \
|
||||
declare_integer_type(intptr_t) \
|
||||
declare_integer_type(int16_t) \
|
||||
declare_integer_type(int64_t) \
|
||||
declare_unsigned_integer_type(uintx) \
|
||||
declare_unsigned_integer_type(uintptr_t) \
|
||||
|
@ -25,10 +25,12 @@ package sun.jvm.hotspot.code;
import sun.jvm.hotspot.compiler.ImmutableOopMap;
import sun.jvm.hotspot.compiler.ImmutableOopMapSet;
import sun.jvm.hotspot.debugger.Address;
import sun.jvm.hotspot.oops.CIntField;
import sun.jvm.hotspot.runtime.VM;
import sun.jvm.hotspot.runtime.VMObject;
import sun.jvm.hotspot.types.AddressField;
import sun.jvm.hotspot.types.CIntegerField;
import sun.jvm.hotspot.types.JShortField;
import sun.jvm.hotspot.types.Type;
import sun.jvm.hotspot.types.TypeDataBase;
import sun.jvm.hotspot.utilities.Assert;
@ -41,11 +43,11 @@ import sun.jvm.hotspot.utilities.Observer;
public class CodeBlob extends VMObject {
private static AddressField nameField;
private static CIntegerField sizeField;
private static CIntegerField headerSizeField;
private static CIntegerField relocationSizeField;
private static CIntField headerSizeField;
private static CIntegerField contentOffsetField;
private static CIntegerField codeOffsetField;
private static CIntegerField frameCompleteOffsetField;
private static CIntField frameCompleteOffsetField;
private static CIntegerField dataOffsetField;
private static CIntegerField frameSizeField;
private static AddressField oopMapsField;
@ -61,11 +63,11 @@ public class CodeBlob extends VMObject {

nameField = type.getAddressField("_name");
sizeField = type.getCIntegerField("_size");
headerSizeField = type.getCIntegerField("_header_size");
relocationSizeField = type.getCIntegerField("_relocation_size");
headerSizeField = new CIntField(type.getCIntegerField("_header_size"), 0);
contentOffsetField = type.getCIntegerField("_content_offset");
codeOffsetField = type.getCIntegerField("_code_offset");
frameCompleteOffsetField = type.getCIntegerField("_frame_complete_offset");
frameCompleteOffsetField = new CIntField(type.getCIntegerField("_frame_complete_offset"), 0);
dataOffsetField = type.getCIntegerField("_data_offset");
frameSizeField = type.getCIntegerField("_frame_size");
oopMapsField = type.getAddressField("_oop_maps");
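Wrapping the narrowed fields in CIntField, as done above, lets the agent read them through a field object whose width comes from the VM type database instead of assuming a 4-byte C integer. A toy, self-contained illustration of that idea (hypothetical SizedIntField class, not the real sun.jvm.hotspot API):

// Sketch only: a toy "sized integer field" reader; the real SA CIntField/CIntegerField
// classes resolve size and signedness from the VM's type database instead.
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

class SizedIntField {
    private final int offset;
    private final int sizeInBytes;
    private final boolean isUnsigned;

    SizedIntField(int offset, int sizeInBytes, boolean isUnsigned) {
        this.offset = offset;
        this.sizeInBytes = sizeInBytes;
        this.isUnsigned = isUnsigned;
    }

    long getValue(ByteBuffer blobHeader) {
        switch (sizeInBytes) {
            case 2:
                short s = blobHeader.getShort(offset);
                return isUnsigned ? Short.toUnsignedInt(s) : s;
            case 4:
                int i = blobHeader.getInt(offset);
                return isUnsigned ? Integer.toUnsignedLong(i) : i;
            default:
                throw new IllegalArgumentException("unsupported size: " + sizeInBytes);
        }
    }
}

public class SizedFieldSketch {
    public static void main(String[] args) {
        ByteBuffer header = ByteBuffer.allocate(16).order(ByteOrder.LITTLE_ENDIAN);
        header.putShort(8, (short) 0x90);                 // pretend _header_size lives at +8 as u2
        SizedIntField headerSize = new SizedIntField(8, 2, true);
        System.out.println("_header_size = " + headerSize.getValue(header));
    }
}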
@ -103,7 +105,7 @@ public class CodeBlob extends VMObject {
// Offsets
public int getContentOffset() { return (int) contentOffsetField.getValue(addr); }

public int getCodeOffset() { return (int) codeOffsetField .getValue(addr); }
public int getCodeOffset() { return (int) codeOffsetField.getValue(addr); }

public long getFrameCompleteOffset() { return frameCompleteOffsetField.getValue(addr); }
@ -49,26 +49,24 @@ public class NMethod extends CodeBlob {
private static CIntegerField deoptMhHandlerOffsetField;
private static CIntegerField origPCOffsetField;
private static CIntegerField stubOffsetField;
private static CIntegerField oopsOffsetField;
private static CIntegerField metadataOffsetField;
private static CIntField metadataOffsetField;
private static CIntField dependenciesOffsetField;
private static CIntField scopesPCsOffsetField;
private static CIntegerField scopesDataOffsetField;
private static CIntegerField scopesPCsOffsetField;
private static CIntegerField dependenciesOffsetField;
private static CIntegerField handlerTableOffsetField;
private static CIntegerField nulChkTableOffsetField;
private static CIntegerField nmethodEndOffsetField;

/** Offsets for entry points */
/** Entry point with class check */
private static AddressField entryPointField;
private static CIntField entryOffsetField;
/** Entry point without class check */
private static AddressField verifiedEntryPointField;
private static CIntField verifiedEntryOffsetField;
/** Entry point for on stack replacement */
private static AddressField osrEntryPointField;

// FIXME: add access to flags (how?)

private static CIntegerField compLevelField;
private static CIntField compLevelField;

static {
VM.registerVMInitializedObserver(new Observer() {
@ -90,18 +88,16 @@ public class NMethod extends CodeBlob {
deoptMhHandlerOffsetField = type.getCIntegerField("_deopt_mh_handler_offset");
origPCOffsetField = type.getCIntegerField("_orig_pc_offset");
stubOffsetField = type.getCIntegerField("_stub_offset");
oopsOffsetField = type.getCIntegerField("_oops_offset");
metadataOffsetField = type.getCIntegerField("_metadata_offset");
metadataOffsetField = new CIntField(type.getCIntegerField("_metadata_offset"), 0);
dependenciesOffsetField = new CIntField(type.getCIntegerField("_dependencies_offset"), 0);
scopesPCsOffsetField = new CIntField(type.getCIntegerField("_scopes_pcs_offset"), 0);
scopesDataOffsetField = type.getCIntegerField("_scopes_data_offset");
scopesPCsOffsetField = type.getCIntegerField("_scopes_pcs_offset");
dependenciesOffsetField = type.getCIntegerField("_dependencies_offset");
handlerTableOffsetField = type.getCIntegerField("_handler_table_offset");
nulChkTableOffsetField = type.getCIntegerField("_nul_chk_table_offset");
nmethodEndOffsetField = type.getCIntegerField("_nmethod_end_offset");
entryPointField = type.getAddressField("_entry_point");
verifiedEntryPointField = type.getAddressField("_verified_entry_point");
entryOffsetField = new CIntField(type.getCIntegerField("_entry_offset"), 0);
verifiedEntryOffsetField = new CIntField(type.getCIntegerField("_verified_entry_offset"), 0);
osrEntryPointField = type.getAddressField("_osr_entry_point");
compLevelField = type.getCIntegerField("_comp_level");
compLevelField = new CIntField(type.getCIntegerField("_comp_level"), 0);
pcDescSize = db.lookupType("PcDesc").getSize();
}
@ -126,28 +122,28 @@ public class NMethod extends CodeBlob {

/** Boundaries for different parts */
public Address constantsBegin() { return contentBegin(); }
public Address constantsEnd() { return getEntryPoint(); }
public Address constantsEnd() { return codeBegin(); }
public Address instsBegin() { return codeBegin(); }
public Address instsEnd() { return headerBegin().addOffsetTo(getStubOffset()); }
public Address exceptionBegin() { return headerBegin().addOffsetTo(getExceptionOffset()); }
public Address deoptHandlerBegin() { return headerBegin().addOffsetTo(getDeoptHandlerOffset()); }
public Address deoptMhHandlerBegin() { return headerBegin().addOffsetTo(getDeoptMhHandlerOffset()); }
public Address stubBegin() { return headerBegin().addOffsetTo(getStubOffset()); }
public Address stubEnd() { return headerBegin().addOffsetTo(getOopsOffset()); }
public Address oopsBegin() { return headerBegin().addOffsetTo(getOopsOffset()); }
public Address oopsEnd() { return headerBegin().addOffsetTo(getMetadataOffset()); }
public Address metadataBegin() { return headerBegin().addOffsetTo(getMetadataOffset()); }
public Address metadataEnd() { return headerBegin().addOffsetTo(getScopesDataOffset()); }
public Address scopesDataBegin() { return headerBegin().addOffsetTo(getScopesDataOffset()); }
public Address scopesDataEnd() { return headerBegin().addOffsetTo(getScopesPCsOffset()); }
public Address scopesPCsBegin() { return headerBegin().addOffsetTo(getScopesPCsOffset()); }
public Address scopesPCsEnd() { return headerBegin().addOffsetTo(getDependenciesOffset()); }
public Address dependenciesBegin() { return headerBegin().addOffsetTo(getDependenciesOffset()); }
public Address dependenciesEnd() { return headerBegin().addOffsetTo(getHandlerTableOffset()); }
public Address handlerTableBegin() { return headerBegin().addOffsetTo(getHandlerTableOffset()); }
public Address handlerTableEnd() { return headerBegin().addOffsetTo(getNulChkTableOffset()); }
public Address nulChkTableBegin() { return headerBegin().addOffsetTo(getNulChkTableOffset()); }
public Address nulChkTableEnd() { return headerBegin().addOffsetTo(getNMethodEndOffset()); }
public Address stubEnd() { return dataBegin(); }
public Address oopsBegin() { return dataBegin(); }
public Address oopsEnd() { return dataBegin().addOffsetTo(getMetadataOffset()); }
public Address metadataBegin() { return dataBegin().addOffsetTo(getMetadataOffset()); }
public Address metadataEnd() { return dataBegin().addOffsetTo(getDependenciesOffset()); }
public Address dependenciesBegin() { return dataBegin().addOffsetTo(getDependenciesOffset()); }
public Address dependenciesEnd() { return dataBegin().addOffsetTo(getScopesDataOffset()); }
public Address scopesDataBegin() { return dataBegin().addOffsetTo(getScopesDataOffset()); }
public Address scopesDataEnd() { return dataBegin().addOffsetTo(getScopesPCsOffset()); }
public Address scopesPCsBegin() { return dataBegin().addOffsetTo(getScopesPCsOffset()); }
public Address scopesPCsEnd() { return dataBegin().addOffsetTo(getHandlerTableOffset()); }
public Address handlerTableBegin() { return dataBegin().addOffsetTo(getHandlerTableOffset()); }
public Address handlerTableEnd() { return dataBegin().addOffsetTo(getNulChkTableOffset()); }
public Address nulChkTableBegin() { return dataBegin().addOffsetTo(getNulChkTableOffset()); }
public Address nulChkTableEnd() { return dataEnd(); }

public int constantsSize() { return (int) constantsEnd() .minus(constantsBegin()); }
public int instsSize() { return (int) instsEnd() .minus(instsBegin()); }
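The rewritten boundary methods above locate the data-section parts (oops, metadata, dependencies, scopes data, scopes PCs, handler table, nul-chk table) by small offsets from dataBegin() rather than from headerBegin(), with each section's end doubling as the next section's begin. A self-contained sketch of that chaining (plain Java, hypothetical offsets, not SA code):

// Sketch only: consecutive section boundaries chain from one data base address.
public class NMethodLayoutSketch {
    public static void main(String[] args) {
        long dataBegin = 0x7f00_0000_1000L;   // hypothetical start of the nmethod data section
        int metadataOffset     = 0x040;       // oops occupy [dataBegin, metadataBegin)
        int dependenciesOffset = 0x080;
        int scopesDataOffset   = 0x0a0;
        int scopesPCsOffset    = 0x140;
        int handlerTableOffset = 0x1c0;

        long oopsBegin         = dataBegin;
        long metadataBegin     = dataBegin + metadataOffset;      // also oopsEnd
        long dependenciesBegin = dataBegin + dependenciesOffset;  // also metadataEnd
        long scopesDataBegin   = dataBegin + scopesDataOffset;    // also dependenciesEnd
        long scopesPCsBegin    = dataBegin + scopesPCsOffset;     // also scopesDataEnd
        long handlerTableBegin = dataBegin + handlerTableOffset;  // also scopesPCsEnd

        System.out.printf("oops: [0x%x, 0x%x)%n", oopsBegin, metadataBegin);
        System.out.printf("metadata: [0x%x, 0x%x)%n", metadataBegin, dependenciesBegin);
        System.out.printf("scopes data: [0x%x, 0x%x)%n", scopesDataBegin, scopesPCsBegin);
    }
}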
@ -187,8 +183,8 @@ public class NMethod extends CodeBlob {
public int getMetadataLength() { return (int) (metadataSize() / VM.getVM().getOopSize()); }

/** Entry points */
public Address getEntryPoint() { return entryPointField.getValue(addr); }
public Address getVerifiedEntryPoint() { return verifiedEntryPointField.getValue(addr); }
public Address getEntryPoint() { return codeBegin().addOffsetTo(getEntryPointOffset()); }
public Address getVerifiedEntryPoint() { return codeBegin().addOffsetTo(getVerifiedEntryPointOffset()); }

/** Support for oops in scopes and relocs. Note: index 0 is reserved for null. */
public OopHandle getOopAt(int index) {
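As the replacement getters above show, the two entry points are now derived from codeBegin() plus stored u2 offsets instead of being read as absolute 8-byte addresses. A minimal read-side sketch (plain Java, made-up values; the real agent reads the offsets via CIntField):

// Sketch only: reconstructs entry addresses from a code base plus u2 offsets.
public class EntryPointSketch {
    public static void main(String[] args) {
        long codeBegin = 0x7f00_0000_2000L;   // hypothetical start of the instructions section
        char entryOffset = 0x00;              // u2: entry point with class check
        char verifiedEntryOffset = 0x20;      // u2: entry point without class check

        long entryPoint = codeBegin + entryOffset;
        long verifiedEntryPoint = codeBegin + verifiedEntryOffset;

        System.out.printf("entry point: 0x%x%n", entryPoint);
        System.out.printf("verified entry point: 0x%x%n", verifiedEntryPoint);
    }
}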
@ -432,11 +428,11 @@ public class NMethod extends CodeBlob {
// FIXME: add isPatchableAt()

/** Support for code generation. Only here for proof-of-concept. */
public static int getEntryPointOffset() { return (int) entryPointField.getOffset(); }
public static int getVerifiedEntryPointOffset() { return (int) verifiedEntryPointField.getOffset(); }
public static int getOSREntryPointOffset() { return (int) osrEntryPointField.getOffset(); }
public static int getEntryBCIOffset() { return (int) entryBCIField.getOffset(); }
public static int getMethodOffset() { return (int) methodField.getOffset(); }
public int getEntryPointOffset() { return (int) entryOffsetField.getValue(addr); }
public int getVerifiedEntryPointOffset() { return (int) verifiedEntryOffsetField.getValue(addr);}
public static int getOSREntryPointOffset() { return (int) osrEntryPointField.getOffset(); }
public static int getEntryBCIOffset() { return (int) entryBCIField.getOffset(); }
public static int getMethodOffset() { return (int) methodField.getOffset(); }

public void print() {
printOn(System.out);
@ -517,13 +513,11 @@ public class NMethod extends CodeBlob {
private int getDeoptHandlerOffset() { return (int) deoptHandlerOffsetField .getValue(addr); }
private int getDeoptMhHandlerOffset() { return (int) deoptMhHandlerOffsetField.getValue(addr); }
private int getStubOffset() { return (int) stubOffsetField .getValue(addr); }
private int getOopsOffset() { return (int) oopsOffsetField .getValue(addr); }
private int getMetadataOffset() { return (int) metadataOffsetField .getValue(addr); }
private int getScopesDataOffset() { return (int) scopesDataOffsetField .getValue(addr); }
private int getScopesPCsOffset() { return (int) scopesPCsOffsetField .getValue(addr); }
private int getDependenciesOffset() { return (int) dependenciesOffsetField.getValue(addr); }
private int getHandlerTableOffset() { return (int) handlerTableOffsetField.getValue(addr); }
private int getNulChkTableOffset() { return (int) nulChkTableOffsetField .getValue(addr); }
private int getNMethodEndOffset() { return (int) nmethodEndOffsetField .getValue(addr); }
private int getCompLevel() { return (int) compLevelField .getValue(addr); }
}