8333226: Regressions 2-3% in Compress ZGC after 8331253
Reviewed-by: chagedorn, thartmann
parent b101dcb609
commit dce9703155
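
Note: taken together, the hunks below restore the pre-8331253 accounting of skipped instructions. The ZGC C2 load and store barrier stubs on the three affected platforms again open an Assembler::InlineSkippedInstructionsCounter scope, the cached _main_code_size/_inline_insts_size bookkeeping in CodeBuffer and nmethod is removed in favor of the original _skipped_instructions_size field, and ciMethod::inline_instructions_size() goes back to computing the inlining-relevant size on demand.
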
@@ -1141,6 +1141,7 @@ public:
 #define __ masm->
 
 void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, ZLoadBarrierStubC2* stub) const {
+  Assembler::InlineSkippedInstructionsCounter skipped_counter(masm);
   BLOCK_COMMENT("ZLoadBarrierStubC2");
 
   // Stub entry
@@ -1159,6 +1160,7 @@ void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, Z
 }
 
 void ZBarrierSetAssembler::generate_c2_store_barrier_stub(MacroAssembler* masm, ZStoreBarrierStubC2* stub) const {
+  Assembler::InlineSkippedInstructionsCounter skipped_counter(masm);
   BLOCK_COMMENT("ZStoreBarrierStubC2");
 
   // Stub entry
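
Note: the single line added in each hunk above (and mirrored in the other platform files below) wraps stub generation in an RAII counter. Bytes emitted while the counter is alive are recorded as "skipped" and later subtracted from the code size that C2's inlining heuristic sees, so cold GC-barrier stub code does not make a method look expensive to inline. A minimal standalone model of the pattern; the Assembler stand-in and its fields are illustrative, not HotSpot's actual API:

    #include <cstdio>

    // Stand-in for the assembler: tracks how many bytes have been
    // emitted and how many of them were flagged as "skipped".
    struct Assembler {
      int emitted = 0;  // total bytes emitted so far (models pc())
      int skipped = 0;  // bytes excluded from the inline-size heuristic
      void emit(int bytes) { emitted += bytes; }
    };

    // RAII counter: everything emitted while it is in scope is
    // registered as skipped when it goes out of scope.
    class InlineSkippedInstructionsCounter {
      Assembler* _assm;
      int _start;
     public:
      explicit InlineSkippedInstructionsCounter(Assembler* assm)
          : _assm(assm), _start(assm->emitted) {}
      ~InlineSkippedInstructionsCounter() {
        _assm->skipped += _assm->emitted - _start;
      }
    };

    int main() {
      Assembler masm;
      masm.emit(32);   // regular method code
      {
        InlineSkippedInstructionsCounter counter(&masm);
        masm.emit(48); // barrier stub code, counted as skipped on scope exit
      }
      masm.emit(16);   // more regular code
      // inline size = emitted - skipped = 96 - 48 = 48
      std::printf("emitted=%d skipped=%d inline=%d\n",
                  masm.emitted, masm.skipped, masm.emitted - masm.skipped);
      return 0;
    }
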
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021, 2024, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2021, 2023, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2021, 2023 SAP SE. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
@@ -887,6 +887,7 @@ class ZSetupArguments {
 #define __ masm->
 
 void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, ZLoadBarrierStubC2* stub) const {
+  Assembler::InlineSkippedInstructionsCounter skipped_counter(masm);
   __ block_comment("generate_c2_load_barrier_stub (zgc) {");
 
   __ bind(*stub->entry());
@@ -910,6 +911,7 @@ void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, Z
 }
 
 void ZBarrierSetAssembler::generate_c2_store_barrier_stub(MacroAssembler* masm, ZStoreBarrierStubC2* stub) const {
+  Assembler::InlineSkippedInstructionsCounter skipped_counter(masm);
   __ block_comment("ZStoreBarrierStubC2");
 
   // Stub entry

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2018, 2024, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2018, 2023, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -1213,6 +1213,7 @@ public:
 #define __ masm->
 
 void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, ZLoadBarrierStubC2* stub) const {
+  Assembler::InlineSkippedInstructionsCounter skipped_counter(masm);
   BLOCK_COMMENT("ZLoadBarrierStubC2");
 
   // Stub entry
@@ -1232,6 +1233,7 @@ void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, Z
 }
 
 void ZBarrierSetAssembler::generate_c2_store_barrier_stub(MacroAssembler* masm, ZStoreBarrierStubC2* stub) const {
+  Assembler::InlineSkippedInstructionsCounter skipped_counter(masm);
   BLOCK_COMMENT("ZStoreBarrierStubC2");
 
   // Stub entry

@@ -1010,8 +1010,6 @@ void CodeBuffer::log_section_sizes(const char* name) {
 }
 
 bool CodeBuffer::finalize_stubs() {
-  // Record size of code before we generate stubs in instructions section
-  _main_code_size = _insts.size();
   if (_finalize_stubs && !pd_finalize_stubs()) {
     // stub allocation failure
     return false;

@@ -427,9 +427,6 @@ class CodeBuffer: public StackObj DEBUG_ONLY(COMMA private Scrubber) {
   address _total_start; // first address of combined memory buffer
   csize_t _total_size;  // size in bytes of combined memory buffer
 
-  // Size of code without stubs generated at the end of instructions section
-  csize_t _main_code_size;
-
   OopRecorder* _oop_recorder;
 
   OopRecorder _default_oop_recorder; // override with initialize_oop_recorder
@@ -460,7 +457,6 @@ class CodeBuffer: public StackObj DEBUG_ONLY(COMMA private Scrubber) {
   _oop_recorder = nullptr;
   _overflow_arena = nullptr;
   _last_insn = nullptr;
-  _main_code_size = 0;
   _finalize_stubs = false;
   _shared_stub_to_interp_requests = nullptr;
   _shared_trampoline_requests = nullptr;
@@ -634,9 +630,6 @@ class CodeBuffer: public StackObj DEBUG_ONLY(COMMA private Scrubber) {
   // number of bytes remaining in the insts section
   csize_t insts_remaining() const { return _insts.remaining(); }
 
-  // size of code without stubs in instruction section
-  csize_t main_code_size() const { return _main_code_size; }
-
   // is a given address in the insts section? (2nd version is end-inclusive)
   bool insts_contains(address pc) const { return _insts.contains(pc); }
   bool insts_contains2(address pc) const { return _insts.contains2(pc); }
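
Note: the three CodeBuffer hunks above remove the other half of the reverted scheme: a snapshot of the instructions-section size taken in finalize_stubs() before pd_finalize_stubs() appends platform stubs, exposed as main_code_size(). A toy sketch of what that snapshot measured; types and sizes are invented for illustration:

    #include <cstdio>
    #include <vector>

    // Toy model of the deleted bookkeeping: stubs are appended to the
    // instructions section during finalization, and the size recorded
    // just before that ("main code size") excludes them.
    struct ToyCodeBuffer {
      std::vector<unsigned char> insts;
      int main_code_size = 0;  // the field this change removes

      void emit(int n) { insts.insert(insts.end(), n, 0x90); }

      bool finalize_stubs() {
        main_code_size = (int)insts.size();  // snapshot before stubs
        emit(24);                            // e.g. trampoline/shared stubs
        return true;
      }
    };

    int main() {
      ToyCodeBuffer cb;
      cb.emit(100);         // method instructions
      cb.finalize_stubs();  // appends 24 bytes of stubs
      // total=124, main=100: tail stubs excluded from "main" code
      std::printf("total=%zu main=%d\n", cb.insts.size(), cb.main_code_size);
      return 0;
    }
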
@@ -1126,14 +1126,13 @@ int ciMethod::code_size_for_inlining() {
 // not highly relevant to an inlined method. So we use the more
 // specific accessor nmethod::insts_size.
 // Also some instructions inside the code are excluded from inline
-// heuristic (e.g. post call nop instructions and GC barriers;
-// see InlineSkippedInstructionsCounter).
+// heuristic (e.g. post call nop instructions; see InlineSkippedInstructionsCounter)
 int ciMethod::inline_instructions_size() {
   if (_inline_instructions_size == -1) {
     GUARDED_VM_ENTRY(
       nmethod* code = get_Method()->code();
       if (code != nullptr && (code->comp_level() == CompLevel_full_optimization)) {
-        int isize = code->inline_insts_size();
+        int isize = code->insts_end() - code->verified_entry_point() - code->skipped_instructions_size();
         _inline_instructions_size = isize > 0 ? isize : 0;
       } else {
         _inline_instructions_size = 0;
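
Note: the restored line derives the inlining-relevant size straight from the nmethod layout: the distance from the verified entry point to the end of the instructions section, minus everything the emission-time counters registered as skipped (post-call nops and, with this change, GC barrier stubs again). A worked example with invented addresses:

    #include <cstdio>
    #include <cstdint>

    // Illustration of the restored formula (all values hypothetical):
    //   inline size = insts_end - verified_entry_point - skipped_instructions_size
    int main() {
      const int64_t insts_end            = 0x7f0000001400;  // end of instructions section
      const int64_t verified_entry_point = 0x7f0000001000;  // entry past the pre-checks
      const int     skipped              = 0x80;            // nops + barrier stub bytes
      int isize = (int)(insts_end - verified_entry_point) - skipped;
      if (isize < 0) isize = 0;  // clamped, as in ciMethod::inline_instructions_size
      std::printf("inline size = %d bytes\n", isize);  // 1024 - 128 = 896
      return 0;
    }
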
@@ -131,7 +131,6 @@ struct java_nmethod_stats_struct {
   uint relocation_size;
   uint consts_size;
   uint insts_size;
-  uint inline_insts_size;
   uint stub_size;
   uint oops_size;
   uint metadata_size;
@@ -152,7 +151,6 @@ struct java_nmethod_stats_struct {
   relocation_size += nm->relocation_size();
   consts_size += nm->consts_size();
   insts_size += nm->insts_size();
-  inline_insts_size += nm->inline_insts_size();
   stub_size += nm->stub_size();
   oops_size += nm->oops_size();
   metadata_size += nm->metadata_size();
@@ -187,9 +185,6 @@ struct java_nmethod_stats_struct {
   if (insts_size != 0) {
     tty->print_cr(" main code = %u (%f%%)", insts_size, (insts_size * 100.0f)/total_nm_size);
   }
-  if (inline_insts_size != 0) {
-    tty->print_cr(" inline code = %u (%f%%)", inline_insts_size, (inline_insts_size * 100.0f)/total_nm_size);
-  }
   if (stub_size != 0) {
     tty->print_cr(" stub code = %u (%f%%)", stub_size, (stub_size * 100.0f)/total_nm_size);
   }
@@ -1259,14 +1254,7 @@ void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
   CHECKED_CAST(_entry_offset, uint16_t, (offsets->value(CodeOffsets::Entry)));
   CHECKED_CAST(_verified_entry_offset, uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
 
-  int size = code_buffer->main_code_size();
-  assert(size >= 0, "should be initialized");
-  // Use instructions section size if it is 0 (e.g. native wrapper)
-  if (size == 0) size = code_size(); // requires _stub_offset to be set
-  assert(size <= code_size(), "incorrect size: %d > %d", size, code_size());
-  _inline_insts_size = size - _verified_entry_offset
-                       - code_buffer->total_skipped_instructions_size();
-  assert(_inline_insts_size >= 0, "sanity");
+  _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
 }
 
 // Post initialization
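
Note: with the hunk above, nmethod construction once again just captures the CodeBuffer's total skipped-byte count instead of eagerly computing and asserting a cached inline size; the subtraction now happens lazily in ciMethod::inline_instructions_size() (earlier hunk), where the value is actually consumed.
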
@@ -218,7 +218,7 @@ class nmethod : public CodeBlob {
 
   // _consts_offset == _content_offset because SECT_CONSTS is first in code buffer
 
-  int _inline_insts_size;
+  int _skipped_instructions_size;
 
   int _stub_offset;
 
@@ -590,7 +590,7 @@ public:
   int oops_count() const { assert(oops_size() % oopSize == 0, ""); return (oops_size() / oopSize) + 1; }
   int metadata_count() const { assert(metadata_size() % wordSize == 0, ""); return (metadata_size() / wordSize) + 1; }
 
-  int inline_insts_size() const { return _inline_insts_size; }
+  int skipped_instructions_size () const { return _skipped_instructions_size; }
   int total_size() const;
 
   // Containment

@@ -1323,9 +1323,8 @@ CodeBuffer* PhaseOutput::init_buffer() {
 
   int pad_req = NativeCall::instruction_size;
 
-  // GC barrier stubs are generated in code section
   BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
-  code_req += bs->estimate_stub_size();
+  stub_req += bs->estimate_stub_size();
 
   // nmethod and CodeBuffer count stubs & constants as part of method's code.
   // class HandlerImpl is platform-specific and defined in the *.ad files.
@@ -1334,9 +1333,9 @@ CodeBuffer* PhaseOutput::init_buffer() {
   stub_req += MAX_stubs_size; // ensure per-stub margin
   code_req += MAX_inst_size;  // ensure per-instruction margin
 
-  if (StressCodeBuffers) {
+  if (StressCodeBuffers)
     code_req = const_req = stub_req = exception_handler_req = deopt_handler_req = 0x10; // force expansion
-  }
 
   int total_req =
     const_req +
    code_req +
@@ -1345,10 +1344,9 @@ CodeBuffer* PhaseOutput::init_buffer() {
     exception_handler_req +
     deopt_handler_req; // deopt handler
 
-  if (C->has_method_handle_invokes()) {
-    total_req += deopt_handler_req; // deopt MH handler
-    stub_req += deopt_handler_req;
-  }
+  if (C->has_method_handle_invokes())
+    total_req += deopt_handler_req; // deopt MH handler
 
   CodeBuffer* cb = code_buffer();
   cb->initialize(total_req, _buf_sizes._reloc);
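
Note: both PhaseOutput::init_buffer() hunks adjust C2's code-buffer size budget: the GC barrier stub estimate moves back from the main-code budget (code_req) into the stub budget (stub_req), and the method-handle case again grows only total_req. A compressed sketch of the budgeting with invented numbers; the real init_buffer() sums more terms than the hunks show:

    #include <cstdio>

    int main() {
      // Invented stand-ins for the measured section sizes in init_buffer().
      int const_req = 64, code_req = 2048, stub_req = 256;
      int exception_handler_req = 32, deopt_handler_req = 32;

      int gc_stub_estimate = 128;    // bs->estimate_stub_size()
      stub_req += gc_stub_estimate;  // restored: budgeted with the other stubs
      // (the reverted change had added the estimate to code_req instead)

      int total_req = const_req + code_req + stub_req +
                      exception_handler_req + deopt_handler_req;

      bool has_method_handle_invokes = true;
      if (has_method_handle_invokes)
        total_req += deopt_handler_req;  // room for the extra deopt MH handler

      std::printf("total_req = %d bytes\n", total_req);  // 2560 + 32 = 2592
      return 0;
    }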