8280481: Duplicated stubs to interpreter for static calls

Reviewed-by: kvn, phh
This commit is contained in:
Evgeny Astigeevich 2022-07-05 20:50:02 +00:00 committed by Paul Hohensee
parent d48694d0f3
commit 351560414d
22 changed files with 486 additions and 33 deletions

View File

@ -3835,11 +3835,17 @@ encode %{
ciEnv::current()->record_failure("CodeCache is full");
return;
}
// Emit stub for static call
address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
if (stub == NULL) {
ciEnv::current()->record_failure("CodeCache is full");
return;
if (CodeBuffer::supports_shared_stubs() && _method->can_be_statically_bound()) {
// Calls of the same statically bound method can share
// a stub to the interpreter.
cbuf.shared_stub_to_interp_for(_method, cbuf.insts()->mark_off());
} else {
// Emit stub for static call
address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
if (stub == NULL) {
ciEnv::current()->record_failure("CodeCache is full");
return;
}
}
}

View File

@ -0,0 +1,31 @@
/*
* Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "asm/codeBuffer.inline.hpp"
#include "asm/macroAssembler.hpp"
// Platform hook invoked from CodeBuffer::finalize_stubs(): emits all queued
// shared to-interpreter stubs using the AArch64 MacroAssembler (default
// relocation format). Returns false if stub emission failed (a bailout is
// recorded when the code cache is full).
bool CodeBuffer::pd_finalize_stubs() {
  return emit_shared_stubs_to_interp<MacroAssembler>(this, _shared_stub_to_interp_requests);
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2002, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2002, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -28,8 +28,10 @@
private:
void pd_initialize() {}
bool pd_finalize_stubs();
public:
void flush_bundle(bool start_new_bundle) {}
static constexpr bool supports_shared_stubs() { return true; }
#endif // CPU_AARCH64_CODEBUFFER_AARCH64_HPP

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2008, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2008, 2022, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -27,8 +27,15 @@
private:
void pd_initialize() {}
// ARM does not support shared stubs (supports_shared_stubs() is false
// below), so stub finalization should never be requested; guard with
// Unimplemented() in case a request slips through.
bool pd_finalize_stubs() {
  if (_finalize_stubs) {
    Unimplemented();
  }
  return true;
}
public:
void flush_bundle(bool start_new_bundle) {}
static constexpr bool supports_shared_stubs() { return false; }
#endif // CPU_ARM_CODEBUFFER_ARM_HPP

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2002, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2002, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2012, 2013 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -28,8 +28,15 @@
private:
void pd_initialize() {}
// PPC does not support shared stubs (supports_shared_stubs() is false
// below), so stub finalization should never be requested; guard with
// Unimplemented() in case a request slips through.
bool pd_finalize_stubs() {
  if (_finalize_stubs) {
    Unimplemented();
  }
  return true;
}
public:
void flush_bundle(bool start_new_bundle) {}
static constexpr bool supports_shared_stubs() { return false; }
#endif // CPU_PPC_CODEBUFFER_PPC_HPP

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2002, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2002, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, Red Hat Inc. All rights reserved.
* Copyright (c) 2020, 2021, Huawei Technologies Co., Ltd. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@ -29,8 +29,15 @@
private:
void pd_initialize() {}
// RISC-V does not support shared stubs (supports_shared_stubs() is false
// below), so stub finalization should never be requested; guard with
// Unimplemented() in case a request slips through.
bool pd_finalize_stubs() {
  if (_finalize_stubs) {
    Unimplemented();
  }
  return true;
}
public:
void flush_bundle(bool start_new_bundle) {}
static constexpr bool supports_shared_stubs() { return false; }
#endif // CPU_RISCV_CODEBUFFER_RISCV_HPP

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -28,10 +28,17 @@
private:
void pd_initialize() {}
// s390 does not support shared stubs (supports_shared_stubs() is false
// below), so stub finalization should never be requested; guard with
// Unimplemented() in case a request slips through.
bool pd_finalize_stubs() {
  if (_finalize_stubs) {
    Unimplemented();
  }
  return true;
}
public:
void flush_bundle(bool start_new_bundle) {}
void getCpuData(const CodeBuffer * const cb) {}
static constexpr bool supports_shared_stubs() { return false; }
#endif // CPU_S390_CODEBUFFER_S390_HPP

View File

@ -0,0 +1,31 @@
/*
* Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "asm/codeBuffer.inline.hpp"
#include "asm/macroAssembler.hpp"
// Platform hook invoked from CodeBuffer::finalize_stubs(): emits all queued
// shared to-interpreter stubs. x86 passes Assembler::imm_operand as the
// relocation format for the static stub relocations. Returns false if stub
// emission failed (a bailout is recorded when the code cache is full).
bool CodeBuffer::pd_finalize_stubs() {
  return emit_shared_stubs_to_interp<MacroAssembler, Assembler::imm_operand>(this, _shared_stub_to_interp_requests);
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2002, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2002, 2022, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -27,8 +27,10 @@
private:
void pd_initialize() {}
bool pd_finalize_stubs();
public:
void flush_bundle(bool start_new_bundle) {}
static constexpr bool supports_shared_stubs() { return true; }
#endif // CPU_X86_CODEBUFFER_X86_HPP

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2022, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -55,10 +55,7 @@ address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark)
}
// Static stub relocation stores the instruction address of the call.
__ relocate(static_stub_Relocation::spec(mark), Assembler::imm_operand);
// Static stub relocation also tags the Method* in the code-stream.
__ mov_metadata(rbx, (Metadata*) NULL); // Method is zapped till fixup time.
// This is recognized as unresolved by relocs/nativeinst/ic code.
__ jump(RuntimeAddress(__ pc()));
__ emit_static_call_stub();
assert(__ pc() - base <= to_interp_stub_size(), "wrong stub size");

View File

@ -1328,6 +1328,13 @@ void MacroAssembler::ic_call(address entry, jint method_index) {
call(AddressLiteral(entry, rh));
}
// Emits the body of a (possibly shared) stub to the interpreter for a
// static call: a placeholder Method* load and a jump, both zapped until
// fixup time. Factored out so shared-stub finalization can reuse it.
void MacroAssembler::emit_static_call_stub() {
  // Static stub relocation also tags the Method* in the code-stream.
  mov_metadata(rbx, (Metadata*) NULL); // Method is zapped till fixup time.
  // This is recognized as unresolved by relocs/nativeinst/ic code.
  jump(RuntimeAddress(pc()));
}
// Implementation of call_VM versions
void MacroAssembler::call_VM(Register oop_result,

View File

@ -865,6 +865,8 @@ public:
// Emit the CompiledIC call idiom
void ic_call(address entry, jint method_index = 0);
void emit_static_call_stub();
// Jumps
// NOTE: these jumps transfer to the effective address of dst NOT

View File

@ -1814,11 +1814,18 @@ encode %{
emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
rspec, RELOC_DISP32);
__ post_call_nop();
// Emit stubs for static call.
address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
if (stub == NULL) {
ciEnv::current()->record_failure("CodeCache is full");
return;
address mark = cbuf.insts_mark();
if (CodeBuffer::supports_shared_stubs() && _method->can_be_statically_bound()) {
// Calls of the same statically bound method can share
// a stub to the interpreter.
cbuf.shared_stub_to_interp_for(_method, cbuf.insts()->mark_off());
} else {
// Emit stubs for static call.
address stub = CompiledStaticCall::emit_to_interp_stub(cbuf, mark);
if (stub == NULL) {
ciEnv::current()->record_failure("CodeCache is full");
return;
}
}
}
%}

View File

@ -2168,12 +2168,18 @@ encode %{
: static_call_Relocation::spec(method_index);
emit_d32_reloc(cbuf, (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4),
rspec, RELOC_DISP32);
// Emit stubs for static call.
address mark = cbuf.insts_mark();
address stub = CompiledStaticCall::emit_to_interp_stub(cbuf, mark);
if (stub == NULL) {
ciEnv::current()->record_failure("CodeCache is full");
return;
if (CodeBuffer::supports_shared_stubs() && _method->can_be_statically_bound()) {
// Calls of the same statically bound method can share
// a stub to the interpreter.
cbuf.shared_stub_to_interp_for(_method, cbuf.insts()->mark_off());
} else {
// Emit stubs for static call.
address stub = CompiledStaticCall::emit_to_interp_stub(cbuf, mark);
if (stub == NULL) {
ciEnv::current()->record_failure("CodeCache is full");
return;
}
}
}
_masm.clear_inst_mark();

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright 2007 Red Hat, Inc.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -28,5 +28,13 @@
private:
void pd_initialize() {}
// Zero does not support shared stubs (supports_shared_stubs() is false
// below), so stub finalization should never be requested; guard with
// Unimplemented() in case a request slips through.
bool pd_finalize_stubs() {
  if (_finalize_stubs) {
    Unimplemented();
  }
  return true;
}
public:
static constexpr bool supports_shared_stubs() { return false; }
#endif // CPU_ZERO_CODEBUFFER_ZERO_HPP

View File

@ -442,6 +442,7 @@ void CodeBuffer::compute_final_layout(CodeBuffer* dest) const {
address buf = dest->_total_start;
csize_t buf_offset = 0;
assert(dest->_total_size >= total_content_size(), "must be big enough");
assert(!_finalize_stubs, "non-finalized stubs");
{
// not sure why this is here, but why not...
@ -980,6 +981,22 @@ void CodeBuffer::log_section_sizes(const char* name) {
}
}
// Finalize any pending stubs (e.g. shared stubs to the interpreter) by
// delegating to the platform-specific emitter. The pending flag is cleared
// only on success; on failure the buffer stays marked as unfinalized.
void CodeBuffer::finalize_stubs() {
  if (pd_finalize_stubs()) {
    _finalize_stubs = false;
  }
}
// Record a request for a stub to the interpreter that can be shared among
// calls of the same statically bound method. The stub itself is emitted
// later, when finalize_stubs() runs.
void CodeBuffer::shared_stub_to_interp_for(ciMethod* callee, csize_t call_offset) {
  if (_shared_stub_to_interp_requests == NULL) {
    // Lazily allocate the request list on the first shared-stub request.
    _shared_stub_to_interp_requests = new SharedStubToInterpRequests(8);
  }
  _shared_stub_to_interp_requests->push(SharedStubToInterpRequest(callee, call_offset));
  _finalize_stubs = true;
}
#ifndef PRODUCT
void CodeBuffer::block_comment(ptrdiff_t offset, const char* comment) {
if (_collect_comments) {

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2022, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -30,6 +30,7 @@
#include "compiler/compiler_globals.hpp"
#include "utilities/align.hpp"
#include "utilities/debug.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/macros.hpp"
class PhaseCFG;
@ -37,6 +38,8 @@ class Compile;
class BufferBlob;
class CodeBuffer;
class Label;
class ciMethod;
class SharedStubToInterpRequest;
class CodeOffsets: public StackObj {
public:
@ -346,6 +349,8 @@ class Scrubber {
};
#endif // ASSERT
typedef GrowableArray<SharedStubToInterpRequest> SharedStubToInterpRequests;
// A CodeBuffer describes a memory space into which assembly
// code is generated. This memory space usually occupies the
// interior of a single BufferBlob, but in some cases it may be
@ -418,6 +423,9 @@ class CodeBuffer: public StackObj DEBUG_ONLY(COMMA private Scrubber) {
address _last_insn; // used to merge consecutive memory barriers, loads or stores.
SharedStubToInterpRequests* _shared_stub_to_interp_requests; // used to collect requests for shared interpreter stubs
bool _finalize_stubs; // Indicate if we need to finalize stubs to make CodeBuffer final.
#ifndef PRODUCT
AsmRemarks _asm_remarks;
DbgStrings _dbg_strings;
@ -435,6 +443,8 @@ class CodeBuffer: public StackObj DEBUG_ONLY(COMMA private Scrubber) {
_oop_recorder = NULL;
_overflow_arena = NULL;
_last_insn = NULL;
_finalize_stubs = false;
_shared_stub_to_interp_requests = NULL;
#ifndef PRODUCT
_decode_begin = NULL;
@ -686,6 +696,12 @@ class CodeBuffer: public StackObj DEBUG_ONLY(COMMA private Scrubber) {
// Log a little info about section usage in the CodeBuffer
void log_section_sizes(const char* name);
// Make a set of stubs final. It can create/optimize stubs.
void finalize_stubs();
// Request for a shared stub to the interpreter
void shared_stub_to_interp_for(ciMethod* callee, csize_t call_offset);
#ifndef PRODUCT
public:
// Printing / Decoding
@ -701,6 +717,23 @@ class CodeBuffer: public StackObj DEBUG_ONLY(COMMA private Scrubber) {
};
// A Java method can have calls of Java methods which can be statically bound.
// Calls of Java methods need stubs to the interpreter. Calls sharing the same Java method
// can share a stub to the interpreter.
// A SharedStubToInterpRequest is a request for a shared stub to the interpreter.
// A request for a shared stub to the interpreter: pairs the statically
// bound callee with the offset of one call site in the CodeBuffer.
class SharedStubToInterpRequest : public ResourceObj {
 private:
  ciMethod* _shared_method;          // The statically bound callee the stub will target
  CodeBuffer::csize_t _call_offset;  // The offset of the call in CodeBuffer

 public:
  // NOTE(review): the defaulted arguments look intended to allow
  // default-constructing elements inside GrowableArray — confirm.
  SharedStubToInterpRequest(ciMethod* method = NULL, CodeBuffer::csize_t call_offset = -1) : _shared_method(method),
      _call_offset(call_offset) {}

  ciMethod* shared_method() const { return _shared_method; }
  CodeBuffer::csize_t call_offset() const { return _call_offset; }
};
inline bool CodeSection::maybe_expand_to_ensure_remaining(csize_t amount) {
if (remaining() < amount) { _outer->expand(this, amount); return true; }
return false;

View File

@ -0,0 +1,67 @@
/*
* Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#ifndef SHARE_ASM_CODEBUFFER_INLINE_HPP
#define SHARE_ASM_CODEBUFFER_INLINE_HPP
#include "asm/codeBuffer.hpp"
#include "ci/ciEnv.hpp"
#include "code/compiledIC.hpp"
// Emits one stub to the interpreter per distinct shared method, binding
// every queued call site to its method's single stub via a
// static_stub_Relocation. MacroAssembler is the platform assembler type;
// relocate_format is the platform relocation format (e.g.
// Assembler::imm_operand on x86, default 0 elsewhere).
// Returns false (after recording a bailout) if the code cache is full.
template <typename MacroAssembler, int relocate_format = 0>
bool emit_shared_stubs_to_interp(CodeBuffer* cb, SharedStubToInterpRequests* shared_stub_to_interp_requests) {
  // No shared-stub requests were queued for this CodeBuffer.
  if (shared_stub_to_interp_requests == NULL) {
    return true;
  }
  // Sort by ciMethod pointer so requests for the same method are adjacent
  // and each group can share a single stub.
  auto by_shared_method = [](SharedStubToInterpRequest* r1, SharedStubToInterpRequest* r2) {
    if (r1->shared_method() < r2->shared_method()) {
      return -1;
    } else if (r1->shared_method() > r2->shared_method()) {
      return 1;
    } else {
      return 0;
    }
  };
  shared_stub_to_interp_requests->sort(by_shared_method);
  MacroAssembler masm(cb);
  for (int i = 0; i < shared_stub_to_interp_requests->length();) {
    address stub = masm.start_a_stub(CompiledStaticCall::to_interp_stub_size());
    if (stub == NULL) {
      ciEnv::current()->record_failure("CodeCache is full");
      return false;
    }
    ciMethod* method = shared_stub_to_interp_requests->at(i).shared_method();
    // One static_stub_Relocation per call site sharing this stub; each
    // records its caller pc so the stub can be located from the call.
    do {
      address caller_pc = cb->insts_begin() + shared_stub_to_interp_requests->at(i).call_offset();
      masm.relocate(static_stub_Relocation::spec(caller_pc), relocate_format);
      ++i;
    } while (i < shared_stub_to_interp_requests->length() && shared_stub_to_interp_requests->at(i).shared_method() == method);
    masm.emit_static_call_stub();
    masm.end_a_stub();
  }
  return true;
}
#endif // SHARE_ASM_CODEBUFFER_INLINE_HPP

View File

@ -449,15 +449,20 @@ void LIR_Assembler::emit_rtcall(LIR_OpRTCall* op) {
rt_call(op->result_opr(), op->addr(), op->arguments(), op->tmp(), op->info());
}
void LIR_Assembler::emit_call(LIR_OpJavaCall* op) {
verify_oop_map(op->info());
// must align calls sites, otherwise they can't be updated atomically
align_call(op->code());
// emit the static call stub stuff out of line
emit_static_call_stub();
if (CodeBuffer::supports_shared_stubs() && op->method()->can_be_statically_bound()) {
// Calls of the same statically bound method can share
// a stub to the interpreter.
CodeBuffer::csize_t call_offset = pc() - _masm->code()->insts_begin();
_masm->code()->shared_stub_to_interp_for(op->method(), call_offset);
} else {
emit_static_call_stub();
}
CHECK_BAILOUT();
switch (op->code()) {

View File

@ -1117,6 +1117,10 @@ void ciEnv::register_method(ciMethod* target,
}
#endif
if (!failing()) {
code_buffer->finalize_stubs();
}
if (failing()) {
// While not a true deoptimization, it is a preemptive decompile.
MethodData* mdo = method()->method_data();

View File

@ -2056,8 +2056,8 @@ const int ObjectAlignmentInBytes = 8;
"Mark all threads after a safepoint, and clear on a modify " \
"fence. Add cleanliness checks.") \
\
develop(bool, TraceOptimizedUpcallStubs, false, \
"Trace optimized upcall stub generation") \
develop(bool, TraceOptimizedUpcallStubs, false, \
"Trace optimized upcall stub generation") \
// end of RUNTIME_FLAGS

View File

@ -0,0 +1,200 @@
/*
* Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
/**
* @test SharedStubToInterpTest
* @summary Checks that stubs to the interpreter can be shared for static or final methods.
* @bug 8280481
* @library /test/lib
*
* @requires os.arch=="amd64" | os.arch=="x86_64" | os.arch=="i386" | os.arch=="x86" | os.arch=="aarch64"
*
* @run driver compiler.sharedstubs.SharedStubToInterpTest
*/
package compiler.sharedstubs;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import jdk.test.lib.process.OutputAnalyzer;
import jdk.test.lib.process.ProcessTools;
// Driver plus workloads: launches child JVMs that compile the nested test
// classes and inspects their printed assembly for shared static-call stubs.
public class SharedStubToInterpTest {
    // Enough iterations to trigger JIT compilation of the 'test' methods.
    private final static int ITERATIONS_TO_HEAT_LOOP = 20_000;

    // Runs one nested test class in a child JVM under the given compiler
    // selection flag and verifies its printed assembly (see checkOutput).
    private static void runTest(String compiler, String test) throws Exception {
        String testClassName = SharedStubToInterpTest.class.getName() + "$" + test;
        ArrayList<String> command = new ArrayList<String>();
        command.add(compiler);
        command.add("-XX:+UnlockDiagnosticVMOptions");
        command.add("-Xbatch");
        // Compile only the 'test' method and print its generated code.
        command.add("-XX:CompileCommand=compileonly," + testClassName + "::" + "test");
        command.add("-XX:CompileCommand=dontinline," + testClassName + "::" + "test");
        command.add("-XX:CompileCommand=print," + testClassName + "::" + "test");
        // Keep the callees interpreted and out-of-line so the compiled
        // 'test' needs stubs to the interpreter for them.
        command.add("-XX:CompileCommand=exclude," + testClassName + "::" + "log01");
        command.add("-XX:CompileCommand=dontinline," + testClassName + "::" + "log01");
        command.add("-XX:CompileCommand=exclude," + testClassName + "::" + "log02");
        command.add("-XX:CompileCommand=dontinline," + testClassName + "::" + "log02");
        command.add(testClassName);

        ProcessBuilder pb = ProcessTools.createJavaProcessBuilder(command);
        OutputAnalyzer analyzer = new OutputAnalyzer(pb.start());

        analyzer.shouldHaveExitValue(0);
        System.out.println(analyzer.getOutput());

        checkOutput(analyzer);
    }

    // Runs every nested test class under both C2 and C1.
    public static void main(String[] args) throws Exception {
        List<String> compilers = java.util.Arrays.asList("-XX:-TieredCompilation" /* C2 */,
            "-XX:TieredStopAtLevel=1" /* C1 */);
        List<String> tests = java.util.Arrays.asList("StaticMethodTest",
            "FinalClassTest", "FinalMethodTest");
        for (String compiler : compilers) {
            for (String test : tests) {
                runTest(compiler, test);
            }
        }
    }

    // Advances the iterator to the first line containing 'substring';
    // returns that line, or null if no such line exists.
    private static String skipTo(Iterator<String> iter, String substring) {
        while (iter.hasNext()) {
            String nextLine = iter.next();
            if (nextLine.contains(substring)) {
                return nextLine;
            }
        }
        return null;
    }

    // Scans the child JVM's output for the compiled 'test' method, checks
    // that no static stub appears before the deopt handler, and that the
    // expected number of (shared) stubs appears after it.
    private static void checkOutput(OutputAnalyzer output) {
        Iterator<String> iter = output.asLines().listIterator();

        // Locate the "Compiled method" banner belonging to 'test'.
        String match = skipTo(iter, "Compiled method");
        while (match != null && !match.contains("Test::test")) {
            match = skipTo(iter, "Compiled method");
        }
        if (match == null) {
            throw new RuntimeException("Missing compiler output for the method 'test'");
        }

        while (iter.hasNext()) {
            String nextLine = iter.next();
            if (nextLine.contains("{static_stub}")) {
                // Static stubs must be created at the end of the Stub section.
                throw new RuntimeException("Found {static_stub} before Deopt Handler Code");
            } else if (nextLine.contains("{runtime_call DeoptimizationBlob}")) {
                // Shared static stubs are put after Deopt Handler Code.
                break;
            }
        }
        int foundStaticStubs = 0;
        while (iter.hasNext()) {
            if (iter.next().contains("{static_stub}")) {
                foundStaticStubs += 1;
            }
        }
        // Two distinct callees (log01, log02) each called from two sites:
        // exactly two stubs are expected when call sites share stubs.
        final int expectedStaticStubs = 2;
        if (foundStaticStubs != expectedStaticStubs) {
            throw new RuntimeException("Found static stubs: " + foundStaticStubs + "; Expected static stubs: " + expectedStaticStubs);
        }
    }

    // Static callees: their calls can be statically bound.
    public static class StaticMethodTest {
        static void log01(int i) {
        }
        static void log02(int i) {
        }

        // Two call sites per callee; both sites should share one stub.
        static void test(int i) {
            if (i % 3 == 0) {
                log01(i);
                log02(i);
            } else {
                log01(i);
                log02(i);
            }
        }

        public static void main(String[] args) {
            for (int i = 1; i < ITERATIONS_TO_HEAT_LOOP; ++i) {
                test(i);
            }
        }
    }

    // Final class: its instance methods' calls can be statically bound.
    public static final class FinalClassTest {
        void log01(int i) {
        }
        void log02(int i) {
        }

        // Two call sites per callee; both sites should share one stub.
        void test(int i) {
            if (i % 3 == 0) {
                log01(i);
                log02(i);
            } else {
                log01(i);
                log02(i);
            }
        }

        public static void main(String[] args) {
            FinalClassTest tFC = new FinalClassTest();
            for (int i = 1; i < ITERATIONS_TO_HEAT_LOOP; ++i) {
                tFC.test(i);
            }
        }
    }

    // Final methods: their calls can be statically bound.
    public static class FinalMethodTest {
        final void log01(int i) {
        }
        final void log02(int i) {
        }

        // Two call sites per callee; both sites should share one stub.
        void test(int i) {
            if (i % 3 == 0) {
                log01(i);
                log02(i);
            } else {
                log01(i);
                log02(i);
            }
        }

        public static void main(String[] args) {
            FinalMethodTest tFM = new FinalMethodTest();
            for (int i = 1; i < ITERATIONS_TO_HEAT_LOOP; ++i) {
                tFM.test(i);
            }
        }
    }
}